Dataset schema (one record per commit; string-length ranges and class counts as reported by the dataset viewer):

| Column | Type | Values |
|---|---|---|
| commit | string | lengths 40-40 |
| old_file | string | lengths 4-118 |
| new_file | string | lengths 4-118 |
| old_contents | string | lengths 0-2.94k |
| new_contents | string | lengths 1-4.43k |
| subject | string | lengths 15-444 |
| message | string | lengths 16-3.45k |
| lang | string | 1 class |
| license | string | 13 classes |
| repos | string | lengths 5-43.2k |
| prompt | string | lengths 17-4.58k |
| response | string | lengths 1-4.43k |
| prompt_tagged | string | lengths 58-4.62k |
| response_tagged | string | lengths 1-4.43k |
| text | string | lengths 132-7.29k |
| text_tagged | string | lengths 173-7.33k |
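The records below follow this schema, one cell per `|`-delimited block. Here is a minimal sketch of loading and inspecting such a record with the Hugging Face `datasets` library; the dataset identifier is a hypothetical placeholder, not a confirmed Hub path, and the field names are taken from the schema above:

```python
# Minimal sketch: load the dataset and inspect one record.
# "someuser/python-commit-dataset" is a hypothetical placeholder ID.
from datasets import load_dataset

ds = load_dataset("someuser/python-commit-dataset", split="train")

row = ds[0]
print(row["commit"])                        # 40-character commit SHA
print(row["old_file"], "->", row["new_file"])
print(row["subject"])                       # first line of the commit message

# The *_tagged fields join message and code with sentinel tokens, e.g.
# <commit_before>{old_contents}<commit_msg>{message}<commit_after>{new_contents}
assert row["text_tagged"].startswith("<commit_before>")
```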
91bb95fa8e481ffd4b1e083cc607f2fb8ce9a1f2
|
scripts/remove_after_use/update_datacite_dois.py
|
scripts/remove_after_use/update_datacite_dois.py
|
"""
Script to send updates to Datacite for projects that were updated
while the DISABLE_DATACITE_DOIS switch was active.
Start date:
Dec 14, 2018 @ 10:09 PM EST = Dec 15, 2018 @ 03:09 UTC
End date:
Dec 15, 2018 @ 12:34 PM EST = Dec 15, 2018 @ 17:34 UTC
"""
import datetime
import logging
import pytz
import waffle
from website.app import setup_django
setup_django()
from osf import features
from osf.models import Node
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
START_DATE = datetime.datetime(2018, 12, 15, 3, 9, tzinfo=pytz.UTC)
END_DATE = datetime.datetime(2018, 12, 15, 17, 34, tzinfo=pytz.UTC)
def main():
assert not waffle.switch_is_active(features.DISABLE_DATACITE_DOIS)
nodes = Node.objects.filter(
identifiers__category='doi',
identifiers__deleted__isnull=True,
last_logged__gte=START_DATE,
last_logged__lte=END_DATE
)
logger.info('Sending {} nodes to Datacite'.format(nodes.count()))
for node in nodes:
logger.info('Sending {} to Datacite for update.'.format(node._id))
node.request_identifier_update(category='doi')
if __name__ == '__main__':
main()
|
Add script to send updated projects to Datacite.
|
Add script to send updated projects to Datacite.
[PLAT-1273]
|
Python
|
apache-2.0
|
adlius/osf.io,felliott/osf.io,mfraezz/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,felliott/osf.io,baylee-d/osf.io,mattclark/osf.io,baylee-d/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,mfraezz/osf.io,aaxelb/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,baylee-d/osf.io,cslzchen/osf.io,saradbowman/osf.io,mattclark/osf.io,pattisdr/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,saradbowman/osf.io,Johnetordoff/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,mfraezz/osf.io,adlius/osf.io,mfraezz/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,aaxelb/osf.io
|
Add script to send updated projects to Datacite.
[PLAT-1273]
|
"""
Script to send updates to Datacite for projects that were updated
while the DISABLE_DATACITE_DOIS switch was active.
Start date:
Dec 14, 2018 @ 10:09 PM EST = Dec 15, 2018 @ 03:09 UTC
End date:
Dec 15, 2018 @ 12:34 PM EST = Dec 15, 2018 @ 17:34 UTC
"""
import datetime
import logging
import pytz
import waffle
from website.app import setup_django
setup_django()
from osf import features
from osf.models import Node
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
START_DATE = datetime.datetime(2018, 12, 15, 3, 9, tzinfo=pytz.UTC)
END_DATE = datetime.datetime(2018, 12, 15, 17, 34, tzinfo=pytz.UTC)
def main():
assert not waffle.switch_is_active(features.DISABLE_DATACITE_DOIS)
nodes = Node.objects.filter(
identifiers__category='doi',
identifiers__deleted__isnull=True,
last_logged__gte=START_DATE,
last_logged__lte=END_DATE
)
logger.info('Sending {} nodes to Datacite'.format(nodes.count()))
for node in nodes:
logger.info('Sending {} to Datacite for update.'.format(node._id))
node.request_identifier_update(category='doi')
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to send updated projects to Datacite.
[PLAT-1273]<commit_after>
|
"""
Script to send updates to Datacite for projects that were updated
while the DISABLE_DATACITE_DOIS switch was active.
Start date:
Dec 14, 2018 @ 10:09 PM EST = Dec 15, 2018 @ 03:09 UTC
End date:
Dec 15, 2018 @ 12:34 PM EST = Dec 15, 2018 @ 17:34 UTC
"""
import datetime
import logging
import pytz
import waffle
from website.app import setup_django
setup_django()
from osf import features
from osf.models import Node
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
START_DATE = datetime.datetime(2018, 12, 15, 3, 9, tzinfo=pytz.UTC)
END_DATE = datetime.datetime(2018, 12, 15, 17, 34, tzinfo=pytz.UTC)
def main():
assert not waffle.switch_is_active(features.DISABLE_DATACITE_DOIS)
nodes = Node.objects.filter(
identifiers__category='doi',
identifiers__deleted__isnull=True,
last_logged__gte=START_DATE,
last_logged__lte=END_DATE
)
logger.info('Sending {} nodes to Datacite'.format(nodes.count()))
for node in nodes:
logger.info('Sending {} to Datacite for update.'.format(node._id))
node.request_identifier_update(category='doi')
if __name__ == '__main__':
main()
|
Add script to send updated projects to Datacite.
[PLAT-1273]"""
Script to send updates to Datacite for projects that were updated
while the DISABLE_DATACITE_DOIS switch was active.
Start date:
Dec 14, 2018 @ 10:09 PM EST = Dec 15, 2018 @ 03:09 UTC
End date:
Dec 15, 2018 @ 12:34 PM EST = Dec 15, 2018 @ 17:34 UTC
"""
import datetime
import logging
import pytz
import waffle
from website.app import setup_django
setup_django()
from osf import features
from osf.models import Node
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
START_DATE = datetime.datetime(2018, 12, 15, 3, 9, tzinfo=pytz.UTC)
END_DATE = datetime.datetime(2018, 12, 15, 17, 34, tzinfo=pytz.UTC)
def main():
assert not waffle.switch_is_active(features.DISABLE_DATACITE_DOIS)
nodes = Node.objects.filter(
identifiers__category='doi',
identifiers__deleted__isnull=True,
last_logged__gte=START_DATE,
last_logged__lte=END_DATE
)
logger.info('Sending {} nodes to Datacite'.format(nodes.count()))
for node in nodes:
logger.info('Sending {} to Datacite for update.'.format(node._id))
node.request_identifier_update(category='doi')
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to send updated projects to Datacite.
[PLAT-1273]<commit_after>"""
Script to send updates to Datacite for projects that were updated
while the DISABLE_DATACITE_DOIS switch was active.
Start date:
Dec 14, 2018 @ 10:09 PM EST = Dec 15, 2018 @ 03:09 UTC
End date:
Dec 15, 2018 @ 12:34 PM EST = Dec 15, 2018 @ 17:34 UTC
"""
import datetime
import logging
import pytz
import waffle
from website.app import setup_django
setup_django()
from osf import features
from osf.models import Node
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
START_DATE = datetime.datetime(2018, 12, 15, 3, 9, tzinfo=pytz.UTC)
END_DATE = datetime.datetime(2018, 12, 15, 17, 34, tzinfo=pytz.UTC)
def main():
assert not waffle.switch_is_active(features.DISABLE_DATACITE_DOIS)
nodes = Node.objects.filter(
identifiers__category='doi',
identifiers__deleted__isnull=True,
last_logged__gte=START_DATE,
last_logged__lte=END_DATE
)
logger.info('Sending {} nodes to Datacite'.format(nodes.count()))
for node in nodes:
logger.info('Sending {} to Datacite for update.'.format(node._id))
node.request_identifier_update(category='doi')
if __name__ == '__main__':
main()
|
|
1eb40936310828fe353ea4c4dd902fabea235f77
|
Python/112_PathSum.py
|
Python/112_PathSum.py
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def hasPathSum(self, root, sum):
"""
:type root: TreeNode
:type sum: int
:rtype: bool
"""
#if root is empty, return false
if not root:
return False
# if not empty (root has a value), then deduct root.val from sum,
# if sum==0, then the path exists
# else continue to check left child and right child.
        # If any of the children (OR) return True, then the tree has a path, else not.
sum -= root.val
if sum == 0 and not root.left and not root.right:
#it is a leaf and the sum of path equals what we want
return True
return self.hasPathSum(root.left,sum) or self.hasPathSum(root.right, sum)
|
Add solution for 112 Path Sum.
|
Add solution for 112 Path Sum.
|
Python
|
mit
|
comicxmz001/LeetCode,comicxmz001/LeetCode
|
Add solution for 112 Path Sum.
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def hasPathSum(self, root, sum):
"""
:type root: TreeNode
:type sum: int
:rtype: bool
"""
#if root is empty, return false
if not root:
return False
# if not empty (root has a value), then deduct root.val from sum,
# if sum==0, then the path exists
# else continue to check left child and right child.
        # If any of the children (OR) return True, then the tree has a path, else not.
sum -= root.val
if sum == 0 and not root.left and not root.right:
#it is a leaf and the sum of path equals what we want
return True
return self.hasPathSum(root.left,sum) or self.hasPathSum(root.right, sum)
|
<commit_before><commit_msg>Add solution for 112 Path Sum.<commit_after>
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def hasPathSum(self, root, sum):
"""
:type root: TreeNode
:type sum: int
:rtype: bool
"""
#if root is empty, return false
if not root:
return False
# if not empty (root has a value), then deduct root.val from sum,
# if sum==0, then the path exists
# else continue to check left child and right child.
        # If any of the children (OR) return True, then the tree has a path, else not.
sum -= root.val
if sum == 0 and not root.left and not root.right:
#it is a leaf and the sum of path equals what we want
return True
return self.hasPathSum(root.left,sum) or self.hasPathSum(root.right, sum)
|
Add solution for 112 Path Sum.# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def hasPathSum(self, root, sum):
"""
:type root: TreeNode
:type sum: int
:rtype: bool
"""
#if root is empty, return false
if not root:
return False
# if not empty (root has a value), then deduct root.val from sum,
# if sum==0, then the path exists
# else continue to check left child and right child.
        # If any of the children (OR) return True, then the tree has a path, else not.
sum -= root.val
if sum == 0 and not root.left and not root.right:
#it is a leaf and the sum of path equals what we want
return True
return self.hasPathSum(root.left,sum) or self.hasPathSum(root.right, sum)
|
<commit_before><commit_msg>Add solution for 112 Path Sum.<commit_after># Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def hasPathSum(self, root, sum):
"""
:type root: TreeNode
:type sum: int
:rtype: bool
"""
#if root is empty, return false
if not root:
return False
# if not empty (root has a value), then deduct root.val from sum,
# if sum==0, then the path exists
# else continue to check left child and right child.
        # If any of the children (OR) return True, then the tree has a path, else not.
sum -= root.val
if sum == 0 and not root.left and not root.right:
#it is a leaf and the sum of path equals what we want
return True
return self.hasPathSum(root.left,sum) or self.hasPathSum(root.right, sum)
|
|
115c62e6a9a9167633eff93ef0f1a355505a0e5d
|
tests/test_python33_bdist_egg.py
|
tests/test_python33_bdist_egg.py
|
import sys
import os
import tempfile
import unittest
import shutil
import copy
CURDIR = os.path.abspath(os.path.dirname(__file__))
TOPDIR = os.path.split(CURDIR)[0]
sys.path.insert(0, TOPDIR)
from distribute_setup import (use_setuptools, _build_egg, _python_cmd,
_do_download, _install, DEFAULT_URL,
DEFAULT_VERSION)
import distribute_setup
class TestPython33BdistEgg(unittest.TestCase):
def test_build_egg(self):
os.chdir(os.path.join(CURDIR, 'python3.3_bdist_egg_test'))
_python_cmd("setup.py", "bdist_egg")
if __name__ == '__main__':
unittest.main()
|
Add a test for Python 3.3 bdist_egg issue
|
Add a test for Python 3.3 bdist_egg issue
--HG--
branch : distribute
extra : rebase_source : e83ee69c3b15e4a75780811a7bb3612b8f7f54d1
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
Add a test for Python 3.3 bdist_egg issue
--HG--
branch : distribute
extra : rebase_source : e83ee69c3b15e4a75780811a7bb3612b8f7f54d1
|
import sys
import os
import tempfile
import unittest
import shutil
import copy
CURDIR = os.path.abspath(os.path.dirname(__file__))
TOPDIR = os.path.split(CURDIR)[0]
sys.path.insert(0, TOPDIR)
from distribute_setup import (use_setuptools, _build_egg, _python_cmd,
_do_download, _install, DEFAULT_URL,
DEFAULT_VERSION)
import distribute_setup
class TestPython33BdistEgg(unittest.TestCase):
def test_build_egg(self):
os.chdir(os.path.join(CURDIR, 'python3.3_bdist_egg_test'))
_python_cmd("setup.py", "bdist_egg")
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add a test for Python 3.3 bdist_egg issue
--HG--
branch : distribute
extra : rebase_source : e83ee69c3b15e4a75780811a7bb3612b8f7f54d1<commit_after>
|
import sys
import os
import tempfile
import unittest
import shutil
import copy
CURDIR = os.path.abspath(os.path.dirname(__file__))
TOPDIR = os.path.split(CURDIR)[0]
sys.path.insert(0, TOPDIR)
from distribute_setup import (use_setuptools, _build_egg, _python_cmd,
_do_download, _install, DEFAULT_URL,
DEFAULT_VERSION)
import distribute_setup
class TestPython33BdistEgg(unittest.TestCase):
def test_build_egg(self):
os.chdir(os.path.join(CURDIR, 'python3.3_bdist_egg_test'))
_python_cmd("setup.py", "bdist_egg")
if __name__ == '__main__':
unittest.main()
|
Add a test for Python 3.3 bdist_egg issue
--HG--
branch : distribute
extra : rebase_source : e83ee69c3b15e4a75780811a7bb3612b8f7f54d1import sys
import os
import tempfile
import unittest
import shutil
import copy
CURDIR = os.path.abspath(os.path.dirname(__file__))
TOPDIR = os.path.split(CURDIR)[0]
sys.path.insert(0, TOPDIR)
from distribute_setup import (use_setuptools, _build_egg, _python_cmd,
_do_download, _install, DEFAULT_URL,
DEFAULT_VERSION)
import distribute_setup
class TestPython33BdistEgg(unittest.TestCase):
def test_build_egg(self):
os.chdir(os.path.join(CURDIR, 'python3.3_bdist_egg_test'))
_python_cmd("setup.py", "bdist_egg")
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add a test for Python 3.3 bdist_egg issue
--HG--
branch : distribute
extra : rebase_source : e83ee69c3b15e4a75780811a7bb3612b8f7f54d1<commit_after>import sys
import os
import tempfile
import unittest
import shutil
import copy
CURDIR = os.path.abspath(os.path.dirname(__file__))
TOPDIR = os.path.split(CURDIR)[0]
sys.path.insert(0, TOPDIR)
from distribute_setup import (use_setuptools, _build_egg, _python_cmd,
_do_download, _install, DEFAULT_URL,
DEFAULT_VERSION)
import distribute_setup
class TestPython33BdistEgg(unittest.TestCase):
def test_build_egg(self):
os.chdir(os.path.join(CURDIR, 'python3.3_bdist_egg_test'))
_python_cmd("setup.py", "bdist_egg")
if __name__ == '__main__':
unittest.main()
|
|
962472ab094ec6a5d9bc70628ceb98c48cf803ac
|
add_liwc_entities.py
|
add_liwc_entities.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Add LIWC words as entities in FoLiA XML file.
Usage: python add_liwc_entities.py <file in>
"""
from lxml import etree
from bs4 import BeautifulSoup
from emotools import bs4_helpers
import argparse
import json
def add_entity(soup, sentence, cls, word_ids):
    # does the sentence already have an entities layer?
if sentence.find('entities'):
entities = sentence.entities
else:
entities = soup.new_tag('entities')
sentence.append(entities)
entity = soup.new_tag('entity')
entity['xml:id'] = 'entity_id'
entity['class'] = cls
for w in word_ids:
entity.append(soup.new_tag('wref', id=w))
entities.append(entity)
print entities
print '-----'
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('file_in', help='the name of the FoLiA XML file add ' \
'LIWC entities to')
args = parser.parse_args()
file_name = args.file_in
# Load liwc dict
with open('LIWC_Dutch_dictionary.dic', 'r') as f:
lines = f.readlines()
liwc_dict = {}
for line in lines:
# word
if line[0].isalpha():
entry = line.split()
term = entry[0]
categories = entry[1:]
liwc_dict[term] = categories
# Load document
#doc = folia.Document(file='medea-folia-no_events.xml')
with open(file_name, 'r') as f:
soup = BeautifulSoup(f, 'xml')
words = soup.find_all('w')
for word in words:
w = word.t.string
if w in liwc_dict.keys():
# posemo
if '13' in liwc_dict[w]:
add_entity(soup, word.parent, 'liwc-posemo', [word['xml:id']])
# negemo
if '16' in liwc_dict[w]:
add_entity(soup, word.parent, 'liwc-negemo', [word['xml:id']])
output_xml = soup.prettify("utf-8")
with open('test.xml', 'w') as file:
file.write(output_xml)
|
Add script for adding LIWC entities to FoLiA XML file
|
Add script for adding LIWC entities to FoLiA XML file
Added a script that checks whether a word in the FoLiA file also occurs
in the historic LIWC dictionary. If it does, and the word has certain
categories (at this time the script checks for posemo and negemo
words), an entity is added to the FoLiA XML file.
What needs to be added is: a function that generates an appropriate id
for entities, an entity-annotation tag, and a way to customize at run
time what entities will be added.
|
Python
|
apache-2.0
|
NLeSC/embodied-emotions-scripts,NLeSC/embodied-emotions-scripts
|
Add script for adding LIWC entities to FoLiA XML file
Added a script that checks whether a word in the FoLiA file also occurs
in the historic LIWC dictionary. If it does, and the word has certain
categories (at this time the script checks for posemo and negemo
words), an entity is added to the FoLiA XML file.
What needs to be added is: a function that generates an appropriate id
for entities, an entity-annotation tag, and a way to customize at run
time what entities will be added.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Add LIWC words as entities in FoLiA XML file.
Usage: python add_liwc_entities.py <file in>
"""
from lxml import etree
from bs4 import BeautifulSoup
from emotools import bs4_helpers
import argparse
import json
def add_entity(soup, sentence, cls, word_ids):
    # does the sentence already have an entities layer?
if sentence.find('entities'):
entities = sentence.entities
else:
entities = soup.new_tag('entities')
sentence.append(entities)
entity = soup.new_tag('entity')
entity['xml:id'] = 'entity_id'
entity['class'] = cls
for w in word_ids:
entity.append(soup.new_tag('wref', id=w))
entities.append(entity)
print entities
print '-----'
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('file_in', help='the name of the FoLiA XML file add ' \
'LIWC entities to')
args = parser.parse_args()
file_name = args.file_in
# Load liwc dict
with open('LIWC_Dutch_dictionary.dic', 'r') as f:
lines = f.readlines()
liwc_dict = {}
for line in lines:
# word
if line[0].isalpha():
entry = line.split()
term = entry[0]
categories = entry[1:]
liwc_dict[term] = categories
# Load document
#doc = folia.Document(file='medea-folia-no_events.xml')
with open(file_name, 'r') as f:
soup = BeautifulSoup(f, 'xml')
words = soup.find_all('w')
for word in words:
w = word.t.string
if w in liwc_dict.keys():
# posemo
if '13' in liwc_dict[w]:
add_entity(soup, word.parent, 'liwc-posemo', [word['xml:id']])
# negemo
if '16' in liwc_dict[w]:
add_entity(soup, word.parent, 'liwc-negemo', [word['xml:id']])
output_xml = soup.prettify("utf-8")
with open('test.xml', 'w') as file:
file.write(output_xml)
|
<commit_before><commit_msg>Add script for adding LIWC entities to FoLiA XML file
Added a script that checks whether a word in the FoLiA file also occurs
in the historic LIWC dictionary. If it does, and the word has certain
categories (at this time the script checks for posemo and negemo
words), an entity is added to the FoLiA XML file.
What needs to be added is: a function that generates an appropriate id
for entities, an entity-annotation tag, and a way to customize at run
time what entities will be added.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Add LIWC words as entities in FoLiA XML file.
Usage: python add_liwc_entities.py <file in>
"""
from lxml import etree
from bs4 import BeautifulSoup
from emotools import bs4_helpers
import argparse
import json
def add_entity(soup, sentence, cls, word_ids):
    # does the sentence already have an entities layer?
if sentence.find('entities'):
entities = sentence.entities
else:
entities = soup.new_tag('entities')
sentence.append(entities)
entity = soup.new_tag('entity')
entity['xml:id'] = 'entity_id'
entity['class'] = cls
for w in word_ids:
entity.append(soup.new_tag('wref', id=w))
entities.append(entity)
print entities
print '-----'
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('file_in', help='the name of the FoLiA XML file add ' \
'LIWC entities to')
args = parser.parse_args()
file_name = args.file_in
# Load liwc dict
with open('LIWC_Dutch_dictionary.dic', 'r') as f:
lines = f.readlines()
liwc_dict = {}
for line in lines:
# word
if line[0].isalpha():
entry = line.split()
term = entry[0]
categories = entry[1:]
liwc_dict[term] = categories
# Load document
#doc = folia.Document(file='medea-folia-no_events.xml')
with open(file_name, 'r') as f:
soup = BeautifulSoup(f, 'xml')
words = soup.find_all('w')
for word in words:
w = word.t.string
if w in liwc_dict.keys():
# posemo
if '13' in liwc_dict[w]:
add_entity(soup, word.parent, 'liwc-posemo', [word['xml:id']])
# negemo
if '16' in liwc_dict[w]:
add_entity(soup, word.parent, 'liwc-negemo', [word['xml:id']])
output_xml = soup.prettify("utf-8")
with open('test.xml', 'w') as file:
file.write(output_xml)
|
Add script for adding LIWC entities to FoLiA XML file
Added a script that checks whether a word in the FoLiA file also occurs
in the historic LIWC dictionary. If it does, and the word has certain
categories (at this time the script checks for posemo and negemo
words), an entity is added to the FoLiA XML file.
What needs to be added is: a function that generates an appropriate id
for entities, an entity-annotation tag, and a way to customize at run
time what entities will be added.#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Add LIWC words as entities in FoLiA XML file.
Usage: python add_liwc_entities.py <file in>
"""
from lxml import etree
from bs4 import BeautifulSoup
from emotools import bs4_helpers
import argparse
import json
def add_entity(soup, sentence, cls, word_ids):
    # does the sentence already have an entities layer?
if sentence.find('entities'):
entities = sentence.entities
else:
entities = soup.new_tag('entities')
sentence.append(entities)
entity = soup.new_tag('entity')
entity['xml:id'] = 'entity_id'
entity['class'] = cls
for w in word_ids:
entity.append(soup.new_tag('wref', id=w))
entities.append(entity)
print entities
print '-----'
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('file_in', help='the name of the FoLiA XML file add ' \
'LIWC entities to')
args = parser.parse_args()
file_name = args.file_in
# Load liwc dict
with open('LIWC_Dutch_dictionary.dic', 'r') as f:
lines = f.readlines()
liwc_dict = {}
for line in lines:
# word
if line[0].isalpha():
entry = line.split()
term = entry[0]
categories = entry[1:]
liwc_dict[term] = categories
# Load document
#doc = folia.Document(file='medea-folia-no_events.xml')
with open(file_name, 'r') as f:
soup = BeautifulSoup(f, 'xml')
words = soup.find_all('w')
for word in words:
w = word.t.string
if w in liwc_dict.keys():
# posemo
if '13' in liwc_dict[w]:
add_entity(soup, word.parent, 'liwc-posemo', [word['xml:id']])
# negemo
if '16' in liwc_dict[w]:
add_entity(soup, word.parent, 'liwc-negemo', [word['xml:id']])
output_xml = soup.prettify("utf-8")
with open('test.xml', 'w') as file:
file.write(output_xml)
|
<commit_before><commit_msg>Add script for adding LIWC entities to FoLiA XML file
Added a script that checks whether a word in the FoLiA file also occurs
in the historic LIWC dictionary. If it does, and the word has certain
categories (at this time the script checks for posemo and negemo
words), an entity is added to the FoLiA XML file.
What needs to be added is: a function that generates an appropriate id
for entities, an entity-annotation tag, and a way to customize at run
time what entities will be added.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Add LIWC words as entities in FoLiA XML file.
Usage: python add_liwc_entities.py <file in>
"""
from lxml import etree
from bs4 import BeautifulSoup
from emotools import bs4_helpers
import argparse
import json
def add_entity(soup, sentence, cls, word_ids):
    # does the sentence already have an entities layer?
if sentence.find('entities'):
entities = sentence.entities
else:
entities = soup.new_tag('entities')
sentence.append(entities)
entity = soup.new_tag('entity')
entity['xml:id'] = 'entity_id'
entity['class'] = cls
for w in word_ids:
entity.append(soup.new_tag('wref', id=w))
entities.append(entity)
print entities
print '-----'
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('file_in', help='the name of the FoLiA XML file add ' \
'LIWC entities to')
args = parser.parse_args()
file_name = args.file_in
# Load liwc dict
with open('LIWC_Dutch_dictionary.dic', 'r') as f:
lines = f.readlines()
liwc_dict = {}
for line in lines:
# word
if line[0].isalpha():
entry = line.split()
term = entry[0]
categories = entry[1:]
liwc_dict[term] = categories
# Load document
#doc = folia.Document(file='medea-folia-no_events.xml')
with open(file_name, 'r') as f:
soup = BeautifulSoup(f, 'xml')
words = soup.find_all('w')
for word in words:
w = word.t.string
if w in liwc_dict.keys():
# posemo
if '13' in liwc_dict[w]:
add_entity(soup, word.parent, 'liwc-posemo', [word['xml:id']])
# negemo
if '16' in liwc_dict[w]:
add_entity(soup, word.parent, 'liwc-negemo', [word['xml:id']])
output_xml = soup.prettify("utf-8")
with open('test.xml', 'w') as file:
file.write(output_xml)
|
|
e15d982ec3ef0dba52af67ce5b7b448eb8914767
|
benchmark_pencode.py
|
benchmark_pencode.py
|
#!/usr/bin/env python
import perf
from chopsticks.pencode import pencode, pdecode
def setup():
return [[
1000+i,
str(1000+i),
42,
42.0,
10121071034790721094712093712037123,
None,
True,
b'qwertyuiop',
u'qwertyuiop',
['q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p'],
('q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p'),
{'q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p'},
frozenset(['q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p']),
{'e': 101, 'i': 105, 'o': 111, 'q': 113, 'p': 112,
'r': 114, 'u': 117, 't': 116, 'w': 119, 'y': 121},
['q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p', i],
('q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p', i),
{'q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p', i},
frozenset(['q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p', i]),
{'e': 101, 'i': 105, 'o': 111, 'q': 113, 'p': 112,
'r': 114, 'u': 117, 't': 116, 'w': 119, 'y': 121, 'x': i},
] for i in range(1000)]
runner = perf.Runner()
if __name__ == '__main__':
v = setup()
assert pdecode(pencode(v)) == v
#pencode(v)
runner.timeit(
name='pencode',
stmt='pencode(v)',
globals={'v': v, 'pencode': pencode},
)
|
Add benchmark script for pencode.pencode()
|
Add benchmark script for pencode.pencode()
Baseline on a MBP A1389, i7-4750HQ CPU @ 2.00GHz, Ubuntu 17.10 x64,
Python 2.7.14, kernel 4.13.0-36-generic
pencode: Mean +- std dev: 95.8 ms +- 1.2 ms
|
Python
|
apache-2.0
|
lordmauve/chopsticks,lordmauve/chopsticks
|
Add benchmark script for pencode.pencode()
Baseline on a MBP A1389, i7-4750HQ CPU @ 2.00GHz, Ubuntu 17.10 x64,
Python 2.7.14, kernel 4.13.0-36-generic
pencode: Mean +- std dev: 95.8 ms +- 1.2 ms
|
#!/usr/bin/env python
import perf
from chopsticks.pencode import pencode, pdecode
def setup():
return [[
1000+i,
str(1000+i),
42,
42.0,
10121071034790721094712093712037123,
None,
True,
b'qwertyuiop',
u'qwertyuiop',
['q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p'],
('q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p'),
{'q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p'},
frozenset(['q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p']),
{'e': 101, 'i': 105, 'o': 111, 'q': 113, 'p': 112,
'r': 114, 'u': 117, 't': 116, 'w': 119, 'y': 121},
['q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p', i],
('q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p', i),
{'q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p', i},
frozenset(['q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p', i]),
{'e': 101, 'i': 105, 'o': 111, 'q': 113, 'p': 112,
'r': 114, 'u': 117, 't': 116, 'w': 119, 'y': 121, 'x': i},
] for i in range(1000)]
runner = perf.Runner()
if __name__ == '__main__':
v = setup()
assert pdecode(pencode(v)) == v
#pencode(v)
runner.timeit(
name='pencode',
stmt='pencode(v)',
globals={'v': v, 'pencode': pencode},
)
|
<commit_before><commit_msg>Add benchmark script for pencode.pencode()
Baseline on a MBP A1389, i7-4750HQ CPU @ 2.00GHz, Ubuntu 17.10 x64,
Python 2.7.14, kernel 4.13.0-36-generic
pencode: Mean +- std dev: 95.8 ms +- 1.2 ms<commit_after>
|
#!/usr/bin/env python
import perf
from chopsticks.pencode import pencode, pdecode
def setup():
return [[
1000+i,
str(1000+i),
42,
42.0,
10121071034790721094712093712037123,
None,
True,
b'qwertyuiop',
u'qwertyuiop',
['q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p'],
('q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p'),
{'q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p'},
frozenset(['q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p']),
{'e': 101, 'i': 105, 'o': 111, 'q': 113, 'p': 112,
'r': 114, 'u': 117, 't': 116, 'w': 119, 'y': 121},
['q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p', i],
('q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p', i),
{'q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p', i},
frozenset(['q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p', i]),
{'e': 101, 'i': 105, 'o': 111, 'q': 113, 'p': 112,
'r': 114, 'u': 117, 't': 116, 'w': 119, 'y': 121, 'x': i},
] for i in range(1000)]
runner = perf.Runner()
if __name__ == '__main__':
v = setup()
assert pdecode(pencode(v)) == v
#pencode(v)
runner.timeit(
name='pencode',
stmt='pencode(v)',
globals={'v': v, 'pencode': pencode},
)
|
Add benchmark script for pencode.pencode()
Baseline on a MBP A1389, i7-4750HQ CPU @ 2.00GHz, Ubuntu 17.10 x64,
Python 2.7.14, kernel 4.13.0-36-generic
pencode: Mean +- std dev: 95.8 ms +- 1.2 ms#!/usr/bin/env python
import perf
from chopsticks.pencode import pencode, pdecode
def setup():
return [[
1000+i,
str(1000+i),
42,
42.0,
10121071034790721094712093712037123,
None,
True,
b'qwertyuiop',
u'qwertyuiop',
['q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p'],
('q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p'),
{'q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p'},
frozenset(['q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p']),
{'e': 101, 'i': 105, 'o': 111, 'q': 113, 'p': 112,
'r': 114, 'u': 117, 't': 116, 'w': 119, 'y': 121},
['q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p', i],
('q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p', i),
{'q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p', i},
frozenset(['q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p', i]),
{'e': 101, 'i': 105, 'o': 111, 'q': 113, 'p': 112,
'r': 114, 'u': 117, 't': 116, 'w': 119, 'y': 121, 'x': i},
] for i in range(1000)]
runner = perf.Runner()
if __name__ == '__main__':
v = setup()
assert pdecode(pencode(v)) == v
#pencode(v)
runner.timeit(
name='pencode',
stmt='pencode(v)',
globals={'v': v, 'pencode': pencode},
)
|
<commit_before><commit_msg>Add benchmark script for pencode.pencode()
Baseline on a MBP A1389, i7-4750HQ CPU @ 2.00GHz, Ubuntu 17.10 x64,
Python 2.7.14, kernel 4.13.0-36-generic
pencode: Mean +- std dev: 95.8 ms +- 1.2 ms<commit_after>#!/usr/bin/env python
import perf
from chopsticks.pencode import pencode, pdecode
def setup():
return [[
1000+i,
str(1000+i),
42,
42.0,
10121071034790721094712093712037123,
None,
True,
b'qwertyuiop',
u'qwertyuiop',
['q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p'],
('q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p'),
{'q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p'},
frozenset(['q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p']),
{'e': 101, 'i': 105, 'o': 111, 'q': 113, 'p': 112,
'r': 114, 'u': 117, 't': 116, 'w': 119, 'y': 121},
['q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p', i],
('q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p', i),
{'q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p', i},
frozenset(['q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p', i]),
{'e': 101, 'i': 105, 'o': 111, 'q': 113, 'p': 112,
'r': 114, 'u': 117, 't': 116, 'w': 119, 'y': 121, 'x': i},
] for i in range(1000)]
runner = perf.Runner()
if __name__ == '__main__':
v = setup()
assert pdecode(pencode(v)) == v
#pencode(v)
runner.timeit(
name='pencode',
stmt='pencode(v)',
globals={'v': v, 'pencode': pencode},
)
|
|
f93c9592de5dfcc8968f06cbc692cbee455ebf47
|
lintcode/Medium/004_Ugly_Number_II.py
|
lintcode/Medium/004_Ugly_Number_II.py
|
class Solution:
"""
@param {int} n an integer.
@return {int} the nth prime number as description.
"""
def nthUglyNumber(self, n):
# write your code here
uglies = [1]
index0, index1, index2 = 0, 0, 0
while(len(uglies) < n):
nextNum = min(uglies[index0] * 2, uglies[index1] * 3, uglies[index2] * 5)
if (nextNum == uglies[index0] * 2):
index0 += 1
if (nextNum == uglies[index1] * 3):
index1 += 1
if (nextNum == uglies[index2] * 5):
index2 += 1
uglies.append(nextNum)
return uglies[-1]
|
Add solution to lintcode question 004
|
Add solution to lintcode question 004
|
Python
|
mit
|
Rhadow/leetcode,Rhadow/leetcode,Rhadow/leetcode,Rhadow/leetcode
|
Add solution to lintcode question 004
|
class Solution:
"""
@param {int} n an integer.
@return {int} the nth prime number as description.
"""
def nthUglyNumber(self, n):
# write your code here
uglies = [1]
index0, index1, index2 = 0, 0, 0
while(len(uglies) < n):
nextNum = min(uglies[index0] * 2, uglies[index1] * 3, uglies[index2] * 5)
if (nextNum == uglies[index0] * 2):
index0 += 1
if (nextNum == uglies[index1] * 3):
index1 += 1
if (nextNum == uglies[index2] * 5):
index2 += 1
uglies.append(nextNum)
return uglies[-1]
|
<commit_before><commit_msg>Add solution to lintcode question 004<commit_after>
|
class Solution:
"""
@param {int} n an integer.
@return {int} the nth prime number as description.
"""
def nthUglyNumber(self, n):
# write your code here
uglies = [1]
index0, index1, index2 = 0, 0, 0
while(len(uglies) < n):
nextNum = min(uglies[index0] * 2, uglies[index1] * 3, uglies[index2] * 5)
if (nextNum == uglies[index0] * 2):
index0 += 1
if (nextNum == uglies[index1] * 3):
index1 += 1
if (nextNum == uglies[index2] * 5):
index2 += 1
uglies.append(nextNum)
return uglies[-1]
|
Add solution to lintcode question 004class Solution:
"""
@param {int} n an integer.
@return {int} the nth prime number as description.
"""
def nthUglyNumber(self, n):
# write your code here
uglies = [1]
index0, index1, index2 = 0, 0, 0
while(len(uglies) < n):
nextNum = min(uglies[index0] * 2, uglies[index1] * 3, uglies[index2] * 5)
if (nextNum == uglies[index0] * 2):
index0 += 1
if (nextNum == uglies[index1] * 3):
index1 += 1
if (nextNum == uglies[index2] * 5):
index2 += 1
uglies.append(nextNum)
return uglies[-1]
|
<commit_before><commit_msg>Add solution to lintcode question 004<commit_after>class Solution:
"""
@param {int} n an integer.
@return {int} the nth prime number as description.
"""
def nthUglyNumber(self, n):
# write your code here
uglies = [1]
index0, index1, index2 = 0, 0, 0
while(len(uglies) < n):
nextNum = min(uglies[index0] * 2, uglies[index1] * 3, uglies[index2] * 5)
if (nextNum == uglies[index0] * 2):
index0 += 1
if (nextNum == uglies[index1] * 3):
index1 += 1
if (nextNum == uglies[index2] * 5):
index2 += 1
uglies.append(nextNum)
return uglies[-1]
|
|
b9ed7e58e54536d761fe5658e50dcbbd3d1b4d3f
|
py/arithmetic-slices.py
|
py/arithmetic-slices.py
|
class Solution(object):
def numberOfArithmeticSlices(self, A):
"""
:type A: List[int]
:rtype: int
"""
lA = len(A)
if lA < 3:
return 0
p, q = 0, 0
ans = 0
while p < lA - 2:
if A[p + 1] * 2 != A[p] + A[p + 2]:
p += 1
else:
q = p
while q < lA - 2 and A[q + 1] * 2 == A[q] + A[q + 2]:
q += 1
l = q + 2 - p
ans += (l - 1) * (l - 2) / 2
p = q
return ans
|
Add py solution for 413. Arithmetic Slices
|
Add py solution for 413. Arithmetic Slices
413. Arithmetic Slices: https://leetcode.com/problems/arithmetic-slices/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 413. Arithmetic Slices
413. Arithmetic Slices: https://leetcode.com/problems/arithmetic-slices/
|
class Solution(object):
def numberOfArithmeticSlices(self, A):
"""
:type A: List[int]
:rtype: int
"""
lA = len(A)
if lA < 3:
return 0
p, q = 0, 0
ans = 0
while p < lA - 2:
if A[p + 1] * 2 != A[p] + A[p + 2]:
p += 1
else:
q = p
while q < lA - 2 and A[q + 1] * 2 == A[q] + A[q + 2]:
q += 1
l = q + 2 - p
ans += (l - 1) * (l - 2) / 2
p = q
return ans
|
<commit_before><commit_msg>Add py solution for 413. Arithmetic Slices
413. Arithmetic Slices: https://leetcode.com/problems/arithmetic-slices/<commit_after>
|
class Solution(object):
def numberOfArithmeticSlices(self, A):
"""
:type A: List[int]
:rtype: int
"""
lA = len(A)
if lA < 3:
return 0
p, q = 0, 0
ans = 0
while p < lA - 2:
if A[p + 1] * 2 != A[p] + A[p + 2]:
p += 1
else:
q = p
while q < lA - 2 and A[q + 1] * 2 == A[q] + A[q + 2]:
q += 1
l = q + 2 - p
ans += (l - 1) * (l - 2) / 2
p = q
return ans
|
Add py solution for 413. Arithmetic Slices
413. Arithmetic Slices: https://leetcode.com/problems/arithmetic-slices/class Solution(object):
def numberOfArithmeticSlices(self, A):
"""
:type A: List[int]
:rtype: int
"""
lA = len(A)
if lA < 3:
return 0
p, q = 0, 0
ans = 0
while p < lA - 2:
if A[p + 1] * 2 != A[p] + A[p + 2]:
p += 1
else:
q = p
while q < lA - 2 and A[q + 1] * 2 == A[q] + A[q + 2]:
q += 1
l = q + 2 - p
ans += (l - 1) * (l - 2) / 2
p = q
return ans
|
<commit_before><commit_msg>Add py solution for 413. Arithmetic Slices
413. Arithmetic Slices: https://leetcode.com/problems/arithmetic-slices/<commit_after>class Solution(object):
def numberOfArithmeticSlices(self, A):
"""
:type A: List[int]
:rtype: int
"""
lA = len(A)
if lA < 3:
return 0
p, q = 0, 0
ans = 0
while p < lA - 2:
if A[p + 1] * 2 != A[p] + A[p + 2]:
p += 1
else:
q = p
while q < lA - 2 and A[q + 1] * 2 == A[q] + A[q + 2]:
q += 1
l = q + 2 - p
ans += (l - 1) * (l - 2) / 2
p = q
return ans
|
|
bfcccf63d0ce17bc91efec6aa66196f1353a1eb7
|
scripts/lowercase_log_nids.py
|
scripts/lowercase_log_nids.py
|
import sys
from framework.mongo import database as db
from framework.transactions.context import TokuTransaction
from website.app import init_app
def lowercase_nids():
for log in db.nodelog.find({'$or': [
{'params.node': {'$regex': '[A-Z]'}},
{'params.project': {'$regex': '[A-Z]'}},
{'params.registration': {'$regex': '[A-Z]'}},
{'__backrefs.logged.node.logs': {'$regex': '[A-Z]'}},
]}):
update = {}
if log.get('__backrefs', {}).get('logged', {}).get('node', {}).get('logs'):
update['__backrefs.logged.node.logs'] = [nid.lower() for nid in log['__backrefs']['logged']['node']['logs']]
if log['params'].get('node'):
update['params.node'] = log['params']['node'].lower()
if log['params'].get('project'):
update['params.project'] = log['params']['project'].lower()
if log['params'].get('registration'):
update['params.registration'] = log['params']['registration'].lower()
db.nodelog.update({'_id': log['_id']}, {'$set': update})
assert db.nodelog.find({'$or': [
{'params.node': {'$regex': '[A-Z]'}},
{'params.project': {'$regex': '[A-Z]'}},
{'params.registration': {'$regex': '[A-Z]'}},
{'__backrefs.logged.node.logs': {'$regex': '[A-Z]'}},
]}).count() == 0
def main():
init_app(routes=False)
dry_run = '--dry' in sys.argv
with TokuTransaction():
lowercase_nids()
if dry_run:
raise Exception('Dry run')
if __name__ == '__main__':
main()
|
Add a script to fix mixcased backrefs and params
|
Add a script to fix mixcased backrefs and params
|
Python
|
apache-2.0
|
SSJohns/osf.io,SSJohns/osf.io,icereval/osf.io,acshi/osf.io,leb2dg/osf.io,sloria/osf.io,samchrisinger/osf.io,caneruguz/osf.io,rdhyee/osf.io,laurenrevere/osf.io,RomanZWang/osf.io,emetsger/osf.io,RomanZWang/osf.io,pattisdr/osf.io,acshi/osf.io,amyshi188/osf.io,laurenrevere/osf.io,mfraezz/osf.io,mluo613/osf.io,mluke93/osf.io,caneruguz/osf.io,samchrisinger/osf.io,mluo613/osf.io,zachjanicki/osf.io,SSJohns/osf.io,mattclark/osf.io,amyshi188/osf.io,acshi/osf.io,DanielSBrown/osf.io,monikagrabowska/osf.io,RomanZWang/osf.io,icereval/osf.io,TomBaxter/osf.io,monikagrabowska/osf.io,CenterForOpenScience/osf.io,jnayak1/osf.io,kch8qx/osf.io,cslzchen/osf.io,acshi/osf.io,cwisecarver/osf.io,Nesiehr/osf.io,brianjgeiger/osf.io,baylee-d/osf.io,pattisdr/osf.io,caseyrollins/osf.io,abought/osf.io,mluke93/osf.io,zachjanicki/osf.io,caneruguz/osf.io,icereval/osf.io,mluo613/osf.io,alexschiller/osf.io,emetsger/osf.io,rdhyee/osf.io,felliott/osf.io,chrisseto/osf.io,felliott/osf.io,mluo613/osf.io,zachjanicki/osf.io,caseyrollins/osf.io,monikagrabowska/osf.io,leb2dg/osf.io,crcresearch/osf.io,chrisseto/osf.io,doublebits/osf.io,felliott/osf.io,kwierman/osf.io,rdhyee/osf.io,mattclark/osf.io,mluke93/osf.io,baylee-d/osf.io,brianjgeiger/osf.io,chennan47/osf.io,jnayak1/osf.io,DanielSBrown/osf.io,mluke93/osf.io,baylee-d/osf.io,cslzchen/osf.io,zamattiac/osf.io,HalcyonChimera/osf.io,TomHeatwole/osf.io,zamattiac/osf.io,binoculars/osf.io,samchrisinger/osf.io,erinspace/osf.io,monikagrabowska/osf.io,saradbowman/osf.io,adlius/osf.io,pattisdr/osf.io,caneruguz/osf.io,RomanZWang/osf.io,hmoco/osf.io,kch8qx/osf.io,binoculars/osf.io,cwisecarver/osf.io,hmoco/osf.io,alexschiller/osf.io,Johnetordoff/osf.io,zachjanicki/osf.io,leb2dg/osf.io,kwierman/osf.io,wearpants/osf.io,monikagrabowska/osf.io,cwisecarver/osf.io,DanielSBrown/osf.io,RomanZWang/osf.io,CenterForOpenScience/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,acshi/osf.io,jnayak1/osf.io,hmoco/osf.io,zamattiac/osf.io,cslzchen/osf.io,sloria/osf.io,sloria/osf.io,binoculars/osf.io,rdhyee/osf.io,erinspace/osf.io,samchrisinger/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,TomBaxter/osf.io,aaxelb/osf.io,HalcyonChimera/osf.io,erinspace/osf.io,TomHeatwole/osf.io,doublebits/osf.io,kwierman/osf.io,doublebits/osf.io,amyshi188/osf.io,abought/osf.io,amyshi188/osf.io,Nesiehr/osf.io,crcresearch/osf.io,chrisseto/osf.io,wearpants/osf.io,DanielSBrown/osf.io,zamattiac/osf.io,kch8qx/osf.io,kch8qx/osf.io,Nesiehr/osf.io,mluo613/osf.io,aaxelb/osf.io,HalcyonChimera/osf.io,crcresearch/osf.io,chrisseto/osf.io,Nesiehr/osf.io,wearpants/osf.io,laurenrevere/osf.io,saradbowman/osf.io,kch8qx/osf.io,cslzchen/osf.io,doublebits/osf.io,Johnetordoff/osf.io,emetsger/osf.io,cwisecarver/osf.io,mfraezz/osf.io,chennan47/osf.io,doublebits/osf.io,mattclark/osf.io,SSJohns/osf.io,emetsger/osf.io,alexschiller/osf.io,mfraezz/osf.io,TomBaxter/osf.io,adlius/osf.io,alexschiller/osf.io,abought/osf.io,adlius/osf.io,jnayak1/osf.io,abought/osf.io,adlius/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,TomHeatwole/osf.io,felliott/osf.io,kwierman/osf.io,brianjgeiger/osf.io,wearpants/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,hmoco/osf.io,TomHeatwole/osf.io,alexschiller/osf.io,aaxelb/osf.io,chennan47/osf.io
|
Add a script to fix mixcased backrefs and params
|
import sys
from framework.mongo import database as db
from framework.transactions.context import TokuTransaction
from website.app import init_app
def lowercase_nids():
for log in db.nodelog.find({'$or': [
{'params.node': {'$regex': '[A-Z]'}},
{'params.project': {'$regex': '[A-Z]'}},
{'params.registration': {'$regex': '[A-Z]'}},
{'__backrefs.logged.node.logs': {'$regex': '[A-Z]'}},
]}):
update = {}
if log.get('__backrefs', {}).get('logged', {}).get('node', {}).get('logs'):
update['__backrefs.logged.node.logs'] = [nid.lower() for nid in log['__backrefs']['logged']['node']['logs']]
if log['params'].get('node'):
update['params.node'] = log['params']['node'].lower()
if log['params'].get('project'):
update['params.project'] = log['params']['project'].lower()
if log['params'].get('registration'):
update['params.registration'] = log['params']['registration'].lower()
db.nodelog.update({'_id': log['_id']}, {'$set': update})
assert db.nodelog.find({'$or': [
{'params.node': {'$regex': '[A-Z]'}},
{'params.project': {'$regex': '[A-Z]'}},
{'params.registration': {'$regex': '[A-Z]'}},
{'__backrefs.logged.node.logs': {'$regex': '[A-Z]'}},
]}).count() == 0
def main():
init_app(routes=False)
dry_run = '--dry' in sys.argv
with TokuTransaction():
lowercase_nids()
if dry_run:
raise Exception('Dry run')
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a script to fix mixcased backrefs and params<commit_after>
|
import sys
from framework.mongo import database as db
from framework.transactions.context import TokuTransaction
from website.app import init_app
def lowercase_nids():
for log in db.nodelog.find({'$or': [
{'params.node': {'$regex': '[A-Z]'}},
{'params.project': {'$regex': '[A-Z]'}},
{'params.registration': {'$regex': '[A-Z]'}},
{'__backrefs.logged.node.logs': {'$regex': '[A-Z]'}},
]}):
update = {}
if log.get('__backrefs', {}).get('logged', {}).get('node', {}).get('logs'):
update['__backrefs.logged.node.logs'] = [nid.lower() for nid in log['__backrefs']['logged']['node']['logs']]
if log['params'].get('node'):
update['params.node'] = log['params']['node'].lower()
if log['params'].get('project'):
update['params.project'] = log['params']['project'].lower()
if log['params'].get('registration'):
update['params.registration'] = log['params']['registration'].lower()
db.nodelog.update({'_id': log['_id']}, {'$set': update})
assert db.nodelog.find({'$or': [
{'params.node': {'$regex': '[A-Z]'}},
{'params.project': {'$regex': '[A-Z]'}},
{'params.registration': {'$regex': '[A-Z]'}},
{'__backrefs.logged.node.logs': {'$regex': '[A-Z]'}},
]}).count() == 0
def main():
init_app(routes=False)
dry_run = '--dry' in sys.argv
with TokuTransaction():
lowercase_nids()
if dry_run:
raise Exception('Dry run')
if __name__ == '__main__':
main()
|
Add a script to fix mixcased backrefs and paramsimport sys
from framework.mongo import database as db
from framework.transactions.context import TokuTransaction
from website.app import init_app
def lowercase_nids():
for log in db.nodelog.find({'$or': [
{'params.node': {'$regex': '[A-Z]'}},
{'params.project': {'$regex': '[A-Z]'}},
{'params.registration': {'$regex': '[A-Z]'}},
{'__backrefs.logged.node.logs': {'$regex': '[A-Z]'}},
]}):
update = {}
if log.get('__backrefs', {}).get('logged', {}).get('node', {}).get('logs'):
update['__backrefs.logged.node.logs'] = [nid.lower() for nid in log['__backrefs']['logged']['node']['logs']]
if log['params'].get('node'):
update['params.node'] = log['params']['node'].lower()
if log['params'].get('project'):
update['params.project'] = log['params']['project'].lower()
if log['params'].get('registration'):
update['params.registration'] = log['params']['registration'].lower()
db.nodelog.update({'_id': log['_id']}, {'$set': update})
assert db.nodelog.find({'$or': [
{'params.node': {'$regex': '[A-Z]'}},
{'params.project': {'$regex': '[A-Z]'}},
{'params.registration': {'$regex': '[A-Z]'}},
{'__backrefs.logged.node.logs': {'$regex': '[A-Z]'}},
]}).count() == 0
def main():
init_app(routes=False)
dry_run = '--dry' in sys.argv
with TokuTransaction():
lowercase_nids()
if dry_run:
raise Exception('Dry run')
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a script to fix mixcased backrefs and params<commit_after>import sys
from framework.mongo import database as db
from framework.transactions.context import TokuTransaction
from website.app import init_app
def lowercase_nids():
for log in db.nodelog.find({'$or': [
{'params.node': {'$regex': '[A-Z]'}},
{'params.project': {'$regex': '[A-Z]'}},
{'params.registration': {'$regex': '[A-Z]'}},
{'__backrefs.logged.node.logs': {'$regex': '[A-Z]'}},
]}):
update = {}
if log.get('__backrefs', {}).get('logged', {}).get('node', {}).get('logs'):
update['__backrefs.logged.node.logs'] = [nid.lower() for nid in log['__backrefs']['logged']['node']['logs']]
if log['params'].get('node'):
update['params.node'] = log['params']['node'].lower()
if log['params'].get('project'):
update['params.project'] = log['params']['project'].lower()
if log['params'].get('registration'):
update['params.registration'] = log['params']['registration'].lower()
db.nodelog.update({'_id': log['_id']}, {'$set': update})
assert db.nodelog.find({'$or': [
{'params.node': {'$regex': '[A-Z]'}},
{'params.project': {'$regex': '[A-Z]'}},
{'params.registration': {'$regex': '[A-Z]'}},
{'__backrefs.logged.node.logs': {'$regex': '[A-Z]'}},
]}).count() == 0
def main():
init_app(routes=False)
dry_run = '--dry' in sys.argv
with TokuTransaction():
lowercase_nids()
if dry_run:
raise Exception('Dry run')
if __name__ == '__main__':
main()
|
|
84b984fc96dbea18cb3272d4ac9f8185c7df1d3b
|
froide/document/migrations/0006_auto_20180522_0114.py
|
froide/document/migrations/0006_auto_20180522_0114.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.12 on 2018-05-21 23:14
from __future__ import unicode_literals
from django.db import migrations, models
import froide.document.models
import froide.helper.storage
import functools
class Migration(migrations.Migration):
dependencies = [
('document', '0005_auto_20180521_2048'),
]
operations = [
migrations.AlterField(
model_name='document',
name='pdf_file',
field=models.FileField(blank=True, max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=froide.document.models.get_document_path),
),
migrations.AlterField(
model_name='page',
name='image',
field=models.ImageField(max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=functools.partial(froide.document.models.get_page_filename, *(), **{'size': 'original'})),
),
migrations.AlterField(
model_name='page',
name='image_large',
field=models.ImageField(max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=functools.partial(froide.document.models.get_page_filename, *(), **{'size': 'large'})),
),
migrations.AlterField(
model_name='page',
name='image_normal',
field=models.ImageField(max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=functools.partial(froide.document.models.get_page_filename, *(), **{'size': 'normal'})),
),
migrations.AlterField(
model_name='page',
name='image_small',
field=models.ImageField(max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=functools.partial(froide.document.models.get_page_filename, *(), **{'size': 'small'})),
),
]
|
Add storage to document image fields
|
Add storage to document image fields
|
Python
|
mit
|
fin/froide,fin/froide,stefanw/froide,stefanw/froide,stefanw/froide,stefanw/froide,fin/froide,fin/froide,stefanw/froide
|
Add storage to document image fields
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.12 on 2018-05-21 23:14
from __future__ import unicode_literals
from django.db import migrations, models
import froide.document.models
import froide.helper.storage
import functools
class Migration(migrations.Migration):
dependencies = [
('document', '0005_auto_20180521_2048'),
]
operations = [
migrations.AlterField(
model_name='document',
name='pdf_file',
field=models.FileField(blank=True, max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=froide.document.models.get_document_path),
),
migrations.AlterField(
model_name='page',
name='image',
field=models.ImageField(max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=functools.partial(froide.document.models.get_page_filename, *(), **{'size': 'original'})),
),
migrations.AlterField(
model_name='page',
name='image_large',
field=models.ImageField(max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=functools.partial(froide.document.models.get_page_filename, *(), **{'size': 'large'})),
),
migrations.AlterField(
model_name='page',
name='image_normal',
field=models.ImageField(max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=functools.partial(froide.document.models.get_page_filename, *(), **{'size': 'normal'})),
),
migrations.AlterField(
model_name='page',
name='image_small',
field=models.ImageField(max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=functools.partial(froide.document.models.get_page_filename, *(), **{'size': 'small'})),
),
]
|
<commit_before><commit_msg>Add storage to document image fields<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.12 on 2018-05-21 23:14
from __future__ import unicode_literals
from django.db import migrations, models
import froide.document.models
import froide.helper.storage
import functools
class Migration(migrations.Migration):
dependencies = [
('document', '0005_auto_20180521_2048'),
]
operations = [
migrations.AlterField(
model_name='document',
name='pdf_file',
field=models.FileField(blank=True, max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=froide.document.models.get_document_path),
),
migrations.AlterField(
model_name='page',
name='image',
field=models.ImageField(max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=functools.partial(froide.document.models.get_page_filename, *(), **{'size': 'original'})),
),
migrations.AlterField(
model_name='page',
name='image_large',
field=models.ImageField(max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=functools.partial(froide.document.models.get_page_filename, *(), **{'size': 'large'})),
),
migrations.AlterField(
model_name='page',
name='image_normal',
field=models.ImageField(max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=functools.partial(froide.document.models.get_page_filename, *(), **{'size': 'normal'})),
),
migrations.AlterField(
model_name='page',
name='image_small',
field=models.ImageField(max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=functools.partial(froide.document.models.get_page_filename, *(), **{'size': 'small'})),
),
]
|
Add storage to document image fields# -*- coding: utf-8 -*-
# Generated by Django 1.11.12 on 2018-05-21 23:14
from __future__ import unicode_literals
from django.db import migrations, models
import froide.document.models
import froide.helper.storage
import functools
class Migration(migrations.Migration):
dependencies = [
('document', '0005_auto_20180521_2048'),
]
operations = [
migrations.AlterField(
model_name='document',
name='pdf_file',
field=models.FileField(blank=True, max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=froide.document.models.get_document_path),
),
migrations.AlterField(
model_name='page',
name='image',
field=models.ImageField(max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=functools.partial(froide.document.models.get_page_filename, *(), **{'size': 'original'})),
),
migrations.AlterField(
model_name='page',
name='image_large',
field=models.ImageField(max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=functools.partial(froide.document.models.get_page_filename, *(), **{'size': 'large'})),
),
migrations.AlterField(
model_name='page',
name='image_normal',
field=models.ImageField(max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=functools.partial(froide.document.models.get_page_filename, *(), **{'size': 'normal'})),
),
migrations.AlterField(
model_name='page',
name='image_small',
field=models.ImageField(max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=functools.partial(froide.document.models.get_page_filename, *(), **{'size': 'small'})),
),
]
|
<commit_before><commit_msg>Add storage to document image fields<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.12 on 2018-05-21 23:14
from __future__ import unicode_literals
from django.db import migrations, models
import froide.document.models
import froide.helper.storage
import functools
class Migration(migrations.Migration):
dependencies = [
('document', '0005_auto_20180521_2048'),
]
operations = [
migrations.AlterField(
model_name='document',
name='pdf_file',
field=models.FileField(blank=True, max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=froide.document.models.get_document_path),
),
migrations.AlterField(
model_name='page',
name='image',
field=models.ImageField(max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=functools.partial(froide.document.models.get_page_filename, *(), **{'size': 'original'})),
),
migrations.AlterField(
model_name='page',
name='image_large',
field=models.ImageField(max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=functools.partial(froide.document.models.get_page_filename, *(), **{'size': 'large'})),
),
migrations.AlterField(
model_name='page',
name='image_normal',
field=models.ImageField(max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=functools.partial(froide.document.models.get_page_filename, *(), **{'size': 'normal'})),
),
migrations.AlterField(
model_name='page',
name='image_small',
field=models.ImageField(max_length=255, storage=froide.helper.storage.OverwriteStorage(), upload_to=functools.partial(froide.document.models.get_page_filename, *(), **{'size': 'small'})),
),
]
|
|
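The migration above captures a pattern worth noting: a single upload_to callable parameterised per image size through functools.partial, combined with an overwriting storage backend. On the model side that corresponds to fields along these lines (a sketch only — get_page_filename and OverwriteStorage are froide's own helpers, and the path layout below is an illustrative assumption, not the project's):

import functools
from django.db import models

def get_page_filename(instance, filename, size='original'):
    # Hypothetical layout: one file per page and per size.
    return 'docs/%s/page-%s-%s.png' % (instance.document_id, instance.number, size)

class Page(models.Model):
    number = models.IntegerField()
    image_small = models.ImageField(
        max_length=255,
        upload_to=functools.partial(get_page_filename, size='small'))

Django calls upload_to(instance, filename); the partial forwards both arguments while pinning size — which is also why it serialises into the migration as functools.partial(get_page_filename, *(), **{'size': 'small'}).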
3e7a99967c68d8d5f516889f62cec6a9e2de66aa
|
category_test.py
|
category_test.py
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# Copyright 2015 Pascual Martinez-Gomez
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from lxml import etree
from nltk.sem.logic import Expression
from category import Category
from ccg2lambda_tools import (assign_semantics_to_ccg, type_raise, build_ccg_tree)
from logic_parser import lexpr
from semantic_index import (SemanticRule, SemanticIndex,
get_attributes_from_ccg_node_recursively, find_node_by_id)
class CategoryTestCase(unittest.TestCase):
def test_category_matches(self):
cat1 = Category('N')
cat2 = Category('N')
self.assertTrue(cat1.match(cat2))
def test_category_no_matches(self):
cat1 = Category('N')
cat2 = Category('X')
self.assertFalse(cat1.match(cat2))
def test_category_feat_equal_matches(self):
cat1 = Category('N[dcl=true]')
cat2 = Category('N[dcl=true]')
self.assertTrue(cat1.match(cat2))
def test_category_feat_diff_no_matches(self):
cat1 = Category('N[dcl=true]')
cat2 = Category('N[dcl=false]')
self.assertFalse(cat1.match(cat2))
def test_category_feat_disjoint_no_matches(self):
cat1 = Category('N[dcl=true]')
cat2 = Category('N[pss=true]')
self.assertFalse(cat1.match(cat2))
def test_category_nofeat_feat_matches(self):
cat1 = Category('N')
cat2 = Category('N[dcl=true]')
self.assertTrue(cat1.match(cat2))
def test_category_nofeat_feat_no_matches(self):
cat1 = Category('N[dcl=true]')
cat2 = Category('N')
self.assertFalse(cat1.match(cat2))
if __name__ == '__main__':
suite1 = unittest.TestLoader().loadTestsFromTestCase(CategoryTestCase)
suites = unittest.TestSuite([suite1])
unittest.TextTestRunner(verbosity=2).run(suites)
|
Add test for category class
|
Add test for category class
|
Python
|
apache-2.0
|
mynlp/ccg2lambda,mynlp/ccg2lambda,mynlp/ccg2lambda
|
Add test for category class
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# Copyright 2015 Pascual Martinez-Gomez
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from lxml import etree
from nltk.sem.logic import Expression
from category import Category
from ccg2lambda_tools import (assign_semantics_to_ccg, type_raise, build_ccg_tree)
from logic_parser import lexpr
from semantic_index import (SemanticRule, SemanticIndex,
get_attributes_from_ccg_node_recursively, find_node_by_id)
class CategoryTestCase(unittest.TestCase):
def test_category_matches(self):
cat1 = Category('N')
cat2 = Category('N')
self.assertTrue(cat1.match(cat2))
def test_category_no_matches(self):
cat1 = Category('N')
cat2 = Category('X')
self.assertFalse(cat1.match(cat2))
def test_category_feat_equal_matches(self):
cat1 = Category('N[dcl=true]')
cat2 = Category('N[dcl=true]')
self.assertTrue(cat1.match(cat2))
def test_category_feat_diff_no_matches(self):
cat1 = Category('N[dcl=true]')
cat2 = Category('N[dcl=false]')
self.assertFalse(cat1.match(cat2))
def test_category_feat_disjoint_no_matches(self):
cat1 = Category('N[dcl=true]')
cat2 = Category('N[pss=true]')
self.assertFalse(cat1.match(cat2))
def test_category_nofeat_feat_matches(self):
cat1 = Category('N')
cat2 = Category('N[dcl=true]')
self.assertTrue(cat1.match(cat2))
def test_category_nofeat_feat_no_matches(self):
cat1 = Category('N[dcl=true]')
cat2 = Category('N')
self.assertFalse(cat1.match(cat2))
if __name__ == '__main__':
suite1 = unittest.TestLoader().loadTestsFromTestCase(CategoryTestCase)
suites = unittest.TestSuite([suite1])
unittest.TextTestRunner(verbosity=2).run(suites)
|
<commit_before><commit_msg>Add test for category class<commit_after>
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# Copyright 2015 Pascual Martinez-Gomez
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from lxml import etree
from nltk.sem.logic import Expression
from category import Category
from ccg2lambda_tools import (assign_semantics_to_ccg, type_raise, build_ccg_tree)
from logic_parser import lexpr
from semantic_index import (SemanticRule, SemanticIndex,
get_attributes_from_ccg_node_recursively, find_node_by_id)
class CategoryTestCase(unittest.TestCase):
def test_category_matches(self):
cat1 = Category('N')
cat2 = Category('N')
self.assertTrue(cat1.match(cat2))
def test_category_no_matches(self):
cat1 = Category('N')
cat2 = Category('X')
self.assertFalse(cat1.match(cat2))
def test_category_feat_equal_matches(self):
cat1 = Category('N[dcl=true]')
cat2 = Category('N[dcl=true]')
self.assertTrue(cat1.match(cat2))
def test_category_feat_diff_no_matches(self):
cat1 = Category('N[dcl=true]')
cat2 = Category('N[dcl=false]')
self.assertFalse(cat1.match(cat2))
def test_category_feat_disjoint_no_matches(self):
cat1 = Category('N[dcl=true]')
cat2 = Category('N[pss=true]')
self.assertFalse(cat1.match(cat2))
def test_category_nofeat_feat_matches(self):
cat1 = Category('N')
cat2 = Category('N[dcl=true]')
self.assertTrue(cat1.match(cat2))
def test_category_nofeat_feat_no_matches(self):
cat1 = Category('N[dcl=true]')
cat2 = Category('N')
self.assertFalse(cat1.match(cat2))
if __name__ == '__main__':
suite1 = unittest.TestLoader().loadTestsFromTestCase(CategoryTestCase)
suites = unittest.TestSuite([suite1])
unittest.TextTestRunner(verbosity=2).run(suites)
|
Add test for category class
#!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# Copyright 2015 Pascual Martinez-Gomez
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from lxml import etree
from nltk.sem.logic import Expression
from category import Category
from ccg2lambda_tools import (assign_semantics_to_ccg, type_raise, build_ccg_tree)
from logic_parser import lexpr
from semantic_index import (SemanticRule, SemanticIndex,
get_attributes_from_ccg_node_recursively, find_node_by_id)
class CategoryTestCase(unittest.TestCase):
def test_category_matches(self):
cat1 = Category('N')
cat2 = Category('N')
self.assertTrue(cat1.match(cat2))
def test_category_no_matches(self):
cat1 = Category('N')
cat2 = Category('X')
self.assertFalse(cat1.match(cat2))
def test_category_feat_equal_matches(self):
cat1 = Category('N[dcl=true]')
cat2 = Category('N[dcl=true]')
self.assertTrue(cat1.match(cat2))
def test_category_feat_diff_no_matches(self):
cat1 = Category('N[dcl=true]')
cat2 = Category('N[dcl=false]')
self.assertFalse(cat1.match(cat2))
def test_category_feat_disjoint_no_matches(self):
cat1 = Category('N[dcl=true]')
cat2 = Category('N[pss=true]')
self.assertFalse(cat1.match(cat2))
def test_category_nofeat_feat_matches(self):
cat1 = Category('N')
cat2 = Category('N[dcl=true]')
self.assertTrue(cat1.match(cat2))
def test_category_nofeat_feat_no_matches(self):
cat1 = Category('N[dcl=true]')
cat2 = Category('N')
self.assertFalse(cat1.match(cat2))
if __name__ == '__main__':
suite1 = unittest.TestLoader().loadTestsFromTestCase(CategoryTestCase)
suites = unittest.TestSuite([suite1])
unittest.TextTestRunner(verbosity=2).run(suites)
|
<commit_before><commit_msg>Add test for category class<commit_after>#!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# Copyright 2015 Pascual Martinez-Gomez
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from lxml import etree
from nltk.sem.logic import Expression
from category import Category
from ccg2lambda_tools import (assign_semantics_to_ccg, type_raise, build_ccg_tree)
from logic_parser import lexpr
from semantic_index import (SemanticRule, SemanticIndex,
get_attributes_from_ccg_node_recursively, find_node_by_id)
class CategoryTestCase(unittest.TestCase):
def test_category_matches(self):
cat1 = Category('N')
cat2 = Category('N')
self.assertTrue(cat1.match(cat2))
def test_category_no_matches(self):
cat1 = Category('N')
cat2 = Category('X')
self.assertFalse(cat1.match(cat2))
def test_category_feat_equal_matches(self):
cat1 = Category('N[dcl=true]')
cat2 = Category('N[dcl=true]')
self.assertTrue(cat1.match(cat2))
def test_category_feat_diff_no_matches(self):
cat1 = Category('N[dcl=true]')
cat2 = Category('N[dcl=false]')
self.assertFalse(cat1.match(cat2))
def test_category_feat_disjoint_no_matches(self):
cat1 = Category('N[dcl=true]')
cat2 = Category('N[pss=true]')
self.assertFalse(cat1.match(cat2))
def test_category_nofeat_feat_matches(self):
cat1 = Category('N')
cat2 = Category('N[dcl=true]')
self.assertTrue(cat1.match(cat2))
def test_category_nofeat_feat_no_matches(self):
cat1 = Category('N[dcl=true]')
cat2 = Category('N')
self.assertFalse(cat1.match(cat2))
if __name__ == '__main__':
suite1 = unittest.TestLoader().loadTestsFromTestCase(CategoryTestCase)
suites = unittest.TestSuite([suite1])
unittest.TextTestRunner(verbosity=2).run(suites)
|
|
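Taken together, the assertions in this record pin down Category.match's semantics: the bases must be equal, and every feature carried by the receiver must appear with the same value on the argument — so bare N matches N[dcl=true] but not the reverse, and differing or disjoint features fail. A minimal implementation consistent with just these tests might look like this (a sketch that handles only atomic categories such as N[dcl=true]; ccg2lambda's real Category class is richer):

import re

class Category(object):
    def __init__(self, cat_str):
        m = re.match(r'([^\[]+)(?:\[(.*)\])?$', cat_str)
        self.base = m.group(1)
        feats = m.group(2)
        # 'dcl=true,pss=false' -> {'dcl': 'true', 'pss': 'false'}
        self.features = dict(
            f.split('=') for f in feats.split(',')) if feats else {}

    def match(self, other):
        if self.base != other.base:
            return False
        # Every feature we carry must be present on `other` with the same value.
        return all(other.features.get(k) == v
                   for k, v in self.features.items())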
362d339d39c9e5303cd8f5a99c475c7a68fe1324
|
app/process_tweets.py
|
app/process_tweets.py
|
# -*- coding: utf-8 -*-
from string import punctuation
from test import _writeJSON, _readJSON
tweetData = _readJSON('var/tweet_test.json')
# Punctuation to be removed.
mySymbols = punctuation.replace(u'#', u'').replace(u'@', u'')
wordsDict = {}
for t in tweetData:
# case?
# apostrophes in words? ' vs ’?
# Split by spaces and new line characters.
words = t['text'].split(u' ')
for w in words:
print w
cleanW = w.replace('\n', '')
try:
for p in mySymbols:
if p in cleanW:
cleanW = cleanW.replace(p, '')
if cleanW.lower() not in (u'and', u'not', u'or', u'in') and \
not cleanW.lower().startswith(u'http'):
if cleanW not in wordsDict:
wordsDict.update({cleanW:1})
else:
wordsDict[cleanW] +=1
except UnicodeEncodeError:
print 'unicode'
print w
raise
print [cleanW]
print
# America\u2019s ?
# this is what happens when printing a unicode string in a list or set
# Trump’s
#[u'Trump\u2019s']
# it can't be forced to string or get ascii error.
# however it's decoded fine when printing.
print wordsDict
print
for x in set(wordsDict):
print x
# We can't easily get phrases from tweets to match up with trending topics
# But we can search for presence of a topic in a user's tweets,
# after removing punctuation depending on the rule for the trending phrase,
# or do a regex match.
#print dir(set(wordsDict))
#['__and__', '__class__', '__cmp__', '__contains__', '__delattr__', '__doc__', '__eq__', '__format__', '__ge__', '__getattribute__', '__gt__', '__hash__', '__iand__', '__init__', '__ior__', '__isub__', '__iter__', '__ixor__', '__le__', '__len__', '__lt__', '__ne__', '__new__', '__or__', '__rand__', '__reduce__', '__reduce_ex__', '__repr__', '__ror__', '__rsub__', '__rxor__', '__setattr__', '__sizeof__', '__str__', '__sub__', '__subclasshook__', '__xor__', 'add', 'clear', 'copy', 'difference', 'difference_update', 'discard', 'intersection', 'intersection_update', 'isdisjoint', 'issubset', 'issuperset', 'pop', 'remove', 'symmetric_difference', 'symmetric_difference_update', 'union', 'update']
|
Extend test for processing words of tweets.
|
Extend test for processing words of tweets.
|
Python
|
mit
|
MichaelCurrin/twitterverse,MichaelCurrin/twitterverse
|
Extend test for processing words of tweets.
|
# -*- coding: utf-8 -*-
from string import punctuation
from test import _writeJSON, _readJSON
tweetData = _readJSON('var/tweet_test.json')
# Punctuation to be removed.
mySymbols = punctuation.replace(u'#', u'').replace(u'@', u'')
wordsDict = {}
for t in tweetData:
# case?
# apostrophes in words? ' vs ’?
# Split by spaces and new line characters.
words = t['text'].split(u' ')
for w in words:
print w
cleanW = w.replace('\n', '')
try:
for p in mySymbols:
if p in cleanW:
cleanW = cleanW.replace(p, '')
if cleanW.lower() not in (u'and', u'not', u'or', u'in') and \
not cleanW.lower().startswith(u'http'):
if cleanW not in wordsDict:
wordsDict.update({cleanW:1})
else:
wordsDict[cleanW] +=1
except UnicodeEncodeError:
print 'unicode'
print w
raise
print [cleanW]
print
# America\u2019s ?
# this is what happens when printing a unicode string in a list or set
# Trump’s
#[u'Trump\u2019s']
# it can't be forced to string or get ascii error.
# however it's decoded fine when printing.
print wordsDict
print
for x in set(wordsDict):
print x
# We can't easily get phrases from tweets to match up with trending topics
# But we can search for presence of a topic in a user's tweets,
# after removing punctuation depending on the rule for the trending phrase,
# or do a regex match.
#print dir(set(wordsDict))
#['__and__', '__class__', '__cmp__', '__contains__', '__delattr__', '__doc__', '__eq__', '__format__', '__ge__', '__getattribute__', '__gt__', '__hash__', '__iand__', '__init__', '__ior__', '__isub__', '__iter__', '__ixor__', '__le__', '__len__', '__lt__', '__ne__', '__new__', '__or__', '__rand__', '__reduce__', '__reduce_ex__', '__repr__', '__ror__', '__rsub__', '__rxor__', '__setattr__', '__sizeof__', '__str__', '__sub__', '__subclasshook__', '__xor__', 'add', 'clear', 'copy', 'difference', 'difference_update', 'discard', 'intersection', 'intersection_update', 'isdisjoint', 'issubset', 'issuperset', 'pop', 'remove', 'symmetric_difference', 'symmetric_difference_update', 'union', 'update']
|
<commit_before><commit_msg>Extend test for processing words of tweets.<commit_after>
|
# -*- coding: utf-8 -*-
from string import punctuation
from test import _writeJSON, _readJSON
tweetData = _readJSON('var/tweet_test.json')
# Punctuation to be removed.
mySymbols = punctuation.replace(u'#', u'').replace(u'@', u'')
wordsDict = {}
for t in tweetData:
# case?
# apostrophes in words? ' vs ’?
# Split by spaces and new line characters.
words = t['text'].split(u' ')
for w in words:
print w
cleanW = w.replace('\n', '')
try:
for p in mySymbols:
if p in cleanW:
cleanW = cleanW.replace(p, '')
if cleanW.lower() not in (u'and', u'not', u'or', u'in') and \
not cleanW.lower().startswith(u'http'):
if cleanW not in wordsDict:
wordsDict.update({cleanW:1})
else:
wordsDict[cleanW] +=1
except UnicodeEncodeError:
print 'unicode'
print w
raise
print [cleanW]
print
# America\u2019s ?
# this is what happens when printing a unicode string in a list or set
# Trump’s
#[u'Trump\u2019s']
# it can't be forced to string or get ascii error.
# however it's decoded fine when printing.
print wordsDict
print
for x in set(wordsDict):
print x
# We can't easily get phrases from tweets to match up with trending topics
# But we can search for presence of a topic in a user's tweets,
# after removing punctuation depending on the rule for the trending phrase,
# or do a regex match.
#print dir(set(wordsDict))
#['__and__', '__class__', '__cmp__', '__contains__', '__delattr__', '__doc__', '__eq__', '__format__', '__ge__', '__getattribute__', '__gt__', '__hash__', '__iand__', '__init__', '__ior__', '__isub__', '__iter__', '__ixor__', '__le__', '__len__', '__lt__', '__ne__', '__new__', '__or__', '__rand__', '__reduce__', '__reduce_ex__', '__repr__', '__ror__', '__rsub__', '__rxor__', '__setattr__', '__sizeof__', '__str__', '__sub__', '__subclasshook__', '__xor__', 'add', 'clear', 'copy', 'difference', 'difference_update', 'discard', 'intersection', 'intersection_update', 'isdisjoint', 'issubset', 'issuperset', 'pop', 'remove', 'symmetric_difference', 'symmetric_difference_update', 'union', 'update']
|
Extend test for processing words of tweets.
# -*- coding: utf-8 -*-
from string import punctuation
from test import _writeJSON, _readJSON
tweetData = _readJSON('var/tweet_test.json')
# Punctuation to be removed.
mySymbols = punctuation.replace(u'#', u'').replace(u'@', u'')
wordsDict = {}
for t in tweetData:
# case?
# apostrophes in words? ' vs ’?
# Split by spaces and new line characters.
words = t['text'].split(u' ')
for w in words:
print w
cleanW = w.replace('\n', '')
try:
for p in mySymbols:
if p in cleanW:
cleanW = cleanW.replace(p, '')
if cleanW.lower() not in (u'and', u'not', u'or', u'in') and \
not cleanW.lower().startswith(u'http'):
if cleanW not in wordsDict:
wordsDict.update({cleanW:1})
else:
wordsDict[cleanW] +=1
except UnicodeEncodeError:
print 'unicode'
print w
raise
print [cleanW]
print
# America\u2019s ?
# this is what happens when printing a unicode string in a list or set
# Trump’s
#[u'Trump\u2019s']
# it can't be forced to string or get ascii error.
# however it's decoded fine when printing.
print wordsDict
print
for x in set(wordsDict):
print x
# We can't easily get phrases from tweets to match up with trending topics
# But we can search for presence of a topic in a user's tweets,
# after removing punctuation depending on the rule for the trending phrase,
# or do a regex match.
#print dir(set(wordsDict))
#['__and__', '__class__', '__cmp__', '__contains__', '__delattr__', '__doc__', '__eq__', '__format__', '__ge__', '__getattribute__', '__gt__', '__hash__', '__iand__', '__init__', '__ior__', '__isub__', '__iter__', '__ixor__', '__le__', '__len__', '__lt__', '__ne__', '__new__', '__or__', '__rand__', '__reduce__', '__reduce_ex__', '__repr__', '__ror__', '__rsub__', '__rxor__', '__setattr__', '__sizeof__', '__str__', '__sub__', '__subclasshook__', '__xor__', 'add', 'clear', 'copy', 'difference', 'difference_update', 'discard', 'intersection', 'intersection_update', 'isdisjoint', 'issubset', 'issuperset', 'pop', 'remove', 'symmetric_difference', 'symmetric_difference_update', 'union', 'update']
|
<commit_before><commit_msg>Extend test for processing words of tweets.<commit_after># -*- coding: utf-8 -*-
from string import punctuation
from test import _writeJSON, _readJSON
tweetData = _readJSON('var/tweet_test.json')
# Punctuation to be removed.
mySymbols = punctuation.replace(u'#', u'').replace(u'@', u'')
wordsDict = {}
for t in tweetData:
# case?
# apostrophes in words? ' vs ’?
# Split by spaces and new line characters.
words = t['text'].split(u' ')
for w in words:
print w
cleanW = w.replace('\n', '')
try:
for p in mySymbols:
if p in cleanW:
cleanW = cleanW.replace(p, '')
if cleanW.lower() not in (u'and', u'not', u'or', u'in') and \
not cleanW.lower().startswith(u'http'):
if cleanW not in wordsDict:
wordsDict.update({cleanW:1})
else:
wordsDict[cleanW] +=1
except UnicodeEncodeError:
print 'unicode'
print w
raise
print [cleanW]
print
# America\u2019s ?
# this is what happens when printing a unicode string in a list or set
# Trump’s
#[u'Trump\u2019s']
# it can't be forced to string or get ascii error.
# however it's decoded fine when printing.
print wordsDict
print
for x in set(wordsDict):
print x
# We can't easily get phrases from tweets to match up with trending topics
# But we can search for presence of a topic in a user's tweets,
# after removing punctuation depending on the rule for the trending phrase,
# or do a regex match.
#print dir(set(wordsDict))
#['__and__', '__class__', '__cmp__', '__contains__', '__delattr__', '__doc__', '__eq__', '__format__', '__ge__', '__getattribute__', '__gt__', '__hash__', '__iand__', '__init__', '__ior__', '__isub__', '__iter__', '__ixor__', '__le__', '__len__', '__lt__', '__ne__', '__new__', '__or__', '__rand__', '__reduce__', '__reduce_ex__', '__repr__', '__ror__', '__rsub__', '__rxor__', '__setattr__', '__sizeof__', '__str__', '__sub__', '__subclasshook__', '__xor__', 'add', 'clear', 'copy', 'difference', 'difference_update', 'discard', 'intersection', 'intersection_update', 'isdisjoint', 'issubset', 'issuperset', 'pop', 'remove', 'symmetric_difference', 'symmetric_difference_update', 'union', 'update']
|
|
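The counting loop in this record is easier to get right with collections.Counter and str.translate; here is a Python 3 sketch of the same idea — hashtag and mention characters preserved, the same tiny stopword list, URLs skipped (function and variable names are illustrative, not from the repository):

from collections import Counter
from string import punctuation

STOPWORDS = {'and', 'not', 'or', 'in'}
# Drop all punctuation except '#' and '@' so hashtags and mentions survive.
STRIP_TABLE = {ord(c): None for c in punctuation if c not in '#@'}

def count_words(tweets):
    counts = Counter()
    for tweet in tweets:
        for word in tweet['text'].split():  # split() also handles newlines
            word = word.translate(STRIP_TABLE)
            if word and word.lower() not in STOPWORDS \
                    and not word.lower().startswith('http'):
                counts[word] += 1
    return counts

Counter also sidesteps the unicode printing confusion noted in the comments: it never needs to coerce the keys to str, and counts.most_common() gives the ranking directly.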
45e9e53bfb857e9658e2c42dc9fd8542da6fbf8e
|
scripts/sync_local_file_with_swift.py
|
scripts/sync_local_file_with_swift.py
|
#!/usr/bin/env python
import os
import io
import tqdm
from dci import dci_config
from dci.db import models
from sqlalchemy import sql
conf = dci_config.generate_conf()
swift = dci_config.get_store()
engine = dci_config.get_engine(conf).connect()
_TABLE = models.FILES
# Count the directories to walk so the progress bar has a total
file_list = os.walk(conf['FILES_UPLOAD_FOLDER'])
with tqdm.tqdm(total=sum(1 for _ in file_list)) as pbar:
for dirname, dirnames, filenames in os.walk(conf['FILES_UPLOAD_FOLDER']):
if not filenames:
pbar.update(1)
continue
for filename in filenames:
# Check if the file exists in the DB
query = sql.select([_TABLE]).where(_TABLE.c.id == filename)
result = engine.execute(query)
# If not, do not sync, that's an orphan file
if result.rowcount == 0:
tqdm.tqdm.write("File %s not found, do not sync" % filename)
continue
# If the file exists, check if it is already present in swift
# and then upload it to swift if needed
if result.rowcount == 1:
tqdm.tqdm.write("File %s found in DB" % filename)
top_path = dirname[len(conf['FILES_UPLOAD_FOLDER']):]
swift_path = top_path + filename
tqdm.tqdm.write("Check if file is in swift : %s" % swift_path)
try:
swift.head(swift_path)
tqdm.tqdm.write("File exist on swift")
except:
tqdm.tqdm.write("File not found on swift, we will sync it")
f = io.open(dirname + "/" + filename, "r")
swift.upload(swift_path, f)
pbar.update(1)
|
Add a script to sync FS files to Swift
|
Add a script to sync FS files to Swift
Change-Id: I7a475177dd008040582943f7924a3f26df1df638
|
Python
|
apache-2.0
|
redhat-cip/dci-control-server,enovance/dci-control-server,redhat-cip/dci-control-server,enovance/dci-control-server
|
Add a script to sync FS files to Swift
Change-Id: I7a475177dd008040582943f7924a3f26df1df638
|
#!/usr/bin/env python
import os
import io
import tqdm
from dci import dci_config
from dci.db import models
from sqlalchemy import sql
conf = dci_config.generate_conf()
swift = dci_config.get_store()
engine = dci_config.get_engine(conf).connect()
_TABLE = models.FILES
# Count the directories to walk so the progress bar has a total
file_list = os.walk(conf['FILES_UPLOAD_FOLDER'])
with tqdm.tqdm(total=sum(1 for _ in file_list)) as pbar:
for dirname, dirnames, filenames in os.walk(conf['FILES_UPLOAD_FOLDER']):
if not filenames:
pbar.update(1)
continue
for filename in filenames:
# Check if the file exists in the DB
query = sql.select([_TABLE]).where(_TABLE.c.id == filename)
result = engine.execute(query)
# If not, do not sync, that's an orphan file
if result.rowcount == 0:
tqdm.tqdm.write("File %s not found, do not sync" % filename)
continue
# If the file exists, check if it is already present in swift
# and then upload it to swift if needed
if result.rowcount == 1:
tqdm.tqdm.write("File %s found in DB" % filename)
top_path = dirname[len(conf['FILES_UPLOAD_FOLDER']):]
swift_path = top_path + filename
tqdm.tqdm.write("Check if file is in swift : %s" % swift_path)
try:
swift.head(swift_path)
tqdm.tqdm.write("File exist on swift")
except:
tqdm.tqdm.write("File not found on swift, we will sync it")
f = io.open(dirname + "/" + filename, "r")
swift.upload(swift_path, f)
pbar.update(1)
|
<commit_before><commit_msg>Add a script to sync FS files to Swift
Change-Id: I7a475177dd008040582943f7924a3f26df1df638<commit_after>
|
#!/usr/bin/env python
import os
import io
import tqdm
from dci import dci_config
from dci.db import models
from sqlalchemy import sql
conf = dci_config.generate_conf()
swift = dci_config.get_store()
engine = dci_config.get_engine(conf).connect()
_TABLE = models.FILES
# Count the directories to walk so the progress bar has a total
file_list = os.walk(conf['FILES_UPLOAD_FOLDER'])
with tqdm.tqdm(total=sum(1 for _ in file_list)) as pbar:
for dirname, dirnames, filenames in os.walk(conf['FILES_UPLOAD_FOLDER']):
if not filenames:
pbar.update(1)
continue
for filename in filenames:
# Check if the file exists in the DB
query = sql.select([_TABLE]).where(_TABLE.c.id == filename)
result = engine.execute(query)
# If not, do not sync, that's an orphan file
if result.rowcount == 0:
tqdm.tqdm.write("File %s not found, do not sync" % filename)
continue
# If the file exists, check if it is already present in swift
# and then upload it to swift if needed
if result.rowcount == 1:
tqdm.tqdm.write("File %s found in DB" % filename)
top_path = dirname[len(conf['FILES_UPLOAD_FOLDER']):]
swift_path = top_path + filename
tqdm.tqdm.write("Check if file is in swift : %s" % swift_path)
try:
swift.head(swift_path)
tqdm.tqdm.write("File exist on swift")
except:
tqdm.tqdm.write("File not found on swift, we will sync it")
f = io.open(dirname + "/" + filename, "r")
swift.upload(swift_path, f)
pbar.update(1)
|
Add a script to sync FS files to Swift
Change-Id: I7a475177dd008040582943f7924a3f26df1df638
#!/usr/bin/env python
import os
import io
import tqdm
from dci import dci_config
from dci.db import models
from sqlalchemy import sql
conf = dci_config.generate_conf()
swift = dci_config.get_store()
engine = dci_config.get_engine(conf).connect()
_TABLE = models.FILES
# Count the directories to walk so the progress bar has a total
file_list = os.walk(conf['FILES_UPLOAD_FOLDER'])
with tqdm.tqdm(total=sum(1 for _ in file_list)) as pbar:
for dirname, dirnames, filenames in os.walk(conf['FILES_UPLOAD_FOLDER']):
if not filenames:
pbar.update(1)
continue
for filename in filenames:
# Check if the file exists in the DB
query = sql.select([_TABLE]).where(_TABLE.c.id == filename)
result = engine.execute(query)
# If not, do not sync, that's an orphan file
if result.rowcount == 0:
tqdm.tqdm.write("File %s not found, do not sync" % filename)
continue
# If the file exists, check if it is already present in swift
# and then upload it to swift if needed
if result.rowcount == 1:
tqdm.tqdm.write("File %s found in DB" % filename)
top_path = dirname[len(conf['FILES_UPLOAD_FOLDER']):]
swift_path = top_path + filename
tqdm.tqdm.write("Check if file is in swift : %s" % swift_path)
try:
swift.head(swift_path)
tqdm.tqdm.write("File exist on swift")
except:
tqdm.tqdm.write("File not found on swift, we will sync it")
f = io.open(dirname + "/" + filename, "r")
swift.upload(swift_path, f)
pbar.update(1)
|
<commit_before><commit_msg>Add a script to sync FS files to Swift
Change-Id: I7a475177dd008040582943f7924a3f26df1df638<commit_after>#!/usr/bin/env python
import os
import io
import tqdm
from dci import dci_config
from dci.db import models
from sqlalchemy import sql
conf = dci_config.generate_conf()
swift = dci_config.get_store()
engine = dci_config.get_engine(conf).connect()
_TABLE = models.FILES
# Count the directories to walk so the progress bar has a total
file_list = os.walk(conf['FILES_UPLOAD_FOLDER'])
with tqdm.tqdm(total=sum(1 for _ in file_list)) as pbar:
for dirname, dirnames, filenames in os.walk(conf['FILES_UPLOAD_FOLDER']):
if not filenames:
pbar.update(1)
continue
for filename in filenames:
# Check if the file exists in the DB
query = sql.select([_TABLE]).where(_TABLE.c.id == filename)
result = engine.execute(query)
# If not, do not sync, that's an orphan file
if result.rowcount == 0:
tqdm.tqdm.write("File %s not found, do not sync" % filename)
continue
# If the file exists, check if it is already present in swift
# and then upload it to swift if needed
if result.rowcount == 1:
tqdm.tqdm.write("File %s found in DB" % filename)
top_path = dirname[len(conf['FILES_UPLOAD_FOLDER']):]
swift_path = top_path + filename
tqdm.tqdm.write("Check if file is in swift : %s" % swift_path)
try:
swift.head(swift_path)
tqdm.tqdm.write("File exist on swift")
except:
tqdm.tqdm.write("File not found on swift, we will sync it")
f = io.open(dirname + "/" + filename, "r")
swift.upload(swift_path, f)
pbar.update(1)
|
|
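The broad exception guard around swift.head can be narrowed further to the store's "not found" error, so that real failures (auth, network) still surface. A sketch of that shape — it assumes a python-swiftclient-style ClientException with an http_status attribute underneath; the exception type actually raised by dci's store wrapper is an assumption here:

from swiftclient.exceptions import ClientException

def ensure_uploaded(swift, swift_path, local_path):
    try:
        swift.head(swift_path)
        return False  # already present, nothing to do
    except ClientException as exc:
        if exc.http_status != 404:
            raise  # auth or network problems should not be swallowed
    with open(local_path, 'rb') as fobj:
        swift.upload(swift_path, fobj)
    return True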
7cee0a3ac98ecde609cb6077a4b1490b1751838b
|
cardbox/card_forms.py
|
cardbox/card_forms.py
|
from django.forms import Textarea, ModelForm
from card_model import Card
class CardForm(ModelForm):
"""The basic form for updating or editing cards"""
class Meta:
model = Card
fields = ('front', 'back')
widgets = {
'front': Textarea(attrs={'class': "form-control"}),
'back': Textarea(attrs={'class': "form-control"}),
}
|
Add custom card model form for edit and create
|
Add custom card model form for edit and create
|
Python
|
mit
|
DummyDivision/Tsune,DummyDivision/Tsune,DummyDivision/Tsune
|
Add custom card model form for edit and create
|
from django.forms import Textarea, ModelForm
from card_model import Card
class CardForm(ModelForm):
"""The basic form for updating or editing cards"""
class Meta:
model = Card
fields = ('front', 'back')
widgets = {
'front': Textarea(attrs={'class': "form-control"}),
'back': Textarea(attrs={'class': "form-control"}),
}
|
<commit_before><commit_msg>Add custom card model form for edit and create<commit_after>
|
from django.forms import Textarea, ModelForm
from card_model import Card
class CardForm(ModelForm):
"""The basic form for updating or editing cards"""
class Meta:
model = Card
fields = ('front', 'back')
widgets = {
'front': Textarea(attrs={'class': "form-control"}),
'back': Textarea(attrs={'class': "form-control"}),
}
|
Add custom card model form for edit and create
from django.forms import Textarea, ModelForm
from card_model import Card
class CardForm(ModelForm):
"""The basic form for updating or editing cards"""
class Meta:
model = Card
fields = ('front', 'back')
widgets = {
'front': Textarea(attrs={'class': "form-control"}),
'back': Textarea(attrs={'class': "form-control"}),
}
|
<commit_before><commit_msg>Add custom card model form for edit and create<commit_after>from django.forms import Textarea, ModelForm
from card_model import Card
class CardForm(ModelForm):
"""The basic form for updating or editing cards"""
class Meta:
model = Card
fields = ('front', 'back')
widgets = {
'front': Textarea(attrs={'class': "form-control"}),
'back': Textarea(attrs={'class': "form-control"}),
}
|
|
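Forms like this usually plug straight into Django's generic editing views; a sketch of the wiring (view names, template path, and success URL are illustrative assumptions, not taken from the repository):

from django.views.generic.edit import CreateView, UpdateView
from card_model import Card
from card_forms import CardForm

class CardCreateView(CreateView):
    model = Card
    form_class = CardForm
    template_name = 'cardbox/card_form.html'
    success_url = '/cards/'

class CardUpdateView(UpdateView):
    model = Card
    form_class = CardForm
    template_name = 'cardbox/card_form.html'
    success_url = '/cards/'

With form_class set, both views render the Textarea widgets defined above and validate against the model's front/back fields.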
b2b62818345f8062e42b4cfdc19e389cbc430efb
|
regscrape/sec_cftc/commands/sec_cftc_name_dockets.py
|
regscrape/sec_cftc/commands/sec_cftc_name_dockets.py
|
GEVENT = False
from regs_models import *
import datetime
def run():
for docket in Docket.objects(source="sec_cftc", scraped="no"):
now = datetime.datetime.now()
if not docket.title:
candidates = list(Doc.objects(docket_id=docket.id, type__in=("rule", "proposed_rule", "notice")))
candidates = sorted(candidates, key=lambda c: c.details.get('Date_Posted', now))
if candidates:
ctitle = candidates[0].title
else:
ctitle = docket.id
print "For docket %s, proposing title: %s" % (docket.id, ctitle)
docket.title = ctitle
docket.scraped = 'yes'
docket.save()
|
Add docket namer for SEC/CFTC.
|
Add docket namer for SEC/CFTC.
|
Python
|
bsd-3-clause
|
sunlightlabs/regulations-scraper,sunlightlabs/regulations-scraper,sunlightlabs/regulations-scraper
|
Add docket namer for SEC/CFTC.
|
GEVENT = False
from regs_models import *
import datetime
def run():
for docket in Docket.objects(source="sec_cftc", scraped="no"):
now = datetime.datetime.now()
if not docket.title:
candidates = list(Doc.objects(docket_id=docket.id, type__in=("rule", "proposed_rule", "notice")))
candidates = sorted(candidates, key=lambda c: c.details.get('Date_Posted', now))
if candidates:
ctitle = candidates[0].title
else:
ctitle = docket.id
print "For docket %s, proposing title: %s" % (docket.id, ctitle)
docket.title = ctitle
docket.scraped = 'yes'
docket.save()
|
<commit_before><commit_msg>Add docket namer for SEC/CFTC.<commit_after>
|
GEVENT = False
from regs_models import *
import datetime
def run():
for docket in Docket.objects(source="sec_cftc", scraped="no"):
now = datetime.datetime.now()
if not docket.title:
candidates = list(Doc.objects(docket_id=docket.id, type__in=("rule", "proposed_rule", "notice")))
candidates = sorted(candidates, key=lambda c: c.details.get('Date_Posted', now))
if candidates:
ctitle = candidates[0].title
else:
ctitle = docket.id
print "For docket %s, proposing title: %s" % (docket.id, ctitle)
docket.title = ctitle
docket.scraped = 'yes'
docket.save()
|
Add docket namer for SEC/CFTC.
GEVENT = False
from regs_models import *
import datetime
def run():
for docket in Docket.objects(source="sec_cftc", scraped="no"):
now = datetime.datetime.now()
if not docket.title:
candidates = list(Doc.objects(docket_id=docket.id, type__in=("rule", "proposed_rule", "notice")))
candidates = sorted(candidates, key=lambda c: c.details.get('Date_Posted', now))
if candidates:
ctitle = candidates[0].title
else:
ctitle = docket.id
print "For docket %s, proposing title: %s" % (docket.id, ctitle)
docket.title = ctitle
docket.scraped = 'yes'
docket.save()
|
<commit_before><commit_msg>Add docket namer for SEC/CFTC.<commit_after>GEVENT = False
from regs_models import *
import datetime
def run():
for docket in Docket.objects(source="sec_cftc", scraped="no"):
now = datetime.datetime.now()
if not docket.title:
candidates = list(Doc.objects(docket_id=docket.id, type__in=("rule", "proposed_rule", "notice")))
candidates = sorted(candidates, key=lambda c: c.details.get('Date_Posted', now))
if candidates:
ctitle = candidates[0].title
else:
ctitle = docket.id
print "For docket %s, proposing title: %s" % (docket.id, ctitle)
docket.title = ctitle
docket.scraped = 'yes'
docket.save()
|
|
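The sort key details.get('Date_Posted', now) deliberately pushes undated documents behind dated ones, so the proposed title comes from the earliest posted rule, proposed rule, or notice. The same selection reads a little more directly as a single min() call (a sketch; it assumes Date_Posted values compare as datetimes):

import datetime

def propose_title(docket_id, candidates):
    now = datetime.datetime.now()
    if not candidates:
        return docket_id  # fall back to the docket's own id
    earliest = min(candidates,
                   key=lambda c: c.details.get('Date_Posted', now))
    return earliest.title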
eb804fbf08822053c8d891ece3ebf206cad8a8b8
|
es_synonyms/utils.py
|
es_synonyms/utils.py
|
import requests
from codecs import open
from requests.exceptions import MissingSchema, InvalidSchema, InvalidURL
from .parser import SynParser
def load_synonyms(path):
try:
r = requests.get(path)
content = r.text
except (MissingSchema, InvalidSchema, InvalidURL):
try:
with open(path, encoding='utf-8') as fp:
content = fp.read()
except OSError:
raise TypeError('Invalid path: "{0}". Ensure it is either a URL or a correct filesystem path.'.format(path))
return SynParser.get_mapping(content)
|
Add utility for quickly loading the synonym file
|
Add utility for quickly loading the synonym file
|
Python
|
mit
|
prashnts/elasticsearch-synonyms,prashnts/elasticsearch-synonyms
|
Add utility for quickly loading the synonym file
|
import requests
from codecs import open
from requests.exceptions import MissingSchema, InvalidSchema, InvalidURL
from .parser import SynParser
def load_synonyms(path):
try:
r = requests.get(path)
content = r.text
except (MissingSchema, InvalidSchema, InvalidURL):
try:
with open(path, encoding='utf-8') as fp:
content = fp.read()
except OSError:
raise TypeError('Invalid path: "{0}". Ensure it is either a URL or a correct filesystem path.'.format(path))
return SynParser.get_mapping(content)
|
<commit_before><commit_msg>Add utility for quickly loading the synonym file<commit_after>
|
import requests
from codecs import open
from requests.exceptions import MissingSchema, InvalidSchema, InvalidURL
from .parser import SynParser
def load_synonyms(path):
try:
r = requests.get(path)
content = r.text
except (MissingSchema, InvalidSchema, InvalidURL):
try:
with open(path, encoding='utf-8') as fp:
content = fp.read()
except OSError:
raise TypeError('Invalid path: "{0}". Ensure it is either a URL or a correct filesystem path.'.format(path))
return SynParser.get_mapping(content)
|
Add utility for quickly loading the synonym file
import requests
from codecs import open
from requests.exceptions import MissingSchema, InvalidSchema, InvalidURL
from .parser import SynParser
def load_synonyms(path):
try:
r = requests.get(path)
content = r.text
except (MissingSchema, InvalidSchema, InvalidURL):
try:
with open(path, encoding='utf-8') as fp:
content = fp.read()
except OSError:
raise TypeError('Invalid path: "{0}". Ensure it is either a URL or a correct filesystem path.'.format(path))
return SynParser.get_mapping(content)
|
<commit_before><commit_msg>Add utility for quickly loading the synonym file<commit_after>import requests
from codecs import open
from requests.exceptions import MissingSchema, InvalidSchema, InvalidURL
from .parser import SynParser
def load_synonyms(path):
try:
r = requests.get(path)
content = r.text
except (MissingSchema, InvalidSchema, InvalidURL):
try:
with open(path, encoding='utf-8') as fp:
content = fp.read()
except OSError:
raise TypeError('Invalid path: "{0}". Ensure it is either a URL or a correct filesystem path.'.format(path))
return SynParser.get_mapping(content)
|
|
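In use, load_synonyms doesn't care where the file lives — the requests branch handles URLs and the codecs.open branch handles filesystem paths. A usage sketch (the paths and URL are illustrative, and the import assumes the package exposes the helper as es_synonyms.utils):

from es_synonyms.utils import load_synonyms

# From a local file:
mapping = load_synonyms('data/be-ae.synonyms')

# Or fetched over HTTP:
mapping = load_synonyms('https://example.com/synonyms.txt')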
575580d005802d1920402b385bfe963bb4390fac
|
data/Crumb_data/Crumb_data_loading.py
|
data/Crumb_data/Crumb_data_loading.py
|
# coding: utf-8
# In[ ]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
#import sys #(just for version number)
#import matplotlib #(just for version number)
#print('Python version ' + sys.version)
#print('Pandas version ' + pd.__version__)
#print('Matplotlib version ' + matplotlib.__version__)
# In[ ]:
file_name = 'python_input_data.csv'
df = pd.read_csv(file_name, names=['Drug','Channel','Experiment','Concentration','Inhibition'])
df
# In[ ]:
drug_and_channel = df[['Concentration','Inhibition']][df['Drug'] == 'Amiodarone'][df['Channel'] == 'Cav1.2']
drug_and_channel
drug_and_channel.values
# In[ ]:
drugs = df.Drug.unique()
print(drugs)
# In[ ]:
channels = df.Channel.unique()
print(channels)
# In[ ]:
for drug in drugs:
for channel in channels:
drug_and_channel_values = df[['Concentration','Inhibition']][df['Drug'] == drug][df['Channel'] == channel]
print(drug,channel)
print(drug_and_channel_values)
# In[ ]:
|
Add example Python code for loading the resulting data
|
Add example Python code for loading the resulting data
|
Python
|
bsd-3-clause
|
mirams/PyHillFit,mirams/PyHillFit,mirams/PyHillFit
|
Add example Python code for loading the resulting data
|
# coding: utf-8
# In[ ]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
#import sys #(just for version number)
#import matplotlib #(just for version number)
#print('Python version ' + sys.version)
#print('Pandas version ' + pd.__version__)
#print('Matplotlib version ' + matplotlib.__version__)
# In[ ]:
file_name = 'python_input_data.csv'
df = pd.read_csv(file_name, names=['Drug','Channel','Experiment','Concentration','Inhibition'])
df
# In[ ]:
drug_and_channel = df[['Concentration','Inhibition']][df['Drug'] == 'Amiodarone'][df['Channel'] == 'Cav1.2']
drug_and_channel
drug_and_channel.values
# In[ ]:
drugs = df.Drug.unique()
print(drugs)
# In[ ]:
channels = df.Channel.unique()
print(channels)
# In[ ]:
for drug in drugs:
for channel in channels:
drug_and_channel_values = df[['Concentration','Inhibition']][df['Drug'] == drug][df['Channel'] == channel]
print(drug,channel)
print(drug_and_channel_values)
# In[ ]:
|
<commit_before><commit_msg>Add an example python code for loading the resulting data<commit_after>
|
# coding: utf-8
# In[ ]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
#import sys #(just for version number)
#import matplotlib #(just for version number)
#print('Python version ' + sys.version)
#print('Pandas version ' + pd.__version__)
#print('Matplotlib version ' + matplotlib.__version__)
# In[ ]:
file_name = 'python_input_data.csv'
df = pd.read_csv(file_name, names=['Drug','Channel','Experiment','Concentration','Inhibition'])
df
# In[ ]:
drug_and_channel = df[['Concentration','Inhibition']][df['Drug'] == 'Amiodarone'][df['Channel'] == 'Cav1.2']
drug_and_channel
drug_and_channel.values
# In[ ]:
drugs = df.Drug.unique()
print(drugs)
# In[ ]:
channels = df.Channel.unique()
print(channels)
# In[ ]:
for drug in drugs:
for channel in channels:
drug_and_channel_values = df[['Concentration','Inhibition']][df['Drug'] == drug][df['Channel'] == channel]
print(drug,channel)
print(drug_and_channel_values)
# In[ ]:
|
Add example Python code for loading the resulting data
# coding: utf-8
# In[ ]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
#import sys #(just for version number)
#import matplotlib #(just for version number)
#print('Python version ' + sys.version)
#print('Pandas version ' + pd.__version__)
#print('Matplotlib version ' + matplotlib.__version__)
# In[ ]:
file_name = 'python_input_data.csv'
df = pd.read_csv(file_name, names=['Drug','Channel','Experiment','Concentration','Inhibition'])
df
# In[ ]:
drug_and_channel = df[['Concentration','Inhibition']][df['Drug'] == 'Amiodarone'][df['Channel'] == 'Cav1.2']
drug_and_channel
drug_and_channel.values
# In[ ]:
drugs = df.Drug.unique()
print(drugs)
# In[ ]:
channels = df.Channel.unique()
print(channels)
# In[ ]:
for drug in drugs:
for channel in channels:
drug_and_channel_values = df[['Concentration','Inhibition']][df['Drug'] == drug][df['Channel'] == channel]
print(drug,channel)
print(drug_and_channel_values)
# In[ ]:
|
<commit_before><commit_msg>Add an example python code for loading the resulting data<commit_after>
# coding: utf-8
# In[ ]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
#import sys #(just for version number)
#import matplotlib #(just for version number)
#print('Python version ' + sys.version)
#print('Pandas version ' + pd.__version__)
#print('Matplotlib version ' + matplotlib.__version__)
# In[ ]:
file_name = 'python_input_data.csv'
df = pd.read_csv(file_name, names=['Drug','Channel','Experiment','Concentration','Inhibition'])
df
# In[ ]:
drug_and_channel = df[['Concentration','Inhibition']][df['Drug'] == 'Amiodarone'][df['Channel'] == 'Cav1.2']
drug_and_channel
drug_and_channel.values
# In[ ]:
drugs = df.Drug.unique()
print(drugs)
# In[ ]:
channels = df.Channel.unique()
print(channels)
# In[ ]:
for drug in drugs:
for channel in channels:
drug_and_channel_values = df[['Concentration','Inhibition']][df['Drug'] == drug][df['Channel'] == channel]
print(drug,channel)
print(drug_and_channel_values)
# In[ ]:
|
|
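The nested drugs-by-channels loop also visits combinations that never occur in the file; pandas' groupby iterates only over the (drug, channel) pairs actually present, which is usually what you want here:

import pandas as pd

df = pd.read_csv('python_input_data.csv',
                 names=['Drug', 'Channel', 'Experiment',
                        'Concentration', 'Inhibition'])
for (drug, channel), group in df.groupby(['Drug', 'Channel']):
    print(drug, channel)
    print(group[['Concentration', 'Inhibition']])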
4665b59415138e900f46b176cd44f20f23eddf2a
|
django-mserve/settings-prestoprime.py
|
django-mserve/settings-prestoprime.py
|
# Do PrestoPRIME setup
PRESTOPRIME = True
DEFAULT_DELIVERY_SUCCESS_CONSTANT_MIN = 15.0
DEFAULT_DELIVERY_SUCCESS_MULTIPLIER_GB = 1.0
DELIVERY_SUCCESS_METRIC = "http://mserve/deliverySuccess"
if PRESTOPRIME:
CELERY_IMPORTS += ("prestoprime.tasks",)
INSTALLED_APPS += ('prestoprime',)
|
Add prestoprime specific settings file.
|
Add prestoprime specific settings file.
|
Python
|
lgpl-2.1
|
it-innovation/MServe-PrestoPRIME
|
Add prestoprime specific settings file.
|
# Do PrestoPRIME setup
PRESTOPRIME = True
DEFAULT_DELIVERY_SUCCESS_CONSTANT_MIN = 15.0
DEFAULT_DELIVERY_SUCCESS_MULTIPLIER_GB = 1.0
DELIVERY_SUCCESS_METRIC = "http://mserve/deliverySuccess"
if PRESTOPRIME:
CELERY_IMPORTS += ("prestoprime.tasks",)
INSTALLED_APPS += ('prestoprime',)
|
<commit_before><commit_msg>Add prestoprime specific settings file.<commit_after>
|
# Do PrestoPRIME setup
PRESTOPRIME = True
DEFAULT_DELIVERY_SUCCESS_CONSTANT_MIN = 15.0
DEFAULT_DELIVERY_SUCCESS_MULTIPLIER_GB = 1.0
DELIVERY_SUCCESS_METRIC = "http://mserve/deliverySuccess"
if PRESTOPRIME:
CELERY_IMPORTS += ("prestoprime.tasks",)
INSTALLED_APPS += ('prestoprime',)
|
Add prestoprime specific settings file.
# Do PrestoPRIME setup
PRESTOPRIME = True
DEFAULT_DELIVERY_SUCCESS_CONSTANT_MIN = 15.0
DEFAULT_DELIVERY_SUCCESS_MULTIPLIER_GB = 1.0
DELIVERY_SUCCESS_METRIC = "http://mserve/deliverySuccess"
if PRESTOPRIME:
CELERY_IMPORTS += ("prestoprime.tasks",)
INSTALLED_APPS += ('prestoprime',)
|
<commit_before><commit_msg>Add prestoprime specific settings file.<commit_after># Do PrestoPRIME setup
PRESTOPRIME = True
DEFAULT_DELIVERY_SUCCESS_CONSTANT_MIN = 15.0
DEFAULT_DELIVERY_SUCCESS_MULTIPLIER_GB = 1.0
DELIVERY_SUCCESS_METRIC = "http://mserve/deliverySuccess"
if PRESTOPRIME:
CELERY_IMPORTS += ("prestoprime.tasks",)
INSTALLED_APPS += ('prestoprime',)
|
|
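A settings overlay like this only works if the base settings module defines CELERY_IMPORTS and INSTALLED_APPS before pulling the overlay in, since the overlay extends them with +=. One conventional shape for that hook (a sketch of the pattern, not MServe's actual settings.py):

# settings.py (base) -- define the sequences first...
INSTALLED_APPS = ['django.contrib.auth', 'jobservice']
CELERY_IMPORTS = ('jobservice.tasks',)

# ...then execute the deployment overlay in this namespace.
overlay = 'settings-prestoprime.py'  # illustrative; the real hook may differ
with open(overlay) as f:
    exec(compile(f.read(), overlay, 'exec'))

Note that INSTALLED_APPS += ('prestoprime',) works on a list too — list += accepts any iterable — so the overlay doesn't care which sequence type the base settings chose.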
a47c591b77720d342721e3ff3672145d574c65b6
|
tests/test_cli.py
|
tests/test_cli.py
|
# coding: utf-8
""" Tests for pypel.cli.
THIS SOFTWARE IS UNDER BSD LICENSE.
Copyright (c) 2012-2015 Daniele Tricoli <eriol@mornie.org>
Read LICENSE for more information.
"""
import unittest
from pypel.cli import Row
class RowTestCase(unittest.TestCase):
def test_empty(self):
row = Row()
self.assertEqual(row.len(), None)
with self.assertRaises(TypeError):
row.format()
with self.assertRaises(KeyError):
row.format('price')
def test_float(self):
row = Row({'price': 2.71})
self.assertEqual(row.len('price'), 4)
self.assertEqual(row.format('price'), '{price:>{price_len}.2f}')
def test_int(self):
row = Row({'price': 2})
self.assertEqual(row.len('price'), 1)
self.assertEqual(row.format('price'), '{price:{price_len}}')
def test_str(self):
row = Row({'note': 'A simple note.'})
self.assertEqual(row.len('note'), 14)
self.assertEqual(row.format('note'), '{note:{note_len}}')
|
Add tests for Row class
|
Add tests for Row class
|
Python
|
bsd-3-clause
|
eriol/pypel
|
Add tests for Row class
|
# coding: utf-8
""" Tests for pypel.cli.
THIS SOFTWARE IS UNDER BSD LICENSE.
Copyright (c) 2012-2015 Daniele Tricoli <eriol@mornie.org>
Read LICENSE for more information.
"""
import unittest
from pypel.cli import Row
class RowTestCase(unittest.TestCase):
def test_empty(self):
row = Row()
self.assertEqual(row.len(), None)
with self.assertRaises(TypeError):
row.format()
with self.assertRaises(KeyError):
row.format('price')
def test_float(self):
row = Row({'price': 2.71})
self.assertEqual(row.len('price'), 4)
self.assertEqual(row.format('price'), '{price:>{price_len}.2f}')
def test_int(self):
row = Row({'price': 2})
self.assertEqual(row.len('price'), 1)
self.assertEqual(row.format('price'), '{price:{price_len}}')
def test_str(self):
row = Row({'note': 'A simple note.'})
self.assertEqual(row.len('note'), 14)
self.assertEqual(row.format('note'), '{note:{note_len}}')
|
<commit_before><commit_msg>Add tests for Row class<commit_after>
|
# coding: utf-8
""" Tests for pypel.cli.
THIS SOFTWARE IS UNDER BSD LICENSE.
Copyright (c) 2012-2015 Daniele Tricoli <eriol@mornie.org>
Read LICENSE for more information.
"""
import unittest
from pypel.cli import Row
class RowTestCase(unittest.TestCase):
def test_empty(self):
row = Row()
self.assertEqual(row.len(), None)
with self.assertRaises(TypeError):
row.format()
with self.assertRaises(KeyError):
row.format('price')
def test_float(self):
row = Row({'price': 2.71})
self.assertEqual(row.len('price'), 4)
self.assertEqual(row.format('price'), '{price:>{price_len}.2f}')
def test_int(self):
row = Row({'price': 2})
self.assertEqual(row.len('price'), 1)
self.assertEqual(row.format('price'), '{price:{price_len}}')
def test_str(self):
row = Row({'note': 'A simple note.'})
self.assertEqual(row.len('note'), 14)
self.assertEqual(row.format('note'), '{note:{note_len}}')
|
Add tests for Row class
# coding: utf-8
""" Tests for pypel.cli.
THIS SOFTWARE IS UNDER BSD LICENSE.
Copyright (c) 2012-2015 Daniele Tricoli <eriol@mornie.org>
Read LICENSE for more information.
"""
import unittest
from pypel.cli import Row
class RowTestCase(unittest.TestCase):
def test_empty(self):
row = Row()
self.assertEqual(row.len(), None)
with self.assertRaises(TypeError):
row.format()
with self.assertRaises(KeyError):
row.format('price')
def test_float(self):
row = Row({'price': 2.71})
self.assertEqual(row.len('price'), 4)
self.assertEqual(row.format('price'), '{price:>{price_len}.2f}')
def test_int(self):
row = Row({'price': 2})
self.assertEqual(row.len('price'), 1)
self.assertEqual(row.format('price'), '{price:{price_len}}')
def test_str(self):
row = Row({'note': 'A simple note.'})
self.assertEqual(row.len('note'), 14)
self.assertEqual(row.format('note'), '{note:{note_len}}')
|
<commit_before><commit_msg>Add tests for Row class<commit_after># coding: utf-8
""" Tests for pypel.cli.
THIS SOFTWARE IS UNDER BSD LICENSE.
Copyright (c) 2012-2015 Daniele Tricoli <eriol@mornie.org>
Read LICENSE for more information.
"""
import unittest
from pypel.cli import Row
class RowTestCase(unittest.TestCase):
def test_empty(self):
row = Row()
self.assertEqual(row.len(), None)
with self.assertRaises(TypeError):
row.format()
with self.assertRaises(KeyError):
row.format('price')
def test_float(self):
row = Row({'price': 2.71})
self.assertEqual(row.len('price'), 4)
self.assertEqual(row.format('price'), '{price:>{price_len}.2f}')
def test_int(self):
row = Row({'price': 2})
self.assertEqual(row.len('price'), 1)
self.assertEqual(row.format('price'), '{price:{price_len}}')
def test_str(self):
row = Row({'note': 'A simple note.'})
self.assertEqual(row.len('note'), 14)
self.assertEqual(row.format('note'), '{note:{note_len}}')
|
|
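Read as a specification, these tests fix Row's small contract: len(key) is the printed width of a value (and None when no key is given), while format(key) returns a str.format template whose alignment and precision depend on the value's type — raising TypeError when called without a key and KeyError for unknown ones. One minimal class that satisfies exactly these assertions (a sketch; pypel's actual Row implementation may differ):

class Row(object):
    def __init__(self, data=None):
        self.data = data or {}

    def len(self, key=None):
        if key is None:
            return None
        return len(str(self.data[key]))

    def format(self, key):
        value = self.data[key]  # unknown keys raise KeyError, as tested
        if isinstance(value, float):
            # Right-aligned with two decimals, e.g. '{price:>{price_len}.2f}'
            return '{%s:>{%s_len}.2f}' % (key, key)
        return '{%s:{%s_len}}' % (key, key)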
39df7991e3e305fb54b79012a9d4ec9719deed6c
|
tests/test_set.py
|
tests/test_set.py
|
from thingstance import Thing
def test_fields_are_a_set():
thing = Thing(fields={'latitude', 'longitude'})
assert thing.fields == {'latitude', 'longitude'}
thing = Thing(fields={'latitude', 'longitude', 'altitude'})
assert thing.fields == {'altitude', 'latitude', 'longitude'}
thing = Thing(fields={
'longitude',
'latitude',
'latitude',
'longitude',
'latitude',
'longitude'
})
assert thing.fields == {'latitude', 'longitude'}
|
Test fields property is a set
|
Test fields property is a set
|
Python
|
mit
|
openregister/openregister-python,openregister/entry,byrondover/entry
|
Test fields property is a set
|
from thingstance import Thing
def test_fields_are_a_set():
thing = Thing(fields={'latitude', 'longitude'})
assert thing.fields == {'latitude', 'longitude'}
thing = Thing(fields={'latitude', 'longitude', 'altitude'})
assert thing.fields == {'altitude', 'latitude', 'longitude'}
thing = Thing(fields={
'longitude',
'latitude',
'latitude',
'longitude',
'latitude',
'longitude'
})
assert thing.fields == {'latitude', 'longitude'}
|
<commit_before><commit_msg>Test fields property is a set<commit_after>
|
from thingstance import Thing
def test_fields_are_a_set():
thing = Thing(fields={'latitude', 'longitude'})
assert thing.fields == {'latitude', 'longitude'}
thing = Thing(fields={'latitude', 'longitude', 'altitude'})
assert thing.fields == {'altitude', 'latitude', 'longitude'}
thing = Thing(fields={
'longitude',
'latitude',
'latitude',
'longitude',
'latitude',
'longitude'
})
assert thing.fields == {'latitude', 'longitude'}
|
Test fields property is a set
from thingstance import Thing
def test_fields_are_a_set():
thing = Thing(fields={'latitude', 'longitude'})
assert thing.fields == {'latitude', 'longitude'}
thing = Thing(fields={'latitude', 'longitude', 'altitude'})
assert thing.fields == {'altitude', 'latitude', 'longitude'}
thing = Thing(fields={
'longitude',
'latitude',
'latitude',
'longitude',
'latitude',
'longitude'
})
assert thing.fields == {'latitude', 'longitude'}
|
<commit_before><commit_msg>Test fields property is a set<commit_after>from thingstance import Thing
def test_fields_are_a_set():
thing = Thing(fields={'latitude', 'longitude'})
assert thing.fields == {'latitude', 'longitude'}
thing = Thing(fields={'latitude', 'longitude', 'altitude'})
assert thing.fields == {'altitude', 'latitude', 'longitude'}
thing = Thing(fields={
'longitude',
'latitude',
'latitude',
'longitude',
'latitude',
'longitude'
})
assert thing.fields == {'latitude', 'longitude'}
|
|
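For these assertions to hold, Thing only has to coerce whatever iterable it receives into a set — deduplication then falls out of the data type. A minimal sketch (the real thingstance model carries far more than a fields attribute):

class Thing(object):
    def __init__(self, fields=()):
        # set() both deduplicates and makes ordering irrelevant,
        # which is exactly what the equality checks above rely on.
        self.fields = set(fields)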
89cb0388c513f01f3dc00829bf21d50feed7ba27
|
rest/utils.py
|
rest/utils.py
|
from django.http import HttpResponse
from rest_framework.views import exception_handler
from rest_framework.renderers import JSONRenderer
# Initially taken from
# http://www.django-rest-framework.org/tutorial/1-serialization/
class JSONResponse(HttpResponse):
"""
An HttpResponse that renders its content into JSON.
"""
def __init__(self, data, **kwargs):
content = JSONRenderer().render(data)
kwargs['content_type'] = 'application/json; charset=utf-8'
super(JSONResponse, self).__init__(content, **kwargs)
class DefaultException(Exception):
"""
Default exception because rest_framework's default API exception wouldn't
allow me to manage response data directly like I wanted to.
"""
def __init__(self, data, status):
self.data = data
self.status = status
class MalformedId(DefaultException):
"""
Exception for an invalid request id. This is NOT a "not found"; this
exception indicates the id wasn't in a valid form, so we couldn't even
look for the resource.
"""
def __init__(self, objectName, objectId):
DefaultException.__init__(self, {objectName + '_id': objectId}, 400)
class InvalidField(DefaultException):
"""
Exception for an invalid field value.
"""
def __init__(self, name, value):
DefaultException.__init__(self, {name: value}, 400)
class NotFound(DefaultException):
"""
Exception for a request that was looking for a resource that couldn't be
found.
"""
def __init__(self, objectName, objectId):
DefaultException.__init__(self, {objectName + '_id': objectId}, 404)
class ResourceConflict(DefaultException):
"""
Exception for a request trying to create a resource where one already
exists.
"""
def __init__(self, objectName, objectId):
DefaultException.__init__(self, {objectName + '_id': objectId}, 409)
class MissingFields(DefaultException):
"""
Exception for a request trying to create a resource while missing required
fields.
"""
def __init__(self, fields):
DefaultException.__init__(self, {'required': fields}, 422)
class MalformedBody(DefaultException):
"""
Exception for a request with a malformed JSON body.
"""
def __init__(self, body):
DefaultException.__init__(self, {'json': body}, 400)
def exceptionHandler(exc, context):
if isinstance(exc, DefaultException):
response = JSONResponse(exc.data, status=exc.status)
else:
response = exception_handler(exc, context)
return response
|
Add custom exception handler for API.
|
Add custom exception handler for API.
|
Python
|
apache-2.0
|
CMPUT404W17T06/CMPUT404-project,CMPUT404W17T06/CMPUT404-project,CMPUT404W17T06/CMPUT404-project
|
Add custom exception handler for API.
|
from django.http import HttpResponse
from rest_framework.views import exception_handler
from rest_framework.renderers import JSONRenderer
# Initially taken from
# http://www.django-rest-framework.org/tutorial/1-serialization/
class JSONResponse(HttpResponse):
"""
An HttpResponse that renders its content into JSON.
"""
def __init__(self, data, **kwargs):
content = JSONRenderer().render(data)
kwargs['content_type'] = 'application/json; charset=utf-8'
super(JSONResponse, self).__init__(content, **kwargs)
class DefaultException(Exception):
"""
Default exception because rest_framework's default API exception wouldn't
allow me to manage response data directly like I wanted to.
"""
def __init__(self, data, status):
self.data = data
self.status = status
class MalformedId(DefaultException):
"""
Exception for an invalid request id. This is NOT a "not found"; this
exception indicates the id wasn't in a valid form, so we couldn't even
look for the resource.
"""
def __init__(self, objectName, objectId):
DefaultException.__init__(self, {objectName + '_id': objectId}, 400)
class InvalidField(DefaultException):
"""
Exception for an invalid field value.
"""
def __init__(self, name, value):
DefaultException.__init__(self, {name: value}, 400)
class NotFound(DefaultException):
"""
Exception for a request that was looking for a resource that couldn't be
found.
"""
def __init__(self, objectName, objectId):
DefaultException.__init__(self, {objectName + '_id': objectId}, 404)
class ResourceConflict(DefaultException):
"""
Exception for a request trying to create a resource where one already
exists.
"""
def __init__(self, objectName, objectId):
DefaultException.__init__(self, {objectName + '_id': objectId}, 409)
class MissingFields(DefaultException):
"""
Exception for a request trying to create a resource while missing required
fields.
"""
def __init__(self, fields):
DefaultException.__init__(self, {'required': fields}, 422)
class MalformedBody(DefaultException):
"""
Exception for a request with malformed json body.
"""
def __init__(self, body):
DefaultException.__init__(self, {'json': body}, 400)
def exceptionHandler(exc, context):
if isinstance(exc, DefaultException):
response = JSONResponse(exc.data, status=exc.status)
else:
response = exception_handler(exc, context)
return response
|
<commit_before><commit_msg>Add custom exception handler for API.<commit_after>
|
from django.http import HttpResponse
from rest_framework.views import exception_handler
from rest_framework.renderers import JSONRenderer
# Initially taken from
# http://www.django-rest-framework.org/tutorial/1-serialization/
class JSONResponse(HttpResponse):
"""
An HttpResponse that renders its content into JSON.
"""
def __init__(self, data, **kwargs):
content = JSONRenderer().render(data)
kwargs['content_type'] = 'application/json; charset=utf-8'
super(JSONResponse, self).__init__(content, **kwargs)
class DefaultException(Exception):
"""
Default exception because rest_framework's default API exception wouldn't
allow me to manage response data directly like I wanted to.
"""
def __init__(self, data, status):
self.data = data
self.status = status
class MalformedId(DefaultException):
"""
Exception for an invalid request id. This is NOT a "not found"; this exception
indicates the id wasn't in a valid form, so we couldn't even look up the
resource.
"""
def __init__(self, objectName, objectId):
DefaultException.__init__(self, {objectName + '_id': objectId}, 400)
class InvalidField(DefaultException):
"""
Exception for a field with an invalid value.
"""
def __init__(self, name, value):
DefaultException.__init__(self, {name: value}, 400)
class NotFound(DefaultException):
"""
Exception for a request that was looking for a resource that couldn't be
found.
"""
def __init__(self, objectName, objectId):
DefaultException.__init__(self, {objectName + '_id': objectId}, 404)
class ResourceConflict(DefaultException):
"""
Exception for a request trying to create a resource where one already
exists.
"""
def __init__(self, objectName, objectId):
DefaultException.__init__(self, {objectName + '_id': objectId}, 409)
class MissingFields(DefaultException):
"""
Exception for a request trying to create a resource while missing required
fields.
"""
def __init__(self, fields):
DefaultException.__init__(self, {'required': fields}, 422)
class MalformedBody(DefaultException):
"""
Exception for a request with malformed json body.
"""
def __init__(self, body):
DefaultException.__init__(self, {'json': body}, 400)
def exceptionHandler(exc, context):
if isinstance(exc, DefaultException):
response = JSONResponse(exc.data, status=exc.status)
else:
response = exception_handler(exc, context)
return response
|
Add custom exception handler for API.from django.http import HttpResponse
from rest_framework.views import exception_handler
from rest_framework.renderers import JSONRenderer
# Initially taken from
# http://www.django-rest-framework.org/tutorial/1-serialization/
class JSONResponse(HttpResponse):
"""
An HttpResponse that renders its content into JSON.
"""
def __init__(self, data, **kwargs):
content = JSONRenderer().render(data)
kwargs['content_type'] = 'application/json; charset=utf-8'
super(JSONResponse, self).__init__(content, **kwargs)
class DefaultException(Exception):
"""
Default exception because rest_framework's default API exception wouldn't
allow me to manage response data directly like I wanted to.
"""
def __init__(self, data, status):
self.data = data
self.status = status
class MalformedId(DefaultException):
"""
Exception for an invalid request id. This is NOT a "not found"; this exception
indicates the id wasn't in a valid form, so we couldn't even look up the
resource.
"""
def __init__(self, objectName, objectId):
DefaultException.__init__(self, {objectName + '_id': objectId}, 400)
class InvalidField(DefaultException):
"""
Exception for a field with an invalid value.
"""
def __init__(self, name, value):
DefaultException.__init__(self, {name: value}, 400)
class NotFound(DefaultException):
"""
Exception for a request that was looking for a resource that couldn't be
found.
"""
def __init__(self, objectName, objectId):
DefaultException.__init__(self, {objectName + '_id': objectId}, 404)
class ResourceConflict(DefaultException):
"""
Exception for a request trying to create a resource where one already
exists.
"""
def __init__(self, objectName, objectId):
DefaultException.__init__(self, {objectName + '_id': objectId}, 409)
class MissingFields(DefaultException):
"""
Exception for a request trying to create a resource while missing required
fields.
"""
def __init__(self, fields):
DefaultException.__init__(self, {'required': fields}, 422)
class MalformedBody(DefaultException):
"""
Exception for a request with malformed json body.
"""
def __init__(self, body):
DefaultException.__init__(self, {'json': body}, 400)
def exceptionHandler(exc, context):
if isinstance(exc, DefaultException):
response = JSONResponse(exc.data, status=exc.status)
else:
response = exception_handler(exc, context)
return response
|
<commit_before><commit_msg>Add custom exception handler for API.<commit_after>from django.http import HttpResponse
from rest_framework.views import exception_handler
from rest_framework.renderers import JSONRenderer
# Initially taken from
# http://www.django-rest-framework.org/tutorial/1-serialization/
class JSONResponse(HttpResponse):
"""
An HttpResponse that renders its content into JSON.
"""
def __init__(self, data, **kwargs):
content = JSONRenderer().render(data)
kwargs['content_type'] = 'application/json; charset=utf-8'
super(JSONResponse, self).__init__(content, **kwargs)
class DefaultException(Exception):
"""
Default exception because rest_framework's default API exception wouldn't
allow me to manage response data directly like I wanted to.
"""
def __init__(self, data, status):
self.data = data
self.status = status
class MalformedId(DefaultException):
"""
Exception for an invalid request id. This is NOT a "not found"; this exception
indicates the id wasn't in a valid form, so we couldn't even look up the
resource.
"""
def __init__(self, objectName, objectId):
DefaultException.__init__(self, {objectName + '_id': objectId}, 400)
class InvalidField(DefaultException):
"""
Exception for a field with an invalid value.
"""
def __init__(self, name, value):
DefaultException.__init__(self, {name: value}, 400)
class NotFound(DefaultException):
"""
Exception for a request that was looking for a resource that couldn't be
found.
"""
def __init__(self, objectName, objectId):
DefaultException.__init__(self, {objectName + '_id': objectId}, 404)
class ResourceConflict(DefaultException):
"""
Exception for a request trying to create a resource where one already
exists.
"""
def __init__(self, objectName, objectId):
DefaultException.__init__(self, {objectName + '_id': objectId}, 409)
class MissingFields(DefaultException):
"""
Exception for a request trying to create a resource while missing required
fields.
"""
def __init__(self, fields):
DefaultException.__init__(self, {'required': fields}, 422)
class MalformedBody(DefaultException):
"""
Exception for a request with malformed json body.
"""
def __init__(self, body):
DefaultException.__init__(self, {'json': body}, 400)
def exceptionHandler(exc, context):
if isinstance(exc, DefaultException):
response = JSONResponse(exc.data, status=exc.status)
else:
response = exception_handler(exc, context)
return response
|
|
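Usage sketch for the handler above: Django REST framework is pointed at a custom handler through the standard `EXCEPTION_HANDLER` setting. The module path `api.exceptions` below is hypothetical; substitute wherever this file actually lives.

# settings.py
REST_FRAMEWORK = {
    'EXCEPTION_HANDLER': 'api.exceptions.exceptionHandler',
}

# views.py -- raising the typed exceptions then yields the uniform JSON bodies
from api.exceptions import NotFound  # hypothetical import path

def author_detail(request, author_id):
    author = lookup_author(author_id)  # hypothetical lookup helper
    if author is None:
        raise NotFound('author', author_id)  # -> 404, body {"author_id": "..."}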
b1354f4b5f59706bbf8a0f6e39457564c5949a9c
|
samples/_create_webmap.py
|
samples/_create_webmap.py
|
"""
create a webmap from code,
add a dynamic layer, using the dynamicLayer option to change the symbols
"""
import arcrest
import arcrest.webmap
import arcrest.agol
import json
USER = "XXXXX"
PASSWORD = "xxxxxx"
ORGANISATION = "xxxxxx"
wm = arcrest.webmap.layers.AGSMapServiceLayer("http://sampleserver6.arcgisonline.com/arcgis/rest/services/Census/MapServer", "", "Service Census")
wm.add_layer({ "id":0, "minScale":0,"maxScale":0, "layerDefinition": { "source": { "type":"mapLayer", "mapLayerId":3}, "drawingInfo":{"renderer":{"type":"simple","symbol":{"color":[0,0,0,128],"outline":{"color":[0,0,0,255],"width":1.5,"type":"esriSLS","style":"esriSLSSolid"},"type":"esriSFS","style":"esriSFSSolid"}}},"name":"Geology (Stratigraphy)","parentLayerId":-1,"defaultVisibility":True}})
topomap = arcrest.webmap.layers.BaseMapLayer("defaultBaseMap", url="http://services.arcgisonline.com/ArcGIS/rest/services/World_Topo_Map/MapServer")
bm = arcrest.webmap.webmapobjects.BaseMap("Topographic", [topomap])
w = arcrest.webmap.webmap.WebMap(baseMap = bm,
operationalLayers = [ wm ])
pa = arcrest.agol.admin.AGOL(USER, PASSWORD, ORGANISATION)
pa.addItem("MyNew WebMap","montag", "description de la webmap", "snippet", json.loads(str(w)),None)
|
Add a sample on WebMap Serialization
|
Add a sample on WebMap Serialization
|
Python
|
apache-2.0
|
Esri/ArcREST,pLeBlanc93/ArcREST,jgravois/ArcREST,BrunoCaimar/ArcREST,DShokes/ArcREST,adegwerth/ArcREST,achapkowski/ArcREST
|
Add a sample on WebMap Serialization
|
"""
create a webmap from code,
add a dynamic layer, using the dynamicLayer option to change the symbols
"""
import arcrest
import arcrest.webmap
import arcrest.agol
import json
USER = "XXXXX"
PASSWORD = "xxxxxx"
ORGANISATION = "xxxxxx"
wm = arcrest.webmap.layers.AGSMapServiceLayer("http://sampleserver6.arcgisonline.com/arcgis/rest/services/Census/MapServer", "", "Service Census")
wm.add_layer({ "id":0, "minScale":0,"maxScale":0, "layerDefinition": { "source": { "type":"mapLayer", "mapLayerId":3}, "drawingInfo":{"renderer":{"type":"simple","symbol":{"color":[0,0,0,128],"outline":{"color":[0,0,0,255],"width":1.5,"type":"esriSLS","style":"esriSLSSolid"},"type":"esriSFS","style":"esriSFSSolid"}}},"name":"Geology (Stratigraphy)","parentLayerId":-1,"defaultVisibility":True}})
topomap = arcrest.webmap.layers.BaseMapLayer("defaultBaseMap", url="http://services.arcgisonline.com/ArcGIS/rest/services/World_Topo_Map/MapServer")
bm = arcrest.webmap.webmapobjects.BaseMap("Topographic", [topomap])
w = arcrest.webmap.webmap.WebMap(baseMap = bm,
operationalLayers = [ wm ])
pa = arcrest.agol.admin.AGOL(USER, PASSWORD, ORGANISATION)
pa.addItem("MyNew WebMap","montag", "description de la webmap", "snippet", json.loads(str(w)),None)
|
<commit_before><commit_msg>Add a sample on WebMap Serialization<commit_after>
|
"""
create a webmap from code,
add a dynamic layer, using the dynamicLayer option to change the symbols
"""
import arcrest
import arcrest.webmap
import arcrest.agol
import json
USER = "XXXXX"
PASSWORD = "xxxxxx"
ORGANISATION = "xxxxxx"
wm = arcrest.webmap.layers.AGSMapServiceLayer("http://sampleserver6.arcgisonline.com/arcgis/rest/services/Census/MapServer", "", "Service Census")
wm.add_layer({ "id":0, "minScale":0,"maxScale":0, "layerDefinition": { "source": { "type":"mapLayer", "mapLayerId":3}, "drawingInfo":{"renderer":{"type":"simple","symbol":{"color":[0,0,0,128],"outline":{"color":[0,0,0,255],"width":1.5,"type":"esriSLS","style":"esriSLSSolid"},"type":"esriSFS","style":"esriSFSSolid"}}},"name":"Geology (Stratigraphy)","parentLayerId":-1,"defaultVisibility":True}})
topomap = arcrest.webmap.layers.BaseMapLayer("defaultBaseMap", url="http://services.arcgisonline.com/ArcGIS/rest/services/World_Topo_Map/MapServer")
bm = arcrest.webmap.webmapobjects.BaseMap("Topographic", [topomap])
w = arcrest.webmap.webmap.WebMap(baseMap = bm,
operationalLayers = [ wm ])
pa = arcrest.agol.admin.AGOL(USER, PASSWORD, ORGANISATION)
pa.addItem("MyNew WebMap","montag", "description de la webmap", "snippet", json.loads(str(w)),None)
|
Add a sample on WebMap Serialization"""
create a webmap from code,
add a dynamic layer, using the dynamicLayer option to change the symbols
"""
import arcrest
import arcrest.webmap
import arcrest.agol
import json
USER = "XXXXX"
PASSWORD = "xxxxxx"
ORGANISATION = "xxxxxx"
wm = arcrest.webmap.layers.AGSMapServiceLayer("http://sampleserver6.arcgisonline.com/arcgis/rest/services/Census/MapServer", "", "Service Census")
wm.add_layer({ "id":0, "minScale":0,"maxScale":0, "layerDefinition": { "source": { "type":"mapLayer", "mapLayerId":3}, "drawingInfo":{"renderer":{"type":"simple","symbol":{"color":[0,0,0,128],"outline":{"color":[0,0,0,255],"width":1.5,"type":"esriSLS","style":"esriSLSSolid"},"type":"esriSFS","style":"esriSFSSolid"}}},"name":"Geology (Stratigraphy)","parentLayerId":-1,"defaultVisibility":True}})
topomap = arcrest.webmap.layers.BaseMapLayer("defaultBaseMap", url="http://services.arcgisonline.com/ArcGIS/rest/services/World_Topo_Map/MapServer")
bm = arcrest.webmap.webmapobjects.BaseMap("Topographic", [topomap])
w = arcrest.webmap.webmap.WebMap(baseMap = bm,
operationalLayers = [ wm ])
pa = arcrest.agol.admin.AGOL(USER, PASSWORD, ORGANISATION)
pa.addItem("MyNew WebMap","montag", "description de la webmap", "snippet", json.loads(str(w)),None)
|
<commit_before><commit_msg>Add a sample on WebMap Serialization<commit_after>"""
create a webmap from code,
add a dynamic layer, using the dynamicLayer option to change the symbols
"""
import arcrest
import arcrest.webmap
import arcrest.agol
import json
USER = "XXXXX"
PASSWORD = "xxxxxx"
ORGANISATION = "xxxxxx"
wm = arcrest.webmap.layers.AGSMapServiceLayer("http://sampleserver6.arcgisonline.com/arcgis/rest/services/Census/MapServer", "", "Service Census")
wm.add_layer({ "id":0, "minScale":0,"maxScale":0, "layerDefinition": { "source": { "type":"mapLayer", "mapLayerId":3}, "drawingInfo":{"renderer":{"type":"simple","symbol":{"color":[0,0,0,128],"outline":{"color":[0,0,0,255],"width":1.5,"type":"esriSLS","style":"esriSLSSolid"},"type":"esriSFS","style":"esriSFSSolid"}}},"name":"Geology (Stratigraphy)","parentLayerId":-1,"defaultVisibility":True}})
topomap = arcrest.webmap.layers.BaseMapLayer("defaultBaseMap", url="http://services.arcgisonline.com/ArcGIS/rest/services/World_Topo_Map/MapServer")
bm = arcrest.webmap.webmapobjects.BaseMap("Topographic", [topomap])
w = arcrest.webmap.webmap.WebMap(baseMap = bm,
operationalLayers = [ wm ])
pa = arcrest.agol.admin.AGOL(USER, PASSWORD, ORGANISATION)
pa.addItem("MyNew WebMap","montag", "description de la webmap", "snippet", json.loads(str(w)),None)
|
|
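One way to sanity-check the sample above before publishing is to inspect the serialized webmap; `str(w)` is the same JSON that `addItem` consumes. A sketch, assuming the arcrest version used in the sample (the top-level keys follow the ArcGIS webmap JSON spec):

import json

webmap = json.loads(str(w))
print(sorted(webmap.keys()))             # expect 'baseMap' and 'operationalLayers'
print(len(webmap['operationalLayers']))  # 1 -- the Census dynamic layer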
12d922665ad1bba8a696ea35a1c0ced45fccd907
|
pylibofp/__main__.py
|
pylibofp/__main__.py
|
from .ofp_app import ofp_run
import importlib
import argparse
import sys
def main():
args = parse_args()
for module in args.modules:
import_module(module)
ofp_run()
def parse_args():
parser = argparse.ArgumentParser(
prog='ofp_app',
description='ofp_app runner',
epilog='(M) indicates an option may appear more than once\n')
parser.add_argument(
'--shell', action='store_true', help='use command shell')
parser.add_argument(
'--listen',
action='append',
default=['6653'],
help='listen endpoint [addr:]port (M)')
parser.add_argument('--loglevel', default='INFO', help='log level')
parser.add_argument('--logfile', default=None, help='log file')
parser.add_argument('modules', metavar='module', type=str, nargs='+', help='modules to import')
return parser.parse_args()
def import_module(module):
try:
importlib.import_module(module)
except ImportError as ex:
print(ex, file=sys.stderr, flush=True)
sys.exit(1)
if __name__ == '__main__':
main()
|
Add support for python -m.
|
Add support for python -m.
|
Python
|
mit
|
byllyfish/pylibofp,byllyfish/pylibofp
|
Add support for python -m.
|
from .ofp_app import ofp_run
import importlib
import argparse
import sys
def main():
args = parse_args()
for module in args.modules:
import_module(module)
ofp_run()
def parse_args():
parser = argparse.ArgumentParser(
prog='ofp_app',
description='ofp_app runner',
epilog='(M) indicates an option may appear more than once\n')
parser.add_argument(
'--shell', action='store_true', help='use command shell')
parser.add_argument(
'--listen',
action='append',
default=['6653'],
help='listen endpoint [addr:]port (M)')
parser.add_argument('--loglevel', default='INFO', help='log level')
parser.add_argument('--logfile', default=None, help='log file')
parser.add_argument('modules', metavar='module', type=str, nargs='+', help='modules to import')
return parser.parse_args()
def import_module(module):
try:
importlib.import_module(module)
except ImportError as ex:
print(ex, file=sys.stderr, flush=True)
sys.exit(1)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add support for python -m.<commit_after>
|
from .ofp_app import ofp_run
import importlib
import argparse
import sys
def main():
args = parse_args()
for module in args.modules:
import_module(module)
ofp_run()
def parse_args():
parser = argparse.ArgumentParser(
prog='ofp_app',
description='ofp_app runner',
epilog='(M) indicates an option may appear more than once\n')
parser.add_argument(
'--shell', action='store_true', help='use command shell')
parser.add_argument(
'--listen',
action='append',
default=['6653'],
help='listen endpoint [addr:]port (M)')
parser.add_argument('--loglevel', default='INFO', help='log level')
parser.add_argument('--logfile', default=None, help='log file')
parser.add_argument('modules', metavar='module', type=str, nargs='+', help='modules to import')
return parser.parse_args()
def import_module(module):
try:
importlib.import_module(module)
except ImportError as ex:
print(ex, file=sys.stderr, flush=True)
sys.exit(1)
if __name__ == '__main__':
main()
|
Add support for python -m.from .ofp_app import ofp_run
import importlib
import argparse
import sys
def main():
args = parse_args()
for module in args.modules:
import_module(module)
ofp_run()
def parse_args():
parser = argparse.ArgumentParser(
prog='ofp_app',
description='ofp_app runner',
epilog='(M) indicates an option may appear more than once\n')
parser.add_argument(
'--shell', action='store_true', help='use command shell')
parser.add_argument(
'--listen',
action='append',
default=['6653'],
help='listen endpoint [addr:]port (M)')
parser.add_argument('--loglevel', default='INFO', help='log level')
parser.add_argument('--logfile', default=None, help='log file')
parser.add_argument('modules', metavar='module', type=str, nargs='+', help='modules to import')
return parser.parse_args()
def import_module(module):
try:
importlib.import_module(module)
except ImportError as ex:
print(ex, file=sys.stderr, flush=True)
sys.exit(1)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add support for python -m.<commit_after>from .ofp_app import ofp_run
import importlib
import argparse
import sys
def main():
args = parse_args()
for module in args.modules:
import_module(module)
ofp_run()
def parse_args():
parser = argparse.ArgumentParser(
prog='ofp_app',
description='ofp_app runner',
epilog='(M) indicates an option may appear more than once\n')
parser.add_argument(
'--shell', action='store_true', help='use command shell')
parser.add_argument(
'--listen',
action='append',
default=['6653'],
help='listen endpoint [addr:]port (M)')
parser.add_argument('--loglevel', default='INFO', help='log level')
parser.add_argument('--logfile', default=None, help='log file')
parser.add_argument('modules', metavar='module', type=str, nargs='+', help='modules to import')
return parser.parse_args()
def import_module(module):
try:
importlib.import_module(module)
except ImportError as ex:
print(ex, file=sys.stderr, flush=True)
sys.exit(1)
if __name__ == '__main__':
main()
|
|
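Invocation sketch for the entry point above; the app module name is hypothetical:

#   python -m pylibofp --shell --listen 6654 myapp.handlers
# Each positional argument is imported via importlib before ofp_run() starts.
import sys
from pylibofp.__main__ import parse_args  # package name assumed from the repo

sys.argv = ['ofp_app', '--listen', '6654', 'myapp.handlers']  # simulated CLI
args = parse_args()
assert args.modules == ['myapp.handlers']
assert args.listen == ['6653', '6654']  # append + non-empty default: values accumulate

One argparse subtlety, demonstrated by the last assertion: `action='append'` combined with `default=['6653']` means user-supplied `--listen` values are appended to the default rather than replacing it; the usual fix is `default=None` plus a post-parse fallback.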
71b80cbf4b519823ecc72f1f38196c738ccf7c11
|
registration/urls.py
|
registration/urls.py
|
# created by Chirath R, chirath.02@gmail.com
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.views.generic import TemplateView
from registration.views import UserSignUpView, login, UserUpdateView, ProfileDetailView, ProfileListView
urlpatterns = [
url(r'^login/$', login, name="login"),
url(r'^signup/$', UserSignUpView.as_view(), name="signup"),
url(r'^$', ProfileListView.as_view(), name="profile_list"),
url(r'^(?P<pk>[0-9])/$', ProfileDetailView.as_view(), name="profile"),
url(r'^(?P<pk>[0-9])/update/$', login_required(UserUpdateView.as_view()), name="update_profile"),
url(
r'^signup/success/$',
TemplateView.as_view(template_name='registration/signup_success.html'),
name="signup_success"
),
url(
r'^signup/already-logged-in/$',
TemplateView.as_view(template_name='registration/already_logged_in.html'),
name="already_logged_in"
),
url(
r'^permission-denied/$',
TemplateView.as_view(template_name='registration/permission_denied.html'),
name="permission_denied"
),
url(
r'^error/$',
TemplateView.as_view(template_name='registration/error.html'),
name="error"
),
]
|
# created by Chirath R, chirath.02@gmail.com
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.views.generic import TemplateView
from registration.views import UserSignUpView, login, UserUpdateView, ProfileDetailView, ProfileListView
urlpatterns = [
url(r'^login/$', login, name="login"),
url(r'^signup/$', UserSignUpView.as_view(), name="signup"),
url(r'^$', ProfileListView.as_view(), name="profile_list"),
url(r'^(?P<pk>[0-9]+)/$', ProfileDetailView.as_view(), name="profile"),
url(r'^(?P<pk>[0-9]+)/update/$', login_required(UserUpdateView.as_view()), name="update_profile"),
url(
r'^signup/success/$',
TemplateView.as_view(template_name='registration/signup_success.html'),
name="signup_success"
),
url(
r'^signup/already-logged-in/$',
TemplateView.as_view(template_name='registration/already_logged_in.html'),
name="already_logged_in"
),
url(
r'^permission-denied/$',
TemplateView.as_view(template_name='registration/permission_denied.html'),
name="permission_denied"
),
url(
r'^error/$',
TemplateView.as_view(template_name='registration/error.html'),
name="error"
),
]
|
Allow more than 9 users by matching multi-digit pks in profile URLs
|
Allow more than 9 users by matching multi-digit pks in profile URLs
|
Python
|
mit
|
akshaya9/fosswebsite,Sparker0i/fosswebsite,akshayharidas/fosswebsite,csriharsha/fosswebsite,akshaya9/fosswebsite,csriharsha/fosswebsite,amfoss/fosswebsite,rahulk98/fosswebsite,akshayharidas/fosswebsite,navisk13/fosswebsite,akshayharidas/fosswebsite,amfoss/fosswebsite,Sparker0i/fosswebsite,manikishan/fosswebsite,csriharsha/fosswebsite,navisk13/fosswebsite,manikishan/fosswebsite,amfoss/fosswebsite,manikishan/fosswebsite,rahulk98/fosswebsite,akshaya9/fosswebsite,Sparker0i/fosswebsite,rahulk98/fosswebsite,navisk13/fosswebsite
|
# created by Chirath R, chirath.02@gmail.com
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.views.generic import TemplateView
from registration.views import UserSignUpView, login, UserUpdateView, ProfileDetailView, ProfileListView
urlpatterns = [
url(r'^login/$', login, name="login"),
url(r'^signup/$', UserSignUpView.as_view(), name="signup"),
url(r'^$', ProfileListView.as_view(), name="profile_list"),
url(r'^(?P<pk>[0-9])/$', ProfileDetailView.as_view(), name="profile"),
url(r'^(?P<pk>[0-9])/update/$', login_required(UserUpdateView.as_view()), name="update_profile"),
url(
r'^signup/success/$',
TemplateView.as_view(template_name='registration/signup_success.html'),
name="signup_success"
),
url(
r'^signup/already-logged-in/$',
TemplateView.as_view(template_name='registration/already_logged_in.html'),
name="already_logged_in"
),
url(
r'^permission-denied/$',
TemplateView.as_view(template_name='registration/permission_denied.html'),
name="permission_denied"
),
url(
r'^error/$',
TemplateView.as_view(template_name='registration/error.html'),
name="error"
),
]
Allow more than 9 users by matching multi-digit pks in profile URLs
|
# created by Chirath R, chirath.02@gmail.com
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.views.generic import TemplateView
from registration.views import UserSignUpView, login, UserUpdateView, ProfileDetailView, ProfileListView
urlpatterns = [
url(r'^login/$', login, name="login"),
url(r'^signup/$', UserSignUpView.as_view(), name="signup"),
url(r'^$', ProfileListView.as_view(), name="profile_list"),
url(r'^(?P<pk>[0-9]+)/$', ProfileDetailView.as_view(), name="profile"),
url(r'^(?P<pk>[0-9]+)/update/$', login_required(UserUpdateView.as_view()), name="update_profile"),
url(
r'^signup/success/$',
TemplateView.as_view(template_name='registration/signup_success.html'),
name="signup_success"
),
url(
r'^signup/already-logged-in/$',
TemplateView.as_view(template_name='registration/already_logged_in.html'),
name="already_logged_in"
),
url(
r'^permission-denied/$',
TemplateView.as_view(template_name='registration/permission_denied.html'),
name="permission_denied"
),
url(
r'^error/$',
TemplateView.as_view(template_name='registration/error.html'),
name="error"
),
]
|
<commit_before># created by Chirath R, chirath.02@gmail.com
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.views.generic import TemplateView
from registration.views import UserSignUpView, login, UserUpdateView, ProfileDetailView, ProfileListView
urlpatterns = [
url(r'^login/$', login, name="login"),
url(r'^signup/$', UserSignUpView.as_view(), name="signup"),
url(r'^$', ProfileListView.as_view(), name="profile_list"),
url(r'^(?P<pk>[0-9])/$', ProfileDetailView.as_view(), name="profile"),
url(r'^(?P<pk>[0-9])/update/$', login_required(UserUpdateView.as_view()), name="update_profile"),
url(
r'^signup/success/$',
TemplateView.as_view(template_name='registration/signup_success.html'),
name="signup_success"
),
url(
r'^signup/already-logged-in/$',
TemplateView.as_view(template_name='registration/already_logged_in.html'),
name="already_logged_in"
),
url(
r'^permission-denied/$',
TemplateView.as_view(template_name='registration/permission_denied.html'),
name="permission_denied"
),
url(
r'^error/$',
TemplateView.as_view(template_name='registration/error.html'),
name="error"
),
]
<commit_msg>Change the default no of users from 9<commit_after>
|
# created by Chirath R, chirath.02@gmail.com
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.views.generic import TemplateView
from registration.views import UserSignUpView, login, UserUpdateView, ProfileDetailView, ProfileListView
urlpatterns = [
url(r'^login/$', login, name="login"),
url(r'^signup/$', UserSignUpView.as_view(), name="signup"),
url(r'^$', ProfileListView.as_view(), name="profile_list"),
url(r'^(?P<pk>[0-9]+)/$', ProfileDetailView.as_view(), name="profile"),
url(r'^(?P<pk>[0-9]+)/update/$', login_required(UserUpdateView.as_view()), name="update_profile"),
url(
r'^signup/success/$',
TemplateView.as_view(template_name='registration/signup_success.html'),
name="signup_success"
),
url(
r'^signup/already-logged-in/$',
TemplateView.as_view(template_name='registration/already_logged_in.html'),
name="already_logged_in"
),
url(
r'^permission-denied/$',
TemplateView.as_view(template_name='registration/permission_denied.html'),
name="permission_denied"
),
url(
r'^error/$',
TemplateView.as_view(template_name='registration/error.html'),
name="error"
),
]
|
# created by Chirath R, chirath.02@gmail.com
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.views.generic import TemplateView
from registration.views import UserSignUpView, login, UserUpdateView, ProfileDetailView, ProfileListView
urlpatterns = [
url(r'^login/$', login, name="login"),
url(r'^signup/$', UserSignUpView.as_view(), name="signup"),
url(r'^$', ProfileListView.as_view(), name="profile_list"),
url(r'^(?P<pk>[0-9])/$', ProfileDetailView.as_view(), name="profile"),
url(r'^(?P<pk>[0-9])/update/$', login_required(UserUpdateView.as_view()), name="update_profile"),
url(
r'^signup/success/$',
TemplateView.as_view(template_name='registration/signup_success.html'),
name="signup_success"
),
url(
r'^signup/already-logged-in/$',
TemplateView.as_view(template_name='registration/already_logged_in.html'),
name="already_logged_in"
),
url(
r'^permission-denied/$',
TemplateView.as_view(template_name='registration/permission_denied.html'),
name="permission_denied"
),
url(
r'^error/$',
TemplateView.as_view(template_name='registration/error.html'),
name="error"
),
]
Allow more than 9 users by matching multi-digit pks in profile URLs# created by Chirath R, chirath.02@gmail.com
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.views.generic import TemplateView
from registration.views import UserSignUpView, login, UserUpdateView, ProfileDetailView, ProfileListView
urlpatterns = [
url(r'^login/$', login, name="login"),
url(r'^signup/$', UserSignUpView.as_view(), name="signup"),
url(r'^$', ProfileListView.as_view(), name="profile_list"),
url(r'^(?P<pk>[0-9]+)/$', ProfileDetailView.as_view(), name="profile"),
url(r'^(?P<pk>[0-9]+)/update/$', login_required(UserUpdateView.as_view()), name="update_profile"),
url(
r'^signup/success/$',
TemplateView.as_view(template_name='registration/signup_success.html'),
name="signup_success"
),
url(
r'^signup/already-logged-in/$',
TemplateView.as_view(template_name='registration/already_logged_in.html'),
name="already_logged_in"
),
url(
r'^permission-denied/$',
TemplateView.as_view(template_name='registration/permission_denied.html'),
name="permission_denied"
),
url(
r'^error/$',
TemplateView.as_view(template_name='registration/error.html'),
name="error"
),
]
|
<commit_before># created by Chirath R, chirath.02@gmail.com
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.views.generic import TemplateView
from registration.views import UserSignUpView, login, UserUpdateView, ProfileDetailView, ProfileListView
urlpatterns = [
url(r'^login/$', login, name="login"),
url(r'^signup/$', UserSignUpView.as_view(), name="signup"),
url(r'^$', ProfileListView.as_view(), name="profile_list"),
url(r'^(?P<pk>[0-9])/$', ProfileDetailView.as_view(), name="profile"),
url(r'^(?P<pk>[0-9])/update/$', login_required(UserUpdateView.as_view()), name="update_profile"),
url(
r'^signup/success/$',
TemplateView.as_view(template_name='registration/signup_success.html'),
name="signup_success"
),
url(
r'^signup/already-logged-in/$',
TemplateView.as_view(template_name='registration/already_logged_in.html'),
name="already_logged_in"
),
url(
r'^permission-denied/$',
TemplateView.as_view(template_name='registration/permission_denied.html'),
name="permission_denied"
),
url(
r'^error/$',
TemplateView.as_view(template_name='registration/error.html'),
name="error"
),
]
<commit_msg>Change the default no of users from 9<commit_after># created by Chirath R, chirath.02@gmail.com
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.views.generic import TemplateView
from registration.views import UserSignUpView, login, UserUpdateView, ProfileDetailView, ProfileListView
urlpatterns = [
url(r'^login/$', login, name="login"),
url(r'^signup/$', UserSignUpView.as_view(), name="signup"),
url(r'^$', ProfileListView.as_view(), name="profile_list"),
url(r'^(?P<pk>[0-9]+)/$', ProfileDetailView.as_view(), name="profile"),
url(r'^(?P<pk>[0-9]+)/update/$', login_required(UserUpdateView.as_view()), name="update_profile"),
url(
r'^signup/success/$',
TemplateView.as_view(template_name='registration/signup_success.html'),
name="signup_success"
),
url(
r'^signup/already-logged-in/$',
TemplateView.as_view(template_name='registration/already_logged_in.html'),
name="already_logged_in"
),
url(
r'^permission-denied/$',
TemplateView.as_view(template_name='registration/permission_denied.html'),
name="permission_denied"
),
url(
r'^error/$',
TemplateView.as_view(template_name='registration/error.html'),
name="error"
),
]
|
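The substantive change above is `[0-9]` -> `[0-9]+` in the two pk routes: the old pattern matched exactly one digit, so any profile with pk >= 10 fell through to a 404. A standalone illustration:

import re

old = re.compile(r'^(?P<pk>[0-9])/$')
new = re.compile(r'^(?P<pk>[0-9]+)/$')

assert old.match('7/') is not None
assert old.match('12/') is None               # pk 12 was unreachable
assert new.match('12/').group('pk') == '12'   # now resolves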
f4160538b9e55fa2f886a5b6e2a93a26ceb3d5da
|
tests/GIR/test_940_content_manager.py
|
tests/GIR/test_940_content_manager.py
|
# coding=utf-8
import sys
import struct
import unittest
import time
from test_000_config import TestConfig
from test_020_connection import TestConnection
from gi.repository import Midgard
from gi.repository import GObject
class TestContentManagerJobCreate(unittest.TestCase):
mgd = None
bookstore = None
reference = None
manager = None
def setUp(self):
if self.mgd is None:
self.mgd = TestConnection.openConnection()
if self.bookstore is None:
self.bookstore = Midgard.Object.factory(self.mgd, "gir_test_book_store", None)
if self.reference is None:
self.reference = Midgard.ObjectReference(id = Midgard.Guid.new(self.mgd), name = "TestReferenceOne")
if self.manager is None:
self.manager = Midgard.SqlContentManager(connection = self.mgd)
def tearDown(self):
self.bookstore = None
self.reference = None
self.job = None
self.manager = None
self.mgd.close()
self.mgd = None
def testInheritance(self):
self.assertIsInstance(self.manager, GObject.Object)
self.assertIsInstance(self.manager, Midgard.ContentManager)
def testGetConnection(self):
self.assertNotEqual(self.manager.get_connection(), None)
self.assertEqual(self.manager.get_connection(), self.mgd)
def testCreateLoadJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.LOAD, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobLoad)
def testCreateCreateJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.CREATE, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobCreate)
def testCreateUpdateJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.UPDATE, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobUpdate)
def testCreateDeleteJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.DELETE, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobDelete)
def testCreatePurgeJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.PURGE, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobPurge)
if __name__ == "__main__":
unittest.main()
|
Test sql content manager. Refs gh-168
|
Test sql content manager. Refs gh-168
|
Python
|
lgpl-2.1
|
midgardproject/midgard-core,midgardproject/midgard-core,midgardproject/midgard-core,midgardproject/midgard-core
|
Test sql content manager. Refs gh-168
|
# coding=utf-8
import sys
import struct
import unittest
import time
from test_000_config import TestConfig
from test_020_connection import TestConnection
from gi.repository import Midgard
from gi.repository import GObject
class TestContentManagerJobCreate(unittest.TestCase):
mgd = None
bookstore = None
reference = None
manager = None
def setUp(self):
if self.mgd is None:
self.mgd = TestConnection.openConnection()
if self.bookstore is None:
self.bookstore = Midgard.Object.factory(self.mgd, "gir_test_book_store", None)
if self.reference is None:
self.reference = Midgard.ObjectReference(id = Midgard.Guid.new(self.mgd), name = "TestReferenceOne")
if self.manager is None:
self.manager = Midgard.SqlContentManager(connection = self.mgd)
def tearDown(self):
self.bookstore = None
self.reference = None
self.job = None
self.manager = None
self.mgd.close()
self.mgd = None
def testInheritance(self):
self.assertIsInstance(self.manager, GObject.Object)
self.assertIsInstance(self.manager, Midgard.ContentManager)
def testGetConnection(self):
self.assertNotEqual(self.manager.get_connection(), None)
self.assertEqual(self.manager.get_connection(), self.mgd)
def testCreateLoadJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.LOAD, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobLoad)
def testCreateCreateJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.CREATE, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobCreate)
def testCreateUpdateJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.UPDATE, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobUpdate)
def testCreateDeleteJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.DELETE, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobDelete)
def testCreatePurgeJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.PURGE, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobPurge)
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Test sql content manager. Refs gh-168<commit_after>
|
# coding=utf-8
import sys
import struct
import unittest
import time
from test_000_config import TestConfig
from test_020_connection import TestConnection
from gi.repository import Midgard
from gi.repository import GObject
class TestContentManagerJobCreate(unittest.TestCase):
mgd = None
bookstore = None
reference = None
manager = None
def setUp(self):
if self.mgd is None:
self.mgd = TestConnection.openConnection()
if self.bookstore is None:
self.bookstore = Midgard.Object.factory(self.mgd, "gir_test_book_store", None)
if self.reference is None:
self.reference = Midgard.ObjectReference(id = Midgard.Guid.new(self.mgd), name = "TestReferenceOne")
if self.manager is None:
self.manager = Midgard.SqlContentManager(connection = self.mgd)
def tearDown(self):
self.bookstore = None
self.reference = None
self.job = None
self.manager = None
self.mgd.close()
self.mgd = None
def testInheritance(self):
self.assertIsInstance(self.manager, GObject.Object)
self.assertIsInstance(self.manager, Midgard.ContentManager)
def testGetConnection(self):
self.assertNotEqual(self.manager.get_connection(), None)
self.assertEqual(self.manager.get_connection(), self.mgd)
def testCreateLoadJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.LOAD, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobLoad)
def testCreateCreateJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.CREATE, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobCreate)
def testCreateUpdateJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.UPDATE, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobUpdate)
def testCreateDeleteJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.DELETE, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobDelete)
def testCreatePurgeJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.PURGE, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobPurge)
if __name__ == "__main__":
unittest.main()
|
Test sql content manager. Refs gh-168# coding=utf-8
import sys
import struct
import unittest
import time
from test_000_config import TestConfig
from test_020_connection import TestConnection
from gi.repository import Midgard
from gi.repository import GObject
class TestContentManagerJobCreate(unittest.TestCase):
mgd = None
bookstore = None
reference = None
manager = None
def setUp(self):
if self.mgd is None:
self.mgd = TestConnection.openConnection()
if self.bookstore is None:
self.bookstore = Midgard.Object.factory(self.mgd, "gir_test_book_store", None)
if self.reference is None:
self.reference = Midgard.ObjectReference(id = Midgard.Guid.new(self.mgd), name = "TestReferenceOne")
if self.manager is None:
self.manager = Midgard.SqlContentManager(connection = self.mgd)
def tearDown(self):
self.bookstore = None
self.reference = None
self.job = None
self.manager = None
self.mgd.close()
self.mgd = None
def testInheritance(self):
self.assertIsInstance(self.manager, GObject.Object)
self.assertIsInstance(self.manager, Midgard.ContentManager)
def testGetConnection(self):
self.assertNotEqual(self.manager.get_connection(), None)
self.assertEqual(self.manager.get_connection(), self.mgd)
def testCreateLoadJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.LOAD, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobLoad)
def testCreateCreateJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.CREATE, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobCreate)
def testCreateUpdateJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.UPDATE, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobUpdate)
def testCreateDeleteJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.DELETE, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobDelete)
def testCreatePurgeJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.PURGE, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobPurge)
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Test sql content manager. Refs gh-168<commit_after># coding=utf-8
import sys
import struct
import unittest
import time
from test_000_config import TestConfig
from test_020_connection import TestConnection
from gi.repository import Midgard
from gi.repository import GObject
class TestContentManagerJobCreate(unittest.TestCase):
mgd = None
bookstore = None
reference = None
manager = None
def setUp(self):
if self.mgd is None:
self.mgd = TestConnection.openConnection()
if self.bookstore is None:
self.bookstore = Midgard.Object.factory(self.mgd, "gir_test_book_store", None)
if self.reference is None:
self.reference = Midgard.ObjectReference(id = Midgard.Guid.new(self.mgd), name = "TestReferenceOne")
if self.manager is None:
self.manager = Midgard.SqlContentManager(connection = self.mgd)
def tearDown(self):
self.bookstore = None
self.reference = None
self.job = None
self.manager = None
self.mgd.close()
self.mgd = None
def testInheritance(self):
self.assertIsInstance(self.manager, GObject.Object)
self.assertIsInstance(self.manager, Midgard.ContentManager)
def testGetConnection(self):
self.assertNotEqual(self.manager.get_connection(), None)
self.assertEqual(self.manager.get_connection(), self.mgd)
def testCreateLoadJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.LOAD, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobLoad)
def testCreateCreateJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.CREATE, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobCreate)
def testCreateUpdateJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.UPDATE, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobUpdate)
def testCreateDeleteJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.DELETE, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobDelete)
def testCreatePurgeJob(self):
job = self.manager.create_job(Midgard.ContentManagerJobType.PURGE, self.bookstore, self.reference, None)
self.assertIsInstance(job, Midgard.SqlContentManagerJobPurge)
if __name__ == "__main__":
unittest.main()
|
|
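For reference, the type-to-class mapping the tests above pin down for the SQL backend, read straight from the assertions (the manager itself is both a GObject.Object and a Midgard.ContentManager):

# Midgard.ContentManagerJobType.<TYPE> -> class returned by create_job()
JOB_CLASS_BY_TYPE = {
    'LOAD': 'Midgard.SqlContentManagerJobLoad',
    'CREATE': 'Midgard.SqlContentManagerJobCreate',
    'UPDATE': 'Midgard.SqlContentManagerJobUpdate',
    'DELETE': 'Midgard.SqlContentManagerJobDelete',
    'PURGE': 'Midgard.SqlContentManagerJobPurge',
}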
35c264819bac12fcb3baf8a2a33d63dd916f5f86
|
mezzanine_fluent_pages/mezzanine_layout_page/widgets.py
|
mezzanine_fluent_pages/mezzanine_layout_page/widgets.py
|
from django.forms.widgets import Select
class LayoutSelector(Select):
"""
Modified `Select` class to select the original value.
This was adapted from `fluent_pages/pagetypes/fluent_pages/widgets.py`
in the `django-fluent-pages` app.
"""
def render(self, name, value, attrs=None, choices=()):
"""
Modified render to set the data original value.
:param name: The name of the `Select` field.
:param value: The value of the `Select` field.
:param attrs: Additional attributes of the `Select` field.
:param choices: Available choices for the `Select` field.
:return: HTML select.
"""
if attrs:
attrs['data-original-value'] = value
return super(LayoutSelector, self).render(name, value, attrs, choices)
|
from django.forms.widgets import Select
class LayoutSelector(Select):
"""
Modified `Select` class to select the original value.
This was adapted from `fluent_pages/pagetypes/fluent_pages/widgets.py`
in the `django-fluent-pages` app.
"""
def render(self, name, value, attrs=None, *args, **kwargs):
"""
Modified render to set the data original value.
:param name: The name of the `Select` field.
:param value: The value of the `Select` field.
:param attrs: Additional attributes of the `Select` field.
:param args: pass along any other arguments.
:param kwargs: pass along any other keyword arguments.
:return: HTML select.
"""
if attrs:
attrs['data-original-value'] = value
return super(LayoutSelector, self).render(name, value, attrs, *args, **kwargs)
|
Remove keyword argument and allow generic argument passing.
|
Remove keyword argument and allow generic argument passing.
|
Python
|
bsd-2-clause
|
sjdines/mezzanine-fluent-pages,sjdines/mezzanine-fluent-pages,sjdines/mezzanine-fluent-pages
|
from django.forms.widgets import Select
class LayoutSelector(Select):
"""
Modified `Select` class to select the original value.
This was adapted from `fluent_pages/pagetypes/fluent_pages/widgets.py`
in the `django-fluent-pages` app.
"""
def render(self, name, value, attrs=None, choices=()):
"""
Modified render to set the data original value.
:param name: The name of the `Select` field.
:param value: The value of the `Select` field.
:param attrs: Additional attributes of the `Select` field.
:param choices: Available choices for the `Select` field.
:return: HTML select.
"""
if attrs:
attrs['data-original-value'] = value
return super(LayoutSelector, self).render(name, value, attrs, choices)
Remove keyword argument and allow generic argument passing.
|
from django.forms.widgets import Select
class LayoutSelector(Select):
"""
Modified `Select` class to select the original value.
This was adapted from `fluent_pages/pagetypes/fluent_pages/widgets.py`
in the `django-fluent-pages` app.
"""
def render(self, name, value, attrs=None, *args, **kwargs):
"""
Modified render to set the data original value.
:param name: The name of the `Select` field.
:param value: The value of the `Select` field.
:param attrs: Additional attributes of the `Select` field.
:param args: pass along any other arguments.
:param kwargs: pass along any other keyword arguments.
:return: HTML select.
"""
if attrs:
attrs['data-original-value'] = value
return super(LayoutSelector, self).render(name, value, attrs, *args, **kwargs)
|
<commit_before>from django.forms.widgets import Select
class LayoutSelector(Select):
"""
Modified `Select` class to select the original value.
This was adapted from `fluent_pages/pagetypes/fluent_pages/widgets.py`
in the `django-fluent-pages` app.
"""
def render(self, name, value, attrs=None, choices=()):
"""
Modified render to set the data original value.
:param name: The name of the `Select` field.
:param value: The value of the `Select` field.
:param attrs: Additional attributes of the `Select` field.
:param choices: Available choices for the `Select` field.
:return: HTML select.
"""
if attrs:
attrs['data-original-value'] = value
return super(LayoutSelector, self).render(name, value, attrs, choices)
<commit_msg>Remove keyword argument and allow generic argument passing.<commit_after>
|
from django.forms.widgets import Select
class LayoutSelector(Select):
"""
Modified `Select` class to select the original value.
This was adapted from `fluent_pages/pagetypes/fluent_pages/widgets.py`
in the `django-fluent-pages` app.
"""
def render(self, name, value, attrs=None, *args, **kwargs):
"""
Modified render to set the data original value.
:param name: The name of the `Select` field.
:param value: The value of the `Select` field.
:param attrs: Additional attributes of the `Select` field.
:param args: pass along any other arguments.
:param kwargs: pass along any other keyword arguments.
:return: HTML select.
"""
if attrs:
attrs['data-original-value'] = value
return super(LayoutSelector, self).render(name, value, attrs, *args, **kwargs)
|
from django.forms.widgets import Select
class LayoutSelector(Select):
"""
Modified `Select` class to select the original value.
This was adapted from `fluent_pages/pagetypes/fluent_pages/widgets.py`
in the `django-fluent-pages` app.
"""
def render(self, name, value, attrs=None, choices=()):
"""
Modified render to set the data original value.
:param name: The name of the `Select` field.
:param value: The value of the `Select` field.
:param attrs: Additional attributes of the `Select` field.
:param choices: Available choices for the `Select` field.
:return: HTML select.
"""
if attrs:
attrs['data-original-value'] = value
return super(LayoutSelector, self).render(name, value, attrs, choices)
Remove keyword argument and allow generic argument passing.from django.forms.widgets import Select
class LayoutSelector(Select):
"""
Modified `Select` class to select the original value.
This was adapted from `fluent_pages/pagetypes/fluent_pages/widgets.py`
in the `django-fluent-pages` app.
"""
def render(self, name, value, attrs=None, *args, **kwargs):
"""
Modified render to set the data original value.
:param name: The name of the `Select` field.
:param value: The value of the `Select` field.
:param attrs: Additional attributes of the `Select` field.
:param args: pass along any other arguments.
:param kwargs: pass along any other keyword arguments.
:return: HTML select.
"""
if attrs:
attrs['data-original-value'] = value
return super(LayoutSelector, self).render(name, value, attrs, *args, **kwargs)
|
<commit_before>from django.forms.widgets import Select
class LayoutSelector(Select):
"""
Modified `Select` class to select the original value.
This was adapted from `fluent_pages/pagetypes/fluent_pages/widgets.py`
in the `django-fluent-pages` app.
"""
def render(self, name, value, attrs=None, choices=()):
"""
Modified render to set the data original value.
:param name: The name of the `Select` field.
:param value: The value of the `Select` field.
:param attrs: Additional attributes of the `Select` field.
:param choices: Available choices for the `Select` field.
:return: HTML select.
"""
if attrs:
attrs['data-original-value'] = value
return super(LayoutSelector, self).render(name, value, attrs, choices)
<commit_msg>Remove keyword argument and allow generic argument passing.<commit_after>from django.forms.widgets import Select
class LayoutSelector(Select):
"""
Modified `Select` class to select the original value.
This was adapted from `fluent_pages/pagetypes/fluent_pages/widgets.py`
in the `django-fluent-pages` app.
"""
def render(self, name, value, attrs=None, *args, **kwargs):
"""
Modified render to set the data original value.
:param name: The name of the `Select` field.
:param value: The value of the `Select` field.
:param attrs: Additional attributes of the `Select` field.
:param args: pass along any other arguments.
:param kwargs: pass along any other keyword arguments.
:return: HTML select.
"""
if attrs:
attrs['data-original-value'] = value
return super(LayoutSelector, self).render(name, value, attrs, *args, **kwargs)
|
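The signature change above matters because Django 1.11 moved widgets to template-based rendering and dropped the `choices` argument from `Widget.render()`; forwarding `*args, **kwargs` keeps the widget working on both sides of that change. Rendering sketch (requires configured Django settings, since `Select.render()` goes through the template engine on recent Django):

from mezzanine_fluent_pages.mezzanine_layout_page.widgets import LayoutSelector

widget = LayoutSelector(choices=[(1, 'Default'), (2, 'Two column')])
html = widget.render('layout', 2, attrs={'id': 'id_layout'})
# html carries data-original-value="2", which front-end JS can use to
# detect whether the selection has changed from the stored value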
62bace1f2a326ab6ab660a23bc1776a1895f5f3d
|
piper/abc.py
|
piper/abc.py
|
import abc
import logbook
import jsonschema
from piper.utils import DotDict
class DynamicItem(object):
"""
Dynamic base class that defines things all Piper classes need.
Many parts of the piper infrastructure are about being able to dynamically
choose which classes should execute actions. This class includes the things
that are identical in all of these, solely to avoid repetition.
"""
__metaclass__ = abc.ABCMeta
def __init__(self, ns, config):
self.ns = ns
self.config = DotDict(config)
self.log = logbook.Logger(self.__class__.__name__)
def __repr__(self): # pragma: nocover
return self.__str__()
@property
def schema(self):
if not hasattr(self, '_schema'):
self._schema = {
'$schema': 'http://json-schema.org/draft-04/schema',
'type': 'object',
'additionalProperties': False,
'required': ['class'],
'properties': {
'class': {
'description': 'Dynamic class to load.',
'type': 'string',
},
},
}
return self._schema
def validate(self):
jsonschema.validate(self.config.data, self.schema)
|
Add abstract base class for dynamic items
|
Add abstract base class for dynamic items
|
Python
|
mit
|
thiderman/piper
|
Add abstract base class for dynamic items
|
import abc
import logbook
import jsonschema
from piper.utils import DotDict
class DynamicItem(object):
"""
Dynamic base class that defines things all Piper classes need.
Many parts of the piper infrastructure are about being able to dynamically
choose which classes should execute actions. This class includes the things
that are identical in all of these, solely to avoid repetition.
"""
__metaclass__ = abc.ABCMeta
def __init__(self, ns, config):
self.ns = ns
self.config = DotDict(config)
self.log = logbook.Logger(self.__class__.__name__)
def __repr__(self): # pragma: nocover
return self.__str__()
@property
def schema(self):
if not hasattr(self, '_schema'):
self._schema = {
'$schema': 'http://json-schema.org/draft-04/schema',
'type': 'object',
'additionalProperties': False,
'required': ['class'],
'properties': {
'class': {
'description': 'Dynamic class to load.',
'type': 'string',
},
},
}
return self._schema
def validate(self):
jsonschema.validate(self.config.data, self.schema)
|
<commit_before><commit_msg>Add abstract base class for dynamic items<commit_after>
|
import abc
import logbook
import jsonschema
from piper.utils import DotDict
class DynamicItem(object):
"""
Dynamic base class that defines things all Piper classes need.
Many parts of the piper infrastructure are about being able to dynamically
choose which classes should execute actions. This class includes the things
that are identical in all of these, solely to avoid repetition.
"""
__metaclass__ = abc.ABCMeta
def __init__(self, ns, config):
self.ns = ns
self.config = DotDict(config)
self.log = logbook.Logger(self.__class__.__name__)
def __repr__(self): # pragma: nocover
return self.__str__()
@property
def schema(self):
if not hasattr(self, '_schema'):
self._schema = {
'$schema': 'http://json-schema.org/draft-04/schema',
'type': 'object',
'additionalProperties': False,
'required': ['class'],
'properties': {
'class': {
'description': 'Dynamic class to load.',
'type': 'string',
},
},
}
return self._schema
def validate(self):
jsonschema.validate(self.config.data, self.schema)
|
Add abstract base class for dynamic itemsimport abc
import logbook
import jsonschema
from piper.utils import DotDict
class DynamicItem(object):
"""
Dynamic base class that defines things all Piper classes need.
Many parts of the piper infrastructure are about being able to dynamically
choose which classes should execute actions. This class includes the things
that are identical in all of these, solely to avoid repetition.
"""
__metaclass__ = abc.ABCMeta
def __init__(self, ns, config):
self.ns = ns
self.config = DotDict(config)
self.log = logbook.Logger(self.__class__.__name__)
def __repr__(self): # pragma: nocover
return self.__str__()
@property
def schema(self):
if not hasattr(self, '_schema'):
self._schema = {
'$schema': 'http://json-schema.org/draft-04/schema',
'type': 'object',
'additionalProperties': False,
'required': ['class'],
'properties': {
'class': {
'description': 'Dynamic class to load.',
'type': 'string',
},
},
}
return self._schema
def validate(self):
jsonschema.validate(self.config.data, self.schema)
|
<commit_before><commit_msg>Add abstract base class for dynamic items<commit_after>import abc
import logbook
import jsonschema
from piper.utils import DotDict
class DynamicItem(object):
"""
Dynamic base class that defines things all Piper classes need.
Many parts of the piper infrastructure are about being able to dynamically
choose which classes should execute actions. This class includes the things
that are identical in all of these, solely to avoid repetition.
"""
__metaclass__ = abc.ABCMeta
def __init__(self, ns, config):
self.ns = ns
self.config = DotDict(config)
self.log = logbook.Logger(self.__class__.__name__)
def __repr__(self): # pragma: nocover
return self.__str__()
@property
def schema(self):
if not hasattr(self, '_schema'):
self._schema = {
'$schema': 'http://json-schema.org/draft-04/schema',
'type': 'object',
'additionalProperties': False,
'required': ['class'],
'properties': {
'class': {
'description': 'Dynamic class to load.',
'type': 'string',
},
},
}
return self._schema
def validate(self):
jsonschema.validate(self.config.data, self.schema)
|
|
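A minimal subclass sketch for the DynamicItem base above; the import path and the EchoStep class are hypothetical, and piper plus logbook are assumed installed:
from piper.abc import DynamicItem  # assumed location of the base class

class EchoStep(DynamicItem):
    """Hypothetical item that only logs the class it was configured with."""
    def run(self):
        # self.config.data is the raw mapping wrapped by DotDict
        self.log.info('running {0}'.format(self.config.data['class']))

step = EchoStep(None, {'class': 'piper.abc.EchoStep'})
step.validate()  # passes: the required 'class' key is the only one present
step.run()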
5616a5a3987106654ca0c15d73d1db6c1b8c7f3d
|
distribution/create_linux_shortcuts.py
|
distribution/create_linux_shortcuts.py
|
#!/usr/bin/env python3
import sys
import os
import os.path as op
try:
import picasso
except ImportError:
print("This script must be run within an environment "
"in which picasso is installed!", file=sys.stderr)
raise
SUBCMD = ("average", "design", "filter", "localize", "render", "simulate")
SCRIPT_PATH_ROOT = (os.sep, "usr", "bin", "picasso")
DESKTOP_PATH_ROOT = (os.sep, "usr", "share", "applications", "picasso_{subcmd}.desktop")
SCRIPT_PATH_USER = ("~", "bin", "picasso")
DESKTOP_PATH_USER = ("~", ".local", "share", "applications", "picasso_{subcmd}.desktop")
DESKTOP_TEMPLATE = """[Desktop Entry]
Name=Picasso {subcmd_cap}
Exec={exec_path} -m picasso {subcmd}
Terminal=false
Type=Application
Icon={icon_path}
Categories=Education;
"""
SCRIPT_TEMPLATE = """#!{exec_path}
if __name__ == "__main__":
from picasso.__main__ import main
main()
"""
def main(exec_path=None, icon_path=None, script_path=None, desktop_path=None):
if exec_path is None:
exec_path = sys.executable
if icon_path is None:
import picasso.gui
icon_path = op.join(op.dirname(picasso.gui.__file__), "icons")
if os.geteuid() == 0:
if script_path is None:
script_path = op.join(*SCRIPT_PATH_ROOT)
if desktop_path is None:
desktop_path = op.join(*DESKTOP_PATH_ROOT)
else:
if script_path is None:
script_path = op.expanduser(op.join(*SCRIPT_PATH_USER))
if desktop_path is None:
desktop_path = op.expanduser(op.join(*DESKTOP_PATH_USER))
print("Writing files:")
with open(script_path, 'xt') as f:
f.write(SCRIPT_TEMPLATE.format(exec_path=exec_path))
print(script_path)
os.chmod(script_path, 0o755)
for subcmd in SUBCMD:
icon_file = op.join(icon_path, f"{subcmd}.ico")
desktop_file = op.join(desktop_path.format(subcmd=subcmd))
with open(desktop_file, 'xt') as f:
f.write(DESKTOP_TEMPLATE.format(
subcmd=subcmd,
subcmd_cap=subcmd.capitalize(),
exec_path=exec_path,
icon_path=icon_file,
))
print(desktop_file)
os.chmod(desktop_file, 0o755)
if __name__ == '__main__':
if sys.platform != 'linux':
raise RuntimeError("Other operating system than Linux detected.")
main()
|
Add script for creating shortcuts in linux
|
Add script for creating shortcuts in linux
|
Python
|
mit
|
jungmannlab/picasso,jungmannlab/picasso,jungmannlab/picasso
|
Add script for creating shortcuts in linux
|
#!/usr/bin/env python3
import sys
import os
import os.path as op
try:
import picasso
except ImportError:
print("This script must be run within an environment "
"in which picasso is installed!", file=sys.stderr)
raise
SUBCMD = ("average", "design", "filter", "localize", "render", "simulate")
SCRIPT_PATH_ROOT = (os.sep, "usr", "bin", "picasso")
DESKTOP_PATH_ROOT = (os.sep, "usr", "share", "applications", "picasso_{subcmd}.desktop")
SCRIPT_PATH_USER = ("~", "bin", "picasso")
DESKTOP_PATH_USER = ("~", ".local", "share", "applications", "picasso_{subcmd}.desktop")
DESKTOP_TEMPLATE = """[Desktop Entry]
Name=Picasso {subcmd_cap}
Exec={exec_path} -m picasso {subcmd}
Terminal=false
Type=Application
Icon={icon_path}
Categories=Education;
"""
SCRIPT_TEMPLATE = """#!{exec_path}
if __name__ == "__main__":
from picasso.__main__ import main
main()
"""
def main(exec_path=None, icon_path=None, script_path=None, desktop_path=None):
if exec_path is None:
exec_path = sys.executable
if icon_path is None:
import picasso.gui
icon_path = op.join(op.dirname(picasso.gui.__file__), "icons")
if os.geteuid() == 0:
if script_path is None:
script_path = op.join(*SCRIPT_PATH_ROOT)
if desktop_path is None:
desktop_path = op.join(*DESKTOP_PATH_ROOT)
else:
if script_path is None:
script_path = op.expanduser(op.join(*SCRIPT_PATH_USER))
if desktop_path is None:
desktop_path = op.expanduser(op.join(*DESKTOP_PATH_USER))
print("Writing files:")
with open(script_path, 'xt') as f:
f.write(SCRIPT_TEMPLATE.format(exec_path=exec_path))
print(script_path)
os.chmod(script_path, 0o755)
for subcmd in SUBCMD:
icon_file = op.join(icon_path, f"{subcmd}.ico")
desktop_file = op.join(desktop_path.format(subcmd=subcmd))
with open(desktop_file, 'xt') as f:
f.write(DESKTOP_TEMPLATE.format(
subcmd=subcmd,
subcmd_cap=subcmd.capitalize(),
exec_path=exec_path,
icon_path=icon_file,
))
print(desktop_file)
os.chmod(desktop_file, 0o755)
if __name__ == '__main__':
if sys.platform != 'linux':
raise RuntimeError("Other operating system than Linux detected.")
main()
|
<commit_before><commit_msg>Add script for creating shortcuts in linux<commit_after>
|
#!/usr/bin/env python3
import sys
import os
import os.path as op
try:
import picasso
except ImportError:
print("This script must be run within an environment "
"in which picasso is installed!", file=sys.stderr)
raise
SUBCMD = ("average", "design", "filter", "localize", "render", "simulate")
SCRIPT_PATH_ROOT = (os.sep, "usr", "bin", "picasso")
DESKTOP_PATH_ROOT = (os.sep, "usr", "share", "applications", "picasso_{subcmd}.desktop")
SCRIPT_PATH_USER = ("~", "bin", "picasso")
DESKTOP_PATH_USER = ("~", ".local", "share", "applications", "picasso_{subcmd}.desktop")
DESKTOP_TEMPLATE = """[Desktop Entry]
Name=Picasso {subcmd_cap}
Exec={exec_path} -m picasso {subcmd}
Terminal=false
Type=Application
Icon={icon_path}
Categories=Education;
"""
SCRIPT_TEMPLATE = """#!{exec_path}
if __name__ == "__main__":
from picasso.__main__ import main
main()
"""
def main(exec_path=None, icon_path=None, script_path=None, desktop_path=None):
if exec_path is None:
exec_path = sys.executable
if icon_path is None:
import picasso.gui
icon_path = op.join(op.dirname(picasso.gui.__file__), "icons")
if os.geteuid() == 0:
if script_path is None:
script_path = op.join(*SCRIPT_PATH_ROOT)
if desktop_path is None:
desktop_path = op.join(*DESKTOP_PATH_ROOT)
else:
if script_path is None:
script_path = op.expanduser(op.join(*SCRIPT_PATH_USER))
if desktop_path is None:
desktop_path = op.expanduser(op.join(*DESKTOP_PATH_USER))
print("Writing files:")
with open(script_path, 'xt') as f:
f.write(SCRIPT_TEMPLATE.format(exec_path=exec_path))
print(script_path)
os.chmod(script_path, 0o755)
for subcmd in SUBCMD:
icon_file = op.join(icon_path, f"{subcmd}.ico")
desktop_file = op.join(desktop_path.format(subcmd=subcmd))
with open(desktop_file, 'xt') as f:
f.write(DESKTOP_TEMPLATE.format(
subcmd=subcmd,
subcmd_cap=subcmd.capitalize(),
exec_path=exec_path,
icon_path=icon_file,
))
print(desktop_file)
os.chmod(desktop_file, 0o755)
if __name__ == '__main__':
if sys.platform != 'linux':
raise RuntimeError("Other operating system than Linux detected.")
main()
|
Add script for creating shortcuts in linux
#!/usr/bin/env python3
import sys
import os
import os.path as op
try:
import picasso
except ImportError:
print("This script must be run within an environment "
"in which picasso is installed!", file=sys.stderr)
raise
SUBCMD = ("average", "design", "filter", "localize", "render", "simulate")
SCRIPT_PATH_ROOT = (os.sep, "usr", "bin", "picasso")
DESKTOP_PATH_ROOT = (os.sep, "usr", "share", "applications", "picasso_{subcmd}.desktop")
SCRIPT_PATH_USER = ("~", "bin", "picasso")
DESKTOP_PATH_USER = ("~", ".local", "share", "applications", "picasso_{subcmd}.desktop")
DESKTOP_TEMPLATE = """[Desktop Entry]
Name=Picasso {subcmd_cap}
Exec={exec_path} -m picasso {subcmd}
Terminal=false
Type=Application
Icon={icon_path}
Categories=Education;
"""
SCRIPT_TEMPLATE = """#!{exec_path}
if __name__ == "__main__":
from picasso.__main__ import main
main()
"""
def main(exec_path=None, icon_path=None, script_path=None, desktop_path=None):
if exec_path is None:
exec_path = sys.executable
if icon_path is None:
import picasso.gui
icon_path = op.join(op.dirname(picasso.gui.__file__), "icons")
if os.geteuid() == 0:
if script_path is None:
script_path = op.join(*SCRIPT_PATH_ROOT)
if desktop_path is None:
desktop_path = op.join(*DESKTOP_PATH_ROOT)
else:
if script_path is None:
script_path = op.expanduser(op.join(*SCRIPT_PATH_USER))
if desktop_path is None:
desktop_path = op.expanduser(op.join(*DESKTOP_PATH_USER))
print("Writing files:")
with open(script_path, 'xt') as f:
f.write(SCRIPT_TEMPLATE.format(exec_path=exec_path))
print(script_path)
os.chmod(script_path, 0o755)
for subcmd in SUBCMD:
icon_file = op.join(icon_path, f"{subcmd}.ico")
desktop_file = op.join(desktop_path.format(subcmd=subcmd))
with open(desktop_file, 'xt') as f:
f.write(DESKTOP_TEMPLATE.format(
subcmd=subcmd,
subcmd_cap=subcmd.capitalize(),
exec_path=exec_path,
icon_path=icon_file,
))
print(desktop_file)
os.chmod(desktop_file, 0o755)
if __name__ == '__main__':
if sys.platform != 'linux':
raise RuntimeError("Other operating system than Linux detected.")
main()
|
<commit_before><commit_msg>Add script for creating shortcuts in linux<commit_after>#!/usr/bin/env python3
import sys
import os
import os.path as op
try:
import picasso
except ImportError:
print("This script must be run within an environment "
"in which picasso is installed!", file=sys.stderr)
raise
SUBCMD = ("average", "design", "filter", "localize", "render", "simulate")
SCRIPT_PATH_ROOT = (os.sep, "usr", "bin", "picasso")
DESKTOP_PATH_ROOT = (os.sep, "usr", "share", "applications", "picasso_{subcmd}.desktop")
SCRIPT_PATH_USER = ("~", "bin", "picasso")
DESKTOP_PATH_USER = ("~", ".local", "share", "applications", "picasso_{subcmd}.desktop")
DESKTOP_TEMPLATE = """[Desktop Entry]
Name=Picasso {subcmd_cap}
Exec={exec_path} -m picasso {subcmd}
Terminal=false
Type=Application
Icon={icon_path}
Categories=Education;
"""
SCRIPT_TEMPLATE = """#!{exec_path}
if __name__ == "__main__":
from picasso.__main__ import main
main()
"""
def main(exec_path=None, icon_path=None, script_path=None, desktop_path=None):
if exec_path is None:
exec_path = sys.executable
if icon_path is None:
import picasso.gui
icon_path = op.join(op.dirname(picasso.gui.__file__), "icons")
if os.geteuid() == 0:
if script_path is None:
script_path = op.join(*SCRIPT_PATH_ROOT)
if desktop_path is None:
desktop_path = op.join(*DESKTOP_PATH_ROOT)
else:
if script_path is None:
script_path = op.expanduser(op.join(*SCRIPT_PATH_USER))
if desktop_path is None:
desktop_path = op.expanduser(op.join(*DESKTOP_PATH_USER))
print("Writing files:")
with open(script_path, 'xt') as f:
f.write(SCRIPT_TEMPLATE.format(exec_path=exec_path))
print(script_path)
os.chmod(script_path, 0o755)
for subcmd in SUBCMD:
icon_file = op.join(icon_path, f"{subcmd}.ico")
desktop_file = op.join(desktop_path.format(subcmd=subcmd))
with open(desktop_file, 'xt') as f:
f.write(DESKTOP_TEMPLATE.format(
subcmd=subcmd,
subcmd_cap=subcmd.capitalize(),
exec_path=exec_path,
icon_path=icon_file,
))
print(desktop_file)
os.chmod(desktop_file, 0o755)
if __name__ == '__main__':
if sys.platform != 'linux':
raise RuntimeError("Other operating system than Linux detected.")
main()
|
|
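For reference, here is what one rendered .desktop entry from the template above looks like when filled in by hand; the interpreter and icon paths are illustrative, not guaranteed install locations:
# Stand-alone rendering of the same template with example values.
DESKTOP_TEMPLATE = """[Desktop Entry]
Name=Picasso {subcmd_cap}
Exec={exec_path} -m picasso {subcmd}
Terminal=false
Type=Application
Icon={icon_path}
Categories=Education;
"""

print(DESKTOP_TEMPLATE.format(
    subcmd='render',
    subcmd_cap='Render',
    exec_path='/usr/bin/python3',
    icon_path='/usr/lib/python3/dist-packages/picasso/gui/icons/render.ico',
))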
b8cd62385b904ba536642c68eedd9316344bf822
|
authenticate_imgur.py
|
authenticate_imgur.py
|
#!/usr/bin/env python3
from imgurpython import ImgurClient
import config
CONFIG_FILE = "config.yaml"
def authenticate():
conf = config.read_file(CONFIG_FILE)
    # Get client ID and secret from the config file (config.yaml)
client = ImgurClient(conf.imgur.client_id, conf.imgur.client_secret)
# Authorization flow, pin example (see docs for other auth types)
authorization_url = client.get_auth_url('pin')
print("Go to the following URL: {0}".format(authorization_url))
# Read in the pin
pin = input("Enter pin code: ")
# ... redirect user to `authorization_url`, obtain pin (or code or token) ...
credentials = client.authorize(pin, 'pin')
client.set_user_auth(credentials['access_token'], credentials['refresh_token'])
print("Authentication successful! Here are the details:")
print(" Access token: {0}".format(credentials['access_token']))
print(" Refresh token: {0}".format(credentials['refresh_token']))
if __name__ == "__main__":
authenticate()
|
Add script to generate refresh_token
|
Add script to generate refresh_token
https://github.com/Imgur/imgurpython/blob/master/examples/auth.py
|
Python
|
mit
|
FichteFoll/CodetalkIRCBot,FichteFoll/TelegramIRCImageProxy,codetalkio/TelegramIRCImageProxy
|
Add script to generate refresh_token
https://github.com/Imgur/imgurpython/blob/master/examples/auth.py
|
#!/usr/bin/env python3
from imgurpython import ImgurClient
import config
CONFIG_FILE = "config.yaml"
def authenticate():
conf = config.read_file(CONFIG_FILE)
    # Get client ID and secret from the config file (config.yaml)
client = ImgurClient(conf.imgur.client_id, conf.imgur.client_secret)
# Authorization flow, pin example (see docs for other auth types)
authorization_url = client.get_auth_url('pin')
print("Go to the following URL: {0}".format(authorization_url))
# Read in the pin
pin = input("Enter pin code: ")
# ... redirect user to `authorization_url`, obtain pin (or code or token) ...
credentials = client.authorize(pin, 'pin')
client.set_user_auth(credentials['access_token'], credentials['refresh_token'])
print("Authentication successful! Here are the details:")
print(" Access token: {0}".format(credentials['access_token']))
print(" Refresh token: {0}".format(credentials['refresh_token']))
if __name__ == "__main__":
authenticate()
|
<commit_before><commit_msg>Add script to generate refresh_token
https://github.com/Imgur/imgurpython/blob/master/examples/auth.py<commit_after>
|
#!/usr/bin/env python3
from imgurpython import ImgurClient
import config
CONFIG_FILE = "config.yaml"
def authenticate():
conf = config.read_file(CONFIG_FILE)
    # Get client ID and secret from the config file (config.yaml)
client = ImgurClient(conf.imgur.client_id, conf.imgur.client_secret)
# Authorization flow, pin example (see docs for other auth types)
authorization_url = client.get_auth_url('pin')
print("Go to the following URL: {0}".format(authorization_url))
# Read in the pin
pin = input("Enter pin code: ")
# ... redirect user to `authorization_url`, obtain pin (or code or token) ...
credentials = client.authorize(pin, 'pin')
client.set_user_auth(credentials['access_token'], credentials['refresh_token'])
print("Authentication successful! Here are the details:")
print(" Access token: {0}".format(credentials['access_token']))
print(" Refresh token: {0}".format(credentials['refresh_token']))
if __name__ == "__main__":
authenticate()
|
Add script to generate refresh_token
https://github.com/Imgur/imgurpython/blob/master/examples/auth.py
#!/usr/bin/env python3
from imgurpython import ImgurClient
import config
CONFIG_FILE = "config.yaml"
def authenticate():
conf = config.read_file(CONFIG_FILE)
    # Get client ID and secret from the config file (config.yaml)
client = ImgurClient(conf.imgur.client_id, conf.imgur.client_secret)
# Authorization flow, pin example (see docs for other auth types)
authorization_url = client.get_auth_url('pin')
print("Go to the following URL: {0}".format(authorization_url))
# Read in the pin
pin = input("Enter pin code: ")
# ... redirect user to `authorization_url`, obtain pin (or code or token) ...
credentials = client.authorize(pin, 'pin')
client.set_user_auth(credentials['access_token'], credentials['refresh_token'])
print("Authentication successful! Here are the details:")
print(" Access token: {0}".format(credentials['access_token']))
print(" Refresh token: {0}".format(credentials['refresh_token']))
if __name__ == "__main__":
authenticate()
|
<commit_before><commit_msg>Add script to generate refresh_token
https://github.com/Imgur/imgurpython/blob/master/examples/auth.py<commit_after>#!/usr/bin/env python3
from imgurpython import ImgurClient
import config
CONFIG_FILE = "config.yaml"
def authenticate():
conf = config.read_file(CONFIG_FILE)
    # Get client ID and secret from the config file (config.yaml)
client = ImgurClient(conf.imgur.client_id, conf.imgur.client_secret)
# Authorization flow, pin example (see docs for other auth types)
authorization_url = client.get_auth_url('pin')
print("Go to the following URL: {0}".format(authorization_url))
# Read in the pin
pin = input("Enter pin code: ")
# ... redirect user to `authorization_url`, obtain pin (or code or token) ...
credentials = client.authorize(pin, 'pin')
client.set_user_auth(credentials['access_token'], credentials['refresh_token'])
print("Authentication successful! Here are the details:")
print(" Access token: {0}".format(credentials['access_token']))
print(" Refresh token: {0}".format(credentials['refresh_token']))
if __name__ == "__main__":
authenticate()
|
|
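Once the printed tokens are saved, later runs can skip the pin flow entirely. A sketch assuming imgurpython's ImgurClient accepts access and refresh tokens at construction (all credential values below are placeholders):
from imgurpython import ImgurClient

# Placeholders; in this project the real values would live in config.yaml.
CLIENT_ID = 'your-client-id'
CLIENT_SECRET = 'your-client-secret'
ACCESS_TOKEN = 'stored-access-token'
REFRESH_TOKEN = 'stored-refresh-token'

# With a refresh token supplied, the client can renew the access token
# when it expires instead of repeating the interactive pin exchange.
client = ImgurClient(CLIENT_ID, CLIENT_SECRET, ACCESS_TOKEN, REFRESH_TOKEN)
print(client.get_account('me').url)  # assumed call; prints the account name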
9831af65019e245acd65ebd181d6936392d1d2ce
|
examples/dump_category.py
|
examples/dump_category.py
|
"""dump_category.py - utility script for mps_edits."""
import sys
import json
import getopt
import urllib
import urllib2
class ArticleFetchError(Exception):
pass
def get_articles(base, category):
regular_params = [
('cmtitle', "Category:%s" % (category)),
('action', 'query'),
('list', 'categorymembers'),
('cmlimit', '500'),
('cmprop', 'title|sortkey|timestamp'),
('format', 'json'),
]
continue_params = []
articles = []
while True:
qs = urllib.urlencode(regular_params + continue_params)
url = base + '/w/api.php?' + qs
req = urllib2.Request(url)
try:
json_data = urllib2.urlopen(req).read()
except urllib2.URLError, e:
raise ArticleFetchError(
u'Error fetching URL ' + url + u' ' + unicode(e))
try:
data = json.loads(json_data)
except ValueError, e:
raise ArticleFetchError(
u'Invalid JSON response from Wikipedia: %s' % (e,))
articles += [article['title'] for article in data['query']['categorymembers']]
if 'query-continue' in data:
# how to fetch the next page of results
continue_params = [(
'cmcontinue',
data['query-continue']['categorymembers']['cmcontinue'])]
continue
break
return articles
def usage():
return """%s - given a wikipedia category, dumps all article titles in it
Usage: %s [options] 'category name'
Options:
-w, --wiki=W Use the given wikipedia. Default: https://en.wikipedia.org
-h, --help Show this message and exit
""" % (sys.argv[0], sys.argv[0])
def main():
base = 'https://en.wikipedia.org'
try:
opts, args = getopt.gnu_getopt(sys.argv[1:], 'w:h', ['wiki=', 'help'])
for o, a in opts:
if o in ('-w', '--wiki'):
base = a
if o in ('-h', '--help'):
print usage()
return 0
if len(args) != 1:
raise getopt.GetoptError('category name required.')
        category = args[0]
except getopt.GetoptError, e:
print >> sys.stderr, e
print >> sys.stderr, usage()
return 2
try:
articles = get_articles(base, category)
except ArticleFetchError, e:
print >> sys.stderr, e
return 1
for article in articles:
print article
if __name__ == '__main__':
sys.exit(main())
|
Add example script that dumps a wikipedia category
|
Add example script that dumps a wikipedia category
|
Python
|
mit
|
flexo/wikitweets
|
Add example script that dumps a wikipedia category
|
"""dump_category.py - utility script for mps_edits."""
import sys
import json
import getopt
import urllib
import urllib2
class ArticleFetchError(Exception):
pass
def get_articles(base, category):
regular_params = [
('cmtitle', "Category:%s" % (category)),
('action', 'query'),
('list', 'categorymembers'),
('cmlimit', '500'),
('cmprop', 'title|sortkey|timestamp'),
('format', 'json'),
]
continue_params = []
articles = []
while True:
qs = urllib.urlencode(regular_params + continue_params)
url = base + '/w/api.php?' + qs
req = urllib2.Request(url)
try:
json_data = urllib2.urlopen(req).read()
except urllib2.URLError, e:
raise ArticleFetchError(
u'Error fetching URL ' + url + u' ' + unicode(e))
try:
data = json.loads(json_data)
except ValueError, e:
raise ArticleFetchError(
u'Invalid JSON response from Wikipedia: %s' % (e,))
articles += [article['title'] for article in data['query']['categorymembers']]
if 'query-continue' in data:
# how to fetch the next page of results
continue_params = [(
'cmcontinue',
data['query-continue']['categorymembers']['cmcontinue'])]
continue
break
return articles
def usage():
return """%s - given a wikipedia category, dumps all article titles in it
Usage: %s [options] 'category name'
Options:
-w, --wiki=W Use the given wikipedia. Default: https://en.wikipedia.org
-h, --help Show this message and exit
""" % (sys.argv[0], sys.argv[0])
def main():
base = 'https://en.wikipedia.org'
try:
opts, args = getopt.gnu_getopt(sys.argv[1:], 'w:h', ['wiki=', 'help'])
for o, a in opts:
if o in ('-w', '--wiki'):
base = a
if o in ('-h', '--help'):
print usage()
return 0
if len(args) != 1:
raise getopt.GetoptError('category name required.')
        category = args[0]
except getopt.GetoptError, e:
print >> sys.stderr, e
print >> sys.stderr, usage()
return 2
try:
articles = get_articles(base, category)
except ArticleFetchError, e:
print >> sys.stderr, e
return 1
for article in articles:
print article
if __name__ == '__main__':
sys.exit(main())
|
<commit_before><commit_msg>Add example script that dumps a wikipedia category<commit_after>
|
"""dump_category.py - utility script for mps_edits."""
import sys
import json
import getopt
import urllib
import urllib2
class ArticleFetchError(Exception):
pass
def get_articles(base, category):
regular_params = [
('cmtitle', "Category:%s" % (category)),
('action', 'query'),
('list', 'categorymembers'),
('cmlimit', '500'),
('cmprop', 'title|sortkey|timestamp'),
('format', 'json'),
]
continue_params = []
articles = []
while True:
qs = urllib.urlencode(regular_params + continue_params)
url = base + '/w/api.php?' + qs
req = urllib2.Request(url)
try:
json_data = urllib2.urlopen(req).read()
except urllib2.URLError, e:
raise ArticleFetchError(
u'Error fetching URL ' + url + u' ' + unicode(e))
try:
data = json.loads(json_data)
except ValueError, e:
raise ArticleFetchError(
u'Invalid JSON response from Wikipedia: %s' % (e,))
articles += [article['title'] for article in data['query']['categorymembers']]
if 'query-continue' in data:
# how to fetch the next page of results
continue_params = [(
'cmcontinue',
data['query-continue']['categorymembers']['cmcontinue'])]
continue
break
return articles
def usage():
return """%s - given a wikipedia category, dumps all article titles in it
Usage: %s [options] 'category name'
Options:
-w, --wiki=W Use the given wikipedia. Default: https://en.wikipedia.org
-h, --help Show this message and exit
""" % (sys.argv[0], sys.argv[0])
def main():
base = 'https://en.wikipedia.org'
try:
opts, args = getopt.gnu_getopt(sys.argv[1:], 'w:h', ['wiki=', 'help'])
for o, a in opts:
if o in ('-w', '--wiki'):
base = a
if o in ('-h', '--help'):
print usage()
return 0
if len(args) != 1:
raise getopt.GetoptError('category name required.')
        category = args[0]
except getopt.GetoptError, e:
print >> sys.stderr, e
print >> sys.stderr, usage()
return 2
try:
articles = get_articles(base, category)
except ArticleFetchError, e:
print >> sys.stderr, e
return 1
for article in articles:
print article
if __name__ == '__main__':
sys.exit(main())
|
Add example script that dumps a wikipedia category
"""dump_category.py - utility script for mps_edits."""
import sys
import json
import getopt
import urllib
import urllib2
class ArticleFetchError(Exception):
pass
def get_articles(base, category):
regular_params = [
('cmtitle', "Category:%s" % (category)),
('action', 'query'),
('list', 'categorymembers'),
('cmlimit', '500'),
('cmprop', 'title|sortkey|timestamp'),
('format', 'json'),
]
continue_params = []
articles = []
while True:
qs = urllib.urlencode(regular_params + continue_params)
url = base + '/w/api.php?' + qs
req = urllib2.Request(url)
try:
json_data = urllib2.urlopen(req).read()
except urllib2.URLError, e:
raise ArticleFetchError(
u'Error fetching URL ' + url + u' ' + unicode(e))
try:
data = json.loads(json_data)
except ValueError, e:
raise ArticleFetchError(
u'Invalid JSON response from Wikipedia: %s' % (e,))
articles += [article['title'] for article in data['query']['categorymembers']]
if 'query-continue' in data:
# how to fetch the next page of results
continue_params = [(
'cmcontinue',
data['query-continue']['categorymembers']['cmcontinue'])]
continue
break
return articles
def usage():
return """%s - given a wikipedia category, dumps all article titles in it
Usage: %s [options] 'category name'
Options:
-w, --wiki=W Use the given wikipedia. Default: https://en.wikipedia.org
-h, --help Show this message and exit
""" % (sys.argv[0], sys.argv[0])
def main():
base = 'https://en.wikipedia.org'
try:
opts, args = getopt.gnu_getopt(sys.argv[1:], 'w:h', ['wiki=', 'help'])
for o, a in opts:
if o in ('-w', '--wiki'):
base = a
if o in ('-h', '--help'):
print usage()
return 0
if len(args) != 1:
raise getopt.GetoptError('category name required.')
        category = args[0]
except getopt.GetoptError, e:
print >> sys.stderr, e
print >> sys.stderr, usage()
return 2
try:
articles = get_articles(base, category)
except ArticleFetchError, e:
print >> sys.stderr, e
return 1
for article in articles:
print article
if __name__ == '__main__':
sys.exit(main())
|
<commit_before><commit_msg>Add example script that dumps a wikipedia category<commit_after>"""dump_category.py - utility script for mps_edits."""
import sys
import json
import getopt
import urllib
import urllib2
class ArticleFetchError(Exception):
pass
def get_articles(base, category):
regular_params = [
('cmtitle', "Category:%s" % (category)),
('action', 'query'),
('list', 'categorymembers'),
('cmlimit', '500'),
('cmprop', 'title|sortkey|timestamp'),
('format', 'json'),
]
continue_params = []
articles = []
while True:
qs = urllib.urlencode(regular_params + continue_params)
url = base + '/w/api.php?' + qs
req = urllib2.Request(url)
try:
json_data = urllib2.urlopen(req).read()
except urllib2.URLError, e:
raise ArticleFetchError(
u'Error fetching URL ' + url + u' ' + unicode(e))
try:
data = json.loads(json_data)
except ValueError, e:
raise ArticleFetchError(
u'Invalid JSON response from Wikipedia: %s' % (e,))
articles += [article['title'] for article in data['query']['categorymembers']]
if 'query-continue' in data:
# how to fetch the next page of results
continue_params = [(
'cmcontinue',
data['query-continue']['categorymembers']['cmcontinue'])]
continue
break
return articles
def usage():
return """%s - given a wikipedia category, dumps all article titles in it
Usage: %s [options] 'category name'
Options:
-w, --wiki=W Use the given wikipedia. Default: https://en.wikipedia.org
-h, --help Show this message and exit
""" % (sys.argv[0], sys.argv[0])
def main():
base = 'https://en.wikipedia.org'
try:
opts, args = getopt.gnu_getopt(sys.argv[1:], 'w:h', ['wiki=', 'help'])
for o, a in opts:
if o in ('-w', '--wiki'):
base = a
if o in ('-h', '--help'):
print usage()
return 0
if len(args) != 1:
raise getopt.GetoptError('category name required.')
        category = args[0]
except getopt.GetoptError, e:
print >> sys.stderr, e
print >> sys.stderr, usage()
return 2
try:
articles = get_articles(base, category)
except ArticleFetchError, e:
print >> sys.stderr, e
return 1
for article in articles:
print article
if __name__ == '__main__':
sys.exit(main())
|
|
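The request URL the script builds can be inspected without touching the network; a Python 3 sketch of the same query-string construction (the category value is an example):
from urllib.parse import urlencode

params = [
    ('cmtitle', 'Category:%s' % 'Lakes of Scotland'),  # example category
    ('action', 'query'),
    ('list', 'categorymembers'),
    ('cmlimit', '500'),
    ('cmprop', 'title|sortkey|timestamp'),
    ('format', 'json'),
]
print('https://en.wikipedia.org/w/api.php?' + urlencode(params))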
76c8a5ca367e2a22953cde9541f61d564de5f6ff
|
temba/channels/migrations/0007_auto_20150402_2103.py
|
temba/channels/migrations/0007_auto_20150402_2103.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('channels', '0006_channel_bod'),
]
operations = [
migrations.AlterField(
model_name='channel',
name='channel_type',
field=models.CharField(default='A', help_text='Type of this channel, whether Android, Twilio or SMSC', max_length=3, verbose_name='Channel Type', choices=[('A', 'Android'), ('T', 'Twilio'), ('AT', "Africa's Talking"), ('ZV', 'Zenvia'), ('NX', 'Nexmo'), ('IB', 'Infobip'), ('VB', 'Verboice'), ('H9', 'Hub9'), ('VM', 'Vumi'), ('KN', 'Kannel'), ('EX', 'External'), ('TT', 'Twitter'), ('CT', 'Clickatell'), ('PL', 'Plivo'), ('SQ', 'Shaqodoon'), ('HX', 'High Connection')]),
preserve_default=True,
),
]
|
Add migration for HX channel type
|
Add migration for HX channel type
|
Python
|
agpl-3.0
|
reyrodrigues/EU-SMS,ewheeler/rapidpro,pulilab/rapidpro,ewheeler/rapidpro,Thapelo-Tsotetsi/rapidpro,tsotetsi/textily-web,harrissoerja/rapidpro,reyrodrigues/EU-SMS,harrissoerja/rapidpro,pulilab/rapidpro,praekelt/rapidpro,pulilab/rapidpro,ewheeler/rapidpro,tsotetsi/textily-web,praekelt/rapidpro,pulilab/rapidpro,harrissoerja/rapidpro,praekelt/rapidpro,praekelt/rapidpro,tsotetsi/textily-web,tsotetsi/textily-web,Thapelo-Tsotetsi/rapidpro,Thapelo-Tsotetsi/rapidpro,ewheeler/rapidpro,tsotetsi/textily-web,reyrodrigues/EU-SMS,pulilab/rapidpro
|
Add migration for HX channel type
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('channels', '0006_channel_bod'),
]
operations = [
migrations.AlterField(
model_name='channel',
name='channel_type',
field=models.CharField(default='A', help_text='Type of this channel, whether Android, Twilio or SMSC', max_length=3, verbose_name='Channel Type', choices=[('A', 'Android'), ('T', 'Twilio'), ('AT', "Africa's Talking"), ('ZV', 'Zenvia'), ('NX', 'Nexmo'), ('IB', 'Infobip'), ('VB', 'Verboice'), ('H9', 'Hub9'), ('VM', 'Vumi'), ('KN', 'Kannel'), ('EX', 'External'), ('TT', 'Twitter'), ('CT', 'Clickatell'), ('PL', 'Plivo'), ('SQ', 'Shaqodoon'), ('HX', 'High Connection')]),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add migration for HX channel type<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('channels', '0006_channel_bod'),
]
operations = [
migrations.AlterField(
model_name='channel',
name='channel_type',
field=models.CharField(default='A', help_text='Type of this channel, whether Android, Twilio or SMSC', max_length=3, verbose_name='Channel Type', choices=[('A', 'Android'), ('T', 'Twilio'), ('AT', "Africa's Talking"), ('ZV', 'Zenvia'), ('NX', 'Nexmo'), ('IB', 'Infobip'), ('VB', 'Verboice'), ('H9', 'Hub9'), ('VM', 'Vumi'), ('KN', 'Kannel'), ('EX', 'External'), ('TT', 'Twitter'), ('CT', 'Clickatell'), ('PL', 'Plivo'), ('SQ', 'Shaqodoon'), ('HX', 'High Connection')]),
preserve_default=True,
),
]
|
Add migration for HX channel type
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('channels', '0006_channel_bod'),
]
operations = [
migrations.AlterField(
model_name='channel',
name='channel_type',
field=models.CharField(default='A', help_text='Type of this channel, whether Android, Twilio or SMSC', max_length=3, verbose_name='Channel Type', choices=[('A', 'Android'), ('T', 'Twilio'), ('AT', "Africa's Talking"), ('ZV', 'Zenvia'), ('NX', 'Nexmo'), ('IB', 'Infobip'), ('VB', 'Verboice'), ('H9', 'Hub9'), ('VM', 'Vumi'), ('KN', 'Kannel'), ('EX', 'External'), ('TT', 'Twitter'), ('CT', 'Clickatell'), ('PL', 'Plivo'), ('SQ', 'Shaqodoon'), ('HX', 'High Connection')]),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add migration for HX channel type<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('channels', '0006_channel_bod'),
]
operations = [
migrations.AlterField(
model_name='channel',
name='channel_type',
field=models.CharField(default='A', help_text='Type of this channel, whether Android, Twilio or SMSC', max_length=3, verbose_name='Channel Type', choices=[('A', 'Android'), ('T', 'Twilio'), ('AT', "Africa's Talking"), ('ZV', 'Zenvia'), ('NX', 'Nexmo'), ('IB', 'Infobip'), ('VB', 'Verboice'), ('H9', 'Hub9'), ('VM', 'Vumi'), ('KN', 'Kannel'), ('EX', 'External'), ('TT', 'Twitter'), ('CT', 'Clickatell'), ('PL', 'Plivo'), ('SQ', 'Shaqodoon'), ('HX', 'High Connection')]),
preserve_default=True,
),
]
|
|
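A quick sanity check on the new choices list can be done outside Django entirely; a sketch using the tuples from the field definition above:
CHANNEL_TYPE_CHOICES = [
    ('A', 'Android'), ('T', 'Twilio'), ('AT', "Africa's Talking"),
    ('ZV', 'Zenvia'), ('NX', 'Nexmo'), ('IB', 'Infobip'),
    ('VB', 'Verboice'), ('H9', 'Hub9'), ('VM', 'Vumi'),
    ('KN', 'Kannel'), ('EX', 'External'), ('TT', 'Twitter'),
    ('CT', 'Clickatell'), ('PL', 'Plivo'), ('SQ', 'Shaqodoon'),
    ('HX', 'High Connection'),
]
# The first assertion would have failed before this migration added HX;
# the second confirms every code still fits the column's max_length=3.
assert dict(CHANNEL_TYPE_CHOICES)['HX'] == 'High Connection'
assert all(len(code) <= 3 for code, label in CHANNEL_TYPE_CHOICES)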
09bb45941583a126429e3cc27b2fb4bb6b01d56c
|
Car2know/analysis/QueryData.py
|
Car2know/analysis/QueryData.py
|
from urllib2 import Request, urlopen, URLError
from datetime import time
from datetime import date
import datetime
import time
import sys
import os
# Using request and urlopen to get data from car2go's API
# We need the city name and consumer_key
def querydata():
request = Request("http://www.car2go.com/api/v2.1/vehicles?loc=Seattle&oauth_consumer_key=Fleet-size&format=json")
try:
response = urlopen(request)
return response.read()
# raise Exception('spam', 'eggs')
except Exception as e:
print datetime.datetime.now(), ' Got an error:', str(e)
exc_type, exc_obj, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
print(exc_type, fname, exc_tb.tb_lineno)
return ""
# The input is the time interval to query data in seconds
# as well as the storage path of the file
# At first, automatically generate the date when querying data
# Then build a txt file with the date as the name
# If a new day comes, automatically build a new txt file
# At last, query data from car2go's api
def StoreData(interval, path):
dt = datetime.datetime.now()
tt = dt.timetuple()
year = tt[0]
month = tt[1]
day = tt[2]
hour = tt[3]
minute = tt[4]
second = tt[5]
previousDate = str(year) + str(month).zfill(2) + str(day).zfill(2)
currentDate = str(year) + str(month).zfill(2) + str(day).zfill(2)
f = open(path + currentDate + '.txt','a')
while 1:
time.sleep(1)
dt = datetime.datetime.now()
tt = dt.timetuple()
year = str(tt[0])
month = str(tt[1])
day = str(tt[2])
hour = str(tt[3])
minute = str(tt[4])
second = str(tt[5])
currentDate = year + month.zfill(2) + day.zfill(2)
currentTime = hour.zfill(2) + minute.zfill(2)+ second.zfill(2)
if currentDate != previousDate:
f = open(path + currentDate + '.txt','a')
            previousDate = currentDate
if int(second) % interval == 0:
# print hour+" "+minute +" "+ second
data = querydata()
f.write(currentTime + ';' + data +'\n')
# Main function
if __name__ == '__main__':
path = 'F:\\car2godata\\Seattle\\'
StoreData(30, path)
|
Move the file to correct path
|
Move the file to correct path
|
Python
|
mit
|
gengho/Car2know,gengho/Car2know
|
Move the file to correct path
|
from urllib2 import Request, urlopen, URLError
from datetime import time
from datetime import date
import datetime
import time
import sys
import os
# Using request and urlopen to get data from car2go's API
# We need the city name and consumer_key
def querydata():
request = Request("http://www.car2go.com/api/v2.1/vehicles?loc=Seattle&oauth_consumer_key=Fleet-size&format=json")
try:
response = urlopen(request)
return response.read()
# raise Exception('spam', 'eggs')
except Exception as e:
print datetime.datetime.now(), ' Got an error:', str(e)
exc_type, exc_obj, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
print(exc_type, fname, exc_tb.tb_lineno)
return ""
# The input is the time interval to query data in seconds
# as well as the storage path of the file
# At first, automatically generate the date when querying data
# Then build a txt file with the date as the name
# If a new day comes, automatically build a new txt file
# At last, query data from car2go's api
def StoreData(interval, path):
dt = datetime.datetime.now()
tt = dt.timetuple()
year = tt[0]
month = tt[1]
day = tt[2]
hour = tt[3]
minute = tt[4]
second = tt[5]
previousDate = str(year) + str(month).zfill(2) + str(day).zfill(2)
currentDate = str(year) + str(month).zfill(2) + str(day).zfill(2)
f = open(path + currentDate + '.txt','a')
while 1:
time.sleep(1)
dt = datetime.datetime.now()
tt = dt.timetuple()
year = str(tt[0])
month = str(tt[1])
day = str(tt[2])
hour = str(tt[3])
minute = str(tt[4])
second = str(tt[5])
currentDate = year + month.zfill(2) + day.zfill(2)
currentTime = hour.zfill(2) + minute.zfill(2)+ second.zfill(2)
if currentDate != previousDate:
f = open(path + currentDate + '.txt','a')
            previousDate = currentDate
if int(second) % interval == 0:
# print hour+" "+minute +" "+ second
data = querydata()
f.write(currentTime + ';' + data +'\n')
# Main function
if __name__ == '__main__':
path = 'F:\\car2godata\\Seattle\\'
StoreData(30, path)
|
<commit_before><commit_msg>Move the file to correct path<commit_after>
|
from urllib2 import Request, urlopen, URLError
from datetime import time
from datetime import date
import datetime
import time
import sys
import os
# Using request and urlopen to get data from car2go's API
# We need the city name and consumer_key
def querydata():
request = Request("http://www.car2go.com/api/v2.1/vehicles?loc=Seattle&oauth_consumer_key=Fleet-size&format=json")
try:
response = urlopen(request)
return response.read()
# raise Exception('spam', 'eggs')
except Exception as e:
print datetime.datetime.now(), ' Got an error:', str(e)
exc_type, exc_obj, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
print(exc_type, fname, exc_tb.tb_lineno)
return ""
# The input is the time interval to query data in seconds
# as well as the storage path of the file
# At first, automatically generate the date when querying data
# Then build a txt file with the date as the name
# If a new day comes, automatically build a new txt file
# At last, query data from car2go's api
def StoreData(interval, path):
dt = datetime.datetime.now()
tt = dt.timetuple()
year = tt[0]
month = tt[1]
day = tt[2]
hour = tt[3]
minute = tt[4]
second = tt[5]
previousDate = str(year) + str(month).zfill(2) + str(day).zfill(2)
currentDate = str(year) + str(month).zfill(2) + str(day).zfill(2)
f = open(path + currentDate + '.txt','a')
while 1:
time.sleep(1)
dt = datetime.datetime.now()
tt = dt.timetuple()
year = str(tt[0])
month = str(tt[1])
day = str(tt[2])
hour = str(tt[3])
minute = str(tt[4])
second = str(tt[5])
currentDate = year + month.zfill(2) + day.zfill(2)
currentTime = hour.zfill(2) + minute.zfill(2)+ second.zfill(2)
if currentDate != previousDate:
f = open(path + currentDate + '.txt','a')
            previousDate = currentDate
if int(second) % interval == 0:
# print hour+" "+minute +" "+ second
data = querydata()
f.write(currentTime + ';' + data +'\n')
# Main function
if __name__ == '__main__':
path = 'F:\\car2godata\\Seattle\\'
StoreData(30, path)
|
Move the file to correct path
from urllib2 import Request, urlopen, URLError
from datetime import time
from datetime import date
import datetime
import time
import sys
import os
# Using request and urlopen to get data from car2go's API
# We need the city name and consumer_key
def querydata():
request = Request("http://www.car2go.com/api/v2.1/vehicles?loc=Seattle&oauth_consumer_key=Fleet-size&format=json")
try:
response = urlopen(request)
return response.read()
# raise Exception('spam', 'eggs')
except Exception as e:
print datetime.datetime.now(), ' Got an error:', str(e)
exc_type, exc_obj, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
print(exc_type, fname, exc_tb.tb_lineno)
return ""
# The input is the time interval to query data in seconds
# as well as the storage path of the file
# At first, automatically generate the date when querying data
# Then build a txt file with the date as the name
# If a new day comes, automatically build a new txt file
# At last, query data from car2go's api
def StoreData(interval, path):
dt = datetime.datetime.now()
tt = dt.timetuple()
year = tt[0]
month = tt[1]
day = tt[2]
hour = tt[3]
minute = tt[4]
second = tt[5]
previousDate = str(year) + str(month).zfill(2) + str(day).zfill(2)
currentDate = str(year) + str(month).zfill(2) + str(day).zfill(2)
f = open(path + currentDate + '.txt','a')
while 1:
time.sleep(1)
dt = datetime.datetime.now()
tt = dt.timetuple()
year = str(tt[0])
month = str(tt[1])
day = str(tt[2])
hour = str(tt[3])
minute = str(tt[4])
second = str(tt[5])
currentDate = year + month.zfill(2) + day.zfill(2)
currentTime = hour.zfill(2) + minute.zfill(2)+ second.zfill(2)
if currentDate != previousDate:
f = open(path + currentDate + '.txt','a')
            previousDate = currentDate
if int(second) % interval == 0:
# print hour+" "+minute +" "+ second
data = querydata()
f.write(currentTime + ';' + data +'\n')
# Main function
if __name__ == '__main__':
path = 'F:\\car2godata\\Seattle\\'
StoreData(30, path)
|
<commit_before><commit_msg>Move the file to correct path<commit_after>from urllib2 import Request, urlopen, URLError
from datetime import time
from datetime import date
import datetime
import time
import sys
import os
# Using request and urlopen to get data from car2go's API
# We need the city name and consumer_key
def querydata():
request = Request("http://www.car2go.com/api/v2.1/vehicles?loc=Seattle&oauth_consumer_key=Fleet-size&format=json")
try:
response = urlopen(request)
return response.read()
# raise Exception('spam', 'eggs')
except Exception as e:
print datetime.datetime.now(), ' Got an error:', str(e)
exc_type, exc_obj, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
print(exc_type, fname, exc_tb.tb_lineno)
return ""
# The input is the time interval to query data in seconds
# as well as the storage path of the file
# At first, automatically generate the date when querying data
# Then build a txt file with the date as the name
# If a new day comes, automatically build a new txt file
# At last, query data from car2go's api
def StoreData(interval, path):
dt = datetime.datetime.now()
tt = dt.timetuple()
year = tt[0]
month = tt[1]
day = tt[2]
hour = tt[3]
minute = tt[4]
second = tt[5]
previousDate = str(year) + str(month).zfill(2) + str(day).zfill(2)
currentDate = str(year) + str(month).zfill(2) + str(day).zfill(2)
f = open(path + currentDate + '.txt','a')
while 1:
time.sleep(1)
dt = datetime.datetime.now()
tt = dt.timetuple()
year = str(tt[0])
month = str(tt[1])
day = str(tt[2])
hour = str(tt[3])
minute = str(tt[4])
second = str(tt[5])
currentDate = year + month.zfill(2) + day.zfill(2)
currentTime = hour.zfill(2) + minute.zfill(2)+ second.zfill(2)
if currentDate != previousDate:
f = open(path + currentDate + '.txt','a')
            previousDate = currentDate
if int(second) % interval == 0:
# print hour+" "+minute +" "+ second
data = querydata()
f.write(currentTime + ';' + data +'\n')
# Main function
if __name__ == '__main__':
path = 'F:\\car2godata\\Seattle\\'
StoreData(30, path)
|
|
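The timetuple/zfill bookkeeping above can be collapsed into two strftime calls; a Python 3 sketch producing the same date and time stamps:
import datetime

now = datetime.datetime.now()
current_date = now.strftime('%Y%m%d')  # same result as the str(...).zfill(2) chain
current_time = now.strftime('%H%M%S')
print(current_date, current_time)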
ed7bbd2ed53a2b3009acace7b8399a35842a4532
|
nameless/visitors.py
|
nameless/visitors.py
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
visitors.py
@author ejnp
"""
import ast
import lambda_calculus_ast
class FreeVariables(ast.NodeVisitor):
"""Visits each node of a lambda calculus abstract syntax tree and
determines which variables (if any) are unbound. Ultimately provides a set
of string variable names.
"""
def visit_Variable(self, node):
"""FV(x) = {x}"""
return {node.name}
def visit_Application(self, node):
"""FV((e1 e2)) = FV(e1) U FV(e2)"""
return (self.visit(node.left_expression) |
self.visit(node.right_expression))
def visit_Abstraction(self, node):
"""FV(λx.e) = FV(e) - {x}"""
return self.visit(node.expression) - self.visit(node.parameter)
|
Add a visitor for determining free variables
|
Add a visitor for determining free variables
|
Python
|
mit
|
ElliotPenson/nameless
|
Add a visitor for determining free variables
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
visitors.py
@author ejnp
"""
import ast
import lambda_calculus_ast
class FreeVariables(ast.NodeVisitor):
"""Visits each node of a lambda calculus abstract syntax tree and
determines which variables (if any) are unbound. Ultimately provides a set
of string variable names.
"""
def visit_Variable(self, node):
"""FV(x) = {x}"""
return {node.name}
def visit_Application(self, node):
"""FV((e1 e2)) = FV(e1) U FV(e2)"""
return (self.visit(node.left_expression) |
self.visit(node.right_expression))
def visit_Abstraction(self, node):
"""FV(λx.e) = FV(e) - {x}"""
return self.visit(node.expression) - self.visit(node.parameter)
|
<commit_before><commit_msg>Add a visitor for determining free variables<commit_after>
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
visitors.py
@author ejnp
"""
import ast
import lambda_calculus_ast
class FreeVariables(ast.NodeVisitor):
"""Visits each node of a lambda calculus abstract syntax tree and
determines which variables (if any) are unbound. Ultimately provides a set
of string variable names.
"""
def visit_Variable(self, node):
"""FV(x) = {x}"""
return {node.name}
def visit_Application(self, node):
"""FV((e1 e2)) = FV(e1) U FV(e2)"""
return (self.visit(node.left_expression) |
self.visit(node.right_expression))
def visit_Abstraction(self, node):
"""FV(λx.e) = FV(e) - {x}"""
return self.visit(node.expression) - self.visit(node.parameter)
|
Add a visitor for determining free variables
#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
visitors.py
@author ejnp
"""
import ast
import lambda_calculus_ast
class FreeVariables(ast.NodeVisitor):
"""Visits each node of a lambda calculus abstract syntax tree and
determines which variables (if any) are unbound. Ultimately provides a set
of string variable names.
"""
def visit_Variable(self, node):
"""FV(x) = {x}"""
return {node.name}
def visit_Application(self, node):
"""FV((e1 e2)) = FV(e1) U FV(e2)"""
return (self.visit(node.left_expression) |
self.visit(node.right_expression))
def visit_Abstraction(self, node):
"""FV(λx.e) = FV(e) - {x}"""
return self.visit(node.expression) - self.visit(node.parameter)
|
<commit_before><commit_msg>Add a visitor for determining free variables<commit_after>#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
visitors.py
@author ejnp
"""
import ast
import lambda_calculus_ast
class FreeVariables(ast.NodeVisitor):
"""Visits each node of a lambda calculus abstract syntax tree and
determines which variables (if any) are unbound. Ultimately provides a set
of string variable names.
"""
def visit_Variable(self, node):
"""FV(x) = {x}"""
return {node.name}
def visit_Application(self, node):
"""FV((e1 e2)) = FV(e1) U FV(e2)"""
return (self.visit(node.left_expression) |
self.visit(node.right_expression))
def visit_Abstraction(self, node):
"""FV(λx.e) = FV(e) - {x}"""
return self.visit(node.expression) - self.visit(node.parameter)
|
|
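The visitor depends on lambda_calculus_ast, but ast.NodeVisitor dispatches purely on the node's class name, so the traversal can be exercised with stand-in node classes. A self-contained sketch (the three node classes below are minimal stand-ins, not the real module):
import ast

class Variable(object):
    def __init__(self, name):
        self.name = name

class Application(object):
    def __init__(self, left_expression, right_expression):
        self.left_expression = left_expression
        self.right_expression = right_expression

class Abstraction(object):
    def __init__(self, parameter, expression):
        self.parameter = parameter
        self.expression = expression

class FreeVariables(ast.NodeVisitor):
    def visit_Variable(self, node):
        return {node.name}

    def visit_Application(self, node):
        return (self.visit(node.left_expression) |
                self.visit(node.right_expression))

    def visit_Abstraction(self, node):
        return self.visit(node.expression) - self.visit(node.parameter)

# FV(lambda x.(x y)) = ({x} | {y}) - {x} = {y}
term = Abstraction(Variable('x'), Application(Variable('x'), Variable('y')))
print(FreeVariables().visit(term))  # {'y'}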
f5fb2f955843d802fb2a7225ee3aac98eac640e5
|
output/pprint_json.py
|
output/pprint_json.py
|
#!/usr/bin/env python3
import json
import pprint
import sys
json_string = ""
for line in sys.stdin.readlines():
json_string = "{}{}".format(json_string, line.strip())
item = json.loads(json_string)
pprint.pprint(item)
|
Add pretty print json output
|
Add pretty print json output
|
Python
|
mit
|
dgengtek/scripts,dgengtek/scripts
|
Add pretty print json output
|
#!/usr/bin/env python3
import json
import pprint
import sys
json_string = ""
for line in sys.stdin.readlines():
json_string = "{}{}".format(json_string, line.strip())
item = json.loads(json_string)
pprint.pprint(item)
|
<commit_before><commit_msg>Add pretty print json output<commit_after>
|
#!/usr/bin/env python3
import json
import pprint
import sys
json_string = ""
for line in sys.stdin.readlines():
json_string = "{}{}".format(json_string, line.strip())
item = json.loads(json_string)
pprint.pprint(item)
|
Add pretty print json output
#!/usr/bin/env python3
import json
import pprint
import sys
json_string = ""
for line in sys.stdin.readlines():
json_string = "{}{}".format(json_string, line.strip())
item = json.loads(json_string)
pprint.pprint(item)
|
<commit_before><commit_msg>Add pretty print json output<commit_after>#!/bin/env python3
import json
import pprint
import sys
json_string = ""
for line in sys.stdin.readlines():
json_string = "{}{}".format(json_string, line.strip())
item = json.loads(json_string)
pprint.pprint(item)
|
|
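An equivalent one-pass version that avoids rebuilding the string line by line; json.loads treats newlines between tokens as whitespace, and json.dumps with indent gives output comparable to pprint:
#!/usr/bin/env python3
import json
import sys

item = json.loads(sys.stdin.read())
print(json.dumps(item, indent=2, sort_keys=True))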
f96d8119814488061c2bb6ef71bd8f054c69f082
|
app/api_preferences.py
|
app/api_preferences.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import configuration
RPX_REALM = 'mils-alumni-secure'
rpxnow = {
'api_auth_url': 'https://rpxnow.com/api/v2/auth_info',
'api_key': 'b771106aa4e3ef377c359495f52f2c99120f36ac',
    'auth_token_url': configuration.ROOT_URL + 'auth_token',
'realm': RPX_REALM,
'lang': 'en',
'domain': '%s.rpxnow.com' % RPX_REALM,
}
|
Add api preferences for rpxnow
|
Add api preferences for rpxnow
|
Python
|
mit
|
yesudeep/old-milsalumni,yesudeep/old-milsalumni,yesudeep/old-milsalumni
|
Add api preferences for rpxnow
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import configuration
RPX_REALM = 'mils-alumni-secure'
rpxnow = {
'api_auth_url': 'https://rpxnow.com/api/v2/auth_info',
'api_key': 'b771106aa4e3ef377c359495f52f2c99120f36ac',
    'auth_token_url': configuration.ROOT_URL + 'auth_token',
'realm': RPX_REALM,
'lang': 'en',
'domain': '%s.rpxnow.com' % RPX_REALM,
}
|
<commit_before><commit_msg>Add api preferences for rpxnow<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import configuration
RPX_REALM = 'mils-alumni-secure'
rpxnow = {
'api_auth_url': 'https://rpxnow.com/api/v2/auth_info',
'api_key': 'b771106aa4e3ef377c359495f52f2c99120f36ac',
    'auth_token_url': configuration.ROOT_URL + 'auth_token',
'realm': RPX_REALM,
'lang': 'en',
'domain': '%s.rpxnow.com' % RPX_REALM,
}
|
Add api preferences for rpxnow
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import configuration
RPX_REALM = 'mils-alumni-secure'
rpxnow = {
'api_auth_url': 'https://rpxnow.com/api/v2/auth_info',
'api_key': 'b771106aa4e3ef377c359495f52f2c99120f36ac',
    'auth_token_url': configuration.ROOT_URL + 'auth_token',
'realm': RPX_REALM,
'lang': 'en',
'domain': '%s.rpxnow.com' % RPX_REALM,
}
|
<commit_before><commit_msg>Add api preferences for rpxnow<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import configuration
RPX_REALM = 'mils-alumni-secure'
rpxnow = {
'api_auth_url': 'https://rpxnow.com/api/v2/auth_info',
'api_key': 'b771106aa4e3ef377c359495f52f2c99120f36ac',
    'auth_token_url': configuration.ROOT_URL + 'auth_token',
'realm': RPX_REALM,
'lang': 'en',
'domain': '%s.rpxnow.com' % RPX_REALM,
}
|
|
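A sketch of how the rpxnow mapping might be consumed to build the hosted sign-in URL; the URL shape follows the RPX embed pattern as far as recalled here, and every value below is a placeholder:
from urllib.parse import urlencode

rpxnow = {
    'realm': 'mils-alumni-secure',
    'domain': 'mils-alumni-secure.rpxnow.com',
    'lang': 'en',
    'auth_token_url': 'https://example.com/auth_token',  # placeholder callback
}

# RPX hosts the sign-in page on the realm's domain and posts the result
# back to whatever URL is passed as token_url.
signin_url = 'https://%s/openid/v2/signin?%s' % (
    rpxnow['domain'],
    urlencode({'token_url': rpxnow['auth_token_url'],
               'language_preference': rpxnow['lang']}),
)
print(signin_url)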
6072022e2debeb4dcd75e4969bd2beb16bac8827
|
source/sqlserver_ado/fields.py
|
source/sqlserver_ado/fields.py
|
"""This module provides SQL Server specific fields for Django models."""
from django.db.models import AutoField, ForeignKey, IntegerField
class BigAutoField(AutoField):
"""A bigint IDENTITY field"""
def get_internal_type(self):
return "BigAutoField"
def to_python(self, value):
if value is None:
return value
try:
return long(value)
except (TypeError, ValueError):
raise exceptions.ValidationError(
_("This value must be an long."))
def get_db_prep_value(self, value):
if value is None:
return None
return long(value)
class BigForeignKey(ForeignKey):
"""A ForeignKey field that points to a BigAutoField or BigIntegerField"""
def db_type(self):
return BigIntegerField().db_type()
class BigIntegerField(IntegerField):
"""A BigInteger field, until Django ticket #399 lands (if ever.)"""
def get_internal_type(self):
return "BigIntegerField"
def to_python(self, value):
if value is None:
return value
try:
return long(value)
except (TypeError, ValueError):
raise exceptions.ValidationError(
_("This value must be an long."))
def get_db_prep_value(self, value):
if value is None:
return None
return long(value)
|
"""This module provides SQL Server specific fields for Django models."""
from django.db.models import AutoField, ForeignKey, IntegerField
from django.forms import ValidationError
class BigAutoField(AutoField):
"""A bigint IDENTITY field"""
def get_internal_type(self):
return "BigAutoField"
def to_python(self, value):
if value is None:
return value
try:
return long(value)
except (TypeError, ValueError):
raise ValidationError(
_("This value must be an long."))
def get_db_prep_value(self, value):
if value is None:
return None
return long(value)
class BigForeignKey(ForeignKey):
"""A ForeignKey field that points to a BigAutoField or BigIntegerField"""
def db_type(self):
return BigIntegerField().db_type()
class BigIntegerField(IntegerField):
"""A BigInteger field, until Django ticket #399 lands (if ever.)"""
def get_internal_type(self):
return "BigIntegerField"
def to_python(self, value):
if value is None:
return value
try:
return long(value)
except (TypeError, ValueError):
raise ValidationError(
_("This value must be an long."))
def get_db_prep_value(self, value):
if value is None:
return None
return long(value)
|
Fix import error for custom Field validation
|
Fix import error for custom Field validation
|
Python
|
mit
|
theoriginalgri/django-mssql,theoriginalgri/django-mssql
|
"""This module provides SQL Server specific fields for Django models."""
from django.db.models import AutoField, ForeignKey, IntegerField
class BigAutoField(AutoField):
"""A bigint IDENTITY field"""
def get_internal_type(self):
return "BigAutoField"
def to_python(self, value):
if value is None:
return value
try:
return long(value)
except (TypeError, ValueError):
raise exceptions.ValidationError(
_("This value must be an long."))
def get_db_prep_value(self, value):
if value is None:
return None
return long(value)
class BigForeignKey(ForeignKey):
"""A ForeignKey field that points to a BigAutoField or BigIntegerField"""
def db_type(self):
return BigIntegerField().db_type()
class BigIntegerField(IntegerField):
"""A BigInteger field, until Django ticket #399 lands (if ever.)"""
def get_internal_type(self):
return "BigIntegerField"
def to_python(self, value):
if value is None:
return value
try:
return long(value)
except (TypeError, ValueError):
raise exceptions.ValidationError(
_("This value must be an long."))
def get_db_prep_value(self, value):
if value is None:
return None
return long(value)
Fix import error for custom Field validation
|
"""This module provides SQL Server specific fields for Django models."""
from django.db.models import AutoField, ForeignKey, IntegerField
from django.forms import ValidationError
class BigAutoField(AutoField):
"""A bigint IDENTITY field"""
def get_internal_type(self):
return "BigAutoField"
def to_python(self, value):
if value is None:
return value
try:
return long(value)
except (TypeError, ValueError):
raise ValidationError(
_("This value must be an long."))
def get_db_prep_value(self, value):
if value is None:
return None
return long(value)
class BigForeignKey(ForeignKey):
"""A ForeignKey field that points to a BigAutoField or BigIntegerField"""
def db_type(self):
return BigIntegerField().db_type()
class BigIntegerField(IntegerField):
"""A BigInteger field, until Django ticket #399 lands (if ever.)"""
def get_internal_type(self):
return "BigIntegerField"
def to_python(self, value):
if value is None:
return value
try:
return long(value)
except (TypeError, ValueError):
raise ValidationError(
_("This value must be an long."))
def get_db_prep_value(self, value):
if value is None:
return None
return long(value)
|
<commit_before>"""This module provides SQL Server specific fields for Django models."""
from django.db.models import AutoField, ForeignKey, IntegerField
class BigAutoField(AutoField):
"""A bigint IDENTITY field"""
def get_internal_type(self):
return "BigAutoField"
def to_python(self, value):
if value is None:
return value
try:
return long(value)
except (TypeError, ValueError):
raise exceptions.ValidationError(
_("This value must be an long."))
def get_db_prep_value(self, value):
if value is None:
return None
return long(value)
class BigForeignKey(ForeignKey):
"""A ForeignKey field that points to a BigAutoField or BigIntegerField"""
def db_type(self):
return BigIntegerField().db_type()
class BigIntegerField(IntegerField):
"""A BigInteger field, until Django ticket #399 lands (if ever.)"""
def get_internal_type(self):
return "BigIntegerField"
def to_python(self, value):
if value is None:
return value
try:
return long(value)
except (TypeError, ValueError):
raise exceptions.ValidationError(
_("This value must be an long."))
def get_db_prep_value(self, value):
if value is None:
return None
return long(value)
<commit_msg>Fix import error for custom Field validation<commit_after>
|
"""This module provides SQL Server specific fields for Django models."""
from django.db.models import AutoField, ForeignKey, IntegerField
from django.forms import ValidationError
class BigAutoField(AutoField):
"""A bigint IDENTITY field"""
def get_internal_type(self):
return "BigAutoField"
def to_python(self, value):
if value is None:
return value
try:
return long(value)
except (TypeError, ValueError):
raise ValidationError(
_("This value must be an long."))
def get_db_prep_value(self, value):
if value is None:
return None
return long(value)
class BigForeignKey(ForeignKey):
"""A ForeignKey field that points to a BigAutoField or BigIntegerField"""
def db_type(self):
return BigIntegerField().db_type()
class BigIntegerField(IntegerField):
"""A BigInteger field, until Django ticket #399 lands (if ever.)"""
def get_internal_type(self):
return "BigIntegerField"
def to_python(self, value):
if value is None:
return value
try:
return long(value)
except (TypeError, ValueError):
raise ValidationError(
_("This value must be an long."))
def get_db_prep_value(self, value):
if value is None:
return None
return long(value)
|
"""This module provides SQL Server specific fields for Django models."""
from django.db.models import AutoField, ForeignKey, IntegerField
class BigAutoField(AutoField):
"""A bigint IDENTITY field"""
def get_internal_type(self):
return "BigAutoField"
def to_python(self, value):
if value is None:
return value
try:
return long(value)
except (TypeError, ValueError):
raise exceptions.ValidationError(
_("This value must be an long."))
def get_db_prep_value(self, value):
if value is None:
return None
return long(value)
class BigForeignKey(ForeignKey):
"""A ForeignKey field that points to a BigAutoField or BigIntegerField"""
def db_type(self):
return BigIntegerField().db_type()
class BigIntegerField(IntegerField):
"""A BigInteger field, until Django ticket #399 lands (if ever.)"""
def get_internal_type(self):
return "BigIntegerField"
def to_python(self, value):
if value is None:
return value
try:
return long(value)
except (TypeError, ValueError):
raise exceptions.ValidationError(
_("This value must be an long."))
def get_db_prep_value(self, value):
if value is None:
return None
return long(value)
Fix import error for custom Field validation
"""This module provides SQL Server specific fields for Django models."""
from django.db.models import AutoField, ForeignKey, IntegerField
from django.forms import ValidationError
class BigAutoField(AutoField):
"""A bigint IDENTITY field"""
def get_internal_type(self):
return "BigAutoField"
def to_python(self, value):
if value is None:
return value
try:
return long(value)
except (TypeError, ValueError):
raise ValidationError(
_("This value must be an long."))
def get_db_prep_value(self, value):
if value is None:
return None
return long(value)
class BigForeignKey(ForeignKey):
"""A ForeignKey field that points to a BigAutoField or BigIntegerField"""
def db_type(self):
return BigIntegerField().db_type()
class BigIntegerField(IntegerField):
"""A BigInteger field, until Django ticket #399 lands (if ever.)"""
def get_internal_type(self):
return "BigIntegerField"
def to_python(self, value):
if value is None:
return value
try:
return long(value)
except (TypeError, ValueError):
raise ValidationError(
_("This value must be an long."))
def get_db_prep_value(self, value):
if value is None:
return None
return long(value)
|
<commit_before>"""This module provides SQL Server specific fields for Django models."""
from django.db.models import AutoField, ForeignKey, IntegerField
class BigAutoField(AutoField):
"""A bigint IDENTITY field"""
def get_internal_type(self):
return "BigAutoField"
def to_python(self, value):
if value is None:
return value
try:
return long(value)
except (TypeError, ValueError):
raise exceptions.ValidationError(
_("This value must be an long."))
def get_db_prep_value(self, value):
if value is None:
return None
return long(value)
class BigForeignKey(ForeignKey):
"""A ForeignKey field that points to a BigAutoField or BigIntegerField"""
def db_type(self):
return BigIntegerField().db_type()
class BigIntegerField(IntegerField):
"""A BigInteger field, until Django ticket #399 lands (if ever.)"""
def get_internal_type(self):
return "BigIntegerField"
def to_python(self, value):
if value is None:
return value
try:
return long(value)
except (TypeError, ValueError):
raise exceptions.ValidationError(
_("This value must be an long."))
def get_db_prep_value(self, value):
if value is None:
return None
return long(value)
<commit_msg>Fix import error for custom Field validation<commit_after>"""This module provides SQL Server specific fields for Django models."""
from django.db.models import AutoField, ForeignKey, IntegerField
from django.forms import ValidationError
class BigAutoField(AutoField):
"""A bigint IDENTITY field"""
def get_internal_type(self):
return "BigAutoField"
def to_python(self, value):
if value is None:
return value
try:
return long(value)
except (TypeError, ValueError):
raise ValidationError(
_("This value must be an long."))
def get_db_prep_value(self, value):
if value is None:
return None
return long(value)
class BigForeignKey(ForeignKey):
"""A ForeignKey field that points to a BigAutoField or BigIntegerField"""
def db_type(self):
return BigIntegerField().db_type()
class BigIntegerField(IntegerField):
"""A BigInteger field, until Django ticket #399 lands (if ever.)"""
def get_internal_type(self):
return "BigIntegerField"
def to_python(self, value):
if value is None:
return value
try:
return long(value)
except (TypeError, ValueError):
raise ValidationError(
_("This value must be an long."))
def get_db_prep_value(self, value):
if value is None:
return None
return long(value)
|
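A minimal sketch of the import header the fixed module would need to actually run: the commit above brings in ValidationError, but the translation alias _() is still called without being imported, so this assumes Django's ugettext of that era:

# Hedged sketch, not part of the commit: a complete import header for the module.
from django.db.models import AutoField, ForeignKey, IntegerField
from django.forms import ValidationError
from django.utils.translation import ugettext as _  # _() is used but never imported above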
1a0fe91b6ab9a90573b0f35d6ff81e7f0523acb4
|
praw/util/__init__.py
|
praw/util/__init__.py
|
"""Package imports for utilities."""
__all__ = ("cache",)
|
"""Package imports for utilities."""
import re
__all__ = ("cache", "camel_to_snake", "snake_case_keys")
_re_camel_to_snake = re.compile(r"([a-z0-9](?=[A-Z])|[A-Z](?=[A-Z][a-z]))")
def camel_to_snake(name):
"""Convert `name` from camelCase to snake_case."""
return _re_camel_to_snake.sub(r"\1_", name).lower()
def snake_case_keys(dictionary):
"""Return a new dictionary with keys converted to snake_case.
:param dictionary: The dict to be corrected.
"""
return {camel_to_snake(k): v for k, v in dictionary.items()}
|
Add `camel_to_snake()` and `snake_case_keys()` to praw.util
|
Add `camel_to_snake()` and `snake_case_keys()` to praw.util
|
Python
|
bsd-2-clause
|
praw-dev/praw,gschizas/praw,leviroth/praw,praw-dev/praw,gschizas/praw,leviroth/praw
|
"""Package imports for utilities."""
__all__ = ("cache",)
Add `camel_to_snake()` and `snake_case_keys()` to praw.util
|
"""Package imports for utilities."""
import re
__all__ = ("cache", "camel_to_snake", "snake_case_keys")
_re_camel_to_snake = re.compile(r"([a-z0-9](?=[A-Z])|[A-Z](?=[A-Z][a-z]))")
def camel_to_snake(name):
"""Convert `name` from camelCase to snake_case."""
return _re_camel_to_snake.sub(r"\1_", name).lower()
def snake_case_keys(dictionary):
"""Return a new dictionary with keys converted to snake_case.
:param dictionary: The dict to be corrected.
"""
return {camel_to_snake(k): v for k, v in dictionary.items()}
|
<commit_before>"""Package imports for utilities."""
__all__ = ("cache",)
<commit_msg>Add `camel_to_snake()` and `snake_case_keys()` to praw.util<commit_after>
|
"""Package imports for utilities."""
import re
__all__ = ("cache", "camel_to_snake", "snake_case_keys")
_re_camel_to_snake = re.compile(r"([a-z0-9](?=[A-Z])|[A-Z](?=[A-Z][a-z]))")
def camel_to_snake(name):
"""Convert `name` from camelCase to snake_case."""
return _re_camel_to_snake.sub(r"\1_", name).lower()
def snake_case_keys(dictionary):
"""Return a new dictionary with keys converted to snake_case.
:param dictionary: The dict to be corrected.
"""
return {camel_to_snake(k): v for k, v in dictionary.items()}
|
"""Package imports for utilities."""
__all__ = ("cache",)
Add `camel_to_snake()` and `snake_case_keys()` to praw.util
"""Package imports for utilities."""
import re
__all__ = ("cache", "camel_to_snake", "snake_case_keys")
_re_camel_to_snake = re.compile(r"([a-z0-9](?=[A-Z])|[A-Z](?=[A-Z][a-z]))")
def camel_to_snake(name):
"""Convert `name` from camelCase to snake_case."""
return _re_camel_to_snake.sub(r"\1_", name).lower()
def snake_case_keys(dictionary):
"""Return a new dictionary with keys converted to snake_case.
:param dictionary: The dict to be corrected.
"""
return {camel_to_snake(k): v for k, v in dictionary.items()}
|
<commit_before>"""Package imports for utilities."""
__all__ = ("cache",)
<commit_msg>Add `camel_to_snake()` and `snake_case_keys()` to praw.util<commit_after>"""Package imports for utilities."""
import re
__all__ = ("cache", "camel_to_snake", "snake_case_keys")
_re_camel_to_snake = re.compile(r"([a-z0-9](?=[A-Z])|[A-Z](?=[A-Z][a-z]))")
def camel_to_snake(name):
"""Convert `name` from camelCase to snake_case."""
return _re_camel_to_snake.sub(r"\1_", name).lower()
def snake_case_keys(dictionary):
"""Return a new dictionary with keys converted to snake_case.
:param dictionary: The dict to be corrected.
"""
return {camel_to_snake(k): v for k, v in dictionary.items()}
|
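A quick usage sketch for the two helpers above, assuming they are importable as exposed in __all__:

# Hedged usage sketch for the new praw.util helpers.
from praw.util import camel_to_snake, snake_case_keys

assert camel_to_snake("subredditName") == "subreddit_name"
assert camel_to_snake("HTTPResponseCode") == "http_response_code"
assert snake_case_keys({"linkFlairText": "news"}) == {"link_flair_text": "news"}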
4b3eb563a50a601bcb24a358f6ea63690cffeb27
|
raiden/network/nat.py
|
raiden/network/nat.py
|
import miniupnpc
from ethereum import slogging
MAX_PORT = 65535
RAIDEN_IDENTIFICATOR = "raiden-network udp service"
log = slogging.getLogger(__name__)
def connect():
"""Try to connect to the router.
Returns:
u (miniupnpc.UPnP): the connected UPnP instance
router (string): the connection information
"""
u = miniupnpc.UPnP()
u.discoverdelay = 200
providers = u.discover()
if providers > 1:
log.warning("multiple upnp providers found", num_providers=providers)
elif providers < 1:
log.error("no upnp providers found")
return
router = u.selectigd()
log.debug("connected", router=router)
if u.lanaddr == '0.0.0.0':
log.error("could not query your lanaddr")
return
if u.externalipaddress() == '0.0.0.0' or u.externalipaddress() is None:
log.error("could not query your externalipaddress")
return
return u, router
def open_port(internal_port, external_start_port=None):
"""Open a port for the raiden service (listening at `internal_port`) through
UPnP.
Args:
internal_port (int): the target port of the raiden service
external_start_port (int): query for an external port starting here (default: internal_port)
Returns:
external_ip_address, external_port (tuple(str, int)): if successful or None
"""
if external_start_port is None:
external_start_port = internal_port
u, router = connect()
if u is None:
return
register = lambda internal, external: u.addportmapping(internal,
'UDP',
u.lanaddr,
external,
RAIDEN_IDENTIFICATOR,
'')
external_port = external_start_port
success = register(internal_port, external_port)
while not success and external_port <= MAX_PORT:
external_port += 1
success = register(internal_port, external_port)
if success:
log.info("registered port-mapping per upnp",
internal="{}:{}".format(u.lanaddr, internal_port),
external="{}:{}".format(u.externalipaddress(), external_port))
return (u.externalipaddress(), external_port)
else:
log.error("could not register a port-mapping", router='FIXME')
return
def release_port(internal_port):
"""Try to release the port mapping for `internal_port`.
Args:
internal_port (int): the port that was previously forwarded to.
Returns:
success (boolean): if the release was successful.
"""
u, router = connect()
mapping = u.getspecificportmapping(internal_port, 'UDP')
if mapping is None:
log.error("could not find a port mapping", router=router)
return False
if u.deleteportmapping(internal_port, 'UDP'):
log.info("successfully released port mapping", router=router)
return True
else:
log.warning("could not release port mapping, check your router for stale mappings", router=router)
return False
|
Add minimalistic UPnP NAT punching
|
Add minimalistic UPnP NAT punching
This is not in use yet.
|
Python
|
mit
|
charles-cooper/raiden,hackaugusto/raiden,tomashaber/raiden,tomashaber/raiden,tomashaber/raiden,tomashaber/raiden,hackaugusto/raiden,tomaaron/raiden,tomaaron/raiden,tomaaron/raiden,tomashaber/raiden,tomaaron/raiden,charles-cooper/raiden
|
Add minimalistic UPnP NAT punching
This is not in use yet.
|
import miniupnpc
from ethereum import slogging
MAX_PORT = 65535
RAIDEN_IDENTIFICATOR = "raiden-network udp service"
log = slogging.getLogger(__name__)
def connect():
"""Try to connect to the router.
Returns:
u (miniupnpc.UPnP): the connected UPnP instance
router (string): the connection information
"""
u = miniupnpc.UPnP()
u.discoverdelay = 200
providers = u.discover()
if providers > 1:
log.warning("multiple upnp providers found", num_providers=providers)
elif providers < 1:
log.error("no upnp providers found")
return
router = u.selectigd()
log.debug("connected", router=router)
if u.lanaddr == '0.0.0.0':
log.error("could not query your lanaddr")
return
if u.externalipaddress() == '0.0.0.0' or u.externalipaddress() is None:
log.error("could not query your externalipaddress")
return
return u, router
def open_port(internal_port, external_start_port=None):
"""Open a port for the raiden service (listening at `internal_port`) through
UPnP.
Args:
internal_port (int): the target port of the raiden service
external_start_port (int): query for an external port starting here (default: internal_port)
Returns:
external_ip_address, external_port (tuple(str, int)): if successful or None
"""
if external_start_port is None:
external_start_port = internal_port
u, router = connect()
if u is None:
return
register = lambda internal, external: u.addportmapping(internal,
'UDP',
u.lanaddr,
external,
RAIDEN_IDENTIFICATOR,
'')
external_port = external_start_port
success = register(internal_port, external_port)
while not success and external_port <= MAX_PORT:
external_port += 1
success = register(internal_port, external_port)
if success:
log.info("registered port-mapping per upnp",
internal="{}:{}".format(u.lanaddr, internal_port),
external="{}:{}".format(u.externalipaddress(), external_port))
return (u.externalipaddress(), external_port)
else:
log.error("could not register a port-mapping", router='FIXME')
return
def release_port(internal_port):
"""Try to release the port mapping for `internal_port`.
Args:
internal_port (int): the port that was previously forwarded to.
Returns:
success (boolean): if the release was successful.
"""
u, router = connect()
mapping = u.getspecificportmapping(internal_port, 'UDP')
if mapping is None:
log.error("could not find a port mapping", router=router)
return False
if u.deleteportmapping(internal_port, 'UDP'):
log.info("successfully released port mapping", router=router)
return True
else:
log.warning("could not release port mapping, check your router for stale mappings", router=router)
return False
|
<commit_before><commit_msg>Add minimalistic UPnP NAT punching
This is not in use yet.<commit_after>
|
import miniupnpc
from ethereum import slogging
MAX_PORT = 65535
RAIDEN_IDENTIFICATOR = "raiden-network udp service"
log = slogging.getLogger(__name__)
def connect():
"""Try to connect to the router.
Returns:
u (miniupnpc.UPnP): the connected UPnP instance
router (string): the connection information
"""
u = miniupnpc.UPnP()
u.discoverdelay = 200
providers = u.discover()
if providers > 1:
log.warning("multiple upnp providers found", num_providers=providers)
elif providers < 1:
log.error("no upnp providers found")
return
router = u.selectigd()
log.debug("connected", router=router)
if u.lanaddr == '0.0.0.0':
log.error("could not query your lanaddr")
return
if u.externalipaddress() == '0.0.0.0' or u.externalipaddress() is None:
log.error("could not query your externalipaddress")
return
return u, router
def open_port(internal_port, external_start_port=None):
"""Open a port for the raiden service (listening at `internal_port`) through
UPnP.
Args:
internal_port (int): the target port of the raiden service
external_start_port (int): query for an external port starting here (default: internal_port)
Returns:
external_ip_address, external_port (tuple(str, int)): if successful or None
"""
if external_start_port is None:
external_start_port = internal_port
u, router = connect()
if u is None:
return
register = lambda internal, external: u.addportmapping(internal,
'UDP',
u.lanaddr,
external,
RAIDEN_IDENTIFICATOR,
'')
external_port = external_start_port
success = register(internal_port, external_port)
while not success and external_port <= MAX_PORT:
external_port += 1
success = register(internal_port, external_port)
if success:
log.info("registered port-mapping per upnp",
internal="{}:{}".format(u.lanaddr, internal_port),
external="{}:{}".format(u.externalipaddress(), external_port))
return (u.externalipaddress(), external_port)
else:
log.error("could not register a port-mapping", router='FIXME')
return
def release_port(internal_port):
"""Try to release the port mapping for `internal_port`.
Args:
internal_port (int): the port that was previously forwarded to.
Returns:
success (boolean): if the release was successful.
"""
u, router = connect()
mapping = u.getspecificportmapping(internal_port, 'UDP')
if mapping is None:
log.error("could not find a port mapping", router=router)
return False
if u.deleteportmapping(internal_port, 'UDP'):
log.info("successfully released port mapping", router=router)
return True
else:
log.warning("could not release port mapping, check your router for stale mappings", router=router)
return False
|
Add minimalistic UPnP NAT punching
This is not in use yet.
import miniupnpc
from ethereum import slogging
MAX_PORT = 65535
RAIDEN_IDENTIFICATOR = "raiden-network udp service"
log = slogging.getLogger(__name__)
def connect():
"""Try to connect to the router.
Returns:
u (miniupnc.UPnP): the connected upnp-instance
router (string): the connection information
"""
u = miniupnpc.UPnP()
u.discoverdelay = 200
providers = u.discover()
if providers > 1:
log.warning("multiple upnp providers found", num_providers=providers)
elif providers < 1:
log.error("no upnp providers found")
return
router = u.selectigd()
log.debug("connected", router=router)
if u.lanaddr == '0.0.0.0':
log.error("could not query your lanaddr")
return
if u.externalipaddress() == '0.0.0.0' or u.externalipaddress() is None:
log.error("could not query your externalipaddress")
return
return u, router
def open_port(internal_port, external_start_port=None):
"""Open a port for the raiden service (listening at `internal_port`) through
UPnP.
Args:
internal_port (int): the target port of the raiden service
external_start_port (int): query for an external port starting here (default: internal_port)
Returns:
external_ip_address, external_port (tuple(str, int)): if successful or None
"""
if external_start_port is None:
external_start_port = internal_port
u, router = connect()
if u is None:
return
register = lambda internal, external: u.addportmapping(internal,
'UDP',
u.lanaddr,
external,
RAIDEN_IDENTIFICATOR,
'')
external_port = external_start_port
success = register(internal_port, external_port)
while not success and external_port <= MAX_PORT:
external_port += 1
success = register(internal_port, external_port)
if success:
log.info("registered port-mapping per upnp",
internal="{}:{}".format(u.lanaddr, internal_port),
external="{}:{}".format(u.externalipaddress(), external_port))
return (u.externalipaddress(), external_port)
else:
log.error("could not register a port-mapping", router='FIXME')
return
def release_port(internal_port):
"""Try to release the port mapping for `internal_port`.
Args:
internal_port (int): the port that was previously forwarded to.
Returns:
success (boolean): if the release was successful.
"""
u, router = connect()
mapping = u.getspecificportmapping(internal_port, 'UDP')
if mapping is None:
log.error("could not find a port mapping", router=router)
return False
if u.deleteportmapping(internal_port, 'UDP'):
log.info("successfully released port mapping", router=router)
return True
else:
log.warning("could not release port mapping, check your router for stale mappings", router=router)
return False
|
<commit_before><commit_msg>Add minimalistic UPnP NAT punching
This is not in use yet.<commit_after>import miniupnpc
from ethereum import slogging
MAX_PORT = 65535
RAIDEN_IDENTIFICATOR = "raiden-network udp service"
log = slogging.getLogger(__name__)
def connect():
"""Try to connect to the router.
Returns:
u (miniupnpc.UPnP): the connected UPnP instance
router (string): the connection information
"""
u = miniupnpc.UPnP()
u.discoverdelay = 200
providers = u.discover()
if providers > 1:
log.warning("multiple upnp providers found", num_providers=providers)
elif providers < 1:
log.error("no upnp providers found")
return
router = u.selectigd()
log.debug("connected", router=router)
if u.lanaddr == '0.0.0.0':
log.error("could not query your lanaddr")
return
if u.externalipaddress() == '0.0.0.0' or u.externalipaddress() is None:
log.error("could not query your externalipaddress")
return
return u, router
def open_port(internal_port, external_start_port=None):
"""Open a port for the raiden service (listening at `internal_port`) through
UPnP.
Args:
internal_port (int): the target port of the raiden service
external_start_port (int): query for an external port starting here (default: internal_port)
Returns:
external_ip_address, external_port (tuple(str, int)): if successful or None
"""
if external_start_port is None:
external_start_port = internal_port
u, router = connect()
if u is None:
return
register = lambda internal, external: u.addportmapping(internal,
'UDP',
u.lanaddr,
external,
RAIDEN_IDENTIFICATOR,
'')
external_port = external_start_port
success = register(internal_port, external_port)
while not success and external_port <= MAX_PORT:
external_port += 1
success = register(internal_port, external_port)
if success:
log.info("registered port-mapping per upnp",
internal="{}:{}".format(u.lanaddr, internal_port),
external="{}:{}".format(u.externalipaddress(), external_port))
return (u.externalipaddress(), external_port)
else:
log.error("could not register a port-mapping", router='FIXME')
return
def release_port(internal_port):
"""Try to release the port mapping for `internal_port`.
Args:
internal_port (int): the port that was previously forwarded to.
Returns:
success (boolean): if the release was successful.
"""
u, router = connect()
mapping = u.getspecificportmapping(internal_port, 'UDP')
if mapping is None:
log.error("could not find a port mapping", router=router)
return False
if u.deleteportmapping(internal_port, 'UDP'):
log.info("successfully released port mapping", router=router)
return True
else:
log.warning("could not release port mapping, check your router for stale mappings", router=router)
return False
|
|
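A hedged usage sketch for the module above; UDP port 40001 is an arbitrary example. Note that release_port() unpacks connect()'s return value, so it would raise if no gateway is found:

# Hedged sketch: forward a UDP port through the gateway, then release it again.
from raiden.network import nat

mapping = nat.open_port(40001)
if mapping is not None:
    external_ip, external_port = mapping
    print("reachable at {}:{}".format(external_ip, external_port))
    nat.release_port(40001)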
4ad58714dc0fd8dfd464f804ca356328beed1ff2
|
Cloud.py
|
Cloud.py
|
from abc import ABCMeta, abstractmethod
class Cloud(object):
__metaclass__ = ABCMeta
def __init__(self):
self.driver = None
self.activeNode = None
@abstractmethod
def create(self):
raise Exception('Not implemented')
@abstractmethod
def destroy(self):
raise Exception('Not implemented')
|
Add a base class for OpenStack and Amazon.
|
Add a base class for OpenStack and Amazon.
|
Python
|
mit
|
minidfx/Cloud-Python-
|
Add a base class for OpenStack and Amazon.
|
from abc import ABCMeta, abstractmethod
class Cloud(object):
__metaclass__ = ABCMeta
def __init__(self):
self.driver = None
self.activeNode = None
@abstractmethod
def create(self):
raise Exception('Not implemented')
@abstractmethod
def destroy(self):
raise Exception('Not implemented')
|
<commit_before><commit_msg>Add a base class for OpenStack and Amazon.<commit_after>
|
from abc import ABCMeta, abstractmethod
class Cloud(object):
__metaclass__ = ABCMeta
def __init__(self):
self.driver = None
self.activeNode = None
@abstractmethod
def create(self):
raise Exception('Not implemented')
@abstractmethod
def destroy(self):
raise Exception('Not implemented')
|
Add a base class for OpenStack and Amazon.
from abc import ABCMeta, abstractmethod
class Cloud(object):
__metaclass__ = ABCMeta
def __init__(self):
self.driver = None
self.activeNode = None
@abstractmethod
def create(self):
raise Exception('Not implemented')
@abstractmethod
def destroy(self):
raise Exception('Not implemented')
|
<commit_before><commit_msg>Add a base class for OpenStack and Amazon.<commit_after>from abc import ABCMeta, abstractmethod
class Cloud(object):
__metaclass__ = ABCMeta
def __init__(self):
self.driver = None
self.activeNode = None
@abstractmethod
def create(self):
raise Exception('Not implemented')
@abstractmethod
def destroy(self):
raise Exception('Not implemented')
|
|
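A hedged sketch of how a concrete provider could fill in the new base class; OpenStackCloud and the node placeholder are hypothetical:

class OpenStackCloud(Cloud):
    """Hypothetical subclass implementing the abstract hooks."""
    def create(self):
        # A real implementation would ask a libcloud driver for a node.
        self.activeNode = "node-placeholder"
        return self.activeNode
    def destroy(self):
        self.activeNode = None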
91417abaeb2cdc3cfafdc96a5b30f31d0ce8be80
|
php4dvd/test_login.py
|
php4dvd/test_login.py
|
# -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
import unittest
class Login(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Firefox(capabilities={'native_events':True})
self.driver.implicitly_wait(10)
self.base_url = "http://hub.wart.ru/"
self.verificationErrors = []
self.accept_next_alert = True
self.driver.get(self.base_url + "php4dvd/")
self.driver.find_element_by_id("username").clear()
self.driver.find_element_by_id("username").send_keys("admin")
self.driver.find_element_by_name("password").clear()
self.driver.find_element_by_name("password").send_keys("admin")
self.driver.find_element_by_name("submit").click()
def test_login(self):
driver = self.driver
driver.find_element_by_id("q")
def is_element_present(self, how, what):
try:
self.driver.find_element(by=how, value=what)
except NoSuchElementException, e:
return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally:
self.accept_next_alert = True
def tearDown(self):
self.driver.find_element_by_link_text("Log out").click()
self.assertRegexpMatches(self.close_alert_and_get_its_text(), r"^Are you sure you want to log out[\s\S]$")
self.driver.quit()
self.assertEqual([], self.verificationErrors)
if __name__ == "__main__":
unittest.main()
|
Move login test to a file by itself.
|
Move login test to a file by itself.
|
Python
|
bsd-2-clause
|
bsamorodov/selenium-py-training-samorodov
|
Move login test to a file by itself.
|
# -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
import unittest
class Login(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Firefox(capabilities={'native_events':True})
self.driver.implicitly_wait(10)
self.base_url = "http://hub.wart.ru/"
self.verificationErrors = []
self.accept_next_alert = True
self.driver.get(self.base_url + "php4dvd/")
self.driver.find_element_by_id("username").clear()
self.driver.find_element_by_id("username").send_keys("admin")
self.driver.find_element_by_name("password").clear()
self.driver.find_element_by_name("password").send_keys("admin")
self.driver.find_element_by_name("submit").click()
def test_login(self):
driver = self.driver
driver.find_element_by_id("q")
def is_element_present(self, how, what):
try:
self.driver.find_element(by=how, value=what)
except NoSuchElementException, e:
return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally:
self.accept_next_alert = True
def tearDown(self):
self.driver.find_element_by_link_text("Log out").click()
self.assertRegexpMatches(self.close_alert_and_get_its_text(), r"^Are you sure you want to log out[\s\S]$")
self.driver.quit()
self.assertEqual([], self.verificationErrors)
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Move login test to a file by itself.<commit_after>
|
# -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
import unittest
class Login(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Firefox(capabilities={'native_events':True})
self.driver.implicitly_wait(10)
self.base_url = "http://hub.wart.ru/"
self.verificationErrors = []
self.accept_next_alert = True
self.driver.get(self.base_url + "php4dvd/")
self.driver.find_element_by_id("username").clear()
self.driver.find_element_by_id("username").send_keys("admin")
self.driver.find_element_by_name("password").clear()
self.driver.find_element_by_name("password").send_keys("admin")
self.driver.find_element_by_name("submit").click()
def test_login(self):
driver = self.driver
driver.find_element_by_id("q")
def is_element_present(self, how, what):
try:
self.driver.find_element(by=how, value=what)
except NoSuchElementException, e:
return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally:
self.accept_next_alert = True
def tearDown(self):
self.driver.find_element_by_link_text("Log out").click()
self.assertRegexpMatches(self.close_alert_and_get_its_text(), r"^Are you sure you want to log out[\s\S]$")
self.driver.quit()
self.assertEqual([], self.verificationErrors)
if __name__ == "__main__":
unittest.main()
|
Move login test to a file by itself.
# -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
import unittest
class Login(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Firefox(capabilities={'native_events':True})
self.driver.implicitly_wait(10)
self.base_url = "http://hub.wart.ru/"
self.verificationErrors = []
self.accept_next_alert = True
self.driver.get(self.base_url + "php4dvd/")
self.driver.find_element_by_id("username").clear()
self.driver.find_element_by_id("username").send_keys("admin")
self.driver.find_element_by_name("password").clear()
self.driver.find_element_by_name("password").send_keys("admin")
self.driver.find_element_by_name("submit").click()
def test_login(self):
driver = self.driver
driver.find_element_by_id("q")
def is_element_present(self, how, what):
try:
self.driver.find_element(by=how, value=what)
except NoSuchElementException, e:
return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally:
self.accept_next_alert = True
def tearDown(self):
self.driver.find_element_by_link_text("Log out").click()
self.assertRegexpMatches(self.close_alert_and_get_its_text(), r"^Are you sure you want to log out[\s\S]$")
self.driver.quit()
self.assertEqual([], self.verificationErrors)
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Move login test to a file by itself.<commit_after># -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
import unittest
class Login(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Firefox(capabilities={'native_events':True})
self.driver.implicitly_wait(10)
self.base_url = "http://hub.wart.ru/"
self.verificationErrors = []
self.accept_next_alert = True
self.driver.get(self.base_url + "php4dvd/")
self.driver.find_element_by_id("username").clear()
self.driver.find_element_by_id("username").send_keys("admin")
self.driver.find_element_by_name("password").clear()
self.driver.find_element_by_name("password").send_keys("admin")
self.driver.find_element_by_name("submit").click()
def test_login(self):
driver = self.driver
driver.find_element_by_id("q")
def is_element_present(self, how, what):
try:
self.driver.find_element(by=how, value=what)
except NoSuchElementException, e:
return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally:
self.accept_next_alert = True
def tearDown(self):
self.driver.find_element_by_link_text("Log out").click()
self.assertRegexpMatches(self.close_alert_and_get_its_text(), r"^Are you sure you want to log out[\s\S]$")
self.driver.quit()
self.assertEqual([], self.verificationErrors)
if __name__ == "__main__":
unittest.main()
|
|
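The test is Python 2 code; under Python 3 the exception handler in is_element_present would be spelled as below (a hedged sketch, not part of the commit):

    def is_element_present(self, how, what):
        try:
            self.driver.find_element(by=how, value=what)
        except NoSuchElementException:  # Python 3 drops the ", e" comma syntax
            return False
        return True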
e835c8e1dd368d98eeb3d706983e803f1bb91a3a
|
2018/CodingInterview/3주차/code_sc.py
|
2018/CodingInterview/3주차/code_sc.py
|
# input_data = """
# 10101111
# 01111101
# 11001110
# 00000010
# 2
# 3 -1
# 1 1
# """
# matrix_data = input_data.strip().split("\n")[:4]
#
# n_direction_data = input_data.strip().split("\n")[5:]
#
# matrix = [deque(list(line)) for line in matrix_data]
# step = int(input_data.strip().split("\n")[4])
# n, direction = zip(*[list(map(int, line.split(" "))) for line in n_direction_data])
from collections import deque
matrix = [list(map(lambda x: str(x), input())) for i in range(4)]
step = int(input())
n_direction = list(map(int, input().split()) for i in range(step))
n, direction = zip(*n_direction)
matrix = [deque(list(line)) for line in matrix]
for i in range(step):
change_list = [0,0,0,0]
target_gear = n[i]-1
left_gear_index = list(range(0, target_gear))[::-1]
right_gear_index = list(range(target_gear+1, 4))
direction_value = direction[i]
change_list[target_gear] = direction_value
for l_i in left_gear_index:
if matrix[l_i][2] == matrix[target_gear][6]:
break
if matrix[l_i][2] != matrix[target_gear][6]:
direction_value = -direction_value
target_gear = l_i
change_list[target_gear] = direction_value
direction_value = direction[i]
for r_i in right_gear_index:
if matrix[r_i][6] == matrix[target_gear][2]:
break
if matrix[r_i][6] != matrix[target_gear][2]:
direction_value = -direction_value
target_gear = r_i
change_list[target_gear] = direction_value
for index, line in zip(change_list, matrix):
line.rotate(index)
result = 0
for i, line in enumerate(matrix):
result += int(line[0]) * (2**(i))
print(result)
|
Add 3rd week assignment of SC
|
Add 3rd week assignment of SC
|
Python
|
mit
|
TeamLab/lab_study_group,kjihee/lab_study_group
|
Add 3rd week assignment of SC
|
# input_data = """
# 10101111
# 01111101
# 11001110
# 00000010
# 2
# 3 -1
# 1 1
# """
# matrix_data = input_data.strip().split("\n")[:4]
#
# n_direction_data = input_data.strip().split("\n")[5:]
#
# matrix = [deque(list(line)) for line in matrix_data]
# step = int(input_data.strip().split("\n")[4])
# n, direction = zip(*[list(map(int, line.split(" "))) for line in n_direction_data])
from collections import deque
matrix = [list(map(lambda x: str(x), input())) for i in range(4)]
step = int(input())
n_direction = list(map(int, input().split()) for i in range(step))
n, direction = zip(*n_direction)
matrix = [deque(list(line)) for line in matrix]
for i in range(step):
change_list = [0,0,0,0]
target_gear = n[i]-1
left_gear_index = list(range(0, target_gear))[::-1]
right_gear_index = list(range(target_gear+1, 4))
direction_value = direction[i]
change_list[target_gear] = direction_value
for l_i in left_gear_index:
if matrix[l_i][2] == matrix[target_gear][6]:
break
if matrix[l_i][2] != matrix[target_gear][6]:
direction_value = -direction_value
target_gear = l_i
change_list[target_gear] = direction_value
direction_value = direction[i]
for r_i in right_gear_index:
if matrix[r_i][6] == matrix[target_gear][2]:
break
if matrix[r_i][6] != matrix[target_gear][2]:
direction_value = -direction_value
target_gear = r_i
change_list[target_gear] = direction_value
for index, line in zip(change_list, matrix):
line.rotate(index)
result = 0
for i, line in enumerate(matrix):
result += int(line[0]) * (2**(i))
print(result)
|
<commit_before><commit_msg>Add 3rd week assignment of SC<commit_after>
|
# input_data = """
# 10101111
# 01111101
# 11001110
# 00000010
# 2
# 3 -1
# 1 1
# """
# matrix_data = input_data.strip().split("\n")[:4]
#
# n_direction_data = input_data.strip().split("\n")[5:]
#
# matrix = [deque(list(line)) for line in matrix_data]
# step = int(input_data.strip().split("\n")[4])
# n, direction = zip(*[list(map(int, line.split(" "))) for line in n_direction_data])
from collections import deque
matrix = [list(map(lambda x: str(x), input())) for i in range(4)]
step = int(input())
n_direction = list(map(int, input().split()) for i in range(step))
n, direction = zip(*n_direction)
matrix = [deque(list(line)) for line in matrix]
for i in range(step):
change_list = [0,0,0,0]
target_gear = n[i]-1
left_gear_index = list(range(0, target_gear))[::-1]
right_gear_index = list(range(target_gear+1, 4))
direction_value = direction[i]
change_list[target_gear] = direction_value
for l_i in left_gear_index:
if matrix[l_i][2] == matrix[target_gear][6]:
break
if matrix[l_i][2] != matrix[target_gear][6]:
direction_value = -direction_value
target_gear = l_i
change_list[target_gear] = direction_value
direction_value = direction[i]
for r_i in right_gear_index:
if matrix[r_i][6] == matrix[target_gear][2]:
break
if matrix[r_i][6] != matrix[target_gear][2]:
direction_value = -direction_value
target_gear = r_i
change_list[target_gear] = direction_value
for index, line in zip(change_list, matrix):
line.rotate(index)
result = 0
for i, line in enumerate(matrix):
result += int(line[0]) * (2**(i))
print(result)
|
Add 3rd week assignment of SC
# input_data = """
# 10101111
# 01111101
# 11001110
# 00000010
# 2
# 3 -1
# 1 1
# """
# matrix_data = input_data.strip().split("\n")[:4]
#
# n_direction_data = input_data.strip().split("\n")[5:]
#
# matrix = [deque(list(line)) for line in matrix_data]
# step = int(input_data.strip().split("\n")[4])
# n, direction = zip(*[list(map(int, line.split(" "))) for line in n_direction_data])
from collections import deque
matrix = [list(map(lambda x: str(x), input())) for i in range(4)]
step = int(input())
n_direction = list(map(int, input().split()) for i in range(step))
n, direction = zip(*n_direction)
matrix = [deque(list(line)) for line in matrix]
for i in range(step):
change_list = [0,0,0,0]
target_gear = n[i]-1
left_gear_index = list(range(0, target_gear))[::-1]
right_gear_index = list(range(target_gear+1, 4))
direction_value = direction[i]
change_list[target_gear] = direction_value
for l_i in left_gear_index:
if matrix[l_i][2] == matrix[target_gear][6]:
break
if matrix[l_i][2] != matrix[target_gear][6]:
direction_value = -direction_value
target_gear = l_i
change_list[target_gear] = direction_value
direction_value = direction[i]
for r_i in right_gear_index:
if matrix[r_i][6] == matrix[target_gear][2]:
break
if matrix[r_i][6] != matrix[target_gear][2]:
direction_value = -direction_value
target_gear = r_i
change_list[target_gear] = direction_value
for index, line in zip(change_list, matrix):
line.rotate(index)
result = 0
for i, line in enumerate(matrix):
result += int(line[0]) * (2**(i))
print(result)
|
<commit_before><commit_msg>Add 3rd week assignment of SC<commit_after>
# input_data = """
# 10101111
# 01111101
# 11001110
# 00000010
# 2
# 3 -1
# 1 1
# """
# matrix_data = input_data.strip().split("\n")[:4]
#
# n_direction_data = input_data.strip().split("\n")[5:]
#
# matrix = [deque(list(line)) for line in matrix_data]
# step = int(input_data.strip().split("\n")[4])
# n, direction = zip(*[list(map(int, line.split(" "))) for line in n_direction_data])
from collections import deque
matrix = [list(map(lambda x: str(x), input())) for i in range(4)]
step = int(input())
n_direction = list(map(int, input().split()) for i in range(step))
n, direction = zip(*n_direction)
matrix = [deque(list(line)) for line in matrix]
for i in range(step):
change_list = [0,0,0,0]
target_gear = n[i]-1
left_gear_index = list(range(0, target_gear))[::-1]
right_gear_index = list(range(target_gear+1, 4))
direction_value = direction[i]
change_list[target_gear] = direction_value
for l_i in left_gear_index:
if matrix[l_i][2] == matrix[target_gear][6]:
break
if matrix[l_i][2] != matrix[target_gear][6]:
direction_value = -direction_value
target_gear = l_i
change_list[target_gear] = direction_value
direction_value = direction[i]
for r_i in right_gear_index:
if matrix[r_i][6] == matrix[target_gear][2]:
break
if matrix[r_i][6] != matrix[target_gear][2]:
direction_value = -direction_value
target_gear = r_i
change_list[target_gear] = direction_value
for index, line in zip(change_list, matrix):
line.rotate(index)
result = 0
for i, line in enumerate(matrix):
result += int(line[0]) * (2**(i))
print(result)
|
|
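Tracing the solution above on the sample commented out at the top of the file: gear 3 turns counterclockwise (dragging gear 4 clockwise), then gear 1 turns clockwise (propagating to gears 2 and 3), which leaves the 12-o'clock teeth of gears 1-3 at 1, so the printed score is 1 + 2 + 4 = 7:

# Hedged usage sketch (stdin matches the commented sample input):
# printf '10101111\n01111101\n11001110\n00000010\n2\n3 -1\n1 1\n' | python3 code_sc.py
# prints: 7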
a165962921ccdfbd6ba4eb4a6c7cbd38b57fdf47
|
hackerrank_datatype.py
|
hackerrank_datatype.py
|
i=10
d=2.5
s="HackerRank"
# Declare second integer, double, and String variables.
ii=int(raw_input())
dd=float(raw_input())
ss=raw_input()
# Read and save an integer, double, and String to your variables.
# Print the sum of both integer variables on a new line.
print i+ii
# Print the sum of the double variables on a new line.
print d+dd
# Concatenate and print the String variables on a new line
print s+ " " + ss
# The 's' variable above should be printed first.
|
Print sum of same data types on a different line
|
Print sum of same data types on a different line
|
Python
|
mit
|
kumarisneha/practice_repo
|
Print sum of same data types on a different line
|
i=10
d=2.5
s="HackerRank"
# Declare second integer, double, and String variables.
ii=int(raw_input())
dd=float(raw_input())
ss=raw_input()
# Read and save an integer, double, and String to your variables.
# Print the sum of both integer variables on a new line.
print i+ii
# Print the sum of the double variables on a new line.
print d+dd
# Concatenate and print the String variables on a new line
print s+ " " + ss
# The 's' variable above should be printed first.
|
<commit_before><commit_msg>Print sum of same data types on a different line<commit_after>
|
i=10
d=2.5
s="HackerRank"
# Declare second integer, double, and String variables.
ii=int(raw_input())
dd=float(raw_input())
ss=raw_input()
# Read and save an integer, double, and String to your variables.
# Print the sum of both integer variables on a new line.
print i+ii
# Print the sum of the double variables on a new line.
print d+dd
# Concatenate and print the String variables on a new line
print s+ " " + ss
# The 's' variable above should be printed first.
|
Print sum of same data types on a different line
i=10
d=2.5
s="HackerRank"
# Declare second integer, double, and String variables.
ii=int(raw_input())
dd=float(raw_input())
ss=raw_input()
# Read and save an integer, double, and String to your variables.
# Print the sum of both integer variables on a new line.
print i+ii
# Print the sum of the double variables on a new line.
print d+dd
# Concatenate and print the String variables on a new line
print s+ " " + ss
# The 's' variable above should be printed first.
|
<commit_before><commit_msg>Print sum of same data types on a different line<commit_after>i=10
d=2.5
s="HackerRank"
# Declare second integer, double, and String variables.
ii=int(raw_input())
dd=float(raw_input())
ss=raw_input()
# Read and save an integer, double, and String to your variables.
# Print the sum of both integer variables on a new line.
print i+ii
# Print the sum of the double variables on a new line.
print d+dd
# Concatenate and print the String variables on a new line
print s+ " " + ss
# The 's' variable above should be printed first.
|
|
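The snippet is Python 2 (raw_input and print statements); a hedged Python 3 equivalent of the same exercise:

i = 10
d = 2.5
s = "HackerRank"
ii = int(input())    # second integer
dd = float(input())  # second double
ss = input()         # second string
print(i + ii)
print(d + dd)
print(s + " " + ss)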
00f9a05f651c4c51c8c2a3c359f6eecc3faca699
|
py/ones-and-zeroes.py
|
py/ones-and-zeroes.py
|
from collections import Counter
class Solution(object):
def findMaxForm(self, strs, m, n):
"""
:type strs: List[str]
:type m: int
:type n: int
:rtype: int
"""
str_counter = []
for s in strs:
c0 = s.count('0')
str_counter.append((c0, len(s) - c0))
table = Counter()
table[m, n] = 0
ans = 0
for c0, c1 in str_counter:
for (remm, remn), cnt in table.items():
if c0 <= remm and c1 <= remn:
nxtm, nxtn = remm - c0, remn - c1
table[nxtm, nxtn] = max(table[nxtm, nxtn], cnt + 1)
ans = max(table[nxtm, nxtn], ans)
return ans
|
Add py solution for 474. Ones and Zeroes
|
Add py solution for 474. Ones and Zeroes
474. Ones and Zeroes: https://leetcode.com/problems/ones-and-zeroes/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 474. Ones and Zeroes
474. Ones and Zeroes: https://leetcode.com/problems/ones-and-zeroes/
|
from collections import Counter
class Solution(object):
def findMaxForm(self, strs, m, n):
"""
:type strs: List[str]
:type m: int
:type n: int
:rtype: int
"""
str_counter = []
for s in strs:
c0 = s.count('0')
str_counter.append((c0, len(s) - c0))
table = Counter()
table[m, n] = 0
ans = 0
for c0, c1 in str_counter:
for (remm, remn), cnt in table.items():
if c0 <= remm and c1 <= remn:
nxtm, nxtn = remm - c0, remn - c1
table[nxtm, nxtn] = max(table[nxtm, nxtn], cnt + 1)
ans = max(table[nxtm, nxtn], ans)
return ans
|
<commit_before><commit_msg>Add py solution for 474. Ones and Zeroes
474. Ones and Zeroes: https://leetcode.com/problems/ones-and-zeroes/<commit_after>
|
from collections import Counter
class Solution(object):
def findMaxForm(self, strs, m, n):
"""
:type strs: List[str]
:type m: int
:type n: int
:rtype: int
"""
str_counter = []
for s in strs:
c0 = s.count('0')
str_counter.append((c0, len(s) - c0))
table = Counter()
table[m, n] = 0
ans = 0
for c0, c1 in str_counter:
for (remm, remn), cnt in table.items():
if c0 <= remm and c1 <= remn:
nxtm, nxtn = remm - c0, remn - c1
table[nxtm, nxtn] = max(table[nxtm, nxtn], cnt + 1)
ans = max(table[nxtm, nxtn], ans)
return ans
|
Add py solution for 474. Ones and Zeroes
474. Ones and Zeroes: https://leetcode.com/problems/ones-and-zeroes/
from collections import Counter
class Solution(object):
def findMaxForm(self, strs, m, n):
"""
:type strs: List[str]
:type m: int
:type n: int
:rtype: int
"""
str_counter = []
for s in strs:
c0 = s.count('0')
str_counter.append((c0, len(s) - c0))
table = Counter()
table[m, n] = 0
ans = 0
for c0, c1 in str_counter:
for (remm, remn), cnt in table.items():
if c0 <= remm and c1 <= remn:
nxtm, nxtn = remm - c0, remn - c1
table[nxtm, nxtn] = max(table[nxtm, nxtn], cnt + 1)
ans = max(table[nxtm, nxtn], ans)
return ans
|
<commit_before><commit_msg>Add py solution for 474. Ones and Zeroes
474. Ones and Zeroes: https://leetcode.com/problems/ones-and-zeroes/<commit_after>from collections import Counter
class Solution(object):
def findMaxForm(self, strs, m, n):
"""
:type strs: List[str]
:type m: int
:type n: int
:rtype: int
"""
str_counter = []
for s in strs:
c0 = s.count('0')
str_counter.append((c0, len(s) - c0))
table = Counter()
table[m, n] = 0
ans = 0
for c0, c1 in str_counter:
for (remm, remn), cnt in table.items():
if c0 <= remm and c1 <= remn:
nxtm, nxtn = remm - c0, remn - c1
table[nxtm, nxtn] = max(table[nxtm, nxtn], cnt + 1)
ans = max(table[nxtm, nxtn], ans)
return ans
|
|
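One caveat plus a quick check: the inner loop mutates table while iterating table.items(), which is fine on Python 2 (items() returns a list snapshot) but raises RuntimeError on Python 3 unless the call is wrapped in list(...). Against the LeetCode examples:

# Hedged check under Python 2 semantics (use list(table.items()) on Python 3).
sol = Solution()
assert sol.findMaxForm(["10", "0001", "111001", "1", "0"], 5, 3) == 4
assert sol.findMaxForm(["10", "0", "1"], 1, 1) == 2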
1874783a4626c0aaecaf1527b111ec3cd4e34c42
|
examples/test_save_screenshots.py
|
examples/test_save_screenshots.py
|
import os
from seleniumbase import BaseCase
class ScreenshotTests(BaseCase):
def test_save_screenshot(self):
self.open("https://seleniumbase.io/demo_page")
# "./downloaded_files" is a special SeleniumBase folder for downloads
self.save_screenshot("demo_page.png", folder="./downloaded_files")
self.assert_downloaded_file("demo_page.png")
print('\n"%s/%s" was saved!' % ("downloaded_files", "demo_page.png"))
def test_save_screenshot_to_logs(self):
self.open("https://seleniumbase.io/demo_page")
self.save_screenshot_to_logs()
# "self.log_path" is the absolute path to the "./latest_logs" folder.
# Each test that generates log files will create a subfolder in there
test_logpath = os.path.join(self.log_path, self.test_id)
expected_screenshot = os.path.join(test_logpath, "_1_screenshot.png")
self.assert_true(os.path.exists(expected_screenshot))
print('\n"%s" was saved!' % (expected_screenshot))
self.open("https://seleniumbase.io/tinymce/")
self.save_screenshot_to_logs()
expected_screenshot = os.path.join(test_logpath, "_2_screenshot.png")
self.assert_true(os.path.exists(expected_screenshot))
print('"%s" was saved!' % (expected_screenshot))
self.open("https://seleniumbase.io/error_page/")
self.save_screenshot_to_logs("error_page")
expected_screenshot = os.path.join(test_logpath, "_3_error_page.png")
self.assert_true(os.path.exists(expected_screenshot))
print('"%s" was saved!' % (expected_screenshot))
self.open("https://seleniumbase.io/devices/")
self.save_screenshot_to_logs("devices")
expected_screenshot = os.path.join(test_logpath, "_4_devices.png")
self.assert_true(os.path.exists(expected_screenshot))
print('"%s" was saved!' % (expected_screenshot))
|
Add a test for testing methods that save screenshots
|
Add a test for testing methods that save screenshots
|
Python
|
mit
|
mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase
|
Add a test for testing methods that save screenshots
|
import os
from seleniumbase import BaseCase
class ScreenshotTests(BaseCase):
def test_save_screenshot(self):
self.open("https://seleniumbase.io/demo_page")
# "./downloaded_files" is a special SeleniumBase folder for downloads
self.save_screenshot("demo_page.png", folder="./downloaded_files")
self.assert_downloaded_file("demo_page.png")
print('\n"%s/%s" was saved!' % ("downloaded_files", "demo_page.png"))
def test_save_screenshot_to_logs(self):
self.open("https://seleniumbase.io/demo_page")
self.save_screenshot_to_logs()
# "self.log_path" is the absolute path to the "./latest_logs" folder.
# Each test that generates log files will create a subfolder in there
test_logpath = os.path.join(self.log_path, self.test_id)
expected_screenshot = os.path.join(test_logpath, "_1_screenshot.png")
self.assert_true(os.path.exists(expected_screenshot))
print('\n"%s" was saved!' % (expected_screenshot))
self.open("https://seleniumbase.io/tinymce/")
self.save_screenshot_to_logs()
expected_screenshot = os.path.join(test_logpath, "_2_screenshot.png")
self.assert_true(os.path.exists(expected_screenshot))
print('"%s" was saved!' % (expected_screenshot))
self.open("https://seleniumbase.io/error_page/")
self.save_screenshot_to_logs("error_page")
expected_screenshot = os.path.join(test_logpath, "_3_error_page.png")
self.assert_true(os.path.exists(expected_screenshot))
print('"%s" was saved!' % (expected_screenshot))
self.open("https://seleniumbase.io/devices/")
self.save_screenshot_to_logs("devices")
expected_screenshot = os.path.join(test_logpath, "_4_devices.png")
self.assert_true(os.path.exists(expected_screenshot))
print('"%s" was saved!' % (expected_screenshot))
|
<commit_before><commit_msg>Add a test for testing methods that save screenshots<commit_after>
|
import os
from seleniumbase import BaseCase
class ScreenshotTests(BaseCase):
def test_save_screenshot(self):
self.open("https://seleniumbase.io/demo_page")
# "./downloaded_files" is a special SeleniumBase folder for downloads
self.save_screenshot("demo_page.png", folder="./downloaded_files")
self.assert_downloaded_file("demo_page.png")
print('\n"%s/%s" was saved!' % ("downloaded_files", "demo_page.png"))
def test_save_screenshot_to_logs(self):
self.open("https://seleniumbase.io/demo_page")
self.save_screenshot_to_logs()
# "self.log_path" is the absolute path to the "./latest_logs" folder.
# Each test that generates log files will create a subfolder in there
test_logpath = os.path.join(self.log_path, self.test_id)
expected_screenshot = os.path.join(test_logpath, "_1_screenshot.png")
self.assert_true(os.path.exists(expected_screenshot))
print('\n"%s" was saved!' % (expected_screenshot))
self.open("https://seleniumbase.io/tinymce/")
self.save_screenshot_to_logs()
expected_screenshot = os.path.join(test_logpath, "_2_screenshot.png")
self.assert_true(os.path.exists(expected_screenshot))
print('"%s" was saved!' % (expected_screenshot))
self.open("https://seleniumbase.io/error_page/")
self.save_screenshot_to_logs("error_page")
expected_screenshot = os.path.join(test_logpath, "_3_error_page.png")
self.assert_true(os.path.exists(expected_screenshot))
print('"%s" was saved!' % (expected_screenshot))
self.open("https://seleniumbase.io/devices/")
self.save_screenshot_to_logs("devices")
expected_screenshot = os.path.join(test_logpath, "_4_devices.png")
self.assert_true(os.path.exists(expected_screenshot))
print('"%s" was saved!' % (expected_screenshot))
|
Add a test for testing methods that save screenshots
import os
from seleniumbase import BaseCase
class ScreenshotTests(BaseCase):
def test_save_screenshot(self):
self.open("https://seleniumbase.io/demo_page")
# "./downloaded_files" is a special SeleniumBase folder for downloads
self.save_screenshot("demo_page.png", folder="./downloaded_files")
self.assert_downloaded_file("demo_page.png")
print('\n"%s/%s" was saved!' % ("downloaded_files", "demo_page.png"))
def test_save_screenshot_to_logs(self):
self.open("https://seleniumbase.io/demo_page")
self.save_screenshot_to_logs()
# "self.log_path" is the absolute path to the "./latest_logs" folder.
# Each test that generates log files will create a subfolder in there
test_logpath = os.path.join(self.log_path, self.test_id)
expected_screenshot = os.path.join(test_logpath, "_1_screenshot.png")
self.assert_true(os.path.exists(expected_screenshot))
print('\n"%s" was saved!' % (expected_screenshot))
self.open("https://seleniumbase.io/tinymce/")
self.save_screenshot_to_logs()
expected_screenshot = os.path.join(test_logpath, "_2_screenshot.png")
self.assert_true(os.path.exists(expected_screenshot))
print('"%s" was saved!' % (expected_screenshot))
self.open("https://seleniumbase.io/error_page/")
self.save_screenshot_to_logs("error_page")
expected_screenshot = os.path.join(test_logpath, "_3_error_page.png")
self.assert_true(os.path.exists(expected_screenshot))
print('"%s" was saved!' % (expected_screenshot))
self.open("https://seleniumbase.io/devices/")
self.save_screenshot_to_logs("devices")
expected_screenshot = os.path.join(test_logpath, "_4_devices.png")
self.assert_true(os.path.exists(expected_screenshot))
print('"%s" was saved!' % (expected_screenshot))
|
<commit_before><commit_msg>Add a test for testing methods that save screenshots<commit_after>import os
from seleniumbase import BaseCase
class ScreenshotTests(BaseCase):
def test_save_screenshot(self):
self.open("https://seleniumbase.io/demo_page")
# "./downloaded_files" is a special SeleniumBase folder for downloads
self.save_screenshot("demo_page.png", folder="./downloaded_files")
self.assert_downloaded_file("demo_page.png")
print('\n"%s/%s" was saved!' % ("downloaded_files", "demo_page.png"))
def test_save_screenshot_to_logs(self):
self.open("https://seleniumbase.io/demo_page")
self.save_screenshot_to_logs()
# "self.log_path" is the absolute path to the "./latest_logs" folder.
# Each test that generates log files will create a subfolder in there
test_logpath = os.path.join(self.log_path, self.test_id)
expected_screenshot = os.path.join(test_logpath, "_1_screenshot.png")
self.assert_true(os.path.exists(expected_screenshot))
print('\n"%s" was saved!' % (expected_screenshot))
self.open("https://seleniumbase.io/tinymce/")
self.save_screenshot_to_logs()
expected_screenshot = os.path.join(test_logpath, "_2_screenshot.png")
self.assert_true(os.path.exists(expected_screenshot))
print('"%s" was saved!' % (expected_screenshot))
self.open("https://seleniumbase.io/error_page/")
self.save_screenshot_to_logs("error_page")
expected_screenshot = os.path.join(test_logpath, "_3_error_page.png")
self.assert_true(os.path.exists(expected_screenshot))
print('"%s" was saved!' % (expected_screenshot))
self.open("https://seleniumbase.io/devices/")
self.save_screenshot_to_logs("devices")
expected_screenshot = os.path.join(test_logpath, "_4_devices.png")
self.assert_true(os.path.exists(expected_screenshot))
print('"%s" was saved!' % (expected_screenshot))
|
|
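A hedged note on running the new tests through the usual SeleniumBase pytest entry point:

# Typical invocation (the --browser flag assumes a standard SeleniumBase setup):
#     pytest examples/test_save_screenshots.py --browser=chrome
# The second test's assertions rely on the ./latest_logs/<test_id>/ layout that
# save_screenshot_to_logs() writes into.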
7088faefe52ecb69666fcc5c08398131dccf39df
|
rtorrent-interface.py
|
rtorrent-interface.py
|
#!./bin/python3
from functools import wraps
from rpc import RTorrentXMLRPCClient
import os
from flask import Flask, g, json, request
app = Flask(__name__)
RTORRENT_XMLRPC_CONFIG_PATH = './cfg/rtorrent-interface.cfg'
app.config.from_object(__name__)
@app.route('/')
def index():
return app.send_static_file('index.html')
@app.route('/js/<path>')
def static_proxy_js(path):
return app.send_static_file(os.path.join('js', path))
@app.route('/css/<path>')
def static_proxy_css(path):
return app.send_static_file(os.path.join('css', path))
@app.route('/fonts/<path>')
def static_fonts_css(path):
return app.send_static_file(os.path.join('fonts', path))
def give_xmlrpc_access(f):
@wraps(f)
def decorated_function(*args, **kwargs):
g.rtorrent = RTorrentXMLRPCClient(app.config['RTORRENT_XMLRPC_CONFIG_PATH'])
return f(*args, **kwargs)
return decorated_function
@app.route('/get_torrents/', methods=['GET'])
@give_xmlrpc_access
def get_torrents():
"""
Returns a list of all current torrents that are available, as JSON.
Data returned:
* Torrent hash
* Torrent name
* Completion in percent
"""
torrents = []
for hash in g.rtorrent.download_list():
name = g.rtorrent.d.get_name(hash)
completed_bytes = int(g.rtorrent.d.get_completed_bytes(hash))
total_bytes = int(g.rtorrent.d.get_size_bytes(hash))
progress = round(100 * (completed_bytes / total_bytes))
torrents.append({
'name': name,
'progress': progress
})
return json.jsonify({'data': torrents})
@app.route('/add_magnet/', methods=['POST'])
@give_xmlrpc_access
def add_magnet():
"""
Adds a magnet link to the downloads list and starts it.
Required parameters:
* magnet link
"""
g.rtorrent.load_start(request.values['link'])
return json.jsonify({'success': True})
if __name__ == '__main__':
app.run(debug=True)
|
Add small web backend serving static index.html and acting as an API for getting torrent download info
|
Add small web backend serving static index.html and acting as an API for getting torrent download info
|
Python
|
mit
|
lfxgroove/rtorrent-interface,lfxgroove/rtorrent-interface
|
Add small web backend serving static index.html and acting as an API for getting torrent download info
|
#!./bin/python3
from functools import wraps
from rpc import RTorrentXMLRPCClient
import os
from flask import Flask, g, json, request
app = Flask(__name__)
RTORRENT_XMLRPC_CONFIG_PATH = './cfg/rtorrent-interface.cfg'
app.config.from_object(__name__)
@app.route('/')
def index():
return app.send_static_file('index.html')
@app.route('/js/<path>')
def static_proxy_js(path):
return app.send_static_file(os.path.join('js', path))
@app.route('/css/<path>')
def static_proxy_css(path):
return app.send_static_file(os.path.join('css', path))
@app.route('/fonts/<path>')
def static_fonts_css(path):
return app.send_static_file(os.path.join('fonts', path))
def give_xmlrpc_access(f):
@wraps(f)
def decorated_function(*args, **kwargs):
g.rtorrent = RTorrentXMLRPCClient(app.config['RTORRENT_XMLRPC_CONFIG_PATH'])
return f(*args, **kwargs)
return decorated_function
@app.route('/get_torrents/', methods=['GET'])
@give_xmlrpc_access
def get_torrents():
"""
Returns a list of all current torrents that are available, as JSON.
Data returned:
* Torrent name
* Completion in percent
"""
torrents = []
for hash in g.rtorrent.download_list():
name = g.rtorrent.d.get_name(hash)
completed_bytes = int(g.rtorrent.d.get_completed_bytes(hash))
total_bytes = int(g.rtorrent.d.get_size_bytes(hash))
progress = round(100 * (completed_bytes / total_bytes))
torrents.append({
'name': name,
'progress': progress
})
return json.jsonify({'data': torrents})
@app.route('/add_magnet/', methods=['POST'])
@give_xmlrpc_access
def add_magnet():
"""
Adds a magnet link to the downloads list and starts it.
Required parameters:
* magnet link
"""
g.rtorrent.load_start(request.values['link'])
return json.jsonify({'success': True})
if __name__ == '__main__':
app.run(debug=True)
|
<commit_before><commit_msg>Add small web backend serving a static index.html and acting as an API for getting torrent download info<commit_after>
|
#!./bin/python3
from functools import wraps
from rpc import RTorrentXMLRPCClient
import os
from flask import Flask, g, json, request
app = Flask(__name__)
RTORRENT_XMLRPC_CONFIG_PATH = './cfg/rtorrent-interface.cfg'
app.config.from_object(__name__)
@app.route('/')
def index():
return app.send_static_file('index.html')
@app.route('/js/<path>')
def static_proxy_js(path):
return app.send_static_file(os.path.join('js', path))
@app.route('/css/<path>')
def static_proxy_css(path):
return app.send_static_file(os.path.join('css', path))
@app.route('/fonts/<path>')
def static_fonts_css(path):
return app.send_static_file(os.path.join('fonts', path))
def give_xmlrpc_access(f):
@wraps(f)
def decorated_function(*args, **kwargs):
g.rtorrent = RTorrentXMLRPCClient(app.config['RTORRENT_XMLRPC_CONFIG_PATH'])
return f(*args, **kwargs)
return decorated_function
@app.route('/get_torrents/', methods=['GET'])
@give_xmlrpc_access
def get_torrents():
"""
Returns a list of all current torrents that are available, as JSON.
Data returned:
* Torrent name
* Completion in percent
"""
torrents = []
for hash in g.rtorrent.download_list():
name = g.rtorrent.d.get_name(hash)
completed_bytes = int(g.rtorrent.d.get_completed_bytes(hash))
total_bytes = int(g.rtorrent.d.get_size_bytes(hash))
progress = round(100 * (completed_bytes / total_bytes))
torrents.append({
'name': name,
'progress': progress
})
return json.jsonify({'data': torrents})
@app.route('/add_magnet/', methods=['POST'])
@give_xmlrpc_access
def add_magnet():
"""
Adds a magnet link to the downloads list and starts it.
Required parameters:
* magnet link
"""
g.rtorrent.load_start(request.values['link'])
return json.jsonify({'success': True})
if __name__ == '__main__':
app.run(debug=True)
|
Add small web backend serving a static index.html and acting as an API for getting torrent download info#!./bin/python3
from functools import wraps
from rpc import RTorrentXMLRPCClient
import os
from flask import Flask, g, json, request
app = Flask(__name__)
RTORRENT_XMLRPC_CONFIG_PATH = './cfg/rtorrent-interface.cfg'
app.config.from_object(__name__)
@app.route('/')
def index():
return app.send_static_file('index.html')
@app.route('/js/<path>')
def static_proxy_js(path):
return app.send_static_file(os.path.join('js', path))
@app.route('/css/<path>')
def static_proxy_css(path):
return app.send_static_file(os.path.join('css', path))
@app.route('/fonts/<path>')
def static_fonts_css(path):
return app.send_static_file(os.path.join('fonts', path))
def give_xmlrpc_access(f):
@wraps(f)
def decorated_function(*args, **kwargs):
g.rtorrent = RTorrentXMLRPCClient(app.config['RTORRENT_XMLRPC_CONFIG_PATH'])
return f(*args, **kwargs)
return decorated_function
@app.route('/get_torrents/', methods=['GET'])
@give_xmlrpc_access
def get_torrents():
"""
Returns a list of all current torrents that are available, as JSON.
Data returned:
* Torrent name
* Completion in percent
"""
torrents = []
for hash in g.rtorrent.download_list():
name = g.rtorrent.d.get_name(hash)
completed_bytes = int(g.rtorrent.d.get_completed_bytes(hash))
total_bytes = int(g.rtorrent.d.get_size_bytes(hash))
progress = round(100 * (completed_bytes / total_bytes))
torrents.append({
'name': name,
'progress': progress
})
return json.jsonify({'data': torrents})
@app.route('/add_magnet/', methods=['POST'])
@give_xmlrpc_access
def add_magnet():
"""
Adds a magnet link to the downloads list and starts it.
Required parameters:
* magnet link
"""
g.rtorrent.load_start(request.values['link'])
return json.jsonify({'success': True})
if __name__ == '__main__':
app.run(debug=True)
|
<commit_before><commit_msg>Add small web backend serving a static index.html and acting as an API for getting torrent download info<commit_after>#!./bin/python3
from functools import wraps
from rpc import RTorrentXMLRPCClient
import os
from flask import Flask, g, json, request
app = Flask(__name__)
RTORRENT_XMLRPC_CONFIG_PATH = './cfg/rtorrent-interface.cfg'
app.config.from_object(__name__)
@app.route('/')
def index():
return app.send_static_file('index.html')
@app.route('/js/<path>')
def static_proxy_js(path):
return app.send_static_file(os.path.join('js', path))
@app.route('/css/<path>')
def static_proxy_css(path):
return app.send_static_file(os.path.join('css', path))
@app.route('/fonts/<path>')
def static_fonts_css(path):
return app.send_static_file(os.path.join('fonts', path))
def give_xmlrpc_access(f):
@wraps(f)
def decorated_function(*args, **kwargs):
g.rtorrent = RTorrentXMLRPCClient(app.config['RTORRENT_XMLRPC_CONFIG_PATH'])
return f(*args, **kwargs)
return decorated_function
@app.route('/get_torrents/', methods=['GET'])
@give_xmlrpc_access
def get_torrents():
"""
Returns a list of all current torrents that are available, as JSON.
Data returned:
* Torrent name
* Completion in percent
"""
torrents = []
for hash in g.rtorrent.download_list():
name = g.rtorrent.d.get_name(hash)
completed_bytes = int(g.rtorrent.d.get_completed_bytes(hash))
total_bytes = int(g.rtorrent.d.get_size_bytes(hash))
progress = round(100 * (completed_bytes / total_bytes))
torrents.append({
'name': name,
'progress': progress
})
return json.jsonify({'data': torrents})
@app.route('/add_magnet/', methods=['POST'])
@give_xmlrpc_access
def add_magnet():
"""
Adds a magnet link to the downloads list and starts it.
Required parameters:
* magnet link
"""
g.rtorrent.load_start(request.values['link'])
return json.jsonify({'success': True})
if __name__ == '__main__':
app.run(debug=True)
|
|
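For context, a minimal client sketch exercising the two endpoints above, assuming the app runs locally on Flask's default development port (5000); the magnet URI is a placeholder, not taken from the source:
import requests

BASE = 'http://127.0.0.1:5000'  # assumed Flask dev-server address

# List current torrents and their completion percentage.
for t in requests.get(BASE + '/get_torrents/').json()['data']:
    print('%s: %d%%' % (t['name'], t['progress']))

# Queue a magnet link; 'link' is the form field read by add_magnet().
resp = requests.post(BASE + '/add_magnet/', data={'link': 'magnet:?xt=urn:btih:PLACEHOLDER'})
print(resp.json())  # expected: {'success': True}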
5b8f125ca7a3c1b833105380316e325ce61b2c9d
|
test/test_priority.py
|
test/test_priority.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Simple test to send each priority level
import unittest
import logging
logging.basicConfig(level=logging.WARNING)
from gntp.notifier import GrowlNotifier
class TestHash(unittest.TestCase):
def setUp(self):
self.growl = GrowlNotifier('GNTP unittest', ['Testing'])
self.growl.register()
def test_lines(self):
for priority in [2, 1, 0, -1, -2]:
msg = 'Priority %s' % priority
self.growl.notify('Testing', msg, msg, priority=priority)
if __name__ == '__main__':
unittest.main()
|
Add a test for priority levels
|
Add a test for priority levels
|
Python
|
mit
|
kfdm/gntp
|
Add a test for priority levels
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Simple test to send each priority level
import unittest
import logging
logging.basicConfig(level=logging.WARNING)
from gntp.notifier import GrowlNotifier
class TestHash(unittest.TestCase):
def setUp(self):
self.growl = GrowlNotifier('GNTP unittest', ['Testing'])
self.growl.register()
def test_lines(self):
for priority in [2, 1, 0, -1, -2]:
msg = 'Priority %s' % priority
self.growl.notify('Testing', msg, msg, priority=priority)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add a test for priority levels<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Simple test to send each priority level
import unittest
import logging
logging.basicConfig(level=logging.WARNING)
from gntp.notifier import GrowlNotifier
class TestHash(unittest.TestCase):
def setUp(self):
self.growl = GrowlNotifier('GNTP unittest', ['Testing'])
self.growl.register()
def test_lines(self):
for priority in [2, 1, 0, -1, -2]:
msg = 'Priority %s' % priority
self.growl.notify('Testing', msg, msg, priority=priority)
if __name__ == '__main__':
unittest.main()
|
Add a test for priority levels#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Simple test to send each priority level
import unittest
import logging
logging.basicConfig(level=logging.WARNING)
from gntp.notifier import GrowlNotifier
class TestHash(unittest.TestCase):
def setUp(self):
self.growl = GrowlNotifier('GNTP unittest', ['Testing'])
self.growl.register()
def test_lines(self):
for priority in [2, 1, 0, -1, -2]:
msg = 'Priority %s' % priority
self.growl.notify('Testing', msg, msg, priority=priority)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add a test for priority levels<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Simple test to send each priority level
import unittest
import logging
logging.basicConfig(level=logging.WARNING)
from gntp.notifier import GrowlNotifier
class TestHash(unittest.TestCase):
def setUp(self):
self.growl = GrowlNotifier('GNTP unittest', ['Testing'])
self.growl.register()
def test_lines(self):
for priority in [2, 1, 0, -1, -2]:
msg = 'Priority %s' % priority
self.growl.notify('Testing', msg, msg, priority=priority)
if __name__ == '__main__':
unittest.main()
|
|
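For reference, a minimal sketch mapping the five priority values the loop iterates over to their conventional Growl display names; the names follow the Growl/GNTP convention and are included here for orientation only:
# GNTP priority levels, highest to lowest (Growl naming convention):
PRIORITY_NAMES = {
    2: 'Emergency',
    1: 'High',
    0: 'Normal',
    -1: 'Moderate',
    -2: 'Very Low',
}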
7300912727233500360a9e51f37161e04b04258b
|
util/hubget.py
|
util/hubget.py
|
#!/usr/bin/env python3
from cozify import hub, hub_api
import pprint, sys
def main(path):
hub.ping()
kwargs = {}
hub._fill_kwargs(kwargs)
response = hub_api.get(path, **kwargs)
pprint.pprint(response)
if __name__ == "__main__":
if len(sys.argv) > 1:
main(sys.argv[1])
else:
sys.exit(1)
|
Add a new utility for easily making arbitrary hub GET calls
|
Add a new utility for easily making arbitrary hub GET calls
Useful for exploring for undocumented endpoints without having to deal
with authentication, hub ip and so on.
|
Python
|
mit
|
Artanicus/python-cozify,Artanicus/python-cozify
|
Add a new utility for easily making arbitrary hub GET calls
Useful for exploring for undocumented endpoints without having to deal
with authentication, hub ip and so on.
|
#!/usr/bin/env python3
from cozify import hub, hub_api
import pprint, sys
def main(path):
hub.ping()
kwargs = {}
hub._fill_kwargs(kwargs)
response = hub_api.get(path, **kwargs)
pprint.pprint(response)
if __name__ == "__main__":
if len(sys.argv) > 1:
main(sys.argv[1])
else:
sys.exit(1)
|
<commit_before><commit_msg>Add a new utility for easily making arbitrary hub GET calls
Useful for exploring for undocumented endpoints without having to deal
with authentication, hub ip and so on.<commit_after>
|
#!/usr/bin/env python3
from cozify import hub, hub_api
import pprint, sys
def main(path):
hub.ping()
kwargs = {}
hub._fill_kwargs(kwargs)
response = hub_api.get(path, **kwargs)
pprint.pprint(response)
if __name__ == "__main__":
if len(sys.argv) > 1:
main(sys.argv[1])
else:
sys.exit(1)
|
Add a new utility for easily making arbitrary hub GET calls
Useful for exploring for undocumented endpoints without having to deal
with authentication, hub ip and so on.#!/usr/bin/env python3
from cozify import hub, hub_api
import pprint, sys
def main(path):
hub.ping()
kwargs = {}
hub._fill_kwargs(kwargs)
response = hub_api.get(path, **kwargs)
pprint.pprint(response)
if __name__ == "__main__":
if len(sys.argv) > 1:
main(sys.argv[1])
else:
sys.exit(1)
|
<commit_before><commit_msg>Add a new utility for easily making arbitrary hub GET calls
Useful for exploring for undocumented endpoints without having to deal
with authentication, hub ip and so on.<commit_after>#!/usr/bin/env python3
from cozify import hub, hub_api
import pprint, sys
def main(path):
hub.ping()
kwargs = {}
hub._fill_kwargs(kwargs)
response = hub_api.get(path, **kwargs)
pprint.pprint(response)
if __name__ == "__main__":
if len(sys.argv) > 1:
main(sys.argv[1])
else:
sys.exit(1)
|
|
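A minimal usage sketch, assuming the script is saved as hubget.py in the working directory; '/hub/tz' is a hypothetical endpoint path used purely for illustration:
# Command line (hypothetical path):
#   ./hubget.py /hub/tz
# Or programmatically, since the __main__ guard makes the module import-safe:
from hubget import main
main('/hub/tz')  # pings the hub, fills auth kwargs, GETs the path, pretty-prints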
c58370b3e38f663ac361d82ea984ed5aa5e03318
|
mecab_parsing.py
|
mecab_parsing.py
|
import operator
class WordInfo(object):
def __init__(self):
self.dictionary_form = ""
self.word_class = ""
self.spelling = ""
self.value = ""
self.features = []
def __str__(self):
return "{0},{1},{2},{3}".format(self.value, self.spelling, self.dictionary_form, self.word_class)
def PopulateWordInfo(m):
features = m.feature.split(",")
wordInfo = WordInfo()
wordInfo.value = m.surface
wordInfo.word_class = features[0]
if (len(features)>6):
wordInfo.dictionary_form = features[6]
if (len(features)>7):
wordInfo.spelling = features[7]
wordInfo.features=features
return wordInfo
def parse_file(path, word_dict):
#for i in itertools.islice(parse_srt(path, 'GB18030'), 69, 70):
for i in parse_srt(path, 'GB18030'):
if len(i.text) == 2:
m = tagger.parseToNode(i.text[0].encode("utf-8"))
while m:
if m.feature !="BOS/EOS":
word = PopulateWordInfo(m)
if word.dictionary_form in word_dict:
word_dict[word.dictionary_form] = word_dict[word.dictionary_form] +1
else:
word_dict[word.dictionary_form] = 1
m = m.next
def print_word_dict(word_dict):
print len(word_dict)
sorted_x = sorted(word_dict.items(), key=operator.itemgetter(1), reverse=True)
total = 0
valid = 0
for x in sorted_x:
total = total + x[1]
if (x[1]>1):
valid = valid+1
print "{0},{1}".format(x[0], x[1])
print total
print valid
|
Add mecab related utility function
|
Add mecab related utility function
|
Python
|
mit
|
Deathhush/HushUtility,Deathhush/Hush
|
Add mecab related utility function
|
import operator
class WordInfo(object):
def __init__(self):
self.dictionary_form = ""
self.word_class = ""
self.spelling = ""
self.value = ""
self.features = []
def __str__(self):
return "{0},{1},{2},{3}".format(self.value, self.spelling, self.dictionary_form, self.word_class)
def PopulateWordInfo(m):
features = m.feature.split(",")
wordInfo = WordInfo()
wordInfo.value = m.surface
wordInfo.word_class = features[0]
if (len(features)>6):
wordInfo.dictionary_form = features[6]
if (len(features)>7):
wordInfo.spelling = features[7]
wordInfo.features=features
return wordInfo
def parse_file(path, word_dict):
#for i in itertools.islice(parse_srt(path, 'GB18030'), 69, 70):
for i in parse_srt(path, 'GB18030'):
if len(i.text) == 2:
m = tagger.parseToNode(i.text[0].encode("utf-8"))
while m:
if m.feature !="BOS/EOS":
word = PopulateWordInfo(m)
if word.dictionary_form in word_dict:
word_dict[word.dictionary_form] = word_dict[word.dictionary_form] +1
else:
word_dict[word.dictionary_form] = 1
m = m.next
def print_word_dict(word_dict):
print len(word_dict)
sorted_x = sorted(word_dict.items(), key=operator.itemgetter(1), reverse=True)
total = 0
valid = 0
for x in sorted_x:
total = total + x[1]
if (x[1]>1):
valid = valid+1
print "{0},{1}".format(x[0], x[1])
print total
print valid
|
<commit_before><commit_msg>Add mecab related utility function<commit_after>
|
import operator
class WordInfo(object):
def __init__(self):
self.dictionary_form = ""
self.word_class = ""
self.spelling = ""
self.value = ""
self.features = []
def __str__(self):
return "{0},{1},{2},{3}".format(self.value, self.spelling, self.dictionary_form, self.word_class)
def PopulateWordInfo(m):
features = m.feature.split(",")
wordInfo = WordInfo()
wordInfo.value = m.surface
wordInfo.word_class = features[0]
if (len(features)>6):
wordInfo.dictionary_form = features[6]
if (len(features)>7):
wordInfo.spelling = features[7]
wordInfo.features=features
return wordInfo
def parse_file(path, word_dict):
#for i in itertools.islice(parse_srt(path, 'GB18030'), 69, 70):
for i in parse_srt(path, 'GB18030'):
if len(i.text) == 2:
m = tagger.parseToNode(i.text[0].encode("utf-8"))
while m:
if m.feature !="BOS/EOS":
word = PopulateWordInfo(m)
if word.dictionary_form in word_dict:
word_dict[word.dictionary_form] = word_dict[word.dictionary_form] +1
else:
word_dict[word.dictionary_form] = 1
m = m.next
def print_word_dict(word_dict):
print len(word_dict)
sorted_x = sorted(word_dict.items(), key=operator.itemgetter(1), reverse=True)
total = 0
valid = 0
for x in sorted_x:
total = total + x[1]
if (x[1]>1):
valid = valid+1
print "{0},{1}".format(x[0], x[1])
print total
print valid
|
Add mecab related utility functionimport operator
class WordInfo(object):
def __init__(self):
self.dictionary_form = ""
self.word_class = ""
self.spelling = ""
self.value = ""
self.features = []
def __str__(self):
return "{0},{1},{2},{3}".format(self.value, self.spelling, self.dictionary_form, self.word_class)
def PopulateWordInfo(m):
features = m.feature.split(",")
wordInfo = WordInfo()
wordInfo.value = m.surface
wordInfo.word_class = features[0]
if (len(features)>6):
wordInfo.dictionary_form = features[6]
if (len(features)>7):
wordInfo.spelling = features[7]
wordInfo.features=features
return wordInfo
def parse_file(path, word_dict):
#for i in itertools.islice(parse_srt(path, 'GB18030'), 69, 70):
for i in parse_srt(path, 'GB18030'):
if len(i.text) == 2:
m = tagger.parseToNode(i.text[0].encode("utf-8"))
while m:
if m.feature !="BOS/EOS":
word = PopulateWordInfo(m)
if word.dictionary_form in word_dict:
word_dict[word.dictionary_form] = word_dict[word.dictionary_form] +1
else:
word_dict[word.dictionary_form] = 1
m = m.next
def print_word_dict(word_dict):
print len(word_dict)
sorted_x = sorted(word_dict.items(), key=operator.itemgetter(1), reverse=True)
total = 0
valid = 0
for x in sorted_x:
total = total + x[1]
if (x[1]>1):
valid = valid+1
print "{0},{1}".format(x[0], x[1])
print total
print valid
|
<commit_before><commit_msg>Add mecab related utility function<commit_after>import operator
class WordInfo(object):
def __init__(self):
self.dictionary_form = ""
self.word_class = ""
self.spelling = ""
self.value = ""
self.features = []
def __str__(self):
return "{0},{1},{2},{3}".format(self.value, self.spelling, self.dictionary_form, self.word_class)
def PopulateWordInfo(m):
features = m.feature.split(",")
wordInfo = WordInfo()
wordInfo.value = m.surface
wordInfo.word_class = features[0]
if (len(features)>6):
wordInfo.dictionary_form = features[6]
if (len(features)>7):
wordInfo.spelling = features[7]
wordInfo.features=features
return wordInfo
def parse_file(path, word_dict):
#for i in itertools.islice(parse_srt(path, 'GB18030'), 69, 70):
for i in parse_srt(path, 'GB18030'):
if len(i.text) == 2:
m = tagger.parseToNode(i.text[0].encode("utf-8"))
while m:
if m.feature !="BOS/EOS":
word = PopulateWordInfo(m)
if word.dictionary_form in word_dict:
word_dict[word.dictionary_form] = word_dict[word.dictionary_form] +1
else:
word_dict[word.dictionary_form] = 1
m = m.next
def print_word_dict(word_dict):
print len(word_dict)
sorted_x = sorted(word_dict.items(), key=operator.itemgetter(1), reverse=True)
total = 0
valid = 0
for x in sorted_x:
total = total + x[1]
if (x[1]>1):
valid = valid+1
print "{0},{1}".format(x[0], x[1])
print total
print valid
|
|
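The snippet above references a module-level tagger and a parse_srt helper that it never defines; a minimal sketch of the setup it appears to assume follows (the SRT parser is a stub, not the author's implementation):
import MeCab

# parseToNode() walks morpheme nodes exposing .surface and .feature, as used above.
tagger = MeCab.Tagger()

def parse_srt(path, encoding):
    """Stub: should yield subtitle entries whose .text is a list of lines."""
    raise NotImplementedError('plug in a real SRT parser here')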
c92d19ec8b689d63a95d3dc5f914c2e36d28daf6
|
ipywidgets/widgets/tests/test_widget.py
|
ipywidgets/widgets/tests/test_widget.py
|
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
"""Test Widget."""
from IPython.utils.capture import capture_output
from IPython.display import display
from ..widget import Widget
def test_no_widget_view():
with capture_output() as cap:
w = Widget()
display(w)
assert len(cap.outputs) == 0
assert len(cap.stdout) == 0
assert len(cap.stderr) == 0
|
Test that Widget does not produce any view output
|
Test that Widget does not produce any view output
|
Python
|
bsd-3-clause
|
jupyter-widgets/ipywidgets,ipython/ipywidgets,ipython/ipywidgets,ipython/ipywidgets,jupyter-widgets/ipywidgets,ipython/ipywidgets,jupyter-widgets/ipywidgets,SylvainCorlay/ipywidgets,ipython/ipywidgets,SylvainCorlay/ipywidgets,SylvainCorlay/ipywidgets,SylvainCorlay/ipywidgets,jupyter-widgets/ipywidgets
|
Test that Widget does not produce any view output
|
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
"""Test Widget."""
from IPython.utils.capture import capture_output
from IPython.display import display
from ..widget import Widget
def test_no_widget_view():
with capture_output() as cap:
w = Widget()
display(w)
assert len(cap.outputs) == 0
assert len(cap.stdout) == 0
assert len(cap.stderr) == 0
|
<commit_before><commit_msg>Test that Widget does not produce any view output<commit_after>
|
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
"""Test Widget."""
from IPython.utils.capture import capture_output
from IPython.display import display
from ..widget import Widget
def test_no_widget_view():
with capture_output() as cap:
w = Widget()
display(w)
assert len(cap.outputs) == 0
assert len(cap.stdout) == 0
assert len(cap.stderr) == 0
|
Test that Widget does not produce any view output# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
"""Test Widget."""
from IPython.utils.capture import capture_output
from IPython.display import display
from ..widget import Widget
def test_no_widget_view():
with capture_output() as cap:
w = Widget()
display(w)
assert len(cap.outputs) == 0
assert len(cap.stdout) == 0
assert len(cap.stderr) == 0
|
<commit_before><commit_msg>Test that Widget does not produce any view output<commit_after># Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
"""Test Widget."""
from IPython.utils.capture import capture_output
from IPython.display import display
from ..widget import Widget
def test_no_widget_view():
with capture_output() as cap:
w = Widget()
display(w)
assert len(cap.outputs) == 0
assert len(cap.stdout) == 0
assert len(cap.stderr) == 0
|
|
3172aff3c1cc90a5e88d60d362cb73336b0dc646
|
nettests/simpletest.py
|
nettests/simpletest.py
|
from ooni import nettest
class SimpleTest(nettest.TestCase):
inputs = range(1,100)
optParameters = [['asset', 'a', None, 'Asset file'],
['controlserver', 'c', 'google.com', 'Specify the control server'],
['resume', 'r', 0, 'Resume at this index'],
['other', 'o', None, 'Other arguments']]
def test_foo(self, *arg, **kw):
print "Running %s with %s" % ("test_foo", self.input)
self.report['test_foo'] = 'Antani'
self.report['shared'] = "sblinda"
self.assertEqual(1,1)
def test_f4oo(self, *arg, **kw):
print "Running %s with %s" % ("test_f4oo", self.input)
self.report['test_f4oo'] = 'Antani'
self.report['shared'] = "sblinda2"
self.assertEqual(1,1)
|
Add a very simple test that *must* always pass. * Useful for testing the newstyle API
|
Add a very simple test that *must* always pass.
* Useful for testing the newstyle API
|
Python
|
bsd-2-clause
|
0xPoly/ooni-probe,juga0/ooni-probe,lordappsec/ooni-probe,0xPoly/ooni-probe,hackerberry/ooni-probe,0xPoly/ooni-probe,hackerberry/ooni-probe,0xPoly/ooni-probe,juga0/ooni-probe,kdmurray91/ooni-probe,Karthikeyan-kkk/ooni-probe,juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,lordappsec/ooni-probe,juga0/ooni-probe,lordappsec/ooni-probe,lordappsec/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,kdmurray91/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe
|
Add a very simple test that *must* always pass.
* Useful for testing the newstyle API
|
from ooni import nettest
class SimpleTest(nettest.TestCase):
inputs = range(1,100)
optParameters = [['asset', 'a', None, 'Asset file'],
['controlserver', 'c', 'google.com', 'Specify the control server'],
['resume', 'r', 0, 'Resume at this index'],
['other', 'o', None, 'Other arguments']]
def test_foo(self, *arg, **kw):
print "Running %s with %s" % ("test_foo", self.input)
self.report['test_foo'] = 'Antani'
self.report['shared'] = "sblinda"
self.assertEqual(1,1)
def test_f4oo(self, *arg, **kw):
print "Running %s with %s" % ("test_f4oo", self.input)
self.report['test_f4oo'] = 'Antani'
self.report['shared'] = "sblinda2"
self.assertEqual(1,1)
|
<commit_before><commit_msg>Add a very simple test that *must* always pass.
* Useful for testing the newstyle API<commit_after>
|
from ooni import nettest
class SimpleTest(nettest.TestCase):
inputs = range(1,100)
optParameters = [['asset', 'a', None, 'Asset file'],
['controlserver', 'c', 'google.com', 'Specify the control server'],
['resume', 'r', 0, 'Resume at this index'],
['other', 'o', None, 'Other arguments']]
def test_foo(self, *arg, **kw):
print "Running %s with %s" % ("test_foo", self.input)
self.report['test_foo'] = 'Antani'
self.report['shared'] = "sblinda"
self.assertEqual(1,1)
def test_f4oo(self, *arg, **kw):
print "Running %s with %s" % ("test_f4oo", self.input)
self.report['test_f4oo'] = 'Antani'
self.report['shared'] = "sblinda2"
self.assertEqual(1,1)
|
Add a very simple test that *must* always pass.
* Useful for testing the newstyle APIfrom ooni import nettest
class SimpleTest(nettest.TestCase):
inputs = range(1,100)
optParameters = [['asset', 'a', None, 'Asset file'],
['controlserver', 'c', 'google.com', 'Specify the control server'],
['resume', 'r', 0, 'Resume at this index'],
['other', 'o', None, 'Other arguments']]
def test_foo(self, *arg, **kw):
print "Running %s with %s" % ("test_foo", self.input)
self.report['test_foo'] = 'Antani'
self.report['shared'] = "sblinda"
self.assertEqual(1,1)
def test_f4oo(self, *arg, **kw):
print "Running %s with %s" % ("test_f4oo", self.input)
self.report['test_f4oo'] = 'Antani'
self.report['shared'] = "sblinda2"
self.assertEqual(1,1)
|
<commit_before><commit_msg>Add a very simple test that *must* always pass.
* Useful for testing the newstyle API<commit_after>from ooni import nettest
class SimpleTest(nettest.TestCase):
inputs = range(1,100)
optParameters = [['asset', 'a', None, 'Asset file'],
['controlserver', 'c', 'google.com', 'Specify the control server'],
['resume', 'r', 0, 'Resume at this index'],
['other', 'o', None, 'Other arguments']]
def test_foo(self, *arg, **kw):
print "Running %s with %s" % ("test_foo", self.input)
self.report['test_foo'] = 'Antani'
self.report['shared'] = "sblinda"
self.assertEqual(1,1)
def test_f4oo(self, *arg, **kw):
print "Running %s with %s" % ("test_f4oo", self.input)
self.report['test_f4oo'] = 'Antani'
self.report['shared'] = "sblinda2"
self.assertEqual(1,1)
|
|
a91f69e2c7618f1a0bcfd6b82004186ec949fbff
|
oneflow/settings/snippets/djdt.py
|
oneflow/settings/snippets/djdt.py
|
# Debug-toolbar related
INSTALLED_APPS += ('debug_toolbar', )
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware', )
INTERNAL_IPS = (
'127.0.0.1',
# leto.licorn.org
'82.236.133.193',
)
DEBUG_TOOLBAR_PANELS = (
'debug_toolbar.panels.request_vars.RequestVarsDebugPanel',
'debug_toolbar.panels.headers.HeaderDebugPanel',
'debug_toolbar.panels.template.TemplateDebugPanel',
'debug_toolbar.panels.logger.LoggingPanel',
'debug_toolbar.panels.sql.SQLDebugPanel',
'debug_toolbar.panels.timer.TimerDebugPanel',
'debug_toolbar.panels.signals.SignalDebugPanel',
'debug_toolbar.panels.settings_vars.SettingsVarsDebugPanel',
'debug_toolbar.panels.version.VersionDebugPanel',
)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'ENABLE_STACKTRACES' : True,
#'SHOW_TOOLBAR_CALLBACK': custom_show_toolbar,
#'EXTRA_SIGNALS': ['myproject.signals.MySignal'],
#'HIDE_DJANGO_SQL': False,
#'TAG': 'div',
}
|
# Debug-toolbar related
INSTALLED_APPS += ('debug_toolbar', )
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware', )
INTERNAL_IPS = (
'127.0.0.1',
# gurney.licorn.org
'109.190.93.141',
# my LAN
'192.168.111.23',
'192.168.111.111',
)
DEBUG_TOOLBAR_PANELS = (
'debug_toolbar.panels.request_vars.RequestVarsDebugPanel',
'debug_toolbar.panels.headers.HeaderDebugPanel',
'debug_toolbar.panels.template.TemplateDebugPanel',
'debug_toolbar.panels.logger.LoggingPanel',
'debug_toolbar.panels.sql.SQLDebugPanel',
'debug_toolbar.panels.timer.TimerDebugPanel',
'debug_toolbar.panels.signals.SignalDebugPanel',
'debug_toolbar.panels.settings_vars.SettingsVarsDebugPanel',
'debug_toolbar.panels.version.VersionDebugPanel',
)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'ENABLE_STACKTRACES' : True,
#'SHOW_TOOLBAR_CALLBACK': custom_show_toolbar,
#'EXTRA_SIGNALS': ['myproject.signals.MySignal'],
#'HIDE_DJANGO_SQL': False,
#'TAG': 'div',
}
|
Fix the django debug toolbar not appearing and the user echo thing slowing my page loads in development.
|
Fix the django debug toolbar not appearing and the user echo thing slowing my page loads in development.
|
Python
|
agpl-3.0
|
1flow/1flow,WillianPaiva/1flow,1flow/1flow,1flow/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow
|
# Debug-toolbar related
INSTALLED_APPS += ('debug_toolbar', )
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware', )
INTERNAL_IPS = (
'127.0.0.1',
# leto.licorn.org
'82.236.133.193',
)
DEBUG_TOOLBAR_PANELS = (
'debug_toolbar.panels.request_vars.RequestVarsDebugPanel',
'debug_toolbar.panels.headers.HeaderDebugPanel',
'debug_toolbar.panels.template.TemplateDebugPanel',
'debug_toolbar.panels.logger.LoggingPanel',
'debug_toolbar.panels.sql.SQLDebugPanel',
'debug_toolbar.panels.timer.TimerDebugPanel',
'debug_toolbar.panels.signals.SignalDebugPanel',
'debug_toolbar.panels.settings_vars.SettingsVarsDebugPanel',
'debug_toolbar.panels.version.VersionDebugPanel',
)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'ENABLE_STACKTRACES' : True,
#'SHOW_TOOLBAR_CALLBACK': custom_show_toolbar,
#'EXTRA_SIGNALS': ['myproject.signals.MySignal'],
#'HIDE_DJANGO_SQL': False,
#'TAG': 'div',
}
Fix the django debug toolbar not appearing and the user echo thing slowing my page loads in development.
|
# Debug-toolbar related
INSTALLED_APPS += ('debug_toolbar', )
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware', )
INTERNAL_IPS = (
'127.0.0.1',
# gurney.licorn.org
'109.190.93.141',
# my LAN
'192.168.111.23',
'192.168.111.111',
)
DEBUG_TOOLBAR_PANELS = (
'debug_toolbar.panels.request_vars.RequestVarsDebugPanel',
'debug_toolbar.panels.headers.HeaderDebugPanel',
'debug_toolbar.panels.template.TemplateDebugPanel',
'debug_toolbar.panels.logger.LoggingPanel',
'debug_toolbar.panels.sql.SQLDebugPanel',
'debug_toolbar.panels.timer.TimerDebugPanel',
'debug_toolbar.panels.signals.SignalDebugPanel',
'debug_toolbar.panels.settings_vars.SettingsVarsDebugPanel',
'debug_toolbar.panels.version.VersionDebugPanel',
)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'ENABLE_STACKTRACES' : True,
#'SHOW_TOOLBAR_CALLBACK': custom_show_toolbar,
#'EXTRA_SIGNALS': ['myproject.signals.MySignal'],
#'HIDE_DJANGO_SQL': False,
#'TAG': 'div',
}
|
<commit_before># Debug-toolbar related
INSTALLED_APPS += ('debug_toolbar', )
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware', )
INTERNAL_IPS = (
'127.0.0.1',
# leto.licorn.org
'82.236.133.193',
)
DEBUG_TOOLBAR_PANELS = (
'debug_toolbar.panels.request_vars.RequestVarsDebugPanel',
'debug_toolbar.panels.headers.HeaderDebugPanel',
'debug_toolbar.panels.template.TemplateDebugPanel',
'debug_toolbar.panels.logger.LoggingPanel',
'debug_toolbar.panels.sql.SQLDebugPanel',
'debug_toolbar.panels.timer.TimerDebugPanel',
'debug_toolbar.panels.signals.SignalDebugPanel',
'debug_toolbar.panels.settings_vars.SettingsVarsDebugPanel',
'debug_toolbar.panels.version.VersionDebugPanel',
)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'ENABLE_STACKTRACES' : True,
#'SHOW_TOOLBAR_CALLBACK': custom_show_toolbar,
#'EXTRA_SIGNALS': ['myproject.signals.MySignal'],
#'HIDE_DJANGO_SQL': False,
#'TAG': 'div',
}
<commit_msg>Fix the django debug toolbar not appearing and the user echo thing slowing my page loads in development.<commit_after>
|
# Debug-toolbar related
INSTALLED_APPS += ('debug_toolbar', )
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware', )
INTERNAL_IPS = (
'127.0.0.1',
# gurney.licorn.org
'109.190.93.141',
# my LAN
'192.168.111.23',
'192.168.111.111',
)
DEBUG_TOOLBAR_PANELS = (
'debug_toolbar.panels.request_vars.RequestVarsDebugPanel',
'debug_toolbar.panels.headers.HeaderDebugPanel',
'debug_toolbar.panels.template.TemplateDebugPanel',
'debug_toolbar.panels.logger.LoggingPanel',
'debug_toolbar.panels.sql.SQLDebugPanel',
'debug_toolbar.panels.timer.TimerDebugPanel',
'debug_toolbar.panels.signals.SignalDebugPanel',
'debug_toolbar.panels.settings_vars.SettingsVarsDebugPanel',
'debug_toolbar.panels.version.VersionDebugPanel',
)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'ENABLE_STACKTRACES' : True,
#'SHOW_TOOLBAR_CALLBACK': custom_show_toolbar,
#'EXTRA_SIGNALS': ['myproject.signals.MySignal'],
#'HIDE_DJANGO_SQL': False,
#'TAG': 'div',
}
|
# Debug-toolbar related
INSTALLED_APPS += ('debug_toolbar', )
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware', )
INTERNAL_IPS = (
'127.0.0.1',
# leto.licorn.org
'82.236.133.193',
)
DEBUG_TOOLBAR_PANELS = (
'debug_toolbar.panels.request_vars.RequestVarsDebugPanel',
'debug_toolbar.panels.headers.HeaderDebugPanel',
'debug_toolbar.panels.template.TemplateDebugPanel',
'debug_toolbar.panels.logger.LoggingPanel',
'debug_toolbar.panels.sql.SQLDebugPanel',
'debug_toolbar.panels.timer.TimerDebugPanel',
'debug_toolbar.panels.signals.SignalDebugPanel',
'debug_toolbar.panels.settings_vars.SettingsVarsDebugPanel',
'debug_toolbar.panels.version.VersionDebugPanel',
)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'ENABLE_STACKTRACES' : True,
#'SHOW_TOOLBAR_CALLBACK': custom_show_toolbar,
#'EXTRA_SIGNALS': ['myproject.signals.MySignal'],
#'HIDE_DJANGO_SQL': False,
#'TAG': 'div',
}
Fix the django debug toolbar not appearing and the user echo thing slowing my page loads in development.# Debug-toolbar related
INSTALLED_APPS += ('debug_toolbar', )
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware', )
INTERNAL_IPS = (
'127.0.0.1',
# gurney.licorn.org
'109.190.93.141',
# my LAN
'192.168.111.23',
'192.168.111.111',
)
DEBUG_TOOLBAR_PANELS = (
'debug_toolbar.panels.request_vars.RequestVarsDebugPanel',
'debug_toolbar.panels.headers.HeaderDebugPanel',
'debug_toolbar.panels.template.TemplateDebugPanel',
'debug_toolbar.panels.logger.LoggingPanel',
'debug_toolbar.panels.sql.SQLDebugPanel',
'debug_toolbar.panels.timer.TimerDebugPanel',
'debug_toolbar.panels.signals.SignalDebugPanel',
'debug_toolbar.panels.settings_vars.SettingsVarsDebugPanel',
'debug_toolbar.panels.version.VersionDebugPanel',
)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'ENABLE_STACKTRACES' : True,
#'SHOW_TOOLBAR_CALLBACK': custom_show_toolbar,
#'EXTRA_SIGNALS': ['myproject.signals.MySignal'],
#'HIDE_DJANGO_SQL': False,
#'TAG': 'div',
}
|
<commit_before># Debug-toolbar related
INSTALLED_APPS += ('debug_toolbar', )
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware', )
INTERNAL_IPS = (
'127.0.0.1',
# leto.licorn.org
'82.236.133.193',
)
DEBUG_TOOLBAR_PANELS = (
'debug_toolbar.panels.request_vars.RequestVarsDebugPanel',
'debug_toolbar.panels.headers.HeaderDebugPanel',
'debug_toolbar.panels.template.TemplateDebugPanel',
'debug_toolbar.panels.logger.LoggingPanel',
'debug_toolbar.panels.sql.SQLDebugPanel',
'debug_toolbar.panels.timer.TimerDebugPanel',
'debug_toolbar.panels.signals.SignalDebugPanel',
'debug_toolbar.panels.settings_vars.SettingsVarsDebugPanel',
'debug_toolbar.panels.version.VersionDebugPanel',
)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'ENABLE_STACKTRACES' : True,
#'SHOW_TOOLBAR_CALLBACK': custom_show_toolbar,
#'EXTRA_SIGNALS': ['myproject.signals.MySignal'],
#'HIDE_DJANGO_SQL': False,
#'TAG': 'div',
}
<commit_msg>Fix the django debug toolbar not appearing and the user echo thing slowing my page loads in development.<commit_after># Debug-toolbar related
INSTALLED_APPS += ('debug_toolbar', )
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware', )
INTERNAL_IPS = (
'127.0.0.1',
# gurney.licorn.org
'109.190.93.141',
# my LAN
'192.168.111.23',
'192.168.111.111',
)
DEBUG_TOOLBAR_PANELS = (
'debug_toolbar.panels.request_vars.RequestVarsDebugPanel',
'debug_toolbar.panels.headers.HeaderDebugPanel',
'debug_toolbar.panels.template.TemplateDebugPanel',
'debug_toolbar.panels.logger.LoggingPanel',
'debug_toolbar.panels.sql.SQLDebugPanel',
'debug_toolbar.panels.timer.TimerDebugPanel',
'debug_toolbar.panels.signals.SignalDebugPanel',
'debug_toolbar.panels.settings_vars.SettingsVarsDebugPanel',
'debug_toolbar.panels.version.VersionDebugPanel',
)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'ENABLE_STACKTRACES' : True,
#'SHOW_TOOLBAR_CALLBACK': custom_show_toolbar,
#'EXTRA_SIGNALS': ['myproject.signals.MySignal'],
#'HIDE_DJANGO_SQL': False,
#'TAG': 'div',
}
|
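A minimal sketch of the SHOW_TOOLBAR_CALLBACK hook referenced by the commented-out config key above, assuming it lives in the same settings snippet where DEBUG is defined; the staff-only rule is illustrative:
def custom_show_toolbar(request):
    # Show the toolbar for staff users only, regardless of INTERNAL_IPS.
    return DEBUG and request.user.is_staff

DEBUG_TOOLBAR_CONFIG['SHOW_TOOLBAR_CALLBACK'] = custom_show_toolbar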
bda6a897511ab3312da30ab71b60797f53f7a73a
|
indico/modules/events/sessions/views.py
|
indico/modules/events/sessions/views.py
|
# This file is part of Indico.
# Copyright (C) 2002 - 2015 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.base import WPJinjaMixin
from MaKaC.webinterface.pages.conferences import WPConferenceModifBase
class WPManageSessions(WPJinjaMixin, WPConferenceModifBase):
template_prefix = 'events/sessions/'
sidemenu_option = 'sessions'
def getJSFiles(self):
return WPConferenceModifBase.getJSFiles(self) + self._asset_env['modules_sessions_js'].urls()
def getCSSFiles(self):
return WPConferenceModifBase.getCSSFiles(self) + self._asset_env['sessions_sass'].urls()
|
Add view class for the sessions module
|
Add view class for the sessions module
|
Python
|
mit
|
OmeGak/indico,mvidalgarcia/indico,DirkHoffmann/indico,pferreir/indico,pferreir/indico,mvidalgarcia/indico,ThiefMaster/indico,ThiefMaster/indico,OmeGak/indico,OmeGak/indico,mvidalgarcia/indico,mic4ael/indico,indico/indico,indico/indico,pferreir/indico,pferreir/indico,OmeGak/indico,ThiefMaster/indico,DirkHoffmann/indico,indico/indico,mic4ael/indico,DirkHoffmann/indico,mic4ael/indico,ThiefMaster/indico,mvidalgarcia/indico,indico/indico,DirkHoffmann/indico,mic4ael/indico
|
Add view class for the sessions module
|
# This file is part of Indico.
# Copyright (C) 2002 - 2015 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.base import WPJinjaMixin
from MaKaC.webinterface.pages.conferences import WPConferenceModifBase
class WPManageSessions(WPJinjaMixin, WPConferenceModifBase):
template_prefix = 'events/sessions/'
sidemenu_option = 'sessions'
def getJSFiles(self):
return WPConferenceModifBase.getJSFiles(self) + self._asset_env['modules_sessions_js'].urls()
def getCSSFiles(self):
return WPConferenceModifBase.getCSSFiles(self) + self._asset_env['sessions_sass'].urls()
|
<commit_before><commit_msg>Add view class for the sessions module<commit_after>
|
# This file is part of Indico.
# Copyright (C) 2002 - 2015 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.base import WPJinjaMixin
from MaKaC.webinterface.pages.conferences import WPConferenceModifBase
class WPManageSessions(WPJinjaMixin, WPConferenceModifBase):
template_prefix = 'events/sessions/'
sidemenu_option = 'sessions'
def getJSFiles(self):
return WPConferenceModifBase.getJSFiles(self) + self._asset_env['modules_sessions_js'].urls()
def getCSSFiles(self):
return WPConferenceModifBase.getCSSFiles(self) + self._asset_env['sessions_sass'].urls()
|
Add view class for the sessions module# This file is part of Indico.
# Copyright (C) 2002 - 2015 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.base import WPJinjaMixin
from MaKaC.webinterface.pages.conferences import WPConferenceModifBase
class WPManageSessions(WPJinjaMixin, WPConferenceModifBase):
template_prefix = 'events/sessions/'
sidemenu_option = 'sessions'
def getJSFiles(self):
return WPConferenceModifBase.getJSFiles(self) + self._asset_env['modules_sessions_js'].urls()
def getCSSFiles(self):
return WPConferenceModifBase.getCSSFiles(self) + self._asset_env['sessions_sass'].urls()
|
<commit_before><commit_msg>Add view class for the sessions module<commit_after># This file is part of Indico.
# Copyright (C) 2002 - 2015 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.base import WPJinjaMixin
from MaKaC.webinterface.pages.conferences import WPConferenceModifBase
class WPManageSessions(WPJinjaMixin, WPConferenceModifBase):
template_prefix = 'events/sessions/'
sidemenu_option = 'sessions'
def getJSFiles(self):
return WPConferenceModifBase.getJSFiles(self) + self._asset_env['modules_sessions_js'].urls()
def getCSSFiles(self):
return WPConferenceModifBase.getCSSFiles(self) + self._asset_env['sessions_sass'].urls()
|
|
1aacd7d5e7403d3c3732a54e5f6111349436e00d
|
code/windingvstime.py
|
code/windingvstime.py
|
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
from mpl_toolkits.mplot3d import Axes3D
import sys
import numpy.ma as ma
fuz = 1/4.;
if (len(sys.argv) < 2) :
print("Usage: \n dataplot.py path_to_binfile [clamp value]")
sys.exit()
elif (len(sys.argv) > 2) :
fuz = float(sys.argv[2])
binfile = sys.argv[1]
linedata = []
for i in np.linspace(140,33,15) :
lengthdata=np.fromfile(binfile+str(int(i))+'_lengths.bin', dtype="float32")
minLength = 500.0
winddata=np.fromfile(binfile+str(int(i))+'_windings.bin', dtype="float32")
datasize = int(np.sqrt(lengthdata.shape[0]))
lengthdata=lengthdata.reshape(datasize, datasize)
winddata=winddata.reshape(datasize, datasize)
masked= ma.masked_where(lengthdata<minLength,winddata)
masked = masked.filled(0)
linedata.append((masked[datasize/2,:],str(int(i))))
fig = plt.figure()
ax = fig.add_subplot(111)
matplotlib.rcParams['legend.numpoints'] = 1
cmap = matplotlib.cm.get_cmap('jet')
# Did you know this can also be done without a for loop?
for dataset,i in linedata :
x = np.linspace(0,np.pi,datasize)
ax.plot(x,dataset,'.',label='t='+i, color=cmap(int(i)/140.))
ax.set_ylim([-1,-0.5])
ax.set_xlim([x[100],x[800]])
ax.set_ylabel('Winding number')
ax.set_xlabel(r'$\sim r$')
plt.legend(loc='right')
plt.show()
|
Make a plot as a function of time for some datasets.
|
Make a plot as a function of time for some datasets.
|
Python
|
mit
|
TAdeJong/plasma-analysis,TAdeJong/plasma-analysis
|
Make a plot as a function of time for some datasets.
|
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
from mpl_toolkits.mplot3d import Axes3D
import sys
import numpy.ma as ma
fuz = 1/4.;
if (len(sys.argv) < 2) :
print("Usage: \n dataplot.py path_to_binfile [clamp value]")
sys.exit()
elif (len(sys.argv) > 2) :
fuz = float(sys.argv[2])
binfile = sys.argv[1]
linedata = []
for i in np.linspace(140,33,15) :
lengthdata=np.fromfile(binfile+str(int(i))+'_lengths.bin', dtype="float32")
minLength = 500.0
winddata=np.fromfile(binfile+str(int(i))+'_windings.bin', dtype="float32")
datasize = int(np.sqrt(lengthdata.shape[0]))
lengthdata=lengthdata.reshape(datasize, datasize)
winddata=winddata.reshape(datasize, datasize)
masked= ma.masked_where(lengthdata<minLength,winddata)
masked = masked.filled(0)
linedata.append((masked[datasize/2,:],str(int(i))))
fig = plt.figure()
ax = fig.add_subplot(111)
matplotlib.rcParams['legend.numpoints'] = 1
cmap = matplotlib.cm.get_cmap('jet')
# Did you know this can also be done without a for loop?
for dataset,i in linedata :
x = np.linspace(0,np.pi,datasize)
ax.plot(x,dataset,'.',label='t='+i, color=cmap(int(i)/140.))
ax.set_ylim([-1,-0.5])
ax.set_xlim([x[100],x[800]])
ax.set_ylabel('Winding number')
ax.set_xlabel(r'$\sim r$')
plt.legend(loc='right')
plt.show()
|
<commit_before><commit_msg>Make a plot as a function of time for some datasets.<commit_after>
|
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
from mpl_toolkits.mplot3d import Axes3D
import sys
import numpy.ma as ma
fuz = 1/4.;
if (len(sys.argv) < 2) :
print("Usage: \n dataplot.py path_to_binfile [clamp value]")
sys.exit()
elif (len(sys.argv) > 2) :
fuz = float(sys.argv[2])
binfile = sys.argv[1]
linedata = []
for i in np.linspace(140,33,15) :
lengthdata=np.fromfile(binfile+str(int(i))+'_lengths.bin', dtype="float32")
minLength = 500.0
winddata=np.fromfile(binfile+str(int(i))+'_windings.bin', dtype="float32")
datasize = int(np.sqrt(lengthdata.shape[0]))
lengthdata=lengthdata.reshape(datasize, datasize)
winddata=winddata.reshape(datasize, datasize)
masked= ma.masked_where(lengthdata<minLength,winddata)
masked = masked.filled(0)
linedata.append((masked[datasize/2,:],str(int(i))))
fig = plt.figure()
ax = fig.add_subplot(111)
matplotlib.rcParams['legend.numpoints'] = 1
cmap = matplotlib.cm.get_cmap('jet')
# Did you know this can also be done without a for loop?
for dataset,i in linedata :
x = np.linspace(0,np.pi,datasize)
ax.plot(x,dataset,'.',label='t='+i, color=cmap(int(i)/140.))
ax.set_ylim([-1,-0.5])
ax.set_xlim([x[100],x[800]])
ax.set_ylabel('Winding number')
ax.set_xlabel(r'$\sim r$')
plt.legend(loc='right')
plt.show()
|
Make a plot as a function of time for some datasets.import numpy as np
import matplotlib.pyplot as plt
import matplotlib
from mpl_toolkits.mplot3d import Axes3D
import sys
import numpy.ma as ma
fuz = 1/4.;
if (len(sys.argv) < 2) :
print("Usage: \n dataplot.py path_to_binfile [clamp value]")
sys.exit()
elif (len(sys.argv) > 2) :
fuz = float(sys.argv[2])
binfile = sys.argv[1]
linedata = []
for i in np.linspace(140,33,15) :
lengthdata=np.fromfile(binfile+str(int(i))+'_lengths.bin', dtype="float32")
minLength = 500.0
winddata=np.fromfile(binfile+str(int(i))+'_windings.bin', dtype="float32")
datasize = int(np.sqrt(lengthdata.shape[0]))
lengthdata=lengthdata.reshape(datasize, datasize)
winddata=winddata.reshape(datasize, datasize)
masked= ma.masked_where(lengthdata<minLength,winddata)
masked = masked.filled(0)
linedata.append((masked[datasize/2,:],str(int(i))))
fig = plt.figure()
ax = fig.add_subplot(111)
matplotlib.rcParams['legend.numpoints'] = 1
cmap = matplotlib.cm.get_cmap('jet')
# Did you know this can also be done without a for loop?
for dataset,i in linedata :
x = np.linspace(0,np.pi,datasize)
ax.plot(x,dataset,'.',label='t='+i, color=cmap(int(i)/140.))
ax.set_ylim([-1,-0.5])
ax.set_xlim([x[100],x[800]])
ax.set_ylabel('Winding number')
ax.set_xlabel(r'$\sim r$')
plt.legend(loc='right')
plt.show()
|
<commit_before><commit_msg>Make a plot as a function of time for some datasets.<commit_after>import numpy as np
import matplotlib.pyplot as plt
import matplotlib
from mpl_toolkits.mplot3d import Axes3D
import sys
import numpy.ma as ma
fuz = 1/4.;
if (len(sys.argv) < 2) :
print("Usage: \n dataplot.py path_to_binfile [clamp value]")
sys.exit()
elif (len(sys.argv) > 2) :
fuz = float(sys.argv[2])
binfile = sys.argv[1]
linedata = []
for i in np.linspace(140,33,15) :
lengthdata=np.fromfile(binfile+str(int(i))+'_lengths.bin', dtype="float32")
minLength = 500.0
winddata=np.fromfile(binfile+str(int(i))+'_windings.bin', dtype="float32")
datasize = int(np.sqrt(lengthdata.shape[0]))
lengthdata=lengthdata.reshape(datasize, datasize)
winddata=winddata.reshape(datasize, datasize)
masked= ma.masked_where(lengthdata<minLength,winddata)
masked = masked.filled(0)
linedata.append((masked[datasize/2,:],str(int(i))))
fig = plt.figure()
ax = fig.add_subplot(111)
matplotlib.rcParams['legend.numpoints'] = 1
cmap = matplotlib.cm.get_cmap('jet')
# Did you know this can also be done without a for loop?
for dataset,i in linedata :
x = np.linspace(0,np.pi,datasize)
ax.plot(x,dataset,'.',label='t='+i, color=cmap(int(i)/140.))
ax.set_ylim([-1,-0.5])
ax.set_xlim([x[100],x[800]])
ax.set_ylabel('Winding number')
ax.set_xlabel(r'$\sim r$')
plt.legend(loc='right')
plt.show()
|
|
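On the "without a for loop" comment above: a minimal sketch of the vectorized alternative, assuming linedata holds the (values, label) tuples built earlier; ax.plot() draws one line per column of a 2D array, so only the labelling remains a loop:
import numpy as np

arr = np.column_stack([values for values, _ in linedata])  # shape (datasize, n_sets)
lines = ax.plot(x, arr, '.')                               # one call, one line per column
for line, (_, label) in zip(lines, linedata):
    line.set_label('t=' + label)
    line.set_color(cmap(int(label) / 140.))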
cf73f1c3ac17ca163cb7c930e3024ffd1994cef1
|
odm/catalogs/CatalogReader.py
|
odm/catalogs/CatalogReader.py
|
class CatalogReader(object):
"""
Basic "interface" to the ODM catalog readers.
Resembles the structure of CKAN harvesters, for later CKAN imports.
"""
def info(self):
pass
def gather(self):
pass
def fetch(self, data_dict):
pass
def import_data(self, data_dict):
pass
def read_data(self):
"""Get the data as a list of python dicts. """
g = self.gather()
f = map(self.fetch, g)
i = map(self.import_data, f)
return i
|
Add outline for catalog readers
|
Add outline for catalog readers
|
Python
|
mit
|
SebastianBerchtold/odm-catalogreaders,mattfullerton/odm-catalogreaders
|
Add outline for catalog readers
|
class CatalogReader(object):
"""
Basic "interface" to the ODM catalog readers.
Resembles the structure of CKAN harvesters, for later CKAN imports.
"""
def info(self):
pass
def gather(self):
pass
def fetch(self, data_dict):
pass
def import_data(self, data_dict):
pass
def read_data(self):
"""Get the data as a list of python dicts. """
g = self.gather()
f = map(self.fetch, g)
i = map(self.import_data, f)
return i
|
<commit_before><commit_msg>Add outline for catalog readers<commit_after>
|
class CatalogReader(object):
"""
Basic "interface" to the ODM catalog readers.
Resembles the structure of CKAN harvesters, for later CKAN imports.
"""
def info(self):
pass
def gather(self):
pass
def fetch(self, data_dict):
pass
def import_data(self, data_dict):
pass
def read_data(self):
"""Get the data as a list of python dicts. """
g = self.gather()
f = map(self.fetch, g)
i = map(self.import_data, f)
return i
|
Add outline for catalog readersclass CatalogReader(object):
"""
Basic "interface" to the ODM catalog readers.
Resembles the structure of CKAN harvesters, for later CKAN imports.
"""
def info(self):
pass
def gather(self):
pass
def fetch(self, data_dict):
pass
def import_data(self, data_dict):
pass
def read_data(self):
"""Get the data as a list of python dicts. """
g = self.gather()
f = map(self.fetch, g)
i = map(self.import_data, f)
return i
|
<commit_before><commit_msg>Add outline for catalog readers<commit_after>class CatalogReader(object):
"""
Basic "interface" to the ODM catalog readers.
Resembles the structure of CKAN harvesters, for later CKAN imports.
"""
def info(self):
pass
def gather(self):
pass
def fetch(self, data_dict):
pass
def import_data(self, data_dict):
pass
def read_data(self):
"""Get the data as a list of python dicts. """
g = self.gather()
f = map(self.fetch, g)
i = map(self.import_data, f)
return i
|
|
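To make the harvester-style contract concrete, a minimal sketch of a subclass; the catalog name and dataset ids are hypothetical, not taken from any real ODM reader:
class ExampleCatalogReader(CatalogReader):
    def info(self):
        return {'name': 'example', 'title': 'Example catalog'}

    def gather(self):
        # Normally: list dataset identifiers from the remote catalog.
        return ['dataset-1', 'dataset-2']

    def fetch(self, data_dict):
        # Normally: download the metadata for one identifier.
        return {'id': data_dict}

    def import_data(self, data_dict):
        # Map the raw metadata into the dict shape downstream code expects.
        return {'name': data_dict['id']}

ExampleCatalogReader().read_data()  # list of normalized dicts, one per gathered id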
1b13a69b888d3cc925eae357f1959a6f420e5df5
|
remap_authors.py
|
remap_authors.py
|
#!/usr/bin/python3
import sys
import json
if len(sys.argv) < 4:
print("Usage: ./remap_authors.py <input_journal> <old_authors.json> <new_authors.json>")
print("The input journal will have its author id's updated to the ids of the same authors in <new_authors.json>")
sys.exit(1)
# Build a map of author name to the new id
author_ids = {}
with open(sys.argv[3], "r") as f:
authors = json.load(f)
for i, a in enumerate(authors):
if a["name"] not in author_ids:
author_ids[a["name"]] = i
journal = {}
with open(sys.argv[1], "r") as jf, open(sys.argv[2], "r") as old_auth:
old_authors = json.load(old_auth)
journal = json.load(jf)
# Go through and update each author in the journal
for i, a in enumerate(journal["authors"]):
name = old_authors[a]["name"]
new_id = author_ids[name]
print("Remapping {} from id {} to new id {}".format(name, a, new_id))
journal["authors"][i] = new_id
# Go through and update each article as well
for article in journal["articles"]:
for i, a in enumerate(article["authors"]):
name = old_authors[a]["name"]
new_id = author_ids[name]
print("Remapping journal author {} from id {} to new id {}".format(name, a, new_id))
article["authors"][i] = new_id
with open(sys.argv[1], "w") as fp:
json.dump(journal, fp, indent=4)
|
Add author remapping script to merge different datasets
|
Add author remapping script to merge different datasets
|
Python
|
mit
|
Twinklebear/dataviscourse-pr-collaboration-networks,Twinklebear/dataviscourse-pr-collaboration-networks,Twinklebear/dataviscourse-pr-collaboration-networks
|
Add author remapping script to merge different datasets
|
#!/usr/bin/python3
import sys
import json
if len(sys.argv) < 4:
print("Usage: ./remap_authors.py <input_journal> <old_authors.json> <new_authors.json>")
print("The input journal will have its author id's updated to the ids of the same authors in <new_authors.json>")
sys.exit(1)
# Build a map of author name to the new id
author_ids = {}
with open(sys.argv[3], "r") as f:
authors = json.load(f)
for i, a in enumerate(authors):
if a["name"] not in author_ids:
author_ids[a["name"]] = i
journal = {}
with open(sys.argv[1], "r") as jf, open(sys.argv[2], "r") as old_auth:
old_authors = json.load(old_auth)
journal = json.load(jf)
# Go through and update each author in the journal
for i, a in enumerate(journal["authors"]):
name = old_authors[a]["name"]
new_id = author_ids[name]
print("Remapping {} from id {} to new id {}".format(name, a, new_id))
journal["authors"][i] = new_id
# Go through and update each article as well
for article in journal["articles"]:
for i, a in enumerate(article["authors"]):
name = old_authors[a]["name"]
new_id = author_ids[name]
print("Remapping journal author {} from id {} to new id {}".format(name, a, new_id))
article["authors"][i] = new_id
with open(sys.argv[1], "w") as fp:
json.dump(journal, fp, indent=4)
|
<commit_before><commit_msg>Add author remapping script to merge different datasets<commit_after>
|
#!/usr/bin/python3
import sys
import json
if len(sys.argv) < 4:
print("Usage: ./remap_authors.py <input_journal> <old_authors.json> <new_authors.json>")
print("The input journal will have its author id's updated to the ids of the same authors in <new_authors.json>")
sys.exit(1)
# Build a map of author name to the new id
author_ids = {}
with open(sys.argv[3], "r") as f:
authors = json.load(f)
for i, a in enumerate(authors):
if a["name"] not in author_ids:
author_ids[a["name"]] = i
journal = {}
with open(sys.argv[1], "r") as jf, open(sys.argv[2], "r") as old_auth:
old_authors = json.load(old_auth)
journal = json.load(jf)
# Go through and update each author in the journal
for i, a in enumerate(journal["authors"]):
name = old_authors[a]["name"]
new_id = author_ids[name]
print("Remapping {} from id {} to new id {}".format(name, a, new_id))
journal["authors"][i] = new_id
# Go through and update each article as well
for article in journal["articles"]:
for i, a in enumerate(article["authors"]):
name = old_authors[a]["name"]
new_id = author_ids[name]
print("Remapping journal author {} from id {} to new id {}".format(name, a, new_id))
article["authors"][i] = new_id
with open(sys.argv[1], "w") as fp:
json.dump(journal, fp, indent=4)
|
Add author remapping script to merge different datasets#!/usr/bin/python3
import sys
import json
if len(sys.argv) < 4:
print("Usage: ./remap_authors.py <input_journal> <old_authors.json> <new_authors.json>")
print("The input journal will have its author id's updated to the ids of the same authors in <new_authors.json>")
sys.exit(1)
# Build a map of author name to the new id
author_ids = {}
with open(sys.argv[3], "r") as f:
authors = json.load(f)
for i, a in enumerate(authors):
if a["name"] not in author_ids:
author_ids[a["name"]] = i
journal = {}
with open(sys.argv[1], "r") as jf, open(sys.argv[2], "r") as old_auth:
old_authors = json.load(old_auth)
journal = json.load(jf)
# Go through and update each author in the journal
for i, a in enumerate(journal["authors"]):
name = old_authors[a]["name"]
new_id = author_ids[name]
print("Remapping {} from id {} to new id {}".format(name, a, new_id))
journal["authors"][i] = new_id
# Go through and update each article as well
for article in journal["articles"]:
for i, a in enumerate(article["authors"]):
name = old_authors[a]["name"]
new_id = author_ids[name]
print("Remapping journal author {} from id {} to new id {}".format(name, a, new_id))
article["authors"][i] = new_id
with open(sys.argv[1], "w") as fp:
json.dump(journal, fp, indent=4)
|
<commit_before><commit_msg>Add author remapping script to merge different datasets<commit_after>#!/usr/bin/python3
import sys
import json
if len(sys.argv) < 4:
print("Usage: ./remap_authors.py <input_journal> <old_authors.json> <new_authors.json>")
print("The input journal will have its author id's updated to the ids of the same authors in <new_authors.json>")
sys.exit(1)
# Build a map of author name to the new id
author_ids = {}
with open(sys.argv[3], "r") as f:
authors = json.load(f)
for i, a in enumerate(authors):
if a["name"] not in author_ids:
author_ids[a["name"]] = i
journal = {}
with open(sys.argv[1], "r") as jf, open(sys.argv[2], "r") as old_auth:
old_authors = json.load(old_auth)
journal = json.load(jf)
# Go through and update each author in the journal
for i, a in enumerate(journal["authors"]):
name = old_authors[a]["name"]
new_id = author_ids[name]
print("Remapping {} from id {} to new id {}".format(name, a, new_id))
journal["authors"][i] = new_id
# Go through and update each article as well
for article in journal["articles"]:
for i, a in enumerate(article["authors"]):
name = old_authors[a]["name"]
new_id = author_ids[name]
print("Remapping journal author {} from id {} to new id {}".format(name, a, new_id))
article["authors"][i] = new_id
with open(sys.argv[1], "w") as fp:
json.dump(journal, fp, indent=4)
|
|
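For reference, a minimal end-to-end sketch of driving remap_authors.py with tiny fixture files. The file names and author records below are invented for illustration, and the script is assumed to be executable in the working directory.
import json
import subprocess
# Hypothetical fixtures: the same two authors, indexed differently in each dump.
old_authors = [{"name": "Ada Lovelace"}, {"name": "Alan Turing"}]
new_authors = [{"name": "Alan Turing"}, {"name": "Ada Lovelace"}]
journal = {"authors": [0, 1], "articles": [{"authors": [1]}]}
for fname, obj in [("old_authors.json", old_authors),
                   ("new_authors.json", new_authors),
                   ("journal.json", journal)]:
    with open(fname, "w") as fp:
        json.dump(obj, fp)
subprocess.check_call(["./remap_authors.py", "journal.json",
                       "old_authors.json", "new_authors.json"])
# The journal is rewritten in place: authors [0, 1] become [1, 0].
print(json.load(open("journal.json")))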
9bbdd657cddd9d56437d05ab9701aea305af3010
|
embryo-interpolation.py
|
embryo-interpolation.py
|
# IPython log file
image = np.zeros((2048, 2048), dtype=float)
image[0, 0] = 1
np.percentile(image, [0.1, 99.9])
get_ipython().run_line_magic('pinfo', 'np.percentile')
np.stack([np.zeros((3, 2)), np.zeros(3)], axis=1)
from skimage import data
image = data.horse().astype(bool)
plt.imshow(image)
get_ipython().run_line_magic('timeit', 'ndi.distance_transform_edt(~image, return_distances=False,')
get_ipython().run_line_magic('timeit', 'ndi.distance_transform_edt(~image, return_distances=False, return_indices=True)')
x = ndi.distance_transform_edt(~image, return_distances=False, return_indices=True)
x.shape
image_src = np.arange(image.size).reshape(image)
image_src = np.arange(image.size).reshape(image.shape)
image_src[~image] = 0
ws = segmentation.watershed(image, image_src, compactness=1e10)
get_ipython().run_line_magic('timeit', 'segmentation.watershed(image, image_src, compactness=1e10)')
import sys
sys.path.append('/Users/jni/projects/unfold-embryo')
from gala import imio
v = imio.read_h5_stack('embA_0.3um_Probabilities.h5')
get_ipython().run_line_magic('cd', '~/Dropbox/data1/drosophila-embryo/')
v = imio.read_h5_stack('embA_0.3um_Probabilities.h5')
v.shape
v0 = v[..., 0]
smoothed_vm = filters.gaussian(v[..., 0], sigma=4)
b = smoothed_vm > 0.5
b2 = morphology.remove_small_objects(b, 10000)
sys.path.append('/Users/jni/projects/skan')
import unfold
g, idxs, path = unfold.define_mesoderm_axis(b2)
sources, ids, idxs = unfold.source_id_volume(b2, idxs, path)
c0 = unfold.coord0_volume(sources, idxs)
c1 = unfold.coord1_volume(b2)
get_ipython().run_line_magic('ls', '')
image = io.imread('embA_0.3um.tif')
image.shape
channels = [unfold.sample2d(c0, c1, image[..., c])
for c in range(3)]
reload(unfold)
channels = [unfold.sample2d(c0, c1, image[..., c])
for c in range(3)]
|
Add raw embryo interpolation code
|
Add raw embryo interpolation code
|
Python
|
bsd-3-clause
|
jni/useful-histories
|
Add raw embryo interpolation code
|
# IPython log file
image = np.zeros((2048, 2048), dtype=float)
image[0, 0] = 1
np.percentile(image, [0.1, 99.9])
get_ipython().run_line_magic('pinfo', 'np.percentile')
np.stack([np.zeros((3, 2)), np.zeros(3)], axis=1)
from skimage import data
image = data.horse().astype(bool)
plt.imshow(image)
get_ipython().run_line_magic('timeit', 'ndi.distance_transform_edt(~image, return_distances=False,')
get_ipython().run_line_magic('timeit', 'ndi.distance_transform_edt(~image, return_distances=False, return_indices=True)')
x = ndi.distance_transform_edt(~image, return_distances=False, return_indices=True)
x.shape
image_src = np.arange(image.size).reshape(image)
image_src = np.arange(image.size).reshape(image.shape)
image_src[~image] = 0
ws = segmentation.watershed(image, image_src, compactness=1e10)
get_ipython().run_line_magic('timeit', 'segmentation.watershed(image, image_src, compactness=1e10)')
import sys
sys.path.append('/Users/jni/projects/unfold-embryo')
from gala import imio
v = imio.read_h5_stack('embA_0.3um_Probabilities.h5')
get_ipython().run_line_magic('cd', '~/Dropbox/data1/drosophila-embryo/')
v = imio.read_h5_stack('embA_0.3um_Probabilities.h5')
v.shape
v0 = v[..., 0]
smoothed_vm = filters.gaussian(v[..., 0], sigma=4)
b = smoothed_vm > 0.5
b2 = morphology.remove_small_objects(b, 10000)
sys.path.append('/Users/jni/projects/skan')
import unfold
g, idxs, path = unfold.define_mesoderm_axis(b2)
sources, ids, idxs = unfold.source_id_volume(b2, idxs, path)
c0 = unfold.coord0_volume(sources, idxs)
c1 = unfold.coord1_volume(b2)
get_ipython().run_line_magic('ls', '')
image = io.imread('embA_0.3um.tif')
image.shape
channels = [unfold.sample2d(c0, c1, image[..., c])
for c in range(3)]
reload(unfold)
channels = [unfold.sample2d(c0, c1, image[..., c])
for c in range(3)]
|
<commit_before><commit_msg>Add raw embryo interpolation code<commit_after>
|
# IPython log file
image = np.zeros((2048, 2048), dtype=float)
image[0, 0] = 1
np.percentile(image, [0.1, 99.9])
get_ipython().run_line_magic('pinfo', 'np.percentile')
np.stack([np.zeros((3, 2)), np.zeros(3)], axis=1)
from skimage import data
image = data.horse().astype(bool)
plt.imshow(image)
get_ipython().run_line_magic('timeit', 'ndi.distance_transform_edt(~image, return_distances=False,')
get_ipython().run_line_magic('timeit', 'ndi.distance_transform_edt(~image, return_distances=False, return_indices=True)')
x = ndi.distance_transform_edt(~image, return_distances=False, return_indices=True)
x.shape
image_src = np.arange(image.size).reshape(image)
image_src = np.arange(image.size).reshape(image.shape)
image_src[~image] = 0
ws = segmentation.watershed(image, image_src, compactness=1e10)
get_ipython().run_line_magic('timeit', 'segmentation.watershed(image, image_src, compactness=1e10)')
import sys
sys.path.append('/Users/jni/projects/unfold-embryo')
from gala import imio
v = imio.read_h5_stack('embA_0.3um_Probabilities.h5')
get_ipython().run_line_magic('cd', '~/Dropbox/data1/drosophila-embryo/')
v = imio.read_h5_stack('embA_0.3um_Probabilities.h5')
v.shape
v0 = v[..., 0]
smoothed_vm = filters.gaussian(v[..., 0], sigma=4)
b = smoothed_vm > 0.5
b2 = morphology.remove_small_objects(b, 10000)
sys.path.append('/Users/jni/projects/skan')
import unfold
g, idxs, path = unfold.define_mesoderm_axis(b2)
sources, ids, idxs = unfold.source_id_volume(b2, idxs, path)
c0 = unfold.coord0_volume(sources, idxs)
c1 = unfold.coord1_volume(b2)
get_ipython().run_line_magic('ls', '')
image = io.imread('embA_0.3um.tif')
image.shape
channels = [unfold.sample2d(c0, c1, image[..., c])
for c in range(3)]
reload(unfold)
channels = [unfold.sample2d(c0, c1, image[..., c])
for c in range(3)]
|
Add raw embryo interpolation code# IPython log file
image = np.zeros((2048, 2048), dtype=float)
image[0, 0] = 1
np.percentile(image, [0.1, 99.9])
get_ipython().run_line_magic('pinfo', 'np.percentile')
np.stack([np.zeros((3, 2)), np.zeros(3)], axis=1)
from skimage import data
image = data.horse().astype(bool)
plt.imshow(image)
get_ipython().run_line_magic('timeit', 'ndi.distance_transform_edt(~image, return_distances=False,')
get_ipython().run_line_magic('timeit', 'ndi.distance_transform_edt(~image, return_distances=False, return_indices=True)')
x = ndi.distance_transform_edt(~image, return_distances=False, return_indices=True)
x.shape
image_src = np.arange(image.size).reshape(image)
image_src = np.arange(image.size).reshape(image.shape)
image_src[~image] = 0
ws = segmentation.watershed(image, image_src, compactness=1e10)
get_ipython().run_line_magic('timeit', 'segmentation.watershed(image, image_src, compactness=1e10)')
import sys
sys.path.append('/Users/jni/projects/unfold-embryo')
from gala import imio
v = imio.read_h5_stack('embA_0.3um_Probabilities.h5')
get_ipython().run_line_magic('cd', '~/Dropbox/data1/drosophila-embryo/')
v = imio.read_h5_stack('embA_0.3um_Probabilities.h5')
v.shape
v0 = v[..., 0]
smoothed_vm = filters.gaussian(v[..., 0], sigma=4)
b = smoothed_vm > 0.5
b2 = morphology.remove_small_objects(b, 10000)
sys.path.append('/Users/jni/projects/skan')
import unfold
g, idxs, path = unfold.define_mesoderm_axis(b2)
sources, ids, idxs = unfold.source_id_volume(b2, idxs, path)
c0 = unfold.coord0_volume(sources, idxs)
c1 = unfold.coord1_volume(b2)
get_ipython().run_line_magic('ls', '')
image = io.imread('embA_0.3um.tif')
image.shape
channels = [unfold.sample2d(c0, c1, image[..., c])
for c in range(3)]
reload(unfold)
channels = [unfold.sample2d(c0, c1, image[..., c])
for c in range(3)]
|
<commit_before><commit_msg>Add raw embryo interpolation code<commit_after># IPython log file
image = np.zeros((2048, 2048), dtype=float)
image[0, 0] = 1
np.percentile(image, [0.1, 99.9])
get_ipython().run_line_magic('pinfo', 'np.percentile')
np.stack([np.zeros((3, 2)), np.zeros(3)], axis=1)
from skimage import data
image = data.horse().astype(bool)
plt.imshow(image)
get_ipython().run_line_magic('timeit', 'ndi.distance_transform_edt(~image, return_distances=False,')
get_ipython().run_line_magic('timeit', 'ndi.distance_transform_edt(~image, return_distances=False, return_indices=True)')
x = ndi.distance_transform_edt(~image, return_distances=False, return_indices=True)
x.shape
image_src = np.arange(image.size).reshape(image)
image_src = np.arange(image.size).reshape(image.shape)
image_src[~image] = 0
ws = segmentation.watershed(image, image_src, compactness=1e10)
get_ipython().run_line_magic('timeit', 'segmentation.watershed(image, image_src, compactness=1e10)')
import sys
sys.path.append('/Users/jni/projects/unfold-embryo')
from gala import imio
v = imio.read_h5_stack('embA_0.3um_Probabilities.h5')
get_ipython().run_line_magic('cd', '~/Dropbox/data1/drosophila-embryo/')
v = imio.read_h5_stack('embA_0.3um_Probabilities.h5')
v.shape
v0 = v[..., 0]
smoothed_vm = filters.gaussian(v[..., 0], sigma=4)
b = smoothed_vm > 0.5
b2 = morphology.remove_small_objects(b, 10000)
sys.path.append('/Users/jni/projects/skan')
import unfold
g, idxs, path = unfold.define_mesoderm_axis(b2)
sources, ids, idxs = unfold.source_id_volume(b2, idxs, path)
c0 = unfold.coord0_volume(sources, idxs)
c1 = unfold.coord1_volume(b2)
get_ipython().run_line_magic('ls', '')
image = io.imread('embA_0.3um.tif')
image.shape
channels = [unfold.sample2d(c0, c1, image[..., c])
for c in range(3)]
reload(unfold)
channels = [unfold.sample2d(c0, c1, image[..., c])
for c in range(3)]
|
|
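The timings in that log centre on scipy's Euclidean distance transform; as a quick standalone illustration (independent of the embryo data) of what return_indices=True actually yields:
import numpy as np
import scipy.ndimage as ndi
# One foreground pixel; transforming the background maps every pixel to the
# coordinates of its nearest foreground pixel.
mask = np.zeros((5, 5), dtype=bool)
mask[2, 2] = True
indices = ndi.distance_transform_edt(~mask, return_distances=False,
                                     return_indices=True)
print(indices.shape)     # (2, 5, 5): row and column index of the nearest True pixel
print(indices[:, 0, 0])  # -> [2 2]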
a42109b6992023dc31a3f77a173c19065e4c00d5
|
jsonrpcclient/clients/socket_client.py
|
jsonrpcclient/clients/socket_client.py
|
"""
Low-level socket client.
"""
import socket
from typing import Any
from ..client import Client
from ..response import Response
class SocketClient(Client):
"""
Args:
socket: Connected socket.
*args: Passed through to Client class.
**kwargs: Passed through to Client class.
"""
def __init__(
self, socket: socket.socket, *args: Any, **kwargs: Any
) -> None:
super().__init__(*args, **kwargs)
self.socket = socket
def send_message(self, request: str, **kwargs: Any) -> Response:
"""
Transport the message to the server and return the response.
:param request: The JSON-RPC request string.
:return: The response (a string for requests, None for notifications).
"""
payload = str(request) + '\n'
self.socket.send(payload.encode())
        return Response(self.socket.recv(4096).decode())  # socket.recv() requires a bufsize argument
|
Add a low-level socket client
|
Add a low-level socket client
|
Python
|
mit
|
bcb/jsonrpcclient
|
Add a low-level socket client
|
"""
Low-level socket client.
"""
import socket
from typing import Any
from ..client import Client
from ..response import Response
class SocketClient(Client):
"""
Args:
socket: Connected socket.
*args: Passed through to Client class.
**kwargs: Passed through to Client class.
"""
def __init__(
self, socket: socket.socket, *args: Any, **kwargs: Any
) -> None:
super().__init__(*args, **kwargs)
self.socket = socket
def send_message(self, request: str, **kwargs: Any) -> Response:
"""
Transport the message to the server and return the response.
:param request: The JSON-RPC request string.
:return: The response (a string for requests, None for notifications).
"""
payload = str(request) + '\n'
self.socket.send(payload.encode())
        return Response(self.socket.recv(4096).decode())  # socket.recv() requires a bufsize argument
|
<commit_before><commit_msg>Add a low-level socket client<commit_after>
|
"""
Low-level socket client.
"""
import socket
from typing import Any
from ..client import Client
from ..response import Response
class SocketClient(Client):
"""
Args:
socket: Connected socket.
*args: Passed through to Client class.
**kwargs: Passed through to Client class.
"""
def __init__(
self, socket: socket.socket, *args: Any, **kwargs: Any
) -> None:
super().__init__(*args, **kwargs)
self.socket = socket
def send_message(self, request: str, **kwargs: Any) -> Response:
"""
Transport the message to the server and return the response.
:param request: The JSON-RPC request string.
:return: The response (a string for requests, None for notifications).
"""
payload = str(request) + '\n'
self.socket.send(payload.encode())
        return Response(self.socket.recv(4096).decode())  # socket.recv() requires a bufsize argument
|
Add a low-level socket client"""
Low-level socket client.
"""
import socket
from typing import Any
from ..client import Client
from ..response import Response
class SocketClient(Client):
"""
Args:
socket: Connected socket.
*args: Passed through to Client class.
**kwargs: Passed through to Client class.
"""
def __init__(
self, socket: socket.socket, *args: Any, **kwargs: Any
) -> None:
super().__init__(*args, **kwargs)
self.socket = socket
def send_message(self, request: str, **kwargs: Any) -> Response:
"""
Transport the message to the server and return the response.
:param request: The JSON-RPC request string.
:return: The response (a string for requests, None for notifications).
"""
payload = str(request) + '\n'
self.socket.send(payload.encode())
        return Response(self.socket.recv(4096).decode())  # socket.recv() requires a bufsize argument
|
<commit_before><commit_msg>Add a low-level socket client<commit_after>"""
Low-level socket client.
"""
import socket
from typing import Any
from ..client import Client
from ..response import Response
class SocketClient(Client):
"""
Args:
socket: Connected socket.
*args: Passed through to Client class.
**kwargs: Passed through to Client class.
"""
def __init__(
self, socket: socket.socket, *args: Any, **kwargs: Any
) -> None:
super().__init__(*args, **kwargs)
self.socket = socket
def send_message(self, request: str, **kwargs: Any) -> Response:
"""
Transport the message to the server and return the response.
:param request: The JSON-RPC request string.
:return: The response (a string for requests, None for notifications).
"""
payload = str(request) + '\n'
self.socket.send(payload.encode())
        return Response(self.socket.recv(4096).decode())  # socket.recv() requires a bufsize argument
|
|
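A hedged usage sketch for the transport above. The server address and raw request string are placeholders: it assumes a newline-delimited JSON-RPC server is listening, and a real caller would normally go through the base Client's higher-level helpers rather than send_message directly.
import socket
from jsonrpcclient.clients.socket_client import SocketClient
# Placeholder endpoint; replace with a real JSON-RPC server.
sock = socket.create_connection(("localhost", 5000))
client = SocketClient(sock)
response = client.send_message('{"jsonrpc": "2.0", "method": "ping", "id": 1}')
print(response)  # Response object wrapping the raw reply text
sock.close()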
25743b9e5bc406d3f8f7713e5dca560d5bb4f82f
|
tests/xmi/test_xmi_serialization.py
|
tests/xmi/test_xmi_serialization.py
|
import pytest
import os
import pyecore.ecore as Ecore
from pyecore.resources import *
from pyecore.resources.xmi import XMIResource
def test_resource_createset(tmpdir):
f = tmpdir.mkdir('pyecore-tmp').join('test.xmi')
resource = XMIResource(URI(str(f)))
# we create a simple metamodel by script
package = Ecore.EPackage('mypackage')
package.nsURI = 'http://simplemetamodel/1.0'
package.nsPrefix = 'simplemm'
AbsA = Ecore.EClass('AbsA', abstract=True)
A = Ecore.EClass('A', superclass=(AbsA,))
SubA = Ecore.EClass('SubA', superclass=(A,))
MyRoot = Ecore.EClass('MyRoot')
MyRoot.a_container = Ecore.EReference('a_container', eType=AbsA, upper=-1, containment=True)
MyRoot.eStructuralFeatures.append(MyRoot.a_container)
package.eClassifiers.extend([MyRoot, A, SubA])
# we create some instances
root = MyRoot()
a1 = A()
suba1 = SubA()
root.a_container.extend([a1, suba1])
# we add the elements to the resource
resource.append(root)
resource.save()
# we try to read it again (we register the metamodel first)
global_registry[package.nsURI] = package
resource = XMIResource(URI(str(f)))
resource.load()
assert resource.contents != []
assert len(resource.contents[0].eContents) == 2
|
Add simple integration test for model serialization
|
Add simple integration test for model serialization
This test creates a new metamodel in memory (by code), creates an
instance of the metamodel, serializes the instance model as XMI, then
re-reads it from another resource.
|
Python
|
bsd-3-clause
|
aranega/pyecore,pyecore/pyecore
|
Add simple integration test for model serialization
This test creates a new metamodel in memory (by code), creates an
instance of the metamodel, serializes the instance model as XMI, then
re-reads it from another resource.
|
import pytest
import os
import pyecore.ecore as Ecore
from pyecore.resources import *
from pyecore.resources.xmi import XMIResource
def test_resource_createset(tmpdir):
f = tmpdir.mkdir('pyecore-tmp').join('test.xmi')
resource = XMIResource(URI(str(f)))
# we create a simple metamodel by script
package = Ecore.EPackage('mypackage')
package.nsURI = 'http://simplemetamodel/1.0'
package.nsPrefix = 'simplemm'
AbsA = Ecore.EClass('AbsA', abstract=True)
A = Ecore.EClass('A', superclass=(AbsA,))
SubA = Ecore.EClass('SubA', superclass=(A,))
MyRoot = Ecore.EClass('MyRoot')
MyRoot.a_container = Ecore.EReference('a_container', eType=AbsA, upper=-1, containment=True)
MyRoot.eStructuralFeatures.append(MyRoot.a_container)
package.eClassifiers.extend([MyRoot, A, SubA])
# we create some instances
root = MyRoot()
a1 = A()
suba1 = SubA()
root.a_container.extend([a1, suba1])
# we add the elements to the resource
resource.append(root)
resource.save()
# we try to read it again (we register the metamodel first)
global_registry[package.nsURI] = package
resource = XMIResource(URI(str(f)))
resource.load()
assert resource.contents != []
assert len(resource.contents[0].eContents) == 2
|
<commit_before><commit_msg>Add simple integration test for model serialization
This test creates a new metamodel in memory (by code), creates an
instance of the metamodel, serializes the instance model as XMI, then
re-reads it from another resource.<commit_after>
|
import pytest
import os
import pyecore.ecore as Ecore
from pyecore.resources import *
from pyecore.resources.xmi import XMIResource
def test_resource_createset(tmpdir):
f = tmpdir.mkdir('pyecore-tmp').join('test.xmi')
resource = XMIResource(URI(str(f)))
# we create a simple metamodel by script
package = Ecore.EPackage('mypackage')
package.nsURI = 'http://simplemetamodel/1.0'
package.nsPrefix = 'simplemm'
AbsA = Ecore.EClass('AbsA', abstract=True)
A = Ecore.EClass('A', superclass=(AbsA,))
SubA = Ecore.EClass('SubA', superclass=(A,))
MyRoot = Ecore.EClass('MyRoot')
MyRoot.a_container = Ecore.EReference('a_container', eType=AbsA, upper=-1, containment=True)
MyRoot.eStructuralFeatures.append(MyRoot.a_container)
package.eClassifiers.extend([MyRoot, A, SubA])
# we create some instances
root = MyRoot()
a1 = A()
suba1 = SubA()
root.a_container.extend([a1, suba1])
# we add the elements to the resource
resource.append(root)
resource.save()
# we try to read it again (we register the metamodel first)
global_registry[package.nsURI] = package
resource = XMIResource(URI(str(f)))
resource.load()
assert resource.contents != []
assert len(resource.contents[0].eContents) == 2
|
Add simple integration test for model serialization
This test creates a new metamodel in memory (by code), creates an
instance of the metamodel, serializes the instance model as XMI, then
re-reads it from another resource.import pytest
import os
import pyecore.ecore as Ecore
from pyecore.resources import *
from pyecore.resources.xmi import XMIResource
def test_resource_createset(tmpdir):
f = tmpdir.mkdir('pyecore-tmp').join('test.xmi')
resource = XMIResource(URI(str(f)))
# we create a simple metamodel by script
package = Ecore.EPackage('mypackage')
package.nsURI = 'http://simplemetamodel/1.0'
package.nsPrefix = 'simplemm'
AbsA = Ecore.EClass('AbsA', abstract=True)
A = Ecore.EClass('A', superclass=(AbsA,))
SubA = Ecore.EClass('SubA', superclass=(A,))
MyRoot = Ecore.EClass('MyRoot')
MyRoot.a_container = Ecore.EReference('a_container', eType=AbsA, upper=-1, containment=True)
MyRoot.eStructuralFeatures.append(MyRoot.a_container)
package.eClassifiers.extend([MyRoot, A, SubA])
# we create some instances
root = MyRoot()
a1 = A()
suba1 = SubA()
root.a_container.extend([a1, suba1])
# we add the elements to the resource
resource.append(root)
resource.save()
# we try to read it again (we register the metamodel first)
global_registry[package.nsURI] = package
resource = XMIResource(URI(str(f)))
resource.load()
assert resource.contents != []
assert len(resource.contents[0].eContents) == 2
|
<commit_before><commit_msg>Add simple integration test for model serialization
This test creates a new metamodel in memory (by code), creates an
instance of the metamodel, serializes the instance model as XMI, then
re-reads it from another resource.<commit_after>import pytest
import os
import pyecore.ecore as Ecore
from pyecore.resources import *
from pyecore.resources.xmi import XMIResource
def test_resource_createset(tmpdir):
f = tmpdir.mkdir('pyecore-tmp').join('test.xmi')
resource = XMIResource(URI(str(f)))
# we create a simple metamodel by script
package = Ecore.EPackage('mypackage')
package.nsURI = 'http://simplemetamodel/1.0'
package.nsPrefix = 'simplemm'
AbsA = Ecore.EClass('AbsA', abstract=True)
A = Ecore.EClass('A', superclass=(AbsA,))
SubA = Ecore.EClass('SubA', superclass=(A,))
MyRoot = Ecore.EClass('MyRoot')
MyRoot.a_container = Ecore.EReference('a_container', eType=AbsA, upper=-1, containment=True)
MyRoot.eStructuralFeatures.append(MyRoot.a_container)
package.eClassifiers.extend([MyRoot, A, SubA])
# we create some instances
root = MyRoot()
a1 = A()
suba1 = SubA()
root.a_container.extend([a1, suba1])
# we add the elements to the resource
resource.append(root)
resource.save()
# we try to read it again (we register the metamodel first)
global_registry[package.nsURI] = package
resource = XMIResource(URI(str(f)))
resource.load()
assert resource.contents != []
assert len(resource.contents[0].eContents) == 2
|
|
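An alternative to registering the package in global_registry is a per-ResourceSet registry; a sketch of the reload step along those lines, assuming pyecore's ResourceSet API and reusing the package and file path built in the test body:
from pyecore.resources import ResourceSet, URI
rset = ResourceSet()
rset.metamodel_registry['http://simplemetamodel/1.0'] = package  # package from the test above
resource = rset.get_resource(URI('test.xmi'))  # illustrative path
root = resource.contents[0]
assert len(root.eContents) == 2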
f6dd64353c864f965bed0091dc1ab447ff29a2fd
|
src/ggrc_workflows/migrations/versions/20151107014837_18bdb0671010_update_invalid_task_dates.py
|
src/ggrc_workflows/migrations/versions/20151107014837_18bdb0671010_update_invalid_task_dates.py
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
"""Update invalid task dates
Revision ID: 18bdb0671010
Revises: e81da7a55e7
Create Date: 2015-11-07 01:48:37.046586
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '18bdb0671010'
down_revision = 'e81da7a55e7'
def upgrade():
""" Fix all broken task group dates
This is the easiest solution for solving bad dates that might have been
imported (such as 12/31/15). We can assume that such data is not in
    production. Setting today's date is still less wrong than having a
    2000-year-old task that might break the app.
"""
op.execute("""
UPDATE task_group_tasks
SET start_date = CURDATE(), end_date=CURDATE()
WHERE (start_date IS NOT NULL AND start_date < "1900-01-01") OR
(end_date IS NOT NULL AND end_date < "1900-01-01")
""")
def downgrade():
""" We won't currupt the data on downgrade """
pass
|
Add a migration for fixing bad dates
|
Add a migration for fixing bad dates
This migration is for the case that some bad dates were imported before
the proper date validation was added.
|
Python
|
apache-2.0
|
AleksNeStu/ggrc-core,prasannav7/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,NejcZupec/ggrc-core,j0gurt/ggrc-core,VinnieJohns/ggrc-core,edofic/ggrc-core,kr41/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,VinnieJohns/ggrc-core,NejcZupec/ggrc-core,edofic/ggrc-core,kr41/ggrc-core,kr41/ggrc-core,jmakov/ggrc-core,jmakov/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,selahssea/ggrc-core,jmakov/ggrc-core,prasannav7/ggrc-core,andrei-karalionak/ggrc-core,j0gurt/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core,edofic/ggrc-core,prasannav7/ggrc-core,josthkko/ggrc-core,prasannav7/ggrc-core,josthkko/ggrc-core,NejcZupec/ggrc-core,NejcZupec/ggrc-core,selahssea/ggrc-core,jmakov/ggrc-core,edofic/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,jmakov/ggrc-core,VinnieJohns/ggrc-core
|
Add a migration for fixing bad dates
This migration is for the case that some bad dates were imported before
the proper date validation was added.
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
"""Update invalid task dates
Revision ID: 18bdb0671010
Revises: e81da7a55e7
Create Date: 2015-11-07 01:48:37.046586
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '18bdb0671010'
down_revision = 'e81da7a55e7'
def upgrade():
""" Fix all broken task group dates
This is the easiest solution for solving bad dates that might have been
imported (such as 12/31/15). We can assume that such data is not in
    production. Setting today's date is still less wrong than having a
    2000-year-old task that might break the app.
"""
op.execute("""
UPDATE task_group_tasks
SET start_date = CURDATE(), end_date=CURDATE()
WHERE (start_date IS NOT NULL AND start_date < "1900-01-01") OR
(end_date IS NOT NULL AND end_date < "1900-01-01")
""")
def downgrade():
""" We won't currupt the data on downgrade """
pass
|
<commit_before><commit_msg>Add a migration for fixing bad dates
This migration is for the case that some bad dates were imported before
the proper date validation was added.<commit_after>
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
"""Update invalid task dates
Revision ID: 18bdb0671010
Revises: e81da7a55e7
Create Date: 2015-11-07 01:48:37.046586
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '18bdb0671010'
down_revision = 'e81da7a55e7'
def upgrade():
""" Fix all broken task group dates
This is the easiest solution for solving bad dates that might have been
imported (such as 12/31/15). We can assume that such data is not in
    production. Setting today's date is still less wrong than having a
    2000-year-old task that might break the app.
"""
op.execute("""
UPDATE task_group_tasks
SET start_date = CURDATE(), end_date=CURDATE()
WHERE (start_date IS NOT NULL AND start_date < "1900-01-01") OR
(end_date IS NOT NULL AND end_date < "1900-01-01")
""")
def downgrade():
""" We won't currupt the data on downgrade """
pass
|
Add a migration for fixing bad dates
This migration is for the case that some bad dates were imported before
the proper date validation was added.# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
"""Update invalid task dates
Revision ID: 18bdb0671010
Revises: e81da7a55e7
Create Date: 2015-11-07 01:48:37.046586
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '18bdb0671010'
down_revision = 'e81da7a55e7'
def upgrade():
""" Fix all broken task group dates
This is the easiest solution for solving bad dates that might have been
imported (such as 12/31/15). We can assume that such data is not in
    production. Setting today's date is still less wrong than having a
    2000-year-old task that might break the app.
"""
op.execute("""
UPDATE task_group_tasks
SET start_date = CURDATE(), end_date=CURDATE()
WHERE (start_date IS NOT NULL AND start_date < "1900-01-01") OR
(end_date IS NOT NULL AND end_date < "1900-01-01")
""")
def downgrade():
""" We won't currupt the data on downgrade """
pass
|
<commit_before><commit_msg>Add a migration for fixing bad dates
This migration is for the case that some bad dates were imported before
the proper date validation was added.<commit_after># Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
"""Update invalid task dates
Revision ID: 18bdb0671010
Revises: e81da7a55e7
Create Date: 2015-11-07 01:48:37.046586
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '18bdb0671010'
down_revision = 'e81da7a55e7'
def upgrade():
""" Fix all broken task group dates
This is the easiest solution for solving bad dates that might have been
imported (such as 12/31/15). We can assume that such data is not in
    production. Setting today's date is still less wrong than having a
    2000-year-old task that might break the app.
"""
op.execute("""
UPDATE task_group_tasks
SET start_date = CURDATE(), end_date=CURDATE()
WHERE (start_date IS NOT NULL AND start_date < "1900-01-01") OR
(end_date IS NOT NULL AND end_date < "1900-01-01")
""")
def downgrade():
""" We won't currupt the data on downgrade """
pass
|
|
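Before running the upgrade, it can be worth counting the rows it would rewrite; a sketch of that pre-flight check, with a placeholder connection URL to be adjusted for the actual ggrc database:
import sqlalchemy as sa
engine = sa.create_engine('mysql://user:password@localhost/ggrc')  # placeholder URL
count_bad_dates = sa.text(
    'SELECT COUNT(*) FROM task_group_tasks'
    ' WHERE (start_date IS NOT NULL AND start_date < "1900-01-01")'
    ' OR (end_date IS NOT NULL AND end_date < "1900-01-01")')
with engine.connect() as conn:
    print(conn.execute(count_bad_dates).scalar())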
eaa125e7195f065c4ca74088d8f9a5eb6464e056
|
basics/masking_utils.py
|
basics/masking_utils.py
|
import skimage.morphology as mo
import scipy.ndimage as nd
import warnings
try:
import cv2
CV2_FLAG = True
except ImportError:
warnings.warn("Cannot import cv2. Computing with scipy.ndimage")
CV2_FLAG = False
from utils import eight_conn
def smooth_edges(mask, filter_size, min_pixels):
no_small = mo.remove_small_holes(mask, min_size=min_pixels,
connectivity=2)
open_close = \
nd.binary_closing(nd.binary_opening(no_small, eight_conn), eight_conn)
medianed = nd.median_filter(open_close, filter_size)
return mo.remove_small_holes(medianed, min_size=min_pixels,
connectivity=2)
def remove_spurs(mask, min_distance=9):
'''
Remove spurious mask features with reconstruction.
'''
# Distance transform of the mask
dist_trans = nd.distance_transform_edt(mask)
# We don't want to return local maxima within the minimum distance
# Use reconstruction to remove.
seed = dist_trans + min_distance
reconst = mo.reconstruction(seed, dist_trans, method='erosion') - \
min_distance
if CV2_FLAG:
return cv2.morphologyEx((reconst > 0).astype("uint8"),
cv2.MORPH_DILATE,
mo.disk(min_distance).astype("uint8")).astype(bool)
else:
return mo.dilation(reconst > 0, selem=mo.disk(min_distance))
|
Split off masking utility functions
|
Split off masking utility functions
|
Python
|
mit
|
e-koch/BaSiCs
|
Split off masking utility functions
|
import skimage.morphology as mo
import scipy.ndimage as nd
import warnings
try:
import cv2
CV2_FLAG = True
except ImportError:
warnings.warn("Cannot import cv2. Computing with scipy.ndimage")
CV2_FLAG = False
from utils import eight_conn
def smooth_edges(mask, filter_size, min_pixels):
no_small = mo.remove_small_holes(mask, min_size=min_pixels,
connectivity=2)
open_close = \
nd.binary_closing(nd.binary_opening(no_small, eight_conn), eight_conn)
medianed = nd.median_filter(open_close, filter_size)
return mo.remove_small_holes(medianed, min_size=min_pixels,
connectivity=2)
def remove_spurs(mask, min_distance=9):
'''
Remove spurious mask features with reconstruction.
'''
# Distance transform of the mask
dist_trans = nd.distance_transform_edt(mask)
# We don't want to return local maxima within the minimum distance
# Use reconstruction to remove.
seed = dist_trans + min_distance
reconst = mo.reconstruction(seed, dist_trans, method='erosion') - \
min_distance
if CV2_FLAG:
return cv2.morphologyEx((reconst > 0).astype("uint8"),
cv2.MORPH_DILATE,
mo.disk(min_distance).astype("uint8")).astype(bool)
else:
return mo.dilation(reconst > 0, selem=mo.disk(min_distance))
|
<commit_before><commit_msg>Split off masking utility functions<commit_after>
|
import skimage.morphology as mo
import scipy.ndimage as nd
import warnings
try:
import cv2
CV2_FLAG = True
except ImportError:
warnings.warn("Cannot import cv2. Computing with scipy.ndimage")
CV2_FLAG = False
from utils import eight_conn
def smooth_edges(mask, filter_size, min_pixels):
no_small = mo.remove_small_holes(mask, min_size=min_pixels,
connectivity=2)
open_close = \
nd.binary_closing(nd.binary_opening(no_small, eight_conn), eight_conn)
medianed = nd.median_filter(open_close, filter_size)
return mo.remove_small_holes(medianed, min_size=min_pixels,
connectivity=2)
def remove_spurs(mask, min_distance=9):
'''
Remove spurious mask features with reconstruction.
'''
# Distance transform of the mask
dist_trans = nd.distance_transform_edt(mask)
# We don't want to return local maxima within the minimum distance
# Use reconstruction to remove.
seed = dist_trans + min_distance
reconst = mo.reconstruction(seed, dist_trans, method='erosion') - \
min_distance
if CV2_FLAG:
return cv2.morphologyEx((reconst > 0).astype("uint8"),
cv2.MORPH_DILATE,
mo.disk(min_distance).astype("uint8")).astype(bool)
else:
return mo.dilation(reconst > 0, selem=mo.disk(min_distance))
|
Split off masking utility functions
import skimage.morphology as mo
import scipy.ndimage as nd
import warnings
try:
import cv2
CV2_FLAG = True
except ImportError:
warnings.warn("Cannot import cv2. Computing with scipy.ndimage")
CV2_FLAG = False
from utils import eight_conn
def smooth_edges(mask, filter_size, min_pixels):
no_small = mo.remove_small_holes(mask, min_size=min_pixels,
connectivity=2)
open_close = \
nd.binary_closing(nd.binary_opening(no_small, eight_conn), eight_conn)
medianed = nd.median_filter(open_close, filter_size)
return mo.remove_small_holes(medianed, min_size=min_pixels,
connectivity=2)
def remove_spurs(mask, min_distance=9):
'''
Remove spurious mask features with reconstruction.
'''
# Distance transform of the mask
dist_trans = nd.distance_transform_edt(mask)
# We don't want to return local maxima within the minimum distance
# Use reconstruction to remove.
seed = dist_trans + min_distance
reconst = mo.reconstruction(seed, dist_trans, method='erosion') - \
min_distance
if CV2_FLAG:
return cv2.morphologyEx((reconst > 0).astype("uint8"),
cv2.MORPH_DILATE,
mo.disk(min_distance).astype("uint8")).astype(bool)
else:
return mo.dilation(reconst > 0, selem=mo.disk(min_distance))
|
<commit_before><commit_msg>Split off masking utility functions<commit_after>
import skimage.morphology as mo
import scipy.ndimage as nd
import warnings
try:
import cv2
CV2_FLAG = True
except ImportError:
warnings.warn("Cannot import cv2. Computing with scipy.ndimage")
CV2_FLAG = False
from utils import eight_conn
def smooth_edges(mask, filter_size, min_pixels):
no_small = mo.remove_small_holes(mask, min_size=min_pixels,
connectivity=2)
open_close = \
nd.binary_closing(nd.binary_opening(no_small, eight_conn), eight_conn)
medianed = nd.median_filter(open_close, filter_size)
return mo.remove_small_holes(medianed, min_size=min_pixels,
connectivity=2)
def remove_spurs(mask, min_distance=9):
'''
Remove spurious mask features with reconstruction.
'''
# Distance transform of the mask
dist_trans = nd.distance_transform_edt(mask)
# We don't want to return local maxima within the minimum distance
# Use reconstruction to remove.
seed = dist_trans + min_distance
reconst = mo.reconstruction(seed, dist_trans, method='erosion') - \
min_distance
if CV2_FLAG:
return cv2.morphologyEx((reconst > 0).astype("uint8"),
cv2.MORPH_DILATE,
mo.disk(min_distance).astype("uint8")).astype(bool)
else:
return mo.dilation(reconst > 0, selem=mo.disk(min_distance))
|
|
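A small usage sketch for the two helpers, on the scikit-image horse silhouette used elsewhere in this collection. The basics.masking_utils import path follows the file location above (and assumes the package's own utils module is importable); the parameter values are arbitrary.
from skimage import data
from basics.masking_utils import smooth_edges, remove_spurs
mask = data.horse().astype(bool)
despurred = remove_spurs(mask, min_distance=9)
cleaned = smooth_edges(despurred, filter_size=5, min_pixels=100)
print(cleaned.sum(), 'mask pixels retained')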
88fb0e30f50184641615bf70aa82ded6646854f3
|
watcher.py
|
watcher.py
|
import os
from threading import Thread
from werkzeug._reloader import ReloaderLoop
class Watcher(Thread, ReloaderLoop):
    def __init__(self, dirname, static, tasks, interval=1, *args, **kwargs):
        self.dirname = dirname
        self.static = static
        self.tasks = tasks
        super(Watcher, self).__init__(*args, **kwargs)
        ReloaderLoop.__init__(self, interval=interval)
    def run(self):
        # Poll the watched directory's mtime; rebuild the tasks once it changes.
        time = None
        while True:
            try:
                mtime = os.stat(self.dirname).st_mtime
            except OSError:
                continue
            if time is None:
                time = mtime
            elif mtime > time:
                self.static.run(self.tasks)
                break
            self._sleep(self.interval)
|
Add thread implementation to watch files
|
Add thread implementation to watch files
|
Python
|
mit
|
rolurq/flask-gulp
|
Add thread implementation to watch files
|
import os
from threading import Thread
from werkzeug._reloader import ReloaderLoop
class Watcher(Thread, ReloaderLoop):
    def __init__(self, dirname, static, tasks, interval=1, *args, **kwargs):
        self.dirname = dirname
        self.static = static
        self.tasks = tasks
        super(Watcher, self).__init__(*args, **kwargs)
        ReloaderLoop.__init__(self, interval=interval)
    def run(self):
        # Poll the watched directory's mtime; rebuild the tasks once it changes.
        time = None
        while True:
            try:
                mtime = os.stat(self.dirname).st_mtime
            except OSError:
                continue
            if time is None:
                time = mtime
            elif mtime > time:
                self.static.run(self.tasks)
                break
            self._sleep(self.interval)
|
<commit_before><commit_msg>Add thread implementation to watch files<commit_after>
|
import os
from threading import Thread
from werkzeug._reloader import ReloaderLoop
class Watcher(Thread, ReloaderLoop):
    def __init__(self, dirname, static, tasks, interval=1, *args, **kwargs):
        self.dirname = dirname
        self.static = static
        self.tasks = tasks
        super(Watcher, self).__init__(*args, **kwargs)
        ReloaderLoop.__init__(self, interval=interval)
    def run(self):
        # Poll the watched directory's mtime; rebuild the tasks once it changes.
        time = None
        while True:
            try:
                mtime = os.stat(self.dirname).st_mtime
            except OSError:
                continue
            if time is None:
                time = mtime
            elif mtime > time:
                self.static.run(self.tasks)
                break
            self._sleep(self.interval)
|
Add thread implementation to watch filesimport os
from threading import Thread
from werkzeug._reloader import ReloaderLoop
class Watcher(Thread, ReloaderLoop):
    def __init__(self, dirname, static, tasks, interval=1, *args, **kwargs):
        self.dirname = dirname
        self.static = static
        self.tasks = tasks
        super(Watcher, self).__init__(*args, **kwargs)
        ReloaderLoop.__init__(self, interval=interval)
    def run(self):
        # Poll the watched directory's mtime; rebuild the tasks once it changes.
        time = None
        while True:
            try:
                mtime = os.stat(self.dirname).st_mtime
            except OSError:
                continue
            if time is None:
                time = mtime
            elif mtime > time:
                self.static.run(self.tasks)
                break
            self._sleep(self.interval)
|
<commit_before><commit_msg>Add thread implementation to watch files<commit_after>import os
from threading import Thread
from werkzeug._reloader import ReloaderLoop
class Watcher(Thread, ReloaderLoop):
    def __init__(self, dirname, static, tasks, interval=1, *args, **kwargs):
        self.dirname = dirname
        self.static = static
        self.tasks = tasks
        super(Watcher, self).__init__(*args, **kwargs)
        ReloaderLoop.__init__(self, interval=interval)
    def run(self):
        # Poll the watched directory's mtime; rebuild the tasks once it changes.
        time = None
        while True:
            try:
                mtime = os.stat(self.dirname).st_mtime
            except OSError:
                continue
            if time is None:
                time = mtime
            elif mtime > time:
                self.static.run(self.tasks)
                break
            self._sleep(self.interval)
|
|
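With the corrected signature above, usage looks roughly like this. FakeStatic stands in for whatever object exposes run(tasks) in the real extension and is purely illustrative; note the thread exits after the first rebuild it triggers.
import os
import tempfile
from watcher import Watcher  # assumes watcher.py is importable
class FakeStatic:
    def run(self, tasks):
        print('rebuilding', tasks)
watch_dir = tempfile.mkdtemp()
w = Watcher(watch_dir, FakeStatic(), ['css', 'js'], interval=1)
w.daemon = True
w.start()
# Creating a file bumps the directory mtime and triggers one rebuild.
open(os.path.join(watch_dir, 'touched'), 'w').close()
w.join(timeout=5)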
769541b58b1b163197717ea4b84c5ce0cde293e0
|
app/messaging_app.py
|
app/messaging_app.py
|
import os
import json
from .core.logger import configure_logging
from .core.messaging import MessageServer
def get_password():
if "REDIS_PASSWD" in os.environ:
return os.environ["REDIS_PASSWD"]
with open("/home/rpi/.variables") as f:
line = next(x.strip() for x in f if x.startswith("REDIS_PASSWD"))
return line.split("=")[1]
def create_app(config=None):
configure_logging()
if config is None:
config = {
"host": "alarmpi",
"password": get_password()
}
with open("app/queues.json") as queue_file:
return MessageServer(6668, config, json.load(queue_file))
|
Add main for messaging server.
|
Add main for messaging server.
|
Python
|
mit
|
supersaiyanmode/HomePiServer,supersaiyanmode/HomePiServer,supersaiyanmode/HomePiServer
|
Add main for messaging server.
|
import os
import json
from .core.logger import configure_logging
from .core.messaging import MessageServer
def get_password():
if "REDIS_PASSWD" in os.environ:
return os.environ["REDIS_PASSWD"]
with open("/home/rpi/.variables") as f:
line = next(x.strip() for x in f if x.startswith("REDIS_PASSWD"))
return line.split("=")[1]
def create_app(config=None):
configure_logging()
if config is None:
config = {
"host": "alarmpi",
"password": get_password()
}
with open("app/queues.json") as queue_file:
return MessageServer(6668, config, json.load(queue_file))
|
<commit_before><commit_msg>Add main for messaging server.<commit_after>
|
import os
import json
from .core.logger import configure_logging
from .core.messaging import MessageServer
def get_password():
if "REDIS_PASSWD" in os.environ:
return os.environ["REDIS_PASSWD"]
with open("/home/rpi/.variables") as f:
line = next(x.strip() for x in f if x.startswith("REDIS_PASSWD"))
return line.split("=")[1]
def create_app(config=None):
configure_logging()
if config is None:
config = {
"host": "alarmpi",
"password": get_password()
}
with open("app/queues.json") as queue_file:
return MessageServer(6668, config, json.load(queue_file))
|
Add main for messaging server.import os
import json
from .core.logger import configure_logging
from .core.messaging import MessageServer
def get_password():
if "REDIS_PASSWD" in os.environ:
return os.environ["REDIS_PASSWD"]
with open("/home/rpi/.variables") as f:
line = next(x.strip() for x in f if x.startswith("REDIS_PASSWD"))
return line.split("=")[1]
def create_app(config=None):
configure_logging()
if config is None:
config = {
"host": "alarmpi",
"password": get_password()
}
with open("app/queues.json") as queue_file:
return MessageServer(6668, config, json.load(queue_file))
|
<commit_before><commit_msg>Add main for messaging server.<commit_after>import os
import json
from .core.logger import configure_logging
from .core.messaging import MessageServer
def get_password():
if "REDIS_PASSWD" in os.environ:
return os.environ["REDIS_PASSWD"]
with open("/home/rpi/.variables") as f:
line = next(x.strip() for x in f if x.startswith("REDIS_PASSWD"))
return line.split("=")[1]
def create_app(config=None):
configure_logging()
if config is None:
config = {
"host": "alarmpi",
"password": get_password()
}
with open("app/queues.json") as queue_file:
return MessageServer(6668, config, json.load(queue_file))
|
|
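Passing a config explicitly sidesteps both the REDIS_PASSWD environment variable and the /home/rpi/.variables fallback. Host and password below are placeholders, and run() is assumed to be MessageServer's entry point; see app.core.messaging for the actual interface.
from app.messaging_app import create_app
server = create_app({'host': 'localhost', 'password': 'placeholder'})
server.run()  # assumed entry point of MessageServer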
53bbb9bfa6fdc1e946365e746b1acf4b03a0635e
|
regulations/templatetags/in_context.py
|
regulations/templatetags/in_context.py
|
from django import template
register = template.Library()
class InContextNode(template.Node):
def __init__(self, nodelist, subcontext_names):
self.nodelist = nodelist
self.subcontext_names = subcontext_names
def render(self, context):
new_context = {}
for field in self.subcontext_names:
value = context.get(field, {})
if isinstance(value, dict):
new_context.update(context.get(field, {}))
else:
new_context[field] = value
return self.nodelist.render(template.Context(new_context))
@register.tag('begincontext')
def in_context(parser, token):
"""
Replaces the context (inside of this block) for easy (and safe) inclusion
of sub-content.
For example, if the context is {'name': 'Kitty', 'sub': {'size': 5}}
1: {{ name }} {{ size }}
{% begincontext sub %}
2: {{ name }} {{ size }}
{% endcontext %}
3: {{ name }} {{ size }}
Will print
1: Kitty
2: 5
3: Kitty
Arguments which are not dictionaries will 'cascade' into the inner
context.
"""
nodelist = parser.parse(('endcontext',))
parser.delete_first_token()
return InContextNode(nodelist, token.split_contents()[1:])
|
from django import template
register = template.Library()
class InContextNode(template.Node):
def __init__(self, nodelist, subcontext_names):
self.nodelist = nodelist
self.subcontext_names = subcontext_names
def render(self, context):
new_context = {}
for field in self.subcontext_names:
value = context.get(field, {})
if isinstance(value, dict):
new_context.update(context.get(field, {}))
else:
new_context[field] = value
new_context = context.new(new_context)
return self.nodelist.render(new_context)
@register.tag('begincontext')
def in_context(parser, token):
"""
Replaces the context (inside of this block) for easy (and safe) inclusion
of sub-content.
For example, if the context is {'name': 'Kitty', 'sub': {'size': 5}}
1: {{ name }} {{ size }}
{% begincontext sub %}
2: {{ name }} {{ size }}
{% endcontext %}
3: {{ name }} {{ size }}
Will print
1: Kitty
2: 5
3: Kitty
Arguments which are not dictionaries will 'cascade' into the inner
context.
"""
nodelist = parser.parse(('endcontext',))
parser.delete_first_token()
return InContextNode(nodelist, token.split_contents()[1:])
|
Fix custom template tag to work with django 1.8
|
Fix custom template tag to work with django 1.8
|
Python
|
cc0-1.0
|
willbarton/regulations-site,grapesmoker/regulations-site,willbarton/regulations-site,willbarton/regulations-site,grapesmoker/regulations-site,willbarton/regulations-site,ascott1/regulations-site,ascott1/regulations-site,grapesmoker/regulations-site,ascott1/regulations-site,grapesmoker/regulations-site,ascott1/regulations-site
|
from django import template
register = template.Library()
class InContextNode(template.Node):
def __init__(self, nodelist, subcontext_names):
self.nodelist = nodelist
self.subcontext_names = subcontext_names
def render(self, context):
new_context = {}
for field in self.subcontext_names:
value = context.get(field, {})
if isinstance(value, dict):
new_context.update(context.get(field, {}))
else:
new_context[field] = value
return self.nodelist.render(template.Context(new_context))
@register.tag('begincontext')
def in_context(parser, token):
"""
Replaces the context (inside of this block) for easy (and safe) inclusion
of sub-content.
For example, if the context is {'name': 'Kitty', 'sub': {'size': 5}}
1: {{ name }} {{ size }}
{% begincontext sub %}
2: {{ name }} {{ size }}
{% endcontext %}
3: {{ name }} {{ size }}
Will print
1: Kitty
2: 5
3: Kitty
Arguments which are not dictionaries will 'cascade' into the inner
context.
"""
nodelist = parser.parse(('endcontext',))
parser.delete_first_token()
return InContextNode(nodelist, token.split_contents()[1:])
Fix custom template tag to work with django 1.8
|
from django import template
register = template.Library()
class InContextNode(template.Node):
def __init__(self, nodelist, subcontext_names):
self.nodelist = nodelist
self.subcontext_names = subcontext_names
def render(self, context):
new_context = {}
for field in self.subcontext_names:
value = context.get(field, {})
if isinstance(value, dict):
new_context.update(context.get(field, {}))
else:
new_context[field] = value
new_context = context.new(new_context)
return self.nodelist.render(new_context)
@register.tag('begincontext')
def in_context(parser, token):
"""
Replaces the context (inside of this block) for easy (and safe) inclusion
of sub-content.
For example, if the context is {'name': 'Kitty', 'sub': {'size': 5}}
1: {{ name }} {{ size }}
{% begincontext sub %}
2: {{ name }} {{ size }}
{% endcontext %}
3: {{ name }} {{ size }}
Will print
1: Kitty
2: 5
3: Kitty
Arguments which are not dictionaries will 'cascade' into the inner
context.
"""
nodelist = parser.parse(('endcontext',))
parser.delete_first_token()
return InContextNode(nodelist, token.split_contents()[1:])
|
<commit_before>from django import template
register = template.Library()
class InContextNode(template.Node):
def __init__(self, nodelist, subcontext_names):
self.nodelist = nodelist
self.subcontext_names = subcontext_names
def render(self, context):
new_context = {}
for field in self.subcontext_names:
value = context.get(field, {})
if isinstance(value, dict):
new_context.update(context.get(field, {}))
else:
new_context[field] = value
return self.nodelist.render(template.Context(new_context))
@register.tag('begincontext')
def in_context(parser, token):
"""
Replaces the context (inside of this block) for easy (and safe) inclusion
of sub-content.
For example, if the context is {'name': 'Kitty', 'sub': {'size': 5}}
1: {{ name }} {{ size }}
{% begincontext sub %}
2: {{ name }} {{ size }}
{% endcontext %}
3: {{ name }} {{ size }}
Will print
1: Kitty
2: 5
3: Kitty
Arguments which are not dictionaries will 'cascade' into the inner
context.
"""
nodelist = parser.parse(('endcontext',))
parser.delete_first_token()
return InContextNode(nodelist, token.split_contents()[1:])
<commit_msg>Fix custom template tag to work with django 1.8<commit_after>
|
from django import template
register = template.Library()
class InContextNode(template.Node):
def __init__(self, nodelist, subcontext_names):
self.nodelist = nodelist
self.subcontext_names = subcontext_names
def render(self, context):
new_context = {}
for field in self.subcontext_names:
value = context.get(field, {})
if isinstance(value, dict):
new_context.update(context.get(field, {}))
else:
new_context[field] = value
new_context = context.new(new_context)
return self.nodelist.render(new_context)
@register.tag('begincontext')
def in_context(parser, token):
"""
Replaces the context (inside of this block) for easy (and safe) inclusion
of sub-content.
For example, if the context is {'name': 'Kitty', 'sub': {'size': 5}}
1: {{ name }} {{ size }}
{% begincontext sub %}
2: {{ name }} {{ size }}
{% endcontext %}
3: {{ name }} {{ size }}
Will print
1: Kitty
2: 5
3: Kitty
Arguments which are not dictionaries will 'cascade' into the inner
context.
"""
nodelist = parser.parse(('endcontext',))
parser.delete_first_token()
return InContextNode(nodelist, token.split_contents()[1:])
|
from django import template
register = template.Library()
class InContextNode(template.Node):
def __init__(self, nodelist, subcontext_names):
self.nodelist = nodelist
self.subcontext_names = subcontext_names
def render(self, context):
new_context = {}
for field in self.subcontext_names:
value = context.get(field, {})
if isinstance(value, dict):
new_context.update(context.get(field, {}))
else:
new_context[field] = value
return self.nodelist.render(template.Context(new_context))
@register.tag('begincontext')
def in_context(parser, token):
"""
Replaces the context (inside of this block) for easy (and safe) inclusion
of sub-content.
For example, if the context is {'name': 'Kitty', 'sub': {'size': 5}}
1: {{ name }} {{ size }}
{% begincontext sub %}
2: {{ name }} {{ size }}
{% endcontext %}
3: {{ name }} {{ size }}
Will print
1: Kitty
2: 5
3: Kitty
Arguments which are not dictionaries will 'cascade' into the inner
context.
"""
nodelist = parser.parse(('endcontext',))
parser.delete_first_token()
return InContextNode(nodelist, token.split_contents()[1:])
Fix custom template tag to work with django 1.8from django import template
register = template.Library()
class InContextNode(template.Node):
def __init__(self, nodelist, subcontext_names):
self.nodelist = nodelist
self.subcontext_names = subcontext_names
def render(self, context):
new_context = {}
for field in self.subcontext_names:
value = context.get(field, {})
if isinstance(value, dict):
new_context.update(context.get(field, {}))
else:
new_context[field] = value
new_context = context.new(new_context)
return self.nodelist.render(new_context)
@register.tag('begincontext')
def in_context(parser, token):
"""
Replaces the context (inside of this block) for easy (and safe) inclusion
of sub-content.
For example, if the context is {'name': 'Kitty', 'sub': {'size': 5}}
1: {{ name }} {{ size }}
{% begincontext sub %}
2: {{ name }} {{ size }}
{% endcontext %}
3: {{ name }} {{ size }}
Will print
1: Kitty
2: 5
3: Kitty
Arguments which are not dictionaries will 'cascade' into the inner
context.
"""
nodelist = parser.parse(('endcontext',))
parser.delete_first_token()
return InContextNode(nodelist, token.split_contents()[1:])
|
<commit_before>from django import template
register = template.Library()
class InContextNode(template.Node):
def __init__(self, nodelist, subcontext_names):
self.nodelist = nodelist
self.subcontext_names = subcontext_names
def render(self, context):
new_context = {}
for field in self.subcontext_names:
value = context.get(field, {})
if isinstance(value, dict):
new_context.update(context.get(field, {}))
else:
new_context[field] = value
return self.nodelist.render(template.Context(new_context))
@register.tag('begincontext')
def in_context(parser, token):
"""
Replaces the context (inside of this block) for easy (and safe) inclusion
of sub-content.
For example, if the context is {'name': 'Kitty', 'sub': {'size': 5}}
1: {{ name }} {{ size }}
{% begincontext sub %}
2: {{ name }} {{ size }}
{% endcontext %}
3: {{ name }} {{ size }}
Will print
1: Kitty
2: 5
3: Kitty
Arguments which are not dictionaries will 'cascade' into the inner
context.
"""
nodelist = parser.parse(('endcontext',))
parser.delete_first_token()
return InContextNode(nodelist, token.split_contents()[1:])
<commit_msg>Fix custom template tag to work with django 1.8<commit_after>from django import template
register = template.Library()
class InContextNode(template.Node):
def __init__(self, nodelist, subcontext_names):
self.nodelist = nodelist
self.subcontext_names = subcontext_names
def render(self, context):
new_context = {}
for field in self.subcontext_names:
value = context.get(field, {})
if isinstance(value, dict):
new_context.update(context.get(field, {}))
else:
new_context[field] = value
new_context = context.new(new_context)
return self.nodelist.render(new_context)
@register.tag('begincontext')
def in_context(parser, token):
"""
Replaces the context (inside of this block) for easy (and safe) inclusion
of sub-content.
For example, if the context is {'name': 'Kitty', 'sub': {'size': 5}}
1: {{ name }} {{ size }}
{% begincontext sub %}
2: {{ name }} {{ size }}
{% endcontext %}
3: {{ name }} {{ size }}
Will print
1: Kitty
2: 5
3: Kitty
Arguments which are not dictionaries will 'cascade' into the inner
context.
"""
nodelist = parser.parse(('endcontext',))
parser.delete_first_token()
return InContextNode(nodelist, token.split_contents()[1:])
|
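The docstring example can be exercised standalone; a sketch assuming Django 1.9+ (for the 'builtins' template option) with the regulations package on sys.path:
import django
from django.conf import settings
settings.configure(TEMPLATES=[{
    'BACKEND': 'django.template.backends.django.DjangoTemplates',
    'OPTIONS': {'builtins': ['regulations.templatetags.in_context']},
}])
django.setup()
from django.template import engines
template = engines['django'].from_string(
    '1: {{ name }} {{ size }}\n'
    '{% begincontext sub %}2: {{ name }} {{ size }}{% endcontext %}\n'
    '3: {{ name }} {{ size }}')
print(template.render({'name': 'Kitty', 'sub': {'size': 5}}))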
f6c21d21964d5211ab9d157ca2eedbcc064cd3bd
|
scripts/generate_hamilton_input_UPL.py
|
scripts/generate_hamilton_input_UPL.py
|
#!/usr/bin/env python
from EPPs.common import StepEPP, step_argparser
from EPPs.config import load_config  # assumed home of load_config, which main() calls below
class GenerateHamiltonInputUPL(StepEPP):
    """Generate a CSV containing the necessary information to batch up to 9 User Prepared Library receipts
    into one DCT plate. Requires input and output plate containers and well positions from LIMS. Volume to be pipetted
    is taken from the step UDF "DNA Volume (uL)"."""
    def _run(self):
        all_inputs = self.process.all_inputs()
        csv_array = []
        csv_column_headers = ['Input Plate', 'Input Well', 'Output Plate', 'Output Well', 'DNA Volume', 'TE Volume']
        csv_array.append(csv_column_headers)
        for input in all_inputs:
            if input.type == 'Analyte':
                outputs = input.outputs_per_input()
                for output in outputs:
                    if output.type == 'Analyte':
                        output_container = output.container.name
                        output_well = output.container.location
                        csv_line = [input.container.name, input.container.location, output_container,
                                    output_well, self.process.udf['DNA Volume (uL)'], '0']
                        csv_array.append(csv_line)
        print(csv_array)
def main():
    p = step_argparser()
    args = p.parse_args()
    load_config()
    action = GenerateHamiltonInputUPL(args.step_uri, args.username, args.password, args.log_file)
    action.run()
if __name__ == '__main__':
    main()
|
Test updated for change to assign_to_workflow_receive_sample.py in last commit.
|
Test updated for change to assign_to_workflow_receive_sample.py in last commit.
|
Python
|
mit
|
EdinburghGenomics/clarity_scripts,EdinburghGenomics/clarity_scripts
|
Test updated for change to assign_to_workflow_receive_sample.py in last commit.
|
#!/usr/bin/env python
from EPPs.common import StepEPP, step_argparser
from EPPs.config import load_config  # load_config() is called in main(); import location assumed
class GenerateHamiltonInputUPL(StepEPP):
    """Generate a CSV containing the necessary information to batch up to 9 User Prepared Library receipts
    into one DCT plate. Requires input and output plate containers and well positions from LIMS. Volume to be pipetted
    is taken from the step UDF "DNA Volume (uL)"."""
    def _run(self):
        all_inputs = self.process.all_inputs()
        csv_array = []
        csv_column_headers = ['Input Plate', 'Input Well', 'Output Plate', 'Output Well', 'DNA Volume', 'TE Volume']
        csv_array.append(csv_column_headers)
        for input in all_inputs:
            if input.type == 'Analyte':
                outputs = input.outputs_per_input()
                for output in outputs:
                    if output.type == 'Analyte':
                        output_container = output.container.name
                        output_well = output.container.location
                        csv_line = [input.container.name, input.container.location, output_container,
                                    output_well, self.process.udf['DNA Volume (uL)'], '0']
                        csv_array.append(csv_line)
        print(csv_array)
def main():
    p = step_argparser()
    args = p.parse_args()
    load_config()
    action = GenerateHamiltonInputUPL(args.step_uri, args.username, args.password, args.log_file)
    action.run()
if __name__ == '__main__':
    main()
|
<commit_before><commit_msg>Test updated for change to assign_to_workflow_receive_sample.py in last commit.<commit_after>
|
#!/usr/bin/env python
from EPPs.common import StepEPP, step_argparser
from EPPs.config import load_config  # load_config() is called in main(); import location assumed
class GenerateHamiltonInputUPL(StepEPP):
    """Generate a CSV containing the necessary information to batch up to 9 User Prepared Library receipts
    into one DCT plate. Requires input and output plate containers and well positions from LIMS. Volume to be pipetted
    is taken from the step UDF "DNA Volume (uL)"."""
    def _run(self):
        all_inputs = self.process.all_inputs()
        csv_array = []
        csv_column_headers = ['Input Plate', 'Input Well', 'Output Plate', 'Output Well', 'DNA Volume', 'TE Volume']
        csv_array.append(csv_column_headers)
        for input in all_inputs:
            if input.type == 'Analyte':
                outputs = input.outputs_per_input()
                for output in outputs:
                    if output.type == 'Analyte':
                        output_container = output.container.name
                        output_well = output.container.location
                        csv_line = [input.container.name, input.container.location, output_container,
                                    output_well, self.process.udf['DNA Volume (uL)'], '0']
                        csv_array.append(csv_line)
        print(csv_array)
def main():
    p = step_argparser()
    args = p.parse_args()
    load_config()
    action = GenerateHamiltonInputUPL(args.step_uri, args.username, args.password, args.log_file)
    action.run()
if __name__ == '__main__':
    main()
|
Test updated for change to assign_to_workflow_receive_sample.py in last commit.#!/usr/bin/env python
from EPPs.common import StepEPP, step_argparser
from EPPs.config import load_config  # load_config() is called in main(); import location assumed
class GenerateHamiltonInputUPL(StepEPP):
    """Generate a CSV containing the necessary information to batch up to 9 User Prepared Library receipts
    into one DCT plate. Requires input and output plate containers and well positions from LIMS. Volume to be pipetted
    is taken from the step UDF "DNA Volume (uL)"."""
    def _run(self):
        all_inputs = self.process.all_inputs()
        csv_array = []
        csv_column_headers = ['Input Plate', 'Input Well', 'Output Plate', 'Output Well', 'DNA Volume', 'TE Volume']
        csv_array.append(csv_column_headers)
        for input in all_inputs:
            if input.type == 'Analyte':
                outputs = input.outputs_per_input()
                for output in outputs:
                    if output.type == 'Analyte':
                        output_container = output.container.name
                        output_well = output.container.location
                        csv_line = [input.container.name, input.container.location, output_container,
                                    output_well, self.process.udf['DNA Volume (uL)'], '0']
                        csv_array.append(csv_line)
        print(csv_array)
def main():
    p = step_argparser()
    args = p.parse_args()
    load_config()
    action = GenerateHamiltonInputUPL(args.step_uri, args.username, args.password, args.log_file)
    action.run()
if __name__ == '__main__':
    main()
|
<commit_before><commit_msg>Test updated for change to assign_to_workflow_receive_sample.py in last commit.<commit_after>#!/usr/bin/env python
from EPPs.common import StepEPP, step_argparser
from EPPs.config import load_config  # load_config() is called in main(); import location assumed
class GenerateHamiltonInputUPL(StepEPP):
    """Generate a CSV containing the necessary information to batch up to 9 User Prepared Library receipts
    into one DCT plate. Requires input and output plate containers and well positions from LIMS. Volume to be pipetted
    is taken from the step UDF "DNA Volume (uL)"."""
    def _run(self):
        all_inputs = self.process.all_inputs()
        csv_array = []
        csv_column_headers = ['Input Plate', 'Input Well', 'Output Plate', 'Output Well', 'DNA Volume', 'TE Volume']
        csv_array.append(csv_column_headers)
        for input in all_inputs:
            if input.type == 'Analyte':
                outputs = input.outputs_per_input()
                for output in outputs:
                    if output.type == 'Analyte':
                        output_container = output.container.name
                        output_well = output.container.location
                        csv_line = [input.container.name, input.container.location, output_container,
                                    output_well, self.process.udf['DNA Volume (uL)'], '0']
                        csv_array.append(csv_line)
        print(csv_array)
def main():
    p = step_argparser()
    args = p.parse_args()
    load_config()
    action = GenerateHamiltonInputUPL(args.step_uri, args.username, args.password, args.log_file)
    action.run()
if __name__ == '__main__':
    main()
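For reference, a hypothetical invocation of the script; the flag names are assumed from the attributes used in main() and are not confirmed here:
# python generate_hamilton_input_UPL.py --step_uri <step-uri> --username <user> \
#        --password <password> --log_file hamilton_upl.log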
|
|
9fe720e01de94a59f090c909b4659d2369cfea25
|
stack/skyline.py
|
stack/skyline.py
|
from collections import deque
def solution(H):
levels = deque()
blocks = 0
for i in xrange(len(H)):
print H[i]
while len(levels) > 0 and levels[-1] > H[i]:
levels.pop()
print 'Going down, H: %s, levels: %s' % (H[i], levels)
blocks += 1
if len(levels) == 0 or levels[-1] < H[i]:
levels.append(H[i])
print 'Going up, H: %s, levels: %s' % (H[i], levels)
return blocks + len(levels)
|
Add min block wall algorithm.
|
Add min block wall algorithm.
|
Python
|
apache-2.0
|
isendel/algorithms
|
Add min block wall algorithm.
|
from collections import deque
def solution(H):
levels = deque()
blocks = 0
for i in xrange(len(H)):
print H[i]
while len(levels) > 0 and levels[-1] > H[i]:
levels.pop()
print 'Going down, H: %s, levels: %s' % (H[i], levels)
blocks += 1
if len(levels) == 0 or levels[-1] < H[i]:
levels.append(H[i])
print 'Going up, H: %s, levels: %s' % (H[i], levels)
return blocks + len(levels)
|
<commit_before><commit_msg>Add min block wall algorithm.<commit_after>
|
from collections import deque
def solution(H):
levels = deque()
blocks = 0
for i in xrange(len(H)):
print H[i]
while len(levels) > 0 and levels[-1] > H[i]:
levels.pop()
print 'Going down, H: %s, levels: %s' % (H[i], levels)
blocks += 1
if len(levels) == 0 or levels[-1] < H[i]:
levels.append(H[i])
print 'Going up, H: %s, levels: %s' % (H[i], levels)
return blocks + len(levels)
|
Add min block wall algorithm.from collections import deque
def solution(H):
levels = deque()
blocks = 0
for i in xrange(len(H)):
print H[i]
while len(levels) > 0 and levels[-1] > H[i]:
levels.pop()
print 'Going down, H: %s, levels: %s' % (H[i], levels)
blocks += 1
if len(levels) == 0 or levels[-1] < H[i]:
levels.append(H[i])
print 'Going up, H: %s, levels: %s' % (H[i], levels)
return blocks + len(levels)
|
<commit_before><commit_msg>Add min block wall algorithm.<commit_after>from collections import deque
def solution(H):
levels = deque()
blocks = 0
for i in xrange(len(H)):
print H[i]
while len(levels) > 0 and levels[-1] > H[i]:
levels.pop()
print 'Going down, H: %s, levels: %s' % (H[i], levels)
blocks += 1
if len(levels) == 0 or levels[-1] < H[i]:
levels.append(H[i])
print 'Going up, H: %s, levels: %s' % (H[i], levels)
return blocks + len(levels)
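A quick sanity check with the classic stone-wall example (Python 2, to match the code above); the debug prints inside solution() will also fire:
print solution([8, 8, 5, 7, 9, 8, 7, 4, 8])  # -> 7 blocks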
|
|
ea17b8c990842290f4553374a51ebc223cf363bd
|
mnist_8by8_classifier.py
|
mnist_8by8_classifier.py
|
from __future__ import print_function
import time
import numpy as np
from sklearn.cross_validation import train_test_split
from sklearn.datasets import load_digits
from sklearn.metrics import confusion_matrix, classification_report
from sklearn.preprocessing import LabelBinarizer
from ann import ANN
# import the simplified mnist dataset from scikit-learn
digits = load_digits()
# get the input vectors (X is a vector of vectors of type int)
X = digits.data
# get the output vector ( y is a vector of type int)
y = digits.target
print("X.shape", X.shape)
print("y.shape", y.shape)
# normalize input into [0, 1]
X -= X.min()
X /= X.max()
# split data into training and testing 75% of examples are used for training and 25% are used for testing
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25)
# binarize the labels from a number into a vector with a 1 at that index
# ex: label 4 -> binarized [0 0 0 0 1 0 0 0 0 0]
# ex: label 7 -> binarized [0 0 0 0 0 0 0 1 0 0]
labels_train = LabelBinarizer().fit_transform(y_train)
labels_test = LabelBinarizer().fit_transform(y_test)
# convert from numpy to normal python list for our simple implementation
X_train_l = X_train.tolist()
labels_train_l = labels_train.tolist()
# create the artificial network with:
# 1 input layer of size 64 (the images are 8x8 gray pixels)
# 1 hidden layer of size 100
# 1 output layer of size 10 (the labels of digits are 0 to 9)
nn = ANN([64, 100, 10])
# see how long training takes
startTime = time.time()
# train it
nn.train(10, X_train_l, labels_train_l)
elapsedTime = time.time() - startTime
print("Training took {} seconds", int(elapsedTime))
# compute the predictions
predictions = []
for i in range(X_test.shape[0]):
o = nn.predict(X_test[i])
# the inverse of the binarization would be taking the maximum argument index
# ex: [.1 .1 .1 .1 .9 .1 .1 .1 .1 .1] -> 4
# ex: [.1 .1 .1 .1 .1 .1 .1 .9 .1 .1] -> 7
predictions.append(np.argmax(o))
# compute a confusion matrix
print("confusion matrix")
print(confusion_matrix(y_test, predictions))
# show a classification report
print("classification report")
print(classification_report(y_test, predictions))
# 94%-97% precision 94-97% recall
|
Add mnist 8by8 classifier with scikit dataset
|
Add mnist 8by8 classifier with scikit dataset
|
Python
|
mit
|
Razvy000/ANN-Intro
|
Add mnist 8by8 classifier with scikit dataset
|
from __future__ import print_function
import time
import numpy as np
from sklearn.cross_validation import train_test_split
from sklearn.datasets import load_digits
from sklearn.metrics import confusion_matrix, classification_report
from sklearn.preprocessing import LabelBinarizer
from ann import ANN
# import the simplified mnist dataset from scikit-learn
digits = load_digits()
# get the input vectors (X is a vector of vectors of type int)
X = digits.data
# get the output vector ( y is a vector of type int)
y = digits.target
print("X.shape", X.shape)
print("y.shape", y.shape)
# normalize input into [0, 1]
X -= X.min()
X /= X.max()
# split data into training and testing 75% of examples are used for training and 25% are used for testing
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25)
# binarize the labels from a number into a vector with a 1 at that index
# ex: label 4 -> binarized [0 0 0 0 1 0 0 0 0 0]
# ex: label 7 -> binarized [0 0 0 0 0 0 0 1 0 0]
labels_train = LabelBinarizer().fit_transform(y_train)
labels_test = LabelBinarizer().fit_transform(y_test)
# convert from numpy to normal python list for our simple implementation
X_train_l = X_train.tolist()
labels_train_l = labels_train.tolist()
# create the artificial network with:
# 1 input layer of size 64 (the images are 8x8 gray pixels)
# 1 hidden layer of size 100
# 1 output layer of size 10 (the labels of digits are 0 to 9)
nn = ANN([64, 100, 10])
# see how long training takes
startTime = time.time()
# train it
nn.train(10, X_train_l, labels_train_l)
elapsedTime = time.time() - startTime
print("Training took {} seconds", int(elapsedTime))
# compute the predictions
predictions = []
for i in range(X_test.shape[0]):
o = nn.predict(X_test[i])
# the inverse of the binarization would be taking the maximum argument index
# ex: [.1 .1 .1 .1 .9 .1 .1 .1 .1 .1] -> 4
# ex: [.1 .1 .1 .1 .1 .1 .1 .9 .1 .1] -> 7
predictions.append(np.argmax(o))
# compute a confusion matrix
print("confusion matrix")
print(confusion_matrix(y_test, predictions))
# show a classification report
print("classification report")
print(classification_report(y_test, predictions))
# 94%-97% precision 94-97% recall
|
<commit_before><commit_msg>Add mnist 8by8 classifier with scikit dataset<commit_after>
|
from __future__ import print_function
import time
import numpy as np
from sklearn.cross_validation import train_test_split
from sklearn.datasets import load_digits
from sklearn.metrics import confusion_matrix, classification_report
from sklearn.preprocessing import LabelBinarizer
from ann import ANN
# import the simplified mnist dataset from scikit-learn
digits = load_digits()
# get the input vectors (X is a vector of vectors of type int)
X = digits.data
# get the output vector ( y is a vector of type int)
y = digits.target
print("X.shape", X.shape)
print("y.shape", y.shape)
# normalize input into [0, 1]
X -= X.min()
X /= X.max()
# split data into training and testing 75% of examples are used for training and 25% are used for testing
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25)
# binarize the labels from a number into a vector with a 1 at that index
# ex: label 4 -> binarized [0 0 0 0 1 0 0 0 0 0]
# ex: label 7 -> binarized [0 0 0 0 0 0 0 1 0 0]
labels_train = LabelBinarizer().fit_transform(y_train)
labels_test = LabelBinarizer().fit_transform(y_test)
# convert from numpy to normal python list for our simple implementation
X_train_l = X_train.tolist()
labels_train_l = labels_train.tolist()
# create the artificial network with:
# 1 input layer of size 64 (the images are 8x8 gray pixels)
# 1 hidden layer of size 100
# 1 output layer of size 10 (the labels of digits are 0 to 9)
nn = ANN([64, 100, 10])
# see how long training takes
startTime = time.time()
# train it
nn.train(10, X_train_l, labels_train_l)
elapsedTime = time.time() - startTime
print("Training took {} seconds", int(elapsedTime))
# compute the predictions
predictions = []
for i in range(X_test.shape[0]):
o = nn.predict(X_test[i])
# the inverse of the binarization would be taking the maximum argument index
# ex: [.1 .1 .1 .1 .9 .1 .1 .1 .1 .1] -> 4
# ex: [.1 .1 .1 .1 .1 .1 .1 .9 .1 .1] -> 7
predictions.append(np.argmax(o))
# compute a confusion matrix
print("confusion matrix")
print(confusion_matrix(y_test, predictions))
# show a classification report
print("classification report")
print(classification_report(y_test, predictions))
# 94%-97% precision 94-97% recall
|
Add mnist 8by8 classifier with scikit datasetfrom __future__ import print_function
import time
import numpy as np
from sklearn.cross_validation import train_test_split
from sklearn.datasets import load_digits
from sklearn.metrics import confusion_matrix, classification_report
from sklearn.preprocessing import LabelBinarizer
from ann import ANN
# import the simplified mnist dataset from scikit-learn
digits = load_digits()
# get the input vectors (X is a vector of vectors of type int)
X = digits.data
# get the output vector ( y is a vector of type int)
y = digits.target
print("X.shape", X.shape)
print("y.shape", y.shape)
# normalize input into [0, 1]
X -= X.min()
X /= X.max()
# split data into training and testing 75% of examples are used for training and 25% are used for testing
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25)
# binarize the labels from a number into a vector with a 1 at that index
# ex: label 4 -> binarized [0 0 0 0 1 0 0 0 0 0]
# ex: label 7 -> binarized [0 0 0 0 0 0 0 1 0 0]
labels_train = LabelBinarizer().fit_transform(y_train)
labels_test = LabelBinarizer().fit_transform(y_test)
# convert from numpy to normal python list for our simple implementation
X_train_l = X_train.tolist()
labels_train_l = labels_train.tolist()
# create the artificial network with:
# 1 input layer of size 64 (the images are 8x8 gray pixels)
# 1 hidden layer of size 100
# 1 output layer of size 10 (the labels of digits are 0 to 9)
nn = ANN([64, 100, 10])
# see how long training takes
startTime = time.time()
# train it
nn.train(10, X_train_l, labels_train_l)
elapsedTime = time.time() - startTime
print("Training took {} seconds", int(elapsedTime))
# compute the predictions
predictions = []
for i in range(X_test.shape[0]):
o = nn.predict(X_test[i])
# the inverse of the binarization would be taking the maximum argument index
# ex: [.1 .1 .1 .1 .9 .1 .1 .1 .1 .1] -> 4
# ex: [.1 .1 .1 .1 .1 .1 .1 .9 .1 .1] -> 7
predictions.append(np.argmax(o))
# compute a confusion matrix
print("confusion matrix")
print(confusion_matrix(y_test, predictions))
# show a classification report
print("classification report")
print(classification_report(y_test, predictions))
# 94%-97% precision 94-97% recall
|
<commit_before><commit_msg>Add mnist 8by8 classifier with scikit dataset<commit_after>from __future__ import print_function
import time
import numpy as np
from sklearn.cross_validation import train_test_split
from sklearn.datasets import load_digits
from sklearn.metrics import confusion_matrix, classification_report
from sklearn.preprocessing import LabelBinarizer
from ann import ANN
# import the simplified mnist dataset from scikit-learn
digits = load_digits()
# get the input vectors (X is a vector of vectors of type int)
X = digits.data
# get the output vector ( y is a vector of type int)
y = digits.target
print("X.shape", X.shape)
print("y.shape", y.shape)
# normalize input into [0, 1]
X -= X.min()
X /= X.max()
# split data into training and testing 75% of examples are used for training and 25% are used for testing
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25)
# binarize the labels from a number into a vector with a 1 at that index
# ex: label 4 -> binarized [0 0 0 0 1 0 0 0 0 0]
# ex: label 7 -> binarized [0 0 0 0 0 0 0 1 0 0]
labels_train = LabelBinarizer().fit_transform(y_train)
labels_test = LabelBinarizer().fit_transform(y_test)
# convert from numpy to normal python list for our simple implementation
X_train_l = X_train.tolist()
labels_train_l = labels_train.tolist()
# create the artificial network with:
# 1 input layer of size 64 (the images are 8x8 gray pixels)
# 1 hidden layer of size 100
# 1 output layer of size 10 (the labels of digits are 0 to 9)
nn = ANN([64, 100, 10])
# see how long training takes
startTime = time.time()
# train it
nn.train(10, X_train_l, labels_train_l)
elapsedTime = time.time() - startTime
print("Training took {} seconds", int(elapsedTime))
# compute the predictions
predictions = []
for i in range(X_test.shape[0]):
o = nn.predict(X_test[i])
# the inverse of the binarization would be taking the maximum argument index
# ex: [.1 .1 .1 .1 .9 .1 .1 .1 .1 .1] -> 4
# ex: [.1 .1 .1 .1 .1 .1 .1 .9 .1 .1] -> 7
predictions.append(np.argmax(o))
# compute a confusion matrix
print("confusion matrix")
print(confusion_matrix(y_test, predictions))
# show a classification report
print("classification report")
print(classification_report(y_test, predictions))
# 94%-97% precision 94-97% recall
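As a small aside, the binarize/argmax round trip used above can be checked in isolation (digit classes 0-9 assumed):
from sklearn.preprocessing import LabelBinarizer
import numpy as np
lb = LabelBinarizer()
lb.fit(range(10))
vec = lb.transform([4])[0]
print(vec)             # [0 0 0 0 1 0 0 0 0 0]
print(np.argmax(vec))  # 4 - argmax inverts the binarization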
|
|
bb79f86e4eeadaf8a93ccceaee2936e248fea99e
|
server/removed_expired_data.py
|
server/removed_expired_data.py
|
#!/usr/bin/env python
# Standard Library
import os
import sqlite3
import sys
import ConfigParser as configparser
from datetime import datetime
# Third party
from dateutil.relativedelta import relativedelta
config = configparser.RawConfigParser()
if not config.read([os.path.expanduser('~/.ppupload.conf') or 'ppupload.conf', '/etc/ppupload.conf']):
sys.exit("Couldn't read configuration file")
# connect to sqlite and check if file exists
conn = sqlite3.connect(config.get('settings', 'database_path'))
c = conn.cursor()
c.execute("select hash, expire, one_time_download, filename, download_password from files where expire !=0")
for entry in c.fetchall():
download_hash, expire, one_time_download, filename, download_password = entry
expire = datetime.strptime(expire, '%Y%m%d%H%M')
if datetime.now() > expire:
try:
os.remove(os.path.join(config.get('settings', 'storage_path'), download_hash, filename))
os.removedirs(os.path.join(config.get('settings', 'storage_path'), download_hash))
except OSError as e:
print 'Failed to remove files %s' % e
c.execute("delete from files where hash=?", (download_hash,))
conn.commit()
c.close()
|
Remove expired files from database and storage.
|
Remove expired files from database and storage.
|
Python
|
bsd-3-clause
|
jhaals/filebutler-upload,jhaals/python-filebutler
|
Remove expired files from database and storage.
|
#!/usr/bin/env python
# Standard Library
import os
import sqlite3
import sys
import ConfigParser as configparser
from datetime import datetime
# Third party
from dateutil.relativedelta import relativedelta
config = configparser.RawConfigParser()
if not config.read([os.path.expanduser('~/.ppupload.conf') or 'ppupload.conf', '/etc/ppupload.conf']):
sys.exit("Couldn't read configuration file")
# connect to sqlite and check if file exists
conn = sqlite3.connect(config.get('settings', 'database_path'))
c = conn.cursor()
c.execute("select hash, expire, one_time_download, filename, download_password from files where expire !=0")
for entry in c.fetchall():
download_hash, expire, one_time_download, filename, download_password = entry
expire = datetime.strptime(expire, '%Y%m%d%H%M')
if datetime.now() > expire:
try:
os.remove(os.path.join(config.get('settings', 'storage_path'), download_hash, filename))
os.removedirs(os.path.join(config.get('settings', 'storage_path'), download_hash))
except OSError as e:
print 'Failed to remove files %s' % e
c.execute("delete from files where hash=?", (download_hash,))
conn.commit()
c.close()
|
<commit_before><commit_msg>Remove expired files from database and storage.<commit_after>
|
#!/usr/bin/env python
# Standard Library
import os
import sqlite3
import sys
import ConfigParser as configparser
from datetime import datetime
# Third party
from dateutil.relativedelta import relativedelta
config = configparser.RawConfigParser()
if not config.read([os.path.expanduser('~/.ppupload.conf') or 'ppupload.conf', '/etc/ppupload.conf']):
sys.exit("Couldn't read configuration file")
# connect to sqlite and check if file exists
conn = sqlite3.connect(config.get('settings', 'database_path'))
c = conn.cursor()
c.execute("select hash, expire, one_time_download, filename, download_password from files where expire !=0")
for entry in c.fetchall():
download_hash, expire, one_time_download, filename, download_password = entry
expire = datetime.strptime(expire, '%Y%m%d%H%M')
if datetime.now() > expire:
try:
os.remove(os.path.join(config.get('settings', 'storage_path'), download_hash, filename))
os.removedirs(os.path.join(config.get('settings', 'storage_path'), download_hash))
except OSError as e:
print 'Failed to remove files %s' % e
c.execute("delete from files where hash=?", (download_hash,))
conn.commit()
c.close()
|
Remove expired files from database and storage.#!/usr/bin/env python
# Standard Library
import os
import sqlite3
import sys
import ConfigParser as configparser
from datetime import datetime
# Third party
from dateutil.relativedelta import relativedelta
config = configparser.RawConfigParser()
if not config.read([os.path.expanduser('~/.ppupload.conf') or 'ppupload.conf', '/etc/ppupload.conf']):
sys.exit("Couldn't read configuration file")
# connect to sqlite and check if file exists
conn = sqlite3.connect(config.get('settings', 'database_path'))
c = conn.cursor()
c.execute("select hash, expire, one_time_download, filename, download_password from files where expire !=0")
for entry in c.fetchall():
download_hash, expire, one_time_download, filename, download_password = entry
expire = datetime.strptime(expire, '%Y%m%d%H%M')
if datetime.now() > expire:
try:
os.remove(os.path.join(config.get('settings', 'storage_path'), download_hash, filename))
os.removedirs(os.path.join(config.get('settings', 'storage_path'), download_hash))
except OSError as e:
print 'Failed to remove files %s' % e
c.execute("delete from files where hash=?", (download_hash,))
conn.commit()
c.close()
|
<commit_before><commit_msg>Remove expired files from database and storage.<commit_after>#!/usr/bin/env python
# Standard Library
import os
import sqlite3
import sys
import ConfigParser as configparser
from datetime import datetime
# Third party
from dateutil.relativedelta import relativedelta
config = configparser.RawConfigParser()
if not config.read([os.path.expanduser('~/.ppupload.conf') or 'ppupload.conf', '/etc/ppupload.conf']):
sys.exit("Couldn't read configuration file")
# connect to sqlite and check if file exists
conn = sqlite3.connect(config.get('settings', 'database_path'))
c = conn.cursor()
c.execute("select hash, expire, one_time_download, filename, download_password from files where expire !=0")
for entry in c.fetchall():
download_hash, expire, one_time_download, filename, download_password = entry
expire = datetime.strptime(expire, '%Y%m%d%H%M')
if datetime.now() > expire:
try:
os.remove(os.path.join(config.get('settings', 'storage_path'), download_hash, filename))
os.removedirs(os.path.join(config.get('settings', 'storage_path'), download_hash))
except OSError as e:
print 'Failed to remove files %s' % e
c.execute("delete from files where hash=?", (download_hash,))
conn.commit()
c.close()
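A hypothetical way to schedule the cleanup; the interpreter and install paths below are placeholders, not taken from this repository:
# Example crontab entry, running the script every 10 minutes:
#   */10 * * * * /usr/bin/python /opt/filebutler/server/removed_expired_data.py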
|
|
9600746e27e8cd10cdb9ede05d1b341be903597f
|
gevent_tasks/utils.py
|
gevent_tasks/utils.py
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# >>
# gevent-tasks, 2017
# <<
import random
import string
ch_choices = string.ascii_letters + string.digits
def gen_uuid(length=4):
# type: (int) -> str
""" Generate a random ID of a given length. """
return ''.join(map(lambda c: random.choice(ch_choices), range(length)))
|
Add simple uuid generator based on length and ASCII chars
|
Add simple uuid generator based on length and ASCII chars
|
Python
|
mit
|
blakev/gevent-tasks
|
Add simple uuid generator based on length and ASCII chars
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# >>
# gevent-tasks, 2017
# <<
import random
import string
ch_choices = string.ascii_letters + string.digits
def gen_uuid(length=4):
# type: (int) -> str
""" Generate a random ID of a given length. """
return ''.join(map(lambda c: random.choice(ch_choices), range(length)))
|
<commit_before><commit_msg>Add simple uuid generator based on length and ASCII chars<commit_after>
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# >>
# gevent-tasks, 2017
# <<
import random
import string
ch_choices = string.ascii_letters + string.digits
def gen_uuid(length=4):
# type: (int) -> str
""" Generate a random ID of a given length. """
return ''.join(map(lambda c: random.choice(ch_choices), range(length)))
|
Add simple uuid generator based on length and ASCII chars#! /usr/bin/env python
# -*- coding: utf-8 -*-
# >>
# gevent-tasks, 2017
# <<
import random
import string
ch_choices = string.ascii_letters + string.digits
def gen_uuid(length=4):
# type: (int) -> str
""" Generate a random ID of a given length. """
return ''.join(map(lambda c: random.choice(ch_choices), range(length)))
|
<commit_before><commit_msg>Add simple uuid generator based on length and ASCII chars<commit_after>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# >>
# gevent-tasks, 2017
# <<
import random
import string
ch_choices = string.ascii_letters + string.digits
def gen_uuid(length=4):
# type: (int) -> str
""" Generate a random ID of a given length. """
return ''.join(map(lambda c: random.choice(ch_choices), range(length)))
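Example output (the IDs are random, so the values below are only indicative):
print(gen_uuid())   # e.g. 'aZ3k'
print(gen_uuid(8))  # e.g. 'Q1xw9bLm'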
|
|
7c2548f7f4cf01d0a5cf389c290a47cdf029a7ac
|
apps/explorer/tests/views/test_mixins.py
|
apps/explorer/tests/views/test_mixins.py
|
import pytest
from django.test import TestCase
from apps.explorer.views.mixins import DataTableMixin, SubsetSelectionMixin
class DataTableMixinTestCase(TestCase):
def test_get_omics_units_must_be_implemented(self):
class DataTableWithNoGetOmicsUnits(DataTableMixin):
pass
with pytest.raises(NotImplementedError):
fake_session = dict()
obj = DataTableWithNoGetOmicsUnits()
obj.get_omics_units(fake_session)
class SubsetSelectionMixinTestCase(TestCase):
def test_get_omics_units_must_be_implemented(self):
class SubsetSelectionWithNoGetOmicsUnits(SubsetSelectionMixin):
pass
with pytest.raises(NotImplementedError):
fake_session = dict()
obj = SubsetSelectionWithNoGetOmicsUnits()
obj.get_omics_units(fake_session)
|
Add tests for explorer views mixins
|
Add tests for explorer views mixins
|
Python
|
bsd-3-clause
|
Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel
|
Add tests for explorer views mixins
|
import pytest
from django.test import TestCase
from apps.explorer.views.mixins import DataTableMixin, SubsetSelectionMixin
class DataTableMixinTestCase(TestCase):
def test_get_omics_units_must_be_implemented(self):
class DataTableWithNoGetOmicsUnits(DataTableMixin):
pass
with pytest.raises(NotImplementedError):
fake_session = dict()
obj = DataTableWithNoGetOmicsUnits()
obj.get_omics_units(fake_session)
class SubsetSelectionMixinTestCase(TestCase):
def test_get_omics_units_must_be_implemented(self):
class SubsetSelectionWithNoGetOmicsUnits(SubsetSelectionMixin):
pass
with pytest.raises(NotImplementedError):
fake_session = dict()
obj = SubsetSelectionWithNoGetOmicsUnits()
obj.get_omics_units(fake_session)
|
<commit_before><commit_msg>Add tests for explorer views mixins<commit_after>
|
import pytest
from django.test import TestCase
from apps.explorer.views.mixins import DataTableMixin, SubsetSelectionMixin
class DataTableMixinTestCase(TestCase):
def test_get_omics_units_must_be_implemented(self):
class DataTableWithNoGetOmicsUnits(DataTableMixin):
pass
with pytest.raises(NotImplementedError):
fake_session = dict()
obj = DataTableWithNoGetOmicsUnits()
obj.get_omics_units(fake_session)
class SubsetSelectionMixinTestCase(TestCase):
def test_get_omics_units_must_be_implemented(self):
class SubsetSelectionWithNoGetOmicsUnits(SubsetSelectionMixin):
pass
with pytest.raises(NotImplementedError):
fake_session = dict()
obj = SubsetSelectionWithNoGetOmicsUnits()
obj.get_omics_units(fake_session)
|
Add tests for explorer views mixinsimport pytest
from django.test import TestCase
from apps.explorer.views.mixins import DataTableMixin, SubsetSelectionMixin
class DataTableMixinTestCase(TestCase):
def test_get_omics_units_must_be_implemented(self):
class DataTableWithNoGetOmicsUnits(DataTableMixin):
pass
with pytest.raises(NotImplementedError):
fake_session = dict()
obj = DataTableWithNoGetOmicsUnits()
obj.get_omics_units(fake_session)
class SubsetSelectionMixinTestCase(TestCase):
def test_get_omics_units_must_be_implemented(self):
class SubsetSelectionWithNoGetOmicsUnits(SubsetSelectionMixin):
pass
with pytest.raises(NotImplementedError):
fake_session = dict()
obj = SubsetSelectionWithNoGetOmicsUnits()
obj.get_omics_units(fake_session)
|
<commit_before><commit_msg>Add tests for explorer views mixins<commit_after>import pytest
from django.test import TestCase
from apps.explorer.views.mixins import DataTableMixin, SubsetSelectionMixin
class DataTableMixinTestCase(TestCase):
def test_get_omics_units_must_be_implemented(self):
class DataTableWithNoGetOmicsUnits(DataTableMixin):
pass
with pytest.raises(NotImplementedError):
fake_session = dict()
obj = DataTableWithNoGetOmicsUnits()
obj.get_omics_units(fake_session)
class SubsetSelectionMixinTestCase(TestCase):
def test_get_omics_units_must_be_implemented(self):
class SubsetSelectionWithNoGetOmicsUnits(SubsetSelectionMixin):
pass
with pytest.raises(NotImplementedError):
fake_session = dict()
obj = SubsetSelectionWithNoGetOmicsUnits()
obj.get_omics_units(fake_session)
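For contrast, a sketch of a subclass that satisfies the mixin contract; the method body is illustrative only:
class DataTableWithGetOmicsUnits(DataTableMixin):
    def get_omics_units(self, session):
        return session.get('omics_units', [])
print(DataTableWithGetOmicsUnits().get_omics_units({}))  # [] - no NotImplementedError raised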
|
|
988481094bd34842e3ec186ec5c7daaff9663591
|
examples/dots.py
|
examples/dots.py
|
from threading import Thread
from time import sleep
from nuimo import Controller, ControllerManager, ControllerListener, LedMatrix
MAC_ADDRESS = "c4:d7:54:71:e2:ce"
class NuimoListener(ControllerListener):
def __init__(self, controller):
self.controller = controller
self.stopping = False
self.t = Thread(target=self.show_dots)
def connect_succeeded(self):
self.t.start()
def show_dots(self):
num_dots = 1
while not self.stopping:
sleep(0.5)
s = "{:<81}".format("*" * num_dots)
self.controller.display_matrix(LedMatrix(s), interval=3.0, brightness=1.0, fading=True)
num_dots += 1
if num_dots > 81:
num_dots = 1
def stop(self):
self.controller.disconnect()
self.stopping = True
controller = Controller(adapter_name="hci0", mac_address=MAC_ADDRESS)
listener = NuimoListener(controller)
controller.listener = listener
controller.connect()
manager = ControllerManager()
try:
manager.run()
except KeyboardInterrupt:
print("Stopping...")
listener.stop()
manager.stop()
|
Add an example how to write to the LED matrix from another thread
|
Add an example how to write to the LED matrix from another thread
|
Python
|
mit
|
getsenic/nuimo-linux-python
|
Add an example how to write to the LED matrix from another thread
|
from threading import Thread
from time import sleep
from nuimo import Controller, ControllerManager, ControllerListener, LedMatrix
MAC_ADDRESS = "c4:d7:54:71:e2:ce"
class NuimoListener(ControllerListener):
def __init__(self, controller):
self.controller = controller
self.stopping = False
self.t = Thread(target=self.show_dots)
def connect_succeeded(self):
self.t.start()
def show_dots(self):
num_dots = 1
while not self.stopping:
sleep(0.5)
s = "{:<81}".format("*" * num_dots)
self.controller.display_matrix(LedMatrix(s), interval=3.0, brightness=1.0, fading=True)
num_dots += 1
if num_dots > 81:
num_dots = 1
def stop(self):
self.controller.disconnect()
self.stopping = True
controller = Controller(adapter_name="hci0", mac_address=MAC_ADDRESS)
listener = NuimoListener(controller)
controller.listener = listener
controller.connect()
manager = ControllerManager()
try:
manager.run()
except KeyboardInterrupt:
print("Stopping...")
listener.stop()
manager.stop()
|
<commit_before><commit_msg>Add an example how to write to the LED matrix from another thread<commit_after>
|
from threading import Thread
from time import sleep
from nuimo import Controller, ControllerManager, ControllerListener, LedMatrix
MAC_ADDRESS = "c4:d7:54:71:e2:ce"
class NuimoListener(ControllerListener):
def __init__(self, controller):
self.controller = controller
self.stopping = False
self.t = Thread(target=self.show_dots)
def connect_succeeded(self):
self.t.start()
def show_dots(self):
num_dots = 1
while not self.stopping:
sleep(0.5)
s = "{:<81}".format("*" * num_dots)
self.controller.display_matrix(LedMatrix(s), interval=3.0, brightness=1.0, fading=True)
num_dots += 1
if num_dots > 81:
num_dots = 1
def stop(self):
self.controller.disconnect()
self.stopping = True
controller = Controller(adapter_name="hci0", mac_address=MAC_ADDRESS)
listener = NuimoListener(controller)
controller.listener = listener
controller.connect()
manager = ControllerManager()
try:
manager.run()
except KeyboardInterrupt:
print("Stopping...")
listener.stop()
manager.stop()
|
Add an example how to write to the LED matrix from another threadfrom threading import Thread
from time import sleep
from nuimo import Controller, ControllerManager, ControllerListener, LedMatrix
MAC_ADDRESS = "c4:d7:54:71:e2:ce"
class NuimoListener(ControllerListener):
def __init__(self, controller):
self.controller = controller
self.stopping = False
self.t = Thread(target=self.show_dots)
def connect_succeeded(self):
self.t.start()
def show_dots(self):
num_dots = 1
while not self.stopping:
sleep(0.5)
s = "{:<81}".format("*" * num_dots)
self.controller.display_matrix(LedMatrix(s), interval=3.0, brightness=1.0, fading=True)
num_dots += 1
if num_dots > 81:
num_dots = 1
def stop(self):
self.controller.disconnect()
self.stopping = True
controller = Controller(adapter_name="hci0", mac_address=MAC_ADDRESS)
listener = NuimoListener(controller)
controller.listener = listener
controller.connect()
manager = ControllerManager()
try:
manager.run()
except KeyboardInterrupt:
print("Stopping...")
listener.stop()
manager.stop()
|
<commit_before><commit_msg>Add an example how to write to the LED matrix from another thread<commit_after>from threading import Thread
from time import sleep
from nuimo import Controller, ControllerManager, ControllerListener, LedMatrix
MAC_ADDRESS = "c4:d7:54:71:e2:ce"
class NuimoListener(ControllerListener):
def __init__(self, controller):
self.controller = controller
self.stopping = False
self.t = Thread(target=self.show_dots)
def connect_succeeded(self):
self.t.start()
def show_dots(self):
num_dots = 1
while not self.stopping:
sleep(0.5)
s = "{:<81}".format("*" * num_dots)
self.controller.display_matrix(LedMatrix(s), interval=3.0, brightness=1.0, fading=True)
num_dots += 1
if num_dots > 81:
num_dots = 1
def stop(self):
self.controller.disconnect()
self.stopping = True
controller = Controller(adapter_name="hci0", mac_address=MAC_ADDRESS)
listener = NuimoListener(controller)
controller.listener = listener
controller.connect()
manager = ControllerManager()
try:
manager.run()
except KeyboardInterrupt:
print("Stopping...")
listener.stop()
manager.stop()
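The 81-character padding above corresponds to the 9x9 Nuimo LED grid; a minimal frame sketch:
frame = "{:<81}".format("***")  # three lit LEDs in the top-left corner of the matrix
print(len(frame))               # 81 characters = 9 x 9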
|
|
5c167d35ea341e8ea33596f4174e16b9aafa041a
|
py/perfect-number.py
|
py/perfect-number.py
|
class Solution(object):
def checkPerfectNumber(self, num):
"""
:type num: int
:rtype: bool
"""
MersennePrime = [3, 7, 31, 127, 8191, 131071, 524287]
MPp = [2, 3, 5, 7, 13, 17, 19]
perfectnumbers = set(map(lambda a, b:a * (1 << (b - 1)), MersennePrime, MPp))
return num in perfectnumbers
|
Add py solution for 507. Perfect Number
|
Add py solution for 507. Perfect Number
507. Perfect Number: https://leetcode.com/problems/perfect-number/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 507. Perfect Number
507. Perfect Number: https://leetcode.com/problems/perfect-number/
|
class Solution(object):
def checkPerfectNumber(self, num):
"""
:type num: int
:rtype: bool
"""
MersennePrime = [3, 7, 31, 127, 8191, 131071, 524287]
MPp = [2, 3, 5, 7, 13, 17, 19]
perfectnumbers = set(map(lambda a, b:a * (1 << (b - 1)), MersennePrime, MPp))
return num in perfectnumbers
|
<commit_before><commit_msg>Add py solution for 507. Perfect Number
507. Perfect Number: https://leetcode.com/problems/perfect-number/<commit_after>
|
class Solution(object):
def checkPerfectNumber(self, num):
"""
:type num: int
:rtype: bool
"""
MersennePrime = [3, 7, 31, 127, 8191, 131071, 524287]
MPp = [2, 3, 5, 7, 13, 17, 19]
perfectnumbers = set(map(lambda a, b:a * (1 << (b - 1)), MersennePrime, MPp))
return num in perfectnumbers
|
Add py solution for 507. Perfect Number
507. Perfect Number: https://leetcode.com/problems/perfect-number/class Solution(object):
def checkPerfectNumber(self, num):
"""
:type num: int
:rtype: bool
"""
MersennePrime = [3, 7, 31, 127, 8191, 131071, 524287]
MPp = [2, 3, 5, 7, 13, 17, 19]
perfectnumbers = set(map(lambda a, b:a * (1 << (b - 1)), MersennePrime, MPp))
return num in perfectnumbers
|
<commit_before><commit_msg>Add py solution for 507. Perfect Number
507. Perfect Number: https://leetcode.com/problems/perfect-number/<commit_after>class Solution(object):
def checkPerfectNumber(self, num):
"""
:type num: int
:rtype: bool
"""
MersennePrime = [3, 7, 31, 127, 8191, 131071, 524287]
MPp = [2, 3, 5, 7, 13, 17, 19]
perfectnumbers = set(map(lambda a, b:a * (1 << (b - 1)), MersennePrime, MPp))
return num in perfectnumbers
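Quick usage check (28 = 2**(3-1) * (2**3 - 1) is perfect, 100 is not):
s = Solution()
print(s.checkPerfectNumber(28))   # True
print(s.checkPerfectNumber(100))  # False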
|
|
70b245f321bf542111929a1f1ba5460c46c067fc
|
python/gameOfLife.py
|
python/gameOfLife.py
|
# https://leetcode.com/problems/game-of-life/
class Solution(object):
def gameOfLife(self, board):
"""
:type board: List[List[int]]
:rtype: void Do not return anything, modify board in-place instead.
"""
dx = (-1, -1, -1, 0, 1, 1, 1, 0)
dy = (-1, 0, 1, 1, 1, 0, -1, -1)
for x in xrange(len(board)):
for y in xrange(len(board[0])):
lives = 0
for z in xrange(8):
lives += self.status(board, x+dx[z], y+dy[z])
print lives
if lives == 3 or board[x][y] + lives == 3:
board[x][y] |= 2
for x in xrange(len(board)):
for y in xrange(len(board[0])):
board[x][y] >>= 1
def status(self, board, x, y):
if x < 0 or y < 0 or x >= len(board) or y >= len(board[0]):
return 0
return board[x][y] & 1
board = [
[1, 1],
[1, 0],
]
s = Solution()
s.gameOfLife(board)
print board
|
Add problem Game Of Life
|
Add problem Game Of Life
|
Python
|
mit
|
guozengxin/myleetcode,guozengxin/myleetcode
|
Add problem Game Of Life
|
# https://leetcode.com/problems/game-of-life/
class Solution(object):
def gameOfLife(self, board):
"""
:type board: List[List[int]]
:rtype: void Do not return anything, modify board in-place instead.
"""
dx = (-1, -1, -1, 0, 1, 1, 1, 0)
dy = (-1, 0, 1, 1, 1, 0, -1, -1)
for x in xrange(len(board)):
for y in xrange(len(board[0])):
lives = 0
for z in xrange(8):
lives += self.status(board, x+dx[z], y+dy[z])
print lives
if lives == 3 or board[x][y] + lives == 3:
board[x][y] |= 2
for x in xrange(len(board)):
for y in xrange(len(board[0])):
board[x][y] >>= 1
def status(self, board, x, y):
if x < 0 or y < 0 or x >= len(board) or y >= len(board[0]):
return 0
return board[x][y] & 1
board = [
[1, 1],
[1, 0],
]
s = Solution()
s.gameOfLife(board)
print board
|
<commit_before><commit_msg>Add problem Game Of Life<commit_after>
|
# https://leetcode.com/problems/game-of-life/
class Solution(object):
def gameOfLife(self, board):
"""
:type board: List[List[int]]
:rtype: void Do not return anything, modify board in-place instead.
"""
dx = (-1, -1, -1, 0, 1, 1, 1, 0)
dy = (-1, 0, 1, 1, 1, 0, -1, -1)
for x in xrange(len(board)):
for y in xrange(len(board[0])):
lives = 0
for z in xrange(8):
lives += self.status(board, x+dx[z], y+dy[z])
print lives
if lives == 3 or board[x][y] + lives == 3:
board[x][y] |= 2
for x in xrange(len(board)):
for y in xrange(len(board[0])):
board[x][y] >>= 1
def status(self, board, x, y):
if x < 0 or y < 0 or x >= len(board) or y >= len(board[0]):
return 0
return board[x][y] & 1
board = [
[1, 1],
[1, 0],
]
s = Solution()
s.gameOfLife(board)
print board
|
Add problem Game Of Life# https://leetcode.com/problems/game-of-life/
class Solution(object):
def gameOfLife(self, board):
"""
:type board: List[List[int]]
:rtype: void Do not return anything, modify board in-place instead.
"""
dx = (-1, -1, -1, 0, 1, 1, 1, 0)
dy = (-1, 0, 1, 1, 1, 0, -1, -1)
for x in xrange(len(board)):
for y in xrange(len(board[0])):
lives = 0
for z in xrange(8):
lives += self.status(board, x+dx[z], y+dy[z])
print lives
if lives == 3 or board[x][y] + lives == 3:
board[x][y] |= 2
for x in xrange(len(board)):
for y in xrange(len(board[0])):
board[x][y] >>= 1
def status(self, board, x, y):
if x < 0 or y < 0 or x >= len(board) or y >= len(board[0]):
return 0
return board[x][y] & 1
board = [
[1, 1],
[1, 0],
]
s = Solution()
s.gameOfLife(board)
print board
|
<commit_before><commit_msg>Add problem Game Of Life<commit_after># https://leetcode.com/problems/game-of-life/
class Solution(object):
def gameOfLife(self, board):
"""
:type board: List[List[int]]
:rtype: void Do not return anything, modify board in-place instead.
"""
dx = (-1, -1, -1, 0, 1, 1, 1, 0)
dy = (-1, 0, 1, 1, 1, 0, -1, -1)
for x in xrange(len(board)):
for y in xrange(len(board[0])):
lives = 0
for z in xrange(8):
lives += self.status(board, x+dx[z], y+dy[z])
print lives
if lives == 3 or board[x][y] + lives == 3:
board[x][y] |= 2
for x in xrange(len(board)):
for y in xrange(len(board[0])):
board[x][y] >>= 1
def status(self, board, x, y):
if x < 0 or y < 0 or x >= len(board) or y >= len(board[0]):
return 0
return board[x][y] & 1
board = [
[1, 1],
[1, 0],
]
s = Solution()
s.gameOfLife(board)
print board
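For the 2x2 seed above, every live cell has exactly two live neighbours and the dead corner has exactly three, so the expected final print is:
# [[1, 1], [1, 1]]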
|
|
0ab30d3f9b836db48b5e05614c5e1807d9189977
|
sheldon/basic_classes.py
|
sheldon/basic_classes.py
|
# -*- coding: utf-8 -*-
"""
Declaration of classes needed for bot working:
Adapter class, Plugin class
@author: Lises team
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from time import sleep
class Adapter:
"""
    Adapter class contains information about an adapter:
    its name, its variables and the module used to call the adapter's methods
    """
    def __init__(self, name, variables):
        """
        Init a new Adapter object
        :param name: public name of the adapter, as used in the
                     config/adapters directory
        :param variables: variables of the adapter, set in the config file.
                          Example of an adapter variable - a Slack API key.
"""
self.name = name
self.variables = variables
# Load module of adapter later
self.module = None
def get_messages(self):
"""
Get new messages from adapter
:return: iterator of new messages
"""
while True:
messages = self.module.get_messages()
for message in messages:
yield message
# Sleep for the number of seconds set in the adapter config
sleep(int(self.variables['timeout']))
|
Add realization of adapter class
|
Add realization of adapter class
|
Python
|
mit
|
lises/sheldon
|
Add realization of adapter class
|
# -*- coding: utf-8 -*-
"""
Declaration of classes needed for bot working:
Adapter class, Plugin class
@author: Lises team
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from time import sleep
class Adapter:
"""
    Adapter class contains information about an adapter:
    its name, its variables and the module used to call the adapter's methods
    """
    def __init__(self, name, variables):
        """
        Init a new Adapter object
        :param name: public name of the adapter, as used in the
                     config/adapters directory
        :param variables: variables of the adapter, set in the config file.
                          Example of an adapter variable - a Slack API key.
"""
self.name = name
self.variables = variables
# Load module of adapter later
self.module = None
def get_messages(self):
"""
Get new messages from adapter
:return: iterator of new messages
"""
while True:
messages = self.module.get_messages()
for message in messages:
yield message
# Sleep for the number of seconds set in the adapter config
sleep(int(self.variables['timeout']))
|
<commit_before><commit_msg>Add realization of adapter class<commit_after>
|
# -*- coding: utf-8 -*-
"""
Declaration of classes needed for bot working:
Adapter class, Plugin class
@author: Lises team
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from time import sleep
class Adapter:
"""
    Adapter class contains information about an adapter:
    its name, its variables and the module used to call the adapter's methods
    """
    def __init__(self, name, variables):
        """
        Init a new Adapter object
        :param name: public name of the adapter, as used in the
                     config/adapters directory
        :param variables: variables of the adapter, set in the config file.
                          Example of an adapter variable - a Slack API key.
"""
self.name = name
self.variables = variables
# Load module of adapter later
self.module = None
def get_messages(self):
"""
Get new messages from adapter
:return: iterator of new messages
"""
while True:
messages = self.module.get_messages()
for message in messages:
yield message
# Sleep for the number of seconds set in the adapter config
sleep(int(self.variables['timeout']))
|
Add realization of adapter class# -*- coding: utf-8 -*-
"""
Declaration of classes needed for bot working:
Adapter class, Plugin class
@author: Lises team
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from time import sleep
class Adapter:
"""
    Adapter class contains information about an adapter:
    its name, its variables and the module used to call the adapter's methods
    """
    def __init__(self, name, variables):
        """
        Init a new Adapter object
        :param name: public name of the adapter, as used in the
                     config/adapters directory
        :param variables: variables of the adapter, set in the config file.
                          Example of an adapter variable - a Slack API key.
"""
self.name = name
self.variables = variables
# Load module of adapter later
self.module = None
def get_messages(self):
"""
Get new messages from adapter
:return: iterator of new messages
"""
while True:
messages = self.module.get_messages()
for message in messages:
yield message
# Sleep for the number of seconds set in the adapter config
sleep(int(self.variables['timeout']))
|
<commit_before><commit_msg>Add realization of adapter class<commit_after># -*- coding: utf-8 -*-
"""
Declaration of classes needed for bot working:
Adapter class, Plugin class
@author: Lises team
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from time import sleep
class Adapter:
"""
    Adapter class contains information about an adapter:
    its name, its variables and the module used to call the adapter's methods
    """
    def __init__(self, name, variables):
        """
        Init a new Adapter object
        :param name: public name of the adapter, as used in the
                     config/adapters directory
        :param variables: variables of the adapter, set in the config file.
                          Example of an adapter variable - a Slack API key.
"""
self.name = name
self.variables = variables
# Load module of adapter later
self.module = None
def get_messages(self):
"""
Get new messages from adapter
:return: iterator of new messages
"""
while True:
messages = self.module.get_messages()
for message in messages:
yield message
# Sleep for the number of seconds set in the adapter config
sleep(int(self.variables['timeout']))
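A minimal, self-contained sketch of wiring a module into this class; FakeModule is a stand-in for a real adapter module:
class FakeModule:
    def get_messages(self):
        return ['hello from adapter']
adapter = Adapter('fake', {'timeout': '1'})
adapter.module = FakeModule()
messages = adapter.get_messages()  # generator that polls the module, then sleeps `timeout` seconds
print(next(messages))              # 'hello from adapter'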
|
|
12a2113453eb5ec6171d52a49948ea663609afbd
|
gutenbrowse/util.py
|
gutenbrowse/util.py
|
import urllib as _urllib
class HTTPError(IOError):
def __init__(self, code, msg, headers):
IOError.__init__(self, 'HTTP error', code, msg, headers)
def __str__(self):
return "HTTP: %d %s" % (self.args[1], self.args[2])
class MyURLOpener(_urllib.FancyURLopener):
def http_error_default(self, url, fp, errcode, errmsg, headers):
fp.close()
raise HTTPError(errcode, errmsg, headers)
_urlopener = None
def myurlopen(url, data=None, proxies=None):
"""
As urllib.urlopen, but raises HTTPErrors on HTTP failure
"""
global _urlopener
if proxies is not None:
opener = MyURLOpener(proxies=proxies)
elif not _urlopener:
opener = MyURLOpener()
_urlopener = opener
else:
opener = _urlopener
if data is None:
return opener.open(url)
else:
return opener.open(url, data)
__all__ = ['myurlopen']
|
Add an exception-raising-on-http-error urlopen function
|
Add an exception-raising-on-http-error urlopen function
|
Python
|
bsd-3-clause
|
pv/mgutenberg,pv/mgutenberg
|
Add an exception-raising-on-http-error urlopen function
|
import urllib as _urllib
class HTTPError(IOError):
def __init__(self, code, msg, headers):
IOError.__init__(self, 'HTTP error', code, msg, headers)
def __str__(self):
return "HTTP: %d %s" % (self.args[1], self.args[2])
class MyURLOpener(_urllib.FancyURLopener):
def http_error_default(self, url, fp, errcode, errmsg, headers):
fp.close()
raise HTTPError(errcode, errmsg, headers)
_urlopener = None
def myurlopen(url, data=None, proxies=None):
"""
As urllib.urlopen, but raises HTTPErrors on HTTP failure
"""
global _urlopener
if proxies is not None:
opener = MyURLOpener(proxies=proxies)
elif not _urlopener:
opener = MyURLOpener()
_urlopener = opener
else:
opener = _urlopener
if data is None:
return opener.open(url)
else:
return opener.open(url, data)
__all__ = ['myurlopen']
|
<commit_before><commit_msg>Add an exception-raising-on-http-error urlopen function<commit_after>
|
import urllib as _urllib
class HTTPError(IOError):
def __init__(self, code, msg, headers):
IOError.__init__(self, 'HTTP error', code, msg, headers)
def __str__(self):
return "HTTP: %d %s" % (self.args[1], self.args[2])
class MyURLOpener(_urllib.FancyURLopener):
def http_error_default(self, url, fp, errcode, errmsg, headers):
fp.close()
raise HTTPError(errcode, errmsg, headers)
_urlopener = None
def myurlopen(url, data=None, proxies=None):
"""
As urllib.urlopen, but raises HTTPErrors on HTTP failure
"""
global _urlopener
if proxies is not None:
opener = MyURLOpener(proxies=proxies)
elif not _urlopener:
opener = MyURLOpener()
_urlopener = opener
else:
opener = _urlopener
if data is None:
return opener.open(url)
else:
return opener.open(url, data)
__all__ = ['myurlopen']
|
Add an exception-raising-on-http-error urlopen functionimport urllib as _urllib
class HTTPError(IOError):
def __init__(self, code, msg, headers):
IOError.__init__(self, 'HTTP error', code, msg, headers)
def __str__(self):
return "HTTP: %d %s" % (self.args[1], self.args[2])
class MyURLOpener(_urllib.FancyURLopener):
def http_error_default(self, url, fp, errcode, errmsg, headers):
fp.close()
raise HTTPError(errcode, errmsg, headers)
_urlopener = None
def myurlopen(url, data=None, proxies=None):
"""
As urllib.urlopen, but raises HTTPErrors on HTTP failure
"""
global _urlopener
if proxies is not None:
opener = MyURLOpener(proxies=proxies)
elif not _urlopener:
opener = MyURLOpener()
_urlopener = opener
else:
opener = _urlopener
if data is None:
return opener.open(url)
else:
return opener.open(url, data)
__all__ = ['myurlopen']
|
<commit_before><commit_msg>Add an exception-raising-on-http-error urlopen function<commit_after>import urllib as _urllib
class HTTPError(IOError):
def __init__(self, code, msg, headers):
IOError.__init__(self, 'HTTP error', code, msg, headers)
def __str__(self):
return "HTTP: %d %s" % (self.args[1], self.args[2])
class MyURLOpener(_urllib.FancyURLopener):
def http_error_default(self, url, fp, errcode, errmsg, headers):
fp.close()
raise HTTPError(errcode, errmsg, headers)
_urlopener = None
def myurlopen(url, data=None, proxies=None):
"""
As urllib.urlopen, but raises HTTPErrors on HTTP failure
"""
global _urlopener
if proxies is not None:
opener = MyURLOpener(proxies=proxies)
elif not _urlopener:
opener = MyURLOpener()
_urlopener = opener
else:
opener = _urlopener
if data is None:
return opener.open(url)
else:
return opener.open(url, data)
__all__ = ['myurlopen']
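Example of the intended error behaviour (Python 2; needs network access, and the URL is a placeholder expected to return a 404):
try:
    fh = myurlopen('http://example.com/no-such-page')
except HTTPError as err:
    print err  # e.g. "HTTP: 404 Not Found"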
|
|
c6480cad80a9c8eb93afdb3bd31a8c8c21eea8d9
|
scripts/pubmedextract.py
|
scripts/pubmedextract.py
|
"""
Extract a set of doc ids from the pubmed xml files.
"""
import argparse
import glob
import gzip
import multiprocessing
import os
from functools import partial
from multiprocessing import Pool
import sys
from lxml import etree
def parse_pubmeds(pmids: list, file: str) -> None:
    """
    :param pmids: PubMed IDs (as strings) to extract
    :param file: path to a gzipped PubMed XML file
    :return: None - matching articles are written out as standalone XML files
    """
data = """<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE PubmedArticleSet SYSTEM "http://dtd.nlm.nih.gov/ncbi/pubmed/out/pubmed_170101.dtd">
<PubmedArticleSet>
{}
</PubmedArticleSet>
"""
print(file)
decompressed_file = gzip.GzipFile(file, mode='rb')
tree = etree.parse(decompressed_file)
root = tree.getroot()
for node in root.findall('PubmedArticle'):
pmid = node.find('MedlineCitation').find('PMID').text
if pmid in pmids:
print(pmid)
file_data = data.format(
etree.tostring(node, encoding='unicode', method='xml', pretty_print=True))
with open('/datadrive2/pubmed_filter/{}.xml'.format(pmid), 'w') as f:
f.write(file_data)
if __name__ == '__main__':
argparser = argparse.ArgumentParser()
argparser.add_argument('--pmids', help='Location of pmids file.',
type=argparse.FileType('r'), default=sys.stdin)
argparser.add_argument('--pubmed', help='Location of pubmed gzip files.',
type=str, required=True)
args = argparser.parse_args()
parse_partial = partial(parse_pubmeds, [x.strip() for x in args.pmids.readlines()])
print(list(glob.glob(os.path.join(args.pubmed, '*.xml.gz'))))
p = Pool(multiprocessing.cpu_count() - 1 or 1)
p.map(parse_partial, list(glob.glob(os.path.join(args.pubmed, '*.xml.gz'))))
p.close()
p.join()
|
Add script for extracting pmids out of pubmed data.
|
Add script for extracting pmids out of pubmed data.
|
Python
|
apache-2.0
|
leifos/lucene4ir,leifos/lucene4ir,leifos/lucene4ir,lucene4ir/lucene4ir,lucene4ir/lucene4ir,lucene4ir/lucene4ir
|
Add script for extracting pmids out of pubmed data.
|
"""
Extract a set of doc ids from the pubmed xml files.
"""
import argparse
import glob
import gzip
import multiprocessing
import os
from functools import partial
from multiprocessing import Pool
import sys
from lxml import etree
def parse_pubmeds(pmids: list, file: str) -> None:
    """
    :param pmids: PubMed IDs (as strings) to extract
    :param file: path to a gzipped PubMed XML file
    :return: None - matching articles are written out as standalone XML files
    """
data = """<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE PubmedArticleSet SYSTEM "http://dtd.nlm.nih.gov/ncbi/pubmed/out/pubmed_170101.dtd">
<PubmedArticleSet>
{}
</PubmedArticleSet>
"""
print(file)
decompressed_file = gzip.GzipFile(file, mode='rb')
tree = etree.parse(decompressed_file)
root = tree.getroot()
for node in root.findall('PubmedArticle'):
pmid = node.find('MedlineCitation').find('PMID').text
if pmid in pmids:
print(pmid)
file_data = data.format(
etree.tostring(node, encoding='unicode', method='xml', pretty_print=True))
with open('/datadrive2/pubmed_filter/{}.xml'.format(pmid), 'w') as f:
f.write(file_data)
if __name__ == '__main__':
argparser = argparse.ArgumentParser()
argparser.add_argument('--pmids', help='Location of pmids file.',
type=argparse.FileType('r'), default=sys.stdin)
argparser.add_argument('--pubmed', help='Location of pubmed gzip files.',
type=str, required=True)
args = argparser.parse_args()
parse_partial = partial(parse_pubmeds, [x.strip() for x in args.pmids.readlines()])
print(list(glob.glob(os.path.join(args.pubmed, '*.xml.gz'))))
p = Pool(multiprocessing.cpu_count() - 1 or 1)
p.map(parse_partial, list(glob.glob(os.path.join(args.pubmed, '*.xml.gz'))))
p.close()
p.join()
|
<commit_before><commit_msg>Add script for extracting pmids out of pubmed data.<commit_after>
|
"""
Extract a set of doc ids from the pubmed xml files.
"""
import argparse
import glob
import gzip
import multiprocessing
import os
from functools import partial
from multiprocessing import Pool
import sys
from lxml import etree
def parse_pubmeds(pmids: list, file: str) -> None:
    """
    :param pmids: PubMed IDs (as strings) to extract
    :param file: path to a single gzipped PubMed XML file
    :return: None; each matching article is written out as its own XML file
    """
data = """<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE PubmedArticleSet SYSTEM "http://dtd.nlm.nih.gov/ncbi/pubmed/out/pubmed_170101.dtd">
<PubmedArticleSet>
{}
</PubmedArticleSet>
"""
print(file)
decompressed_file = gzip.GzipFile(file, mode='rb')
tree = etree.parse(decompressed_file)
root = tree.getroot()
for node in root.findall('PubmedArticle'):
pmid = node.find('MedlineCitation').find('PMID').text
if pmid in pmids:
print(pmid)
file_data = data.format(
etree.tostring(node, encoding='unicode', method='xml', pretty_print=True))
with open('/datadrive2/pubmed_filter/{}.xml'.format(pmid), 'w') as f:
f.write(file_data)
if __name__ == '__main__':
argparser = argparse.ArgumentParser()
argparser.add_argument('--pmids', help='Location of pmids file.',
type=argparse.FileType('r'), default=sys.stdin)
argparser.add_argument('--pubmed', help='Location of pubmed gzip files.',
type=str, required=True)
args = argparser.parse_args()
parse_partial = partial(parse_pubmeds, [x.strip() for x in args.pmids.readlines()])
print(list(glob.glob(os.path.join(args.pubmed, '*.xml.gz'))))
p = Pool(multiprocessing.cpu_count() - 1 or 1)
p.map(parse_partial, list(glob.glob(os.path.join(args.pubmed, '*.xml.gz'))))
p.close()
p.join()
|
Add script for extracting pmids out of pubmed data."""
Extract a set of doc ids from the pubmed xml files.
"""
import argparse
import glob
import gzip
import multiprocessing
import os
from functools import partial
from multiprocessing import Pool
import sys
from lxml import etree
def parse_pubmeds(pmids: list, file: str) -> None:
    """
    :param pmids: PubMed IDs (as strings) to extract
    :param file: path to a single gzipped PubMed XML file
    :return: None; each matching article is written out as its own XML file
    """
data = """<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE PubmedArticleSet SYSTEM "http://dtd.nlm.nih.gov/ncbi/pubmed/out/pubmed_170101.dtd">
<PubmedArticleSet>
{}
</PubmedArticleSet>
"""
print(file)
decompressed_file = gzip.GzipFile(file, mode='rb')
tree = etree.parse(decompressed_file)
root = tree.getroot()
for node in root.findall('PubmedArticle'):
pmid = node.find('MedlineCitation').find('PMID').text
if pmid in pmids:
print(pmid)
file_data = data.format(
etree.tostring(node, encoding='unicode', method='xml', pretty_print=True))
with open('/datadrive2/pubmed_filter/{}.xml'.format(pmid), 'w') as f:
f.write(file_data)
if __name__ == '__main__':
argparser = argparse.ArgumentParser()
argparser.add_argument('--pmids', help='Location of pmids file.',
type=argparse.FileType('r'), default=sys.stdin)
argparser.add_argument('--pubmed', help='Location of pubmed gzip files.',
type=str, required=True)
args = argparser.parse_args()
parse_partial = partial(parse_pubmeds, [x.strip() for x in args.pmids.readlines()])
print(list(glob.glob(os.path.join(args.pubmed, '*.xml.gz'))))
p = Pool(multiprocessing.cpu_count() - 1 or 1)
p.map(parse_partial, list(glob.glob(os.path.join(args.pubmed, '*.xml.gz'))))
p.close()
p.join()
|
<commit_before><commit_msg>Add script for extracting pmids out of pubmed data.<commit_after>"""
Extract a set of doc ids from the pubmed xml files.
"""
import argparse
import glob
import gzip
import multiprocessing
import os
from functools import partial
from multiprocessing import Pool
import sys
from lxml import etree
def parse_pubmeds(pmids: list, file: str) -> None:
    """
    :param pmids: PubMed IDs (as strings) to extract
    :param file: path to a single gzipped PubMed XML file
    :return: None; each matching article is written out as its own XML file
    """
data = """<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE PubmedArticleSet SYSTEM "http://dtd.nlm.nih.gov/ncbi/pubmed/out/pubmed_170101.dtd">
<PubmedArticleSet>
{}
</PubmedArticleSet>
"""
print(file)
decompressed_file = gzip.GzipFile(file, mode='rb')
tree = etree.parse(decompressed_file)
root = tree.getroot()
for node in root.findall('PubmedArticle'):
pmid = node.find('MedlineCitation').find('PMID').text
if pmid in pmids:
print(pmid)
file_data = data.format(
etree.tostring(node, encoding='unicode', method='xml', pretty_print=True))
with open('/datadrive2/pubmed_filter/{}.xml'.format(pmid), 'w') as f:
f.write(file_data)
if __name__ == '__main__':
argparser = argparse.ArgumentParser()
argparser.add_argument('--pmids', help='Location of pmids file.',
type=argparse.FileType('r'), default=sys.stdin)
argparser.add_argument('--pubmed', help='Location of pubmed gzip files.',
type=str, required=True)
args = argparser.parse_args()
parse_partial = partial(parse_pubmeds, [x.strip() for x in args.pmids.readlines()])
print(list(glob.glob(os.path.join(args.pubmed, '*.xml.gz'))))
p = Pool(multiprocessing.cpu_count() - 1 or 1)
p.map(parse_partial, list(glob.glob(os.path.join(args.pubmed, '*.xml.gz'))))
p.close()
p.join()
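Invocation sketch (paths hypothetical):

    python pubmedextract.py --pmids pmids.txt --pubmed /data/pubmed/

where pmids.txt holds one PMID per line. Note that the output directory /datadrive2/pubmed_filter/ is hard-coded and must already exist.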
|
|
27b43cd5dd8c27afd8b7dbacd3024c222af909d0
|
tests/test_int_vs_long.py
|
tests/test_int_vs_long.py
|
from __future__ import absolute_import, unicode_literals
import unittest
from jnius import autoclass, cast, PythonJavaClass, java_method
class TestImplemIterator(PythonJavaClass):
__javainterfaces__ = ['java/util/ListIterator']
class TestImplem(PythonJavaClass):
__javainterfaces__ = ['java/util/List']
def __init__(self, *args):
super(TestImplem, self).__init__(*args)
self.data = list(args)
@java_method('()I')
def size(self):
return len(self.data)
@java_method('(I)Ljava/lang/Object;')
def get(self, index):
return self.data[index]
@java_method('(ILjava/lang/Object;)Ljava/lang/Object;')
def set(self, index, obj):
old_object = self.data[index]
self.data[index] = obj
return old_object
class TestIntLongConversion(unittest.TestCase):
def test_reverse(self):
'''
String comparison because values are the same for INT and LONG,
but only __str__ shows the real difference.
'''
Collections = autoclass('java.util.Collections')
List = autoclass('java.util.List')
pylist = list(range(10))
a = TestImplem(*pylist)
self.assertEqual(a.data, pylist)
self.assertEqual(str(a.data), '[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]')
# reverse the array, be sure it's converted back to INT!
Collections.reverse(a)
# conversion to/from Java objects hides INT/LONG conv on Py2
# which is wrong to switch between because even Java
# recognizes INT and LONG types separately (Py3 doesn't)
self.assertEqual(a.data, list(reversed(pylist)))
self.assertNotIn('L', str(a.data))
self.assertEqual(str(a.data), '[9, 8, 7, 6, 5, 4, 3, 2, 1, 0]')
if __name__ == '__main__':
unittest.main()
|
Add test for INT vs LONG conversion on Py2
|
Add test for INT vs LONG conversion on Py2
int and long are the same on py3, but we do care about the conversion from Python too which in case INT is >=2**31 then Java might have problem with that and it could cause issues for the users
|
Python
|
mit
|
kivy/pyjnius,kivy/pyjnius,kivy/pyjnius
|
Add test for INT vs LONG conversion on Py2
int and long are the same on py3, but we do care about the conversion from Python too which in case INT is >=2**31 then Java might have problem with that and it could cause issues for the users
|
from __future__ import absolute_import, unicode_literals
import unittest
from jnius import autoclass, cast, PythonJavaClass, java_method
class TestImplemIterator(PythonJavaClass):
__javainterfaces__ = ['java/util/ListIterator']
class TestImplem(PythonJavaClass):
__javainterfaces__ = ['java/util/List']
def __init__(self, *args):
super(TestImplem, self).__init__(*args)
self.data = list(args)
@java_method('()I')
def size(self):
return len(self.data)
@java_method('(I)Ljava/lang/Object;')
def get(self, index):
return self.data[index]
@java_method('(ILjava/lang/Object;)Ljava/lang/Object;')
def set(self, index, obj):
old_object = self.data[index]
self.data[index] = obj
return old_object
class TestIntLongConversion(unittest.TestCase):
def test_reverse(self):
'''
String comparison because values are the same for INT and LONG,
but only __str__ shows the real difference.
'''
Collections = autoclass('java.util.Collections')
List = autoclass('java.util.List')
pylist = list(range(10))
a = TestImplem(*pylist)
self.assertEqual(a.data, pylist)
self.assertEqual(str(a.data), '[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]')
# reverse the array, be sure it's converted back to INT!
Collections.reverse(a)
# conversion to/from Java objects hides INT/LONG conv on Py2
# which is wrong to switch between because even Java
# recognizes INT and LONG types separately (Py3 doesn't)
self.assertEqual(a.data, list(reversed(pylist)))
self.assertNotIn('L', str(a.data))
self.assertEqual(str(a.data), '[9, 8, 7, 6, 5, 4, 3, 2, 1, 0]')
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add test for INT vs LONG conversion on Py2
int and long are the same on py3, but we do care about the conversion from Python too which in case INT is >=2**31 then Java might have problem with that and it could cause issues for the users<commit_after>
|
from __future__ import absolute_import, unicode_literals
import unittest
from jnius import autoclass, cast, PythonJavaClass, java_method
class TestImplemIterator(PythonJavaClass):
__javainterfaces__ = ['java/util/ListIterator']
class TestImplem(PythonJavaClass):
__javainterfaces__ = ['java/util/List']
def __init__(self, *args):
super(TestImplem, self).__init__(*args)
self.data = list(args)
@java_method('()I')
def size(self):
return len(self.data)
@java_method('(I)Ljava/lang/Object;')
def get(self, index):
return self.data[index]
@java_method('(ILjava/lang/Object;)Ljava/lang/Object;')
def set(self, index, obj):
old_object = self.data[index]
self.data[index] = obj
return old_object
class TestIntLongConversion(unittest.TestCase):
def test_reverse(self):
'''
String comparison because values are the same for INT and LONG,
but only __str__ shows the real difference.
'''
Collections = autoclass('java.util.Collections')
List = autoclass('java.util.List')
pylist = list(range(10))
a = TestImplem(*pylist)
self.assertEqual(a.data, pylist)
self.assertEqual(str(a.data), '[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]')
# reverse the array, be sure it's converted back to INT!
Collections.reverse(a)
# conversion to/from Java objects hides INT/LONG conv on Py2
# which is wrong to switch between because even Java
# recognizes INT and LONG types separately (Py3 doesn't)
self.assertEqual(a.data, list(reversed(pylist)))
self.assertNotIn('L', str(a.data))
self.assertEqual(str(a.data), '[9, 8, 7, 6, 5, 4, 3, 2, 1, 0]')
if __name__ == '__main__':
unittest.main()
|
Add test for INT vs LONG conversion on Py2
int and long are the same on py3, but we do care about the conversion from Python too which in case INT is >=2**31 then Java might have problem with that and it could cause issues for the usersfrom __future__ import absolute_import, unicode_literals
import unittest
from jnius import autoclass, cast, PythonJavaClass, java_method
class TestImplemIterator(PythonJavaClass):
__javainterfaces__ = ['java/util/ListIterator']
class TestImplem(PythonJavaClass):
__javainterfaces__ = ['java/util/List']
def __init__(self, *args):
super(TestImplem, self).__init__(*args)
self.data = list(args)
@java_method('()I')
def size(self):
return len(self.data)
@java_method('(I)Ljava/lang/Object;')
def get(self, index):
return self.data[index]
@java_method('(ILjava/lang/Object;)Ljava/lang/Object;')
def set(self, index, obj):
old_object = self.data[index]
self.data[index] = obj
return old_object
class TestIntLongConversion(unittest.TestCase):
def test_reverse(self):
'''
String comparison because values are the same for INT and LONG,
but only __str__ shows the real difference.
'''
Collections = autoclass('java.util.Collections')
List = autoclass('java.util.List')
pylist = list(range(10))
a = TestImplem(*pylist)
self.assertEqual(a.data, pylist)
self.assertEqual(str(a.data), '[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]')
# reverse the array, be sure it's converted back to INT!
Collections.reverse(a)
# conversion to/from Java objects hides INT/LONG conv on Py2
# which is wrong to switch between because even Java
# recognizes INT and LONG types separately (Py3 doesn't)
self.assertEqual(a.data, list(reversed(pylist)))
self.assertNotIn('L', str(a.data))
self.assertEqual(str(a.data), '[9, 8, 7, 6, 5, 4, 3, 2, 1, 0]')
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add test for INT vs LONG conversion on Py2
int and long are the same on py3, but we do care about the conversion from Python too which in case INT is >=2**31 then Java might have problem with that and it could cause issues for the users<commit_after>from __future__ import absolute_import, unicode_literals
import unittest
from jnius import autoclass, cast, PythonJavaClass, java_method
class TestImplemIterator(PythonJavaClass):
__javainterfaces__ = ['java/util/ListIterator']
class TestImplem(PythonJavaClass):
__javainterfaces__ = ['java/util/List']
def __init__(self, *args):
super(TestImplem, self).__init__(*args)
self.data = list(args)
@java_method('()I')
def size(self):
return len(self.data)
@java_method('(I)Ljava/lang/Object;')
def get(self, index):
return self.data[index]
@java_method('(ILjava/lang/Object;)Ljava/lang/Object;')
def set(self, index, obj):
old_object = self.data[index]
self.data[index] = obj
return old_object
class TestIntLongConversion(unittest.TestCase):
def test_reverse(self):
'''
String comparison because values are the same for INT and LONG,
but only __str__ shows the real difference.
'''
Collections = autoclass('java.util.Collections')
List = autoclass('java.util.List')
pylist = list(range(10))
a = TestImplem(*pylist)
self.assertEqual(a.data, pylist)
self.assertEqual(str(a.data), '[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]')
# reverse the array, be sure it's converted back to INT!
Collections.reverse(a)
# conversion to/from Java objects hides INT/LONG conv on Py2
# which is wrong to switch between because even Java
# recognizes INT and LONG types separately (Py3 doesn't)
self.assertEqual(a.data, list(reversed(pylist)))
self.assertNotIn('L', str(a.data))
self.assertEqual(str(a.data), '[9, 8, 7, 6, 5, 4, 3, 2, 1, 0]')
if __name__ == '__main__':
unittest.main()
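The boundary the commit message refers to can be made concrete with a small sketch (JAVA_INT_MAX is our own name here, not a jnius constant):

JAVA_INT_MAX = 2 ** 31 - 1
for v in (JAVA_INT_MAX, JAVA_INT_MAX + 1):
    # anything outside the signed 32-bit range must cross the bridge
    # as a Java long, however Python happens to represent it
    print('%d fits in a Java int: %s' % (v, -2 ** 31 <= v <= JAVA_INT_MAX))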
|
|
266eae5880b352783ca3cb31af8eae3fc40e5ea5
|
snoboy/tests/test_cpu.py
|
snoboy/tests/test_cpu.py
|
from nose.tools import eq_, ok_
from snoboy import cpu
def test_BC():
cpu.registers.DE = 0xB060
cpu.registers.BC = 0xC005
eq_(cpu.registers.BC, 0xC005)
eq_(cpu.registers.DE, 0xB060)
|
Add simple test case for cpu compound registers
|
Add simple test case for cpu compound registers
|
Python
|
mit
|
Osmose/snoboy
|
Add simple test case for cpu compound registers
|
from nose.tools import eq_, ok_
from snoboy import cpu
def test_BC():
cpu.registers.DE = 0xB060
cpu.registers.BC = 0xC005
eq_(cpu.registers.BC, 0xC005)
eq_(cpu.registers.DE, 0xB060)
|
<commit_before><commit_msg>Add simple test case for cpu compound registers<commit_after>
|
from nose.tools import eq_, ok_
from snoboy import cpu
def test_BC():
cpu.registers.DE = 0xB060
cpu.registers.BC = 0xC005
eq_(cpu.registers.BC, 0xC005)
eq_(cpu.registers.DE, 0xB060)
|
Add simple test case for cpu compound registersfrom nose.tools import eq_, ok_
from snoboy import cpu
def test_BC():
cpu.registers.DE = 0xB060
cpu.registers.BC = 0xC005
eq_(cpu.registers.BC, 0xC005)
eq_(cpu.registers.DE, 0xB060)
|
<commit_before><commit_msg>Add simple test case for cpu compound registers<commit_after>from nose.tools import eq_, ok_
from snoboy import cpu
def test_BC():
cpu.registers.DE = 0xB060
cpu.registers.BC = 0xC005
eq_(cpu.registers.BC, 0xC005)
eq_(cpu.registers.DE, 0xB060)
|
|
6bbde2294d1f5eeb4dde2ee76f6a874c6ed52ead
|
src/products/baseball.py
|
src/products/baseball.py
|
import numpy as np
import tensorflow.contrib.keras as keras
PWD = 'products'
MODEL = "{}/model.h5".format(PWD)
THRESHOLD = 0.75
# Load model.
model = keras.models.load_model(MODEL)
# Stream test photos.
test_datagen = keras.preprocessing.image.ImageDataGenerator(
rescale=1. / 255)
test_generator = test_datagen.flow_from_directory(
"{}/baseball".format(PWD),
target_size=(299, 299),
batch_size=32,
class_mode='categorical',
shuffle=False)
# Test model.
results = model.predict_generator(test_generator)
count = 0.0
total = 0.0
for i in range(0, len(results)):
predicted = np.argmax(results[i])
value = results[i][predicted]
actual = test_generator.classes[i]
if value > THRESHOLD:
if predicted == actual:
count = count + 1
total = total + 1
print("accuracy: {}".format(count / total))
print("coverage: {}".format(total / len(results)))
|
Add script to only predict if above confidence threshold.
|
Add script to only predict if above confidence threshold.
|
Python
|
mit
|
isaacanthony/tensorflow-playground,isaacanthony/tensorflow-playground
|
Add script to only predict if above confidence threshold.
|
import numpy as np
import tensorflow.contrib.keras as keras
PWD = 'products'
MODEL = "{}/model.h5".format(PWD)
THRESHOLD = 0.75
# Load model.
model = keras.models.load_model(MODEL)
# Stream test photos.
test_datagen = keras.preprocessing.image.ImageDataGenerator(
rescale=1. / 255)
test_generator = test_datagen.flow_from_directory(
"{}/baseball".format(PWD),
target_size=(299, 299),
batch_size=32,
class_mode='categorical',
shuffle=False)
# Test model.
results = model.predict_generator(test_generator)
count = 0.0
total = 0.0
for i in range(0, len(results)):
predicted = np.argmax(results[i])
value = results[i][predicted]
actual = test_generator.classes[i]
if value > THRESHOLD:
if predicted == actual:
count = count + 1
total = total + 1
print("accuracy: {}".format(count / total))
print("coverage: {}".format(total / len(results)))
|
<commit_before><commit_msg>Add script to only predict if above confidence threshold.<commit_after>
|
import numpy as np
import tensorflow.contrib.keras as keras
PWD = 'products'
MODEL = "{}/model.h5".format(PWD)
THRESHOLD = 0.75
# Load model.
model = keras.models.load_model(MODEL)
# Stream test photos.
test_datagen = keras.preprocessing.image.ImageDataGenerator(
rescale=1. / 255)
test_generator = test_datagen.flow_from_directory(
"{}/baseball".format(PWD),
target_size=(299, 299),
batch_size=32,
class_mode='categorical',
shuffle=False)
# Test model.
results = model.predict_generator(test_generator)
count = 0.0
total = 0.0
for i in range(0, len(results)):
predicted = np.argmax(results[i])
value = results[i][predicted]
actual = test_generator.classes[i]
if value > THRESHOLD:
if predicted == actual:
count = count + 1
total = total + 1
print("accuracy: {}".format(count / total))
print("coverage: {}".format(total / len(results)))
|
Add script to only predict if above confidence threshold.import numpy as np
import tensorflow.contrib.keras as keras
PWD = 'products'
MODEL = "{}/model.h5".format(PWD)
THRESHOLD = 0.75
# Load model.
model = keras.models.load_model(MODEL)
# Stream test photos.
test_datagen = keras.preprocessing.image.ImageDataGenerator(
rescale=1. / 255)
test_generator = test_datagen.flow_from_directory(
"{}/baseball".format(PWD),
target_size=(299, 299),
batch_size=32,
class_mode='categorical',
shuffle=False)
# Test model.
results = model.predict_generator(test_generator)
count = 0.0
total = 0.0
for i in range(0, len(results)):
predicted = np.argmax(results[i])
value = results[i][predicted]
actual = test_generator.classes[i]
if value > THRESHOLD:
if predicted == actual:
count = count + 1
total = total + 1
print("accuracy: {}".format(count / total))
print("coverage: {}".format(total / len(results)))
|
<commit_before><commit_msg>Add script to only predict if above confidence threshold.<commit_after>import numpy as np
import tensorflow.contrib.keras as keras
PWD = 'products'
MODEL = "{}/model.h5".format(PWD)
THRESHOLD = 0.75
# Load model.
model = keras.models.load_model(MODEL)
# Stream test photos.
test_datagen = keras.preprocessing.image.ImageDataGenerator(
rescale=1. / 255)
test_generator = test_datagen.flow_from_directory(
"{}/baseball".format(PWD),
target_size=(299, 299),
batch_size=32,
class_mode='categorical',
shuffle=False)
# Test model.
results = model.predict_generator(test_generator)
count = 0.0
total = 0.0
for i in range(0, len(results)):
predicted = np.argmax(results[i])
value = results[i][predicted]
actual = test_generator.classes[i]
if value > THRESHOLD:
if predicted == actual:
count = count + 1
total = total + 1
print("accuracy: {}".format(count / total))
print("coverage: {}".format(total / len(results)))
|
|
9b6f703d65cfbf57cf5b4aee224b89c215f931eb
|
packaging/__init__.py
|
packaging/__init__.py
|
# Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from packaging.__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
|
# Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from .__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
|
Use relative import to allow vendoring
|
Use relative import to allow vendoring
|
Python
|
apache-2.0
|
xavfernandez/packaging,nvie/packaging
|
# Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from packaging.__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
Use relative import to allow vendoring
|
# Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from .__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
|
<commit_before># Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from packaging.__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
<commit_msg>Use relative import to allow vendoring<commit_after>
|
# Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from .__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
|
# Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from packaging.__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
Use relative import to allow vendoring# Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from .__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
|
<commit_before># Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from packaging.__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
<commit_msg>Use relative import to allow vendoring<commit_after># Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from .__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
|
0c60ce6b4c816c080c56b44a0568c0b9520c3581
|
lms/djangoapps/student_account/careersandenterprise.py
|
lms/djangoapps/student_account/careersandenterprise.py
|
from django.conf import settings
from social_core.backends.oauth import BaseOAuth2
import logging
import re
log = logging.getLogger(__name__)
class CareersAndEnterpriseOAuth2(BaseOAuth2):
"""Careers and Enterprise Company OAuth2 authentication backend."""
settings_dict = settings.CUSTOM_BACKENDS.get('careersandenterprise')
name = 'careersandenterprise-oauth2'
REDIRECT_STATE = False
STATE_PARAMETER = False
AUTHORIZATION_URL = settings_dict.get('AUTH_URL')
ACCESS_TOKEN_URL = settings_dict.get('ACCESS_TOKEN_URL')
USER_DATA_URL = settings_dict.get('USER_DATA_URL')
ACCESS_TOKEN_METHOD = 'POST'
RESPONSE_TYPE = 'code'
def auth_complete(self, *args, **kwargs):
"""Completes login process, must return user instance."""
self.process_error(self.data)
state = self.validate_state()
response = self.request_access_token(
self.access_token_url(),
data=self.auth_complete_params(state),
headers=self.auth_headers(),
auth=self.auth_complete_credentials(),
method=self.ACCESS_TOKEN_METHOD
)
self.process_error(response)
return self.do_auth(response['access_token'], response=response,
*args, **kwargs)
def auth_complete_params(self, state=None):
client_id, client_secret = self.get_key_and_secret()
return {
'state': state,
'grant_type': 'authorization_code',
'code': self.data.get('code', ''), # server response code
'client_id': client_id,
'client_secret': client_secret,
'redirect_uri': self.get_redirect_uri(state)
}
def get_user_details(self, response):
username = re.sub('[^A-Za-z0-9]+', '_', response.get('name'))
fullname = "{} {}".format(response.get('firstName'), response.get('lastName'))
return {'username': username,
'email': response.get('mail'),
'fullname': fullname}
def user_data(self, access_token, *args, **kwargs):
response = self.get_json(self.USER_DATA_URL, headers={
'Authorization': 'Bearer {}'.format(access_token)
})
return response
def get_user_id(self, details, response):
return details.get('email')
|
Add initial version Careers and Enterprise Oauth2 backend.
|
Add initial version Careers and Enterprise Oauth2 backend.
|
Python
|
agpl-3.0
|
proversity-org/edx-platform,proversity-org/edx-platform,proversity-org/edx-platform,proversity-org/edx-platform
|
Add initial version Careers and Enterprise Oauth2 backend.
|
from django.conf import settings
from social_core.backends.oauth import BaseOAuth2
import logging
import re
log = logging.getLogger(__name__)
class CareersAndEnterpriseOAuth2(BaseOAuth2):
"""Careers and Enterprise Company OAuth2 authentication backend."""
settings_dict = settings.CUSTOM_BACKENDS.get('careersandenterprise')
name = 'careersandenterprise-oauth2'
REDIRECT_STATE = False
STATE_PARAMETER = False
AUTHORIZATION_URL = settings_dict.get('AUTH_URL')
ACCESS_TOKEN_URL = settings_dict.get('ACCESS_TOKEN_URL')
USER_DATA_URL = settings_dict.get('USER_DATA_URL')
ACCESS_TOKEN_METHOD = 'POST'
RESPONSE_TYPE = 'code'
def auth_complete(self, *args, **kwargs):
"""Completes login process, must return user instance."""
self.process_error(self.data)
state = self.validate_state()
response = self.request_access_token(
self.access_token_url(),
data=self.auth_complete_params(state),
headers=self.auth_headers(),
auth=self.auth_complete_credentials(),
method=self.ACCESS_TOKEN_METHOD
)
self.process_error(response)
return self.do_auth(response['access_token'], response=response,
*args, **kwargs)
def auth_complete_params(self, state=None):
client_id, client_secret = self.get_key_and_secret()
return {
'state': state,
'grant_type': 'authorization_code',
'code': self.data.get('code', ''), # server response code
'client_id': client_id,
'client_secret': client_secret,
'redirect_uri': self.get_redirect_uri(state)
}
def get_user_details(self, response):
username = re.sub('[^A-Za-z0-9]+', '_', response.get('name'))
fullname = "{} {}".format(response.get('firstName'), response.get('lastName'))
return {'username': username,
'email': response.get('mail'),
'fullname': fullname}
def user_data(self, access_token, *args, **kwargs):
response = self.get_json(self.USER_DATA_URL, headers={
'Authorization': 'Bearer {}'.format(access_token)
})
return response
def get_user_id(self, details, response):
return details.get('email')
|
<commit_before><commit_msg>Add initial version Careers and Enterprise Oauth2 backend.<commit_after>
|
from django.conf import settings
from social_core.backends.oauth import BaseOAuth2
import logging
import re
log = logging.getLogger(__name__)
class CareersAndEnterpriseOAuth2(BaseOAuth2):
"""Careers and Enterprise Company OAuth2 authentication backend."""
settings_dict = settings.CUSTOM_BACKENDS.get('careersandenterprise')
name = 'careersandenterprise-oauth2'
REDIRECT_STATE = False
STATE_PARAMETER = False
AUTHORIZATION_URL = settings_dict.get('AUTH_URL')
ACCESS_TOKEN_URL = settings_dict.get('ACCESS_TOKEN_URL')
USER_DATA_URL = settings_dict.get('USER_DATA_URL')
ACCESS_TOKEN_METHOD = 'POST'
RESPONSE_TYPE = 'code'
def auth_complete(self, *args, **kwargs):
"""Completes login process, must return user instance."""
self.process_error(self.data)
state = self.validate_state()
response = self.request_access_token(
self.access_token_url(),
data=self.auth_complete_params(state),
headers=self.auth_headers(),
auth=self.auth_complete_credentials(),
method=self.ACCESS_TOKEN_METHOD
)
self.process_error(response)
return self.do_auth(response['access_token'], response=response,
*args, **kwargs)
def auth_complete_params(self, state=None):
client_id, client_secret = self.get_key_and_secret()
return {
'state': state,
'grant_type': 'authorization_code',
'code': self.data.get('code', ''), # server response code
'client_id': client_id,
'client_secret': client_secret,
'redirect_uri': self.get_redirect_uri(state)
}
def get_user_details(self, response):
username = re.sub('[^A-Za-z0-9]+', '_', response.get('name'))
fullname = "{} {}".format(response.get('firstName'), response.get('lastName'))
return {'username': username,
'email': response.get('mail'),
'fullname': fullname}
def user_data(self, access_token, *args, **kwargs):
response = self.get_json(self.USER_DATA_URL, headers={
'Authorization': 'Bearer {}'.format(access_token)
})
return response
def get_user_id(self, details, response):
return details.get('email')
|
Add initial version Careers and Enterprise Oauth2 backend.from django.conf import settings
from social_core.backends.oauth import BaseOAuth2
import logging
import re
log = logging.getLogger(__name__)
class CareersAndEnterpriseOAuth2(BaseOAuth2):
"""Careers and Enterprise Company OAuth2 authentication backend."""
settings_dict = settings.CUSTOM_BACKENDS.get('careersandenterprise')
name = 'careersandenterprise-oauth2'
REDIRECT_STATE = False
STATE_PARAMETER = False
AUTHORIZATION_URL = settings_dict.get('AUTH_URL')
ACCESS_TOKEN_URL = settings_dict.get('ACCESS_TOKEN_URL')
USER_DATA_URL = settings_dict.get('USER_DATA_URL')
ACCESS_TOKEN_METHOD = 'POST'
RESPONSE_TYPE = 'code'
def auth_complete(self, *args, **kwargs):
"""Completes login process, must return user instance."""
self.process_error(self.data)
state = self.validate_state()
response = self.request_access_token(
self.access_token_url(),
data=self.auth_complete_params(state),
headers=self.auth_headers(),
auth=self.auth_complete_credentials(),
method=self.ACCESS_TOKEN_METHOD
)
self.process_error(response)
return self.do_auth(response['access_token'], response=response,
*args, **kwargs)
def auth_complete_params(self, state=None):
client_id, client_secret = self.get_key_and_secret()
return {
'state': state,
'grant_type': 'authorization_code',
'code': self.data.get('code', ''), # server response code
'client_id': client_id,
'client_secret': client_secret,
'redirect_uri': self.get_redirect_uri(state)
}
def get_user_details(self, response):
username = re.sub('[^A-Za-z0-9]+', '_', response.get('name'))
fullname = "{} {}".format(response.get('firstName'), response.get('lastName'))
return {'username': username,
'email': response.get('mail'),
'fullname': fullname}
def user_data(self, access_token, *args, **kwargs):
response = self.get_json(self.USER_DATA_URL, headers={
'Authorization': 'Bearer {}'.format(access_token)
})
return response
def get_user_id(self, details, response):
return details.get('email')
|
<commit_before><commit_msg>Add initial version Careers and Enterprise Oauth2 backend.<commit_after>from django.conf import settings
from social_core.backends.oauth import BaseOAuth2
import logging
import re
log = logging.getLogger(__name__)
class CareersAndEnterpriseOAuth2(BaseOAuth2):
"""Careers and Enterprise Company OAuth2 authentication backend."""
settings_dict = settings.CUSTOM_BACKENDS.get('careersandenterprise')
name = 'careersandenterprise-oauth2'
REDIRECT_STATE = False
STATE_PARAMETER = False
AUTHORIZATION_URL = settings_dict.get('AUTH_URL')
ACCESS_TOKEN_URL = settings_dict.get('ACCESS_TOKEN_URL')
USER_DATA_URL = settings_dict.get('USER_DATA_URL')
ACCESS_TOKEN_METHOD = 'POST'
RESPONSE_TYPE = 'code'
def auth_complete(self, *args, **kwargs):
"""Completes login process, must return user instance."""
self.process_error(self.data)
state = self.validate_state()
response = self.request_access_token(
self.access_token_url(),
data=self.auth_complete_params(state),
headers=self.auth_headers(),
auth=self.auth_complete_credentials(),
method=self.ACCESS_TOKEN_METHOD
)
self.process_error(response)
return self.do_auth(response['access_token'], response=response,
*args, **kwargs)
def auth_complete_params(self, state=None):
client_id, client_secret = self.get_key_and_secret()
return {
'state': state,
'grant_type': 'authorization_code',
'code': self.data.get('code', ''), # server response code
'client_id': client_id,
'client_secret': client_secret,
'redirect_uri': self.get_redirect_uri(state)
}
def get_user_details(self, response):
username = re.sub('[^A-Za-z0-9]+', '_', response.get('name'))
fullname = "{} {}".format(response.get('firstName'), response.get('lastName'))
return {'username': username,
'email': response.get('mail'),
'fullname': fullname}
def user_data(self, access_token, *args, **kwargs):
response = self.get_json(self.USER_DATA_URL, headers={
'Authorization': 'Bearer {}'.format(access_token)
})
return response
def get_user_id(self, details, response):
return details.get('email')
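The backend reads its endpoints from settings.CUSTOM_BACKENDS; a hypothetical entry is sketched below (the URLs are placeholders, not the provider's real endpoints):

CUSTOM_BACKENDS = {
    'careersandenterprise': {
        'AUTH_URL': 'https://sso.example.org/oauth/authorize',
        'ACCESS_TOKEN_URL': 'https://sso.example.org/oauth/token',
        'USER_DATA_URL': 'https://sso.example.org/oauth/userinfo',
    },
}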
|
|
42a43d6594efc21ab29ea079f758df5bd2ec3c41
|
Homeworks/HW1/Problem1.py
|
Homeworks/HW1/Problem1.py
|
"""Problem 1: Break math
Break math using a computer. To be a bit more specific, demonstrate a
numerical calculation using the computer language of your choice where
the answer is demonstrably wrong. I'll want to see the code you used,
preferably something brief and punchy, and then the result. For full credit,
fix math again by demonstrating an alternate method of calculating the
result that previously broke math but yields the correct answer
"""
import math
a = 25.0
while ( math.sqrt(a)**2 == a ):
print('sqrt(a)^2 = ' + str(a) + ' = ' + str(math.sqrt(a)**2))
a *= 10
# There was a rounding error
print('sqrt(a)^2 = ' + str(a) + ' != ' + str(math.sqrt(a)**2))
# Determine the exponent of the float
expo = math.floor(math.log10(a))-1.0
# Reduce to only significant digits
b = a/(10**expo)
print('sqrt(a)^2 = ' + str(a) + ' = ' + str((math.sqrt(b)**2)*10**expo))
|
Add hw 1 problem 1 solution
|
Add hw 1 problem 1 solution
|
Python
|
mit
|
dankolbman/NumericalAnalysis
|
Add hw 1 problem 1 solution
|
"""Problem 1: Break math
Break math using a computer. To be a bit more specific, demonstrate a
numerical calculation using the computer language of your choice where
the answer is demonstrably wrong. I'll want to see the code you used,
preferably something brief and punchy, and then the result. For full credit,
fix math again by demonstrating an alternate method of calculating the
result that previously broke math but yields the correct answer
"""
import math
a = 25.0
while ( math.sqrt(a)**2 == a ):
print('sqrt(a)^2 = ' + str(a) + ' = ' + str(math.sqrt(a)**2))
a *= 10
# There was a rounding error
print('sqrt(a)^2 = ' + str(a) + ' != ' + str(math.sqrt(a)**2))
# Determine the exponent of the float
expo = math.floor(math.log10(a))-1.0
# Reduce to only significant digits
b = a/(10**expo)
print('sqrt(a)^2 = ' + str(a) + ' = ' + str((math.sqrt(b)**2)*10**expo))
|
<commit_before><commit_msg>Add hw 1 problem 1 solution<commit_after>
|
"""Problem 1: Break math
Break math using a computer. To be a bit more specific, demonstrate a
numerical calculation using the computer language of your choice where
the answer is demonstrably wrong. I'll want to see the code you used,
preferably something brief and punchy, and then the result. For full credit,
fix math again by demonstrating an alternate method of calculating the
result that previously broke math but yields the correct answer
"""
import math
a = 25.0
while ( math.sqrt(a)**2 == a ):
print('sqrt(a)^2 = ' + str(a) + ' = ' + str(math.sqrt(a)**2))
a *= 10
# There was a rounding error
print('sqrt(a)^2 = ' + str(a) + ' != ' + str(math.sqrt(a)**2))
# Determine the exponent of the float
expo = math.floor(math.log10(a))-1.0
# Reduce to only significant digits
b = a/(10**expo)
print('sqrt(a)^2 = ' + str(a) + ' = ' + str((math.sqrt(b)**2)*10**expo))
|
Add hw 1 problem 1 solution"""Problem 1: Break math
Break math using a computer. To be a bit more specific, demonstrate a
numerical calculation using the computer language of your choice where
the answer is demonstrably wrong. I'll want to see the code you used,
preferably something brief and punchy, and then the result. For full credit,
fix math again by demonstrating an alternate method of calculating the
result that previously broke math but yields the correct answer
"""
import math
a = 25.0
while ( math.sqrt(a)**2 == a ):
print('sqrt(a)^2 = ' + str(a) + ' = ' + str(math.sqrt(a)**2))
a *= 10
# There was a rounding error
print('sqrt(a)^2 = ' + str(a) + ' != ' + str(math.sqrt(a)**2))
# Determine the exponent of the float
expo = math.floor(math.log10(a))-1.0
# Reduce to only significant digits
b = a/(10**expo)
print('sqrt(a)^2 = ' + str(a) + ' = ' + str((math.sqrt(b)**2)*10**expo))
|
<commit_before><commit_msg>Add hw 1 problem 1 solution<commit_after>"""Problem 1: Break math
Break math using a computer. To be a bit more specific, demonstrate a
numerical calculation using the computer language of your choice where
the answer is demonstrably wrong. I'll want to see the code you used,
preferably something brief and punchy, and then the result. For full credit,
fix math again by demonstrating an alternate method of calculating the
result that previously broke math but yields the correct answer
"""
import math
a = 25.0
while ( math.sqrt(a)**2 == a ):
print('sqrt(a)^2 = ' + str(a) + ' = ' + str(math.sqrt(a)**2))
a *= 10
# There was a rounding error
print('sqrt(a)^2 = ' + str(a) + ' != ' + str(math.sqrt(a)**2))
# Determine the exponent of the float
expo = math.floor(math.log10(a))-1.0
# Reduce to only significant digits
b = a/(10**expo)
print('sqrt(a)^2 = ' + str(a) + ' = ' + str((math.sqrt(b)**2)*10**expo))
|
|
d997f0aa51b20be8038e96bd6a79783b507aea08
|
constantDraw.py
|
constantDraw.py
|
def draw_constant(res=10):
"""Re-draws a Read node using Constant nodes as pixels."""
# Checks that the user has selected a Read node.
try:
if nuke.selectedNode().Class() != "Read":
# Pushes pop up message to the user
nuke.message("No Read node selected to re-draw!")
return False
except ValueError:
nuke.message("No node selected!")
return False
else:
# Sets the Image Height/Width.
ih = nuke.selectedNode().height()
iw = nuke.selectedNode().width()
# Sets the Node Grid Height/Width, making sure to maintain Aspect
# Ratio.
h = res
w = res * iw / ih
# The loops construct the grid layout of the pixels.
for y in range(h):
for x in range(w):
# Construct the Constant nodes and set their position.
c = nuke.nodes.Constant(xpos=x * 70, ypos=-y * 70)
# Sets Constant colour to sampled pixels.
c['color'].setValue([
nuke.sample(nuke.selectedNode(), "r",
0.5 + (x * (iw / w)), 0.5 + (y * (ih / h))),
nuke.sample(nuke.selectedNode(), "g",
0.5 + (x * (iw / w)), 0.5 + (y * (ih / h))),
nuke.sample(nuke.selectedNode(), "b",
0.5 + (x * (iw / w)), 0.5 + (y * (ih / h))),
nuke.sample(nuke.selectedNode(), "a",
0.5 + (x * (iw / w)), 0.5 + (y * (ih / h)))
])
draw_constant()
|
Add .py file to repo.
|
Add .py file to repo.
|
Python
|
mit
|
SurpriseTRex/node-draw
|
Add .py file to repo.
|
def draw_constant(res=10):
"""Re-draws a Read node using Constant nodes as pixels."""
# Checks that the user has selected a Read node.
try:
if nuke.selectedNode().Class() != "Read":
# Pushes pop up message to the user
nuke.message("No Read node selected to re-draw!")
return False
except ValueError:
nuke.message("No node selected!")
return False
else:
# Sets the Image Height/Width.
ih = nuke.selectedNode().height()
iw = nuke.selectedNode().width()
# Sets the Node Grid Height/Width, making sure to maintain Aspect
# Ratio.
h = res
w = res * iw / ih
# The loops construct the grid layout of the pixels.
for y in range(h):
for x in range(w):
# Construct the Constant nodes and set their position.
c = nuke.nodes.Constant(xpos=x * 70, ypos=-y * 70)
# Sets Constant colour to sampled pixels.
c['color'].setValue([
nuke.sample(nuke.selectedNode(), "r",
0.5 + (x * (iw / w)), 0.5 + (y * (ih / h))),
nuke.sample(nuke.selectedNode(), "g",
0.5 + (x * (iw / w)), 0.5 + (y * (ih / h))),
nuke.sample(nuke.selectedNode(), "b",
0.5 + (x * (iw / w)), 0.5 + (y * (ih / h))),
nuke.sample(nuke.selectedNode(), "a",
0.5 + (x * (iw / w)), 0.5 + (y * (ih / h)))
])
draw_constant()
|
<commit_before><commit_msg>Add .py file to repo.<commit_after>
|
def draw_constant(res=10):
"""Re-draws a Read node using Constant nodes as pixels."""
# Checks that the user has selected a Read node.
try:
if nuke.selectedNode().Class() != "Read":
# Pushes pop up message to the user
nuke.message("No Read node selected to re-draw!")
return False
except ValueError:
nuke.message("No node selected!")
return False
else:
# Sets the Image Height/Width.
ih = nuke.selectedNode().height()
iw = nuke.selectedNode().width()
# Sets the Node Grid Height/Width, making sure to maintain Aspect
# Ratio.
h = res
w = res * iw / ih
# The loops construct the grid layout of the pixels.
for y in range(h):
for x in range(w):
# Construct the Constant nodes and set their position.
c = nuke.nodes.Constant(xpos=x * 70, ypos=-y * 70)
# Sets Constant colour to sampled pixels.
c['color'].setValue([
nuke.sample(nuke.selectedNode(), "r",
0.5 + (x * (iw / w)), 0.5 + (y * (ih / h))),
nuke.sample(nuke.selectedNode(), "g",
0.5 + (x * (iw / w)), 0.5 + (y * (ih / h))),
nuke.sample(nuke.selectedNode(), "b",
0.5 + (x * (iw / w)), 0.5 + (y * (ih / h))),
nuke.sample(nuke.selectedNode(), "a",
0.5 + (x * (iw / w)), 0.5 + (y * (ih / h)))
])
draw_constant()
|
Add .py file to repo.def draw_constant(res=10):
"""Re-draws a Read node using Constant nodes as pixels."""
# Checks that the user has selected a Read node.
try:
if nuke.selectedNode().Class() != "Read":
# Pushes pop up message to the user
nuke.message("No Read node selected to re-draw!")
return False
except ValueError:
nuke.message("No node selected!")
return False
else:
# Sets the Image Height/Width.
ih = nuke.selectedNode().height()
iw = nuke.selectedNode().width()
# Sets the Node Grid Height/Width, making sure to maintain Aspect
# Ratio.
h = res
w = res * iw / ih
# The loops construct the grid layout of the pixels.
for y in range(h):
for x in range(w):
# Construct the Constant nodes and set their position.
c = nuke.nodes.Constant(xpos=x * 70, ypos=-y * 70)
# Sets Constant colour to sampled pixels.
c['color'].setValue([
nuke.sample(nuke.selectedNode(), "r",
0.5 + (x * (iw / w)), 0.5 + (y * (ih / h))),
nuke.sample(nuke.selectedNode(), "g",
0.5 + (x * (iw / w)), 0.5 + (y * (ih / h))),
nuke.sample(nuke.selectedNode(), "b",
0.5 + (x * (iw / w)), 0.5 + (y * (ih / h))),
nuke.sample(nuke.selectedNode(), "a",
0.5 + (x * (iw / w)), 0.5 + (y * (ih / h)))
])
draw_constant()
|
<commit_before><commit_msg>Add .py file to repo.<commit_after>def draw_constant(res=10):
"""Re-draws a Read node using Constant nodes as pixels."""
# Checks that the user has selected a Read node.
try:
if nuke.selectedNode().Class() != "Read":
# Pushes pop up message to the user
nuke.message("No Read node selected to re-draw!")
return False
except ValueError:
nuke.message("No node selected!")
return False
else:
# Sets the Image Height/Width.
ih = nuke.selectedNode().height()
iw = nuke.selectedNode().width()
# Sets the Node Grid Height/Width, making sure to maintain Aspect
# Ratio.
h = res
w = res * iw / ih
# The loops construct the grid layout of the pixels.
for y in range(h):
for x in range(w):
# Construct the Constant nodes and set their position.
c = nuke.nodes.Constant(xpos=x * 70, ypos=-y * 70)
# Sets Constant colour to sampled pixels.
c['color'].setValue([
nuke.sample(nuke.selectedNode(), "r",
0.5 + (x * (iw / w)), 0.5 + (y * (ih / h))),
nuke.sample(nuke.selectedNode(), "g",
0.5 + (x * (iw / w)), 0.5 + (y * (ih / h))),
nuke.sample(nuke.selectedNode(), "b",
0.5 + (x * (iw / w)), 0.5 + (y * (ih / h))),
nuke.sample(nuke.selectedNode(), "a",
0.5 + (x * (iw / w)), 0.5 + (y * (ih / h)))
])
draw_constant()
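Two environment caveats, sketched under the assumption that newer Nuke releases ship Python 3: importing the file as a module needs an explicit import nuke at the top, and the grid width must use integer division, since res * iw / ih yields a float that range() rejects:

import nuke  # needed when the file is imported rather than pasted into the Script Editor

w = res * iw // ih  # floor division keeps w an int under Python 3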
|
|
0e899fabae552a33b7dfabe9908a0ad0279055b6
|
tests/header_passthrough_tests.py
|
tests/header_passthrough_tests.py
|
from django.test.client import RequestFactory
from mock import Mock
from unittest2 import TestCase
from helpers import RequestPatchMixin
from test_views import TestProxy
class HttpProxyHeaderPassThrough(TestCase, RequestPatchMixin):
"""HttpProxy header pass through"""
def setUp(self):
self.proxy = TestProxy.as_view()
self.browser_request = RequestFactory().get('/')
# Fake headers that are representative of how Django munges them when
# it sticks them into the META dict.
self.browser_request.META['HTTP_Host'] = 'cnn.com'
self.browser_request.META['HTTP_X_Forwarded_For'] = 'ipaddr 1'
self.browser_request.META['HTTP_UNNORMALIZED_HEADER'] = 'header value'
self.patch_request(Mock(headers={'Fake-Header': '123'}))
self.proxy(self.browser_request)
        # The value of the headers kwarg that gets passed to request_method
self.headers = self.request.mock_calls[0][2]['headers']
def test_passes_non_http_prefixed_headers_to_proxied_endpoint(self):
self.assertIn('Content-Type', self.headers)
def test_filters_disallowed_headers(self):
self.assertNotIn('Host', self.headers)
def test_passes_django_http_prefixed_headers_to_proxied_endpoint(self):
self.assertIn('X-Forwarded-For', self.headers)
def test_normalizes_header_names(self):
self.assertIn('Unnormalized-Header', self.headers)
def test_doesnt_modify_header_values(self):
self.assertEqual(self.headers['X-Forwarded-For'], 'ipaddr 1')
|
Add header pass through tests
|
Add header pass through tests
|
Python
|
mit
|
thomasw/djproxy
|
Add header pass through tests
|
from django.test.client import RequestFactory
from mock import Mock
from unittest2 import TestCase
from helpers import RequestPatchMixin
from test_views import TestProxy
class HttpProxyHeaderPassThrough(TestCase, RequestPatchMixin):
"""HttpProxy header pass through"""
def setUp(self):
self.proxy = TestProxy.as_view()
self.browser_request = RequestFactory().get('/')
# Fake headers that are representative of how Django munges them when
# it sticks them into the META dict.
self.browser_request.META['HTTP_Host'] = 'cnn.com'
self.browser_request.META['HTTP_X_Forwarded_For'] = 'ipaddr 1'
self.browser_request.META['HTTP_UNNORMALIZED_HEADER'] = 'header value'
self.patch_request(Mock(headers={'Fake-Header': '123'}))
self.proxy(self.browser_request)
        # The value of the headers kwarg that gets passed to request_method
self.headers = self.request.mock_calls[0][2]['headers']
def test_passes_non_http_prefixed_headers_to_proxied_endpoint(self):
self.assertIn('Content-Type', self.headers)
def test_filters_disallowed_headers(self):
self.assertNotIn('Host', self.headers)
def test_passes_django_http_prefixed_headers_to_proxied_endpoint(self):
self.assertIn('X-Forwarded-For', self.headers)
def test_normalizes_header_names(self):
self.assertIn('Unnormalized-Header', self.headers)
def test_doesnt_modify_header_values(self):
self.assertEqual(self.headers['X-Forwarded-For'], 'ipaddr 1')
|
<commit_before><commit_msg>Add header pass through tests<commit_after>
|
from django.test.client import RequestFactory
from mock import Mock
from unittest2 import TestCase
from helpers import RequestPatchMixin
from test_views import TestProxy
class HttpProxyHeaderPassThrough(TestCase, RequestPatchMixin):
"""HttpProxy header pass through"""
def setUp(self):
self.proxy = TestProxy.as_view()
self.browser_request = RequestFactory().get('/')
# Fake headers that are representative of how Django munges them when
# it sticks them into the META dict.
self.browser_request.META['HTTP_Host'] = 'cnn.com'
self.browser_request.META['HTTP_X_Forwarded_For'] = 'ipaddr 1'
self.browser_request.META['HTTP_UNNORMALIZED_HEADER'] = 'header value'
self.patch_request(Mock(headers={'Fake-Header': '123'}))
self.proxy(self.browser_request)
        # The value of the headers kwarg that gets passed to request_method
self.headers = self.request.mock_calls[0][2]['headers']
def test_passes_non_http_prefixed_headers_to_proxied_endpoint(self):
self.assertIn('Content-Type', self.headers)
def test_filters_disallowed_headers(self):
self.assertNotIn('Host', self.headers)
def test_passes_django_http_prefixed_headers_to_proxied_endpoint(self):
self.assertIn('X-Forwarded-For', self.headers)
def test_normalizes_header_names(self):
self.assertIn('Unnormalized-Header', self.headers)
def test_doesnt_modify_header_values(self):
self.assertEqual(self.headers['X-Forwarded-For'], 'ipaddr 1')
|
Add header pass through testsfrom django.test.client import RequestFactory
from mock import Mock
from unittest2 import TestCase
from helpers import RequestPatchMixin
from test_views import TestProxy
class HttpProxyHeaderPassThrough(TestCase, RequestPatchMixin):
"""HttpProxy header pass through"""
def setUp(self):
self.proxy = TestProxy.as_view()
self.browser_request = RequestFactory().get('/')
# Fake headers that are representative of how Django munges them when
# it sticks them into the META dict.
self.browser_request.META['HTTP_Host'] = 'cnn.com'
self.browser_request.META['HTTP_X_Forwarded_For'] = 'ipaddr 1'
self.browser_request.META['HTTP_UNNORMALIZED_HEADER'] = 'header value'
self.patch_request(Mock(headers={'Fake-Header': '123'}))
self.proxy(self.browser_request)
# The value of the headers kwarg that gets passed to request_method
self.headers = self.request.mock_calls[0][2]['headers']
def test_passes_non_http_prefixed_headers_to_proxied_endpoint(self):
self.assertIn('Content-Type', self.headers)
def test_filters_disallowed_headers(self):
self.assertNotIn('Host', self.headers)
def test_passes_django_http_prefixed_headers_to_proxied_endpoint(self):
self.assertIn('X-Forwarded-For', self.headers)
def test_normalizes_header_names(self):
self.assertIn('Unnormalized-Header', self.headers)
def test_doesnt_modify_header_values(self):
self.assertEqual(self.headers['X-Forwarded-For'], 'ipaddr 1')
|
<commit_before><commit_msg>Add header pass through tests<commit_after>from django.test.client import RequestFactory
from mock import Mock
from unittest2 import TestCase
from helpers import RequestPatchMixin
from test_views import TestProxy
class HttpProxyHeaderPassThrough(TestCase, RequestPatchMixin):
"""HttpProxy header pass through"""
def setUp(self):
self.proxy = TestProxy.as_view()
self.browser_request = RequestFactory().get('/')
# Fake headers that are representative of how Django munges them when
# it sticks them into the META dict.
self.browser_request.META['HTTP_Host'] = 'cnn.com'
self.browser_request.META['HTTP_X_Forwarded_For'] = 'ipaddr 1'
self.browser_request.META['HTTP_UNNORMALIZED_HEADER'] = 'header value'
self.patch_request(Mock(headers={'Fake-Header': '123'}))
self.proxy(self.browser_request)
# The value of the headers kwarg that gets passed to request_method
self.headers = self.request.mock_calls[0][2]['headers']
def test_passes_non_http_prefixed_headers_to_proxied_endpoint(self):
self.assertIn('Content-Type', self.headers)
def test_filters_disallowed_headers(self):
self.assertNotIn('Host', self.headers)
def test_passes_django_http_prefixed_headers_to_proxied_endpoint(self):
self.assertIn('X-Forwarded-For', self.headers)
def test_normalizes_header_names(self):
self.assertIn('Unnormalized-Header', self.headers)
def test_doesnt_modify_header_values(self):
self.assertEqual(self.headers['X-Forwarded-For'], 'ipaddr 1')
|
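These tests pin down the header translation behavior without showing the proxy itself. A minimal sketch of the mapping they imply — reversing Django's HTTP_-prefixed META keys, Title-Casing the names, and filtering disallowed headers such as Host — follows; the function name and the ignore list are illustrative assumptions, not djproxy's actual code.

def headers_from_meta(meta, ignored=('Host',)):
    """Rebuild outbound request headers from a Django META dict (sketch)."""
    headers = {}
    for key, value in meta.items():
        if key.startswith('HTTP_'):
            # HTTP_X_FORWARDED_FOR -> X-Forwarded-For
            name = key[5:].replace('_', '-').title()
        elif key in ('CONTENT_TYPE', 'CONTENT_LENGTH'):
            name = key.replace('_', '-').title()
        else:
            continue
        if name not in ignored:
            headers[name] = value
    return headers

print(headers_from_meta({'HTTP_X_FORWARDED_FOR': 'ipaddr 1',
                         'HTTP_HOST': 'cnn.com', 'CONTENT_TYPE': 'text/html'}))
# {'X-Forwarded-For': 'ipaddr 1', 'Content-Type': 'text/html'}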
|
198fe89f055d2bdd1d8b0ea2a8df46f75a9ee21d
|
guessenc.py
|
guessenc.py
|
#!/usr/bin/env python
import sys
# common characters in GB
gb_cc = frozenset([
b"\xce\xd2", # 我
b"\xb5\xc4", # 的
b"\xc4\xe3", # 你
b"\xca\xc7", # 是
b"\xb2\xbb", # 不
b"\xc1\xcb", # 了
b"\xd2\xbb", # 一
b"\xc3\xc7", # 们
b"\xd5\xe2", # 这
b"\xd3\xd0" # 有
])
big5_cc = frozenset([
b"\xa7\xda", # 我
b"\xaa\xba", # 的
b"\xa7\x41", # 你
b"\xac\x4f", # 是
b"\xa4\xa3", # 不
b"\xa4\x46", # 了
b"\xa4\x40", # 一
b"\xad\xcc", # 們
b"\xb3\x6f", # 這
b"\xa6\xb3" # 有
])
utf8_cc = frozenset([
"我".encode(),
"的".encode(),
"你".encode(),
"是".encode(),
"不".encode(),
"了".encode(),
"一".encode(),
"们".encode(),
"这".encode(),
"有".encode(),
"們".encode(),
"這".encode()
])
def process_file(f):
txt = open(f, 'rb').read()
big5score = 0
gbscore = 0
utf8score = 0
for i, b in enumerate(txt[:-3]):
# look for high bytes
if b > 0xa0:
if b < 0xb4: # could be common big5 char
if bytes([b, txt[i+1]]) in big5_cc:
big5score += 1
if 0xb2 <= b <= 0xd5: # could be common gb char
if bytes([b, txt[i+1]]) in gb_cc:
gbscore += 1
if 0xe4 <= b <= 0xe8:
if bytes([b, txt[i+1], txt[i+2]]) in utf8_cc:
utf8score += 1
if utf8score >= gbscore:
if utf8score >= big5score:
return 'utf8'
else:
return 'big5'
else:
if gbscore > big5score:
return 'gb18030'
else:
return 'big5'
if __name__ == "__main__":
print(process_file(sys.argv[1]))
|
Add script to guess text encoding
|
Add script to guess text encoding
|
Python
|
agpl-3.0
|
erjiang/hanzidefs
|
Add script to guess text encoding
|
#!/usr/bin/env python
import sys
# common characters in GB
gb_cc = frozenset([
b"\xce\xd2", # 我
b"\xb5\xc4", # 的
b"\xc4\xe3", # 你
b"\xca\xc7", # 是
b"\xb2\xbb", # 不
b"\xc1\xcb", # 了
b"\xd2\xbb", # 一
b"\xc3\xc7", # 们
b"\xd5\xe2", # 这
b"\xd3\xd0" # 有
])
big5_cc = frozenset([
b"\xa7\xda", # 我
b"\xaa\xba", # 的
b"\xa7\x41", # 你
b"\xac\x4f", # 是
b"\xa4\xa3", # 不
b"\xa4\x46", # 了
b"\xa4\x40", # 一
b"\xad\xcc", # 們
b"\xb3\x6f", # 這
b"\xa6\xb3" # 有
])
utf8_cc = frozenset([
"我".encode(),
"的".encode(),
"你".encode(),
"是".encode(),
"不".encode(),
"了".encode(),
"一".encode(),
"们".encode(),
"这".encode(),
"有".encode(),
"們".encode(),
"這".encode()
])
def process_file(f):
txt = open(f, 'rb').read()
big5score = 0
gbscore = 0
utf8score = 0
for i, b in enumerate(txt[:-3]):
# look for high bytes
if b > 0xa0:
if b < 0xb4: # could be common big5 char
if bytes([b, txt[i+1]]) in big5_cc:
big5score += 1
if 0xb2 <= b <= 0xd5: # could be common gb char
if bytes([b, txt[i+1]]) in gb_cc:
gbscore += 1
if 0xe4 <= b <= 0xe8:
if bytes([b, txt[i+1], txt[i+2]]) in utf8_cc:
utf8score += 1
if utf8score >= gbscore:
if utf8score >= big5score:
return 'utf8'
else:
return 'big5'
else:
if gbscore > big5score:
return 'gb18030'
else:
return 'big5'
if __name__ == "__main__":
print(process_file(sys.argv[1]))
|
<commit_before><commit_msg>Add script to guess text encoding<commit_after>
|
#!/usr/bin/env python
import sys
# common characters in GB
gb_cc = frozenset([
b"\xce\xd2", # 我
b"\xb5\xc4", # 的
b"\xc4\xe3", # 你
b"\xca\xc7", # 是
b"\xb2\xbb", # 不
b"\xc1\xcb", # 了
b"\xd2\xbb", # 一
b"\xc3\xc7", # 们
b"\xd5\xe2", # 这
b"\xd3\xd0" # 有
])
big5_cc = frozenset([
b"\xa7\xda", # 我
b"\xaa\xba", # 的
b"\xa7\x41", # 你
b"\xac\x4f", # 是
b"\xa4\xa3", # 不
b"\xa4\x46", # 了
b"\xa4\x40", # 一
b"\xad\xcc", # 們
b"\xb3\x6f", # 這
b"\xa6\xb3" # 有
])
utf8_cc = frozenset([
"我".encode(),
"的".encode(),
"你".encode(),
"是".encode(),
"不".encode(),
"了".encode(),
"一".encode(),
"们".encode(),
"这".encode(),
"有".encode(),
"們".encode(),
"這".encode()
])
def process_file(f):
txt = open(f, 'rb').read()
big5score = 0
gbscore = 0
utf8score = 0
for i, b in enumerate(txt[:-3]):
# look for high bytes
if b > 0xa0:
if b < 0xb4: # could be common big5 char
if bytes([b, txt[i+1]]) in big5_cc:
big5score += 1
if 0xb2 <= b <= 0xd5: # could be common gb char
if bytes([b, txt[i+1]]) in gb_cc:
gbscore += 1
if 0xe4 <= b <= 0xe8:
if bytes([b, txt[i+1], txt[i+2]]) in utf8_cc:
utf8score += 1
if utf8score >= gbscore:
if utf8score >= big5score:
return 'utf8'
else:
return 'big5'
else:
if gbscore > big5score:
return 'gb18030'
else:
return 'big5'
if __name__ == "__main__":
print(process_file(sys.argv[1]))
|
Add script to guess text encoding#!/usr/bin/env python
import sys
# common characters in GB
gb_cc = frozenset([
b"\xce\xd2", # 我
b"\xb5\xc4", # 的
b"\xc4\xe3", # 你
b"\xca\xc7", # 是
b"\xb2\xbb", # 不
b"\xc1\xcb", # 了
b"\xd2\xbb", # 一
b"\xc3\xc7", # 们
b"\xd5\xe2", # 这
b"\xd3\xd0" # 有
])
big5_cc = frozenset([
b"\xa7\xda", # 我
b"\xaa\xba", # 的
b"\xa7\x41", # 你
b"\xac\x4f", # 是
b"\xa4\xa3", # 不
b"\xa4\x46", # 了
b"\xa4\x40", # 一
b"\xad\xcc", # 們
b"\xb3\x6f", # 這
b"\xa6\xb3" # 有
])
utf8_cc = frozenset([
"我".encode(),
"的".encode(),
"你".encode(),
"是".encode(),
"不".encode(),
"了".encode(),
"一".encode(),
"们".encode(),
"这".encode(),
"有".encode(),
"們".encode(),
"這".encode()
])
def process_file(f):
txt = open(f, 'rb').read()
big5score = 0
gbscore = 0
utf8score = 0
for i, b in enumerate(txt[:-3]):
# look for high bytes
if b > 0xa0:
if b < 0xb4: # could be common big5 char
if bytes([b, txt[i+1]]) in big5_cc:
big5score += 1
if 0xb2 <= b <= 0xd5: # could be common gb char
if bytes([b, txt[i+1]]) in gb_cc:
gbscore += 1
if 0xe4 <= b <= 0xe8:
if bytes([b, txt[i+1], txt[i+2]]) in utf8_cc:
utf8score += 1
if utf8score >= gbscore:
if utf8score >= big5score:
return 'utf8'
else:
return 'big5'
else:
if gbscore > big5score:
return 'gb18030'
else:
return 'big5'
if __name__ == "__main__":
print(process_file(sys.argv[1]))
|
<commit_before><commit_msg>Add script to guess text encoding<commit_after>#!/usr/bin/env python
import sys
# common characters in GB
gb_cc = frozenset([
b"\xce\xd2", # 我
b"\xb5\xc4", # 的
b"\xc4\xe3", # 你
b"\xca\xc7", # 是
b"\xb2\xbb", # 不
b"\xc1\xcb", # 了
b"\xd2\xbb", # 一
b"\xc3\xc7", # 们
b"\xd5\xe2", # 这
b"\xd3\xd0" # 有
])
big5_cc = frozenset([
b"\xa7\xda", # 我
b"\xaa\xba", # 的
b"\xa7\x41", # 你
b"\xac\x4f", # 是
b"\xa4\xa3", # 不
b"\xa4\x46", # 了
b"\xa4\x40", # 一
b"\xad\xcc", # 們
b"\xb3\x6f", # 這
b"\xa6\xb3" # 有
])
utf8_cc = frozenset([
"我".encode(),
"的".encode(),
"你".encode(),
"是".encode(),
"不".encode(),
"了".encode(),
"一".encode(),
"们".encode(),
"这".encode(),
"有".encode(),
"們".encode(),
"這".encode()
])
def process_file(f):
txt = open(f, 'rb').read()
big5score = 0
gbscore = 0
utf8score = 0
for i, b in enumerate(txt[:-3]):
# look for high bytes
if b > 0xa0:
if b < 0xb4: # could be common big5 char
if bytes([b, txt[i+1]]) in big5_cc:
big5score += 1
if 0xb2 <= b <= 0xd5: # could be common gb char
if bytes([b, txt[i+1]]) in gb_cc:
gbscore += 1
if 0xe4 <= b <= 0xe8:
if bytes([b, txt[i+1], txt[i+2]]) in utf8_cc:
utf8score += 1
if utf8score >= gbscore:
if utf8score >= big5score:
return 'utf8'
else:
return 'big5'
else:
if gbscore > big5score:
return 'gb18030'
else:
return 'big5'
if __name__ == "__main__":
print(process_file(sys.argv[1]))
|
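Because the scorer only inspects raw bytes, the heuristic is easy to smoke-test by writing the same sentence out in each encoding and feeding the files back in. A quick check, assuming the script above is saved as guessenc.py; expected results are shown in the comment.

import tempfile
import guessenc  # assumes the script above is saved as guessenc.py

sample = '我的你是不了一'  # characters present in all three frequency sets
for enc in ('utf8', 'gb18030', 'big5'):
    with tempfile.NamedTemporaryFile(suffix='.txt', delete=False) as f:
        f.write(sample.encode(enc))
    print(enc, '->', guessenc.process_file(f.name))  # each encoding should round-trip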
|
4b91050ddb5357970462d5a15fe0d1ed97d51178
|
tests/variable_fields.py
|
tests/variable_fields.py
|
"""Test cases for variable fields
"""
import unittest
from lighty.templates import Template
class VariableFieldTestCase(unittest.TestCase):
"""Test case for block template tag
"""
def setUp(self):
self.value = 'value'
self.variable_template = Template(name='base.html')
self.variable_template.parse("{{ simple_var }}")
self.object_field_template = Template(name='object-field.html')
self.object_field_template.parse('{{ object.field }}')
self.deep_template = Template(name='deep-field.html')
self.deep_template.parse('{{ object.field.field }}')
def assertResult(self, result):
assert result == self.value, 'Error template execution: %s' % ' '.join(
[result, 'expected', self.value])
def testSimpleVariable(self):
result = self.variable_template.execute({'simple_var': 'value'})
self.assertResult(result)
def testObjectField(self):
class TestClass(object):
field = self.value
result = self.object_field_template.execute({'object': TestClass()})
self.assertResult(result)
def testDictValue(self):
obj = {'field': self.value }
result = self.object_field_template.execute({'object': obj})
self.assertResult(result)
def testMultilevelField(self):
class TestClass(object):
field = {'field': self.value}
result = self.deep_template.execute({'object': TestClass()})
self.assertResult(result)
def test():
suite = unittest.TestSuite()
suite.addTest(VariableFieldTestCase('testSimpleVariable'))
suite.addTest(VariableFieldTestCase('testObjectField'))
suite.addTest(VariableFieldTestCase('testDictValue'))
suite.addTest(VariableFieldTestCase('testMultilevelField'))
return suite
|
Add test for fields variables lookup
|
Add test for fields variables lookup
|
Python
|
bsd-3-clause
|
GrAndSE/lighty,GrAndSE/lighty-template
|
Add test for fields variables lookup
|
"""Test cases for variable fields
"""
import unittest
from lighty.templates import Template
class VariableFieldTestCase(unittest.TestCase):
"""Test case for block template tag
"""
def setUp(self):
self.value = 'value'
self.variable_template = Template(name='base.html')
self.variable_template.parse("{{ simple_var }}")
self.object_field_template = Template(name='object-field.html')
self.object_field_template.parse('{{ object.field }}')
self.deep_template = Template(name='deep-field.html')
self.deep_template.parse('{{ object.field.field }}')
def assertResult(self, result):
assert result == self.value, 'Error template execution: %s' % ' '.join(
[result, 'expected', self.value])
def testSimpleVariable(self):
result = self.variable_template.execute({'simple_var': 'value'})
self.assertResult(result)
def testObjectField(self):
class TestClass(object):
field = self.value
result = self.object_field_template.execute({'object': TestClass()})
self.assertResult(result)
def testDictValue(self):
obj = {'field': self.value }
result = self.object_field_template.execute({'object': obj})
self.assertResult(result)
def testMultilevelField(self):
class TestClass(object):
field = {'field': self.value}
result = self.deep_template.execute({'object': TestClass()})
self.assertResult(result)
def test():
suite = unittest.TestSuite()
suite.addTest(VariableFieldTestCase('testSimpleVariable'))
suite.addTest(VariableFieldTestCase('testObjectField'))
suite.addTest(VariableFieldTestCase('testDictValue'))
suite.addTest(VariableFieldTestCase('testMultilevelField'))
return suite
|
<commit_before><commit_msg>Add test for fields variables lookup<commit_after>
|
"""Test cases for variable fields
"""
import unittest
from lighty.templates import Template
class VariableFieldTestCase(unittest.TestCase):
"""Test case for block template tag
"""
def setUp(self):
self.value = 'value'
self.variable_template = Template(name='base.html')
self.variable_template.parse("{{ simple_var }}")
self.object_field_template = Template(name='object-field.html')
self.object_field_template.parse('{{ object.field }}')
self.deep_template = Template(name='deep-field.html')
self.deep_template.parse('{{ object.field.field }}')
def assertResult(self, result):
assert result == self.value, 'Error template execution: %s' % ' '.join(
[result, 'expected', self.value])
def testSimpleVariable(self):
result = self.variable_template.execute({'simple_var': 'value'})
self.assertResult(result)
def testObjectField(self):
class TestClass(object):
field = self.value
result = self.object_field_template.execute({'object': TestClass()})
self.assertResult(result)
def testDictValue(self):
obj = {'field': self.value }
result = self.object_field_template.execute({'object': obj})
self.assertResult(result)
def testMultilevelField(self):
class TestClass(object):
field = {'field': self.value}
result = self.deep_template.execute({'object': TestClass()})
self.assertResult(result)
def test():
suite = unittest.TestSuite()
suite.addTest(VariableFieldTestCase('testSimpleVariable'))
suite.addTest(VariableFieldTestCase('testObjectField'))
suite.addTest(VariableFieldTestCase('testDictValue'))
suite.addTest(VariableFieldTestCase('testMultilevelField'))
return suite
|
Add test for fields variables lookup"""Test cases for variable fields
"""
import unittest
from lighty.templates import Template
class VariableFieldTestCase(unittest.TestCase):
"""Test case for block template tag
"""
def setUp(self):
self.value = 'value'
self.variable_template = Template(name='base.html')
self.variable_template.parse("{{ simple_var }}")
self.object_field_template = Template(name='object-field.html')
self.object_field_template.parse('{{ object.field }}')
self.deep_template = Template(name='deep-field.html')
self.deep_template.parse('{{ object.field.field }}')
def assertResult(self, result):
assert result == self.value, 'Error template execution: %s' % ' '.join(
[result, 'expected', self.value])
def testSimpleVariable(self):
result = self.variable_template.execute({'simple_var': 'value'})
self.assertResult(result)
def testObjectField(self):
class TestClass(object):
field = self.value
result = self.object_field_template.execute({'object': TestClass()})
self.assertResult(result)
def testDictValue(self):
obj = {'field': self.value }
result = self.object_field_template.execute({'object': obj})
self.assertResult(result)
def testMultilevelField(self):
class TestClass(object):
field = {'field': self.value}
result = self.deep_template.execute({'object': TestClass()})
self.assertResult(result)
def test():
suite = unittest.TestSuite()
suite.addTest(VariableFieldTestCase('testSimpleVariable'))
suite.addTest(VariableFieldTestCase('testObjectField'))
suite.addTest(VariableFieldTestCase('testDictValue'))
suite.addTest(VariableFieldTestCase('testMultilevelField'))
return suite
|
<commit_before><commit_msg>Add test for fields variables lookup<commit_after>"""Test cases for variable fields
"""
import unittest
from lighty.templates import Template
class VariableFieldTestCase(unittest.TestCase):
"""Test case for block template tag
"""
def setUp(self):
self.value = 'value'
self.variable_template = Template(name='base.html')
self.variable_template.parse("{{ simple_var }}")
self.object_field_template = Template(name='object-field.html')
self.object_field_template.parse('{{ object.field }}')
self.deep_template = Template(name='deep-field.html')
self.deep_template.parse('{{ object.field.field }}')
def assertResult(self, result):
assert result == self.value, 'Error template execution: %s' % ' '.join(
[result, 'expected', self.value])
def testSimpleVariable(self):
result = self.variable_template.execute({'simple_var': 'value'})
self.assertResult(result)
def testObjectField(self):
class TestClass(object):
field = self.value
result = self.object_field_template.execute({'object': TestClass()})
self.assertResult(result)
def testDictValue(self):
obj = {'field': self.value }
result = self.object_field_template.execute({'object': obj})
self.assertResult(result)
def testMultilevelField(self):
class TestClass(object):
field = {'field': self.value}
result = self.deep_template.execute({'object': TestClass()})
self.assertResult(result)
def test():
suite = unittest.TestSuite()
suite.addTest(VariableFieldTestCase('testSimpleVariable'))
suite.addTest(VariableFieldTestCase('testObjectField'))
suite.addTest(VariableFieldTestCase('testDictValue'))
suite.addTest(VariableFieldTestCase('testMultilevelField'))
return suite
|
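The deep-field test assumes the engine resolves each dotted segment as either an object attribute or a dict key. lighty's actual resolver isn't shown here; the lookup rule the tests encode can be sketched with an invented helper:

def resolve(obj, path):
    """Resolve 'a.b.c' paths: try attribute access first, then dict lookup."""
    for part in path.split('.'):
        obj = getattr(obj, part) if hasattr(obj, part) else obj[part]
    return obj

class Holder(object):
    field = {'field': 'value'}

assert resolve(Holder(), 'field.field') == 'value'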
|
7d8164982754c86cc3fb90662795373845813c47
|
myuw/management/commands/check_preference.py
|
myuw/management/commands/check_preference.py
|
#!/usr/bin/python
"""
Report the saved MyUW migration preference for the given user.
"""
import sys
from django.core.management.base import BaseCommand, CommandError
from myuw.models import UserMigrationPreference
class Command(BaseCommand):
args = "<user netid>"
help = "Find the Migration Preference for the given user"
def handle(self, *args, **kwargs):
if len(args) < 1:
raise CommandError("Invalid argument %s" % args)
username = args[0]
pref = False
try:
saved = UserMigrationPreference.objects.get(username=username)
if saved.use_legacy_site:
pref = True
except UserMigrationPreference.DoesNotExist:
pass
if pref:
print "%s prefers the legacy MyUW" % username
else:
print "%s doesn't prefers the legacy MyUW" % username
|
Add a management command to check legacy preference
|
Add a management command to check legacy preference
|
Python
|
apache-2.0
|
uw-it-aca/myuw,uw-it-aca/myuw,uw-it-aca/myuw,uw-it-aca/myuw
|
Add a management command to check legacy preference
|
#!/usr/bin/python
"""
Report the saved MyUW migration preference for the given user.
"""
import sys
from django.core.management.base import BaseCommand, CommandError
from myuw.models import UserMigrationPreference
class Command(BaseCommand):
args = "<user netid>"
help = "Find the Migration Preference for the given user"
def handle(self, *args, **kwargs):
if len(args) < 1:
raise CommandError("Invalid argument %s" % args)
username = args[0]
pref = False
try:
saved = UserMigrationPreference.objects.get(username=username)
if saved.use_legacy_site:
pref = True
except UserMigrationPreference.DoesNotExist:
pass
if pref:
print "%s prefers the legacy MyUW" % username
else:
print "%s doesn't prefers the legacy MyUW" % username
|
<commit_before><commit_msg>Add a management command to check legacy preference<commit_after>
|
#!/usr/bin/python
"""
Report the saved MyUW migration preference for the given user.
"""
import sys
from django.core.management.base import BaseCommand, CommandError
from myuw.models import UserMigrationPreference
class Command(BaseCommand):
args = "<user netid>"
help = "Find the Migration Preference for the given user"
def handle(self, *args, **kwargs):
if len(args) < 1:
raise CommandError("Invalid argument %s" % args)
username = args[0]
pref = False
try:
saved = UserMigrationPreference.objects.get(username=username)
if saved.use_legacy_site:
pref = True
except UserMigrationPreference.DoesNotExist:
pass
if pref:
print "%s prefers the legacy MyUW" % username
else:
print "%s doesn't prefers the legacy MyUW" % username
|
Add a management command to check legacy preference#!/usr/bin/python
"""
Report the saved MyUW migration preference for the given user.
"""
import sys
from django.core.management.base import BaseCommand, CommandError
from myuw.models import UserMigrationPreference
class Command(BaseCommand):
args = "<user netid>"
help = "Find the Migration Preference for the given user"
def handle(self, *args, **kwargs):
if len(args) < 1:
raise CommandError("Invalid argument %s" % args)
username = args[0]
pref = False
try:
saved = UserMigrationPreference.objects.get(username=username)
if saved.use_legacy_site:
pref = True
except UserMigrationPreference.DoesNotExist:
pass
if pref:
print "%s prefers the legacy MyUW" % username
else:
print "%s doesn't prefers the legacy MyUW" % username
|
<commit_before><commit_msg>Add a management command to check legacy preference<commit_after>#!/usr/bin/python
"""
Report the saved MyUW migration preference for the given user.
"""
import sys
from django.core.management.base import BaseCommand, CommandError
from myuw.models import UserMigrationPreference
class Command(BaseCommand):
args = "<user netid>"
help = "Find the Migration Preference for the given user"
def handle(self, *args, **kwargs):
if len(args) < 1:
raise CommandError("Invalid argument %s" % args)
username = args[0]
pref = False
try:
saved = UserMigrationPreference.objects.get(username=username)
if saved.use_legacy_site:
pref = True
except UserMigrationPreference.DoesNotExist:
pass
if pref:
print "%s prefers the legacy MyUW" % username
else:
print "%s doesn't prefers the legacy MyUW" % username
|
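The positional args/*args handling above is the pre-1.8 Django management-command style; Django 1.8+ drops it in favour of add_arguments. A sketch of the same command on the newer interface (behaviour preserved, not taken from the myuw repo):

from django.core.management.base import BaseCommand
from myuw.models import UserMigrationPreference

class Command(BaseCommand):
    help = "Find the Migration Preference for the given user"

    def add_arguments(self, parser):
        parser.add_argument('username')

    def handle(self, *args, **options):
        username = options['username']
        try:
            prefers_legacy = UserMigrationPreference.objects.get(
                username=username).use_legacy_site
        except UserMigrationPreference.DoesNotExist:
            prefers_legacy = False
        verdict = 'prefers' if prefers_legacy else "doesn't prefer"
        self.stdout.write('%s %s the legacy MyUW' % (username, verdict))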
|
5caf415a1517017271cbdce8e5bd3de3e552420e
|
test/unittests/skills/test_context.py
|
test/unittests/skills/test_context.py
|
from unittest import TestCase, mock
from mycroft.skills.context import adds_context, removes_context
"""
Tests for the adapt context decorators.
"""
class ContextSkillMock(mock.Mock):
"""Mock class to apply decorators on."""
@adds_context('DestroyContext')
def handler_adding_context(self):
pass
@adds_context('DestroyContext', 'exterminate')
def handler_adding_context_with_words(self):
pass
@removes_context('DestroyContext')
def handler_removing_context(self):
pass
class TestContextDecorators(TestCase):
def test_adding_context(self):
"""Check that calling handler adds the correct Keyword."""
skill = ContextSkillMock()
skill.handler_adding_context()
skill.set_context.assert_called_once_with('DestroyContext', '')
def test_adding_context_with_words(self):
"""Ensure that decorated handler adds Keyword and content."""
skill = ContextSkillMock()
skill.handler_adding_context_with_words()
skill.set_context.assert_called_once_with('DestroyContext',
'exterminate')
def test_removing_context(self):
"""Make sure the decorated handler removes the specified context."""
skill = ContextSkillMock()
skill.handler_removing_context()
skill.remove_context.assert_called_once_with('DestroyContext')
|
Add tests for context decorators
|
Add tests for context decorators
|
Python
|
apache-2.0
|
forslund/mycroft-core,MycroftAI/mycroft-core,MycroftAI/mycroft-core,forslund/mycroft-core
|
Add tests for context decorators
|
from unittest import TestCase, mock
from mycroft.skills.context import adds_context, removes_context
"""
Tests for the adapt context decorators.
"""
class ContextSkillMock(mock.Mock):
"""Mock class to apply decorators on."""
@adds_context('DestroyContext')
def handler_adding_context(self):
pass
@adds_context('DestroyContext', 'exterminate')
def handler_adding_context_with_words(self):
pass
@removes_context('DestroyContext')
def handler_removing_context(self):
pass
class TestContextDecorators(TestCase):
def test_adding_context(self):
"""Check that calling handler adds the correct Keyword."""
skill = ContextSkillMock()
skill.handler_adding_context()
skill.set_context.assert_called_once_with('DestroyContext', '')
def test_adding_context_with_words(self):
"""Ensure that decorated handler adds Keyword and content."""
skill = ContextSkillMock()
skill.handler_adding_context_with_words()
skill.set_context.assert_called_once_with('DestroyContext',
'exterminate')
def test_removing_context(self):
"""Make sure the decorated handler removes the specified context."""
skill = ContextSkillMock()
skill.handler_removing_context()
skill.remove_context.assert_called_once_with('DestroyContext')
|
<commit_before><commit_msg>Add tests for context decorators<commit_after>
|
from unittest import TestCase, mock
from mycroft.skills.context import adds_context, removes_context
"""
Tests for the adapt context decorators.
"""
class ContextSkillMock(mock.Mock):
"""Mock class to apply decorators on."""
@adds_context('DestroyContext')
def handler_adding_context(self):
pass
@adds_context('DestroyContext', 'exterminate')
def handler_adding_context_with_words(self):
pass
@removes_context('DestroyContext')
def handler_removing_context(self):
pass
class TestContextDecorators(TestCase):
def test_adding_context(self):
"""Check that calling handler adds the correct Keyword."""
skill = ContextSkillMock()
skill.handler_adding_context()
skill.set_context.assert_called_once_with('DestroyContext', '')
def test_adding_context_with_words(self):
"""Ensure that decorated handler adds Keyword and content."""
skill = ContextSkillMock()
skill.handler_adding_context_with_words()
skill.set_context.assert_called_once_with('DestroyContext',
'exterminate')
def test_removing_context(self):
"""Make sure the decorated handler removes the specified context."""
skill = ContextSkillMock()
skill.handler_removing_context()
skill.remove_context.assert_called_once_with('DestroyContext')
|
Add tests for context decoratorsfrom unittest import TestCase, mock
from mycroft.skills.context import adds_context, removes_context
"""
Tests for the adapt context decorators.
"""
class ContextSkillMock(mock.Mock):
"""Mock class to apply decorators on."""
@adds_context('DestroyContext')
def handler_adding_context(self):
pass
@adds_context('DestroyContext', 'exterminate')
def handler_adding_context_with_words(self):
pass
@removes_context('DestroyContext')
def handler_removing_context(self):
pass
class TestContextDecorators(TestCase):
def test_adding_context(self):
"""Check that calling handler adds the correct Keyword."""
skill = ContextSkillMock()
skill.handler_adding_context()
skill.set_context.assert_called_once_with('DestroyContext', '')
def test_adding_context_with_words(self):
"""Ensure that decorated handler adds Keyword and content."""
skill = ContextSkillMock()
skill.handler_adding_context_with_words()
skill.set_context.assert_called_once_with('DestroyContext',
'exterminate')
def test_removing_context(self):
"""Make sure the decorated handler removes the specified context."""
skill = ContextSkillMock()
skill.handler_removing_context()
skill.remove_context.assert_called_once_with('DestroyContext')
|
<commit_before><commit_msg>Add tests for context decorators<commit_after>from unittest import TestCase, mock
from mycroft.skills.context import adds_context, removes_context
"""
Tests for the adapt context decorators.
"""
class ContextSkillMock(mock.Mock):
"""Mock class to apply decorators on."""
@adds_context('DestroyContext')
def handler_adding_context(self):
pass
@adds_context('DestroyContext', 'exterminate')
def handler_adding_context_with_words(self):
pass
@removes_context('DestroyContext')
def handler_removing_context(self):
pass
class TestContextDecorators(TestCase):
def test_adding_context(self):
"""Check that calling handler adds the correct Keyword."""
skill = ContextSkillMock()
skill.handler_adding_context()
skill.set_context.assert_called_once_with('DestroyContext', '')
def test_adding_context_with_words(self):
"""Ensure that decorated handler adds Keyword and content."""
skill = ContextSkillMock()
skill.handler_adding_context_with_words()
skill.set_context.assert_called_once_with('DestroyContext',
'exterminate')
def test_removing_context(self):
"""Make sure the decorated handler removes the specified context."""
skill = ContextSkillMock()
skill.handler_removing_context()
skill.remove_context.assert_called_once_with('DestroyContext')
|
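The mocks fix a clear contract for the decorators: run the handler, then call self.set_context(keyword, words) or self.remove_context(keyword). A minimal implementation consistent with these tests (not necessarily mycroft's real one):

from functools import wraps

def adds_context(context, words=''):
    def decorator(func):
        @wraps(func)
        def wrapper(self, *args, **kwargs):
            result = func(self, *args, **kwargs)
            self.set_context(context, words)  # asserted by the tests above
            return result
        return wrapper
    return decorator

def removes_context(context):
    def decorator(func):
        @wraps(func)
        def wrapper(self, *args, **kwargs):
            result = func(self, *args, **kwargs)
            self.remove_context(context)
            return result
        return wrapper
    return decorator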
|
b0f3c0f8db69b7c4a141bd32680ed937b40d34c6
|
util/item_name_gen.py
|
util/item_name_gen.py
|
'''Script to help generate item names.'''
def int_to_str(num, alphabet):
'''Convert integer to string.'''
# http://stackoverflow.com/a/1119769/1524507
if (num == 0):
return alphabet[0]
arr = []
base = len(alphabet)
while num:
rem = num % base
num = num // base
arr.append(alphabet[rem])
arr.reverse()
return ''.join(arr)
def main():
start_num = 0
end_num = 2176782335
pics_per_item = 1000
counter = start_num
while True:
lower = counter
upper = min(counter + pics_per_item, end_num)
print('picture:{0}-{1}'.format(lower, upper))
counter += 1000
if counter > end_num:
break
if __name__ == '__main__':
main()
|
Add util script to generate item names.
|
Add util script to generate item names.
|
Python
|
unlicense
|
ArchiveTeam/twitpic-discovery,ArchiveTeam/twitpic-discovery
|
Add util script to generate item names.
|
'''Script to help generate item names.'''
def int_to_str(num, alphabet):
'''Convert integer to string.'''
# http://stackoverflow.com/a/1119769/1524507
if (num == 0):
return alphabet[0]
arr = []
base = len(alphabet)
while num:
rem = num % base
num = num // base
arr.append(alphabet[rem])
arr.reverse()
return ''.join(arr)
def main():
start_num = 0
end_num = 2176782335
pics_per_item = 1000
counter = start_num
while True:
lower = counter
upper = min(counter + pics_per_item, end_num)
print('picture:{0}-{1}'.format(lower, upper))
counter += 1000
if counter > end_num:
break
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add util script to generate item names.<commit_after>
|
'''Script to help generate item names.'''
def int_to_str(num, alphabet):
'''Convert integer to string.'''
# http://stackoverflow.com/a/1119769/1524507
if (num == 0):
return alphabet[0]
arr = []
base = len(alphabet)
while num:
rem = num % base
num = num // base
arr.append(alphabet[rem])
arr.reverse()
return ''.join(arr)
def main():
start_num = 0
end_num = 2176782335
pics_per_item = 1000
counter = start_num
while True:
lower = counter
upper = min(counter + pics_per_item, end_num)
print('picture:{0}-{1}'.format(lower, upper))
counter += 1000
if counter > end_num:
break
if __name__ == '__main__':
main()
|
Add util script to generate item names.'''Script to help generate item names.'''
def int_to_str(num, alphabet):
'''Convert integer to string.'''
# http://stackoverflow.com/a/1119769/1524507
if (num == 0):
return alphabet[0]
arr = []
base = len(alphabet)
while num:
rem = num % base
num = num // base
arr.append(alphabet[rem])
arr.reverse()
return ''.join(arr)
def main():
start_num = 0
end_num = 2176782335
pics_per_item = 1000
counter = start_num
while True:
lower = counter
upper = min(counter + pics_per_item, end_num)
print('picture:{0}-{1}'.format(lower, upper))
counter += 1000
if counter > end_num:
break
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add util script to generate item names.<commit_after>'''Script to help generate item names.'''
def int_to_str(num, alphabet):
'''Convert integer to string.'''
# http://stackoverflow.com/a/1119769/1524507
if (num == 0):
return alphabet[0]
arr = []
base = len(alphabet)
while num:
rem = num % base
num = num // base
arr.append(alphabet[rem])
arr.reverse()
return ''.join(arr)
def main():
start_num = 0
end_num = 2176782335
pics_per_item = 1000
counter = start_num
while True:
lower = counter
upper = min(counter + pics_per_item, end_num)
print('picture:{0}-{1}'.format(lower, upper))
counter += 1000
if counter > end_num:
break
if __name__ == '__main__':
main()
|
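Note that int_to_str is defined but never called by main(), and the loop advances by a hard-coded 1000 rather than pics_per_item. The helper looks like scaffolding for base-36 item names: end_num (2176782335) is exactly 36**6 - 1, i.e. 'zzzzzz' in base 36. With an assumed digits-plus-lowercase alphabet:

import string

alphabet = string.digits + string.ascii_lowercase  # base 36
print(int_to_str(2176782335, alphabet))  # 'zzzzzz' (int_to_str from the script above)
print(int_to_str(1000, alphabet))        # 'rs'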
|
a4d312411cb125685291e81b8bb2204324fea34d
|
sandbox/order/migrations/0005_auto_20170502_1533.py
|
sandbox/order/migrations/0005_auto_20170502_1533.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-05-02 15:33
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('order', '0004_auto_20160111_1108'),
]
operations = [
migrations.AlterField(
model_name='order',
name='guest_email',
field=models.EmailField(blank=True, max_length=254, verbose_name='Guest email address'),
),
]
|
Add missing migration in sandbox
|
Add missing migration in sandbox
|
Python
|
isc
|
thelabnyc/django-oscar-api-checkout
|
Add missing migration in sandbox
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-05-02 15:33
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('order', '0004_auto_20160111_1108'),
]
operations = [
migrations.AlterField(
model_name='order',
name='guest_email',
field=models.EmailField(blank=True, max_length=254, verbose_name='Guest email address'),
),
]
|
<commit_before><commit_msg>Add missing migration in sandbox<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-05-02 15:33
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('order', '0004_auto_20160111_1108'),
]
operations = [
migrations.AlterField(
model_name='order',
name='guest_email',
field=models.EmailField(blank=True, max_length=254, verbose_name='Guest email address'),
),
]
|
Add missing migration in sandbox# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-05-02 15:33
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('order', '0004_auto_20160111_1108'),
]
operations = [
migrations.AlterField(
model_name='order',
name='guest_email',
field=models.EmailField(blank=True, max_length=254, verbose_name='Guest email address'),
),
]
|
<commit_before><commit_msg>Add missing migration in sandbox<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-05-02 15:33
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('order', '0004_auto_20160111_1108'),
]
operations = [
migrations.AlterField(
model_name='order',
name='guest_email',
field=models.EmailField(blank=True, max_length=254, verbose_name='Guest email address'),
),
]
|
|
6bde05ad9c16be2677ab6c91444e793529a9fdd3
|
comrade/core/models.py
|
comrade/core/models.py
|
from django.db import models
class ComradeBaseModel(models.Model):
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
def __str__(self):
return self.__unicode__()
def __repr__(self):
return self.__unicode__()
class Meta:
abstract = True
get_latest_by = 'created'
def get_absolute_url(self, *args, **kwargs):
return self.get_url_path(*args, **kwargs)
|
Add a base Django model to share common functionality.
|
Add a base Django model to share common functionality.
|
Python
|
mit
|
bueda/django-comrade
|
Add a base Django model to share common functionality.
|
from django.db import models
class ComradeBaseModel(models.Model):
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
def __str__(self):
return self.__unicode__()
def __repr__(self):
return self.__unicode__()
class Meta:
abstract = True
get_latest_by = 'created'
def get_absolute_url(self, *args, **kwargs):
return self.get_url_path(*args, **kwargs)
|
<commit_before><commit_msg>Add a base Django model to share common functionality.<commit_after>
|
from django.db import models
class ComradeBaseModel(models.Model):
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
def __str__(self):
return self.__unicode__()
def __repr__(self):
return self.__unicode__()
class Meta:
abstract = True
get_latest_by = 'created'
def get_absolute_url(self, *args, **kwargs):
return self.get_url_path(*args, **kwargs)
|
Add a base Django model to share common functionality.from django.db import models
class ComradeBaseModel(models.Model):
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
def __str__(self):
return self.__unicode__()
def __repr__(self):
return self.__unicode__()
class Meta:
abstract = True
get_latest_by = 'created'
def get_absolute_url(self, *args, **kwargs):
return self.get_url_path(*args, **kwargs)
|
<commit_before><commit_msg>Add a base Django model to share common functionality.<commit_after>from django.db import models
class ComradeBaseModel(models.Model):
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
def __str__(self):
return self.__unicode__()
def __repr__(self):
return self.__unicode__()
class Meta:
abstract = True
get_latest_by = 'created'
def get_absolute_url(self, *args, **kwargs):
return self.get_url_path(*args, **kwargs)
|
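Since __str__ and __repr__ both delegate to __unicode__, and get_absolute_url delegates to get_url_path, concrete subclasses are expected to supply those two methods. A hypothetical subclass showing the intended usage:

class Article(ComradeBaseModel):
    title = models.CharField(max_length=100)

    def __unicode__(self):
        return self.title

    def get_url_path(self):
        return '/articles/%d/' % self.pk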
|
b154468b085dad53de8fdef09ec42c8518475556
|
tests/basics/string_format_modulo_int.py
|
tests/basics/string_format_modulo_int.py
|
# test string modulo formatting with int values
# test + option with various amounts of padding
for pad in ('', ' ', '0'):
for n in (1, 2, 3):
for val in (-1, 0, 1):
print(('%+' + pad + str(n) + 'd') % val)
|
Add test for string module formatting with int argument.
|
tests/basics: Add test for string module formatting with int argument.
|
Python
|
mit
|
infinnovation/micropython,lowRISC/micropython,infinnovation/micropython,micropython/micropython-esp32,toolmacher/micropython,PappaPeppar/micropython,pfalcon/micropython,SHA2017-badge/micropython-esp32,AriZuu/micropython,lowRISC/micropython,tobbad/micropython,puuu/micropython,dmazzella/micropython,SHA2017-badge/micropython-esp32,PappaPeppar/micropython,MrSurly/micropython-esp32,oopy/micropython,Peetz0r/micropython-esp32,tralamazza/micropython,blazewicz/micropython,hiway/micropython,TDAbboud/micropython,tralamazza/micropython,kerneltask/micropython,adafruit/micropython,blazewicz/micropython,chrisdearman/micropython,hiway/micropython,pramasoul/micropython,oopy/micropython,trezor/micropython,SHA2017-badge/micropython-esp32,tobbad/micropython,bvernoux/micropython,hiway/micropython,micropython/micropython-esp32,dmazzella/micropython,Timmenem/micropython,MrSurly/micropython,trezor/micropython,adafruit/micropython,Peetz0r/micropython-esp32,Peetz0r/micropython-esp32,kerneltask/micropython,oopy/micropython,chrisdearman/micropython,adafruit/circuitpython,alex-robbins/micropython,TDAbboud/micropython,adafruit/circuitpython,swegener/micropython,chrisdearman/micropython,toolmacher/micropython,bvernoux/micropython,henriknelson/micropython,puuu/micropython,lowRISC/micropython,infinnovation/micropython,dmazzella/micropython,kerneltask/micropython,TDAbboud/micropython,pozetroninc/micropython,AriZuu/micropython,deshipu/micropython,tralamazza/micropython,torwag/micropython,kerneltask/micropython,henriknelson/micropython,Peetz0r/micropython-esp32,selste/micropython,selste/micropython,bvernoux/micropython,oopy/micropython,HenrikSolver/micropython,HenrikSolver/micropython,MrSurly/micropython,MrSurly/micropython-esp32,SHA2017-badge/micropython-esp32,cwyark/micropython,swegener/micropython,pfalcon/micropython,selste/micropython,AriZuu/micropython,TDAbboud/micropython,tobbad/micropython,adafruit/circuitpython,alex-robbins/micropython,adafruit/circuitpython,selste/micropython,alex-robbins/micropython,HenrikSolver/micropython,MrSurly/micropython,MrSurly/micropython-esp32,hiway/micropython,deshipu/micropython,SHA2017-badge/micropython-esp32,pozetroninc/micropython,henriknelson/micropython,chrisdearman/micropython,torwag/micropython,ryannathans/micropython,henriknelson/micropython,puuu/micropython,adafruit/circuitpython,cwyark/micropython,Timmenem/micropython,selste/micropython,cwyark/micropython,MrSurly/micropython,MrSurly/micropython,pfalcon/micropython,MrSurly/micropython-esp32,deshipu/micropython,adafruit/micropython,trezor/micropython,pramasoul/micropython,Timmenem/micropython,swegener/micropython,ryannathans/micropython,cwyark/micropython,Timmenem/micropython,HenrikSolver/micropython,puuu/micropython,AriZuu/micropython,MrSurly/micropython-esp32,micropython/micropython-esp32,dmazzella/micropython,PappaPeppar/micropython,pozetroninc/micropython,torwag/micropython,pozetroninc/micropython,toolmacher/micropython,adafruit/circuitpython,alex-robbins/micropython,henriknelson/micropython,deshipu/micropython,infinnovation/micropython,AriZuu/micropython,tralamazza/micropython,infinnovation/micropython,lowRISC/micropython,PappaPeppar/micropython,pfalcon/micropython,trezor/micropython,swegener/micropython,swegener/micropython,lowRISC/micropython,pramasoul/micropython,chrisdearman/micropython,blazewicz/micropython,ryannathans/micropython,PappaPeppar/micropython,adafruit/micropython,ryannathans/micropython,cwyark/micropython,torwag/micropython,pramasoul/micropython,toolmacher/micropython,Timmenem/micropython,puuu/micropython,ada
fruit/micropython,hiway/micropython,alex-robbins/micropython,torwag/micropython,micropython/micropython-esp32,pfalcon/micropython,bvernoux/micropython,blazewicz/micropython,tobbad/micropython,toolmacher/micropython,blazewicz/micropython,deshipu/micropython,trezor/micropython,pozetroninc/micropython,bvernoux/micropython,micropython/micropython-esp32,HenrikSolver/micropython,kerneltask/micropython,tobbad/micropython,pramasoul/micropython,oopy/micropython,ryannathans/micropython,TDAbboud/micropython,Peetz0r/micropython-esp32
|
tests/basics: Add test for string module formatting with int argument.
|
# test string modulo formatting with int values
# test + option with various amounts of padding
for pad in ('', ' ', '0'):
for n in (1, 2, 3):
for val in (-1, 0, 1):
print(('%+' + pad + str(n) + 'd') % val)
|
<commit_before><commit_msg>tests/basics: Add test for string module formatting with int argument.<commit_after>
|
# test string modulo formatting with int values
# test + option with various amounts of padding
for pad in ('', ' ', '0'):
for n in (1, 2, 3):
for val in (-1, 0, 1):
print(('%+' + pad + str(n) + 'd') % val)
|
tests/basics: Add test for string module formatting with int argument.# test string modulo formatting with int values
# test + option with various amounts of padding
for pad in ('', ' ', '0'):
for n in (1, 2, 3):
for val in (-1, 0, 1):
print(('%+' + pad + str(n) + 'd') % val)
|
<commit_before><commit_msg>tests/basics: Add test for string module formatting with int argument.<commit_after># test string modulo formatting with int values
# test + option with various amounts of padding
for pad in ('', ' ', '0'):
for n in (1, 2, 3):
for val in (-1, 0, 1):
print(('%+' + pad + str(n) + 'd') % val)
|
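For reference, the flag interactions this loop exercises follow standard printf semantics — the width counts the sign, '0' pads after the sign, and '+' overrides the ' ' flag:

print('%+3d' % -1)   # ' -1'  (space padding, width includes the sign)
print('%+03d' % -1)  # '-01'  (zero padding goes after the sign)
print('%+ 3d' % 1)   # ' +1'  ('+' wins over the ' ' flag)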
|
b4f9b8d6c056224fc3728c6eecf511fecf6ac6d7
|
tests/test_configuration/test_gateway.py
|
tests/test_configuration/test_gateway.py
|
'''
Test gateway creation
'''
import unittest
from wirecurly.configuration import gateway
from nose import tools
class testGatewayCreation(unittest.TestCase):
'''
Test gateway creation
'''
def setUp(self):
'''
gateway fixtures for tests
'''
self.gw = gateway.Gateway('name')
def test_gw_dict_ok(self):
'''
Test that gw is properly serialized
'''
self.gw.addParameter('username','someuser')
assert isinstance(self.gw.todict(), dict)
def test_adding_param(self):
'''
Test if param is properly added to a gateway
'''
self.gw.addParameter('username','someuser')
assert self.gw.getParameter('username') == 'someuser'
@tools.raises(ValueError)
def test_adding_existing_param(self):
'''
Test adding an existing param
'''
self.gw.addParameter('username','someuser')
self.gw.addParameter('username','someuser')
|
Add Tests for gateway creation
|
Add Tests for gateway creation
|
Python
|
mpl-2.0
|
IndiciumSRL/wirecurly
|
Add Tests for gateway creation
|
'''
Test gateway creation
'''
import unittest
from wirecurly.configuration import gateway
from nose import tools
class testGatewayCreation(unittest.TestCase):
'''
Test gateway creation
'''
def setUp(self):
'''
gateway fixtures for tests
'''
self.gw = gateway.Gateway('name')
def test_gw_dict_ok(self):
'''
Test that gw is properly serialized
'''
self.gw.addParameter('username','someuser')
assert isinstance(self.gw.todict(), dict)
def test_adding_param(self):
'''
Test if param is properly added to a gateway
'''
self.gw.addParameter('username','someuser')
assert self.gw.getParameter('username') == 'someuser'
@tools.raises(ValueError)
def test_adding_existing_param(self):
'''
Test adding an existing param
'''
self.gw.addParameter('username','someuser')
self.gw.addParameter('username','someuser')
|
<commit_before><commit_msg>Add Tests for gateway creation<commit_after>
|
'''
Test gateway creation
'''
import unittest
from wirecurly.configuration import gateway
from nose import tools
class testGatewayCreation(unittest.TestCase):
'''
Test gateway creation
'''
def setUp(self):
'''
gateway fixtures for tests
'''
self.gw = gateway.Gateway('name')
def test_gw_dict_ok(self):
'''
Test that gw is properly serialized
'''
self.gw.addParameter('username','someuser')
assert isinstance(self.gw.todict(), dict)
def test_adding_param(self):
'''
Test if param is properly added to a gateway
'''
self.gw.addParameter('username','someuser')
assert self.gw.getParameter('username') == 'someuser'
@tools.raises(ValueError)
def test_adding_existing_param(self):
'''
Test adding an existing param
'''
self.gw.addParameter('username','someuser')
self.gw.addParameter('username','someuser')
|
Add Tests for gateway creation'''
Test gateway creation
'''
import unittest
from wirecurly.configuration import gateway
from nose import tools
class testGatewayCreation(unittest.TestCase):
'''
Test gateway creation
'''
def setUp(self):
'''
gateway fixtures for tests
'''
self.gw = gateway.Gateway('name')
def test_gw_dict_ok(self):
'''
Test that gw is properly serialized
'''
self.gw.addParameter('username','someuser')
assert isinstance(self.gw.todict(), dict)
def test_adding_param(self):
'''
Test if param is properly added to a gateway
'''
self.gw.addParameter('username','someuser')
assert self.gw.getParameter('username') == 'someuser'
@tools.raises(ValueError)
def test_adding_existing_param(self):
'''
Test adding an existing param
'''
self.gw.addParameter('username','someuser')
self.gw.addParameter('username','someuser')
|
<commit_before><commit_msg>Add Tests for gateway creation<commit_after>'''
Test gateway creation
'''
import unittest
from wirecurly.configuration import gateway
from nose import tools
class testGatewayCreation(unittest.TestCase):
'''
Test gateway creation
'''
def setUp(self):
'''
gateway fixtures for tests
'''
self.gw = gateway.Gateway('name')
def test_gw_dict_ok(self):
'''
Test that gw is properly serialized
'''
self.gw.addParameter('username','someuser')
assert isinstance(self.gw.todict(), dict)
def test_adding_param(self):
'''
Test if param is properly added to a gateway
'''
self.gw.addParameter('username','someuser')
assert self.gw.getParameter('username') == 'someuser'
@tools.raises(ValueError)
def test_adding_existing_param(self):
'''
Test adding an existing param
'''
self.gw.addParameter('username','someuser')
self.gw.addParameter('username','someuser')
|
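The tests pin the small Gateway surface they touch: addParameter must reject duplicate names with ValueError, getParameter returns the stored value, and todict serializes the gateway. A minimal class that would pass them (a sketch — the real wirecurly class and its dict shape may differ):

class Gateway(object):
    def __init__(self, name):
        self.name = name
        self.params = {}

    def addParameter(self, name, value):
        if name in self.params:
            raise ValueError('parameter %r already set' % name)
        self.params[name] = value

    def getParameter(self, name):
        return self.params[name]

    def todict(self):
        return {'name': self.name, 'params': dict(self.params)}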
|
6ba84140e5d7ddca2ee88e8ca6562a32a66a1d6b
|
tests/app/main/forms/test_govuk_text_input_field.py
|
tests/app/main/forms/test_govuk_text_input_field.py
|
from flask_wtf import FlaskForm as Form
from app.main.forms import GovukTextInputField
def test_GovukTextInputField_renders_zero(client_request):
class FakeForm(Form):
field = GovukTextInputField()
form = FakeForm(field=0)
html = form.field()
assert 'value="0"' in html
|
Add test for rendering integer 0 in form input
|
Add test for rendering integer 0 in form input
|
Python
|
mit
|
alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin
|
Add test for rendering integer 0 in form input
|
from flask_wtf import FlaskForm as Form
from app.main.forms import GovukTextInputField
def test_GovukTextInputField_renders_zero(client_request):
class FakeForm(Form):
field = GovukTextInputField()
form = FakeForm(field=0)
html = form.field()
assert 'value="0"' in html
|
<commit_before><commit_msg>Add test for rendering integer 0 in form input<commit_after>
|
from flask_wtf import FlaskForm as Form
from app.main.forms import GovukTextInputField
def test_GovukTextInputField_renders_zero(client_request):
class FakeForm(Form):
field = GovukTextInputField()
form = FakeForm(field=0)
html = form.field()
assert 'value="0"' in html
|
Add test for rendering integer 0 in form inputfrom flask_wtf import FlaskForm as Form
from app.main.forms import GovukTextInputField
def test_GovukTextInputField_renders_zero(client_request):
class FakeForm(Form):
field = GovukTextInputField()
form = FakeForm(field=0)
html = form.field()
assert 'value="0"' in html
|
<commit_before><commit_msg>Add test for rendering integer 0 in form input<commit_after>from flask_wtf import FlaskForm as Form
from app.main.forms import GovukTextInputField
def test_GovukTextInputField_renders_zero(client_request):
class FakeForm(Form):
field = GovukTextInputField()
form = FakeForm(field=0)
html = form.field()
assert 'value="0"' in html
|
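Zero earns its own test because it is falsy: any widget that guards the value with value or '' silently drops a legitimate 0 while rendering every other integer correctly. The pattern being defended against, in isolation:

def render_value_buggy(value):
    return 'value="%s"' % (value or '')  # drops 0

def render_value_fixed(value):
    return 'value="%s"' % ('' if value is None else value)

print(render_value_buggy(0))  # value=""  -- the regression
print(render_value_fixed(0))  # value="0"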
|
d9dd7214865130d2ccc31670bcbc1c1f6cc7596a
|
02_task/sample_test.py
|
02_task/sample_test.py
|
import unittest
import solution
class SampleTest(unittest.TestCase):
def test_five_plus_three(self):
plus = solution.create_operator('+', lambda lhs, rhs: lhs + rhs)
x = solution.create_variable('x')
y = solution.create_variable('y')
added_expression = solution.create_expression((x, plus, y))
self.assertEqual(added_expression.evaluate(x=5, y=3), 8)
def test_operators(self):
y = solution.create_variable('y')
twelve = solution.create_constant(12)
expression = y + twelve
self.assertEqual(expression.evaluate(y=3), 15)
def test_constant_evaluation(self):
self.assertEqual(solution.create_variable('x').evaluate(x=42), 42)
self.assertEqual(solution.create_constant(5).evaluate(), 5)
if __name__ == '__main__':
unittest.main()
|
Add 02-task sample test file.
|
Add 02-task sample test file.
|
Python
|
mit
|
pepincho/Python-Course-FMI
|
Add 02-task sample test file.
|
import unittest
import solution
class SampleTest(unittest.TestCase):
def test_five_plus_three(self):
plus = solution.create_operator('+', lambda lhs, rhs: lhs + rhs)
x = solution.create_variable('x')
y = solution.create_variable('y')
added_expression = solution.create_expression((x, plus, y))
self.assertEqual(added_expression.evaluate(x=5, y=3), 8)
def test_operators(self):
y = solution.create_variable('y')
twelve = solution.create_constant(12)
expression = y + twelve
self.assertEqual(expression.evaluate(y=3), 15)
def test_constant_evaluation(self):
self.assertEqual(solution.create_variable('x').evaluate(x=42), 42)
self.assertEqual(solution.create_constant(5).evaluate(), 5)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add 02-task sample test file.<commit_after>
|
import unittest
import solution
class SampleTest(unittest.TestCase):
def test_five_plus_three(self):
plus = solution.create_operator('+', lambda lhs, rhs: lhs + rhs)
x = solution.create_variable('x')
y = solution.create_variable('y')
added_expression = solution.create_expression((x, plus, y))
self.assertEqual(added_expression.evaluate(x=5, y=3), 8)
def test_operators(self):
y = solution.create_variable('y')
twelve = solution.create_constant(12)
expression = y + twelve
self.assertEqual(expression.evaluate(y=3), 15)
def test_constant_evaluation(self):
self.assertEqual(solution.create_variable('x').evaluate(x=42), 42)
self.assertEqual(solution.create_constant(5).evaluate(), 5)
if __name__ == '__main__':
unittest.main()
|
Add 02-task sample test file.import unittest
import solution
class SampleTest(unittest.TestCase):
def test_five_plus_three(self):
plus = solution.create_operator('+', lambda lhs, rhs: lhs + rhs)
x = solution.create_variable('x')
y = solution.create_variable('y')
added_expression = solution.create_expression((x, plus, y))
self.assertEqual(added_expression.evaluate(x=5, y=3), 8)
def test_operators(self):
y = solution.create_variable('y')
twelve = solution.create_constant(12)
expression = y + twelve
self.assertEqual(expression.evaluate(y=3), 15)
def test_constant_evaluation(self):
self.assertEqual(solution.create_variable('x').evaluate(x=42), 42)
self.assertEqual(solution.create_constant(5).evaluate(), 5)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add 02-task sample test file.<commit_after>import unittest
import solution
class SampleTest(unittest.TestCase):
def test_five_plus_three(self):
plus = solution.create_operator('+', lambda lhs, rhs: lhs + rhs)
x = solution.create_variable('x')
y = solution.create_variable('y')
added_expression = solution.create_expression((x, plus, y))
self.assertEqual(added_expression.evaluate(x=5, y=3), 8)
def test_operators(self):
y = solution.create_variable('y')
twelve = solution.create_constant(12)
expression = y + twelve
self.assertEqual(expression.evaluate(y=3), 15)
def test_constant_evaluation(self):
self.assertEqual(solution.create_variable('x').evaluate(x=42), 42)
self.assertEqual(solution.create_constant(5).evaluate(), 5)
if __name__ == '__main__':
unittest.main()
|
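The sample tests sketch a tiny expression API: constants, variables and composed expressions all expose evaluate(**env), and + builds a new expression. One shape the pieces could take (illustrative only; the course solution may differ):

class Expr(object):
    def __add__(self, other):
        return BinOp(self, other, lambda a, b: a + b)

class Const(Expr):
    def __init__(self, value):
        self.value = value
    def evaluate(self, **env):
        return self.value

class Var(Expr):
    def __init__(self, name):
        self.name = name
    def evaluate(self, **env):
        return env[self.name]

class BinOp(Expr):
    def __init__(self, lhs, rhs, fn):
        self.lhs, self.rhs, self.fn = lhs, rhs, fn
    def evaluate(self, **env):
        return self.fn(self.lhs.evaluate(**env), self.rhs.evaluate(**env))

assert (Var('y') + Const(12)).evaluate(y=3) == 15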
|
bac583e9b3849884aa9865a3d8d19796b0eedd70
|
tap_dhcp.py
|
tap_dhcp.py
|
import re
import subprocess
def get_namespace_list():
"""Retrieve the list of DHCP namespaces."""
return subprocess.check_output(['ip', 'netns', 'list']).split()
def get_interfaces_for(namespace):
"""Retrieve the list of interfaces inside a namespace."""
return subprocess.check_output(['ip', 'netns', 'exec', namespace, 'ip', 'a'])
def main():
TAP = re.compile(r'\btap\S+')
namespaces = get_namespace_list()
if not namespaces:
print 'status err no dhcp namespaces on this host'
raise SystemError(True)
interfaces = ((n, get_interfaces_for(n)) for n in namespaces)
errored = False
for namespace, interface_list in interfaces:
num_taps = len(TAP.findall(interface_list))
if num_taps != 1:
print 'status err namespace {0} has {1} TAPs present'.format(
namespace, num_taps)
errored = True
if not errored:
print 'status ok'
else:
raise SystemError(True)
if __name__ == '__main__':
main()
|
Add first pass at checking dhcp namespaces for taps
|
Add first pass at checking dhcp namespaces for taps
|
Python
|
apache-2.0
|
sigmavirus24/rpc-openstack,stevelle/rpc-openstack,git-harry/rpc-openstack,miguelgrinberg/rpc-openstack,git-harry/rpc-openstack,jacobwagner/rpc-openstack,mancdaz/rpc-openstack,mattt416/rpc-openstack,darrenchan/rpc-openstack,hughsaunders/rpc-openstack,galstrom21/rpc-openstack,darrenchan/rpc-openstack,rcbops/rpc-openstack,cfarquhar/rpc-maas,andymcc/rpc-openstack,busterswt/rpc-openstack,shannonmitchell/rpc-openstack,briancurtin/rpc-maas,cfarquhar/rpc-openstack,major/rpc-openstack,xeregin/rpc-openstack,jpmontez/rpc-openstack,xeregin/rpc-openstack,prometheanfire/rpc-openstack,BjoernT/rpc-openstack,cloudnull/rpc-openstack,jacobwagner/rpc-openstack,cfarquhar/rpc-maas,major/rpc-openstack,cfarquhar/rpc-maas,claco/rpc-openstack,cfarquhar/rpc-openstack,xeregin/rpc-openstack,mancdaz/rpc-openstack,cloudnull/rpc-openstack,jpmontez/rpc-openstack,cloudnull/rpc-maas,cloudnull/rpc-maas,byronmccollum/rpc-openstack,robb-romans/rpc-openstack,briancurtin/rpc-maas,jpmontez/rpc-openstack,andymcc/rpc-openstack,darrenchan/rpc-openstack,byronmccollum/rpc-openstack,claco/rpc-openstack,xeregin/rpc-openstack,busterswt/rpc-openstack,mattt416/rpc-openstack,prometheanfire/rpc-openstack,nrb/rpc-openstack,darrenchan/rpc-openstack,mattt416/rpc-openstack,byronmccollum/rpc-openstack,npawelek/rpc-maas,BjoernT/rpc-openstack,npawelek/rpc-maas,rcbops/rpc-openstack,claco/rpc-openstack,shannonmitchell/rpc-openstack,briancurtin/rpc-maas,stevelle/rpc-openstack,robb-romans/rpc-openstack,miguelgrinberg/rpc-openstack,sigmavirus24/rpc-openstack,sigmavirus24/rpc-openstack,nrb/rpc-openstack,galstrom21/rpc-openstack,nrb/rpc-openstack,stevelle/rpc-openstack,npawelek/rpc-maas,busterswt/rpc-openstack,hughsaunders/rpc-openstack,miguelgrinberg/rpc-openstack,cloudnull/rpc-maas,sigmavirus24/rpc-openstack,andymcc/rpc-openstack
|
Add first pass at checking dhcp namespaces for taps
|
import re
import subprocess
def get_namespace_list():
"""Retrieve the list of DHCP namespaces."""
return subprocess.check_output(['ip', 'netns', 'list']).split()
def get_interfaces_for(namespace):
"""Retrieve the list of interfaces inside a namespace."""
return subprocess.check_output(['ip', 'netns', 'exec', namespace, 'ip', 'a'])
def main():
TAP = re.compile(r'\btap\S+')
namespaces = get_namespace_list()
if not namespaces:
print 'status err no dhcp namespaces on this host'
raise SystemError(True)
interfaces = ((n, get_interfaces_for(n)) for n in namespaces)
errored = False
for namespace, interface_list in interfaces:
num_taps = len(TAP.findall(interface_list))
if num_taps != 1:
print 'status err namespace {0} has {1} TAPs present'.format(
namespace, num_taps)
errored = True
if not errored:
print 'status ok'
else:
raise SystemError(True)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add first pass at checking dhcp namespaces for taps<commit_after>
|
import re
import subprocess


def get_namespace_list():
    """Retrieve the list of DHCP namespaces."""
    return subprocess.check_output(['ip', 'netns', 'list']).split()


def get_interfaces_for(namespace):
    """Retrieve the list of interfaces inside a namespace."""
    return subprocess.check_output(['ip', 'netns', 'exec', namespace, 'ip', 'a'])


def main():
    TAP = re.compile(r'\btap\S+')
    namespaces = get_namespace_list()
    if not namespaces:
        print 'status err no dhcp namespaces on this host'
        raise SystemExit(1)
    interfaces = ((n, get_interfaces_for(n)) for n in namespaces)
    errored = False
    # Each healthy DHCP namespace should hold exactly one tap interface.
    for namespace, interface_list in interfaces:
        num_taps = len(TAP.findall(interface_list))
        if num_taps != 1:
            print 'status err namespace {0} has {1} TAPs present'.format(
                namespace, num_taps)
            errored = True
    if not errored:
        print 'status ok'
    else:
        raise SystemExit(1)


if __name__ == '__main__':
    main()
|
Add first pass at checking dhcp namespaces for tapsimport re
import subprocess


def get_namespace_list():
    """Retrieve the list of DHCP namespaces."""
    return subprocess.check_output(['ip', 'netns', 'list']).split()


def get_interfaces_for(namespace):
    """Retrieve the list of interfaces inside a namespace."""
    return subprocess.check_output(['ip', 'netns', 'exec', namespace, 'ip', 'a'])


def main():
    TAP = re.compile(r'\btap\S+')
    namespaces = get_namespace_list()
    if not namespaces:
        print 'status err no dhcp namespaces on this host'
        raise SystemExit(1)
    interfaces = ((n, get_interfaces_for(n)) for n in namespaces)
    errored = False
    # Each healthy DHCP namespace should hold exactly one tap interface.
    for namespace, interface_list in interfaces:
        num_taps = len(TAP.findall(interface_list))
        if num_taps != 1:
            print 'status err namespace {0} has {1} TAPs present'.format(
                namespace, num_taps)
            errored = True
    if not errored:
        print 'status ok'
    else:
        raise SystemExit(1)


if __name__ == '__main__':
    main()
|
<commit_before><commit_msg>Add first pass at checking dhcp namespaces for taps<commit_after>import re
import subprocess


def get_namespace_list():
    """Retrieve the list of DHCP namespaces."""
    return subprocess.check_output(['ip', 'netns', 'list']).split()


def get_interfaces_for(namespace):
    """Retrieve the list of interfaces inside a namespace."""
    return subprocess.check_output(['ip', 'netns', 'exec', namespace, 'ip', 'a'])


def main():
    TAP = re.compile(r'\btap\S+')
    namespaces = get_namespace_list()
    if not namespaces:
        print 'status err no dhcp namespaces on this host'
        raise SystemExit(1)
    interfaces = ((n, get_interfaces_for(n)) for n in namespaces)
    errored = False
    # Each healthy DHCP namespace should hold exactly one tap interface.
    for namespace, interface_list in interfaces:
        num_taps = len(TAP.findall(interface_list))
        if num_taps != 1:
            print 'status err namespace {0} has {1} TAPs present'.format(
                namespace, num_taps)
            errored = True
    if not errored:
        print 'status ok'
    else:
        raise SystemExit(1)


if __name__ == '__main__':
    main()
|
|
5d06524c8465064248cc3605c69dd32687ea7565
|
wqflask/tests/wqflask/test_user_login.py
|
wqflask/tests/wqflask/test_user_login.py
|
"""Test cases for some methods in login.py"""
import unittest
from wqflask.user_login import encode_password
class TestUserLogin(unittest.TestCase):
def test_encode_password(self):
"""
Test encode password
"""
pass_gen_fields = {
"salt": "salt",
"hashfunc": "sha1",
"iterations": 4096,
"keylength": 20,
}
self.assertEqual(
encode_password(pass_gen_fields,
"password").get("password"),
'4b007901b765489abead49d926f721d065a429c1')
|
Add tests for encoding password
|
Add tests for encoding password
* wqflask/tests/wqflask/test_user_login.py: New tests.
|
Python
|
agpl-3.0
|
pjotrp/genenetwork2,genenetwork/genenetwork2,zsloan/genenetwork2,zsloan/genenetwork2,genenetwork/genenetwork2,zsloan/genenetwork2,pjotrp/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,pjotrp/genenetwork2
|
Add tests for encoding password
* wqflask/tests/wqflask/test_user_login.py: New tests.
|
"""Test cases for some methods in login.py"""
import unittest
from wqflask.user_login import encode_password
class TestUserLogin(unittest.TestCase):
def test_encode_password(self):
"""
Test encode password
"""
pass_gen_fields = {
"salt": "salt",
"hashfunc": "sha1",
"iterations": 4096,
"keylength": 20,
}
self.assertEqual(
encode_password(pass_gen_fields,
"password").get("password"),
'4b007901b765489abead49d926f721d065a429c1')
|
<commit_before><commit_msg>Add tests for encoding password
* wqflask/tests/wqflask/test_user_login.py: New tests.<commit_after>
|
"""Test cases for some methods in login.py"""
import unittest
from wqflask.user_login import encode_password
class TestUserLogin(unittest.TestCase):
def test_encode_password(self):
"""
Test encode password
"""
pass_gen_fields = {
"salt": "salt",
"hashfunc": "sha1",
"iterations": 4096,
"keylength": 20,
}
self.assertEqual(
encode_password(pass_gen_fields,
"password").get("password"),
'4b007901b765489abead49d926f721d065a429c1')
|
Add tests for encoding password
* wqflask/tests/wqflask/test_user_login.py: New tests."""Test cases for some methods in login.py"""
import unittest

from wqflask.user_login import encode_password


class TestUserLogin(unittest.TestCase):
    def test_encode_password(self):
        """
        Test encode password
        """
        pass_gen_fields = {
            "salt": "salt",
            "hashfunc": "sha1",
            "iterations": 4096,
            "keylength": 20,
        }
        self.assertEqual(
            encode_password(pass_gen_fields,
                            "password").get("password"),
            '4b007901b765489abead49d926f721d065a429c1')
|
<commit_before><commit_msg>Add tests for encoding password
* wqflask/tests/wqflask/test_user_login.py: New tests.<commit_after>"""Test cases for some methods in login.py"""
import unittest

from wqflask.user_login import encode_password


class TestUserLogin(unittest.TestCase):
    def test_encode_password(self):
        """
        Test encode password
        """
        pass_gen_fields = {
            "salt": "salt",
            "hashfunc": "sha1",
            "iterations": 4096,
            "keylength": 20,
        }
        self.assertEqual(
            encode_password(pass_gen_fields,
                            "password").get("password"),
            '4b007901b765489abead49d926f721d065a429c1')
|
|
e1914156c0d4085d35c88634c98c294e38faada5
|
cryptchat/test/test_aes.py
|
cryptchat/test/test_aes.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Run from Cryptchat
# python -m unittest discover
import unittest

from ..crypto.aes import AESCipher


class testAESCipher(unittest.TestCase):
    def test_encrypt_decrypt(self):
        key = "TTTcPolAhIqZZJY0IOH7Orecb/EEaUx8/u/pQlCgma8="
        cipher = AESCipher(key)
        m = "[TOP SECRET] I like k-pop"
        c = cipher.encrypt(m)
        m2 = cipher.decrypt(c)
        self.assertEqual(m, m2)

    def test_encrypt_decrypt_unicode(self):
        key = "TTTcPolAhIqZZJY0IOH7Orecb/EEaUx8/u/pQlCgma8="
        cipher = AESCipher(key)
        m = u"『秘密』K-popは好きです".encode('utf8')
        c = cipher.encrypt(m)
        m2 = cipher.decrypt(c)
        self.assertEqual(m, m2)

    def test_encrypt_decrypt_128(self):
        key = "Ya/C/EvmwW1xWhjM1BgZ/g=="
        cipher = AESCipher(key)
        m = "Private stuff"
        c = cipher.encrypt(m)
        m2 = cipher.decrypt(c)
        self.assertEqual(m, m2)


def main():
    unittest.main()

if __name__ == '__main__':
    main()
|
Add some tests for crypto/aes
|
Add some tests for crypto/aes
|
Python
|
mit
|
djohsson/Cryptchat
|
Add some tests for crypto/aes
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Run from Cryptchat
# python -m unittest discover
import unittest

from ..crypto.aes import AESCipher


class testAESCipher(unittest.TestCase):
    def test_encrypt_decrypt(self):
        key = "TTTcPolAhIqZZJY0IOH7Orecb/EEaUx8/u/pQlCgma8="
        cipher = AESCipher(key)
        m = "[TOP SECRET] I like k-pop"
        c = cipher.encrypt(m)
        m2 = cipher.decrypt(c)
        self.assertEqual(m, m2)

    def test_encrypt_decrypt_unicode(self):
        key = "TTTcPolAhIqZZJY0IOH7Orecb/EEaUx8/u/pQlCgma8="
        cipher = AESCipher(key)
        m = u"『秘密』K-popは好きです".encode('utf8')
        c = cipher.encrypt(m)
        m2 = cipher.decrypt(c)
        self.assertEqual(m, m2)

    def test_encrypt_decrypt_128(self):
        key = "Ya/C/EvmwW1xWhjM1BgZ/g=="
        cipher = AESCipher(key)
        m = "Private stuff"
        c = cipher.encrypt(m)
        m2 = cipher.decrypt(c)
        self.assertEqual(m, m2)


def main():
    unittest.main()

if __name__ == '__main__':
    main()
|
<commit_before><commit_msg>Add some tests for crypto/aes<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Run from Cryptchat
# python -m unittest discover
import unittest

from ..crypto.aes import AESCipher


class testAESCipher(unittest.TestCase):
    def test_encrypt_decrypt(self):
        key = "TTTcPolAhIqZZJY0IOH7Orecb/EEaUx8/u/pQlCgma8="
        cipher = AESCipher(key)
        m = "[TOP SECRET] I like k-pop"
        c = cipher.encrypt(m)
        m2 = cipher.decrypt(c)
        self.assertEqual(m, m2)

    def test_encrypt_decrypt_unicode(self):
        key = "TTTcPolAhIqZZJY0IOH7Orecb/EEaUx8/u/pQlCgma8="
        cipher = AESCipher(key)
        m = u"『秘密』K-popは好きです".encode('utf8')
        c = cipher.encrypt(m)
        m2 = cipher.decrypt(c)
        self.assertEqual(m, m2)

    def test_encrypt_decrypt_128(self):
        key = "Ya/C/EvmwW1xWhjM1BgZ/g=="
        cipher = AESCipher(key)
        m = "Private stuff"
        c = cipher.encrypt(m)
        m2 = cipher.decrypt(c)
        self.assertEqual(m, m2)


def main():
    unittest.main()

if __name__ == '__main__':
    main()
|
Add some tests for crypto/aes#!/usr/bin/python
# -*- coding: utf-8 -*-
# Run from Cryptchat
# python -m unittest discover
import unittest

from ..crypto.aes import AESCipher


class testAESCipher(unittest.TestCase):
    def test_encrypt_decrypt(self):
        key = "TTTcPolAhIqZZJY0IOH7Orecb/EEaUx8/u/pQlCgma8="
        cipher = AESCipher(key)
        m = "[TOP SECRET] I like k-pop"
        c = cipher.encrypt(m)
        m2 = cipher.decrypt(c)
        self.assertEqual(m, m2)

    def test_encrypt_decrypt_unicode(self):
        key = "TTTcPolAhIqZZJY0IOH7Orecb/EEaUx8/u/pQlCgma8="
        cipher = AESCipher(key)
        m = u"『秘密』K-popは好きです".encode('utf8')
        c = cipher.encrypt(m)
        m2 = cipher.decrypt(c)
        self.assertEqual(m, m2)

    def test_encrypt_decrypt_128(self):
        key = "Ya/C/EvmwW1xWhjM1BgZ/g=="
        cipher = AESCipher(key)
        m = "Private stuff"
        c = cipher.encrypt(m)
        m2 = cipher.decrypt(c)
        self.assertEqual(m, m2)


def main():
    unittest.main()

if __name__ == '__main__':
    main()
|
<commit_before><commit_msg>Add some tests for crypto/aes<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
# Run from Cryptchat
# python -m unittest discover
import unittest

from ..crypto.aes import AESCipher


class testAESCipher(unittest.TestCase):
    def test_encrypt_decrypt(self):
        key = "TTTcPolAhIqZZJY0IOH7Orecb/EEaUx8/u/pQlCgma8="
        cipher = AESCipher(key)
        m = "[TOP SECRET] I like k-pop"
        c = cipher.encrypt(m)
        m2 = cipher.decrypt(c)
        self.assertEqual(m, m2)

    def test_encrypt_decrypt_unicode(self):
        key = "TTTcPolAhIqZZJY0IOH7Orecb/EEaUx8/u/pQlCgma8="
        cipher = AESCipher(key)
        m = u"『秘密』K-popは好きです".encode('utf8')
        c = cipher.encrypt(m)
        m2 = cipher.decrypt(c)
        self.assertEqual(m, m2)

    def test_encrypt_decrypt_128(self):
        key = "Ya/C/EvmwW1xWhjM1BgZ/g=="
        cipher = AESCipher(key)
        m = "Private stuff"
        c = cipher.encrypt(m)
        m2 = cipher.decrypt(c)
        self.assertEqual(m, m2)


def main():
    unittest.main()

if __name__ == '__main__':
    main()
|
|
e9942aea9a2c11575d5abcfa33f2aef1d8d53c6a
|
modify-color.py
|
modify-color.py
|
#!/bin/python
"""modify-color
------------
"""
import sys
import colorsys


class Color(object):
    def __init__(self, color_str, color_format="hex"):
        pass


if __name__ == "__main__":
    if len(sys.argv) == 1 or sys.argv[1] in ['help', '--help']:
        print(__doc__)
        sys.exit()
|
Print doc on help or no args
|
Print doc on help or no args
|
Python
|
bsd-3-clause
|
seenaburns/color-util,seenaburns/color-util
|
Print doc on help or no args
|
#!/bin/python
"""modify-color
------------
"""
import sys
import colorsys


class Color(object):
    def __init__(self, color_str, color_format="hex"):
        pass


if __name__ == "__main__":
    if len(sys.argv) == 1 or sys.argv[1] in ['help', '--help']:
        print(__doc__)
        sys.exit()
|
<commit_before><commit_msg>Print doc on help or no args<commit_after>
|
#!/bin/python
"""modify-color
------------
"""
import sys
import colorsys


class Color(object):
    def __init__(self, color_str, color_format="hex"):
        pass


if __name__ == "__main__":
    if len(sys.argv) == 1 or sys.argv[1] in ['help', '--help']:
        print(__doc__)
        sys.exit()
|
Print doc on help or no args#!/bin/python
"""modify-color
------------
"""
import sys
import colorsys
class Color(object):
def __init__(self, color_str, color_format="hex"):
pass
if __name__ == "__main__":
if len(sys.argv) == 1 or sys.argv[1] in ['help', '--help']:
print(__doc__)
sys.exit()
|
<commit_before><commit_msg>Print doc on help or no args<commit_after>#!/bin/python
"""modify-color
------------
"""
import sys
import colorsys
class Color(object):
def __init__(self, color_str, color_format="hex"):
pass
if __name__ == "__main__":
if len(sys.argv) == 1 or sys.argv[1] in ['help', '--help']:
print(__doc__)
sys.exit()
|
|
f2bb3b0ab09da5fc1c186765052aea8fd87a9b2b
|
setuptools/tests/test_find_packages.py
|
setuptools/tests/test_find_packages.py
|
"""Tests for setuptools.find_packages()."""
import os
import shutil
import tempfile
import unittest
from setuptools import find_packages
class TestFindPackages(unittest.TestCase):
def setUp(self):
self.dist_dir = tempfile.mkdtemp()
self._make_pkg_structure()
def tearDown(self):
shutil.rmtree(self.dist_dir)
def _make_pkg_structure(self):
"""Make basic package structure.
dist/
docs/
conf.py
pkg/
__pycache__/
nspkg/
mod.py
subpkg/
assets/
asset
__init__.py
setup.py
"""
self.docs_dir = self._mkdir('docs', self.dist_dir)
self._touch('conf.py', self.docs_dir)
self.pkg_dir = self._mkdir('pkg', self.dist_dir)
self._mkdir('__pycache__', self.pkg_dir)
self.ns_pkg_dir = self._mkdir('nspkg', self.pkg_dir)
self._touch('mod.py', self.ns_pkg_dir)
self.sub_pkg_dir = self._mkdir('subpkg', self.pkg_dir)
self.asset_dir = self._mkdir('assets', self.sub_pkg_dir)
self._touch('asset', self.asset_dir)
self._touch('__init__.py', self.sub_pkg_dir)
self._touch('setup.py', self.dist_dir)
def _mkdir(self, path, parent_dir=None):
if parent_dir:
path = os.path.join(parent_dir, path)
os.mkdir(path)
return path
def _touch(self, path, dir_=None):
if dir_:
path = os.path.join(dir_, path)
fp = open(path, 'w')
fp.close()
return path
def test_regular_package(self):
self._touch('__init__.py', self.pkg_dir)
packages = find_packages(self.dist_dir)
self.assertEqual(packages, ['pkg', 'pkg.subpkg'])
def test_dir_with_dot_is_skipped(self):
shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
data_dir = self._mkdir('some.data', self.pkg_dir)
self._touch('__init__.py', data_dir)
self._touch('file.dat', data_dir)
packages = find_packages(self.dist_dir)
self.assertNotIn('pkg.some.data', packages)
|
Add unit tests for find_packages
|
Add unit tests for find_packages
--HG--
extra : rebase_source : 75f5ce4d2fb9d0ccd7168739c23d9ea1eeeb9112
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
Add unit tests for find_packages
--HG--
extra : rebase_source : 75f5ce4d2fb9d0ccd7168739c23d9ea1eeeb9112
|
"""Tests for setuptools.find_packages()."""
import os
import shutil
import tempfile
import unittest
from setuptools import find_packages
class TestFindPackages(unittest.TestCase):
def setUp(self):
self.dist_dir = tempfile.mkdtemp()
self._make_pkg_structure()
def tearDown(self):
shutil.rmtree(self.dist_dir)
def _make_pkg_structure(self):
"""Make basic package structure.
dist/
docs/
conf.py
pkg/
__pycache__/
nspkg/
mod.py
subpkg/
assets/
asset
__init__.py
setup.py
"""
self.docs_dir = self._mkdir('docs', self.dist_dir)
self._touch('conf.py', self.docs_dir)
self.pkg_dir = self._mkdir('pkg', self.dist_dir)
self._mkdir('__pycache__', self.pkg_dir)
self.ns_pkg_dir = self._mkdir('nspkg', self.pkg_dir)
self._touch('mod.py', self.ns_pkg_dir)
self.sub_pkg_dir = self._mkdir('subpkg', self.pkg_dir)
self.asset_dir = self._mkdir('assets', self.sub_pkg_dir)
self._touch('asset', self.asset_dir)
self._touch('__init__.py', self.sub_pkg_dir)
self._touch('setup.py', self.dist_dir)
def _mkdir(self, path, parent_dir=None):
if parent_dir:
path = os.path.join(parent_dir, path)
os.mkdir(path)
return path
def _touch(self, path, dir_=None):
if dir_:
path = os.path.join(dir_, path)
fp = open(path, 'w')
fp.close()
return path
def test_regular_package(self):
self._touch('__init__.py', self.pkg_dir)
packages = find_packages(self.dist_dir)
self.assertEqual(packages, ['pkg', 'pkg.subpkg'])
def test_dir_with_dot_is_skipped(self):
shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
data_dir = self._mkdir('some.data', self.pkg_dir)
self._touch('__init__.py', data_dir)
self._touch('file.dat', data_dir)
packages = find_packages(self.dist_dir)
self.assertNotIn('pkg.some.data', packages)
|
<commit_before><commit_msg>Add unit tests for find_packages
--HG--
extra : rebase_source : 75f5ce4d2fb9d0ccd7168739c23d9ea1eeeb9112<commit_after>
|
"""Tests for setuptools.find_packages()."""
import os
import shutil
import tempfile
import unittest
from setuptools import find_packages
class TestFindPackages(unittest.TestCase):
def setUp(self):
self.dist_dir = tempfile.mkdtemp()
self._make_pkg_structure()
def tearDown(self):
shutil.rmtree(self.dist_dir)
def _make_pkg_structure(self):
"""Make basic package structure.
dist/
docs/
conf.py
pkg/
__pycache__/
nspkg/
mod.py
subpkg/
assets/
asset
__init__.py
setup.py
"""
self.docs_dir = self._mkdir('docs', self.dist_dir)
self._touch('conf.py', self.docs_dir)
self.pkg_dir = self._mkdir('pkg', self.dist_dir)
self._mkdir('__pycache__', self.pkg_dir)
self.ns_pkg_dir = self._mkdir('nspkg', self.pkg_dir)
self._touch('mod.py', self.ns_pkg_dir)
self.sub_pkg_dir = self._mkdir('subpkg', self.pkg_dir)
self.asset_dir = self._mkdir('assets', self.sub_pkg_dir)
self._touch('asset', self.asset_dir)
self._touch('__init__.py', self.sub_pkg_dir)
self._touch('setup.py', self.dist_dir)
def _mkdir(self, path, parent_dir=None):
if parent_dir:
path = os.path.join(parent_dir, path)
os.mkdir(path)
return path
def _touch(self, path, dir_=None):
if dir_:
path = os.path.join(dir_, path)
fp = open(path, 'w')
fp.close()
return path
def test_regular_package(self):
self._touch('__init__.py', self.pkg_dir)
packages = find_packages(self.dist_dir)
self.assertEqual(packages, ['pkg', 'pkg.subpkg'])
def test_dir_with_dot_is_skipped(self):
shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
data_dir = self._mkdir('some.data', self.pkg_dir)
self._touch('__init__.py', data_dir)
self._touch('file.dat', data_dir)
packages = find_packages(self.dist_dir)
self.assertNotIn('pkg.some.data', packages)
|
Add unit tests for find_packages
--HG--
extra : rebase_source : 75f5ce4d2fb9d0ccd7168739c23d9ea1eeeb9112"""Tests for setuptools.find_packages()."""
import os
import shutil
import tempfile
import unittest

from setuptools import find_packages


class TestFindPackages(unittest.TestCase):

    def setUp(self):
        self.dist_dir = tempfile.mkdtemp()
        self._make_pkg_structure()

    def tearDown(self):
        shutil.rmtree(self.dist_dir)

    def _make_pkg_structure(self):
        """Make basic package structure.

        dist/
            docs/
                conf.py
            pkg/
                __pycache__/
                nspkg/
                    mod.py
                subpkg/
                    assets/
                        asset
                    __init__.py
            setup.py
        """
        self.docs_dir = self._mkdir('docs', self.dist_dir)
        self._touch('conf.py', self.docs_dir)
        self.pkg_dir = self._mkdir('pkg', self.dist_dir)
        self._mkdir('__pycache__', self.pkg_dir)
        self.ns_pkg_dir = self._mkdir('nspkg', self.pkg_dir)
        self._touch('mod.py', self.ns_pkg_dir)
        self.sub_pkg_dir = self._mkdir('subpkg', self.pkg_dir)
        self.asset_dir = self._mkdir('assets', self.sub_pkg_dir)
        self._touch('asset', self.asset_dir)
        self._touch('__init__.py', self.sub_pkg_dir)
        self._touch('setup.py', self.dist_dir)

    def _mkdir(self, path, parent_dir=None):
        if parent_dir:
            path = os.path.join(parent_dir, path)
        os.mkdir(path)
        return path

    def _touch(self, path, dir_=None):
        if dir_:
            path = os.path.join(dir_, path)
        fp = open(path, 'w')
        fp.close()
        return path

    def test_regular_package(self):
        self._touch('__init__.py', self.pkg_dir)
        packages = find_packages(self.dist_dir)
        self.assertEqual(packages, ['pkg', 'pkg.subpkg'])

    def test_dir_with_dot_is_skipped(self):
        shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
        data_dir = self._mkdir('some.data', self.pkg_dir)
        self._touch('__init__.py', data_dir)
        self._touch('file.dat', data_dir)
        packages = find_packages(self.dist_dir)
        self.assertNotIn('pkg.some.data', packages)
|
<commit_before><commit_msg>Add unit tests for find_packages
--HG--
extra : rebase_source : 75f5ce4d2fb9d0ccd7168739c23d9ea1eeeb9112<commit_after>"""Tests for setuptools.find_packages()."""
import os
import shutil
import tempfile
import unittest

from setuptools import find_packages


class TestFindPackages(unittest.TestCase):

    def setUp(self):
        self.dist_dir = tempfile.mkdtemp()
        self._make_pkg_structure()

    def tearDown(self):
        shutil.rmtree(self.dist_dir)

    def _make_pkg_structure(self):
        """Make basic package structure.

        dist/
            docs/
                conf.py
            pkg/
                __pycache__/
                nspkg/
                    mod.py
                subpkg/
                    assets/
                        asset
                    __init__.py
            setup.py
        """
        self.docs_dir = self._mkdir('docs', self.dist_dir)
        self._touch('conf.py', self.docs_dir)
        self.pkg_dir = self._mkdir('pkg', self.dist_dir)
        self._mkdir('__pycache__', self.pkg_dir)
        self.ns_pkg_dir = self._mkdir('nspkg', self.pkg_dir)
        self._touch('mod.py', self.ns_pkg_dir)
        self.sub_pkg_dir = self._mkdir('subpkg', self.pkg_dir)
        self.asset_dir = self._mkdir('assets', self.sub_pkg_dir)
        self._touch('asset', self.asset_dir)
        self._touch('__init__.py', self.sub_pkg_dir)
        self._touch('setup.py', self.dist_dir)

    def _mkdir(self, path, parent_dir=None):
        if parent_dir:
            path = os.path.join(parent_dir, path)
        os.mkdir(path)
        return path

    def _touch(self, path, dir_=None):
        if dir_:
            path = os.path.join(dir_, path)
        fp = open(path, 'w')
        fp.close()
        return path

    def test_regular_package(self):
        self._touch('__init__.py', self.pkg_dir)
        packages = find_packages(self.dist_dir)
        self.assertEqual(packages, ['pkg', 'pkg.subpkg'])

    def test_dir_with_dot_is_skipped(self):
        shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
        data_dir = self._mkdir('some.data', self.pkg_dir)
        self._touch('__init__.py', data_dir)
        self._touch('file.dat', data_dir)
        packages = find_packages(self.dist_dir)
        self.assertNotIn('pkg.some.data', packages)
|
|
3e1a40ae2455dc6b1588451e9633efac0fd1ffaf
|
addons/account/report/account_journal_common_default.py
|
addons/account/report/account_journal_common_default.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import pooler
from osv import osv, fields
from tools.translate import _


class account_journal_common_default(object):

    def _sum_debit(self, period_id, journal_id=False):
        if isinstance(journal_id, int):
            journal_id = [journal_id]
        if not journal_id:
            journal_id = self.journal_ids
        self.cr.execute('SELECT SUM(debit) FROM account_move_line l WHERE period_id=%s AND journal_id IN %s '+self.query_get_clause+' ', (period_id, tuple(journal_id)))
        res = self.cr.fetchone()[0]
        return res or 0.0

    def _sum_credit(self, period_id, journal_id=False):
        if isinstance(journal_id, int):
            journal_id = [journal_id]
        if not journal_id:
            journal_id = self.journal_ids
        self.cr.execute('SELECT SUM(credit) FROM account_move_line l WHERE period_id=%s AND journal_id IN %s '+self.query_get_clause+'', (period_id, tuple(journal_id)))
        return self.cr.fetchone()[0] or 0.0

    def get_start_date(self, form):
        return pooler.get_pool(self.cr.dbname).get('account.period').browse(self.cr, self.uid, form['period_from']).name

    def get_end_date(self, form):
        return pooler.get_pool(self.cr.dbname).get('account.period').browse(self.cr, self.uid, form['period_to']).name

#vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Add common report header default file (Tested with central journal only) => Need to improve it and work of all journals reports (Under development)
|
[ADD] Add common report header default file (Tested with central journal only) => Need to improve it and work of all journals reports (Under development)
bzr revid: mra@mra-laptop-20100710071957-ddp1dmz5ve9f5qdt
|
Python
|
agpl-3.0
|
dgzurita/odoo,osvalr/odoo,massot/odoo,sadleader/odoo,pplatek/odoo,slevenhagen/odoo,Drooids/odoo,dgzurita/odoo,waytai/odoo,feroda/odoo,zchking/odoo,markeTIC/OCB,steedos/odoo,ClearCorp-dev/odoo,oliverhr/odoo,hifly/OpenUpgrade,odootr/odoo,mszewczy/odoo,patmcb/odoo,Danisan/odoo-1,hmen89/odoo,Daniel-CA/odoo,sv-dev1/odoo,CatsAndDogsbvba/odoo,n0m4dz/odoo,OpenUpgrade/OpenUpgrade,windedge/odoo,prospwro/odoo,lightcn/odoo,lombritz/odoo,n0m4dz/odoo,mustafat/odoo-1,nuuuboo/odoo,nexiles/odoo,slevenhagen/odoo,mszewczy/odoo,mlaitinen/odoo,steedos/odoo,Elico-Corp/odoo_OCB,realsaiko/odoo,xzYue/odoo,0k/odoo,n0m4dz/odoo,Elico-Corp/odoo_OCB,aviciimaxwell/odoo,leoliujie/odoo,idncom/odoo,bealdav/OpenUpgrade,apocalypsebg/odoo,laslabs/odoo,hassoon3/odoo,hoatle/odoo,GauravSahu/odoo,diagramsoftware/odoo,patmcb/odoo,acshan/odoo,janocat/odoo,sve-odoo/odoo,fuselock/odoo,OpenUpgrade-dev/OpenUpgrade,gdgellatly/OCB1,fgesora/odoo,vnsofthe/odoo,jesramirez/odoo,JCA-Developpement/Odoo,jfpla/odoo,dariemp/odoo,dfang/odoo,ccomb/OpenUpgrade,hopeall/odoo,codekaki/odoo,jiachenning/odoo,cloud9UG/odoo,Endika/OpenUpgrade,odooindia/odoo,Bachaco-ve/odoo,Codefans-fan/odoo,xujb/odoo,mustafat/odoo-1,kirca/OpenUpgrade,pplatek/odoo,jpshort/odoo,synconics/odoo,bakhtout/odoo-educ,zchking/odoo,rdeheele/odoo,gorjuce/odoo,doomsterinc/odoo,n0m4dz/odoo,fdvarela/odoo8,bkirui/odoo,BT-astauder/odoo,gorjuce/odoo,acshan/odoo,rgeleta/odoo,rschnapka/odoo,ehirt/odoo,zchking/odoo,pedrobaeza/OpenUpgrade,osvalr/odoo,hifly/OpenUpgrade,ShineFan/odoo,xujb/odoo,Endika/odoo,ChanduERP/odoo,bkirui/odoo,ramitalat/odoo,omprakasha/odoo,hubsaysnuaa/odoo,OpenUpgrade-dev/OpenUpgrade,luistorresm/odoo,guewen/OpenUpgrade,ccomb/OpenUpgrade,ramadhane/odoo,odootr/odoo,jeasoft/odoo,fuhongliang/odoo,sinbazhou/odoo,papouso/odoo,grap/OpenUpgrade,damdam-s/OpenUpgrade,Ernesto99/odoo,tinkhaven-organization/odoo,Grirrane/odoo,stonegithubs/odoo,luiseduardohdbackup/odoo,CopeX/odoo,dkubiak789/odoo,storm-computers/odoo,tvtsoft/odoo8,erkrishna9/odoo,poljeff/odoo,hifly/OpenUpgrade,JGarcia-Panach/odoo,NeovaHealth/odoo,numerigraphe/odoo,codekaki/odoo,hoatle/odoo,jpshort/odoo,datenbetrieb/odoo,odootr/odoo,oihane/odoo,jpshort/odoo,slevenhagen/odoo,jolevq/odoopub,credativUK/OCB,deKupini/erp,factorlibre/OCB,AuyaJackie/odoo,nexiles/odoo,waytai/odoo,jolevq/odoopub,joariasl/odoo,kybriainfotech/iSocioCRM,sv-dev1/odoo,wangjun/odoo,agrista/odoo-saas,Maspear/odoo,osvalr/odoo,ujjwalwahi/odoo,klunwebale/odoo,0k/odoo,Maspear/odoo,nuncjo/odoo,rowemoore/odoo,ihsanudin/odoo,MarcosCommunity/odoo,slevenhagen/odoo,ihsanudin/odoo,tinkhaven-organization/odoo,0k/OpenUpgrade,Eric-Zhong/odoo,lsinfo/odoo,agrista/odoo-saas,dalegregory/odoo,nhomar/odoo-mirror,tvtsoft/odoo8,hassoon3/odoo,cloud9UG/odoo,ThinkOpen-Solutions/odoo,JonathanStein/odoo,frouty/odoo_oph,jolevq/odoopub,numerigraphe/odoo,credativUK/OCB,sergio-incaser/odoo,bguillot/OpenUpgrade,Kilhog/odoo,jeasoft/odoo,alexcuellar/odoo,hubsaysnuaa/odoo,Daniel-CA/odoo,shaufi10/odoo,CatsAndDogsbvba/odoo,oasiswork/odoo,demon-ru/iml-crm,addition-it-solutions/project-all,hanicker/odoo,leoliujie/odoo,rubencabrera/odoo,hifly/OpenUpgrade,virgree/odoo,omprakasha/odoo,shaufi10/odoo,MarcosCommunity/odoo,ShineFan/odoo,odoo-turkiye/odoo,florentx/OpenUpgrade,TRESCLOUD/odoopub,havt/odoo,mkieszek/odoo,markeTIC/OCB,guerrerocarlos/odoo,codekaki/odoo,jiangzhixiao/odoo,podemos-info/odoo,hip-odoo/odoo,mmbtba/odoo,VitalPet/odoo,tarzan0820/odoo,luistorresm/odoo,agrista/odoo-saas,havt/odoo,ojengwa/odoo,gorjuce/odoo,csrocha/OpenUpgrade,wangjun/odoo,stonegithubs/odoo,srsman/odoo,Noviat/odoo,BT-f
garbely/odoo,VitalPet/odoo,dkubiak789/odoo,windedge/odoo,lombritz/odoo,VielSoft/odoo,NeovaHealth/odoo,ubic135/odoo-design,jiangzhixiao/odoo,grap/OpenUpgrade,ShineFan/odoo,tarzan0820/odoo,gavin-feng/odoo,markeTIC/OCB,Kilhog/odoo,nuncjo/odoo,massot/odoo,ojengwa/odoo,Ernesto99/odoo,nhomar/odoo,kirca/OpenUpgrade,rdeheele/odoo,brijeshkesariya/odoo,0k/OpenUpgrade,collex100/odoo,doomsterinc/odoo,GauravSahu/odoo,jeasoft/odoo,bobisme/odoo,lightcn/odoo,joshuajan/odoo,sergio-incaser/odoo,xzYue/odoo,virgree/odoo,alqfahad/odoo,sv-dev1/odoo,tinkerthaler/odoo,podemos-info/odoo,eino-makitalo/odoo,codekaki/odoo,oasiswork/odoo,avoinsystems/odoo,Bachaco-ve/odoo,joariasl/odoo,NL66278/OCB,Danisan/odoo-1,Nick-OpusVL/odoo,sysadminmatmoz/OCB,incaser/odoo-odoo,FlorianLudwig/odoo,kybriainfotech/iSocioCRM,brijeshkesariya/odoo,odoousers2014/odoo,bplancher/odoo,alexteodor/odoo,optima-ict/odoo,funkring/fdoo,virgree/odoo,QianBIG/odoo,syci/OCB,lsinfo/odoo,CubicERP/odoo,christophlsa/odoo,alhashash/odoo,odoousers2014/odoo,rgeleta/odoo,christophlsa/odoo,rahuldhote/odoo,sebalix/OpenUpgrade,credativUK/OCB,idncom/odoo,klunwebale/odoo,dsfsdgsbngfggb/odoo,abdellatifkarroum/odoo,makinacorpus/odoo,Endika/odoo,bwrsandman/OpenUpgrade,Antiun/odoo,Gitlab11/odoo,ShineFan/odoo,bplancher/odoo,savoirfairelinux/odoo,collex100/odoo,luiseduardohdbackup/odoo,Drooids/odoo,tinkhaven-organization/odoo,cpyou/odoo,Bachaco-ve/odoo,srsman/odoo,BT-astauder/odoo,fevxie/odoo,shivam1111/odoo,stephen144/odoo,srimai/odoo,savoirfairelinux/OpenUpgrade,brijeshkesariya/odoo,ojengwa/odoo,damdam-s/OpenUpgrade,hubsaysnuaa/odoo,slevenhagen/odoo-npg,OpenPymeMx/OCB,ChanduERP/odoo,steedos/odoo,glovebx/odoo,sadleader/odoo,lgscofield/odoo,Danisan/odoo-1,grap/OpenUpgrade,rahuldhote/odoo,VitalPet/odoo,ecosoft-odoo/odoo,microcom/odoo,Noviat/odoo,factorlibre/OCB,n0m4dz/odoo,prospwro/odoo,addition-it-solutions/project-all,vnsofthe/odoo,leorochael/odoo,PongPi/isl-odoo,bguillot/OpenUpgrade,cloud9UG/odoo,joariasl/odoo,erkrishna9/odoo,Endika/OpenUpgrade,sve-odoo/odoo,mvaled/OpenUpgrade,Nick-OpusVL/odoo,sysadminmatmoz/OCB,syci/OCB,steedos/odoo,CopeX/odoo,sve-odoo/odoo,oliverhr/odoo,eino-makitalo/odoo,nitinitprof/odoo,klunwebale/odoo,aviciimaxwell/odoo,Antiun/odoo,abstract-open-solutions/OCB,jaxkodex/odoo,odootr/odoo,Maspear/odoo,leoliujie/odoo,sysadminmatmoz/OCB,odoo-turkiye/odoo,diagramsoftware/odoo,dkubiak789/odoo,takis/odoo,lgscofield/odoo,srsman/odoo,hopeall/odoo,ecosoft-odoo/odoo,diagramsoftware/odoo,cloud9UG/odoo,srsman/odoo,hoatle/odoo,factorlibre/OCB,SAM-IT-SA/odoo,prospwro/odoo,fossoult/odoo,ehirt/odoo,Drooids/odoo,idncom/odoo,zchking/odoo,makinacorpus/odoo,oihane/odoo,brijeshkesariya/odoo,NL66278/OCB,poljeff/odoo,Nick-OpusVL/odoo,luistorresm/odoo,Adel-Magebinary/odoo,poljeff/odoo,odoousers2014/odoo,lgscofield/odoo,sinbazhou/odoo,JonathanStein/odoo,MarcosCommunity/odoo,florian-dacosta/OpenUpgrade,ClearCorp-dev/odoo,abstract-open-solutions/OCB,ecosoft-odoo/odoo,mmbtba/odoo,jiachenning/odoo,havt/odoo,grap/OpenUpgrade,javierTerry/odoo,arthru/OpenUpgrade,hubsaysnuaa/odoo,bkirui/odoo,nitinitprof/odoo,bguillot/OpenUpgrade,dkubiak789/odoo,shivam1111/odoo,MarcosCommunity/odoo,microcom/odoo,fjbatresv/odoo,pedrobaeza/odoo,lsinfo/odoo,credativUK/OCB,javierTerry/odoo,jfpla/odoo,JGarcia-Panach/odoo,windedge/odoo,naousse/odoo,apanju/odoo,havt/odoo,cdrooom/odoo,jfpla/odoo,gsmartway/odoo,dezynetechnologies/odoo,Endika/odoo,frouty/odoo_oph,dariemp/odoo,Eric-Zhong/odoo,sadleader/odoo,shaufi10/odoo,csrocha/OpenUpgrade,xujb/odoo,sysadminmatmoz/OCB,synconics/odoo,jiangzhixiao/odoo,sve-odoo/
odoo,jpshort/odoo,naousse/odoo,kittiu/odoo,nhomar/odoo-mirror,jfpla/odoo,takis/odoo,blaggacao/OpenUpgrade,VielSoft/odoo,joshuajan/odoo,jiangzhixiao/odoo,bobisme/odoo,fuhongliang/odoo,gvb/odoo,GauravSahu/odoo,jpshort/odoo,dalegregory/odoo,mvaled/OpenUpgrade,kybriainfotech/iSocioCRM,ojengwa/odoo,blaggacao/OpenUpgrade,patmcb/odoo,hubsaysnuaa/odoo,funkring/fdoo,sadleader/odoo,ovnicraft/odoo,ChanduERP/odoo,camptocamp/ngo-addons-backport,chiragjogi/odoo,Grirrane/odoo,tarzan0820/odoo,BT-rmartin/odoo,nhomar/odoo,dllsf/odootest,salaria/odoo,jolevq/odoopub,markeTIC/OCB,CatsAndDogsbvba/odoo,joariasl/odoo,OpenUpgrade/OpenUpgrade,feroda/odoo,OpenUpgrade-dev/OpenUpgrade,apanju/GMIO_Odoo,charbeljc/OCB,vrenaville/ngo-addons-backport,nagyistoce/odoo-dev-odoo,funkring/fdoo,CatsAndDogsbvba/odoo,fdvarela/odoo8,nhomar/odoo,mvaled/OpenUpgrade,Grirrane/odoo,mkieszek/odoo,gsmartway/odoo,dariemp/odoo,hoatle/odoo,javierTerry/odoo,colinnewell/odoo,tvibliani/odoo,shivam1111/odoo,doomsterinc/odoo,NL66278/OCB,NL66278/OCB,havt/odoo,feroda/odoo,pedrobaeza/OpenUpgrade,klunwebale/odoo,savoirfairelinux/OpenUpgrade,fdvarela/odoo8,apanju/GMIO_Odoo,oihane/odoo,vrenaville/ngo-addons-backport,ubic135/odoo-design,GauravSahu/odoo,Adel-Magebinary/odoo,NL66278/OCB,jaxkodex/odoo,RafaelTorrealba/odoo,pedrobaeza/OpenUpgrade,highco-groupe/odoo,cysnake4713/odoo,kybriainfotech/iSocioCRM,Adel-Magebinary/odoo,wangjun/odoo,leoliujie/odoo,ingadhoc/odoo,ThinkOpen-Solutions/odoo,bakhtout/odoo-educ,luistorresm/odoo,Bachaco-ve/odoo,omprakasha/odoo,Noviat/odoo,jolevq/odoopub,gdgellatly/OCB1,abstract-open-solutions/OCB,ThinkOpen-Solutions/odoo,ecosoft-odoo/odoo,tvibliani/odoo,markeTIC/OCB,oihane/odoo,janocat/odoo,dariemp/odoo,jeasoft/odoo,OpusVL/odoo,hanicker/odoo,fjbatresv/odoo,ChanduERP/odoo,OSSESAC/odoopubarquiluz,jiachenning/odoo,Endika/OpenUpgrade,hmen89/odoo,Endika/odoo,mszewczy/odoo,alexteodor/odoo,vrenaville/ngo-addons-backport,gsmartway/odoo,slevenhagen/odoo-npg,tarzan0820/odoo,0k/OpenUpgrade,sergio-incaser/odoo,patmcb/odoo,RafaelTorrealba/odoo,JonathanStein/odoo,PongPi/isl-odoo,javierTerry/odoo,takis/odoo,oihane/odoo,ApuliaSoftware/odoo,podemos-info/odoo,draugiskisprendimai/odoo,jpshort/odoo,prospwro/odoo,oliverhr/odoo,dalegregory/odoo,optima-ict/odoo,kybriainfotech/iSocioCRM,grap/OpenUpgrade,damdam-s/OpenUpgrade,colinnewell/odoo,CatsAndDogsbvba/odoo,dgzurita/odoo,thanhacun/odoo,srsman/odoo,BT-astauder/odoo,sv-dev1/odoo,Ernesto99/odoo,JCA-Developpement/Odoo,janocat/odoo,christophlsa/odoo,makinacorpus/odoo,hmen89/odoo,abstract-open-solutions/OCB,makinacorpus/odoo,NeovaHealth/odoo,brijeshkesariya/odoo,Endika/OpenUpgrade,alhashash/odoo,frouty/odoogoeen,NeovaHealth/odoo,VitalPet/odoo,xzYue/odoo,hopeall/odoo,blaggacao/OpenUpgrade,CopeX/odoo,ygol/odoo,luistorresm/odoo,xujb/odoo,incaser/odoo-odoo,savoirfairelinux/odoo,glovebx/odoo,RafaelTorrealba/odoo,guewen/OpenUpgrade,fjbatresv/odoo,nagyistoce/odoo-dev-odoo,naousse/odoo,sebalix/OpenUpgrade,bobisme/odoo,QianBIG/odoo,Endika/OpenUpgrade,gorjuce/odoo,collex100/odoo,fevxie/odoo,highco-groupe/odoo,VielSoft/odoo,spadae22/odoo,cysnake4713/odoo,x111ong/odoo,x111ong/odoo,VielSoft/odoo,florian-dacosta/OpenUpgrade,rgeleta/odoo,gorjuce/odoo,dkubiak789/odoo,mlaitinen/odoo,hoatle/odoo,kifcaliph/odoo,glovebx/odoo,waytai/odoo,tinkerthaler/odoo,windedge/odoo,bkirui/odoo,hbrunn/OpenUpgrade,nagyistoce/odoo-dev-odoo,CubicERP/odoo,MarcosCommunity/odoo,odoo-turkiye/odoo,incaser/odoo-odoo,makinacorpus/odoo,incaser/odoo-odoo,lgscofield/odoo,mmbtba/odoo,apanju/odoo,savoirfairelinux/odoo,rowemoore/odoo,bplancher/odoo,Now
heresly/odoo,tangyiyong/odoo,draugiskisprendimai/odoo,ShineFan/odoo,ehirt/odoo,guerrerocarlos/odoo,kirca/OpenUpgrade,apocalypsebg/odoo,aviciimaxwell/odoo,mmbtba/odoo,storm-computers/odoo,nagyistoce/odoo-dev-odoo,provaleks/o8,tarzan0820/odoo,frouty/odoo_oph,ujjwalwahi/odoo,kirca/OpenUpgrade,SAM-IT-SA/odoo,gvb/odoo,acshan/odoo,tinkerthaler/odoo,OSSESAC/odoopubarquiluz,grap/OCB,mvaled/OpenUpgrade,sadleader/odoo,nexiles/odoo,datenbetrieb/odoo,tangyiyong/odoo,andreparames/odoo,dsfsdgsbngfggb/odoo,janocat/odoo,omprakasha/odoo,colinnewell/odoo,Elico-Corp/odoo_OCB,luistorresm/odoo,sebalix/OpenUpgrade,chiragjogi/odoo,hifly/OpenUpgrade,deKupini/erp,eino-makitalo/odoo,erkrishna9/odoo,ygol/odoo,odoousers2014/odoo,idncom/odoo,tarzan0820/odoo,Bachaco-ve/odoo,BT-astauder/odoo,nuncjo/odoo,cpyou/odoo,hip-odoo/odoo,shingonoide/odoo,alqfahad/odoo,florentx/OpenUpgrade,bguillot/OpenUpgrade,n0m4dz/odoo,fjbatresv/odoo,oihane/odoo,fuselock/odoo,cysnake4713/odoo,fuhongliang/odoo,OpenPymeMx/OCB,nexiles/odoo,alqfahad/odoo,Endika/odoo,papouso/odoo,doomsterinc/odoo,luiseduardohdbackup/odoo,markeTIC/OCB,nuuuboo/odoo,apanju/GMIO_Odoo,matrixise/odoo,rahuldhote/odoo,nhomar/odoo,BT-rmartin/odoo,Danisan/odoo-1,SerpentCS/odoo,jusdng/odoo,jeasoft/odoo,florentx/OpenUpgrade,ovnicraft/odoo,doomsterinc/odoo,xzYue/odoo,goliveirab/odoo,rgeleta/odoo,andreparames/odoo,apocalypsebg/odoo,Endika/OpenUpgrade,dfang/odoo,Gitlab11/odoo,juanalfonsopr/odoo,OpenUpgrade/OpenUpgrade,xujb/odoo,juanalfonsopr/odoo,nhomar/odoo-mirror,lsinfo/odoo,ihsanudin/odoo,rschnapka/odoo,frouty/odoo_oph,camptocamp/ngo-addons-backport,JonathanStein/odoo,TRESCLOUD/odoopub,rschnapka/odoo,Elico-Corp/odoo_OCB,JGarcia-Panach/odoo,idncom/odoo,QianBIG/odoo,bealdav/OpenUpgrade,andreparames/odoo,factorlibre/OCB,tinkerthaler/odoo,podemos-info/odoo,inspyration/odoo,bwrsandman/OpenUpgrade,0k/odoo,bwrsandman/OpenUpgrade,ovnicraft/odoo,Ichag/odoo,csrocha/OpenUpgrade,charbeljc/OCB,ecosoft-odoo/odoo,simongoffin/website_version,odoousers2014/odoo,OpenPymeMx/OCB,kirca/OpenUpgrade,shaufi/odoo,VitalPet/odoo,slevenhagen/odoo-npg,shingonoide/odoo,QianBIG/odoo,n0m4dz/odoo,rubencabrera/odoo,glovebx/odoo,OpenUpgrade-dev/OpenUpgrade,jpshort/odoo,podemos-info/odoo,frouty/odoogoeen,pplatek/odoo,laslabs/odoo,guewen/OpenUpgrade,fgesora/odoo,bkirui/odoo,frouty/odoo_oph,savoirfairelinux/OpenUpgrade,papouso/odoo,BT-ojossen/odoo,Antiun/odoo,aviciimaxwell/odoo,juanalfonsopr/odoo,gdgellatly/OCB1,Maspear/odoo,feroda/odoo,feroda/odoo,ThinkOpen-Solutions/odoo,tinkhaven-organization/odoo,massot/odoo,VielSoft/odoo,cloud9UG/odoo,csrocha/OpenUpgrade,addition-it-solutions/project-all,bakhtout/odoo-educ,savoirfairelinux/OpenUpgrade,gsmartway/odoo,jesramirez/odoo,hip-odoo/odoo,luiseduardohdbackup/odoo,elmerdpadilla/iv,eino-makitalo/odoo,gdgellatly/OCB1,brijeshkesariya/odoo,cedk/odoo,kifcaliph/odoo,MarcosCommunity/odoo,leorochael/odoo,Nick-OpusVL/odoo,vrenaville/ngo-addons-backport,hoatle/odoo,gdgellatly/OCB1,Nowheresly/odoo,salaria/odoo,bwrsandman/OpenUpgrade,GauravSahu/odoo,nexiles/odoo,dfang/odoo,erkrishna9/odoo,synconics/odoo,chiragjogi/odoo,dezynetechnologies/odoo,bplancher/odoo,datenbetrieb/odoo,OpenPymeMx/OCB,sergio-incaser/odoo,feroda/odoo,xujb/odoo,acshan/odoo,SAM-IT-SA/odoo,alqfahad/odoo,CubicERP/odoo,hbrunn/OpenUpgrade,glovebx/odoo,odootr/odoo,provaleks/o8,PongPi/isl-odoo,makinacorpus/odoo,spadae22/odoo,factorlibre/OCB,hopeall/odoo,shaufi/odoo,collex100/odoo,dgzurita/odoo,nuuuboo/odoo,salaria/odoo,RafaelTorrealba/odoo,florian-dacosta/OpenUpgrade,Codefans-fan/odoo,mszewczy/odoo,jiangzhixiao/odoo,BT-o
jossen/odoo,RafaelTorrealba/odoo,fossoult/odoo,takis/odoo,dkubiak789/odoo,Maspear/odoo,avoinsystems/odoo,hbrunn/OpenUpgrade,oliverhr/odoo,dfang/odoo,Danisan/odoo-1,slevenhagen/odoo,cpyou/odoo,hassoon3/odoo,AuyaJackie/odoo,idncom/odoo,spadae22/odoo,dsfsdgsbngfggb/odoo,colinnewell/odoo,kittiu/odoo,Kilhog/odoo,gdgellatly/OCB1,shivam1111/odoo,bakhtout/odoo-educ,datenbetrieb/odoo,matrixise/odoo,mkieszek/odoo,FlorianLudwig/odoo,shaufi10/odoo,microcom/odoo,gsmartway/odoo,Adel-Magebinary/odoo,dezynetechnologies/odoo,sinbazhou/odoo,charbeljc/OCB,synconics/odoo,nuncjo/odoo,alhashash/odoo,realsaiko/odoo,tangyiyong/odoo,hoatle/odoo,codekaki/odoo,bobisme/odoo,florian-dacosta/OpenUpgrade,kittiu/odoo,alexteodor/odoo,nuncjo/odoo,nuuuboo/odoo,ccomb/OpenUpgrade,fdvarela/odoo8,makinacorpus/odoo,grap/OCB,sinbazhou/odoo,csrocha/OpenUpgrade,rschnapka/odoo,omprakasha/odoo,arthru/OpenUpgrade,Ichag/odoo,Daniel-CA/odoo,Noviat/odoo,luiseduardohdbackup/odoo,alexcuellar/odoo,slevenhagen/odoo-npg,cloud9UG/odoo,KontorConsulting/odoo,ApuliaSoftware/odoo,mvaled/OpenUpgrade,OpenUpgrade/OpenUpgrade,klunwebale/odoo,funkring/fdoo,Antiun/odoo,Nowheresly/odoo,minhtuancn/odoo,wangjun/odoo,lombritz/odoo,Nick-OpusVL/odoo,OSSESAC/odoopubarquiluz,fevxie/odoo,apanju/GMIO_Odoo,vnsofthe/odoo,nitinitprof/odoo,thanhacun/odoo,NeovaHealth/odoo,poljeff/odoo,Ichag/odoo,GauravSahu/odoo,dllsf/odootest,dllsf/odootest,bakhtout/odoo-educ,apocalypsebg/odoo,leorochael/odoo,dalegregory/odoo,KontorConsulting/odoo,Eric-Zhong/odoo,steedos/odoo,simongoffin/website_version,QianBIG/odoo,guerrerocarlos/odoo,BT-rmartin/odoo,ApuliaSoftware/odoo,bealdav/OpenUpgrade,rubencabrera/odoo,VitalPet/odoo,0k/odoo,Elico-Corp/odoo_OCB,shaufi10/odoo,leorochael/odoo,OpusVL/odoo,jaxkodex/odoo,thanhacun/odoo,podemos-info/odoo,rahuldhote/odoo,grap/OCB,gvb/odoo,credativUK/OCB,gvb/odoo,thanhacun/odoo,demon-ru/iml-crm,Ernesto99/odoo,vrenaville/ngo-addons-backport,sysadminmatmoz/OCB,jeasoft/odoo,spadae22/odoo,ingadhoc/odoo,FlorianLudwig/odoo,tangyiyong/odoo,virgree/odoo,jusdng/odoo,acshan/odoo,tinkerthaler/odoo,datenbetrieb/odoo,cysnake4713/odoo,Codefans-fan/odoo,Gitlab11/odoo,stonegithubs/odoo,minhtuancn/odoo,fuhongliang/odoo,ecosoft-odoo/odoo,omprakasha/odoo,CopeX/odoo,Ichag/odoo,jfpla/odoo,JCA-Developpement/Odoo,realsaiko/odoo,CubicERP/odoo,Danisan/odoo-1,poljeff/odoo,camptocamp/ngo-addons-backport,slevenhagen/odoo-npg,kittiu/odoo,SerpentCS/odoo,ThinkOpen-Solutions/odoo,glovebx/odoo,jaxkodex/odoo,hopeall/odoo,Bachaco-ve/odoo,fevxie/odoo,damdam-s/OpenUpgrade,sergio-incaser/odoo,fdvarela/odoo8,PongPi/isl-odoo,dgzurita/odoo,Noviat/odoo,BT-fgarbely/odoo,hmen89/odoo,oihane/odoo,kifcaliph/odoo,ChanduERP/odoo,xzYue/odoo,ramitalat/odoo,VielSoft/odoo,shaufi/odoo,fuselock/odoo,Kilhog/odoo,OpenUpgrade/OpenUpgrade,rschnapka/odoo,numerigraphe/odoo,mustafat/odoo-1,mszewczy/odoo,waytai/odoo,vnsofthe/odoo,jusdng/odoo,jfpla/odoo,chiragjogi/odoo,bealdav/OpenUpgrade,windedge/odoo,tinkhaven-organization/odoo,sv-dev1/odoo,dezynetechnologies/odoo,BT-ojossen/odoo,shaufi/odoo,dllsf/odootest,rschnapka/odoo,acshan/odoo,leoliujie/odoo,OpenUpgrade/OpenUpgrade,alhashash/odoo,prospwro/odoo,juanalfonsopr/odoo,elmerdpadilla/iv,mlaitinen/odoo,salaria/odoo,abstract-open-solutions/OCB,andreparames/odoo,fossoult/odoo,takis/odoo,fjbatresv/odoo,florian-dacosta/OpenUpgrade,alexteodor/odoo,goliveirab/odoo,ojengwa/odoo,realsaiko/odoo,BT-ojossen/odoo,slevenhagen/odoo,laslabs/odoo,KontorConsulting/odoo,avoinsystems/odoo,takis/odoo,ubic135/odoo-design,ihsanudin/odoo,grap/OCB,laslabs/odoo,ygol/odoo,bkirui/odoo,optima-ict
/odoo,JonathanStein/odoo,kittiu/odoo,pedrobaeza/odoo,tinkerthaler/odoo,fossoult/odoo,shivam1111/odoo,goliveirab/odoo,collex100/odoo,pedrobaeza/OpenUpgrade,bwrsandman/OpenUpgrade,Grirrane/odoo,Ernesto99/odoo,sinbazhou/odoo,papouso/odoo,tvibliani/odoo,KontorConsulting/odoo,BT-fgarbely/odoo,shingonoide/odoo,kybriainfotech/iSocioCRM,optima-ict/odoo,juanalfonsopr/odoo,gvb/odoo,JCA-Developpement/Odoo,PongPi/isl-odoo,funkring/fdoo,grap/OCB,gsmartway/odoo,kittiu/odoo,grap/OCB,Endika/odoo,inspyration/odoo,papouso/odoo,shingonoide/odoo,ChanduERP/odoo,vrenaville/ngo-addons-backport,PongPi/isl-odoo,gavin-feng/odoo,bguillot/OpenUpgrade,cedk/odoo,rowemoore/odoo,dllsf/odootest,rahuldhote/odoo,aviciimaxwell/odoo,ramadhane/odoo,shivam1111/odoo,havt/odoo,hopeall/odoo,virgree/odoo,tvtsoft/odoo8,virgree/odoo,rgeleta/odoo,cedk/odoo,srimai/odoo,Daniel-CA/odoo,SerpentCS/odoo,cpyou/odoo,OSSESAC/odoopubarquiluz,hip-odoo/odoo,codekaki/odoo,NeovaHealth/odoo,cedk/odoo,abenzbiria/clients_odoo,nitinitprof/odoo,FlorianLudwig/odoo,klunwebale/odoo,guewen/OpenUpgrade,tinkhaven-organization/odoo,hip-odoo/odoo,apanju/odoo,abdellatifkarroum/odoo,srimai/odoo,RafaelTorrealba/odoo,pplatek/odoo,stonegithubs/odoo,thanhacun/odoo,oasiswork/odoo,QianBIG/odoo,incaser/odoo-odoo,SerpentCS/odoo,cedk/odoo,provaleks/o8,florentx/OpenUpgrade,grap/OCB,jiachenning/odoo,apanju/GMIO_Odoo,wangjun/odoo,numerigraphe/odoo,dsfsdgsbngfggb/odoo,srimai/odoo,pedrobaeza/OpenUpgrade,ovnicraft/odoo,thanhacun/odoo,dfang/odoo,Drooids/odoo,ingadhoc/odoo,bobisme/odoo,synconics/odoo,mmbtba/odoo,joariasl/odoo,odoo-turkiye/odoo,Gitlab11/odoo,addition-it-solutions/project-all,jesramirez/odoo,savoirfairelinux/OpenUpgrade,xzYue/odoo,sv-dev1/odoo,hanicker/odoo,jeasoft/odoo,ovnicraft/odoo,bguillot/OpenUpgrade,TRESCLOUD/odoopub,naousse/odoo,tinkhaven-organization/odoo,christophlsa/odoo,ApuliaSoftware/odoo,juanalfonsopr/odoo,laslabs/odoo,frouty/odoogoeen,ujjwalwahi/odoo,osvalr/odoo,gorjuce/odoo,feroda/odoo,BT-rmartin/odoo,nhomar/odoo-mirror,ovnicraft/odoo,odoo-turkiye/odoo,draugiskisprendimai/odoo,rowemoore/odoo,ygol/odoo,oasiswork/odoo,provaleks/o8,CatsAndDogsbvba/odoo,andreparames/odoo,arthru/OpenUpgrade,naousse/odoo,xzYue/odoo,OpenUpgrade-dev/OpenUpgrade,juanalfonsopr/odoo,rubencabrera/odoo,ccomb/OpenUpgrade,nagyistoce/odoo-dev-odoo,Codefans-fan/odoo,jaxkodex/odoo,Drooids/odoo,fjbatresv/odoo,OpusVL/odoo,brijeshkesariya/odoo,addition-it-solutions/project-all,BT-rmartin/odoo,ehirt/odoo,ygol/odoo,AuyaJackie/odoo,lombritz/odoo,eino-makitalo/odoo,hanicker/odoo,aviciimaxwell/odoo,nuuuboo/odoo,syci/OCB,SAM-IT-SA/odoo,ingadhoc/odoo,fuselock/odoo,Adel-Magebinary/odoo,MarcosCommunity/odoo,janocat/odoo,fossoult/odoo,lombritz/odoo,sergio-incaser/odoo,simongoffin/website_version,JGarcia-Panach/odoo,joshuajan/odoo,wangjun/odoo,JGarcia-Panach/odoo,blaggacao/OpenUpgrade,hmen89/odoo,rubencabrera/odoo,ClearCorp-dev/odoo,damdam-s/OpenUpgrade,demon-ru/iml-crm,janocat/odoo,osvalr/odoo,guewen/OpenUpgrade,KontorConsulting/odoo,minhtuancn/odoo,Eric-Zhong/odoo,zchking/odoo,vrenaville/ngo-addons-backport,agrista/odoo-saas,cdrooom/odoo,naousse/odoo,Antiun/odoo,Kilhog/odoo,srsman/odoo,grap/OCB,0k/OpenUpgrade,Eric-Zhong/odoo,tvibliani/odoo,fgesora/odoo,chiragjogi/odoo,guewen/OpenUpgrade,Daniel-CA/odoo,aviciimaxwell/odoo,Endika/OpenUpgrade,pedrobaeza/odoo,storm-computers/odoo,JGarcia-Panach/odoo,odoousers2014/odoo,oasiswork/odoo,Ichag/odoo,csrocha/OpenUpgrade,CatsAndDogsbvba/odoo,nagyistoce/odoo-dev-odoo,jiachenning/odoo,OpenPymeMx/OCB,diagramsoftware/odoo,bobisme/odoo,diagramsoftware/odoo,synconic
s/odoo,arthru/OpenUpgrade,gvb/odoo,ehirt/odoo,OSSESAC/odoopubarquiluz,minhtuancn/odoo,sebalix/OpenUpgrade,srimai/odoo,ujjwalwahi/odoo,minhtuancn/odoo,eino-makitalo/odoo,florentx/OpenUpgrade,Nowheresly/odoo,sinbazhou/odoo,Nick-OpusVL/odoo,apanju/odoo,odooindia/odoo,ramadhane/odoo,tvibliani/odoo,ihsanudin/odoo,dsfsdgsbngfggb/odoo,erkrishna9/odoo,SerpentCS/odoo,cloud9UG/odoo,dezynetechnologies/odoo,Grirrane/odoo,NeovaHealth/odoo,mmbtba/odoo,GauravSahu/odoo,rgeleta/odoo,ojengwa/odoo,kifcaliph/odoo,mlaitinen/odoo,joshuajan/odoo,rowemoore/odoo,slevenhagen/odoo,lightcn/odoo,microcom/odoo,ccomb/OpenUpgrade,savoirfairelinux/OpenUpgrade,BT-ojossen/odoo,ThinkOpen-Solutions/odoo,joshuajan/odoo,nexiles/odoo,rdeheele/odoo,savoirfairelinux/odoo,rgeleta/odoo,deKupini/erp,Daniel-CA/odoo,bwrsandman/OpenUpgrade,Danisan/odoo-1,srimai/odoo,christophlsa/odoo,draugiskisprendimai/odoo,windedge/odoo,papouso/odoo,abdellatifkarroum/odoo,storm-computers/odoo,leorochael/odoo,fevxie/odoo,savoirfairelinux/odoo,fuselock/odoo,goliveirab/odoo,Codefans-fan/odoo,waytai/odoo,JonathanStein/odoo,CubicERP/odoo,FlorianLudwig/odoo,goliveirab/odoo,mvaled/OpenUpgrade,chiragjogi/odoo,mkieszek/odoo,gdgellatly/OCB1,gsmartway/odoo,luistorresm/odoo,apanju/odoo,ihsanudin/odoo,provaleks/o8,nitinitprof/odoo,vnsofthe/odoo,mkieszek/odoo,ovnicraft/odoo,apanju/GMIO_Odoo,slevenhagen/odoo-npg,Nowheresly/odoo,Ernesto99/odoo,rschnapka/odoo,markeTIC/OCB,fevxie/odoo,kybriainfotech/iSocioCRM,sve-odoo/odoo,hifly/OpenUpgrade,ClearCorp-dev/odoo,Bachaco-ve/odoo,abenzbiria/clients_odoo,minhtuancn/odoo,VielSoft/odoo,prospwro/odoo,rahuldhote/odoo,hubsaysnuaa/odoo,tangyiyong/odoo,lombritz/odoo,mlaitinen/odoo,ygol/odoo,draugiskisprendimai/odoo,ramitalat/odoo,cysnake4713/odoo,draugiskisprendimai/odoo,blaggacao/OpenUpgrade,AuyaJackie/odoo,hassoon3/odoo,wangjun/odoo,patmcb/odoo,mvaled/OpenUpgrade,virgree/odoo,ChanduERP/odoo,rubencabrera/odoo,ingadhoc/odoo,tarzan0820/odoo,numerigraphe/odoo,fuhongliang/odoo,shaufi/odoo,bguillot/OpenUpgrade,AuyaJackie/odoo,sysadminmatmoz/OCB,cdrooom/odoo,camptocamp/ngo-addons-backport,alexcuellar/odoo,ApuliaSoftware/odoo,takis/odoo,odoo-turkiye/odoo,oliverhr/odoo,guerrerocarlos/odoo,tangyiyong/odoo,nhomar/odoo-mirror,hanicker/odoo,pplatek/odoo,lgscofield/odoo,stonegithubs/odoo,Adel-Magebinary/odoo,goliveirab/odoo,sysadminmatmoz/OCB,poljeff/odoo,elmerdpadilla/iv,mustafat/odoo-1,SAM-IT-SA/odoo,kirca/OpenUpgrade,oasiswork/odoo,joariasl/odoo,ujjwalwahi/odoo,camptocamp/ngo-addons-backport,nhomar/odoo,OpenPymeMx/OCB,bakhtout/odoo-educ,lgscofield/odoo,microcom/odoo,Drooids/odoo,Codefans-fan/odoo,mustafat/odoo-1,idncom/odoo,BT-ojossen/odoo,ingadhoc/odoo,BT-fgarbely/odoo,codekaki/odoo,factorlibre/OCB,dgzurita/odoo,JCA-Developpement/Odoo,simongoffin/website_version,dariemp/odoo,jaxkodex/odoo,BT-fgarbely/odoo,cdrooom/odoo,gorjuce/odoo,matrixise/odoo,vrenaville/ngo-addons-backport,alhashash/odoo,bakhtout/odoo-educ,ClearCorp-dev/odoo,demon-ru/iml-crm,bobisme/odoo,VitalPet/odoo,SerpentCS/odoo,sinbazhou/odoo,stephen144/odoo,javierTerry/odoo,nagyistoce/odoo-dev-odoo,dalegregory/odoo,abenzbiria/clients_odoo,colinnewell/odoo,microcom/odoo,florentx/OpenUpgrade,alexcuellar/odoo,jusdng/odoo,x111ong/odoo,shingonoide/odoo,salaria/odoo,jiangzhixiao/odoo,grap/OpenUpgrade,christophlsa/odoo,dezynetechnologies/odoo,lsinfo/odoo,apanju/odoo,patmcb/odoo,ygol/odoo,savoirfairelinux/odoo,kifcaliph/odoo,colinnewell/odoo,abdellatifkarroum/odoo,jeasoft/odoo,lightcn/odoo,ehirt/odoo,incaser/odoo-odoo,avoinsystems/odoo,ccomb/OpenUpgrade,salaria/odoo,jfpla/odoo,spadae22/odo
o,spadae22/odoo,fuhongliang/odoo,Maspear/odoo,podemos-info/odoo,BT-ojossen/odoo,ubic135/odoo-design,nuuuboo/odoo,OpenUpgrade-dev/OpenUpgrade,dariemp/odoo,ApuliaSoftware/odoo,frouty/odoogoeen,ihsanudin/odoo,bwrsandman/OpenUpgrade,papouso/odoo,charbeljc/OCB,draugiskisprendimai/odoo,0k/odoo,diagramsoftware/odoo,0k/OpenUpgrade,alexcuellar/odoo,pplatek/odoo,jesramirez/odoo,credativUK/OCB,zchking/odoo,chiragjogi/odoo,Eric-Zhong/odoo,omprakasha/odoo,mszewczy/odoo,x111ong/odoo,luiseduardohdbackup/odoo,frouty/odoogoeen,lightcn/odoo,bplancher/odoo,credativUK/OCB,andreparames/odoo,abstract-open-solutions/OCB,tvtsoft/odoo8,fuhongliang/odoo,havt/odoo,fossoult/odoo,gavin-feng/odoo,minhtuancn/odoo,frouty/odoogoeen,dfang/odoo,pedrobaeza/OpenUpgrade,leoliujie/odoo,cpyou/odoo,doomsterinc/odoo,mlaitinen/odoo,jiachenning/odoo,lgscofield/odoo,blaggacao/OpenUpgrade,BT-fgarbely/odoo,CopeX/odoo,tinkerthaler/odoo,fossoult/odoo,odooindia/odoo,spadae22/odoo,BT-rmartin/odoo,hbrunn/OpenUpgrade,avoinsystems/odoo,ujjwalwahi/odoo,ramitalat/odoo,addition-it-solutions/project-all,sv-dev1/odoo,optima-ict/odoo,lightcn/odoo,thanhacun/odoo,abdellatifkarroum/odoo,mmbtba/odoo,arthru/OpenUpgrade,storm-computers/odoo,hassoon3/odoo,ApuliaSoftware/odoo,camptocamp/ngo-addons-backport,hanicker/odoo,Gitlab11/odoo,waytai/odoo,sebalix/OpenUpgrade,TRESCLOUD/odoopub,stephen144/odoo,stonegithubs/odoo,ecosoft-odoo/odoo,leorochael/odoo,charbeljc/OCB,highco-groupe/odoo,CubicERP/odoo,tvibliani/odoo,alqfahad/odoo,lightcn/odoo,ShineFan/odoo,oliverhr/odoo,apocalypsebg/odoo,zchking/odoo,guewen/OpenUpgrade,simongoffin/website_version,ubic135/odoo-design,elmerdpadilla/iv,FlorianLudwig/odoo,waytai/odoo,alqfahad/odoo,alqfahad/odoo,pedrobaeza/odoo,PongPi/isl-odoo,bplancher/odoo,shingonoide/odoo,christophlsa/odoo,x111ong/odoo,Kilhog/odoo,syci/OCB,diagramsoftware/odoo,mkieszek/odoo,deKupini/erp,BT-astauder/odoo,lsinfo/odoo,stephen144/odoo,ramadhane/odoo,alhashash/odoo,RafaelTorrealba/odoo,dkubiak789/odoo,tvtsoft/odoo8,elmerdpadilla/iv,nuncjo/odoo,gavin-feng/odoo,shivam1111/odoo,javierTerry/odoo,matrixise/odoo,leorochael/odoo,shaufi/odoo,SerpentCS/odoo,ehirt/odoo,OSSESAC/odoopubarquiluz,OpenPymeMx/OCB,janocat/odoo,blaggacao/OpenUpgrade,ojengwa/odoo,OpenPymeMx/OCB,steedos/odoo,jusdng/odoo,avoinsystems/odoo,apocalypsebg/odoo,ThinkOpen-Solutions/odoo,hanicker/odoo,nhomar/odoo,tvtsoft/odoo8,hbrunn/OpenUpgrade,sebalix/OpenUpgrade,codekaki/odoo,Ichag/odoo,acshan/odoo,nexiles/odoo,apocalypsebg/odoo,camptocamp/ngo-addons-backport,shingonoide/odoo,storm-computers/odoo,gvb/odoo,grap/OpenUpgrade,abdellatifkarroum/odoo,AuyaJackie/odoo,Maspear/odoo,luiseduardohdbackup/odoo,datenbetrieb/odoo,OpusVL/odoo,poljeff/odoo,collex100/odoo,vnsofthe/odoo,abstract-open-solutions/OCB,csrocha/OpenUpgrade,salaria/odoo,Eric-Zhong/odoo,0k/OpenUpgrade,rowemoore/odoo,prospwro/odoo,synconics/odoo,frouty/odoogoeen,dsfsdgsbngfggb/odoo,rahuldhote/odoo,fgesora/odoo,sebalix/OpenUpgrade,Codefans-fan/odoo,demon-ru/iml-crm,arthru/OpenUpgrade,abenzbiria/clients_odoo,Kilhog/odoo,rubencabrera/odoo,fuselock/odoo,ramitalat/odoo,CopeX/odoo,BT-rmartin/odoo,CubicERP/odoo,mustafat/odoo-1,guerrerocarlos/odoo,ccomb/OpenUpgrade,damdam-s/OpenUpgrade,SAM-IT-SA/odoo,agrista/odoo-saas,Endika/odoo,Ichag/odoo,deKupini/erp,provaleks/o8,jaxkodex/odoo,lsinfo/odoo,joshuajan/odoo,optima-ict/odoo,apanju/odoo,Nick-OpusVL/odoo,hubsaysnuaa/odoo,goliveirab/odoo,credativUK/OCB,funkring/fdoo,windedge/odoo,highco-groupe/odoo,stonegithubs/odoo,rdeheele/odoo,AuyaJackie/odoo,stephen144/odoo,vnsofthe/odoo,Nowheresly/odoo,nitinitpro
f/odoo,CopeX/odoo,jesramirez/odoo,patmcb/odoo,eino-makitalo/odoo,Grirrane/odoo,rdeheele/odoo,ramadhane/odoo,inspyration/odoo,nuncjo/odoo,klunwebale/odoo,slevenhagen/odoo-npg,VitalPet/odoo,oliverhr/odoo,KontorConsulting/odoo,florian-dacosta/OpenUpgrade,x111ong/odoo,jiangzhixiao/odoo,charbeljc/OCB,syci/OCB,highco-groupe/odoo,odootr/odoo,gavin-feng/odoo,SAM-IT-SA/odoo,Elico-Corp/odoo_OCB,fuselock/odoo,kirca/OpenUpgrade,ingadhoc/odoo,odooindia/odoo,javierTerry/odoo,doomsterinc/odoo,rschnapka/odoo,JonathanStein/odoo,Noviat/odoo,Adel-Magebinary/odoo,mlaitinen/odoo,tangyiyong/odoo,shaufi10/odoo,apanju/GMIO_Odoo,bealdav/OpenUpgrade,syci/OCB,kittiu/odoo,hifly/OpenUpgrade,oasiswork/odoo,gdgellatly/OCB1,FlorianLudwig/odoo,colinnewell/odoo,KontorConsulting/odoo,glovebx/odoo,guerrerocarlos/odoo,pedrobaeza/odoo,avoinsystems/odoo,hbrunn/OpenUpgrade,JGarcia-Panach/odoo,charbeljc/OCB,pedrobaeza/OpenUpgrade,srsman/odoo,naousse/odoo,odoo-turkiye/odoo,alexteodor/odoo,ramitalat/odoo,datenbetrieb/odoo,numerigraphe/odoo,incaser/odoo-odoo,collex100/odoo,mszewczy/odoo,alexcuellar/odoo,funkring/fdoo,BT-fgarbely/odoo,massot/odoo,abdellatifkarroum/odoo,odootr/odoo,guerrerocarlos/odoo,inspyration/odoo,Antiun/odoo,x111ong/odoo,realsaiko/odoo,dgzurita/odoo,Nowheresly/odoo,shaufi10/odoo,dalegregory/odoo,fgesora/odoo,damdam-s/OpenUpgrade,camptocamp/ngo-addons-backport,jusdng/odoo,pplatek/odoo,bealdav/OpenUpgrade,fgesora/odoo,ramadhane/odoo,jusdng/odoo,MarcosCommunity/odoo,srimai/odoo,odooindia/odoo,ramadhane/odoo,shaufi/odoo,pedrobaeza/odoo,frouty/odoo_oph,frouty/odoogoeen,stephen144/odoo,cedk/odoo,TRESCLOUD/odoopub,nitinitprof/odoo,xujb/odoo,osvalr/odoo,Gitlab11/odoo,bkirui/odoo,hopeall/odoo,laslabs/odoo,ujjwalwahi/odoo,Ernesto99/odoo,Daniel-CA/odoo,hip-odoo/odoo,gavin-feng/odoo,Noviat/odoo,rowemoore/odoo,dsfsdgsbngfggb/odoo,lombritz/odoo,fgesora/odoo,matrixise/odoo,Drooids/odoo,andreparames/odoo,tvibliani/odoo,joariasl/odoo,alexcuellar/odoo,dariemp/odoo,Antiun/odoo,steedos/odoo,fevxie/odoo,mustafat/odoo-1,massot/odoo,abenzbiria/clients_odoo,numerigraphe/odoo,provaleks/o8,ShineFan/odoo,fjbatresv/odoo,dezynetechnologies/odoo,osvalr/odoo,leoliujie/odoo,OpenUpgrade/OpenUpgrade,cedk/odoo,nuuuboo/odoo,Gitlab11/odoo,gavin-feng/odoo,factorlibre/OCB,hassoon3/odoo,dalegregory/odoo
|
[ADD] Add common report header default file (Tested with central journal only) => Need to improve it and work of all journals reports (Under development)
bzr revid: mra@mra-laptop-20100710071957-ddp1dmz5ve9f5qdt
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import pooler
from osv import osv, fields
from tools.translate import _


class account_journal_common_default(object):

    def _sum_debit(self, period_id, journal_id=False):
        if isinstance(journal_id, int):
            journal_id = [journal_id]
        if not journal_id:
            journal_id = self.journal_ids
        self.cr.execute('SELECT SUM(debit) FROM account_move_line l WHERE period_id=%s AND journal_id IN %s '+self.query_get_clause+' ', (period_id, tuple(journal_id)))
        res = self.cr.fetchone()[0]
        return res or 0.0

    def _sum_credit(self, period_id, journal_id=False):
        if isinstance(journal_id, int):
            journal_id = [journal_id]
        if not journal_id:
            journal_id = self.journal_ids
        self.cr.execute('SELECT SUM(credit) FROM account_move_line l WHERE period_id=%s AND journal_id IN %s '+self.query_get_clause+'', (period_id, tuple(journal_id)))
        return self.cr.fetchone()[0] or 0.0

    def get_start_date(self, form):
        return pooler.get_pool(self.cr.dbname).get('account.period').browse(self.cr, self.uid, form['period_from']).name

    def get_end_date(self, form):
        return pooler.get_pool(self.cr.dbname).get('account.period').browse(self.cr, self.uid, form['period_to']).name

#vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
<commit_before><commit_msg>[ADD] Add common report header default file (Tested with central journal only) => Needs improvement to work for all journal reports (Under development)
bzr revid: mra@mra-laptop-20100710071957-ddp1dmz5ve9f5qdt<commit_after>
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv, fields
from tools.translate import _
import pooler  # used by get_start_date/get_end_date
class account_journal_common_default(object):
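    # Mixin for journal report parsers; relies on self.cr, self.uid,
    # self.journal_ids and self.query_get_clause being set by the enclosing report.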
def _sum_debit(self, period_id, journal_id=False):
if isinstance(journal_id, int):
journal_id = [journal_id]
if not journal_id:
journal_id = self.journal_ids
self.cr.execute('SELECT SUM(debit) FROM account_move_line l WHERE period_id=%s AND journal_id IN %s '+self.query_get_clause+' ', (period_id, tuple(journal_id)))
res = self.cr.fetchone()[0]
return res or 0.0
def _sum_credit(self, period_id, journal_id=False):
if isinstance(journal_id, int):
journal_id = [journal_id]
if not journal_id:
journal_id = self.journal_ids
self.cr.execute('SELECT SUM(credit) FROM account_move_line l WHERE period_id=%s AND journal_id IN %s '+self.query_get_clause+'', (period_id, tuple(journal_id)))
return self.cr.fetchone()[0] or 0.0
def get_start_date(self, form):
return pooler.get_pool(self.cr.dbname).get('account.period').browse(self.cr,self.uid,form['period_from']).name
def get_end_date(self, form):
return pooler.get_pool(self.cr.dbname).get('account.period').browse(self.cr,self.uid,form['period_to']).name
#vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
[ADD] Add common report header default file (Tested with central journal only) => Needs improvement to work for all journal reports (Under development)
bzr revid: mra@mra-laptop-20100710071957-ddp1dmz5ve9f5qdt# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv, fields
from tools.translate import _
import pooler  # used by get_start_date/get_end_date
class account_journal_common_default(object):
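    # Mixin for journal report parsers; relies on self.cr, self.uid,
    # self.journal_ids and self.query_get_clause being set by the enclosing report.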
def _sum_debit(self, period_id, journal_id=False):
if isinstance(journal_id, int):
journal_id = [journal_id]
if not journal_id:
journal_id = self.journal_ids
self.cr.execute('SELECT SUM(debit) FROM account_move_line l WHERE period_id=%s AND journal_id IN %s '+self.query_get_clause+' ', (period_id, tuple(journal_id)))
res = self.cr.fetchone()[0]
return res or 0.0
def _sum_credit(self, period_id, journal_id=False):
if isinstance(journal_id, int):
journal_id = [journal_id]
if not journal_id:
journal_id = self.journal_ids
self.cr.execute('SELECT SUM(credit) FROM account_move_line l WHERE period_id=%s AND journal_id IN %s '+self.query_get_clause+'', (period_id, tuple(journal_id)))
return self.cr.fetchone()[0] or 0.0
def get_start_date(self, form):
return pooler.get_pool(self.cr.dbname).get('account.period').browse(self.cr,self.uid,form['period_from']).name
def get_end_date(self, form):
return pooler.get_pool(self.cr.dbname).get('account.period').browse(self.cr,self.uid,form['period_to']).name
#vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
<commit_before><commit_msg>[ADD] Add common report header default file (Tested with central journal only) => Needs improvement to work for all journal reports (Under development)
bzr revid: mra@mra-laptop-20100710071957-ddp1dmz5ve9f5qdt<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv, fields
from tools.translate import _
import pooler  # used by get_start_date/get_end_date
class account_journal_common_default(object):
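    # Mixin for journal report parsers; relies on self.cr, self.uid,
    # self.journal_ids and self.query_get_clause being set by the enclosing report.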
def _sum_debit(self, period_id, journal_id=False):
if isinstance(journal_id, int):
journal_id = [journal_id]
if not journal_id:
journal_id = self.journal_ids
self.cr.execute('SELECT SUM(debit) FROM account_move_line l WHERE period_id=%s AND journal_id IN %s '+self.query_get_clause+' ', (period_id, tuple(journal_id)))
res = self.cr.fetchone()[0]
return res or 0.0
def _sum_credit(self, period_id, journal_id=False):
if isinstance(journal_id, int):
journal_id = [journal_id]
if not journal_id:
journal_id = self.journal_ids
self.cr.execute('SELECT SUM(credit) FROM account_move_line l WHERE period_id=%s AND journal_id IN %s '+self.query_get_clause+'', (period_id, tuple(journal_id)))
return self.cr.fetchone()[0] or 0.0
def get_start_date(self, form):
return pooler.get_pool(self.cr.dbname).get('account.period').browse(self.cr,self.uid,form['period_from']).name
def get_end_date(self, form):
return pooler.get_pool(self.cr.dbname).get('account.period').browse(self.cr,self.uid,form['period_to']).name
#vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
|
4b7171ae794cc2ba5a4e668708c7cf424419e081
|
tests/test_compare.py
|
tests/test_compare.py
|
import unittest
import pandas.util.testing as pdt
import recordlinkage
import numpy as np
import pandas as pd
class TestCompare(unittest.TestCase):
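    # Contract asserted below: exact() yields 1 on agreement, disagreement_value
    # (default 0) on mismatch, and missing_values when either side is NaN.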
def test_exact_two_series(self):
comp = recordlinkage.Compare()
s1 = pd.Series(['mary ann', 'bob1', 'angel1', 'bob', 'mary ann', 'john', np.nan])
s2 = pd.Series(['mary ann', 'bob2', 'angel2', 'bob', 'mary ann', 'john', np.nan])
# Missing values as 0
result = comp.exact(s1, s2, missing_values=0)
expected = pd.Series([1,0,0,1,1,1,0])
pdt.assert_series_equal(expected, result)
# Missing values as np.nan
result = comp.exact(s1, s2, missing_values=np.nan)
expected = pd.Series([1,0,0,1,1,1,np.nan])
pdt.assert_series_equal(expected, result)
# Missing values 0 and disagreement as 2
result = comp.exact(s1, s2, disagreement_value=2, missing_values=0)
expected = pd.Series([1,2,2,1,1,1,0])
pdt.assert_series_equal(expected, result)
|
Add tests for compare module
|
Add tests for compare module
|
Python
|
bsd-3-clause
|
J535D165/recordlinkage,J535D165/recordlinkage
|
Add tests for compare module
|
import unittest
import pandas.util.testing as pdt
import recordlinkage
import numpy as np
import pandas as pd
class TestCompare(unittest.TestCase):
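    # Contract asserted below: exact() yields 1 on agreement, disagreement_value
    # (default 0) on mismatch, and missing_values when either side is NaN.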
def test_exact_two_series(self):
comp = recordlinkage.Compare()
s1 = pd.Series(['mary ann', 'bob1', 'angel1', 'bob', 'mary ann', 'john', np.nan])
s2 = pd.Series(['mary ann', 'bob2', 'angel2', 'bob', 'mary ann', 'john', np.nan])
# Missing values as 0
result = comp.exact(s1, s2, missing_values=0)
expected = pd.Series([1,0,0,1,1,1,0])
pdt.assert_series_equal(expected, result)
# Missing values as np.nan
result = comp.exact(s1, s2, missing_values=np.nan)
expected = pd.Series([1,0,0,1,1,1,np.nan])
pdt.assert_series_equal(expected, result)
# Missing values 0 and disagreement as 2
result = comp.exact(s1, s2, disagreement_value=2, missing_values=0)
expected = pd.Series([1,2,2,1,1,1,0])
pdt.assert_series_equal(expected, result)
|
<commit_before><commit_msg>Add tests for compare module<commit_after>
|
import unittest
import pandas.util.testing as pdt
import recordlinkage
import numpy as np
import pandas as pd
class TestCompare(unittest.TestCase):
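    # Contract asserted below: exact() yields 1 on agreement, disagreement_value
    # (default 0) on mismatch, and missing_values when either side is NaN.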
def test_exact_two_series(self):
comp = recordlinkage.Compare()
s1 = pd.Series(['mary ann', 'bob1', 'angel1', 'bob', 'mary ann', 'john', np.nan])
s2 = pd.Series(['mary ann', 'bob2', 'angel2', 'bob', 'mary ann', 'john', np.nan])
# Missing values as 0
result = comp.exact(s1, s2, missing_values=0)
expected = pd.Series([1,0,0,1,1,1,0])
pdt.assert_series_equal(expected, result)
# Missing values as np.nan
result = comp.exact(s1, s2, missing_values=np.nan)
expected = pd.Series([1,0,0,1,1,1,np.nan])
pdt.assert_series_equal(expected, result)
# Missing values 0 and disagreement as 2
result = comp.exact(s1, s2, disagreement_value=2, missing_values=0)
expected = pd.Series([1,2,2,1,1,1,0])
pdt.assert_series_equal(expected, result)
|
Add tests for compare moduleimport unittest
import pandas.util.testing as pdt
import recordlinkage
import numpy as np
import pandas as pd
class TestCompare(unittest.TestCase):
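    # Contract asserted below: exact() yields 1 on agreement, disagreement_value
    # (default 0) on mismatch, and missing_values when either side is NaN.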
def test_exact_two_series(self):
comp = recordlinkage.Compare()
s1 = pd.Series(['mary ann', 'bob1', 'angel1', 'bob', 'mary ann', 'john', np.nan])
s2 = pd.Series(['mary ann', 'bob2', 'angel2', 'bob', 'mary ann', 'john', np.nan])
# Missing values as 0
result = comp.exact(s1, s2, missing_values=0)
expected = pd.Series([1,0,0,1,1,1,0])
pdt.assert_series_equal(expected, result)
# Missing values as np.nan
result = comp.exact(s1, s2, missing_values=np.nan)
expected = pd.Series([1,0,0,1,1,1,np.nan])
pdt.assert_series_equal(expected, result)
# Missing values 0 and disagreement as 2
result = comp.exact(s1, s2, disagreement_value=2, missing_values=0)
expected = pd.Series([1,2,2,1,1,1,0])
pdt.assert_series_equal(expected, result)
|
<commit_before><commit_msg>Add tests for compare module<commit_after>import unittest
import pandas.util.testing as pdt
import recordlinkage
import numpy as np
import pandas as pd
class TestCompare(unittest.TestCase):
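    # Contract asserted below: exact() yields 1 on agreement, disagreement_value
    # (default 0) on mismatch, and missing_values when either side is NaN.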
def test_exact_two_series(self):
comp = recordlinkage.Compare()
s1 = pd.Series(['mary ann', 'bob1', 'angel1', 'bob', 'mary ann', 'john', np.nan])
s2 = pd.Series(['mary ann', 'bob2', 'angel2', 'bob', 'mary ann', 'john', np.nan])
# Missing values as 0
result = comp.exact(s1, s2, missing_values=0)
expected = pd.Series([1,0,0,1,1,1,0])
pdt.assert_series_equal(expected, result)
# Missing values as np.nan
result = comp.exact(s1, s2, missing_values=np.nan)
expected = pd.Series([1,0,0,1,1,1,np.nan])
pdt.assert_series_equal(expected, result)
# Missing values 0 and disagreement as 2
result = comp.exact(s1, s2, disagreement_value=2, missing_values=0)
expected = pd.Series([1,2,2,1,1,1,0])
pdt.assert_series_equal(expected, result)
|