text stringlengths 4 1.02M | meta dict |
|---|---|
# Public names exported by `from <module> import *`.
# NOTE: previously this list omitted PuppetError, ExecuteRuntimeError and
# SequenceError even though they are defined (and raised) below; they are
# added here so star-imports see the full exception hierarchy.
__all__ = (
    'MageError',
    'InstallError',
    'FlagValidationError',
    'MissingRequirements',
    'PluginError',
    'ParamProcessingError',
    'ParamValidationError',
    'NetworkError',
    'ScriptRuntimeError',
    'PuppetError',
    'ExecuteRuntimeError',
    'SequenceError',
)
class MageError(Exception):
    """Base exception for the Mage installer.

    Optional keyword arguments ``stdout`` and ``stderr`` capture the output
    of a failed external command; both default to None when not supplied.
    """
    def __init__(self, *args, **kwargs):
        super(MageError, self).__init__(*args)
        # .get() already defaults to None, so no explicit default is needed.
        self.stdout = kwargs.get('stdout')
        self.stderr = kwargs.get('stderr')
class PuppetError(Exception):
    """Raised when Puppet runs into problems."""
    # NOTE(review): unlike the other exceptions here this derives from
    # Exception, not MageError -- presumably deliberate; confirm.
class MissingRequirements(MageError):
    """Signal that the minimum installation requirements are not met."""
class InstallError(MageError):
    """Generic error raised during a setup run."""
class FlagValidationError(InstallError):
    """Raised when validation of a single flag fails."""
class ParamValidationError(InstallError):
    """Raised when validation of a parameter value fails."""
class PluginError(MageError):
    """Base exception for plugin errors (see ParamProcessingError)."""
    pass
class ParamProcessingError(PluginError):
    """Raised when processing of a plugin parameter fails."""
    pass
class NetworkError(MageError):
    """Raised for Mage's network failures."""
class ScriptRuntimeError(MageError):
    """Raised when utils.ScriptRunner.execute does not finish successfully."""
class ExecuteRuntimeError(MageError):
    """Raised when a call to utils.execute fails."""
class SequenceError(MageError):
    """Raised when an error occurs during a setup sequence run."""
| {
"content_hash": "5fe10fdc10dcdb15e5d9d7d222cacd52",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 69,
"avg_line_length": 20.493333333333332,
"alnum_prop": 0.682498373454782,
"repo_name": "jiasir/mage",
"id": "1948922a59a3369ff2876c6c9f2a1d94397f990e",
"size": "1562",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "utils/exceptions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "8104"
}
],
"symlink_target": ""
} |
from __future__ import print_function, unicode_literals, division, absolute_import
import filecmp
import os
import shutil
import sys
import tempfile
import unittest
import dxpy
import dxpy_testutil as testutil
def setUpTempProjects(thing):
    """Create a temporary Azure test project and make it the current workspace.

    Saves the previous workspace id on *thing* so tearDownTempProjects can
    restore it.
    """
    thing.old_workspace_id = dxpy.WORKSPACE_ID
    new_project = dxpy.api.project_new({'name': 'azure-test-project',
                                        'region': testutil.TEST_AZURE})
    thing.proj_id = new_project['id']
    dxpy.set_workspace_id(thing.proj_id)
def tearDownTempProjects(thing):
    """Destroy the temporary project (terminating its jobs) and restore the
    workspace saved by setUpTempProjects."""
    dxpy.api.project_destroy(thing.proj_id, {'terminateJobs': True})
    dxpy.set_workspace_id(thing.old_workspace_id)
@unittest.skipUnless(testutil.TEST_AZURE, 'skipping tests for Azure regions')
class TestDXFile(unittest.TestCase):
    '''
    Creates a temporary file and stores a handle to it as
    cls.sample_file. It should not be modified by any of the tests.
    '''

    foo_str = "foo upload file to azure\n"

    @classmethod
    def setUpClass(cls):
        cls.sample_file = tempfile.NamedTemporaryFile(delete=False)
        # NamedTemporaryFile defaults to binary mode ('w+b'); encode the
        # text payload so this works on Python 3 as well as Python 2
        # (foo_str is unicode because of the unicode_literals import).
        cls.sample_file.write(cls.foo_str.encode("utf-8"))
        cls.sample_file.close()

    @classmethod
    def tearDownClass(cls):
        os.remove(cls.sample_file.name)

    def setUp(self):
        setUpTempProjects(self)
        self.tempdir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.tempdir)
        tearDownTempProjects(self)

    def test_upload_download_files_dxfile(self):
        """Round-trip a small file through upload and download."""
        myfile = dxpy.upload_local_file(self.sample_file.name, project=self.proj_id, wait_on_close=True)
        self.assertTrue(myfile.closed())
        self.assertEqual(myfile.describe()["name"],
                         os.path.basename(self.sample_file.name))
        dxpy.download_dxfile(myfile, os.path.join(self.tempdir, 'downloaded'))
        self.assertTrue(filecmp.cmp(self.sample_file.name, os.path.join(self.tempdir, 'downloaded')))

    def test_upload_download_large_file_size_dxfile(self):
        """Upload just over 100 MB and check the part split."""
        test_file_name = os.path.join(self.tempdir, 'large_file')
        hundredMB = 1024 * 1024 * 100
        # Binary mode on both ends: /dev/urandom output is not decodable text.
        with open(test_file_name, 'wb') as test_file:
            with open("/dev/urandom", 'rb') as random_input:
                test_file.write(random_input.read(hundredMB + 4002080))
        myfile = dxpy.upload_local_file(test_file_name, project=self.proj_id,
                                        write_buffer_size=hundredMB, wait_on_close=True)
        self.assertTrue(myfile.closed())
        # Check file was split up into parts appropriately
        # 104857600 (or 100 MB) is the maximum size for a single part
        parts = myfile.describe(fields={"parts": True})['parts']
        self.assertEqual(parts['1']['size'], hundredMB)  # assertEquals is deprecated
        self.assertEqual(parts['2']['size'], 4002080)
        self.assertEqual(myfile.describe()["name"], 'large_file')
        downloaded_again = os.path.join(self.tempdir, 'large_file_2')
        dxpy.download_dxfile(myfile, downloaded_again)
        self.assertTrue(filecmp.cmp(test_file_name, downloaded_again))

    def test_upload_download_large_file_small_bufsize_dxfile(self):
        """Check part sizing for write buffers below/above the expected part size."""
        num_parts = 50000
        common_args = dict(mode='w', project=self.proj_id)
        with dxpy.new_dxfile(write_buffer_size=280000, expected_file_size=300000 * num_parts, **common_args) as myfile:
            myfile.write("0" * 700000)
        # Exiting the with-block starts closing; block until fully closed.
        myfile.close(block=True)
        parts = myfile.describe(fields={"parts": True})['parts']
        self.assertEqual(parts['1']['size'], 300000)
        with dxpy.new_dxfile(write_buffer_size=320000, expected_file_size=300000 * num_parts, **common_args) as myfile:
            myfile.write("0" * 700000)
        myfile.close(block=True)
        parts = myfile.describe(fields={"parts": True})['parts']
        self.assertEqual(parts['1']['size'], 320000)
if __name__ == '__main__':
    if dxpy.AUTH_HELPER is None:
        # sys.exit() takes a single argument; the previous two-argument
        # call raised TypeError instead of printing the intended message.
        # Passing the string prints it to stderr and exits with status 1.
        sys.exit('Error: Need to be logged in to run these tests')
    unittest.main()
| {
"content_hash": "0f665e14f9e6e058087f442ed76d4167",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 117,
"avg_line_length": 37.24761904761905,
"alnum_prop": 0.6578880081820506,
"repo_name": "dnanexus/dx-toolkit",
"id": "7a63cacd9254e62cbf971386fc38552a0b466135",
"size": "4634",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/python/test/test_dxabs.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3198"
},
{
"name": "C",
"bytes": "9503"
},
{
"name": "C++",
"bytes": "1906095"
},
{
"name": "CMake",
"bytes": "25521"
},
{
"name": "Java",
"bytes": "2569488"
},
{
"name": "Makefile",
"bytes": "42074"
},
{
"name": "NSIS",
"bytes": "17861"
},
{
"name": "Nextflow",
"bytes": "955"
},
{
"name": "Perl",
"bytes": "55622"
},
{
"name": "PowerShell",
"bytes": "1442"
},
{
"name": "Python",
"bytes": "2606345"
},
{
"name": "R",
"bytes": "543112"
},
{
"name": "Ruby",
"bytes": "95466"
},
{
"name": "Shell",
"bytes": "79900"
}
],
"symlink_target": ""
} |
from gwt.gmaps.Map import (
Map,
MapCanvasProjection,
MapOptions,
MapPanes,
MapTypeControlOptions,
MapTypeControlStyle,
MapTypeId,
NavigationControlOptions,
NavigationControlStyle,
ScaleControlOptions,
ScaleControlStyle,
createListenerMethods,
dictToJs,
)
| {
"content_hash": "0e4d0d86bcc34226646f1d595e5e9b25",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 29,
"avg_line_length": 20.533333333333335,
"alnum_prop": 0.7175324675324676,
"repo_name": "anandology/pyjamas",
"id": "846daf44b9c7c5349cc0552cda37c472c4791100",
"size": "308",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "library/pyjamas/gmaps/Map.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "325172"
},
{
"name": "PHP",
"bytes": "121841"
},
{
"name": "Python",
"bytes": "6383764"
},
{
"name": "Shell",
"bytes": "19448"
}
],
"symlink_target": ""
} |
import requests
import unittest
import json
protocol = "http"
# Alternative deployed host: "tactile-petal-92303.appspot.com"
host = "localhost:8080"

def genUrl(URI):
    """Build the full URL for *URI* on the configured protocol/host."""
    return "{0}://{1}/{2}".format(protocol, host, URI)
class CreateUserIDTest(unittest.TestCase):
    """Create User test case"""

    # preparing to test
    def setUp(self):
        """ Setting up for the test """
        # Start from a clean slate, then create one user whose ID the
        # tests below inspect via self.r.text.
        requests.delete(genUrl("user/"))
        self.r = requests.post(genUrl("user/"))

    # ending the test
    def tearDown(self):
        """Cleaning up after the test"""

    # test length
    def testLength5(self):
        """Test length 5"""
        self.assertEqual(len(self.r.text), 5, "User ID should be length 5")

    # test lowercase
    def testAllLower(self):
        """Test all lower"""
        lowercase_letters = ''.join(c for c in self.r.text if c.islower())
        self.assertEqual(len(lowercase_letters), 5, "All IDs should be lowercase")

    #test alphabets
    def testAllCharacters(self):
        """Test that every character of the ID is an ASCII letter."""
        import string
        alphabets = ''.join(c for c in self.r.text if c in string.ascii_letters)
        self.assertEqual(len(alphabets), 5, "All IDs should be alphabets")

    #test no repeat
    def testNoRepeat(self):
        """Test that freshly created user IDs do not repeat."""
        IDs = set()
        IDs.add(self.r.text)
        # NOTE(review): range(1) creates only one additional user, so this
        # checks a sample of two IDs -- presumably a larger loop count was
        # intended; confirm the desired sample size.
        for n in range(1):
            r = requests.post(genUrl("user/"))
            self.assertTrue(r.text not in IDs, "There is a repeat user ID")
            IDs.add(r.text)
class UserTest(unittest.TestCase):
    """Get User case"""

    # preparing to test
    def setUp(self):
        """ Setting up for the test """
        requests.delete(genUrl("user/"))

    # ending the test
    def tearDown(self):
        """Cleaning up after the test"""

    def _assert_user_info(self, r, name="", xp=0, level=1, gold=0):
        """Decode *r* as JSON and check the four user fields.

        Factored out of three tests that repeated this block verbatim.
        """
        try:
            obj = r.json()
            self.assertIn("Name", obj)
            self.assertIn("XP", obj)
            self.assertIn("Level", obj)
            self.assertIn("Gold", obj)
            self.assertEqual(obj["Name"], name)
            self.assertEqual(obj["XP"], xp)
            self.assertEqual(obj["Level"], level)
            self.assertEqual(obj["Gold"], gold)
        except ValueError:
            self.assertTrue(False, "No JSON object could be decoded")

    # test existing
    def testCreateUser(self):
        """Test Existing ID"""
        r = requests.post(genUrl("user/"))
        userID = r.text
        r = requests.get(genUrl("user/%s" % userID))
        self.assertEqual(r.status_code, 200, "There should be a user created")

    def testDeleteUser(self):
        """Deleting a user makes a subsequent GET return 404."""
        r = requests.post(genUrl("user/"))
        userID = r.text
        r = requests.delete(genUrl("user/%s" % userID))
        self.assertEqual(r.status_code, 200, "Status code of delete user should be 200")
        r = requests.get(genUrl("user/%s" % userID))
        self.assertEqual(r.status_code, 404)

    def testGetUserInfo(self):
        """ Test if the information from server is correct"""
        r = requests.post(genUrl("user/"))
        userID = r.text
        r = requests.get(genUrl("user/%s" % userID))
        try:
            r.json()
        except ValueError:
            self.assertTrue(False, "No JSON object could be decoded")

    def testGetDefaultUserInfo(self):
        """A new user starts with empty name, 0 XP, level 1 and 0 gold."""
        r = requests.post(genUrl("user/"))
        userID = r.text
        r = requests.get(genUrl("user/%s" % userID))
        self._assert_user_info(r)

    def testGetAlteredUserInfo(self):
        """Setting gold and XP is reflected in the user info (level derives from XP)."""
        r = requests.post(genUrl("user/"))
        userID = r.text
        r = requests.put(genUrl("user/gold/%s" % userID), data="100")
        r = requests.put(genUrl("user/XP/%s" % userID), data="100")
        r = requests.get(genUrl("user/%s" % userID))
        self._assert_user_info(r, xp=100, level=2, gold=100)

    def testDelete1User(self):
        """ Create 2 users, delete 1, check if it is properly deleted"""
        r = requests.post(genUrl("user/"))
        userID1 = r.text
        r = requests.post(genUrl("user/"))
        userID2 = r.text
        r = requests.delete(genUrl("user/%s" % userID1))
        r = requests.get(genUrl("user/%s" % userID1))
        self.assertEqual(r.status_code, 404)
        # The surviving user must still have its default info.
        r = requests.get(genUrl("user/%s" % userID2))
        self._assert_user_info(r)

    def testDeleteAllUsers(self):
        """Deleting the collection removes every user."""
        r = requests.post(genUrl("user/"))
        userID1 = r.text
        r = requests.post(genUrl("user/"))
        userID2 = r.text
        r = requests.delete(genUrl("user/"))
        self.assertEqual(r.status_code, 200, "Status code of delete all users should be 200")
        r = requests.get(genUrl("user/%s" % userID1))
        self.assertEqual(r.status_code, 404)
        r = requests.get(genUrl("user/%s" % userID2))
        self.assertEqual(r.status_code, 404)
class UserNameTest(unittest.TestCase):
    """Get User Name case"""

    # preparing to test
    def setUp(self):
        """ Setting up for the test """
        requests.delete(genUrl("user/"))
        r = requests.post(genUrl("user/"))
        self.userID = r.text

    # ending the test
    def tearDown(self):
        """Cleaning up after the test"""

    def getUserName(self, userID=None):
        """Return the 'Name' field of *userID* (default: the setUp user)."""
        if not userID:
            userID = self.userID
        r = requests.get(genUrl("user/%s" % userID))
        try:
            obj = r.json()
            self.assertIn("Name", obj, "There should be a key \'Name\'")
            return obj["Name"]
        except ValueError:
            self.assertTrue(False, "No JSON object could be decoded")

    def _set_name(self, userName):
        """POST *userName* for the current user and return the response.

        Factored out of the seven tests that repeated this call.
        """
        return requests.post(genUrl("user/name/%s" % self.userID), data=userName)

    # test existing
    def testExistingID(self):
        """Test Existing ID with no user name set"""
        userName = self.getUserName()
        self.assertEqual(userName, "", "Username should be blank by default")

    def testNonExistingID(self):
        """GET on a non-existent user ID returns 404."""
        userID = "aaaaa"
        r = requests.get(genUrl("user/%s" % userID))
        self.assertEqual(r.status_code, 404, "Non existing user")

    def testSetName(self):
        """A valid name can be set and read back."""
        userName = "wangchuck"
        r = self._set_name(userName)
        self.assertEqual(r.status_code, 200, "Set user name should be successful")
        self.assertEqual(self.getUserName(), userName, "User name retrieved should be the same as the one set")

    def testSetEmptyName(self):
        """Setting an empty name is rejected with 400 and leaves it blank."""
        userName = ""
        r = self._set_name(userName)
        self.assertEqual(r.status_code, 400, "User name should not be the same")
        self.assertEqual(self.getUserName(), userName, "User name retrieved should be the same as the one set")

    def testSetEmptyThenValidName(self):
        """A rejected empty name does not block setting a valid name later."""
        userName = ""
        r = self._set_name(userName)
        self.assertEqual(r.status_code, 400, "User name should not be the same")
        self.assertEqual(self.getUserName(), userName, "User name retrieved should be the same as the one set")
        userName = "wangchuck"
        r = self._set_name(userName)
        self.assertEqual(r.status_code, 200, "Set user name should be successful")
        self.assertEqual(self.getUserName(), userName, "User name retrieved should be the same as the one set")

    def testNameWithSpace(self):
        """Names containing spaces are accepted."""
        userName = "Mother of Dragons"
        r = self._set_name(userName)
        self.assertEqual(r.status_code, 200, "Set user name should be successful")
        self.assertEqual(self.getUserName(), userName, "User name retrieved should be the same as the one set")

    def testResetName(self):
        """ Test if the name can be set a 2nd time """
        userName1 = "wangchuck"
        r = self._set_name(userName1)
        self.assertEqual(r.status_code, 200, "Set user name should be successful")
        self.assertEqual(self.getUserName(), userName1, "User name retrieved should be the same as the one set")
        userName2 = "Waterloo"
        r = self._set_name(userName2)
        self.assertEqual(r.status_code, 405, "Set user name should be unsuccessful 405")
        self.assertEqual(self.getUserName(), userName1, "User name retrieved should be the same as the first one set")

    def testInvalidName(self):
        """Names made of special characters are rejected with 400."""
        userName = "@$%^"
        r = self._set_name(userName)
        self.assertEqual(r.status_code, 400, "Should be a bad request, bad user name")
        self.assertEqual(self.getUserName(), "", "User name retrieved should be empty")
class XPTest(unittest.TestCase):
    """Get User XP case"""

    # preparing to test
    def setUp(self):
        """ Setting up for the test """
        requests.delete(genUrl("user/"))
        r = requests.post(genUrl("user/"))
        self.userID = r.text

    # ending the test
    def tearDown(self):
        """Cleaning up after the test"""

    def _put_xp(self, value, userID=None):
        """PUT *value* as the XP of *userID* (default: the setUp user)."""
        return requests.put(genUrl("user/XP/%s" % (userID or self.userID)), data=value)

    def _assert_xp(self, expected, msg="XP should be set"):
        """GET the setUp user's XP and check both the value and the status."""
        r = requests.get(genUrl("user/XP/%s" % self.userID))
        self.assertEqual(r.text, expected, msg)
        self.assertEqual(r.status_code, 200, "Status code should be 200")

    # test existing
    def testExistingID(self):
        """Test Existing ID to see if the call passes"""
        r = requests.get(genUrl("user/XP/%s" % self.userID))
        self.assertEqual(r.status_code, 200, "Status code should be 200")

    def testNonExistingID(self):
        """ to see if the call fails """
        r = requests.get(genUrl("user/XP/%s" % "aaaaa"))
        self.assertEqual(r.status_code, 404, "Status code should be 404 not found")
        r = self._put_xp("300", userID="aaaaa")
        self.assertEqual(r.status_code, 404, "Status code should be 404 not found")

    def testSetValidXP(self):
        """A positive XP value can be set and read back."""
        r = self._put_xp("300")
        self.assertEqual(r.status_code, 200, "Status code should be 200")
        self._assert_xp("300")

    def testSetHigherXP(self):
        """XP may be raised to a higher value."""
        r = self._put_xp("300")
        self.assertEqual(r.status_code, 200, "Status code should be 200")
        self._assert_xp("300")
        r = self._put_xp("400")
        # Message fixed: raising XP is allowed, so 200 is expected here
        # (the old text was copy-pasted from the lower-XP test).
        self.assertEqual(r.status_code, 200, "Status code should be 200")
        self._assert_xp("400")

    def testSetLowerXP(self):
        """Lowering XP is rejected with 400 and the old value is kept."""
        r = self._put_xp("300")
        self.assertEqual(r.status_code, 200, "Status code should be 200")
        self._assert_xp("300")
        r = self._put_xp("200")
        self.assertEqual(r.status_code, 400, "Status code should be 400, XP won't go lower")
        self._assert_xp("300")

    def testGetDefaultXP(self):
        """XP starts at 0."""
        self._assert_xp("0", "XP should be 0 by default")

    def testSetNegativeXP(self):
        """Negative XP is rejected with 400."""
        r = self._put_xp("-300")
        self.assertEqual(r.status_code, 400, "Status code should be 400, no negative values")
        self._assert_xp("0", "XP should be 0 by default")

    def testSetAlphabetXP(self):
        """Non-numeric XP is rejected with 400."""
        r = self._put_xp("abc")
        self.assertEqual(r.status_code, 400, "Status code should be 400, no alphabet values")
        self._assert_xp("0", "XP should be 0 by default")
class GoldTest(unittest.TestCase):
    """Gold test case"""

    # preparing to test
    def setUp(self):
        """ Setting up for the test """
        requests.delete(genUrl("user/"))
        r = requests.post(genUrl("user/"))
        self.userID = r.text

    # ending the test
    def tearDown(self):
        """Cleaning up after the test"""

    def _put_gold(self, value, userID=None):
        """PUT *value* as the gold of *userID* (default: the setUp user)."""
        return requests.put(genUrl("user/gold/%s" % (userID or self.userID)), data=value)

    def _assert_gold(self, expected, msg="Gold value should be set"):
        """GET the setUp user's gold and check both the value and the status."""
        r = requests.get(genUrl("user/gold/%s" % self.userID))
        self.assertEqual(r.text, expected, msg)
        self.assertEqual(r.status_code, 200, "Status code should be 200")

    # test existing
    def testExistingID(self):
        """Test Existing ID to see if call passes"""
        r = requests.get(genUrl("user/gold/%s" % self.userID))
        self.assertEqual(r.status_code, 200, "Status code should be 200")

    def testNonExistingID(self):
        """ Test non existing ID to see if call fails """
        r = requests.get(genUrl("user/gold/%s" % "aaaaa"))
        self.assertEqual(r.status_code, 404, "Status code should be 404 not found")
        r = self._put_gold("300", userID="aaaaa")
        self.assertEqual(r.status_code, 404, "Status code should be 404 not found")

    def testSetValidGold(self):
        """A positive gold value can be set and read back."""
        r = self._put_gold("300")
        self.assertEqual(r.status_code, 200, "Status code should be 200")
        self._assert_gold("300")

    def testSetMoreOrLessGold(self):
        """Unlike XP, gold may freely go up or down."""
        for goldVal in ("300", "400", "100"):
            r = self._put_gold(goldVal)
            self.assertEqual(r.status_code, 200, "Status code should be 200")
            self._assert_gold(goldVal)

    def testGetDefaultGold(self):
        """Gold starts at 0."""
        self._assert_gold("0", "Gold should be 0 by default")

    def testSetNegativeGold(self):
        """Negative gold is rejected with 400."""
        r = self._put_gold("-300")
        self.assertEqual(r.status_code, 400, "Status code should be 400, no negative gold")
        self._assert_gold("0", "Gold value should be 0 by default")

    def testSetAlphabetGold(self):
        """Non-numeric gold is rejected with 400."""
        r = self._put_gold("jiu")
        self.assertEqual(r.status_code, 400, "Status code should be 400, no alphabet gold")
        self._assert_gold("0", "Gold value should be 0 by default")
class LevelTest(unittest.TestCase):
    """Get User level case"""

    # preparing to test
    def setUp(self):
        """ Setting up for the test """
        requests.delete(genUrl("user/"))
        r = requests.post(genUrl("user/"))
        self.userID = r.text

    # ending the test
    def tearDown(self):
        """Cleaning up after the test"""

    def _put_xp(self, value):
        """PUT *value* as the setUp user's XP; the level is derived from XP."""
        return requests.put(genUrl("user/XP/%s" % self.userID), data=value)

    def _assert_level(self, expected, msg="Level should be set"):
        """GET the setUp user's level and check both the value and the status."""
        r = requests.get(genUrl("user/level/%s" % self.userID))
        self.assertEqual(r.text, expected, msg)
        self.assertEqual(r.status_code, 200, "Status code should be 200")

    # test existing
    def testExistingID(self):
        """Test Existing ID"""
        r = requests.get(genUrl("user/level/%s" % self.userID))
        self.assertEqual(r.status_code, 200, "Status code should be 200")

    def testNonExistingID(self):
        """ to see if the call fails """
        r = requests.get(genUrl("user/level/%s" % "aaaaa"))
        self.assertEqual(r.status_code, 404, "Status code should be 404 not found")
        # Level is read-only: PUT is not allowed even for existing users.
        XPVal = "300"
        r = requests.put(genUrl("user/level/%s" % "aaaaa"), data=XPVal)
        self.assertEqual(r.status_code, 405, "Status code should be 405 not allowed")

    def testSetValidLevel(self):
        """Level follows from the XP that was set."""
        r = self._put_xp("300")
        self.assertEqual(r.status_code, 200, "Status code should be 200")
        self._assert_level("4")

    def testGetDefaultLevel(self):
        """Level starts at 1."""
        self._assert_level("1", "Level should be default 1")

    def testSetLowerLevels(self):
        """Lowering XP is rejected, so the level cannot drop."""
        r = self._put_xp("300")
        self.assertEqual(r.status_code, 200, "Status code should be 200")
        self._assert_level("4")
        r = self._put_xp("200")
        self.assertEqual(r.status_code, 400, "Status code should be 400, level set should not be lower")
        self._assert_level("4")

    def testSetHigherLevels(self):
        """Raising XP raises the level accordingly."""
        r = self._put_xp("300")
        self.assertEqual(r.status_code, 200, "Status code should be 200")
        self._assert_level("4")
        r = self._put_xp("400")
        self.assertEqual(r.status_code, 200, "Status code should be 200")
        self._assert_level("5", "Level should be set correctly")
def runTestsFromFile(filename):
    """Run the assignment suite against every host listed in *filename*.

    Each CSV line is "student,host"; a student field starting with '#'
    skips that entry. Results are written to Result_<student>.txt.
    Relies on the module-global assignment1Suite built in __main__.
    """
    # genUrl() reads the module-level `host`; without this declaration the
    # assignment below only rebound a local and never affected the tests.
    global host
    with open(filename, 'r') as f:
        submission = f.readlines()
    submission = [line.strip().split(",") for line in submission]
    for student in submission:
        host = student[1]
        if student[0][0] == '#':
            print("Skipping %s" % (student[0]))
            continue
        print("Testing %s %s" % (student[0], student[1]))
        with open("Result_%s.txt" % student[0], "w") as f:
            assignment1Runner = unittest.TextTestRunner(stream=f, descriptions=False, verbosity=2)
            testResult = assignment1Runner.run(assignment1Suite)
            f.write(str(testResult) + "\n\n")
            f.write("Tests Run = %d\n" % (testResult.testsRun))
            if testResult.errors:
                f.write("Errors\n")
                for error in testResult.errors:
                    # TestCase.id is a method; it must be called to get the
                    # test name (the old code formatted the bound method).
                    f.write("%s\n%s\n" % (error[0].id(), error[1]))
            if testResult.failures:
                f.write("Failures\n")
                for failure in testResult.failures:
                    f.write("%s\n%s\n" % (failure[0].id(), failure[1]))
            passedTests = testResult.testsRun - len(testResult.failures) - len(testResult.errors)
            f.write("Total Tests = %d, Passed Test = %d, Failed Test = %d, Errors = %d" % (testResult.testsRun, passedTests, len(testResult.failures), len(testResult.errors)))
if __name__ == "__main__":
    # creating a new test suite
    assignment1Suite = unittest.TestSuite()
    # adding a test case per resource under test
    assignment1Suite.addTest(unittest.makeSuite(CreateUserIDTest))
    assignment1Suite.addTest(unittest.makeSuite(UserTest))
    assignment1Suite.addTest(unittest.makeSuite(UserNameTest))
    assignment1Suite.addTest(unittest.makeSuite(XPTest))
    assignment1Suite.addTest(unittest.makeSuite(GoldTest))
    assignment1Suite.addTest(unittest.makeSuite(LevelTest))
    nameList = "submission.csv"
    import os.path
    # Batch-grade every host listed in submission.csv when the file exists;
    # otherwise fall back to the normal unittest command-line runner.
    if os.path.isfile(nameList):
        runTestsFromFile(nameList)
    else:
        unittest.main()
"content_hash": "d3601f8714491d4b949fc343787324e5",
"timestamp": "",
"source": "github",
"line_count": 577,
"max_line_length": 173,
"avg_line_length": 39.70883882149047,
"alnum_prop": 0.6051414106145251,
"repo_name": "SIDM-GDT/laughing-sansa",
"id": "f72984b4463dbea778d747a2fe0ae2e029e5fd52",
"size": "22912",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "DM2341Assignment1Tester.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "24110"
}
],
"symlink_target": ""
} |
import collections
import collections.abc
# collections.MutableMapping was removed in Python 3.10; the ABC lives in
# collections.abc (available since Python 3.3).
class SymbolTable(collections.abc.MutableMapping):
    """A scoped symbol table: a stack of dicts, innermost scope last.

    Lookups search from the innermost scope outward; stores always go to
    the innermost scope. NOTE: missing symbols yield None instead of
    raising KeyError (a deliberate deviation from the Mapping contract
    that callers rely on); as a consequence the inherited ``in`` operator
    reports True for every key -- use find() for membership tests.
    """

    def __init__(self):
        """Initialize the symbol table to a stack with global scope"""
        self.stack = []
        self.push()
        return

    def push(self):
        """Pushes and returns an empty dictionary onto the symbol table"""
        d = {}
        self.stack.append(d)
        return d

    def pop(self):
        """Removes and returns the top-level symbol dictionary"""
        v = self.stack[-1]
        del self.stack[-1]
        return v

    def __len__(self):
        """Return total number of elements in the symbol table"""
        return sum(map(len, self.stack))

    def __iter__(self):
        """Yield every symbol, innermost scope first."""
        for d in reversed(self.stack):
            for k in d:
                yield k
        return

    def top(self, symbol):
        """Look in the top level of the symbol table for the symbol"""
        return self.stack[-1].get(symbol)

    def __getitem__(self, symbol):
        """Return the innermost value bound to symbol, or None if absent.

        Values that are themselves None are indistinguishable from
        "not bound" and are skipped during the outward search.
        """
        for d in reversed(self.stack):
            v = d.get(symbol)
            if v is not None:
                return v
        return None

    def __setitem__(self, symbol, value):
        """Bind symbol to value in the innermost scope."""
        self.stack[-1][symbol] = value
        return

    def __delitem__(self, symbol):
        """Delete and return the innermost binding of symbol (None if absent)."""
        for d in reversed(self.stack):
            v = d.get(symbol)
            if v is not None:
                item = v
                del d[symbol]
                return item
        return None

    def find(self, symbol):
        """Return the stack index (0 = global) holding symbol, or -1."""
        for i in reversed(range(self.num_levels)):
            if symbol in self.stack[i]:
                return i
        return -1

    @property
    def functions(self):
        """Returns a dict of the functions that are in the global scope"""
        return self.stack[0]

    @property
    def num_levels(self):
        """Returns the number of levels of the symbol table"""
        return len(self.stack)
| {
"content_hash": "729337e25c53971477f2d6e7e7495f61",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 82,
"avg_line_length": 28.723684210526315,
"alnum_prop": 0.5478699038021072,
"repo_name": "patmiller/SAP",
"id": "a887adf86027a37665d29fa34faf4cad391809a4",
"size": "2183",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sap/symbol_table.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "699"
},
{
"name": "C++",
"bytes": "112451"
},
{
"name": "Python",
"bytes": "135383"
},
{
"name": "Shell",
"bytes": "323"
}
],
"symlink_target": ""
} |
''' Python DB API 2.0 driver compliance unit test suite.
This software is Public Domain and may be used without restrictions.
"Now we have booze and barflies entering the discussion, plus rumours of
DBAs on drugs... and I won't tell you what flashes through my mind each
time I read the subject line with 'Anal Compliance' in it. All around
this is turning out to be a thoroughly unwholesome unit test."
-- Ian Bicking
'''
__version__ = '1.14.3'
import unittest
import time
import sys
# sys.version is a free-form string, so comparing its first character is
# fragile (it misidentifies Python 10+ and any future two-digit major).
# sys.version_info compares numerically and is the supported way.
if sys.version_info[0] >= 3:  # python 3.x
    _BaseException = Exception

    def _failUnless(self, expr, msg=None):
        """Portability shim: assertTrue replacement usable on both majors."""
        self.assertTrue(expr, msg)
else:  # python 2.x
    from exceptions import StandardError as _BaseException

    def _failUnless(self, expr, msg=None):
        self.failUnless(expr, msg)  ## deprecated since Python 2.6
def str2bytes(sval):
    """Return *sval* as latin-1 encoded bytes on both Python majors."""
    is_py2_str = sys.version_info < (3, 0) and isinstance(sval, str)
    if is_py2_str:
        sval = sval.decode("latin1")
    return sval.encode("latin1")  # python 3 make unicode into bytes
class DatabaseAPI20Test(unittest.TestCase):
''' Test a database self.driver for DB API 2.0 compatibility.
This implementation tests Gadfly, but the TestCase
is structured so that other self.drivers can subclass this
    test case to ensure compliance with the DB-API. It is
expected that this TestCase may be expanded in the future
if ambiguities or edge conditions are discovered.
The 'Optional Extensions' are not yet being tested.
self.drivers should subclass this test, overriding setUp, tearDown,
self.driver, connect_args and connect_kw_args. Class specification
should be as follows:
import dbapi20
class mytest(dbapi20.DatabaseAPI20Test):
[...]
Don't 'import DatabaseAPI20Test from dbapi20', or you will
confuse the unit tester - just 'import dbapi20'.
'''
# The self.driver module. This should be the module where the 'connect'
# method is to be found
driver = None
connect_args = () # List of arguments to pass to connect
connect_kw_args = {} # Keyword arguments for connect
table_prefix = 'dbapi20test_' # If you need to specify a prefix for tables
ddl1 = 'create table %sbooze (name varchar(20))' % table_prefix
ddl2 = 'create table %sbarflys (name varchar(20), drink varchar(30))' % table_prefix
xddl1 = 'drop table %sbooze' % table_prefix
xddl2 = 'drop table %sbarflys' % table_prefix
insert = 'insert'
lowerfunc = 'lower' # Name of stored procedure to convert string->lowercase
# Some drivers may need to override these helpers, for example adding
# a 'commit' after the execute.
    def executeDDL1(self,cursor):
        """Execute ddl1 (CREATE TABLE for the 'booze' table) on *cursor*."""
        cursor.execute(self.ddl1)
    def executeDDL2(self,cursor):
        """Execute ddl2 (CREATE TABLE for the 'barflys' table) on *cursor*."""
        cursor.execute(self.ddl2)
    def setUp(self):
        ''' self.drivers should override this method to perform required setup
        if any is necessary, such as creating the database.
        '''
        # Intentionally a no-op in this base class.
        pass
    def tearDown(self):
        ''' self.drivers should override this method to perform required cleanup
            if any is necessary, such as deleting the test database.
            The default drops the tables that may be created.
        '''
        try:
            con = self._connect()
            try:
                cur = con.cursor()
                for ddl in (self.xddl1,self.xddl2):
                    try:
                        cur.execute(ddl)
                        con.commit()
                    except self.driver.Error:
                        # Assume table didn't exist. Other tests will check if
                        # execute is busted.
                        pass
            finally:
                con.close()
        # _BaseException is a module-level compatibility alias defined
        # earlier in this file — presumably Exception/BaseException
        # depending on the Python version; confirm against the file head.
        except _BaseException:
            pass
def _connect(self):
try:
r = self.driver.connect(
*self.connect_args,**self.connect_kw_args
)
except AttributeError:
self.fail("No connect method found in self.driver module")
return r
    def test_connect(self):
        # Smoke test: a connection can be opened and closed.
        con = self._connect()
        con.close()
    def test_apilevel(self):
        # PEP 249 requires a module-level apilevel string equal to '2.0'.
        try:
            # Must exist
            apilevel = self.driver.apilevel
            # Must equal 2.0
            self.assertEqual(apilevel,'2.0')
        except AttributeError:
            self.fail("Driver doesn't define apilevel")
    def test_threadsafety(self):
        # PEP 249: threadsafety must be an int in 0..3.
        try:
            # Must exist
            threadsafety = self.driver.threadsafety
            # Must be a valid value
            _failUnless(self, threadsafety in (0,1,2,3))
        except AttributeError:
            self.fail("Driver doesn't define threadsafety")
    def test_paramstyle(self):
        # PEP 249: paramstyle must be one of the five defined styles.
        try:
            # Must exist
            paramstyle = self.driver.paramstyle
            # Must be a valid value
            _failUnless(self, paramstyle in (
                'qmark','numeric','named','format','pyformat'
                ))
        except AttributeError:
            self.fail("Driver doesn't define paramstyle")
    def test_Exceptions(self):
        # Make sure required exceptions exist, and are in the
        # defined hierarchy.
        if sys.version[0] == '3': #under Python 3 StandardError no longer exists
            self.assertTrue(issubclass(self.driver.Warning,Exception))
            self.assertTrue(issubclass(self.driver.Error,Exception))
        else:
            self.failUnless(issubclass(self.driver.Warning,StandardError))
            self.failUnless(issubclass(self.driver.Error,StandardError))

        # All concrete exception classes must derive from Error (PEP 249).
        _failUnless(self,
            issubclass(self.driver.InterfaceError,self.driver.Error)
            )
        _failUnless(self,
            issubclass(self.driver.DatabaseError,self.driver.Error)
            )
        _failUnless(self,
            issubclass(self.driver.OperationalError,self.driver.Error)
            )
        _failUnless(self,
            issubclass(self.driver.IntegrityError,self.driver.Error)
            )
        _failUnless(self,
            issubclass(self.driver.InternalError,self.driver.Error)
            )
        _failUnless(self,
            issubclass(self.driver.ProgrammingError,self.driver.Error)
            )
        _failUnless(self,
            issubclass(self.driver.NotSupportedError,self.driver.Error)
            )
    def test_ExceptionsAsConnectionAttributes(self):
        # OPTIONAL EXTENSION
        # Test for the optional DB API 2.0 extension, where the exceptions
        # are exposed as attributes on the Connection object
        # I figure this optional extension will be implemented by any
        # driver author who is using this test suite, so it is enabled
        # by default.
        con = self._connect()
        drv = self.driver
        # Each connection attribute must be the *same object* as the
        # module-level exception, hence identity (is) checks.
        _failUnless(self,con.Warning is drv.Warning)
        _failUnless(self,con.Error is drv.Error)
        _failUnless(self,con.InterfaceError is drv.InterfaceError)
        _failUnless(self,con.DatabaseError is drv.DatabaseError)
        _failUnless(self,con.OperationalError is drv.OperationalError)
        _failUnless(self,con.IntegrityError is drv.IntegrityError)
        _failUnless(self,con.InternalError is drv.InternalError)
        _failUnless(self,con.ProgrammingError is drv.ProgrammingError)
        _failUnless(self,con.NotSupportedError is drv.NotSupportedError)
    def test_commit(self):
        con = self._connect()
        try:
            # Commit must work, even if it doesn't do anything
            con.commit()
        finally:
            con.close()
    def test_rollback(self):
        con = self._connect()
        # If rollback is defined, it should either work or throw
        # the documented exception
        if hasattr(con,'rollback'):
            try:
                con.rollback()
            except self.driver.NotSupportedError:
                pass
    def test_cursor(self):
        # Smoke test: a cursor can be created from a live connection.
        con = self._connect()
        try:
            cur = con.cursor()
        finally:
            con.close()
    def test_cursor_isolation(self):
        con = self._connect()
        try:
            # Make sure cursors created from the same connection have
            # the documented transaction isolation level
            cur1 = con.cursor()
            cur2 = con.cursor()
            self.executeDDL1(cur1)
            # A row inserted via cur1 must be visible to cur2 (same
            # connection => same transaction per PEP 249).
            cur1.execute("%s into %sbooze values ('Victoria Bitter')" % (
                self.insert, self.table_prefix
                ))
            cur2.execute("select name from %sbooze" % self.table_prefix)
            booze = cur2.fetchall()
            self.assertEqual(len(booze),1)
            self.assertEqual(len(booze[0]),1)
            self.assertEqual(booze[0][0],'Victoria Bitter')
        finally:
            con.close()
    def test_description(self):
        # cursor.description: None after DDL, a list of 7-tuples after a
        # select, and reset back to None by the next DDL statement.
        con = self._connect()
        try:
            cur = con.cursor()
            self.executeDDL1(cur)
            self.assertEqual(cur.description,None,
                'cursor.description should be none after executing a '
                'statement that can return no rows (such as DDL)'
                )
            cur.execute('select name from %sbooze' % self.table_prefix)
            self.assertEqual(len(cur.description),1,
                'cursor.description describes too many columns'
                )
            self.assertEqual(len(cur.description[0]),7,
                'cursor.description[x] tuples must have 7 elements'
                )
            self.assertEqual(cur.description[0][0].lower(),'name',
                'cursor.description[x][0] must return column name'
                )
            self.assertEqual(cur.description[0][1],self.driver.STRING,
                'cursor.description[x][1] must return column type. Got %r'
                    % cur.description[0][1]
                )

            # Make sure self.description gets reset
            self.executeDDL2(cur)
            self.assertEqual(cur.description,None,
                'cursor.description not being set to None when executing '
                'no-result statements (eg. DDL)'
                )
        finally:
            con.close()
    def test_rowcount(self):
        # rowcount: -1 or 0 after no-result statements, -1 or the affected
        # row count after insert/select (-1 is always permitted).
        con = self._connect()
        try:
            cur = con.cursor()
            self.executeDDL1(cur)
            _failUnless(self,cur.rowcount in (-1,0), # Bug #543885
                'cursor.rowcount should be -1 or 0 after executing no-result '
                'statements'
                )
            cur.execute("%s into %sbooze values ('Victoria Bitter')" % (
                self.insert, self.table_prefix
                ))
            _failUnless(self,cur.rowcount in (-1,1),
                'cursor.rowcount should == number or rows inserted, or '
                'set to -1 after executing an insert statement'
                )
            cur.execute("select name from %sbooze" % self.table_prefix)
            _failUnless(self,cur.rowcount in (-1,1),
                'cursor.rowcount should == number of rows returned, or '
                'set to -1 after executing a select statement'
                )
            self.executeDDL2(cur)
            _failUnless(self,cur.rowcount in (-1,0), # Bug #543885
                'cursor.rowcount should be -1 or 0 after executing no-result '
                'statements'
                )
        finally:
            con.close()
    # Name of a stored procedure used by test_callproc; drivers without
    # stored procedures can set this falsy to skip the check.
    lower_func = 'lower'
    def test_callproc(self):
        con = self._connect()
        try:
            cur = con.cursor()
            # callproc is optional; only exercised when both the helper
            # procedure name and the cursor method are available.
            if self.lower_func and hasattr(cur,'callproc'):
                r = cur.callproc(self.lower_func,('FOO',))
                self.assertEqual(len(r),1)
                # callproc returns a (possibly modified) copy of the inputs.
                self.assertEqual(r[0],'FOO')
                r = cur.fetchall()
                self.assertEqual(len(r),1,'callproc produced no result set')
                self.assertEqual(len(r[0]),1,
                    'callproc produced invalid result set'
                    )
                self.assertEqual(r[0][0],'foo',
                    'callproc produced invalid results'
                    )
        finally:
            con.close()
    def test_close(self):
        con = self._connect()
        try:
            cur = con.cursor()
        finally:
            con.close()

        # cursor.execute should raise an Error if called after connection
        # closed
        self.assertRaises(self.driver.Error,self.executeDDL1,cur)

        # connection.commit should raise an Error if called after connection'
        # closed.'
        self.assertRaises(self.driver.Error,con.commit)
    def test_non_idempotent_close(self):
        con = self._connect()
        con.close()
        # connection.close should raise an Error if called more than once
        #!!! reasonable persons differ about the usefulness of this test and this feature !!!
        self.assertRaises(self.driver.Error,con.close)
    def test_execute(self):
        # Delegates the heavy lifting to _paraminsert below.
        con = self._connect()
        try:
            cur = con.cursor()
            self._paraminsert(cur)
        finally:
            con.close()
    def _paraminsert(self,cur):
        # Insert one literal row and one parameterised row into barflys —
        # using whichever paramstyle the driver declares — then verify both
        # rows, including a quoting-trap string full of placeholder-like
        # characters, round-trip intact.
        self.executeDDL2(cur)
        cur.execute("%s into %sbarflys values ('Victoria Bitter', 'thi%%s :may ca%%(u)se? troub:1e')" % (
            self.insert, self.table_prefix
            ))
        _failUnless(self,cur.rowcount in (-1,1))

        if self.driver.paramstyle == 'qmark':
            cur.execute(
                "%s into %sbarflys values (?, 'thi%%s :may ca%%(u)se? troub:1e')" % (self.insert, self.table_prefix),
                ("Cooper's",)
                )
        elif self.driver.paramstyle == 'numeric':
            cur.execute(
                "%s into %sbarflys values (:1, 'thi%%s :may ca%%(u)se? troub:1e')" % (self.insert, self.table_prefix),
                ("Cooper's",)
                )
        elif self.driver.paramstyle == 'named':
            cur.execute(
                "%s into %sbarflys values (:beer, 'thi%%s :may ca%%(u)se? troub:1e')" % (self.insert, self.table_prefix),
                {'beer':"Cooper's"}
                )
        elif self.driver.paramstyle == 'format':
            # Note the doubled %% so the literal survives the outer % format.
            cur.execute(
                "%s into %sbarflys values (%%s, 'thi%%%%s :may ca%%%%(u)se? troub:1e')" % (self.insert, self.table_prefix),
                ("Cooper's",)
                )
        elif self.driver.paramstyle == 'pyformat':
            cur.execute(
                "%s into %sbarflys values (%%(beer)s, 'thi%%%%s :may ca%%%%(u)se? troub:1e')" % (self.insert, self.table_prefix),
                {'beer':"Cooper's"}
                )
        else:
            self.fail('Invalid paramstyle')
        _failUnless(self,cur.rowcount in (-1,1))

        cur.execute('select name, drink from %sbarflys' % self.table_prefix)
        res = cur.fetchall()
        self.assertEqual(len(res),2,'cursor.fetchall returned too few rows')
        beers = [res[0][0],res[1][0]]
        beers.sort()
        self.assertEqual(beers[0],"Cooper's",
            'cursor.fetchall retrieved incorrect data, or data inserted '
            'incorrectly'
            )
        self.assertEqual(beers[1],"Victoria Bitter",
            'cursor.fetchall retrieved incorrect data, or data inserted '
            'incorrectly'
            )
        trouble = "thi%s :may ca%(u)se? troub:1e"
        self.assertEqual(res[0][1], trouble,
            'cursor.fetchall retrieved incorrect data, or data inserted '
            'incorrectly. Got=%s, Expected=%s' % (repr(res[0][1]), repr(trouble)))
        self.assertEqual(res[1][1], trouble,
            'cursor.fetchall retrieved incorrect data, or data inserted '
            'incorrectly. Got=%s, Expected=%s' % (repr(res[1][1]), repr(trouble)
            ))
    def test_executemany(self):
        # Insert two rows via executemany using the driver's paramstyle,
        # then verify both rows and (optionally) the accumulated rowcount.
        con = self._connect()
        try:
            cur = con.cursor()
            self.executeDDL1(cur)
            largs = [ ("Cooper's",) , ("Boag's",) ]
            margs = [ {'beer': "Cooper's"}, {'beer': "Boag's"} ]
            if self.driver.paramstyle == 'qmark':
                cur.executemany(
                    '%s into %sbooze values (?)' % (self.insert, self.table_prefix),
                    largs
                    )
            elif self.driver.paramstyle == 'numeric':
                cur.executemany(
                    '%s into %sbooze values (:1)' % (self.insert, self.table_prefix),
                    largs
                    )
            elif self.driver.paramstyle == 'named':
                cur.executemany(
                    '%s into %sbooze values (:beer)' % (self.insert, self.table_prefix),
                    margs
                    )
            elif self.driver.paramstyle == 'format':
                cur.executemany(
                    '%s into %sbooze values (%%s)' % (self.insert, self.table_prefix),
                    largs
                    )
            elif self.driver.paramstyle == 'pyformat':
                cur.executemany(
                    '%s into %sbooze values (%%(beer)s)' % (
                        self.insert, self.table_prefix
                        ),
                    margs
                    )
            else:
                self.fail('Unknown paramstyle')
            _failUnless(self,cur.rowcount in (-1,2),
                'insert using cursor.executemany set cursor.rowcount to '
                'incorrect value %r' % cur.rowcount
                )
            cur.execute('select name from %sbooze' % self.table_prefix)
            res = cur.fetchall()
            self.assertEqual(len(res),2,
                'cursor.fetchall retrieved incorrect number of rows'
                )
            beers = [res[0][0],res[1][0]]
            beers.sort()
            self.assertEqual(beers[0],"Boag's",'incorrect data retrieved')
            self.assertEqual(beers[1],"Cooper's",'incorrect data retrieved')
        finally:
            con.close()
    def test_fetchone(self):
        con = self._connect()
        try:
            cur = con.cursor()

            # cursor.fetchone should raise an Error if called before
            # executing a select-type query
            self.assertRaises(self.driver.Error,cur.fetchone)

            # cursor.fetchone should raise an Error if called after
            # executing a query that cannnot return rows
            self.executeDDL1(cur)
            self.assertRaises(self.driver.Error,cur.fetchone)

            # On an empty result set, fetchone returns None (not an error).
            cur.execute('select name from %sbooze' % self.table_prefix)
            self.assertEqual(cur.fetchone(),None,
                'cursor.fetchone should return None if a query retrieves '
                'no rows'
                )
            _failUnless(self,cur.rowcount in (-1,0))

            # cursor.fetchone should raise an Error if called after
            # executing a query that cannnot return rows
            cur.execute("%s into %sbooze values ('Victoria Bitter')" % (
                self.insert, self.table_prefix
                ))
            self.assertRaises(self.driver.Error,cur.fetchone)

            cur.execute('select name from %sbooze' % self.table_prefix)
            r = cur.fetchone()
            self.assertEqual(len(r),1,
                'cursor.fetchone should have retrieved a single row'
                )
            self.assertEqual(r[0],'Victoria Bitter',
                'cursor.fetchone retrieved incorrect data'
                )
            self.assertEqual(cur.fetchone(),None,
                'cursor.fetchone should return None if no more rows available'
                )
            _failUnless(self,cur.rowcount in (-1,1))
        finally:
            con.close()
    # Sample rows inserted by _populate; the list is kept in sorted order so
    # fetch results can be compared element-wise after sorting.
    samples = [
        'Carlton Cold',
        'Carlton Draft',
        'Mountain Goat',
        'Redback',
        'Victoria Bitter',
        'XXXX'
        ]

    def _populate(self):
        ''' Return a list of sql commands to setup the DB for the fetch
            tests.
        '''
        populate = [
            "%s into %sbooze values ('%s')" % (self.insert, self.table_prefix, s)
            for s in self.samples
            ]
        return populate
def test_fetchmany(self):
con = self._connect()
try:
cur = con.cursor()
# cursor.fetchmany should raise an Error if called without
#issuing a query
self.assertRaises(self.driver.Error,cur.fetchmany,4)
self.executeDDL1(cur)
for sql in self._populate():
cur.execute(sql)
cur.execute('select name from %sbooze' % self.table_prefix)
r = cur.fetchmany()
self.assertEqual(len(r),1,
'cursor.fetchmany retrieved incorrect number of rows, '
'default of arraysize is one.'
)
cur.arraysize=10
r = cur.fetchmany(3) # Should get 3 rows
self.assertEqual(len(r),3,
'cursor.fetchmany retrieved incorrect number of rows'
)
r = cur.fetchmany(4) # Should get 2 more
self.assertEqual(len(r),2,
'cursor.fetchmany retrieved incorrect number of rows'
)
r = cur.fetchmany(4) # Should be an empty sequence
self.assertEqual(len(r),0,
'cursor.fetchmany should return an empty sequence after '
'results are exhausted'
)
_failUnless(self,cur.rowcount in (-1,6))
# Same as above, using cursor.arraysize
cur.arraysize=4
cur.execute('select name from %sbooze' % self.table_prefix)
r = cur.fetchmany() # Should get 4 rows
self.assertEqual(len(r),4,
'cursor.arraysize not being honoured by fetchmany'
)
r = cur.fetchmany() # Should get 2 more
self.assertEqual(len(r),2)
r = cur.fetchmany() # Should be an empty sequence
self.assertEqual(len(r),0)
_failUnless(self,cur.rowcount in (-1,6))
cur.arraysize=6
cur.execute('select name from %sbooze' % self.table_prefix)
rows = cur.fetchmany() # Should get all rows
_failUnless(self,cur.rowcount in (-1,6))
self.assertEqual(len(rows),6)
self.assertEqual(len(rows),6)
rows = [r[0] for r in rows]
rows.sort()
# Make sure we get the right data back out
for i in range(0,6):
self.assertEqual(rows[i],self.samples[i],
'incorrect data retrieved by cursor.fetchmany'
)
rows = cur.fetchmany() # Should return an empty list
self.assertEqual(len(rows),0,
'cursor.fetchmany should return an empty sequence if '
'called after the whole result set has been fetched'
)
_failUnless(self,cur.rowcount in (-1,6))
self.executeDDL2(cur)
cur.execute('select name from %sbarflys' % self.table_prefix)
r = cur.fetchmany() # Should get empty sequence
self.assertEqual(len(r),0,
'cursor.fetchmany should return an empty sequence if '
'query retrieved no rows'
)
_failUnless(self,cur.rowcount in (-1,0))
finally:
con.close()
    def test_fetchall(self):
        con = self._connect()
        try:
            cur = con.cursor()
            # cursor.fetchall should raise an Error if called
            # without executing a query that may return rows (such
            # as a select)
            self.assertRaises(self.driver.Error, cur.fetchall)

            self.executeDDL1(cur)
            for sql in self._populate():
                cur.execute(sql)

            # cursor.fetchall should raise an Error if called
            # after executing a a statement that cannot return rows
            self.assertRaises(self.driver.Error,cur.fetchall)

            cur.execute('select name from %sbooze' % self.table_prefix)
            rows = cur.fetchall()
            _failUnless(self,cur.rowcount in (-1,len(self.samples)))
            self.assertEqual(len(rows),len(self.samples),
                'cursor.fetchall did not retrieve all rows'
                )
            # Sort before comparing: row order of a plain select is not
            # guaranteed, but self.samples is pre-sorted.
            rows = [r[0] for r in rows]
            rows.sort()
            for i in range(0,len(self.samples)):
                self.assertEqual(rows[i],self.samples[i],
                'cursor.fetchall retrieved incorrect rows'
                )
            rows = cur.fetchall()
            self.assertEqual(
                len(rows),0,
                'cursor.fetchall should return an empty list if called '
                'after the whole result set has been fetched'
                )
            _failUnless(self,cur.rowcount in (-1,len(self.samples)))

            self.executeDDL2(cur)
            cur.execute('select name from %sbarflys' % self.table_prefix)
            rows = cur.fetchall()
            _failUnless(self,cur.rowcount in (-1,0))
            self.assertEqual(len(rows),0,
                'cursor.fetchall should return an empty list if '
                'a select query returns no rows'
                )
        finally:
            con.close()
    def test_mixedfetch(self):
        # Interleave fetchone/fetchmany/fetchall on one result set and
        # check that together they return every sample row exactly once.
        con = self._connect()
        try:
            cur = con.cursor()
            self.executeDDL1(cur)
            for sql in self._populate():
                cur.execute(sql)

            cur.execute('select name from %sbooze' % self.table_prefix)
            rows1  = cur.fetchone()
            rows23 = cur.fetchmany(2)
            rows4  = cur.fetchone()
            rows56 = cur.fetchall()
            _failUnless(self,cur.rowcount in (-1,6))
            self.assertEqual(len(rows23),2,
                'fetchmany returned incorrect number of rows'
                )
            self.assertEqual(len(rows56),2,
                'fetchall returned incorrect number of rows'
                )

            rows = [rows1[0]]
            rows.extend([rows23[0][0],rows23[1][0]])
            rows.append(rows4[0])
            rows.extend([rows56[0][0],rows56[1][0]])
            rows.sort()
            for i in range(0,len(self.samples)):
                self.assertEqual(rows[i],self.samples[i],
                    'incorrect data retrieved or inserted'
                    )
        finally:
            con.close()
    def help_nextset_setUp(self,cur):
        ''' Should create a procedure called deleteme
            that returns two result sets, first the
            number of rows in booze then "name from booze"
        '''
        raise NotImplementedError('Helper not implemented')
        # Example of what a driver subclass might execute here:
        #sql="""
        #    create procedure deleteme as
        #    begin
        #        select count(*) from booze
        #        select name from booze
        #    end
        #"""
        #cur.execute(sql)

    def help_nextset_tearDown(self,cur):
        'If cleaning up is needed after nextSetTest'
        raise NotImplementedError('Helper not implemented')
        #cur.execute("drop procedure deleteme")
def test_nextset(self):
con = self._connect()
try:
cur = con.cursor()
if not hasattr(cur,'nextset'):
return
try:
self.executeDDL1(cur)
sql=self._populate()
for sql in self._populate():
cur.execute(sql)
self.help_nextset_setUp(cur)
cur.callproc('deleteme')
numberofrows=cur.fetchone()
assert numberofrows[0]== len(self.samples)
assert cur.nextset()
names=cur.fetchall()
assert len(names) == len(self.samples)
s=cur.nextset()
assert s == None,'No more return sets, should return None'
finally:
self.help_nextset_tearDown(cur)
finally:
con.close()
    def test_nextset(self):
        # NOTE: this definition deliberately shadows the generic
        # test_nextset above; drivers must override it themselves.
        raise NotImplementedError('Drivers need to override this test')
    def test_arraysize(self):
        # Not much here - rest of the tests for this are in test_fetchmany
        con = self._connect()
        try:
            cur = con.cursor()
            _failUnless(self,hasattr(cur,'arraysize'),
                'cursor.arraysize must be defined'
                )
        finally:
            con.close()
    def test_setinputsizes(self):
        # setinputsizes may be a no-op but must not break the cursor.
        con = self._connect()
        try:
            cur = con.cursor()
            cur.setinputsizes( (25,) )
            self._paraminsert(cur) # Make sure cursor still works
        finally:
            con.close()
    def test_setoutputsize_basic(self):
        # Basic test is to make sure setoutputsize doesn't blow up
        con = self._connect()
        try:
            cur = con.cursor()
            cur.setoutputsize(1000)
            cur.setoutputsize(2000,0)
            self._paraminsert(cur) # Make sure the cursor still works
        finally:
            con.close()

    def test_setoutputsize(self):
        # Real test for setoutputsize is driver dependant
        raise NotImplementedError('Driver needed to override this test')
    def test_None(self):
        # A SQL NULL must come back as Python None.
        con = self._connect()
        try:
            cur = con.cursor()
            self.executeDDL1(cur)
            cur.execute("%s into %sbarflys values ('a', NULL)" % (self.insert, self.table_prefix))
            cur.execute('select drink from %sbarflys' % self.table_prefix)
            r = cur.fetchall()
            self.assertEqual(len(r),1)
            self.assertEqual(len(r[0]),1)
            self.assertEqual(r[0][0],None,'NULL value not returned as None')
        finally:
            con.close()
    # The constructor tests below only check that the module-level type
    # constructors exist and accept the documented arguments.
    def test_Date(self):
        d1 = self.driver.Date(2002,12,25)
        d2 = self.driver.DateFromTicks(time.mktime((2002,12,25,0,0,0,0,0,0)))
        # Can we assume this? API doesn't specify, but it seems implied
        # self.assertEqual(str(d1),str(d2))

    def test_Time(self):
        t1 = self.driver.Time(13,45,30)
        t2 = self.driver.TimeFromTicks(time.mktime((2001,1,1,13,45,30,0,0,0)))
        # Can we assume this? API doesn't specify, but it seems implied
        # self.assertEqual(str(t1),str(t2))

    def test_Timestamp(self):
        t1 = self.driver.Timestamp(2002,12,25,13,45,30)
        t2 = self.driver.TimestampFromTicks(
            time.mktime((2002,12,25,13,45,30,0,0,0))
            )
        # Can we assume this? API doesn't specify, but it seems implied
        # self.assertEqual(str(t1),str(t2))

    def test_Binary(self):
        b = self.driver.Binary(str2bytes('Something'))
        b = self.driver.Binary(str2bytes(''))
    # PEP 249 requires these module-level type objects for describe/compare.
    def test_STRING(self):
        _failUnless(self, hasattr(self.driver,'STRING'),
            'module.STRING must be defined'
            )

    def test_BINARY(self):
        _failUnless(self, hasattr(self.driver,'BINARY'),
            'module.BINARY must be defined.'
            )

    def test_NUMBER(self):
        _failUnless(self, hasattr(self.driver,'NUMBER'),
            'module.NUMBER must be defined.'
            )

    def test_DATETIME(self):
        _failUnless(self, hasattr(self.driver,'DATETIME'),
            'module.DATETIME must be defined.'
            )

    def test_ROWID(self):
        _failUnless(self, hasattr(self.driver,'ROWID'),
            'module.ROWID must be defined.'
            )
| {
"content_hash": "4479e34b9a281f0fbbe72de7eb9bd18f",
"timestamp": "",
"source": "github",
"line_count": 840,
"max_line_length": 129,
"avg_line_length": 37.60595238095238,
"alnum_prop": 0.544809902181139,
"repo_name": "youngwookim/python-phoenixdb",
"id": "93cdac66aa46311f74fa08690b6e4cc9af544ef8",
"size": "31611",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "phoenixdb/tests/dbapi20.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "98479"
},
{
"name": "Shell",
"bytes": "1894"
}
],
"symlink_target": ""
} |
"""Unit tests for the standardise.StandardiseMetadata plugin."""
import unittest
from datetime import datetime
import iris
import numpy as np
from iris.coords import AuxCoord
from iris.tests import IrisTest
from improver.standardise import StandardiseMetadata
from improver.synthetic_data.set_up_test_cubes import set_up_variable_cube
class Test_process(IrisTest):
    """Test the process method"""

    def setUp(self):
        """Set up input cube"""
        # 5x5 lat/lon float32 field of 282 (presumably air temperature in
        # kelvin — set_up_variable_cube defaults; confirm against helper),
        # with global-deterministic grid attributes, a one-hour time-bound
        # window and a forecast reference time 6 hours before validity.
        self.cube = set_up_variable_cube(
            282 * np.ones((5, 5), dtype=np.float32),
            spatial_grid="latlon",
            standard_grid_metadata="gl_det",
            time=datetime(2019, 10, 11),
            time_bounds=[datetime(2019, 10, 10, 23), datetime(2019, 10, 11)],
            frt=datetime(2019, 10, 10, 18),
        )
        self.plugin = StandardiseMetadata()
    def test_null(self):
        """Test process method with default arguments returns an unchanged
        cube"""
        result = self.plugin.process(self.cube.copy())
        self.assertIsInstance(result, iris.cube.Cube)
        self.assertArrayAlmostEqual(result.data, self.cube.data)
        # Cube.metadata compares name, units, attributes and cell methods.
        self.assertEqual(result.metadata, self.cube.metadata)
    def test_standardise_time_coords(self):
        """Test incorrect time-type coordinates are cast to the correct
        datatypes and units"""
        # Corrupt both time coordinates to float64 (and forecast_period to
        # hours) so the plugin has something to standardise.
        for coord in ["time", "forecast_period"]:
            self.cube.coord(coord).points = self.cube.coord(coord).points.astype(
                np.float64
            )
            self.cube.coord(coord).bounds = self.cube.coord(coord).bounds.astype(
                np.float64
            )
        self.cube.coord("forecast_period").convert_units("hours")
        result = self.plugin.process(self.cube)
        self.assertEqual(result.coord("forecast_period").units, "seconds")
        self.assertEqual(result.coord("forecast_period").points.dtype, np.int32)
        self.assertEqual(result.coord("forecast_period").bounds.dtype, np.int32)
        self.assertEqual(result.coord("time").points.dtype, np.int64)
        self.assertEqual(result.coord("time").bounds.dtype, np.int64)
    def test_standardise_time_coords_missing_fp(self):
        """Test a missing time-type coordinate does not cause an error when
        standardisation is required"""
        self.cube.coord("time").points = self.cube.coord("time").points.astype(
            np.float64
        )
        # Removing forecast_period must not break time standardisation.
        self.cube.remove_coord("forecast_period")
        result = self.plugin.process(self.cube)
        self.assertEqual(result.coord("time").points.dtype, np.int64)
    def test_collapse_scalar_dimensions(self):
        """Test scalar dimension is collapsed"""
        # Promote "time" to a length-1 dimension; processing should demote
        # it back to a scalar auxiliary coordinate.
        cube = iris.util.new_axis(self.cube, "time")
        result = self.plugin.process(cube)
        dim_coord_names = [coord.name() for coord in result.coords(dim_coords=True)]
        aux_coord_names = [coord.name() for coord in result.coords(dim_coords=False)]
        self.assertSequenceEqual(result.shape, (5, 5))
        self.assertNotIn("time", dim_coord_names)
        self.assertIn("time", aux_coord_names)
    def test_realization_not_collapsed(self):
        """Test scalar realization coordinate is preserved"""
        # Unlike other scalar dimensions, a length-1 realization dimension
        # must survive processing intact.
        realization = AuxCoord([1], "realization")
        self.cube.add_aux_coord(realization)
        cube = iris.util.new_axis(self.cube, "realization")
        result = self.plugin.process(cube)
        dim_coord_names = [coord.name() for coord in result.coords(dim_coords=True)]
        self.assertSequenceEqual(result.shape, (1, 5, 5))
        self.assertIn("realization", dim_coord_names)
    def test_metadata_changes(self):
        """Test changes to cube name, coordinates and attributes without
        regridding"""
        new_name = "regridded_air_temperature"
        # "remove" is the sentinel value that deletes an attribute.
        attribute_changes = {
            "institution": "Met Office",
            "mosg__grid_version": "remove",
        }
        expected_attributes = {
            "mosg__grid_domain": "global",
            "mosg__grid_type": "standard",
            "mosg__model_configuration": "gl_det",
            "institution": "Met Office",
        }
        # K -> degC conversion of the 282 K input field.
        expected_data = self.cube.data.copy() - 273.15
        result = self.plugin.process(
            self.cube,
            new_name=new_name,
            new_units="degC",
            coords_to_remove=["forecast_period"],
            attributes_dict=attribute_changes,
        )
        self.assertEqual(result.name(), new_name)
        self.assertEqual(result.units, "degC")
        self.assertArrayAlmostEqual(result.data, expected_data, decimal=5)
        self.assertDictEqual(result.attributes, expected_attributes)
        self.assertNotIn("forecast_period", [coord.name() for coord in result.coords()])
def test_discard_cellmethod(self):
"""Test changes to cell_methods"""
cube = self.cube.copy()
cube.cell_methods = [
iris.coords.CellMethod(method="point", coords="time"),
iris.coords.CellMethod(method="max", coords="realization"),
]
result = self.plugin.process(cube,)
self.assertEqual(
result.cell_methods,
(iris.coords.CellMethod(method="max", coords="realization"),),
)
    def test_float_deescalation(self):
        """Test precision de-escalation from float64 to float32"""
        cube = self.cube.copy()
        cube.data = cube.data.astype(np.float64)
        result = self.plugin.process(cube)
        self.assertEqual(result.data.dtype, np.float32)
        # Values should be preserved to (at least) 4 decimal places.
        self.assertArrayAlmostEqual(result.data, self.cube.data, decimal=4)
    def test_float_deescalation_with_unit_change(self):
        """Covers the bug where unit conversion from an integer input field causes
        float64 escalation"""
        cube = set_up_variable_cube(
            np.ones((5, 5), dtype=np.int16), name="rainrate", units="mm h-1"
        )
        result = self.plugin.process(cube, new_units="m s-1")
        # Neither the input cube nor the result may end up float64.
        self.assertEqual(cube.dtype, np.float32)
        self.assertEqual(result.data.dtype, np.float32)
# Allow the test module to be run directly.
if __name__ == "__main__":
    unittest.main()
| {
"content_hash": "58213cbbd8b055980af7bf63b3273dcb",
"timestamp": "",
"source": "github",
"line_count": 147,
"max_line_length": 88,
"avg_line_length": 41.53061224489796,
"alnum_prop": 0.6286650286650287,
"repo_name": "fionaRust/improver",
"id": "073077b375e442c6677130c44e42397585970cc5",
"size": "7762",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "improver_tests/standardise/test_StandardiseMetadata.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "5026255"
},
{
"name": "Shell",
"bytes": "9493"
}
],
"symlink_target": ""
} |
import pymongo
import uvloop
import asyncio
import os
os.environ['MODE'] = 'PRO'
from pprint import pprint
from urllib.parse import parse_qs, urlparse
from owllook.database.mongodb import MotorBase
from owllook.fetcher.function import get_time
# MongoDB connection settings (an empty username disables authentication).
MONGODB_SERVER = "localhost"
MONGODB_PORT = 27017
MONGODB_USERNAME = ""
MONGODB_PASSWORD = ""
MONGODB_DB = "owllook"
MONGODB_COLLECTION = "all_books"
class Novels(object):
    """Thin synchronous wrapper around the ``all_books`` Mongo collection."""

    def __init__(self):
        # Fix: build the URI from the module-level settings instead of
        # hard-coding host/port, so MONGODB_SERVER/MONGODB_PORT (previously
        # defined but ignored) are actually honoured.
        _mongo_uri = 'mongodb://{account}{host}:{port}/{database}'.format(
            account='{username}:{password}@'.format(
                username=MONGODB_USERNAME,
                password=MONGODB_PASSWORD) if MONGODB_USERNAME else '',
            host=MONGODB_SERVER,
            port=MONGODB_PORT,
            database=MONGODB_DB)
        connection = pymongo.MongoClient(_mongo_uri)
        db = connection[MONGODB_DB]
        self.collection = db[MONGODB_COLLECTION]

    def search_name(self, name):
        """Return the book document whose name is *name*, or False if absent."""
        result = self.collection.find_one({'name': name})
        return result if result else False
async def get_tag():
    """Aggregate every user's bookshelf into novels/tags/authors lists and
    upsert the aggregate into the ``user_tag`` collection.
    """
    motor_db = MotorBase().db
    novels = Novels()
    # Cursor over every user's bookshelf links.
    books_url_cursor = motor_db.user_message.find({}, {'books_url.book_url': 1, 'user': 1, '_id': 0})
    async for document in books_url_cursor:
        if document:
            books_url = document.get('books_url', None)
            if books_url:
                all_user = {}
                user = document['user']
                all_user[user + '_novels'] = []
                all_user[user + '_tag'] = []
                all_user[user + '_author'] = []
                for book_url in books_url:
                    chapter_url = book_url['book_url']
                    # Fix: parse_qs maps each key to a *list* of values, so
                    # the default must be [''] — the previous default of ''
                    # raised IndexError on [0] when novels_name was missing.
                    novels_name = parse_qs(urlparse(chapter_url).query).get('novels_name', [''])[0]
                    all_user[user + '_novels'].append(novels_name)
                    novels_info = novels.search_name(novels_name)
                    if novels_info:
                        # novels_type is stored '#'-separated in the document.
                        novels_type = novels_info['novels_type'].split('#')
                        author = novels_info['author']
                        all_user[user + '_author'].append(author)
                        all_user[user + '_tag'].extend(novels_type)
                data = {
                    'user_novels': all_user[user + '_novels'],
                    'user_tag': all_user[user + '_tag'],
                    'user_author': all_user[user + '_author'],
                }
                # Upsert so first-time users get a document created.
                await motor_db.user_tag.update_one(
                    {"user": user},
                    {'$set': {'data': data, "updated_at": get_time()}}, upsert=True)
                pprint(data)
# Use uvloop's faster event-loop implementation for all asyncio loops.
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())


def tag_test():
    """Run the get_tag coroutine to completion and return its result."""
    loop = asyncio.get_event_loop()
    task = asyncio.ensure_future(get_tag())
    loop.run_until_complete(task)
    return task.result()


if __name__ == '__main__':
    tag_test()
| {
"content_hash": "62014be2901b062a5025581d47e8940a",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 101,
"avg_line_length": 34.01162790697674,
"alnum_prop": 0.5422222222222223,
"repo_name": "zhiyue/owllook",
"id": "f0b844c511af854631f566747b1b9a70be6e5cfb",
"size": "2967",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/tag_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "19382"
},
{
"name": "HTML",
"bytes": "63824"
},
{
"name": "JavaScript",
"bytes": "16632"
},
{
"name": "Python",
"bytes": "142362"
}
],
"symlink_target": ""
} |
"""
WSGI config for todolist project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "todolist.settings")
application = get_wsgi_application()
| {
"content_hash": "93f88772bd32ab1dcc35665a4fbeb7c8",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 78,
"avg_line_length": 24.5625,
"alnum_prop": 0.7709923664122137,
"repo_name": "mannyrivera2010/todolist-py3-drf",
"id": "0d1c784d9d8b8f29db7f81be393eac0be3f6c3b5",
"size": "393",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "todolist/wsgi.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "16734"
},
{
"name": "Shell",
"bytes": "1146"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add an optional ``group_role`` FK from Membership to CommunityGroupRole."""

    dependencies = [
        ('communities', '0010_auto_20150607_1148'),
        ('users', '0003_auto_20150607_1606'),
    ]

    operations = [
        migrations.AddField(
            model_name='membership',
            name='group_role',
            # null=True/blank=True so existing rows migrate without a value.
            field=models.ForeignKey(related_name='memberships', verbose_name='Group', blank=True, to='communities.CommunityGroupRole', null=True),
        ),
    ]
| {
"content_hash": "5478096d101359fd379475e1197c28c5",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 146,
"avg_line_length": 27.894736842105264,
"alnum_prop": 0.6283018867924528,
"repo_name": "nonZero/OpenCommunity",
"id": "da973b5a00c5e73397d58d76289b733866d227ec",
"size": "554",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/users/migrations/0004_membership_group_role.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "610648"
},
{
"name": "HTML",
"bytes": "258907"
},
{
"name": "JavaScript",
"bytes": "2403446"
},
{
"name": "Python",
"bytes": "1385625"
},
{
"name": "Shell",
"bytes": "185"
}
],
"symlink_target": ""
} |
import wolo.log as log
import wolo.task as task
import wolo.parameters as parameters
def test_func(x):
return "this is a test"
# Example task-log tree used by the test suite: a top-level task, a parallel
# section with two branches of two sub-tasks each, then a final task.
example_log = []
example_log.append(log.TaskLog(index=[0], task_class="0", last_run_success=True))
# Parallel branch "p0": both sub-tasks succeeded.
sublog1 = []
sublog1.append(log.TaskLog(index=[1, "p0", 0], task_class="1_0_0", last_run_success=True))
sublog1.append(log.TaskLog(index=[1, "p0", 1], task_class="1_0_1", last_run_success=True))
# Parallel branch "p1": the first sub-task failed.
sublog2 = []
sublog2.append(log.TaskLog(index=[1, "p1", 0], task_class="1_1_0", last_run_success=False))
sublog2.append(log.TaskLog(index=[1, "p1", 1], task_class="1_1_1", last_run_success=True))
example_log.append([sublog1, sublog2])
example_log.append(log.TaskLog(index=[2], task_class="2", last_run_success=True))

# Flattened view over the nested log, and the dict expected from flattening it.
example_flat_view = log.FlatView(example_log)
example_flat_output = {"0": {"index": [0], "task_class": "0", "last_run_success": True, "inputs": {}, "outputs": {}, "info": {}, "last_run": None, "execution_time": None},
                       "1_p0_0": {"index": [1, "p0", 0], "task_class": "1_0_0", "last_run_success": True, "inputs": {}, "outputs": {}, "info": {}, "last_run": None, "execution_time": None},
                       "1_p0_1": {"index": [1, "p0", 1], "task_class": "1_0_1", "last_run_success": True, "inputs": {}, "outputs": {}, "info": {}, "last_run": None, "execution_time": None},
                       "1_p1_0": {"index": [1, "p1", 0], "task_class": "1_1_0", "last_run_success": False, "inputs": {}, "outputs": {}, "info": {}, "last_run": None, "execution_time": None},
                       "1_p1_1": {"index": [1, "p1", 1], "task_class": "1_1_1", "last_run_success": True, "inputs": {}, "outputs": {}, "info": {}, "last_run": None, "execution_time": None},
                       "2": {"index": [2], "task_class": "2", "last_run_success": True, "inputs": {}, "outputs": {}, "info": {}, "last_run": None, "execution_time": None}}
class ExampleTask(task.Task):
    """Minimal Task subclass used as a test fixture."""

    def input(self):
        """Expose the first positional argument as the task input parameter."""
        return parameters.Parameter("test_input", self.args[0])

    def run(self):
        """Pretend to do work and return a fixed report string."""
        return "test_report"

    def output(self):
        """Expose the ``kwarg`` keyword argument as the task output parameter."""
        return parameters.Parameter("test_output", self.kwargs["kwarg"])
| {
"content_hash": "94ee7e90798afb6a15a50e839ca4a75e",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 190,
"avg_line_length": 58.526315789473685,
"alnum_prop": 0.5818345323741008,
"repo_name": "AKuederle/python-WoLo",
"id": "dca5980c88de9f57aad78788cb59a30f52168706",
"size": "2224",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/example_objects.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "56656"
}
],
"symlink_target": ""
} |
import tests.model_control.test_ozone_custom_models_enabled as testmod
# One grid point of the model-control matrix: build an ozone model using the
# BoxCox transform, Lag1Trend, minute-level seasonality and an MLP model.
testmod.build_model( ['BoxCox'] , ['Lag1Trend'] , ['Seasonal_Minute'] , ['MLP'] );
"content_hash": "4af9558578faeea1b95e2d6ebb49be6f",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 82,
"avg_line_length": 38.75,
"alnum_prop": 0.7032258064516129,
"repo_name": "antoinecarme/pyaf",
"id": "e36db48d19074ab5fe2a15d9ae6999249787bc71",
"size": "155",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/model_control/detailed/transf_BoxCox/model_control_one_enabled_BoxCox_Lag1Trend_Seasonal_Minute_MLP.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "6773299"
},
{
"name": "Procfile",
"bytes": "24"
},
{
"name": "Python",
"bytes": "54209093"
},
{
"name": "R",
"bytes": "807"
},
{
"name": "Shell",
"bytes": "3619"
}
],
"symlink_target": ""
} |
from . import event
| {
"content_hash": "bea3109162c4cf4119c1182f875c6c55",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 19,
"avg_line_length": 21,
"alnum_prop": 0.7142857142857143,
"repo_name": "ktan2020/jpype",
"id": "9850447da7d7b8a00abe7622d5888bdca9e6d2ab",
"size": "798",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "jpype/awt/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1842"
},
{
"name": "C",
"bytes": "37981"
},
{
"name": "C++",
"bytes": "495450"
},
{
"name": "Java",
"bytes": "31200"
},
{
"name": "Objective-C",
"bytes": "387"
},
{
"name": "PowerShell",
"bytes": "9130"
},
{
"name": "Python",
"bytes": "131910"
},
{
"name": "XSLT",
"bytes": "3460"
}
],
"symlink_target": ""
} |
"""New implementation of Visual Studio project generation."""
import hashlib
import os
import random
import gyp.common
try:
  cmp  # Python 2: the builtin exists, nothing to do.
except NameError:
  # Python 3 removed the cmp() builtin; recreate its three-way semantics.
  def cmp(a, b):
    """Return a negative, zero or positive int as *a* is <, == or > *b*."""
    return (a > b) - (a < b)
# Initialize random number generator
random.seed()

# GUIDs for project types.  These are fixed values recognised by Visual
# Studio for the two kinds of solution entries this module emits.
ENTRY_TYPE_GUIDS = {
    'project': '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}',
    'folder': '{2150E333-8FDC-42A3-9474-1A3956D46DE8}',
}
#------------------------------------------------------------------------------
# Helper functions
def MakeGuid(name, seed='msvs_new'):
  """Return a deterministic GUID-shaped string for the given target name.

  Args:
    name: Target name.
    seed: Seed for the MD5 hash.
  Returns:
    A GUID-like string derived from the MD5 of seed + name.

  Because the value depends only on *name* and *seed*, repeated builds
  produce identical GUIDs, and projects/solutions that refer to each other
  can compute the same GUID independently.
  """
  digest = hashlib.md5((str(seed) + str(name)).encode('utf-8')).hexdigest().upper()
  # Format the first 32 hex digits as the five dash-separated GUID groups.
  groups = (digest[:8], digest[8:12], digest[12:16], digest[16:20], digest[20:32])
  return '{%s}' % '-'.join(groups)
#------------------------------------------------------------------------------
class MSVSSolutionEntry(object):
  """Base class for solution entries, ordered by (name, guid)."""

  def __cmp__(self, other):
    # Sort by name then guid so output is stable on VS2008.
    own_key = (self.name, self.get_guid())
    their_key = (other.name, other.get_guid())
    return cmp(own_key, their_key)

  def __lt__(self, other):
    # Python 3 ordering support, expressed via the three-way __cmp__.
    return self.__cmp__(other) < 0
class MSVSFolder(MSVSSolutionEntry):
  """Folder in a Visual Studio project or solution."""

  def __init__(self, path, name = None, entries = None,
               guid = None, items = None):
    """Initializes the folder.

    Args:
      path: Full path to the folder.
      name: Name of the folder; defaults to the last path component.
      entries: Folder/Project entries nested inside this folder, or None.
      guid: GUID to use for the folder, if not None.
      items: Solution items directly contained in the folder, or None.
    """
    # Fall back to the last path component when no explicit name is given.
    self.name = name if name else os.path.basename(path)
    self.path = path
    self.guid = guid

    # Defensive copies so the caller's lists are never aliased.
    self.entries = sorted(list(entries or []))
    self.items = list(items or [])

    self.entry_type_guid = ENTRY_TYPE_GUIDS['folder']

  def get_guid(self):
    """Return the folder GUID, deriving a stable one from the path on first use."""
    if self.guid is None:
      # Consistent guids for folders, so regeneration doesn't churn files.
      self.guid = MakeGuid(self.path, seed='msvs_folder')
    return self.guid
#------------------------------------------------------------------------------
class MSVSProject(MSVSSolutionEntry):
  """Visual Studio project."""

  def __init__(self, path, name = None, dependencies = None, guid = None,
               spec = None, build_file = None, config_platform_overrides = None,
               fixpath_prefix = None):
    """Initializes the project.

    Args:
      path: Absolute path to the project file.
      name: Name of the project; defaults to the project file's base name.
      dependencies: Other Project objects this project depends on, or None.
      guid: GUID to use for the project, if not None.
      spec: Dictionary specifying how to build this project.
      build_file: Filename of the .gyp file the vcproj file comes from.
      config_platform_overrides: optional dict of configuration platforms to
          use in place of the default for this target.
      fixpath_prefix: the path used to adjust the behavior of _fixpath.
    """
    self.path = path
    self.guid = guid
    self.spec = spec
    self.build_file = build_file

    # Default the project name to the file name without its extension.
    self.name = name if name else os.path.splitext(os.path.basename(path))[0]

    # Defensive copy so the caller's list is never aliased.
    self.dependencies = list(dependencies or [])

    self.entry_type_guid = ENTRY_TYPE_GUIDS['project']

    self.config_platform_overrides = (config_platform_overrides
                                      if config_platform_overrides else {})
    self.fixpath_prefix = fixpath_prefix
    self.msbuild_toolset = None

  def set_dependencies(self, dependencies):
    """Replace the dependency list (copying the input)."""
    self.dependencies = list(dependencies or [])

  def get_guid(self):
    """Return the project GUID, deriving a stable one from the name on first use."""
    if self.guid is None:
      # TODO(rspangler): Deriving the GUID from the bare project name is
      # fragile: base names can collide across directories, the value should
      # really be relative to $SOURCE_ROOT, and pre-built project files ought
      # to have their GUID read from disk.  It is, however, stable across
      # invocations, which keeps solutions from being rewritten needlessly.
      self.guid = MakeGuid(self.name)
    return self.guid

  def set_msbuild_toolset(self, msbuild_toolset):
    """Record the MSBuild toolset to emit for this project."""
    self.msbuild_toolset = msbuild_toolset
#------------------------------------------------------------------------------
class MSVSSolution(object):
  """Visual Studio solution.

  Writes itself to disk immediately on construction (see the TODO in
  __init__); Write() emits the .sln text format with CRLF line endings.
  """

  def __init__(self, path, version, entries=None, variants=None,
               websiteProperties=True):
    """Initializes the solution.

    Args:
      path: Path to solution file.
      version: Format version to emit.
      entries: List of entries in solution.  May contain Folder or Project
          objects.  May be None, if the folder is empty.
      variants: List of build variant strings.  If none, a default list will
          be used.
      websiteProperties: Flag to decide if the website properties section
          is generated.
    """
    self.path = path
    self.websiteProperties = websiteProperties
    self.version = version

    # Copy passed lists (or set to empty lists)
    self.entries = list(entries or [])

    if variants:
      # Copy passed list
      self.variants = variants[:]
    else:
      # Use default
      self.variants = ['Debug|Win32', 'Release|Win32']
    # TODO(rspangler): Need to be able to handle a mapping of solution config
    # to project config.  Should we be able to handle variants being a dict,
    # or add a separate variant_map variable?  If it's a dict, we can't
    # guarantee the order of variants since dict keys aren't ordered.

    # TODO(rspangler): Automatically write to disk for now; should delay until
    # node-evaluation time.
    self.Write()

  def Write(self, writer=gyp.common.WriteOnDiff):
    """Writes the solution file to disk.

    Args:
      writer: Factory producing a writable file-like object for the solution
          path; defaults to gyp.common.WriteOnDiff so unchanged files are
          left untouched.

    Raises:
      IndexError: An entry appears multiple times.
    """
    # Walk the entry tree and collect all the folders and projects.
    all_entries = set()
    entries_to_check = self.entries[:]
    while entries_to_check:
      e = entries_to_check.pop(0)

      # If this entry has been visited, nothing to do.
      if e in all_entries:
        continue

      all_entries.add(e)

      # If this is a folder, check its entries too.
      if isinstance(e, MSVSFolder):
        entries_to_check += e.entries

    # Sort for deterministic output across runs.
    all_entries = sorted(all_entries)

    # Open file and print header
    f = writer(self.path)
    f.write('Microsoft Visual Studio Solution File, '
            'Format Version %s\r\n' % self.version.SolutionVersion())
    f.write('# %s\r\n' % self.version.Description())

    # Project entries
    sln_root = os.path.split(self.path)[0]
    for e in all_entries:
      relative_path = gyp.common.RelativePath(e.path, sln_root)
      # msbuild does not accept an empty folder_name.
      # use '.' in case relative_path is empty.
      folder_name = relative_path.replace('/', '\\') or '.'
      f.write('Project("%s") = "%s", "%s", "%s"\r\n' % (
          e.entry_type_guid,          # Entry type GUID
          e.name,                     # Folder name
          folder_name,                # Folder name (again)
          e.get_guid(),               # Entry GUID
      ))

      # TODO(rspangler): Need a way to configure this stuff
      if self.websiteProperties:
        f.write('\tProjectSection(WebsiteProperties) = preProject\r\n'
                '\t\tDebug.AspNetCompiler.Debug = "True"\r\n'
                '\t\tRelease.AspNetCompiler.Debug = "False"\r\n'
                '\tEndProjectSection\r\n')

      if isinstance(e, MSVSFolder):
        if e.items:
          f.write('\tProjectSection(SolutionItems) = preProject\r\n')
          for i in e.items:
            f.write('\t\t%s = %s\r\n' % (i, i))
          f.write('\tEndProjectSection\r\n')

      if isinstance(e, MSVSProject):
        if e.dependencies:
          f.write('\tProjectSection(ProjectDependencies) = postProject\r\n')
          for d in e.dependencies:
            f.write('\t\t%s = %s\r\n' % (d.get_guid(), d.get_guid()))
          f.write('\tEndProjectSection\r\n')

      f.write('EndProject\r\n')

    # Global section
    f.write('Global\r\n')

    # Configurations (variants)
    f.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n')
    for v in self.variants:
      f.write('\t\t%s = %s\r\n' % (v, v))
    f.write('\tEndGlobalSection\r\n')

    # Sort config guids for easier diffing of solution changes.
    config_guids = []
    config_guids_overrides = {}
    for e in all_entries:
      if isinstance(e, MSVSProject):
        config_guids.append(e.get_guid())
        config_guids_overrides[e.get_guid()] = e.config_platform_overrides
    config_guids.sort()

    f.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n')
    for g in config_guids:
      for v in self.variants:
        nv = config_guids_overrides[g].get(v, v)
        # Pick which project configuration to build for this solution
        # configuration.
        f.write('\t\t%s.%s.ActiveCfg = %s\r\n' % (
            g,              # Project GUID
            v,              # Solution build configuration
            nv,             # Project build config for that solution config
        ))

        # Enable project in this solution configuration.
        f.write('\t\t%s.%s.Build.0 = %s\r\n' % (
            g,              # Project GUID
            v,              # Solution build configuration
            nv,             # Project build config for that solution config
        ))
    f.write('\tEndGlobalSection\r\n')

    # TODO(rspangler): Should be able to configure this stuff too (though I've
    # never seen this be any different)
    f.write('\tGlobalSection(SolutionProperties) = preSolution\r\n')
    f.write('\t\tHideSolutionNode = FALSE\r\n')
    f.write('\tEndGlobalSection\r\n')

    # Folder mappings
    # Omit this section if there are no folders
    if any([e.entries for e in all_entries if isinstance(e, MSVSFolder)]):
      f.write('\tGlobalSection(NestedProjects) = preSolution\r\n')
      for e in all_entries:
        if not isinstance(e, MSVSFolder):
          continue  # Does not apply to projects, only folders
        for subentry in e.entries:
          f.write('\t\t%s = %s\r\n' % (subentry.get_guid(), e.get_guid()))
      f.write('\tEndGlobalSection\r\n')

    f.write('EndGlobal\r\n')

    f.close()
| {
"content_hash": "e0c793f4833148adda7d3e73f4d06fb0",
"timestamp": "",
"source": "github",
"line_count": 334,
"max_line_length": 80,
"avg_line_length": 35.26646706586826,
"alnum_prop": 0.6128703625095508,
"repo_name": "enclose-io/compiler",
"id": "76c4b95c0c3e824046d911320fdf1109c952f512",
"size": "11936",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "lts/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "11474"
},
{
"name": "Shell",
"bytes": "131"
}
],
"symlink_target": ""
} |
from eclcli.common import command
from eclcli.common import utils
from ..networkclient.common import utils as to_obj
class ListColoSpace(command.Lister):
    """List every colocation space known to the network service."""

    def get_parser(self, prog_name):
        # No extra CLI arguments beyond the base Lister's.
        return super(ListColoSpace, self).get_parser(prog_name)

    def take_action(self, parsed_args):
        client = self.app.client_manager.network

        columns = ('id', 'name')
        column_headers = ('ID', 'Name')

        spaces = client.list_colo_spaces().get('colocation_spaces')
        rows = [to_obj.ColoSpace(item) for item in spaces]
        cells = (utils.get_item_properties(row, columns) for row in rows)
        return column_headers, cells
class ShowColoSpace(command.ShowOne):
    """Show details of a single colocation space."""

    def get_parser(self, prog_name):
        parser = super(ShowColoSpace, self).get_parser(prog_name)
        parser.add_argument(
            'colocation_space_id',
            metavar="COLOCATION_SPACE_ID",
            help="ID of Colocation Space to show."
        )
        return parser

    def take_action(self, parsed_args):
        client = self.app.client_manager.network

        space = client.show_colo_space(
            parsed_args.colocation_space_id).get('colocation_space')
        columns = utils.get_columns(space)
        record = to_obj.ColoSpace(space)
        return columns, utils.get_item_properties(record, columns)
| {
"content_hash": "e3b5ab1b0157b42e9fc0b47cc7f362ad",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 89,
"avg_line_length": 29.76923076923077,
"alnum_prop": 0.5910852713178295,
"repo_name": "anythingrandom/eclcli",
"id": "2a79eb0806e65a23a4c5a874f562606ee785e1e9",
"size": "1548",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "eclcli/network/v2/colocation_space.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1647657"
}
],
"symlink_target": ""
} |
import mcpi.minecraft as minecraft
import mcpi.block as block
import time
import math
import threading
# We need to "wrap up" our code into something called a class
# Classes are a very powerful way of re using code
# and hiding away code we are not directly interested in
# They are a bit like a blueprint, or cooking recipe
# Once you have a design or recipe
# it is easy to make lots of the same thing using that blueprint
# In our case, we want three lots of firework launches
# If we know how to make one, then all we have to do
# is tell our program we want three of them, all in slightly different places
class Fireworks(threading.Thread):
    """One firework launch (pad, rocket flight, explosion), run in a thread.

    The class is a blueprint: instantiate it once per firework with the x/y/z
    launch coordinates, call start(), and several launches animate at the
    same time.
    """

    def __init__(self, px, py, pz):
        """Store the launch coordinates for use once the thread starts."""
        threading.Thread.__init__(self)
        self.px = px
        self.py = py
        self.pz = pz
        # Per-thread Minecraft connection; created in run().
        self.mc = None

    def run(self):
        # Each thread opens its own connection to the game.
        self.mc = minecraft.Minecraft.create()
        # BUG FIX: launch from the coordinates stored in __init__, not the
        # module globals (the launcher loop keeps mutating those while the
        # threads are starting, so every thread could see the wrong spot).
        self.Setup(self.mc, self.px, self.py, self.pz)

    def DrawSingleCircle(self, px, py, z, diameter, color, mode):
        """Draw (mode == 1) or erase (any other mode) one wool circle.

        The circle is centred on (px, py) in the vertical plane at depth z.
        """
        if mode == 1:
            blockID = block.WOOL.id
        else:
            blockID = block.AIR.id
        endX = 2 * math.pi
        x = 0.01
        while x < endX:
            # Offset by the explosion centre, and convert the floats back to
            # integers because block coordinates are whole numbers.
            dx = int(diameter * math.cos(x)) + px
            dy = int(diameter * math.sin(x)) + py
            self.mc.setBlock(dx, dy, z, blockID, color)
            # A step of ~0.5 radians gives a "spokey" circle and is faster
            # and more realistic than a solid one.
            x = x + 0.5

    def DrawCircles(self, cx, cy, z):
        """Animate an explosion: growing circles, erasing two sizes behind."""
        # Count circles so we know when to start rubbing out the smallest.
        circles_drawn = 0
        # Start with white wool.
        color = 0
        # A big explosion, but not so big that it reaches the ground.
        max_diameter = 18
        for diameter in range(3, max_diameter, 1):
            self.DrawSingleCircle(cx, cy, z, diameter, color, 1)
            circles_drawn = circles_drawn + 1
            if circles_drawn > 2:
                # Rub out the circle drawn two iterations ago.
                self.DrawSingleCircle(cx, cy, z, diameter - 2, color, 0)
            # Wool has 16 different colors (0-15), so recycle them.
            color = color + 1
            if color == 16:
                color = 0
        # Unplot the last two circles left over when the loop finished.
        self.DrawSingleCircle(cx, cy, z, max_diameter - 1, color, 0)
        self.DrawSingleCircle(cx, cy, z, max_diameter - 2, color, 0)

    def LaunchRocket(self, rocketX, rocketY, rocketZ):
        """Animate ignition flames, fly the rocket upwards, then explode."""
        time.sleep(1)
        # Wool colors cycled through for the launch flames.
        flames = [14, 1, 4, 10]
        flame_colors = len(flames)
        for count in range(0, 10):
            for flame in range(0, flame_colors):
                self.mc.setBlocks(
                    rocketX - 1,
                    rocketY,
                    rocketZ - 1,
                    rocketX + 1,
                    rocketY,
                    rocketZ + 1,
                    block.WOOL.id,
                    flames[flame])
                time.sleep(.1)
        # Clear the flames with a 3x3 block of air and light the rocket.
        self.mc.setBlock(rocketX, rocketY + 1, rocketZ, block.GLOWING_OBSIDIAN)
        self.mc.setBlocks(
            rocketX - 1,
            rocketY,
            rocketZ - 1,
            rocketX + 1,
            rocketY,
            rocketZ + 1,
            block.AIR)
        rocketY = rocketY + 1
        # Fly upwards: draw the rocket, then erase the block below it so it
        # appears to move.  The flight tops out 20 blocks above the start.
        max_height = rocketY + 19
        while rocketY < max_height:
            self.mc.setBlock(rocketX, rocketY, rocketZ, block.GLOWING_OBSIDIAN)
            self.mc.setBlock(rocketX, rocketY + 1, rocketZ, block.FURNACE_ACTIVE.id)
            lastY = rocketY
            time.sleep(0.1)
            self.mc.setBlock(rocketX, lastY - 1, rocketZ, block.AIR.id)
            rocketY = rocketY + 1
        # Remove the last rocket blocks, ready for the explosion.
        self.mc.setBlock(rocketX, lastY + 1, rocketZ, 0)
        self.mc.setBlock(rocketX, lastY, rocketZ, 0)
        time.sleep(0.05)
        # Explode where the rocket finished.
        self.DrawCircles(rocketX, rocketY, rocketZ)

    def Setup(self, mc, x, y, z):
        """Build a launch pad a safe distance from (x, y, z) and launch."""
        # Launch the rocket from a safe distance :-)
        rocketX = x + 6
        rocketY = y
        rocketZ = z + 7
        # A small stone platform to launch from.
        mc.setBlocks(
            rocketX - 2,
            rocketY,
            rocketZ - 2,
            rocketX + 2,
            rocketY,
            rocketZ + 2,
            block.STONE.id)
        # A single stone block for the rocket to sit on.
        mc.setBlock(rocketX, rocketY + 1, rocketZ, block.STONE.id)
        rocketY = rocketY + 1
        # A furnace stands in for the rocket itself; change as you wish.
        mc.setBlock(rocketX, rocketY + 1, rocketZ, block.FURNACE_ACTIVE.id)
        time.sleep(4)
        mc.setBlock(rocketX, rocketY, rocketZ, block.GLOWING_OBSIDIAN)
        time.sleep(1)
        self.LaunchRocket(rocketX, rocketY, rocketZ)
def checkLocation():
    """Pull the player back towards the origin near the world's edges.

    Mutates the module-level px/pz coordinates in place.  (The call site
    below is currently commented out.)
    """
    global px, pz
    # If we are close to the edges of the world, move us in a bit
    # by scaling the coordinate towards zero.
    if (px > 100 or px < -100):
        print "shrinking player x"
        px = int(px * 0.8)
    if (pz > 100 or pz < -100):
        pz = int(pz * 0.8)
        print "shrinking player z"
# Connect to the game and find out where the player is standing.
mc = minecraft.Minecraft.create()
pos = mc.player.getTilePos()
px = pos.x
py = pos.y
pz = pos.z
#checkLocation()
mc.player.setPos(px, py, pz)

# Clean up the world and any previous circles nearby.
mc.setBlocks(px - 20, py, pz - 20, px + 20, py + 40, pz + 10, block.AIR.id)
# Set up a grass floor.
mc.setBlocks(px - 20, py - 1, pz - 20, px + 20, py, pz + 20, block.GRASS.id)
time.sleep(.5)

# Start three firework threads, with a slight pause between them.
# BUG FIX: the original had a bare "firework.daemon" statement here, which
# reads the attribute and does nothing.  It has been removed: the threads
# stay non-daemon, so the program waits for every firework to finish.
for launchCount in range(0, 3):
    firework = Fireworks(px, py, pz)
    firework.start()
    time.sleep(.15)
    # Each firework will be 6 blocks to the right of the previous one.
    px = px + 6
time.sleep(2)
| {
"content_hash": "3dd7facada744b57a1cedfedf5f08c3d",
"timestamp": "",
"source": "github",
"line_count": 201,
"max_line_length": 115,
"avg_line_length": 36.46268656716418,
"alnum_prop": 0.5906672124437168,
"repo_name": "joedeller/pymine",
"id": "b469f8f1af728187979ebba127c70939a483e351",
"size": "8696",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fireworkV4.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "322778"
}
],
"symlink_target": ""
} |
import unittest
import os
import tempfile
import numpy as np
from numpy import ma
from numpy.testing import assert_array_equal
from netCDF4 import Dataset, __netcdf4libversion__
# Test use of vector of missing values.
class VectorMissingValues(unittest.TestCase):
    """Tests for vector-valued ``missing_value`` attributes, plus issue 730
    (fill_value on vlen string variables)."""

    def setUp(self):
        # Scratch netCDF file; removed again in tearDown.
        self.testfile = tempfile.NamedTemporaryFile(suffix='.nc', delete=False).name
        self.missing_values = [-999,999,0]
        self.v = np.array([-999,0,1,2,3,999], dtype = "i2")
        # Expected masked view of self.v: every element whose value appears
        # in missing_values is masked.
        self.v_ma = ma.array([-1,0,1,2,3,4], dtype = "i2", \
                             mask = [True, True, False, False, False, True])

        f = Dataset(self.testfile, 'w')
        d = f.createDimension('x',6)
        v = f.createVariable('v', "i2", 'x')
        # issue 730: set fill_value for vlen str vars
        v2 = f.createVariable('v2', str, 'x', fill_value='<missing>')

        v.missing_value = self.missing_values
        v[:] = self.v
        v2[0]='first'

        f.close()

    def tearDown(self):
        # Remove the scratch file created in setUp.
        os.remove(self.testfile)

    def test_scaled(self):
        """Testing auto-conversion of masked arrays"""
        f = Dataset(self.testfile)
        v = f.variables["v"]
        v2 = f.variables["v2"]
        # With auto-masking on (the default), reads yield a masked array.
        self.assertTrue(isinstance(v[:], ma.core.MaskedArray))
        assert_array_equal(v[:], self.v_ma)
        assert_array_equal(v[2],self.v[2]) # issue #624.
        # With auto-masking off, reads yield the raw stored values.
        v.set_auto_mask(False)
        self.assertTrue(isinstance(v[:], np.ndarray))
        assert_array_equal(v[:], self.v)
        # issue 730
        # this part fails with netcdf 4.1.3
        # a bug in vlen strings?
        if __netcdf4libversion__ >= '4.4.0':
            assert v2[0] == 'first'
            assert v2[1] == '<missing>'
        f.close()
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "688063e208ddc9587960727ef0412a61",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 84,
"avg_line_length": 26.671641791044777,
"alnum_prop": 0.570229434806939,
"repo_name": "Unidata/netcdf4-python",
"id": "3d8dba4db144ad61e706c53cb37708df8030c455",
"size": "1787",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/tst_masked5.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "425"
},
{
"name": "Cython",
"bytes": "334106"
},
{
"name": "Python",
"bytes": "296829"
},
{
"name": "Shell",
"bytes": "469"
}
],
"symlink_target": ""
} |
from telemetry.core.platform import proc_supporting_platform_backend
from telemetry.core.platform import ps_util
class CrosPlatformBackend(
    proc_supporting_platform_backend.ProcSupportingPlatformBackend):
  """Chrome OS platform backend that runs its commands over a CrOS remote."""

  def __init__(self, cri):
    super(CrosPlatformBackend, self).__init__()
    self._cri = cri

  def StartRawDisplayFrameRateMeasurement(self):
    raise NotImplementedError()

  def StopRawDisplayFrameRateMeasurement(self):
    raise NotImplementedError()

  def GetRawDisplayFrameRateMeasurements(self):
    raise NotImplementedError()

  def IsThermallyThrottled(self):
    raise NotImplementedError()

  def HasBeenThermallyThrottled(self):
    raise NotImplementedError()

  def _RunCommand(self, args):
    # RunCmdOnDevice returns (stdout, stderr); only stdout is needed.
    stdout = self._cri.RunCmdOnDevice(args)[0]
    return stdout

  def _GetFileContents(self, filename):
    # Read a file on the device; an unreadable file yields ''.
    try:
      contents = self._cri.RunCmdOnDevice(['cat', filename])[0]
    except AssertionError:
      contents = ''
    return contents

  def GetIOStats(self, pid):
    # There is no '/proc/<pid>/io' file on CrOS platforms.
    # Returns empty dict as it does in PlatformBackend.
    return {}

  def GetOSName(self):
    return 'chromeos'

  def GetOSVersionName(self):
    return ''  # TODO: Implement this.

  def GetChildPids(self, pid):
    """Returns a list of child pids of |pid|."""
    # Drop the unused second field of each process tuple before delegating.
    triples = [(curr_pid, curr_ppid, curr_state)
               for curr_pid, _, curr_ppid, curr_state
               in self._cri.ListProcesses()]
    return ps_util.GetChildPids(triples, pid)

  def GetCommandLine(self, pid):
    # Return the command line of |pid|, or None if it is not running.
    for proc in self._cri.ListProcesses():
      if proc[0] == pid:
        return proc[1]
    return None

  def CanFlushIndividualFilesFromSystemCache(self):
    return True

  def FlushEntireSystemCache(self):
    raise NotImplementedError()

  def FlushSystemCacheForDirectory(self, directory, ignoring=None):
    raise NotImplementedError()
| {
"content_hash": "b0eb6fbe15322b7a5fff8974476f9318",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 76,
"avg_line_length": 28.923076923076923,
"alnum_prop": 0.7154255319148937,
"repo_name": "patrickm/chromium.src",
"id": "81f5b002bd804bf9d76d5e9c2ca19760fc95f151",
"size": "2047",
"binary": false,
"copies": "4",
"ref": "refs/heads/nw",
"path": "tools/telemetry/telemetry/core/platform/cros_platform_backend.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "853"
},
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "52960"
},
{
"name": "Awk",
"bytes": "8660"
},
{
"name": "C",
"bytes": "40737238"
},
{
"name": "C#",
"bytes": "1132"
},
{
"name": "C++",
"bytes": "207930633"
},
{
"name": "CSS",
"bytes": "939170"
},
{
"name": "Java",
"bytes": "5844934"
},
{
"name": "JavaScript",
"bytes": "17837835"
},
{
"name": "Mercury",
"bytes": "10533"
},
{
"name": "Objective-C",
"bytes": "886228"
},
{
"name": "Objective-C++",
"bytes": "6667789"
},
{
"name": "PHP",
"bytes": "97817"
},
{
"name": "Perl",
"bytes": "672770"
},
{
"name": "Python",
"bytes": "10857933"
},
{
"name": "Rebol",
"bytes": "262"
},
{
"name": "Shell",
"bytes": "1326032"
},
{
"name": "Tcl",
"bytes": "277091"
},
{
"name": "XSLT",
"bytes": "13493"
},
{
"name": "nesC",
"bytes": "15206"
}
],
"symlink_target": ""
} |
from django.utils.translation import get_language
from django.views.generic import DetailView, ListView
from parler.views import TranslatableSlugMixin
from .models import Article
class BaseArticleMixin:
    """Restrict any article queryset to published articles only."""

    def get_queryset(self):
        base_qs = super().get_queryset()
        return base_qs.filter(published=True)
class ArticleListView(BaseArticleMixin, ListView):
    """Published articles that have a translation in the active language."""

    model = Article
    template_name = "article/list.html"

    def get_queryset(self):
        # Only show objects translated into the request's current language.
        active_language = get_language()
        qs = super().get_queryset()
        return qs.filter(translations__language_code=active_language)
class ArticleDetailView(BaseArticleMixin, TranslatableSlugMixin, DetailView):
    """Detail page for a published article, resolved via its translated slug."""
    model = Article
    template_name = "article/details.html"  # This works as expected
| {
"content_hash": "40def5e1e7f890fca450006ef34de2b9",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 82,
"avg_line_length": 29.464285714285715,
"alnum_prop": 0.7442424242424243,
"repo_name": "django-parler/django-parler",
"id": "9bac1389de59fc4f7d4effacd6f6513504ae2652",
"size": "825",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "example/article/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "5639"
},
{
"name": "HTML",
"bytes": "5814"
},
{
"name": "Python",
"bytes": "262492"
}
],
"symlink_target": ""
} |
from enum import Enum
class NotFoundException(Exception):
    """Raised when a requested item cannot be found."""
class TooManyResultsException(Exception):
    """Raised when a query matches more results than expected."""
class VirtualMachineGeneration(str, Enum):
    """Hyper-V virtual machine generation (sub-type) identifier strings."""
    GEN1 = "Microsoft:Hyper-V:SubType:1"
    GEN2 = "Microsoft:Hyper-V:SubType:2"
class VirtualMachineState(int, Enum):
    """Coarse virtual machine run states; UNDEFINED marks an unknown state."""
    UNDEFINED = -1
    RUNNING = 0
    STOPPED = 1
    SAVED = 2
    PAUSED = 3
    ERROR = 4
class ComPort(int, Enum):
    """Zero-based indexes of a virtual machine's two serial (COM) ports."""
    COM1 = 0
    COM2 = 1
| {
"content_hash": "b95d1d9c019cc55633c1705f4f4bf010",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 42,
"avg_line_length": 14.75,
"alnum_prop": 0.6997578692493946,
"repo_name": "crashtua/hvapi",
"id": "458d2e4d369680265952a93ada9f63aecceb4dfa",
"size": "1552",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hvapi/types.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "73894"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import wrapt
import opentracing
import types
from ..log import logger
from ..singletons import tracer
from ..util.traceutils import get_active_tracer
try:
import pika
def _extract_broker_tags(span, conn):
span.set_tag("address", "%s:%d" % (conn.params.host, conn.params.port))
def _extract_publisher_tags(span, conn, exchange, routing_key):
    """Tag *span* as an outgoing (publish) RabbitMQ operation."""
    _extract_broker_tags(span, conn)
    for tag_name, tag_value in (("sort", "publish"),
                                ("key", routing_key),
                                ("exchange", exchange)):
        span.set_tag(tag_name, tag_value)
def _extract_consumer_tags(span, conn, queue):
    """Tag *span* as an incoming (consume) RabbitMQ operation."""
    _extract_broker_tags(span, conn)
    for tag_name, tag_value in (("sort", "consume"),
                                ("queue", queue)):
        span.set_tag(tag_name, tag_value)
@wrapt.patch_function_wrapper('pika.channel', 'Channel.basic_publish')
def basic_publish_with_instana(wrapped, instance, args, kwargs):
    """Trace Channel.basic_publish and inject trace context into headers."""

    def _bind_args(exchange, routing_key, body, properties=None, *args, **kwargs):
        # Normalize however the caller mixed positional/keyword arguments.
        return (exchange, routing_key, body, properties, args, kwargs)

    active_tracer = get_active_tracer()

    # If we're not tracing, just pass the call through untouched.
    if active_tracer is None:
        return wrapped(*args, **kwargs)

    (exchange, routing_key, body, properties, args, kwargs) = (_bind_args(*args, **kwargs))

    with tracer.start_active_span("rabbitmq", child_of=active_tracer.active_span) as scope:
        try:
            _extract_publisher_tags(scope.span,
                                    conn=instance.connection,
                                    routing_key=routing_key,
                                    exchange=exchange)
        except:
            # Tagging failures must never break the publish itself.
            logger.debug("publish_with_instana: ", exc_info=True)

        # context propagation: carry the trace context in the message headers
        # so the consumer can continue the same trace.
        properties = properties or pika.BasicProperties()
        properties.headers = properties.headers or {}

        tracer.inject(scope.span.context, opentracing.Format.HTTP_HEADERS, properties.headers,
                      disable_w3c_trace_context=True)

        # Re-pack the (possibly augmented) properties into positional args.
        args = (exchange, routing_key, body, properties) + args

        try:
            rv = wrapped(*args, **kwargs)
        except Exception as e:
            scope.span.log_exception(e)
            raise
        else:
            return rv
    def basic_get_with_instana(wrapped, instance, args, kwargs):
        """Trace deliveries for Channel.basic_get / Channel.basic_consume.

        Extracts (queue, callback) from the call and substitutes a callback
        that continues the trace carried in the incoming message's headers.
        Registered for both entry points via wrap_function_wrapper below.
        """
        def _bind_args(*args, **kwargs):
            # Normalize call forms: basic_get passes 'callback', basic_consume
            # passes 'on_message_callback'; either may be positional.
            args = list(args)
            queue = kwargs.pop('queue', None) or args.pop(0)
            callback = kwargs.pop('callback', None) or kwargs.pop('on_message_callback', None) or args.pop(0)
            return (queue, callback, tuple(args), kwargs)
        queue, callback, args, kwargs = _bind_args(*args, **kwargs)
        def _cb_wrapper(channel, method, properties, body):
            # Continue the trace the publisher injected into the headers.
            parent_span = tracer.extract(opentracing.Format.HTTP_HEADERS, properties.headers,
                                         disable_w3c_trace_context=True)
            with tracer.start_active_span("rabbitmq", child_of=parent_span) as scope:
                try:
                    _extract_consumer_tags(scope.span,
                                           conn=instance.connection,
                                           queue=queue)
                except:
                    # Tagging failures must not interfere with delivery.
                    logger.debug("basic_get_with_instana: ", exc_info=True)
                try:
                    callback(channel, method, properties, body)
                except Exception as e:
                    scope.span.log_exception(e)
                    raise
        args = (queue, _cb_wrapper) + args
        return wrapped(*args, **kwargs)
    @wrapt.patch_function_wrapper('pika.adapters.blocking_connection', 'BlockingChannel.basic_consume')
    def basic_consume_with_instana(wrapped, instance, args, kwargs):
        """Trace messages delivered via BlockingChannel.basic_consume.

        Wraps the caller's on_message_callback so each delivery runs inside a
        "rabbitmq" consume span continued from the message headers.
        """
        def _bind_args(queue, on_message_callback, *args, **kwargs):
            # Normalize positional/keyword call forms into one fixed tuple.
            return (queue, on_message_callback, args, kwargs)
        queue, on_message_callback, args, kwargs = _bind_args(*args, **kwargs)
        def _cb_wrapper(channel, method, properties, body):
            # Continue the trace the publisher injected into the headers.
            parent_span = tracer.extract(opentracing.Format.HTTP_HEADERS, properties.headers,
                                         disable_w3c_trace_context=True)
            with tracer.start_active_span("rabbitmq", child_of=parent_span) as scope:
                try:
                    # BlockingChannel delegates to an inner channel (_impl).
                    _extract_consumer_tags(scope.span,
                                           conn=instance.connection._impl,
                                           queue=queue)
                except:
                    # Tagging failures must not interfere with delivery.
                    logger.debug("basic_consume_with_instana: ", exc_info=True)
                try:
                    on_message_callback(channel, method, properties, body)
                except Exception as e:
                    scope.span.log_exception(e)
                    raise
        args = (queue, _cb_wrapper) + args
        return wrapped(*args, **kwargs)
    @wrapt.patch_function_wrapper('pika.adapters.blocking_connection', 'BlockingChannel.consume')
    def consume_with_instana(wrapped, instance, args, kwargs):
        """Trace messages yielded by the BlockingChannel.consume generator.

        When the wrapped call returns a generator, re-yield every delivery
        from inside a "rabbitmq" consume span continued from its headers.
        """
        def _bind_args(queue, *args, **kwargs):
            # Normalize positional/keyword call forms into one fixed tuple.
            return (queue, args, kwargs)
        (queue, args, kwargs) = (_bind_args(*args, **kwargs))
        def _consume(gen):
            for yilded in gen:
                # Bypass the delivery created due to inactivity timeout
                if yilded is None or not any(yilded):
                    yield yilded
                    continue
                (method_frame, properties, body) = yilded
                # Continue the trace the publisher injected into the headers.
                parent_span = tracer.extract(opentracing.Format.HTTP_HEADERS, properties.headers,
                                             disable_w3c_trace_context=True)
                with tracer.start_active_span("rabbitmq", child_of=parent_span) as scope:
                    try:
                        _extract_consumer_tags(scope.span,
                                               conn=instance.connection._impl,
                                               queue=queue)
                    except:
                        # Tagging failures must not interfere with delivery.
                        logger.debug("consume_with_instana: ", exc_info=True)
                    try:
                        # Yield inside the span so consumer-side exceptions
                        # thrown into the generator are logged on the span.
                        yield yilded
                    except Exception as e:
                        scope.span.log_exception(e)
                        raise
        args = (queue,) + args
        res = wrapped(*args, **kwargs)
        if isinstance(res, types.GeneratorType):
            return _consume(res)
        else:
            return res
    @wrapt.patch_function_wrapper('pika.adapters.blocking_connection', 'BlockingChannel.__init__')
    def _BlockingChannel___init__(wrapped, instance, args, kwargs):
        """Undo Channel-level consume instrumentation on blocking channels.

        BlockingChannel is instrumented separately, so unwrap the inner
        channel's patched basic_consume to avoid double tracing.
        """
        ret = wrapped(*args, **kwargs)
        impl = getattr(instance, '_impl', None)
        if impl and hasattr(impl.basic_consume, '__wrapped__'):
            # Restore the original (unpatched) method on the inner channel.
            impl.basic_consume = impl.basic_consume.__wrapped__
        return ret
    # Channel.basic_get and Channel.basic_consume share the (queue, callback)
    # calling convention, so the same wrapper handles both.
    wrapt.wrap_function_wrapper('pika.channel', 'Channel.basic_get', basic_get_with_instana)
    wrapt.wrap_function_wrapper('pika.channel', 'Channel.basic_consume', basic_get_with_instana)
    logger.debug("Instrumenting pika")
except ImportError:
    # pika is not installed; skip instrumentation entirely.
    pass
| {
"content_hash": "8291cb5ef3383acc0c8b513906dc0666",
"timestamp": "",
"source": "github",
"line_count": 188,
"max_line_length": 109,
"avg_line_length": 38.3563829787234,
"alnum_prop": 0.549022327000416,
"repo_name": "instana/python-sensor",
"id": "845ff9fee148199470818e6f482d0be71d18a268",
"size": "7294",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "instana/instrumentation/pika.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "419"
},
{
"name": "HTML",
"bytes": "154"
},
{
"name": "Python",
"bytes": "1056302"
}
],
"symlink_target": ""
} |
import sys
import os

# Sphinx configuration for the python-neutronclient documentation build.
project = 'python-neutronclient'
# -- General configuration ---------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
copyright = u'OpenStack LLC'
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output ---------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'nature'
# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % project
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author,
# documentclass [howto/manual]).
latex_documents = [
    ('index',
     '%s.tex' % project,
     u'%s Documentation' % project,
     u'OpenStack LLC', 'manual'),
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| {
"content_hash": "18e15fdef1d1c8e4ac43add643529a4b",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 78,
"avg_line_length": 30.509090909090908,
"alnum_prop": 0.6853396901072706,
"repo_name": "metacloud/python-neutronclient",
"id": "1d6023e2dbdf330eee21c2fb19f25639e0ada5df",
"size": "1705",
"binary": false,
"copies": "3",
"ref": "refs/heads/mc/2013.1.7",
"path": "doc/source/conf.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "602510"
},
{
"name": "Shell",
"bytes": "5278"
}
],
"symlink_target": ""
} |
"""Auto-generated file, do not edit by hand. UA metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_UA = PhoneMetadata(id='UA', country_code=None, international_prefix=None,
general_desc=PhoneNumberDesc(national_number_pattern='1\\d{2}', possible_number_pattern='\\d{3}', possible_length=(3,)),
toll_free=PhoneNumberDesc(),
premium_rate=PhoneNumberDesc(),
emergency=PhoneNumberDesc(national_number_pattern='1(?:0[123]|12)', possible_number_pattern='\\d{3}', example_number='112', possible_length=(3,)),
short_code=PhoneNumberDesc(national_number_pattern='1(?:0[123]|12)', possible_number_pattern='\\d{3}', example_number='112', possible_length=(3,)),
standard_rate=PhoneNumberDesc(),
carrier_specific=PhoneNumberDesc(),
short_data=True)
| {
"content_hash": "cb60ca81cda8dec8973c3899619313e1",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 151,
"avg_line_length": 68.16666666666667,
"alnum_prop": 0.726161369193154,
"repo_name": "vicky2135/lucious",
"id": "c61eb96df02fc12b30bf115590491f9138dbe6e1",
"size": "818",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "oscar/lib/python2.7/site-packages/phonenumbers/shortdata/region_UA.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "896683"
},
{
"name": "C++",
"bytes": "52230"
},
{
"name": "CSS",
"bytes": "1169533"
},
{
"name": "HTML",
"bytes": "1104983"
},
{
"name": "JavaScript",
"bytes": "1055140"
},
{
"name": "Makefile",
"bytes": "145238"
},
{
"name": "Python",
"bytes": "55993261"
},
{
"name": "Shell",
"bytes": "40487"
}
],
"symlink_target": ""
} |
import mock
import os.path
from neutron.agent.linux import external_process as ep
from neutron.common import utils as common_utils
from neutron.tests import base
from neutron.tests import tools
TEST_UUID = 'test-uuid'
TEST_SERVICE = 'testsvc'
TEST_PID = 1234
class BaseTestProcessMonitor(base.BaseTestCase):
    """Shared fixture: a ProcessMonitor with patched logging and spawning."""

    def setUp(self):
        super(BaseTestProcessMonitor, self).setUp()
        # Capture error logging so tests can assert on it.
        self.log_patch = mock.patch("neutron.agent.linux.external_process."
                                    "LOG.error")
        self.error_log = self.log_patch.start()
        # Prevent the monitor from actually spawning greenthreads.
        self.spawn_patch = mock.patch("eventlet.spawn")
        self.eventlent_spawn = self.spawn_patch.start()
        # create a default process monitor
        self.create_child_process_monitor('respawn')

    def create_child_process_monitor(self, action):
        """Build self.pmonitor configured with the given check action."""
        conf = mock.Mock()
        conf.AGENT.check_child_processes_action = action
        conf.AGENT.check_child_processes = True
        self.pmonitor = ep.ProcessMonitor(
            config=conf,
            resource_type='test')

    def get_monitored_process(self, uuid, service=None):
        """Register and return a mock monitored process for (uuid, service)."""
        monitored_process = mock.Mock()
        self.pmonitor.register(uuid=uuid,
                               service_name=service,
                               monitored_process=monitored_process)
        return monitored_process
class TestProcessMonitor(BaseTestProcessMonitor):
    """Tests for ProcessMonitor registration and dead-child handling."""

    def test_error_logged(self):
        # A registered process reported inactive must trigger LOG.error.
        pm = self.get_monitored_process(TEST_UUID)
        pm.active = False
        self.pmonitor._check_child_processes()
        self.assertTrue(self.error_log.called)

    def test_exit_handler(self):
        # With action 'exit', a dead child invokes the exit handler.
        self.create_child_process_monitor('exit')
        pm = self.get_monitored_process(TEST_UUID)
        pm.active = False
        with mock.patch.object(ep.ProcessMonitor,
                               '_exit_handler') as exit_handler:
            self.pmonitor._check_child_processes()
            exit_handler.assert_called_once_with(TEST_UUID, None)

    def test_register(self):
        pm = self.get_monitored_process(TEST_UUID)
        self.assertEqual(len(self.pmonitor._monitored_processes), 1)
        self.assertIn(pm, self.pmonitor._monitored_processes.values())

    def test_register_same_service_twice(self):
        # Registering the same (uuid, service) twice keeps a single entry.
        self.get_monitored_process(TEST_UUID)
        self.get_monitored_process(TEST_UUID)
        self.assertEqual(len(self.pmonitor._monitored_processes), 1)

    def test_register_different_service_types(self):
        # Distinct service names under one uuid are tracked separately.
        self.get_monitored_process(TEST_UUID)
        self.get_monitored_process(TEST_UUID, TEST_SERVICE)
        self.assertEqual(len(self.pmonitor._monitored_processes), 2)

    def test_unregister(self):
        self.get_monitored_process(TEST_UUID)
        self.pmonitor.unregister(TEST_UUID, None)
        self.assertEqual(len(self.pmonitor._monitored_processes), 0)

    def test_unregister_unknown_process(self):
        # Unregistering something never registered is a no-op, not an error.
        self.pmonitor.unregister(TEST_UUID, None)
        self.assertEqual(len(self.pmonitor._monitored_processes), 0)
class TestProcessManager(base.BaseTestCase):
    """Tests for ep.ProcessManager: pid files, enable/disable, liveness."""

    def setUp(self):
        super(TestProcessManager, self).setUp()
        # Stub out command execution and filesystem helpers.
        self.execute_p = mock.patch('neutron.agent.common.utils.execute')
        self.execute = self.execute_p.start()
        self.delete_if_exists = mock.patch(
            'oslo_utils.fileutils.delete_if_exists').start()
        self.ensure_dir = mock.patch.object(
            common_utils, 'ensure_dir').start()

        self.conf = mock.Mock()
        self.conf.external_pids = '/var/path'

    def test_processmanager_ensures_pid_dir(self):
        # Constructing a manager must create the pid-file directory.
        pid_file = os.path.join(self.conf.external_pids, 'pid')
        ep.ProcessManager(self.conf, 'uuid', pid_file=pid_file)
        self.ensure_dir.assert_called_once_with(self.conf.external_pids)

    def test_enable_no_namespace(self):
        callback = mock.Mock()
        callback.return_value = ['the', 'cmd']

        with mock.patch.object(ep.ProcessManager, 'get_pid_file_name') as name:
            name.return_value = 'pidfile'
            with mock.patch.object(ep.ProcessManager, 'active') as active:
                active.__get__ = mock.Mock(return_value=False)

                manager = ep.ProcessManager(self.conf, 'uuid')
                manager.enable(callback)
                # The callback builds the command from the pid file name.
                callback.assert_called_once_with('pidfile')
                self.execute.assert_called_once_with(['the', 'cmd'],
                                                     check_exit_code=True,
                                                     extra_ok_codes=None,
                                                     run_as_root=False,
                                                     log_fail_as_error=True)

    def test_enable_with_namespace(self):
        callback = mock.Mock()
        callback.return_value = ['the', 'cmd']

        with mock.patch.object(ep.ProcessManager, 'get_pid_file_name') as name:
            name.return_value = 'pidfile'
            with mock.patch.object(ep.ProcessManager, 'active') as active:
                active.__get__ = mock.Mock(return_value=False)

                manager = ep.ProcessManager(self.conf, 'uuid', namespace='ns')
                with mock.patch.object(ep, 'ip_lib') as ip_lib:
                    manager.enable(callback)
                    callback.assert_called_once_with('pidfile')
                    # Namespaced commands go through IPWrapper.netns.execute.
                    ip_lib.assert_has_calls([
                        mock.call.IPWrapper(namespace='ns'),
                        mock.call.IPWrapper().netns.execute(
                            ['the', 'cmd'], addl_env=None, run_as_root=False)])

    def test_enable_with_namespace_process_active(self):
        # enable() is a no-op when the process is already active.
        callback = mock.Mock()
        callback.return_value = ['the', 'cmd']

        with mock.patch.object(ep.ProcessManager, 'active') as active:
            active.__get__ = mock.Mock(return_value=True)

            manager = ep.ProcessManager(self.conf, 'uuid', namespace='ns')
            with mock.patch.object(ep, 'ip_lib'):
                manager.enable(callback)
                self.assertFalse(callback.called)

    def test_disable_no_namespace(self):
        with mock.patch.object(ep.ProcessManager, 'pid') as pid:
            pid.__get__ = mock.Mock(return_value=4)
            with mock.patch.object(ep.ProcessManager, 'active') as active:
                active.__get__ = mock.Mock(return_value=True)
                manager = ep.ProcessManager(self.conf, 'uuid')

                with mock.patch.object(ep, 'utils') as utils:
                    manager.disable()
                    # Active process gets SIGKILLed by pid.
                    utils.assert_has_calls([
                        mock.call.execute(['kill', '-9', 4],
                                          run_as_root=True)])

    def test_disable_namespace(self):
        with mock.patch.object(ep.ProcessManager, 'pid') as pid:
            pid.__get__ = mock.Mock(return_value=4)
            with mock.patch.object(ep.ProcessManager, 'active') as active:
                active.__get__ = mock.Mock(return_value=True)

                manager = ep.ProcessManager(self.conf, 'uuid', namespace='ns')

                with mock.patch.object(ep, 'utils') as utils:
                    manager.disable()
                    utils.assert_has_calls([
                        mock.call.execute(['kill', '-9', 4],
                                          run_as_root=True)])

    def test_disable_not_active(self):
        with mock.patch.object(ep.ProcessManager, 'pid') as pid:
            pid.__get__ = mock.Mock(return_value=4)
            with mock.patch.object(ep.ProcessManager, 'active') as active:
                active.__get__ = mock.Mock(return_value=False)
                with mock.patch.object(ep.LOG, 'debug') as debug:
                    manager = ep.ProcessManager(self.conf, 'uuid')
                    manager.disable()
                    # Inactive process: nothing killed, only a debug log.
                    debug.assert_called_once_with(mock.ANY, mock.ANY)

    def test_disable_no_pid(self):
        with mock.patch.object(ep.ProcessManager, 'pid') as pid:
            pid.__get__ = mock.Mock(return_value=None)
            with mock.patch.object(ep.ProcessManager, 'active') as active:
                active.__get__ = mock.Mock(return_value=False)
                with mock.patch.object(ep.LOG, 'debug') as debug:
                    manager = ep.ProcessManager(self.conf, 'uuid')
                    manager.disable()
                    debug.assert_called_once_with(mock.ANY, mock.ANY)

    def test_get_pid_file_name_default(self):
        manager = ep.ProcessManager(self.conf, 'uuid')
        retval = manager.get_pid_file_name()
        self.assertEqual(retval, '/var/path/uuid.pid')

    def test_pid(self):
        self.useFixture(tools.OpenFixture('/var/path/uuid.pid', '5'))
        manager = ep.ProcessManager(self.conf, 'uuid')
        self.assertEqual(manager.pid, 5)

    def test_pid_no_an_int(self):
        # Non-numeric pid file content yields pid None.
        self.useFixture(tools.OpenFixture('/var/path/uuid.pid', 'foo'))
        manager = ep.ProcessManager(self.conf, 'uuid')
        self.assertIsNone(manager.pid)

    def test_pid_invalid_file(self):
        with mock.patch.object(ep.ProcessManager, 'get_pid_file_name') as name:
            name.return_value = '.doesnotexist/pid'
            manager = ep.ProcessManager(self.conf, 'uuid')
            self.assertIsNone(manager.pid)

    def test_active(self):
        # active is True when /proc/<pid>/cmdline contains the uuid.
        mock_open = self.useFixture(
            tools.OpenFixture('/proc/4/cmdline', 'python foo --router_id=uuid')
        ).mock_open
        with mock.patch.object(ep.ProcessManager, 'pid') as pid:
            pid.__get__ = mock.Mock(return_value=4)
            manager = ep.ProcessManager(self.conf, 'uuid')
            self.assertTrue(manager.active)

        mock_open.assert_called_once_with('/proc/4/cmdline', 'r')

    def test_active_none(self):
        dummy_cmd_line = 'python foo --router_id=uuid'
        self.execute.return_value = dummy_cmd_line
        with mock.patch.object(ep.ProcessManager, 'pid') as pid:
            pid.__get__ = mock.Mock(return_value=None)
            manager = ep.ProcessManager(self.conf, 'uuid')
            self.assertFalse(manager.active)

    def test_active_cmd_mismatch(self):
        # A different uuid on the cmdline means the pid was recycled.
        mock_open = self.useFixture(
            tools.OpenFixture('/proc/4/cmdline',
                              'python foo --router_id=anotherid')
        ).mock_open
        with mock.patch.object(ep.ProcessManager, 'pid') as pid:
            pid.__get__ = mock.Mock(return_value=4)
            manager = ep.ProcessManager(self.conf, 'uuid')
            self.assertFalse(manager.active)

        mock_open.assert_called_once_with('/proc/4/cmdline', 'r')
| {
"content_hash": "f5d2578623687235d66d8d015aed545e",
"timestamp": "",
"source": "github",
"line_count": 250,
"max_line_length": 79,
"avg_line_length": 42.368,
"alnum_prop": 0.5879909365558912,
"repo_name": "apporc/neutron",
"id": "e5ff67211c19a7f104f986c84e32c14d0624a826",
"size": "11219",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "neutron/tests/unit/agent/linux/test_external_process.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1047"
},
{
"name": "Python",
"bytes": "7630810"
},
{
"name": "Shell",
"bytes": "13134"
}
],
"symlink_target": ""
} |
"""add_policy
Revision ID: 532e9e1f0f3a
Revises: initial_db
Create Date: 2014-12-18 14:52:20.402861
"""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
# revision identifiers, used by Alembic.
revision = '532e9e1f0f3a'
down_revision = 'initial_db'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the 'policies' table."""
    op.create_table('policies',
                    sa.Column('id', sa.String(length=36), nullable=False),
                    sa.Column('created_at', sa.DateTime(), nullable=False),
                    sa.Column('updated_at', sa.DateTime(), nullable=True),
                    sa.Column('deleted_at', sa.DateTime(), nullable=True),
                    sa.Column('deleted', sa.String(length=36),
                              server_default='', nullable=True),
                    sa.Column('name', sa.Text(), nullable=False),
                    sa.Column('abbreviation', sa.String(length=5),
                              nullable=False),
                    sa.Column('description', sa.Text(), nullable=False),
                    sa.Column('owner', sa.Text(), nullable=False),
                    sa.Column('kind', sa.Text(), nullable=False),
                    sa.PrimaryKeyConstraint('id'),
                    mysql_engine='InnoDB')
def downgrade():
    """Drop the 'policies' table created by upgrade()."""
    op.drop_table('policies')
| {
"content_hash": "ae6af6bd2185e4372cfde16253cc5f32",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 75,
"avg_line_length": 35.05128205128205,
"alnum_prop": 0.5618141916605706,
"repo_name": "ramineni/my_congress",
"id": "e96e585c1f300651684f2f064f59007fd5cc8a89",
"size": "1982",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "congress/db/migration/alembic_migrations/versions/532e9e1f0f3a_add_policy.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2744"
},
{
"name": "GAP",
"bytes": "7778"
},
{
"name": "HTML",
"bytes": "19549"
},
{
"name": "JavaScript",
"bytes": "9896"
},
{
"name": "Makefile",
"bytes": "228"
},
{
"name": "Mako",
"bytes": "1043"
},
{
"name": "Python",
"bytes": "2416507"
},
{
"name": "Shell",
"bytes": "26177"
}
],
"symlink_target": ""
} |
import time
import http.server
import os
HOST_NAME = '0.0.0.0' # Host name of the http server
# Gets the port number from $PORT0 environment variable
PORT_NUMBER = int(os.environ['PORT0'])
class MyHandler(http.server.BaseHTTPRequestHandler):
    """Minimal handler that serves an HTML page with the current time."""

    def do_GET(s):
        """Respond to a GET request."""
        s.send_response(200)
        s.send_header("Content-type", "text/html")
        s.end_headers()
        # Assemble the page fragments, then stream them out in order.
        page_parts = (
            "<html><head><title>Time Server</title></head>",
            "<body><p>The current time is {}</p>".format(time.asctime()),
            "</body></html>",
        )
        for fragment in page_parts:
            s.wfile.write(fragment.encode())
if __name__ == '__main__':
    # Serve on HOST_NAME:PORT_NUMBER until interrupted with Ctrl-C.
    server_class = http.server.HTTPServer
    httpd = server_class((HOST_NAME, PORT_NUMBER), MyHandler)
    print(time.asctime(), "Server Starts - {}:{}".format(HOST_NAME, PORT_NUMBER))
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        # Fall through to the clean shutdown below.
        pass
    httpd.server_close()
    print(time.asctime(), "Server Stops - {}:{}".format(HOST_NAME, PORT_NUMBER))
| {
"content_hash": "3f44f309316166579fd700acec4d5cd2",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 92,
"avg_line_length": 34.43333333333333,
"alnum_prop": 0.6321393998063891,
"repo_name": "madanadit/universe",
"id": "8f6a4f7b6fe0e9cd328f24ed22f20cf71f5e4305",
"size": "1033",
"binary": false,
"copies": "9",
"ref": "refs/heads/version-3.x",
"path": "docs/tutorial/helloworld.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "3887386"
},
{
"name": "Makefile",
"bytes": "1501"
},
{
"name": "Python",
"bytes": "70963"
},
{
"name": "Shell",
"bytes": "8470"
}
],
"symlink_target": ""
} |
import gtk
import webkit
# Embed a WebKit browser view inside a scrollable GTK top-level window and
# point it at the local web-display server, then run the GTK main loop.
view = webkit.WebView()
sw = gtk.ScrolledWindow()
sw.add(view)
win = gtk.Window(gtk.WINDOW_TOPLEVEL)
win.resize(800,600)
win.add(sw)
win.show_all()
view.open("http://localhost:5000/")
gtk.main()
| {
"content_hash": "fccba2b7dfa907129d1a3d20e968c8e4",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 37,
"avg_line_length": 14.933333333333334,
"alnum_prop": 0.7142857142857143,
"repo_name": "jlegendary/SimpleCV",
"id": "bcdcf9d3e592f6179fca57e1fad63261248f42d1",
"size": "224",
"binary": false,
"copies": "14",
"ref": "refs/heads/master",
"path": "SimpleCV/examples/web-based/webdisplay/webkit-gtk.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1728"
},
{
"name": "CSS",
"bytes": "1187"
},
{
"name": "HTML",
"bytes": "17691"
},
{
"name": "JavaScript",
"bytes": "13899"
},
{
"name": "NSIS",
"bytes": "10632"
},
{
"name": "Perl",
"bytes": "5044"
},
{
"name": "Python",
"bytes": "1699737"
},
{
"name": "Shell",
"bytes": "10617"
}
],
"symlink_target": ""
} |
from django.contrib.auth.models import User
from models import ShiftEvent
from datetime import date, datetime, timedelta
from django.core.cache import cache
from django.db.models import Q
import os.path, time
from django.utils.timezone import get_default_timezone
from django.conf import settings
def get_last_update_time():
    """Return the schedule's last-update time, derived from call.txt's mtime.

    Cached for one hour. Returns None when call.txt does not exist.
    """
    updated = cache.get('updated')
    if not updated:
        # updated = ShiftEvent.objects.all().order_by('-modified')[0].modified
        call_path = os.path.join(settings.PROJECT_ROOT, os.pardir, 'call.txt')
        if os.path.exists(call_path):
            updated = datetime.fromtimestamp(os.path.getmtime(call_path), get_default_timezone())
            cache.set('updated', updated, 3600)
        else:
            return None
    return updated
def get_user_event_ids(user):
    """Return ids of all of this user's shift events, cached for 12 hours."""
    # 'no_cache' sentinel distinguishes a cache miss from an empty result.
    user_event_ids = cache.get('user_event_ids' + str(user.id), 'no_cache')
    if user_event_ids == 'no_cache':
        # print 'no user_event_ids'
        user_event_ids = ShiftEvent.objects.filter(user=user).values_list('id', flat=True)
        cache.set('user_event_ids' + str(user.id), user_event_ids, 43200)
    return user_event_ids
def get_upcoming_ids(user, user_events, start, horizon):
    """Return ids of the user's events between start and horizon (cached 12 h).

    Falls back to the single soonest event after the horizon when the window
    is empty.
    """
    # NOTE(review): cache key uses str(user) while sibling helpers use
    # str(user.id) -- confirm this difference is intentional.
    upcoming_ids = cache.get('upcoming_ids' + str(user), 'no_cache')
    # upcoming_ids = None
    if upcoming_ids == 'no_cache':
        upcoming_ids = user_events.filter(date__gte=start,date__lte=horizon).values_list('id', flat=True)
        if not upcoming_ids: # get next soonest call after horizon date
            horizon_events = user_events.filter(date__gt=horizon).order_by('date')
            upcoming_ids = horizon_events.values_list('id', flat=True)[:1]
        cache.set('upcoming_ids' + str(user), upcoming_ids, 43200)
    return upcoming_ids
def get_user_conflict_ids(user, force_refresh=False):
    """Return ids of this user's conflicting shift events (cached 12 h).

    A conflict is either two events on the same date, or an event on the day
    immediately after a night-call shift. Looks back 14 days.
    """
    user_conflict_ids = cache.get('user_conflict_ids' + str(user.id), 'no_cache')
    if user_conflict_ids == 'no_cache' or force_refresh:
        hindsight = date.today() - timedelta(days = 14)
        events = ShiftEvent.objects.filter(user=user, date__gte=hindsight).order_by('date').select_related('shift__night_call')
        seen = set()
        conflict_dates = set()
        last_night_call = None
        for event in events:
            # Same-day double booking.
            if event.date in seen:
                conflict_dates.add(event.date)
            else:
                seen.add(event.date)
            # Any event the day after a night call conflicts with it.
            if (event.date - timedelta(days=1)) == last_night_call:
                conflict_dates.add(event.date)
                conflict_dates.add(last_night_call)
            if event.shift.night_call:
                last_night_call = event.date
        user_conflict_ids = ShiftEvent.objects.filter(user=user, date__in=conflict_dates).values_list('id', flat=True)
        cache.set('user_conflict_ids' + str(user.id), user_conflict_ids, 43200)
    return user_conflict_ids
def get_conflict_ids(force_refresh=False):
    """Return the union of every user's conflict ids (cached 12 h)."""
    conflict_ids = cache.get('conflict_ids', 'no_cache') #return an iterable if no conflicts
    # print 'cached conflict_ids: ' + str(conflict_ids)
    # conflict_ids = None
    if conflict_ids == 'no_cache' or force_refresh:
        conflict_ids = set()
        # dummy = get_dummy_user()
        for user in User.objects.all():
        #   if not user == dummy:
            conflict_ids |= set(get_user_conflict_ids(user, force_refresh=force_refresh))
        cache.set('conflict_ids', conflict_ids, 43200) # 60s x 60min x 24h = 86400s
        print 'generated conflict_ids: ' + str(conflict_ids)
    return conflict_ids
def get_post_call_ids():
    """Return ids of recent post-call shift events (cached for 12 hours).

    Covers night-call shifts and "Mt" shifts from the last 90 days up to
    today. Fixes a defect where the computed ids were cached but never
    returned (the function implicitly returned None), unlike every sibling
    getter in this module.
    """
    post_call_ids = cache.get('post_call_ids', [])
    if not post_call_ids:
        hindsight = date.today() - timedelta(days=90)
        post_call_ids = ShiftEvent.objects.filter(Q(shift__initials="Mt") | Q(shift__night_call=True),
            date__gte=hindsight, date__lte=date.today()).values_list('id', flat=True)
        cache.set('post_call_ids', post_call_ids, 43200)
    return post_call_ids
#def get_dummy_user():
# dummy = cache.get('dummy_user')
# if not dummy:
# dummy = User.objects.get(id=16)
# cache.set('dummy_user', dummy, 2592000)
# return dummy | {
"content_hash": "e1211cf718179f8aefeef3f25988be7b",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 123,
"avg_line_length": 41.691489361702125,
"alnum_prop": 0.673896402143404,
"repo_name": "swiharta/radres",
"id": "c0bd3fd901739bccb914289b8bf50a9fa0a54443",
"size": "3919",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "radcal/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "69146"
},
{
"name": "C#",
"bytes": "3961"
},
{
"name": "CSS",
"bytes": "810888"
},
{
"name": "CoffeeScript",
"bytes": "36376"
},
{
"name": "ColdFusion",
"bytes": "3203"
},
{
"name": "Java",
"bytes": "42001"
},
{
"name": "JavaScript",
"bytes": "1372325"
},
{
"name": "PHP",
"bytes": "19400"
},
{
"name": "Perl",
"bytes": "468"
},
{
"name": "Python",
"bytes": "195378"
},
{
"name": "Ruby",
"bytes": "672"
},
{
"name": "Shell",
"bytes": "110"
},
{
"name": "Visual Basic",
"bytes": "2769"
}
],
"symlink_target": ""
} |
'''Trains a LSTM on the IMDB sentiment classification task.
The dataset is actually too small for LSTM to be of any advantage
compared to simpler, much faster methods such as TF-IDF + LogReg.
Notes:
- RNNs are tricky. Choice of batch size is important,
choice of loss and optimizer is critical, etc.
Some configurations won't converge.
- LSTM loss decrease patterns during training can be quite different
from what you see with CNNs/MLPs/etc.
'''
from __future__ import print_function
import numpy as np
np.random.seed(1337) # for reproducibility
from keras.preprocessing import sequence
from keras.utils import np_utils
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Embedding
from keras.layers import LSTM, SimpleRNN, GRU
from keras.datasets import imdb
# Hyperparameters.
max_features = 20000  # vocabulary size for the IMDB word index
maxlen = 80  # cut texts after this number of words (among top max_features most common words)
batch_size = 32

print('Loading data...')
(X_train, y_train), (X_test, y_test) = imdb.load_data(nb_words=max_features)
print(len(X_train), 'train sequences')
print(len(X_test), 'test sequences')

# Pad/truncate every review to a fixed length so batches are rectangular.
print('Pad sequences (samples x time)')
X_train = sequence.pad_sequences(X_train, maxlen=maxlen)
X_test = sequence.pad_sequences(X_test, maxlen=maxlen)
print('X_train shape:', X_train.shape)
print('X_test shape:', X_test.shape)

# Embedding -> LSTM -> sigmoid binary classifier.
print('Build model...')
model = Sequential()
model.add(Embedding(max_features, 128, input_length=maxlen, dropout=0.2))
model.add(LSTM(128, dropout_W=0.2, dropout_U=0.2))  # try using a GRU instead, for fun
model.add(Dense(1))
model.add(Activation('sigmoid'))

# try using different optimizers and different optimizer configs
model.compile(loss='binary_crossentropy',
              optimizer='adam',
              metrics=['accuracy'])

print('Train...')
model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=15,
          validation_data=(X_test, y_test))
score, acc = model.evaluate(X_test, y_test,
                            batch_size=batch_size)
print('Test score:', score)
print('Test accuracy:', acc)
| {
"content_hash": "d241f38a3622e96660698055d08486cc",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 94,
"avg_line_length": 34.91525423728814,
"alnum_prop": 0.7271844660194174,
"repo_name": "relh/keras",
"id": "6c9dcff95e980d226fd929b496f1aefd0eb8d11c",
"size": "2060",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/imdb_lstm.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "697"
},
{
"name": "Python",
"bytes": "905485"
}
],
"symlink_target": ""
} |
"""
test_digimesh.py
By James Saunders, 2016
james@saunders-family.net
Tests the XBee DigiMesh implementation class for API compliance
"""
import unittest
from xbee.thread.digimesh import DigiMesh
class TestDigiMesh(unittest.TestCase):
    """
    Tests DigiMesh-specific features
    More tests will need adding in time
    """
    def setUp(self):
        # No serial port needed: only frame parsing is exercised here.
        self.digimesh = DigiMesh(None)
    def test_split_tx_status(self):
        # A raw 0x8B frame splits into the documented tx_status fields.
        data = b'\x8b\x01\xff\xff\x01\x01\x01'
        info = self.digimesh._split_response(data)
        expected_info = {
            'id': 'tx_status',
            'frame_id': b'\x01',
            'reserved': b'\xff\xff',
            'retries': b'\x01',
            'deliver_status': b'\x01',
            'discover_status': b'\x01'
        }
        self.assertEqual(info, expected_info)
| {
"content_hash": "94dd0fc1f9f58b2d569b79033f17cea8",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 63,
"avg_line_length": 26.242424242424242,
"alnum_prop": 0.5715935334872979,
"repo_name": "nioinnovation/python-xbee",
"id": "21979aaa1a20f50cce3bed90ec05f1fdb220d468",
"size": "866",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "xbee/thread/tests/test_digimesh.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "161765"
}
],
"symlink_target": ""
} |
"""Sign and send a heartbeat message to a remote service."""
import argparse
import sys
from infra.tools.heartbeat import heartbeat
def parse_options():
    """Parse command-line options and load the signing secret.

    Returns a (secret, name, ident, urls) tuple.
    """
    # Gather user options.
    parser = argparse.ArgumentParser()
    parser.add_argument('-u', '--urls', help='URLs to send heartbeat.',
                        action='append', default=[
                            'https://chrome-heartbeats.appspot.com/heartbeat'])
    parser.add_argument('-n', '--name', help='Override the hostname detector.')
    parser.add_argument('-i', '--ident', help='Override the machine ID.')
    parser.add_argument('-s', '--secret',
                        default='~/.heartbeat.key',
                        help='Key file for authentication')
    args = parser.parse_args()
    secret = heartbeat.get_secret(args.secret)
    return secret, args.name, args.ident, args.urls
def main():
    """Build, sign and send one heartbeat; return the send result code."""
    # Step 1: Get options and secret.
    secret, name, ident, urls = parse_options()
    # Step 2: Create and populate the heartbeat object.
    data = heartbeat.get_heartbeat_data(name, ident)
    # Step 3: Sign heartbeat with key.
    signed_message = heartbeat.get_hashed_message(data, secret)
    # Step 4: Send heartbeat
    result = heartbeat.send(signed_message, urls)
    if result == 402:
        # Unknown user -> need to send key along with request.
        print 'Server does not recognize the hostname. Resending with key.'
        signed_message['key'] = secret
        result = heartbeat.send(signed_message, urls)
    return result
if __name__ == '__main__':
sys.exit(main())
| {
"content_hash": "4ff8fea617836a55164a6881f5f13fc4",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 77,
"avg_line_length": 30.875,
"alnum_prop": 0.6700404858299596,
"repo_name": "nicko96/Chrome-Infra",
"id": "27c37712ca312037dd06ef1dc73a0efff097fbef",
"size": "1575",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "infra/tools/heartbeat/__main__.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "100398"
},
{
"name": "Go",
"bytes": "648467"
},
{
"name": "HTML",
"bytes": "7323317"
},
{
"name": "JavaScript",
"bytes": "913960"
},
{
"name": "Makefile",
"bytes": "11281"
},
{
"name": "Protocol Buffer",
"bytes": "2730"
},
{
"name": "Python",
"bytes": "4034630"
},
{
"name": "Shell",
"bytes": "21687"
}
],
"symlink_target": ""
} |
"""Support for Big Ass Fans fan."""
from __future__ import annotations
import math
from typing import Any
from aiobafi6 import OffOnAuto
from homeassistant import config_entries
from homeassistant.components.fan import (
DIRECTION_FORWARD,
DIRECTION_REVERSE,
FanEntity,
FanEntityFeature,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util.percentage import (
percentage_to_ranged_value,
ranged_value_to_percentage,
)
from .const import DOMAIN, PRESET_MODE_AUTO, SPEED_COUNT, SPEED_RANGE
from .entity import BAFEntity
from .models import BAFData
async def async_setup_entry(
    hass: HomeAssistant,
    entry: config_entries.ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up SenseME fans."""
    data: BAFData = hass.data[DOMAIN][entry.entry_id]
    device = data.device
    if device.has_fan:
        async_add_entities([BAFFan(device, device.name)])
class BAFFan(BAFEntity, FanEntity):
    """BAF ceiling fan component."""

    _attr_supported_features = FanEntityFeature.SET_SPEED | FanEntityFeature.DIRECTION
    _attr_preset_modes = [PRESET_MODE_AUTO]
    _attr_speed_count = SPEED_COUNT

    @callback
    def _async_update_attrs(self) -> None:
        """Update attrs from device."""
        device = self._device
        self._attr_is_on = device.fan_mode == OffOnAuto.ON
        self._attr_current_direction = (
            DIRECTION_REVERSE if device.reverse_enable else DIRECTION_FORWARD
        )
        speed = device.speed
        self._attr_percentage = (
            None if speed is None else ranged_value_to_percentage(SPEED_RANGE, speed)
        )
        self._attr_preset_mode = (
            PRESET_MODE_AUTO if device.fan_mode == OffOnAuto.AUTO else None
        )
        super()._async_update_attrs()

    async def async_set_percentage(self, percentage: int) -> None:
        """Set the speed of the fan, as a percentage."""
        # Force the fan on before pushing a speed target.
        if self._device.fan_mode != OffOnAuto.ON:
            self._device.fan_mode = OffOnAuto.ON
        self._device.speed = math.ceil(
            percentage_to_ranged_value(SPEED_RANGE, percentage)
        )

    async def async_turn_on(
        self,
        percentage: int | None = None,
        preset_mode: str | None = None,
        **kwargs: Any,
    ) -> None:
        """Turn the fan on with a percentage or preset mode."""
        if preset_mode is not None:
            return await self.async_set_preset_mode(preset_mode)
        if percentage is not None:
            return await self.async_set_percentage(percentage)
        # No explicit target given: just switch the fan on.
        self._device.fan_mode = OffOnAuto.ON

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the fan off."""
        self._device.fan_mode = OffOnAuto.OFF

    async def async_set_preset_mode(self, preset_mode: str) -> None:
        """Set the preset mode of the fan."""
        if preset_mode != PRESET_MODE_AUTO:
            raise ValueError(f"Invalid preset mode: {preset_mode}")
        self._device.fan_mode = OffOnAuto.AUTO

    async def async_set_direction(self, direction: str) -> None:
        """Set the direction of the fan."""
        self._device.reverse_enable = direction == DIRECTION_REVERSE
| {
"content_hash": "64c1d864a90d06c6b829b574690ad8a9",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 86,
"avg_line_length": 34.670103092783506,
"alnum_prop": 0.6520963425512935,
"repo_name": "w1ll1am23/home-assistant",
"id": "360926363a57b6b627bd2b76280dd8f5b0ce1e06",
"size": "3363",
"binary": false,
"copies": "4",
"ref": "refs/heads/dev",
"path": "homeassistant/components/baf/fan.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2963"
},
{
"name": "PLSQL",
"bytes": "840"
},
{
"name": "Python",
"bytes": "52277012"
},
{
"name": "Shell",
"bytes": "6252"
}
],
"symlink_target": ""
} |
import os
from subprocess import Popen, PIPE
from rpyc.lib import safe_import
from rpyc.lib.compat import BYTES_LITERAL
signal = safe_import("signal")
# modified from the stdlib pipes module for windows
_safechars = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@%_-+=:,./'
_funnychars = '"`$\\'


def shquote(text):
    """Quote *text* so the shell treats it as a single argument.

    Empty strings become ``''``; strings made solely of safe characters are
    returned untouched; anything else is wrapped in single quotes, or in
    double quotes (with ``"`$\\`` backslash-escaped) when it contains a
    single quote itself.
    """
    if not text:
        return "''"
    if all(ch in _safechars for ch in text):
        return text
    if "'" not in text:
        return "'" + text + "'"
    escaped = "".join("\\" + ch if ch in _funnychars else ch for ch in text)
    return '"' + escaped + '"'
class ProcessExecutionError(Exception):
    """Raised by :func:`SshContext.execute` should the executed process
    terminate with an error."""
import subprocess
def _get_startupinfo():
    """Return a STARTUPINFO that hides the console window on Windows.

    Returns None on non-Windows platforms. The original used
    ``subprocess.mswindows`` and the ``_subprocess`` module, both of which
    exist only on Python 2; this version falls back to them only when the
    Python 3 ``subprocess``-level constants are absent.
    """
    if os.name != "nt":
        return None
    sui = subprocess.STARTUPINFO()
    flags = getattr(subprocess, "STARTF_USESHOWWINDOW", None)
    show = getattr(subprocess, "SW_HIDE", None)
    if flags is None:
        # Python 2: the constants live in the private _subprocess module.
        import _subprocess
        flags = _subprocess.STARTF_USESHOWWINDOW  # @UndefinedVariable
        show = _subprocess.SW_HIDE  # @UndefinedVariable
    sui.dwFlags |= flags
    sui.wShowWindow = show
    return sui
class SshTunnel(object):
    """
    Represents an active SSH tunnel (as created by ``ssh -L``).

    .. note::
       Do not instantiate this class yourself -- use the :func:`SshContext.tunnel`
       function for that.
    """
    # Tiny remote program: prints a "ready" banner, then blocks reading stdin.
    # The ssh session (and therefore the tunnel) stays alive until close()
    # writes a line to stdin, unblocking the readline().
    PROGRAM = r"""import sys;sys.stdout.write("ready\n\n\n");sys.stdout.flush();sys.stdin.readline()"""

    def __init__(self, sshctx, loc_host, loc_port, rem_host, rem_port):
        # Endpoint bookkeeping only; the forwarding itself is done by the
        # ssh child process started below with the -L option.
        self.loc_host = loc_host
        self.loc_port = loc_port
        self.rem_host = rem_host
        self.rem_port = rem_port
        self.sshctx = sshctx
        # -L [loc_host]:loc_port:[rem_host]:rem_port forwards the local port.
        self.proc = sshctx.popen("python", "-u", "-c", self.PROGRAM,
            L = "[%s]:%s:[%s]:%s" % (loc_host, loc_port, rem_host, rem_port))
        # Block until the remote side prints its banner, so the tunnel is
        # known to be usable when __init__ returns.
        banner = self.proc.stdout.readline().strip()
        if banner != BYTES_LITERAL("ready"):
            raise ValueError("tunnel failed", banner)

    def __del__(self):
        # Best-effort cleanup; never let a destructor raise.
        try:
            self.close()
        except Exception:
            pass

    def __str__(self):
        return "%s:%s --> (%s)%s:%s" % (self.loc_host, self.loc_port, self.sshctx.host,
            self.rem_host, self.rem_port)

    def is_open(self):
        """returns True if the ``ssh`` process is alive, False otherwise"""
        return self.proc and self.proc.poll() is None

    def close(self):
        """closes (terminates) the SSH tunnel"""
        if not self.is_open():
            return
        # Unblock the remote readline() so ssh can exit on its own...
        self.proc.stdin.write(BYTES_LITERAL("foo\n\n\n"))
        self.proc.stdin.close()
        self.proc.stdout.close()
        self.proc.stderr.close()
        # ...then make sure the local process is gone. Popen.kill() does not
        # exist on Python < 2.6, hence the os.kill fallback on AttributeError.
        try:
            self.proc.kill()
        except AttributeError:
            if signal:
                os.kill(self.proc.pid, signal.SIGTERM)
        self.proc.wait()
        self.proc = None
class SshContext(object):
    """
    An *SSH context* encapsulates all the details required to establish an SSH
    connection to other host. It includes the host name, user name, TCP port,
    identity file, etc.

    Once constructed, it can serve as a factory for SSH operations, such as
    executing a remote program and getting its stdout, or uploading/downloading
    files using ``scp``. It also serves for creating SSH tunnels.

    Example::

        >>> sshctx = SshContext("mymachine", username="borg", keyfile="/home/foo/.ssh/mymachine-id")
        >>> sshctx.execute("ls")
        (0, "...", "")
    """
    def __init__(self, host, user = None, port = None, keyfile = None,
            ssh_program = "ssh", ssh_env = None, ssh_cwd = None,
            scp_program = "scp", scp_env = None, scp_cwd = None):
        self.host = host
        self.user = user
        self.port = port
        self.keyfile = keyfile
        self.ssh_program = ssh_program
        self.ssh_env = ssh_env
        self.ssh_cwd = ssh_cwd
        self.scp_program = scp_program
        self.scp_env = scp_env
        self.scp_cwd = scp_cwd

    def __str__(self):
        uri = "ssh://"
        if self.user:
            uri += "%s@%s" % (self.user, self.host)
        else:
            uri += self.host
        if self.port:
            uri += ":%d" % (self.port)
        return uri

    def _convert_kwargs_to_args(self, kwargs):
        """Turn keyword arguments into CLI flags: True -> bare flag,
        False -> omitted, anything else -> flag followed by its value."""
        args = []
        for k, v in kwargs.items():
            if v is True:
                args.append("-%s" % (k,))
            elif v is False:
                pass
            else:
                args.append("-%s" % (k,))
                args.append(str(v))
        return args

    def _process_scp_cmdline(self, kwargs):
        """Build the ``scp`` command line prefix and the user@host string."""
        args = [self.scp_program]
        # recursive copy by default; callers may pass r=False to disable
        if "r" not in kwargs:
            kwargs["r"] = True
        if self.keyfile and "i" not in kwargs:
            kwargs["i"] = self.keyfile
        if self.port and "P" not in kwargs:
            kwargs["P"] = self.port
        args.extend(self._convert_kwargs_to_args(kwargs))
        if self.user:
            host = "%s@%s" % (self.user, self.host)
        else:
            host = self.host
        return args, host

    def _process_ssh_cmdline(self, kwargs):
        """Build the full ``ssh`` command line (program, flags, target host)."""
        args = [self.ssh_program]
        if self.keyfile and "i" not in kwargs:
            kwargs["i"] = self.keyfile
        if self.port and "p" not in kwargs:
            kwargs["p"] = self.port
        args.extend(self._convert_kwargs_to_args(kwargs))
        if self.user:
            args.append("%s@%s" % (self.user, self.host))
        else:
            args.append(self.host)
        return args

    def popen(self, *args, **kwargs):
        """Runs the given command line remotely (over SSH), returning the
        ``subprocess.Popen`` instance of the command

        :param args: the command line arguments
        :param kwargs: additional keyword arguments passed to ``ssh``

        :returns: a ``Popen`` instance

        Example::

            proc = ctx.popen("ls", "-la")
            proc.wait()
        """
        cmdline = self._process_ssh_cmdline(kwargs)
        # quote each argument so the remote shell sees it as one token
        cmdline.extend(shquote(a) for a in args)
        return Popen(cmdline, stdin = PIPE, stdout = PIPE, stderr = PIPE,
            cwd = self.ssh_cwd, env = self.ssh_env, shell = False,
            startupinfo = _get_startupinfo())

    def execute(self, *args, **kwargs):
        """Runs the given command line remotely (over SSH), waits for it to finish,
        returning the return code, stdout, and stderr of the executed process.

        :param args: the command line arguments
        :param kwargs: additional keyword arguments passed to ``ssh``, except for
                       ``retcode`` and ``input``.
        :param retcode: *keyword only*, the expected return code (Defaults to 0
                        -- success). An exception is raised if the return code does
                        not match the expected one, unless it is ``None``, in
                        which case it will not be tested.
        :param input: *keyword only*, an input string that will be passed to
                      ``Popen.communicate``. Defaults to ``None``

        :raises: :class:`ProcessExecutionError` if the expected return code
                 is not matched

        :returns: a tuple of (return code, stdout, stderr)

        Example::

            rc, out, err = ctx.execute("ls", "-la")
        """
        retcode = kwargs.pop("retcode", 0)
        # renamed local (was `input`) to avoid shadowing the builtin;
        # the keyword argument callers pass is still named ``input``
        input_data = kwargs.pop("input", None)
        proc = self.popen(*args, **kwargs)
        stdout, stderr = proc.communicate(input_data)
        if retcode is not None and proc.returncode != retcode:
            raise ProcessExecutionError(proc.returncode, stdout, stderr)
        return proc.returncode, stdout, stderr

    def upload(self, src, dst, **kwargs):
        """
        Uploads *src* from the local machine to *dst* on the other side. By default,
        ``-r`` (recursive copy) is given to ``scp``, so *src* can be either a file or
        a directory. To override this behavior, pass ``r = False`` as a keyword argument.

        :param src: the source path (on the local side)
        :param dst: the destination path (on the remote side)
        :param kwargs: any additional keyword arguments, passed to ``scp``.
        """
        cmdline, host = self._process_scp_cmdline(kwargs)
        cmdline.append(src)
        cmdline.append("%s:%s" % (host, dst))
        proc = Popen(cmdline, stdin = PIPE, stdout = PIPE, stderr = PIPE, shell = False,
            cwd = self.scp_cwd, env = self.scp_env, startupinfo = _get_startupinfo())
        stdout, stderr = proc.communicate()
        if proc.returncode != 0:
            raise ValueError("upload failed", stdout, stderr)

    def download(self, src, dst, **kwargs):
        """
        Downloads *src* from the other side to *dst* on the local side. By default,
        ``-r`` (recursive copy) is given to ``scp``, so *src* can be either a file or
        a directory. To override this behavior, pass ``r = False`` as a keyword argument.

        :param src: the source path (on the other side)
        :param dst: the destination path (on the local side)
        :param kwargs: any additional keyword arguments, passed to ``scp``.
        """
        cmdline, host = self._process_scp_cmdline(kwargs)
        cmdline.append("%s:%s" % (host, src))
        cmdline.append(dst)
        proc = Popen(cmdline, stdin = PIPE, stdout = PIPE, stderr = PIPE, shell = False,
            cwd = self.scp_cwd, env = self.scp_env, startupinfo = _get_startupinfo())
        stdout, stderr = proc.communicate()
        if proc.returncode != 0:
            # bug fix: the original raised "upload failed" here (copy-paste)
            raise ValueError("download failed", stdout, stderr)

    def tunnel(self, loc_port, rem_port, loc_host = "localhost", rem_host = "localhost"):
        """
        Creates an SSH tunnel from the local port to the remote one. This is
        translated to ``ssh -L loc_host:loc_port:rem_host:rem_port``.

        :param loc_port: the local TCP port to forward
        :param rem_port: the remote (server) TCP port, to which the local port
                         will be forwarded

        :returns: an :class:`SshTunnel` instance
        """
        return SshTunnel(self, loc_host, loc_port, rem_host, rem_port)
| {
"content_hash": "335f150218a4ffdbf4223677d7558a5f",
"timestamp": "",
"source": "github",
"line_count": 274,
"max_line_length": 103,
"avg_line_length": 37.47080291970803,
"alnum_prop": 0.5740722703808318,
"repo_name": "bkillenit/AbletonAPI",
"id": "f8cc11aa4035d0b41d803fd792aebf83b82f60f4",
"size": "10267",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "python-api-materials/code/RpycHost/rpyc/utils/ssh.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3969"
},
{
"name": "Clojure",
"bytes": "306617"
},
{
"name": "HTML",
"bytes": "515"
},
{
"name": "JavaScript",
"bytes": "1367208"
},
{
"name": "Python",
"bytes": "401253"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from __future__ import print_function
from tornado import gen, ioloop
from tchannel import TChannel, Response, thrift
# Module-level setup: a TChannel server bound to a fixed local port, serving
# the ThriftTest service loaded from the test IDL.
tchannel = TChannel('thrift-server', hostport='localhost:54497')
service = thrift.load('tests/data/idls/ThriftTest.thrift')


@tchannel.thrift.register(service.ThriftTest)
@gen.coroutine
def testString(request):
    """Echo handler: asserts the expected request header/body, then replies.

    NOTE(review): returning a value from a ``gen.coroutine`` generator is
    Python-3-only syntax; on Python 2 this would need ``raise gen.Return(...)``
    -- confirm the intended interpreter version.
    """
    assert request.headers == {'req': 'header'}
    assert request.body.thing == 'req'
    return Response('resp', headers={'resp': 'header'})


# Start listening, advertise the bound hostport for the client, then run the
# IO loop forever (this call blocks).
tchannel.listen()
print(tchannel.hostport)
ioloop.IOLoop.current().start()
| {
"content_hash": "97cd9de7a5c5be37b280bf865e8d8bf0",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 64,
"avg_line_length": 23.8,
"alnum_prop": 0.7394957983193278,
"repo_name": "uber/tchannel-python",
"id": "c0cdf1218804d5b30e8c1c1299e838ed692feb89",
"size": "1698",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/simple/thrift/server.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "597"
},
{
"name": "Makefile",
"bytes": "3621"
},
{
"name": "Python",
"bytes": "776222"
},
{
"name": "Shell",
"bytes": "1484"
},
{
"name": "Thrift",
"bytes": "13128"
}
],
"symlink_target": ""
} |
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
from datasciencebox.core.logger import getLogger
logger = getLogger()
class Driver(object):
    """Factory for libcloud compute drivers, selected by the DSB settings."""

    @classmethod
    def new(cls, settings):
        """Create a driver for settings['CLOUD'].

        Returns None for 'bare' (no cloud provider) and -- unchanged from the
        original behavior -- for any unrecognized provider name.
        """
        logger.debug('Creating new driver')
        cloud = settings['CLOUD'].lower()
        if cloud == 'bare':
            return None
        elif cloud == 'aws':
            return Driver.aws_create(settings)
        elif cloud == 'gcp':
            return Driver.gcp_create(settings)
        # Unknown provider: make the original implicit fall-through explicit.
        return None

    @classmethod
    def aws_create(cls, settings):
        """Instantiate an EC2 driver for the configured region/credentials."""
        logger.debug('Creating AWS driver')
        # bug fix: the original rebound ``cls`` (the classmethod argument) to
        # the libcloud driver class; use a distinct local name instead
        driver_cls = get_driver(cls.aws_region_map[settings['AWS_REGION'].lower()])
        return driver_cls(settings['AWS_KEY'], settings['AWS_SECRET'])

    # EC2 region name -> libcloud provider constant.
    # NOTE(review): 'eu-central-1' maps to None and would fail inside
    # get_driver() if selected -- confirm whether that region is supported.
    aws_region_map = {
        'us-east-1': Provider.EC2_US_EAST,
        'us-west-1': Provider.EC2_US_WEST,
        'us-west-2': Provider.EC2_US_WEST_OREGON,
        'eu-west-1': Provider.EC2_EU_WEST,
        'eu-central-1': None,
        'sa-east-1': Provider.EC2_SA_EAST,
        'ap-northeast-1': Provider.EC2_AP_NORTHEAST,
        'ap-southeast-1': Provider.EC2_AP_SOUTHEAST,
        'ap-southeast-2': Provider.EC2_AP_SOUTHEAST2,
    }

    @classmethod
    def gcp_create(cls, settings):
        """Instantiate a GCE driver from the configured service account."""
        logger.debug('Creating GCP driver')
        import libcloud.security
        # NOTE(review): disables SSL certificate verification globally for
        # libcloud -- confirm this is intentional.
        libcloud.security.VERIFY_SSL_CERT = False
        ComputeEngine = get_driver(Provider.GCE)
        driver = ComputeEngine(settings['GCP_EMAIL'],
                               settings['GCP_KEY_FILE'],
                               project=settings['GCP_PROJECT'],
                               datacenter=settings['GCP_DATACENTER'])
        return driver
| {
"content_hash": "3fb8374e143e106f29719f68884d2108",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 76,
"avg_line_length": 34.04,
"alnum_prop": 0.5969447708578144,
"repo_name": "danielfrg/datasciencebox",
"id": "99dcd8335ad58bd310d40c1d2be789339fbbf45d",
"size": "1702",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "datasciencebox/core/cloud/driver.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "1672"
},
{
"name": "Python",
"bytes": "167196"
},
{
"name": "Ruby",
"bytes": "3184"
},
{
"name": "SaltStack",
"bytes": "31230"
},
{
"name": "Scheme",
"bytes": "52"
},
{
"name": "Shell",
"bytes": "133"
}
],
"symlink_target": ""
} |
from tests.isolated_testcase import IsolatedTestCase
class CoalaLowercaseTest(IsolatedTestCase):
    """Check that the bot insists on the lower-case spellings of coala terms."""

    def test_coala_lowercase(self):
        expected = 'coala is always written with a lower case c'
        self.assertCommand('what is Coala?', expected)

    def test_cep(self):
        expected = 'cEP is always written with a lower case c'
        self.assertCommand('what is a CEP?', expected)
| {
"content_hash": "c25a8ce3f76e3ba3a652150b127d2240",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 73,
"avg_line_length": 33.083333333333336,
"alnum_prop": 0.6246851385390428,
"repo_name": "coala/corobo",
"id": "cd9fa14e65f7a4d8fb2e785ebfe6ffa6d6f5873e",
"size": "397",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/coala_lowercase_c_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "1146"
},
{
"name": "Python",
"bytes": "106054"
},
{
"name": "RAML",
"bytes": "1704"
},
{
"name": "Shell",
"bytes": "2456"
}
],
"symlink_target": ""
} |
import pytest
import unittest
from modules.sfp_voipbl import sfp_voipbl
from sflib import SpiderFoot
# NOTE(review): ``usefixtures`` is applied without any fixture names, which
# makes the mark a no-op -- confirm whether fixture names were intended.
@pytest.mark.usefixtures
class TestModuleVoipbl(unittest.TestCase):
    """Unit tests for the sfp_voipbl SpiderFoot module's basic contract."""

    def test_opts(self):
        # Every option must have a matching description entry.
        module = sfp_voipbl()
        self.assertEqual(len(module.opts), len(module.optdescs))

    def test_setup(self):
        # ``self.default_options`` is not defined here; presumably injected by
        # the test harness/base configuration -- verify against conftest.
        sf = SpiderFoot(self.default_options)
        module = sfp_voipbl()
        module.setup(sf, dict())

    def test_watchedEvents_should_return_list(self):
        module = sfp_voipbl()
        self.assertIsInstance(module.watchedEvents(), list)

    def test_producedEvents_should_return_list(self):
        module = sfp_voipbl()
        self.assertIsInstance(module.producedEvents(), list)
| {
"content_hash": "61ed6bc824ac0fb03cbbd91102821d64",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 64,
"avg_line_length": 27.653846153846153,
"alnum_prop": 0.6884561891515995,
"repo_name": "smicallef/spiderfoot",
"id": "96549b009a7df41f62f19c2aff90cad7e0ac2744",
"size": "719",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/unit/modules/test_sfp_voipbl.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "9833"
},
{
"name": "Dockerfile",
"bytes": "2779"
},
{
"name": "JavaScript",
"bytes": "34248"
},
{
"name": "Python",
"bytes": "2845553"
},
{
"name": "RobotFramework",
"bytes": "7584"
},
{
"name": "Shell",
"bytes": "1636"
}
],
"symlink_target": ""
} |
from hazelcast.serialization.bits import *
from hazelcast.protocol.builtin import FixSizedTypesCodec
from hazelcast.protocol.client_message import OutboundMessage, REQUEST_HEADER_SIZE, create_initial_buffer, RESPONSE_HEADER_SIZE
from hazelcast.protocol.builtin import StringCodec
from hazelcast.protocol.builtin import DataCodec
# Protocol message type identifiers for the queue.offer operation.
# hex: 0x030100
_REQUEST_MESSAGE_TYPE = 196864
# hex: 0x030101
_RESPONSE_MESSAGE_TYPE = 196865

# Fixed-size field layout: the timeout (a long) sits right after the request
# header; the boolean response sits right after the response header.
_REQUEST_TIMEOUT_MILLIS_OFFSET = REQUEST_HEADER_SIZE
_REQUEST_INITIAL_FRAME_SIZE = _REQUEST_TIMEOUT_MILLIS_OFFSET + LONG_SIZE_IN_BYTES
_RESPONSE_RESPONSE_OFFSET = RESPONSE_HEADER_SIZE
def encode_request(name, value, timeout_millis):
    """Serialize a queue.offer request (queue name, item, timeout) into an
    OutboundMessage."""
    request_buf = create_initial_buffer(_REQUEST_INITIAL_FRAME_SIZE, _REQUEST_MESSAGE_TYPE)
    FixSizedTypesCodec.encode_long(request_buf, _REQUEST_TIMEOUT_MILLIS_OFFSET, timeout_millis)
    StringCodec.encode(request_buf, name)
    DataCodec.encode(request_buf, value, True)
    return OutboundMessage(request_buf, False)
def decode_response(msg):
    """Decode the boolean result out of a queue.offer response message."""
    frame = msg.next_frame()
    return FixSizedTypesCodec.decode_boolean(frame.buf, _RESPONSE_RESPONSE_OFFSET)
| {
"content_hash": "c8ecafae7c086c0f571ec43c55624ec6",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 127,
"avg_line_length": 40.77777777777778,
"alnum_prop": 0.7920072661217076,
"repo_name": "hazelcast/hazelcast-python-client",
"id": "782800e4e0f2137ac050b3b6a76987d85d832f11",
"size": "1101",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hazelcast/protocol/codec/queue_offer_codec.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2300326"
},
{
"name": "Shell",
"bytes": "1900"
}
],
"symlink_target": ""
} |
"""Define fixtures available for all tests."""
import json
import time
import pytest
from homeassistant.components.flo.const import DOMAIN as FLO_DOMAIN
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, CONTENT_TYPE_JSON
from .common import TEST_EMAIL_ADDRESS, TEST_PASSWORD, TEST_TOKEN, TEST_USER_ID
from tests.common import MockConfigEntry, load_fixture
@pytest.fixture
def config_entry(hass):
    """Config entry version 1 fixture."""
    entry_data = {CONF_USERNAME: TEST_USER_ID, CONF_PASSWORD: TEST_PASSWORD}
    return MockConfigEntry(domain=FLO_DOMAIN, data=entry_data, version=1)
@pytest.fixture
def aioclient_mock_fixture(aioclient_mock):
    """Register canned Flo API responses on the aiohttp client mocker.

    Covers auth, device/location/user lookups, valve open/close, health test,
    and the three system-mode targets (home/away/sleep). POSTs to the same URL
    are distinguished by their ``json`` payload matcher.
    """
    now = round(time.time())
    # Mocks the login response for flo.
    aioclient_mock.post(
        "https://api.meetflo.com/api/v1/users/auth",
        text=json.dumps(
            {
                "token": TEST_TOKEN,
                "tokenPayload": {
                    "user": {"user_id": TEST_USER_ID, "email": TEST_EMAIL_ADDRESS},
                    "timestamp": now,
                },
                "tokenExpiration": 86400,
                "timeNow": now,
            }
        ),
        headers={"Content-Type": CONTENT_TYPE_JSON},
        status=200,
    )
    # Mocks the device for flo.
    aioclient_mock.get(
        "https://api-gw.meetflo.com/api/v2/devices/98765",
        text=load_fixture("flo/device_info_response.json"),
        status=200,
        headers={"Content-Type": CONTENT_TYPE_JSON},
    )
    # Mocks the water consumption for flo.
    aioclient_mock.get(
        "https://api-gw.meetflo.com/api/v2/water/consumption",
        text=load_fixture("flo/water_consumption_info_response.json"),
        status=200,
        headers={"Content-Type": CONTENT_TYPE_JSON},
    )
    # Mocks the location info for flo.
    aioclient_mock.get(
        "https://api-gw.meetflo.com/api/v2/locations/mmnnoopp",
        text=load_fixture("flo/location_info_expand_devices_response.json"),
        status=200,
        headers={"Content-Type": CONTENT_TYPE_JSON},
    )
    # Mocks the user info for flo (request carrying the expand=locations query).
    aioclient_mock.get(
        "https://api-gw.meetflo.com/api/v2/users/12345abcde",
        text=load_fixture("flo/user_info_expand_locations_response.json"),
        status=200,
        headers={"Content-Type": CONTENT_TYPE_JSON},
        params={"expand": "locations"},
    )
    # Mocks the user info for flo (same URL without query parameters).
    aioclient_mock.get(
        "https://api-gw.meetflo.com/api/v2/users/12345abcde",
        text=load_fixture("flo/user_info_expand_locations_response.json"),
        status=200,
        headers={"Content-Type": CONTENT_TYPE_JSON},
    )
    # Mocks the valve open call for flo.
    aioclient_mock.post(
        "https://api-gw.meetflo.com/api/v2/devices/98765",
        text=load_fixture("flo/device_info_response.json"),
        status=200,
        headers={"Content-Type": CONTENT_TYPE_JSON},
        json={"valve": {"target": "open"}},
    )
    # Mocks the valve close call for flo.
    aioclient_mock.post(
        "https://api-gw.meetflo.com/api/v2/devices/98765",
        text=load_fixture("flo/device_info_response_closed.json"),
        status=200,
        headers={"Content-Type": CONTENT_TYPE_JSON},
        json={"valve": {"target": "closed"}},
    )
    # Mocks the health test call for flo.
    aioclient_mock.post(
        "https://api-gw.meetflo.com/api/v2/devices/98765/healthTest/run",
        text=load_fixture("flo/user_info_expand_locations_response.json"),
        status=200,
        headers={"Content-Type": CONTENT_TYPE_JSON},
    )
    # Mocks the system-mode "home" call for flo.
    aioclient_mock.post(
        "https://api-gw.meetflo.com/api/v2/locations/mmnnoopp/systemMode",
        text=load_fixture("flo/user_info_expand_locations_response.json"),
        status=200,
        headers={"Content-Type": CONTENT_TYPE_JSON},
        json={"systemMode": {"target": "home"}},
    )
    # Mocks the system-mode "away" call for flo.
    aioclient_mock.post(
        "https://api-gw.meetflo.com/api/v2/locations/mmnnoopp/systemMode",
        text=load_fixture("flo/user_info_expand_locations_response.json"),
        status=200,
        headers={"Content-Type": CONTENT_TYPE_JSON},
        json={"systemMode": {"target": "away"}},
    )
    # Mocks the system-mode "sleep" call for flo.
    aioclient_mock.post(
        "https://api-gw.meetflo.com/api/v2/locations/mmnnoopp/systemMode",
        text=load_fixture("flo/user_info_expand_locations_response.json"),
        status=200,
        headers={"Content-Type": CONTENT_TYPE_JSON},
        json={
            "systemMode": {
                "target": "sleep",
                "revertMinutes": 120,
                "revertMode": "home",
            }
        },
    )
| {
"content_hash": "a92561c80af56fb2c097d5c65a8defb9",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 83,
"avg_line_length": 35.74626865671642,
"alnum_prop": 0.6081419624217119,
"repo_name": "partofthething/home-assistant",
"id": "907feb855698a31a938b36e142958865159942cc",
"size": "4790",
"binary": false,
"copies": "8",
"ref": "refs/heads/dev",
"path": "tests/components/flo/conftest.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1720"
},
{
"name": "Python",
"bytes": "31051838"
},
{
"name": "Shell",
"bytes": "4832"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
    """Make liveblog entry headlines and response authors optional."""

    dependencies = [
        ('liveblog', '0003_auto_20160825_1459'),
    ]

    operations = [
        # Entries may now be saved without a headline.
        migrations.AlterField(
            model_name='liveblogentry',
            name='headline',
            field=models.CharField(max_length=255, null=True, blank=True),
        ),
        # Responses may now exist without an author user.
        migrations.AlterField(
            model_name='liveblogresponse',
            name='author',
            field=models.ForeignKey(to=settings.AUTH_USER_MODEL, blank=True, null=True),
        ),
    ]
| {
"content_hash": "33eece8ecd90cbe2fa045b0af033e48a",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 88,
"avg_line_length": 26.916666666666668,
"alnum_prop": 0.6114551083591331,
"repo_name": "theonion/django-bulbs",
"id": "80ca7cde65af1acbf52fa745cff4db43d4400040",
"size": "670",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bulbs/liveblog/migrations/0004_auto_20160908_1512.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "36651"
},
{
"name": "HTML",
"bytes": "73968"
},
{
"name": "JavaScript",
"bytes": "57288"
},
{
"name": "Python",
"bytes": "1055540"
},
{
"name": "Ruby",
"bytes": "397"
},
{
"name": "Shell",
"bytes": "1629"
}
],
"symlink_target": ""
} |
import numpy as np
from common.numpy_fast import clip, interp
from selfdrive.config import Conversions as CV
class LongCtrlState:
    """Labels for the longitudinal control state machine (transitions are
    handled by long_control_state_trans below)."""
    # long_control_state labels:
    off = 0  # Off
    pid = 1  # moving and tracking targets, with PID control running
    stopping = 2  # stopping and changing controls to almost open loop as PID does not fit well at such a low speed
    starting = 3  # starting and releasing brakes in open loop before giving back to PID
def long_control_state_trans(enabled, long_control_state, v_ego, v_target, v_pid, output_gb):
    """Return the next longitudinal control state given current conditions."""
    stopping_speed = 0.5
    stopping_target_speed = 0.3
    starting_target_speed = 0.5
    brake_threshold_to_pid = 0.2

    # Vehicle nearly stopped and both the setpoint and the target want a stop.
    stopping_condition = (
        v_ego < stopping_speed
        and v_pid < stopping_target_speed
        and v_target < stopping_target_speed
    )

    if not enabled:
        return LongCtrlState.off

    if long_control_state == LongCtrlState.off:
        return LongCtrlState.pid

    if long_control_state == LongCtrlState.pid:
        if stopping_condition:
            return LongCtrlState.stopping
        return long_control_state

    if long_control_state == LongCtrlState.stopping:
        if v_target > starting_target_speed:
            return LongCtrlState.starting
        return long_control_state

    # LongCtrlState.starting
    if stopping_condition:
        return LongCtrlState.stopping
    if output_gb >= -brake_threshold_to_pid:
        return LongCtrlState.pid
    return long_control_state
def get_compute_gb():
    """Build the accel->pedal mapping function.

    Returns a function mapping [desired_accel, current_speed] to a 1-element
    array in roughly [-1, 1] (negative = brake, positive = gas) using a small
    fixed 2-layer leaky-ReLU network; see debug/dump_accel_from_fiber.py.

    Bug fix vs. the original: the low-speed branch used to overwrite
    ``dat[1]`` in place, mutating the caller's list; this version never
    modifies its input.
    """
    # Fixed network weights (see debug/dump_accel_from_fiber.py).
    w0 = np.array([[ 1.22056961, -0.39625418,  0.67952657],
                   [ 1.03691769,  0.78210306, -0.41343188]])
    b0 = np.array([ 0.01536703, -0.14335321, -0.26932889])
    w2 = np.array([[-0.59124422,  0.42899439,  0.38660881],
                   [ 0.79973811,  0.13178682,  0.08550351],
                   [-0.15651935, -0.44360259,  0.76910877]])
    b2 = np.array([ 0.15624429,  0.02294923, -0.0341086 ])
    w4 = np.array([[-0.31521443],
                   [-0.38626176],
                   [ 0.52667892]])
    b4 = np.array([-0.02922216])

    def leakyrelu(x, alpha):
        return np.maximum(x, alpha * x)

    def compute_output(dat):
        # Forward pass through the two hidden layers and the linear output.
        m0 = leakyrelu(np.dot(dat, w0) + b0, 0.1)
        m2 = leakyrelu(np.dot(m0, w2) + b2, 0.1)
        return np.dot(m2, w4) + b4

    def _compute_gb(dat):
        # Below v1 the network is unreliable: linearly extrapolate from its
        # outputs at v1 and v2 instead of evaluating it directly.
        v1 = 5.
        v2 = 10.
        accel, vx = dat[0], dat[1]
        if vx > 5.:
            return compute_output([accel, vx])
        m4v1 = compute_output([accel, v1])
        m4v2 = compute_output([accel, v2])
        return (vx - v1) * (m4v2 - m4v1) / (v2 - v1) + m4v1

    return _compute_gb
# takes in [desired_accel, current_speed] -> [-1.0, 1.0] where -1.0 is max brake and 1.0 is max gas
compute_gb = get_compute_gb()

# PID gain schedules interpolated over vehicle speed: higher gains at low
# speed. Breakpoints look like m/s given the 0-35 range -- TODO confirm.
_KP_BP = [0., 5., 35.]
_KP_V = [1.2, 0.8, 0.5]
_kI_BP = [0., 35.]
_kI_V = [0.18, 0.12]
def pid_long_control(v_ego, v_pid, Ui_accel_cmd, gas_max, brake_max, jerk_factor, gear, rate):
    """Compute the gas/brake pedal position that tracks the desired speed.

    Returns (output_gb, Up_accel_cmd, Ui_accel_cmd, long_control_sat):
    output_gb is clipped to [-brake_max, gas_max] (positive = gas, negative =
    brake); long_control_sat flags that the unclipped command hit a limit.
    """
    # proportional and integral terms. More precision at low speed
    Kp = interp(v_ego, _KP_BP, _KP_V)
    Ki = interp(v_ego, _kI_BP, _kI_V)
    # scale Kp and Ki by the jerk factor from drive_thread
    Kp = (1. + jerk_factor)*Kp
    Ki = (1. + jerk_factor)*Ki
    # this is ugly but the CAN speed reports 0 when speed<0.3m/s and we can't have that jump
    v_ego_min = 0.3
    v_ego = max(v_ego, v_ego_min)
    v_error = v_pid - v_ego
    Up_accel_cmd = v_error*Kp
    # Candidate integrator update, validated by the anti-windup check below.
    Ui_accel_cmd_new = Ui_accel_cmd + v_error*Ki*1.0/rate
    accel_cmd_new = Ui_accel_cmd_new + Up_accel_cmd
    output_gb_new = compute_gb([accel_cmd_new, v_ego])
    # Anti-wind up for integrator: only update integrator if we not against the throttle and brake limits
    # do not wind up if we are changing gear and we are on the gas pedal
    if (((v_error >= 0. and (output_gb_new < gas_max or Ui_accel_cmd < 0)) or
         (v_error <= 0. and (output_gb_new > - brake_max or Ui_accel_cmd > 0))) and
        not (v_error >= 0. and gear == 11 and output_gb_new > 0)):
        #update integrator
        Ui_accel_cmd = Ui_accel_cmd_new
    accel_cmd = Ui_accel_cmd + Up_accel_cmd
    # go from accel to pedals
    output_gb = compute_gb([accel_cmd, v_ego])
    output_gb = output_gb[0]
    # useful to know if control is against the limit
    long_control_sat = False
    if output_gb > gas_max or output_gb < -brake_max:
        long_control_sat = True
    output_gb = clip(output_gb, -brake_max, gas_max)
    return output_gb, Up_accel_cmd, Ui_accel_cmd, long_control_sat
# Open-loop brake ramp rates and integrator seed used by the stopping/starting
# states of LongControl.update (units: pedal travel fraction per second).
stopping_brake_rate = 0.2  # brake_travel/s while trying to stop
starting_brake_rate = 0.6  # brake_travel/s while releasing on restart
starting_Ui = 0.5  # Since we don't have much info about acceleration at this point, be conservative
brake_stopping_target = 0.5  # apply at least this amount of brake to maintain the vehicle stationary

_MAX_SPEED_ERROR_BP = [0., 30.]  # speed breakpoints
_MAX_SPEED_ERROR_V = [1.5, .8]  # max positive v_pid error VS actual speed; this avoids controls windup due to slow pedal resp
class LongControl(object):
    """Longitudinal controller: owns the state machine, the PID speed setpoint
    (v_pid) and integrator, and converts targets into gas/brake commands."""
    def __init__(self, ):
        self.long_control_state = LongCtrlState.off  # initialized to off
        self.long_control_sat = False
        self.Up_accel_cmd = 0.
        self.last_output_gb = 0.
        self.reset(0.)

    def reset(self, v_pid):
        """Clear the integrator and re-seed the speed setpoint."""
        self.Ui_accel_cmd = 0.
        self.v_pid = v_pid

    def update(self, enabled, v_ego, v_cruise, v_target_lead, a_target, jerk_factor, VP):
        """Run one control step; returns (final_gas, final_brake), both >= 0."""
        # Speed-dependent actuator limits.
        brake_max_bp = [0., 5., 20., 100.]  # speeds
        brake_max_v = [1.0, 1.0, 0.8, 0.8]  # values

        # brake and gas limits
        brake_max = interp(v_ego, brake_max_bp, brake_max_v)

        # TODO: not every time
        if VP.brake_only:
            gas_max = 0
        else:
            gas_max_bp = [0., 100.]  # speeds
            gas_max_v = [0.6, 0.6]  # values
            gas_max = interp(v_ego, gas_max_bp, gas_max_v)

        overshoot_allowance = 2.0  # overshoot allowed when changing accel sign

        output_gb = self.last_output_gb
        rate = 100

        # limit max target speed based on cruise setting:
        v_cruise_mph = round(v_cruise * CV.KPH_TO_MPH)  # what's displayed in mph on the IC
        v_target = min(v_target_lead, v_cruise_mph * CV.MPH_TO_MS / VP.ui_speed_fudge)

        # Per-step bounds on how fast the setpoint may move (accel limits).
        max_speed_delta_up = a_target[1]*1.0/rate
        max_speed_delta_down = a_target[0]*1.0/rate

        # *** long control substate transitions
        self.long_control_state = long_control_state_trans(enabled, self.long_control_state, v_ego, v_target, self.v_pid, output_gb)

        # *** long control behavior based on state
        # TODO: move this to drive_helpers
        # disabled
        if self.long_control_state == LongCtrlState.off:
            self.v_pid = v_ego  # do nothing
            output_gb = 0.
            self.Ui_accel_cmd = 0.
        # tracking objects and driving
        elif self.long_control_state == LongCtrlState.pid:
            #reset v_pid close to v_ego if it was too far and new v_target is closer to v_ego
            if ((self.v_pid > v_ego + overshoot_allowance) and
                (v_target < self.v_pid)):
                self.v_pid = max(v_target, v_ego + overshoot_allowance)
            elif ((self.v_pid < v_ego - overshoot_allowance) and
                  (v_target > self.v_pid)):
                self.v_pid = min(v_target, v_ego - overshoot_allowance)

            # move v_pid no faster than allowed accel limits
            if (v_target > self.v_pid + max_speed_delta_up):
                self.v_pid += max_speed_delta_up
            elif (v_target < self.v_pid + max_speed_delta_down):
                self.v_pid += max_speed_delta_down
            else:
                self.v_pid = v_target

            # to avoid too much wind up on acceleration, limit positive speed error
            if not VP.brake_only:
                max_speed_error = interp(v_ego, _MAX_SPEED_ERROR_BP, _MAX_SPEED_ERROR_V)
                self.v_pid = min(self.v_pid, v_ego + max_speed_error)

            # TODO: removed anti windup on gear change, does it matter?
            output_gb, self.Up_accel_cmd, self.Ui_accel_cmd, self.long_control_sat = pid_long_control(v_ego, self.v_pid, \
                self.Ui_accel_cmd, gas_max, brake_max, jerk_factor, 0, rate)
        # intention is to stop, switch to a different brake control until we stop
        elif self.long_control_state == LongCtrlState.stopping:
            # Ramp the brake on open-loop until stopped and holding.
            if v_ego > 0. or output_gb > -brake_stopping_target:
                output_gb -= stopping_brake_rate/rate
            output_gb = clip(output_gb, -brake_max, gas_max)
            self.v_pid = v_ego
            self.Ui_accel_cmd = 0.
        # intention is to move again, release brake fast before handling control to PID
        elif self.long_control_state == LongCtrlState.starting:
            if output_gb < -0.2:
                output_gb += starting_brake_rate/rate
            self.v_pid = v_ego
            self.Ui_accel_cmd = starting_Ui

        self.last_output_gb = output_gb

        # Split the signed pedal command into separate gas and brake outputs.
        final_gas = clip(output_gb, 0., gas_max)
        final_brake = -clip(output_gb, -brake_max, 0.)
        return final_gas, final_brake
| {
"content_hash": "c0cb9a68402af2858814db8103188e6d",
"timestamp": "",
"source": "github",
"line_count": 239,
"max_line_length": 128,
"avg_line_length": 38.1673640167364,
"alnum_prop": 0.6342907257180442,
"repo_name": "BrevanJ04/Comma-Test-0.2.9",
"id": "d6e1c8e8197a3078e20b2f08a43a79fb6423d256",
"size": "9122",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "selfdrive/controls/lib/longcontrol.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "19893"
},
{
"name": "C",
"bytes": "1494987"
},
{
"name": "C++",
"bytes": "634329"
},
{
"name": "Cap'n Proto",
"bytes": "11243"
},
{
"name": "Makefile",
"bytes": "7624"
},
{
"name": "Python",
"bytes": "189711"
},
{
"name": "Shell",
"bytes": "451"
}
],
"symlink_target": ""
} |
"""
Built-in, globally-available admin actions.
"""
from django.contrib import messages
from django.contrib.admin import helpers
from django.contrib.admin.utils import get_deleted_objects, model_ngettext
from django.core.exceptions import PermissionDenied
from django.db import router
from django.template.response import TemplateResponse
from django.utils.encoding import force_text
from django.utils.translation import ugettext as _, ugettext_lazy
def delete_selected(modeladmin, request, queryset):
    """
    Default action which deletes the selected objects.

    First displays a confirmation page listing all deletable objects, or a
    "permission denied" message if the user lacks permission on one of the
    related children (foreign keys). Upon confirmation, deletes all selected
    objects and redirects back to the change list.
    """
    opts = modeladmin.model._meta
    app_label = opts.app_label

    # The user must have delete permission on the model itself.
    if not modeladmin.has_delete_permission(request):
        raise PermissionDenied

    using = router.db_for_write(modeladmin.model)

    # Gather every related object that a cascade delete would also remove.
    deletable_objects, model_count, perms_needed, protected = get_deleted_objects(
        queryset, opts, request.user, modeladmin.admin_site, using)

    # The deletion was already confirmed: perform it, then show the change
    # list again (by returning None).
    if request.POST.get('post') and not protected:
        if perms_needed:
            raise PermissionDenied
        num_selected = queryset.count()
        if num_selected:
            for obj in queryset:
                # Log each deletion before the queryset-level delete runs.
                modeladmin.log_deletion(request, obj, force_text(obj))
            queryset.delete()
            modeladmin.message_user(request, _("Successfully deleted %(count)d %(items)s.") % {
                "count": num_selected, "items": model_ngettext(modeladmin.opts, num_selected)
            }, messages.SUCCESS)
        # Return None to display the change list page again.
        return None

    objects_name = model_ngettext(queryset)
    if perms_needed or protected:
        title = _("Cannot delete %(name)s") % {"name": objects_name}
    else:
        title = _("Are you sure?")

    # Template context for the confirmation page.
    context = dict(modeladmin.admin_site.each_context(request))
    context.update(
        title=title,
        objects_name=objects_name,
        deletable_objects=[deletable_objects],
        model_count=dict(model_count).items(),
        queryset=queryset,
        perms_lacking=perms_needed,
        protected=protected,
        opts=opts,
        action_checkbox_name=helpers.ACTION_CHECKBOX_NAME,
        media=modeladmin.media,
    )

    request.current_app = modeladmin.admin_site.name

    # Display the confirmation page, preferring the most specific template.
    candidate_templates = modeladmin.delete_selected_confirmation_template or [
        "admin/%s/%s/delete_selected_confirmation.html" % (app_label, opts.model_name),
        "admin/%s/delete_selected_confirmation.html" % app_label,
        "admin/delete_selected_confirmation.html"
    ]
    return TemplateResponse(request, candidate_templates, context)
delete_selected.short_description = ugettext_lazy("Delete selected %(verbose_name_plural)s")
| {
"content_hash": "c9cd17be6572b85d479b2341d0f341ab",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 95,
"avg_line_length": 37.47126436781609,
"alnum_prop": 0.6858895705521473,
"repo_name": "auready/django",
"id": "5630d1c94cade4136c211dce383868b623eadb7b",
"size": "3260",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "django/contrib/admin/actions.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "53169"
},
{
"name": "HTML",
"bytes": "173634"
},
{
"name": "JavaScript",
"bytes": "448151"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "12200962"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg \
as FigureCanvas
from traitsui.basic_editor_factory import BasicEditorFactory
from traitsui.qt4.editor import Editor
# noinspection PyUnresolvedReferences
from mpl_toolkits.mplot3d import Axes3D
class MPLFigureCanvas(FigureCanvas):
    # Thin project-local alias of the Qt4Agg canvas; adds no behavior but
    # gives the editor a dedicated type to instantiate.
    pass
class _MPLFigureEditor(Editor):
    """TraitsUI editor that embeds a matplotlib figure in a Qt canvas."""

    def init(self, parent):
        # Build the Qt widget used to render the edited figure.
        self.control = self._create_control(parent)

    def update_editor(self):
        # The canvas repaints itself; no trait-to-widget sync is needed.
        pass

    def _create_control(self, parent):
        # ``self.value`` is the matplotlib Figure held by the edited trait.
        canvas = MPLFigureCanvas(self.value)
        return canvas
class MPLFigureEditor(BasicEditorFactory):
    # Editor factory exposing _MPLFigureEditor for use in TraitsUI views.
    klass = _MPLFigureEditor
# ============= EOF =============================================
| {
"content_hash": "ce988b46674a9f7b926bd32c5ffe0eff",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 66,
"avg_line_length": 24.15625,
"alnum_prop": 0.6778783958602846,
"repo_name": "UManPychron/pychron",
"id": "7070788b65f7e5f019093650450b0c7acfed2450",
"size": "1708",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "pychron/core/ui/qt/mpl_figure_editor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "131"
},
{
"name": "C++",
"bytes": "3706"
},
{
"name": "CSS",
"bytes": "279"
},
{
"name": "Fortran",
"bytes": "455875"
},
{
"name": "HTML",
"bytes": "40346"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Processing",
"bytes": "11421"
},
{
"name": "Python",
"bytes": "10234954"
},
{
"name": "Shell",
"bytes": "10753"
}
],
"symlink_target": ""
} |
"""
Django settings for project project.
Generated by 'django-admin startproject' using Django 1.8.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'b&7pw+!!$q#@#3(qkt5%$dtz+5$_9ez%t8h_m9z%4i+juq8#9j'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework.authtoken',
'corsheaders',
'app',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
# REST Framework settings for Token authentication and Default permissions
REST_FRAMEWORK = {
# Enable Token Authentication
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.BasicAuthentication',
'rest_framework.authentication.SessionAuthentication',
'rest_framework.authentication.TokenAuthentication',
),
# Use Django's standard `django.contrib.auth` permissions,
# or allow read-only access for unauthenticated users.
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
]
}
# CORS headers settings
"""
Please refer to the following URL for all default settings:
https://github.com/ottoyiu/django-cors-headers/
"""
# Allow requests from all hosts, this setting should be set to True ONLY during development
# Set it to false and use a Whitelist for production
CORS_ORIGIN_ALLOW_ALL = True
| {
"content_hash": "c857de37de8c16490027b8fcbef9f2be",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 91,
"avg_line_length": 27.789473684210527,
"alnum_prop": 0.7012987012987013,
"repo_name": "o5k/django-rest-framework-seed",
"id": "40b878b8284727068dbc30c3ca01284208eb5fbb",
"size": "3696",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "project/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7891"
}
],
"symlink_target": ""
} |
import sys
import os
import os.path
from jinja2 import Template
from configparser import ConfigParser
import io
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: <program> <deploy_cfg_template_file> <file_with_properties>")
print("Properties from <file_with_properties> will be applied to <deploy_cfg_template_file>")
print("template which will be overwritten with .orig copy saved in the same folder first.")
sys.exit(1)
file = open(sys.argv[1], 'r')
text = file.read()
t = Template(text)
config = ConfigParser()
if os.path.isfile(sys.argv[2]):
config.read(sys.argv[2])
elif "KBASE_ENDPOINT" in os.environ:
kbase_endpoint = os.environ.get("KBASE_ENDPOINT")
props = "[global]\n" + \
"kbase_endpoint = " + kbase_endpoint + "\n" + \
"job_service_url = " + kbase_endpoint + "/userandjobstate\n" + \
"workspace_url = " + kbase_endpoint + "/ws\n" + \
"shock_url = " + kbase_endpoint + "/shock-api\n" + \
"handle_url = " + kbase_endpoint + "/handle_service\n" + \
"srv_wiz_url = " + kbase_endpoint + "/service_wizard\n" + \
"njsw_url = " + kbase_endpoint + "/njs_wrapper\n"
if "AUTH_SERVICE_URL" in os.environ:
props += "auth_service_url = " + os.environ.get("AUTH_SERVICE_URL") + "\n"
elif "auth2services" in kbase_endpoint:
props += "auth_service_url = " + kbase_endpoint + "/auth/api/legacy/KBase/Sessions/Login\n"
props += "auth_service_url_allow_insecure = " + \
os.environ.get("AUTH_SERVICE_URL_ALLOW_INSECURE", "false") + "\n"
config.readfp(io.StringIO(props))
else:
raise ValueError('Neither ' + sys.argv[2] + ' file nor KBASE_ENDPOINT env-variable found')
props = dict(config.items("global"))
output = t.render(props)
with open(sys.argv[1] + ".orig", 'w') as f:
f.write(text)
with open(sys.argv[1], 'w') as f:
f.write(output)
| {
"content_hash": "0ce2df785be7e367b23c48c8ad8081c5",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 103,
"avg_line_length": 46.75,
"alnum_prop": 0.5775401069518716,
"repo_name": "briehl/narrative-test",
"id": "a630d598a61fcc24c6911d229588969186768589",
"size": "2057",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "scripts/prepare_deploy_cfg.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "1004"
},
{
"name": "Makefile",
"bytes": "2991"
},
{
"name": "Perl",
"bytes": "58822"
},
{
"name": "Python",
"bytes": "540669"
},
{
"name": "Ruby",
"bytes": "6857"
},
{
"name": "Shell",
"bytes": "929"
}
],
"symlink_target": ""
} |
import os
import pickle
import warnings
from copy import deepcopy
from pynq.devicetree import DeviceTreeSegment, get_dtbo_base_name
from .hwh_parser import HWH, get_hwh_name
class DeviceMeta(type):
    """Metaclass for all types of Device

    It is responsible for enumerating the devices in the system and
    selecting a `default_device` that is used by applications that
    are oblivious to multiple-device scenarios.

    The main implementation is the `Device` class which should be subclassed
    for each type of hardware that is supported. Each subclass should have
    a `_probe_` function which returns an array of `Device` objects and
    a `_probe_priority_` constant which is used to determine the
    default device.
    """
    # Registry mapping _probe_priority_ -> Device subclass, filled in as
    # subclasses are defined (see __init__ below).
    _subclasses = {}
    def __init__(cls, name, bases, attrs):
        # Register any subclass that declares a _probe_ hook; a clashing
        # priority from a *different* class is a programming error.
        if "_probe_" in attrs:
            priority = attrs["_probe_priority_"]
            if (
                priority in DeviceMeta._subclasses
                and DeviceMeta._subclasses[priority].__name__ != name
            ):
                raise RuntimeError("Multiple Device subclasses with same priority")
            DeviceMeta._subclasses[priority] = cls
        super().__init__(name, bases, attrs)
    @property
    def devices(cls):
        """All devices found in the system

        An array of `Device` objects. Probing is done when this
        property is first accessed and the result is cached on the class.
        """
        if not hasattr(cls, "_devices"):
            cls._devices = []
            # Probe in ascending priority order so lower-priority classes
            # contribute their devices first.
            for key in sorted(DeviceMeta._subclasses.keys()):
                cls._devices.extend(DeviceMeta._subclasses[key]._probe_())
            if len(cls._devices) == 0 and "XILINX_XRT" not in os.environ:
                warnings.warn(
                    "No devices found, is the XRT environment sourced?", UserWarning
                )
        return cls._devices
    @property
    def active_device(cls):
        """The device used by PYNQ if `None` used for a device parameter

        This defaults to the device with the lowest priority and index but
        can be overridden to globally change the default.
        """
        if not hasattr(cls, "_active_device"):
            if len(cls.devices) == 0:
                raise RuntimeError("No Devices Found")
            cls._active_device = cls.devices[0]
        return cls._active_device
    @active_device.setter
    def active_device(cls, value):
        cls._active_device = value
def clear_state(dict_in):
    """Recursively blank out ``'state'`` entries in a dictionary.

    Walks ``dict_in`` depth-first and sets every value stored under the
    key ``'state'`` (at any nesting level) to ``None``. Non-dict inputs
    are returned unchanged. The dictionary is modified in place and also
    returned for convenience.

    Parameters
    ----------
    dict_in : obj
        Input dictionary to be cleared.
    """
    if not isinstance(dict_in, dict):
        return dict_in
    for key in dict_in:
        value = dict_in[key]
        if isinstance(value, dict):
            dict_in[key] = clear_state(value)
        if key == 'state':
            dict_in[key] = None
    return dict_in
class Device(metaclass=DeviceMeta):
    """Construct a new Device Instance

    This should be called by subclasses providing a globally unique
    identifier for the device.

    Parameters
    ----------
    tag: str
        The unique identifier associated with the device
    """
    def __init__(self, tag, warn=False):
        # Args validation
        # NOTE(review): the ``warn`` parameter is accepted but unused here —
        # presumably consumed by subclasses; confirm before removing.
        if type(tag) is not str:
            raise ValueError("Argument 'tag' must be a string")
        self.tag = tag
        self.parser = None
    def set_bitfile_name(self, bitfile_name: str) -> None:
        # Parse the bitstream's metadata and mirror the parser's dictionaries
        # onto this device for convenient access.
        self.bitfile_name = bitfile_name
        self.parser = self.get_bitfile_metadata(self.bitfile_name)
        self.mem_dict = self.parser.mem_dict
        self.ip_dict = self.parser.ip_dict
        self.gpio_dict = self.parser.gpio_dict
        self.interrupt_pins = self.parser.interrupt_pins
        self.interrupt_controllers = self.parser.interrupt_controllers
        self.hierarchy_dict = self.parser.hierarchy_dict
        self.systemgraph = self.parser.systemgraph
    def allocate(self, shape, dtype, **kwargs):
        """Allocate an array on the device

        Returns a buffer on memory accessible to the device

        Parameters
        ----------
        shape : tuple(int)
            The shape of the array
        dtype : np.dtype
            The type of the elements of the array

        Returns
        -------
        PynqBuffer
            The buffer shared between the host and the device
        """
        return self.default_memory.allocate(shape, dtype, **kwargs)
    def reset(self, parser=None, timestamp=None, bitfile_name=None):
        """Reset all the dictionaries.

        This method must be called after a bitstream download.
        1. In case there is a `hwh` file, this method will reset
        the states of the IP, GPIO, and interrupt dictionaries .
        2. In case there is no `hwh` file, this method will simply
        clear the state information stored for all dictionaries.

        An existing parser given as the input can significantly reduce
        the reset time, since the PL can reset based on the
        information provided by the parser.

        Parameters
        ----------
        parser : HWH
            A parser object to speed up the reset process.
        timestamp : str
            The timestamp to embed in the reset
        bitfile_name : str
            The bitfile being loaded as part of the reset
        """
        if parser is not None:
            self.ip_dict = parser.ip_dict
            self.gpio_dict = parser.gpio_dict
            self.interrupt_controllers = parser.interrupt_controllers
            self.interrupt_pins = parser.interrupt_pins
            self.hierarchy_dict = parser.hierarchy_dict
            self.mem_dict = parser.mem_dict
        else:
            # NOTE(review): reads ``self._bitfile_name`` while the rest of the
            # class sets ``self.bitfile_name`` (no underscore) — confirm which
            # attribute is authoritative.
            hwh_name = get_hwh_name(self._bitfile_name)
            if os.path.isfile(hwh_name):
                self.ip_dict = clear_state(self.ip_dict)
                self.gpio_dict = clear_state(self.gpio_dict)
                self.hierarchy_dict = clear_state(self.hierarchy_dict)
            else:
                self.clear_dict()
        if timestamp is not None:
            self.timestamp = timestamp
        if bitfile_name is not None:
            self.bitfile_name = bitfile_name
    def clear_dict(self):
        """Clear all the dictionaries stored in PL.

        This method will clear all the related dictionaries, including IP
        dictionary, GPIO dictionary, etc.
        """
        self.ip_dict = {}
        self.gpio_dict = {}
        self.interrupt_controllers = {}
        self.interrupt_pins = {}
        self.hierarchy_dict = {}
        self.mem_dict = {}
    def load_ip_data(self, ip_name, data, zero=False):
        """This method writes data to the addressable IP.

        Note
        ----
        The data is assumed to be in binary format (.bin). The data
        name will be stored as a state information in the IP dictionary.

        Parameters
        ----------
        ip_name : str
            The name of the addressable IP.
        data : str
            The absolute path of the data to be loaded.
        zero : bool
            Zero out the address of the IP not covered by data

        Returns
        -------
        None
        """
        from pynq import MMIO
        # Record the loaded data path as the IP/memory "state".
        if ip_name in self.ip_dict:
            self.ip_dict[ip_name]["state"] = data
        elif ip_name in self.mem_dict:
            self.mem_dict[ip_name]["state"] = data
        ip_dict = self.ip_dict
        mem_dict = self.mem_dict
        if ip_name in ip_dict:
            # NOTE(review): both ``address`` and ``target_size`` read
            # "addr_range" here; ``address`` looks like it should be the IP's
            # base/physical address — confirm against the mem_dict branch
            # below, which uses distinct "base_address" and "size" keys.
            address = ip_dict[ip_name]["addr_range"]
            target_size = ip_dict[ip_name]["addr_range"]
        elif ip_name in mem_dict:
            address = mem_dict[ip_name]["base_address"]
            target_size = mem_dict[ip_name]["size"]
        with open(data, "rb") as bin_file:
            size = os.fstat(bin_file.fileno()).st_size
            if size > target_size:
                raise RuntimeError("Binary file too big for IP")
            mmio = MMIO(address, target_size, device=self)
            buf = bin_file.read(size)
            # Pad the payload to a multiple of 4 bytes for word-wise MMIO.
            if len(buf) % 4 != 0:
                padding = 4 - len(buf) % 4
                buf += b"\x00" * padding
                size += padding
            mmio.write(0, buf)
            # Optionally zero the remainder of the address range.
            if zero and size < target_size:
                mmio.write(size, b"\x00" * (target_size - size))
    def update_partial_region(self, hier, parser):
        """Merge the parser information from partial region.

        Combine the currently PL information and the partial HWH file
        parsing results.

        Parameters
        ----------
        hier : str
            The name of the hierarchical block as the partial region.
        parser : HWH
            A parser object for the partial region.
        """
        self._update_pr_ip(parser, hier)
        self._update_pr_gpio(parser)
        self._update_pr_intr_pins(parser)
        self._update_pr_hier(hier)
    def _update_pr_ip(self, parser, hier):
        # Replace every IP entry under the PR hierarchy with the entries
        # parsed from the partial-region HWH.
        merged_ip_dict = deepcopy(self.ip_dict)
        if type(parser) is HWH:
            for k in merged_ip_dict.copy():
                if k.startswith(hier):
                    merged_ip_dict.pop(k)
            for k, v in parser.ip_dict.items():
                merged_ip_dict[v['fullpath']] = v
        else:
            raise ValueError("Cannot find HWH PR region parser.")
        self.ip_dict = merged_ip_dict
    def _update_pr_gpio(self, parser):
        # Refresh GPIO entries whose pins appear in the partial region.
        new_gpio_dict = dict()
        for k, v in self.gpio_dict.items():
            for pin in v["pins"]:
                if pin in parser.pins:
                    # NOTE(review): ``v`` is a dict and ``parser.nets[...]``
                    # appears to be a set of net names elsewhere in this class;
                    # ``dict |= set`` would raise TypeError — confirm the
                    # intended merge (possibly v["pins"] = net_set).
                    v |= parser.nets[parser.pins[pin]]
            new_gpio_dict[k] = v
        self.gpio_dict = new_gpio_dict
    def _update_pr_intr_pins(self, parser):
        # Re-key interrupt pins to the deepest (most hierarchical) net name
        # found in the partial-region netlist.
        new_interrupt_pins = dict()
        for k, v in self.interrupt_pins.items():
            if k in parser.pins:
                net_set = parser.nets[parser.pins[k]]
                # Pick the net with the most '/' separators, i.e. the most
                # deeply nested full path.
                hier_map = {i.count("/"): i for i in net_set}
                hier_map = sorted(hier_map.items(), reverse=True)
                fullpath = hier_map[0][-1]
                new_interrupt_pins[fullpath] = deepcopy(v)
                new_interrupt_pins[fullpath]["fullpath"] = fullpath
            else:
                new_interrupt_pins[k] = v
        # NOTE(review): assigns ``self._interrupt_pins`` while other methods
        # use ``self.interrupt_pins`` — confirm the leading underscore.
        self._interrupt_pins = new_interrupt_pins
    def _update_pr_hier(self, hier):
        # Rebuild the hierarchy entry for the PR region, then re-link IPs,
        # sub-hierarchies, interrupts and GPIO into their parents.
        self.hierarchy_dict[hier] = {
            "ip": dict(),
            "hierarchies": dict(),
            "interrupts": dict(),
            "gpio": dict(),
            "fullpath": hier,
            "memories": dict(),
        }
        for name, val in self.ip_dict.items():
            hier, _, ip = name.rpartition("/")
            if hier:
                self.hierarchy_dict[hier]["ip"][ip] = val
                # NOTE(review): duplicated assignment — appears redundant;
                # confirm against upstream before removing.
                self.hierarchy_dict[hier]["ip"][ip] = val
        for name, val in self.hierarchy_dict.items():
            hier, _, subhier = name.rpartition("/")
            if hier:
                self.hierarchy_dict[hier]["hierarchies"][subhier] = val
        for interrupt, val in self._interrupt_pins.items():
            block, _, pin = interrupt.rpartition("/")
            if block in self.ip_dict:
                self.ip_dict[block]["interrupts"][pin] = val
            if block in self.hierarchy_dict:
                self.hierarchy_dict[block]["interrupts"][pin] = val
        for gpio in self.gpio_dict.values():
            for connection in gpio["pins"]:
                ip, _, pin = connection.rpartition("/")
                if ip in self.ip_dict:
                    self.ip_dict[ip]["gpio"][pin] = gpio
                elif ip in self.hierarchy_dict:
                    self.hierarchy_dict[ip]["gpio"][pin] = gpio
    def clear_devicetree(self):
        """Clear the device tree dictionary.

        This should be used when downloading the full bitstream, where all the
        dtbo are cleared from the system.
        """
        for i in self.devicetree_dict:
            self.devicetree_dict[i].remove()
    def insert_device_tree(self, abs_dtbo):
        """Insert device tree segment.

        For device tree segments associated with full / partial bitstreams,
        users can provide the relative or absolute paths of the dtbo files.

        Parameters
        ----------
        abs_dtbo : str
            The absolute path to the device tree segment.
        """
        dtbo_base_name = get_dtbo_base_name(abs_dtbo)
        if not hasattr(self, "devicetree_dict"):
            self.devicetree_dict = {}
        self.devicetree_dict[dtbo_base_name] = DeviceTreeSegment(abs_dtbo)
        # Remove any stale segment of the same name before inserting.
        self.devicetree_dict[dtbo_base_name].remove()
        self.devicetree_dict[dtbo_base_name].insert()
    def remove_device_tree(self, abs_dtbo):
        """Remove device tree segment for the overlay.

        Parameters
        ----------
        abs_dtbo : str
            The absolute path to the device tree segment.
        """
        dtbo_base_name = get_dtbo_base_name(abs_dtbo)
        self.devicetree_dict[dtbo_base_name].remove()
        del self.devicetree_dict[dtbo_base_name]
    def shutdown(self):
        """Shutdown the AXI connections to the PL in preparation for
        reconfiguration
        """
        from ..mmio import MMIO
        from .global_state import (
            GlobalState,
            global_state_file_exists,
            load_global_state,
            initial_global_state_file_boot_check
        )
        initial_global_state_file_boot_check()
        if global_state_file_exists():
            gs = load_global_state()
            for sd_ip in gs.shutdown_ips.values():
                mmio = MMIO(sd_ip.base_addr, device=self)
                # Request shutdown
                mmio.write(0x0, 0x1)
                # Poll until the shutdown manager reports done (0x0F) or a
                # bounded number of reads elapses.
                i = 0
                while mmio.read(0x0) != 0x0F and i < 16000:
                    i += 1
                if i >= 16000:
                    warnings.warn(
                        "Timeout for shutdown manager. It's likely "
                        "the configured bitstream and metadata "
                        "don't match."
                    )
    def post_download(self, bitstream, parser, name: str = "Unknown"):
        # After a *full* bitstream download, stamp the download time and reset
        # all dictionaries from the parser. Partial bitstreams skip this.
        if not bitstream.partial:
            import datetime
            t = datetime.datetime.now()
            bitstream.timestamp = "{}/{}/{} {}:{}:{} +{}".format(
                t.year, t.month, t.day, t.hour, t.minute, t.second, t.microsecond
            )
            self.reset(parser, bitstream.timestamp, bitstream.bitfile_name)
    def has_capability(self, cap):
        """Test if the device has a desired capability

        Parameters
        ----------
        cap : str
            The desired capability

        Returns
        -------
        bool
            True if the devices support cap
        """
        if not hasattr(self, "capabilities"):
            return False
        return cap in self.capabilities and self.capabilities[cap]
    def get_bitfile_metadata(self, bitfile_name):
        # Subclasses override this to return a parser (e.g. HWH) for the
        # bitstream; the base implementation provides no metadata.
        return None
    def close(self):
        """ Deprecated """
        # NOTE(review): the two string fragments concatenate without a space
        # ("...this callwill be removed...") — cosmetic defect in the warning.
        warnings.warn("PL Server has been deprecated -- this call"
                      "will be removed in a future release")
        pass
| {
"content_hash": "7ed17efa2f7ff9fbf5ba13bc2a955bff",
"timestamp": "",
"source": "github",
"line_count": 446,
"max_line_length": 84,
"avg_line_length": 34.05156950672646,
"alnum_prop": 0.5612695068150392,
"repo_name": "schelleg/PYNQ",
"id": "5ce427b80d1d17c33794619eae5b6194f7245e50",
"size": "15268",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pynq/pl_server/device.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "51"
},
{
"name": "BitBake",
"bytes": "1840"
},
{
"name": "C",
"bytes": "1062607"
},
{
"name": "C++",
"bytes": "76769"
},
{
"name": "CMake",
"bytes": "578"
},
{
"name": "JavaScript",
"bytes": "239958"
},
{
"name": "Jupyter Notebook",
"bytes": "17148467"
},
{
"name": "Makefile",
"bytes": "165279"
},
{
"name": "Python",
"bytes": "1388540"
},
{
"name": "Shell",
"bytes": "67192"
},
{
"name": "SystemVerilog",
"bytes": "53374"
},
{
"name": "Tcl",
"bytes": "1383109"
},
{
"name": "VHDL",
"bytes": "738710"
},
{
"name": "Verilog",
"bytes": "284588"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals, print_function, division, absolute_import
import unittest
from neo.rawio.elanrawio import ElanRawIO
from neo.test.rawiotest.common_rawio_test import BaseTestRawIO
class TestElanRawIO(BaseTestRawIO, unittest.TestCase, ):
    # Declarative configuration consumed by BaseTestRawIO: the RawIO class
    # under test, the entities it opens, and the fixture files to fetch.
    rawioclass = ElanRawIO
    entities_to_test = ['File_elan_1.eeg']
    # Elan datasets consist of a .eeg data file plus .ent/.pos sidecars.
    files_to_download = [
        'File_elan_1.eeg',
        'File_elan_1.eeg.ent',
        'File_elan_1.eeg.pos',
    ]
if __name__ == "__main__":
unittest.main()
| {
"content_hash": "27e2466b0d730d11601262fc2c58044f",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 82,
"avg_line_length": 24.095238095238095,
"alnum_prop": 0.6739130434782609,
"repo_name": "rgerkin/python-neo",
"id": "c0336b30164970a904b4ca2bd7333cb8ced699b0",
"size": "567",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "neo/test/rawiotest/test_elanrawio.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "2486594"
}
],
"symlink_target": ""
} |
def checkio(expression):
    '''
    Return True if every bracket in *expression* is properly matched.

    Supports (), [] and {} and enforces first-in-last-out (stack) nesting:
    each closing bracket must match the most recently opened one. All
    non-bracket characters are ignored.

    Fixes over the previous version: the debug ``print`` statements that
    leaked output from inside the function are removed, and the redundant
    ``toEnd`` counter (which merely duplicated the stack length) is gone.
    '''
    # Map each opener to the closer it owes.
    pairs = {'[': ']', '{': '}', '(': ')'}
    closers = set(pairs.values())
    expected = []  # stack of closing brackets still outstanding
    for ch in expression:
        if ch in pairs:
            expected.append(pairs[ch])
        elif ch in closers:
            # A closer with no opener, or the wrong closer, fails immediately.
            if not expected or ch != expected[-1]:
                return False
            expected.pop()
    # Balanced only if every opened bracket was closed.
    return not expected
#These "asserts" using only for self-checking and not necessary for auto-testing
if __name__ == '__main__':
#print checkio(u"{[(3+1)+2]+}")
print checkio("(((([[[{{{3}}}]]]]))))"); #extra bracket]
assert checkio(u"((5+3)*2+1)") == True, "Simple"
assert checkio(u"{[(3+1)+2]+}") == True, "Different types"
assert checkio(u"(3+{1-1)}") == False, ") is alone inside {}"
assert checkio(u"[1+1]+(2*2)-{3/3}") == True, "Different operators"
assert checkio(u"(({[(((1)-2)+3)-3]/3}-3)") == False, "One is redundant"
assert checkio(u"2+3") == True, "No brackets, no problem"
| {
"content_hash": "a9aa7066a49c4d1f49e3f5f06068122c",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 80,
"avg_line_length": 29.680851063829788,
"alnum_prop": 0.6028673835125448,
"repo_name": "dramaticlly/Python4Interview",
"id": "d2647a27ec0ef85b3f83cef198248602f1e4ff7f",
"size": "1395",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bracketCheck.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "7894"
},
{
"name": "C++",
"bytes": "3543"
},
{
"name": "Java",
"bytes": "69468"
},
{
"name": "Python",
"bytes": "88459"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from future.builtins import str
from future.utils import with_metaclass
from json import loads
try:
from urllib.request import urlopen
from urllib.parse import urlencode
except ImportError:
from urllib import urlopen, urlencode
from django.contrib.contenttypes.fields import GenericForeignKey
from django.db import models
from django.db.models.base import ModelBase
from django.db.models.signals import post_save
from django.template.defaultfilters import truncatewords_html
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import strip_tags
from django.utils.timesince import timesince
from django.utils.timezone import now
from django.utils.translation import ugettext, ugettext_lazy as _
from mezzanine.conf import settings
from mezzanine.core.fields import RichTextField, OrderField
from mezzanine.core.managers import DisplayableManager, CurrentSiteManager
from mezzanine.utils.html import TagCloser
from mezzanine.utils.models import base_concrete_model, get_user_model_name
from mezzanine.utils.sites import current_site_id, current_request
from mezzanine.utils.urls import admin_url, slugify, unique_slug
user_model_name = get_user_model_name()
class SiteRelated(models.Model):
    """
    Abstract model for all things site-related. Adds a foreignkey to
    Django's ``Site`` model, and filters by site with all querysets.
    See ``mezzanine.utils.sites.current_site_id`` for implementation
    details.
    """

    # Default manager filters every queryset to the current site.
    objects = CurrentSiteManager()

    class Meta:
        abstract = True

    site = models.ForeignKey("sites.Site", editable=False)

    def save(self, update_site=False, *args, **kwargs):
        """
        Set the site to the current site when the record is first
        created, or the ``update_site`` argument is explicitly set
        to ``True``.
        """
        if update_site or (self.id is None and self.site_id is None):
            self.site_id = current_site_id()
        super(SiteRelated, self).save(*args, **kwargs)
@python_2_unicode_compatible
class Slugged(SiteRelated):
    """
    Abstract model that handles auto-generating slugs. Each slugged
    object is also affiliated with a specific site object.
    """

    title = models.CharField(_("Title"), max_length=500)
    slug = models.CharField(_("URL"), max_length=2000, blank=True, null=True,
        help_text=_("Leave blank to have the URL auto-generated from "
                    "the title."))

    class Meta:
        abstract = True

    def __str__(self):
        return self.title

    def save(self, *args, **kwargs):
        """
        If no slug is provided, generates one before saving.
        """
        if not self.slug:
            self.slug = self.generate_unique_slug()
        super(Slugged, self).save(*args, **kwargs)

    def generate_unique_slug(self):
        """
        Create a unique slug by passing the result of get_slug() to
        utils.urls.unique_slug, which appends an index if necessary.
        """
        # For custom content types, use the ``Page`` instance for
        # slug lookup.
        concrete_model = base_concrete_model(Slugged, self)
        # Exclude self so re-saving an object keeps its own slug available.
        slug_qs = concrete_model.objects.exclude(id=self.id)
        return unique_slug(slug_qs, "slug", self.get_slug())

    def get_slug(self):
        """
        Allows subclasses to implement their own slug creation logic.
        """
        attr = "title"
        if settings.USE_MODELTRANSLATION:
            from modeltranslation.utils import build_localized_fieldname
            attr = build_localized_fieldname(attr, settings.LANGUAGE_CODE)
        # Get self.title_xx where xx is the default language, if any.
        # Get self.title otherwise.
        return slugify(getattr(self, attr, None) or self.title)

    def admin_link(self):
        # HTML link to the object's public URL, rendered in the admin list.
        return "<a href='%s'>%s</a>" % (self.get_absolute_url(),
                                        ugettext("View on site"))
    admin_link.allow_tags = True
    admin_link.short_description = ""
class MetaData(models.Model):
    """
    Abstract model that provides meta data (HTML title and meta
    description) for content.
    """

    # Optional override for the HTML <title>; ``meta_title()`` falls
    # back to str(instance) when this is empty.
    _meta_title = models.CharField(_("Title"), null=True, blank=True,
        max_length=500,
        help_text=_("Optional title to be used in the HTML title tag. "
                    "If left blank, the main title field will be used."))
    description = models.TextField(_("Description"), blank=True)
    # When True, ``description`` is regenerated from the first
    # content-like field on every save.
    gen_description = models.BooleanField(_("Generate description"),
        help_text=_("If checked, the description will be automatically "
                    "generated from content. Uncheck if you want to manually "
                    "set a custom description."), default=True)

    class Meta:
        abstract = True

    def save(self, *args, **kwargs):
        """
        Set the description field on save, unless the user has opted
        to maintain it manually.
        """
        if self.gen_description:
            # Derive plain text from the first content field.
            self.description = strip_tags(self.description_from_content())
        super(MetaData, self).save(*args, **kwargs)

    def meta_title(self):
        """
        Accessor for the optional ``_meta_title`` field, which returns
        the string version of the instance if not provided.
        """
        return self._meta_title or str(self)

    def description_from_content(self):
        """
        Returns the first block or sentence of the first content-like
        field.
        """
        description = ""
        # Use the first RichTextField, or TextField if none found.
        for field_type in (RichTextField, models.TextField):
            if not description:
                for field in self._meta.fields:
                    if (isinstance(field, field_type) and
                            field.name != "description"):
                        description = getattr(self, field.name)
                        if description:
                            # Run any configured rich-text filters
                            # (e.g. markdown) before truncating, so
                            # markup boundaries are detectable below.
                            from mezzanine.core.templatetags.mezzanine_tags \
                                import richtext_filters
                            description = richtext_filters(description)
                            break
        # Fall back to the title if description couldn't be determined.
        if not description:
            description = str(self)
        # Strip everything after the first block or sentence. The
        # search is case-insensitive but the slice is taken from the
        # original string.
        ends = ("</p>", "<br />", "<br/>", "<br>", "</ul>",
                "\n", ". ", "! ", "? ")
        for end in ends:
            pos = description.lower().find(end)
            if pos > -1:
                # Close any tags left open by the cut.
                description = TagCloser(description[:pos]).html
                break
        else:
            # No block/sentence boundary found - truncate by words.
            description = truncatewords_html(description, 100)
        return description
class TimeStamped(models.Model):
    """
    Mixin adding automatic creation and modification timestamps.
    """

    class Meta:
        abstract = True

    created = models.DateTimeField(null=True, editable=False)
    updated = models.DateTimeField(null=True, editable=False)

    def save(self, *args, **kwargs):
        timestamp = now()
        self.updated = timestamp
        if not self.id:
            # First save - record the creation time as well.
            self.created = timestamp
        super(TimeStamped, self).save(*args, **kwargs)
# Workflow states for ``Displayable.status``: drafts are only shown to
# admin users, published items are public (subject to publish/expiry
# dates).
CONTENT_STATUS_DRAFT = 1
CONTENT_STATUS_PUBLISHED = 2
CONTENT_STATUS_CHOICES = (
    (CONTENT_STATUS_DRAFT, _("Draft")),
    (CONTENT_STATUS_PUBLISHED, _("Published")),
)

# Sentinel stored in ``Displayable.short_url`` when no URL-shortening
# service is configured, so generation isn't retried on every access.
SHORT_URL_UNSET = "unset"
class Displayable(Slugged, MetaData, TimeStamped):
    """
    Abstract model that provides features of a visible page on the
    website such as publishing fields. Basis of Mezzanine pages.
    """

    # Draft content is only shown to admin users.
    status = models.IntegerField(_("Status"),
        choices=CONTENT_STATUS_CHOICES, default=CONTENT_STATUS_PUBLISHED,
        help_text=_("With Draft chosen, will only be shown for admin users "
                    "on the site."))
    publish_date = models.DateTimeField(_("Published from"),
        help_text=_("With Published chosen, won't be shown until this time"),
        blank=True, null=True)
    expiry_date = models.DateTimeField(_("Expires on"),
        help_text=_("With Published chosen, won't be shown after this time"),
        blank=True, null=True)
    # Cached shortened URL, or SHORT_URL_UNSET - see set_short_url().
    short_url = models.URLField(blank=True, null=True)
    in_sitemap = models.BooleanField(_("Show in sitemap"), default=True)

    objects = DisplayableManager()
    # Field weighting used by Mezzanine's keyword search.
    search_fields = {"title": 5}

    class Meta:
        abstract = True

    def save(self, *args, **kwargs):
        """
        Set default for ``publish_date``.
        """
        if self.publish_date is None:
            self.publish_date = now()
        super(Displayable, self).save(*args, **kwargs)

    def get_admin_url(self):
        # URL of this object's change form in the admin.
        return admin_url(self, "change", self.id)

    def publish_date_since(self):
        """
        Returns the time since ``publish_date``.
        """
        return timesince(self.publish_date)
    publish_date_since.short_description = _("Published from")

    def get_absolute_url(self):
        """
        Raise an error if called on a subclass without
        ``get_absolute_url`` defined, to ensure all search results
        contains a URL.
        """
        name = self.__class__.__name__
        raise NotImplementedError("The model %s does not have "
                                  "get_absolute_url defined" % name)

    def get_absolute_url_with_host(self):
        """
        Returns host + ``get_absolute_url`` - used by the various
        ``short_url`` mechanics below.

        Technically we should use ``self.site.domain``, here, however
        if we were to invoke the ``short_url`` mechanics on a list of
        data, we'd trigger a db query per item. Using
        ``current_request`` should provide the same result, since site
        related data should only be loaded based on the current host
        anyway.
        """
        return current_request().build_absolute_uri(self.get_absolute_url())

    def set_short_url(self):
        """
        Generates the ``short_url`` attribute if the model does not
        already have one. Used by the ``set_short_url_for`` template
        tag and ``TweetableAdmin``.

        If no sharing service is defined (bitly is the one implemented,
        but others could be by overriding ``generate_short_url``), the
        ``SHORT_URL_UNSET`` marker gets stored in the DB. In this case,
        ``short_url`` is temporarily (eg not persisted) set to
        host + ``get_absolute_url`` - this is so that we don't
        permanently store ``get_absolute_url``, since it may change
        over time.
        """
        if self.short_url == SHORT_URL_UNSET:
            # No service configured - use the full URL without saving.
            self.short_url = self.get_absolute_url_with_host()
        elif not self.short_url:
            self.short_url = self.generate_short_url()
            self.save()

    def generate_short_url(self):
        """
        Returns a new short URL generated using bit.ly if credentials for the
        service have been specified, otherwise the SHORT_URL_UNSET marker.
        """
        from mezzanine.conf import settings
        settings.use_editable()
        if settings.BITLY_ACCESS_TOKEN:
            url = "https://api-ssl.bit.ly/v3/shorten?%s" % urlencode({
                "access_token": settings.BITLY_ACCESS_TOKEN,
                "uri": self.get_absolute_url_with_host(),
            })
            # NOTE(review): network call with no error handling - a
            # failed request will raise from urlopen/loads.
            response = loads(urlopen(url).read().decode("utf-8"))
            if response["status_code"] == 200:
                return response["data"]["url"]
        return SHORT_URL_UNSET

    def _get_next_or_previous_by_publish_date(self, is_next, **kwargs):
        """
        Retrieves next or previous object by publish date. We implement
        our own version instead of Django's so we can hook into the
        published manager and concrete subclasses. Returns None when
        there is no neighbouring object.
        """
        arg = "publish_date__gt" if is_next else "publish_date__lt"
        order = "publish_date" if is_next else "-publish_date"
        lookup = {arg: self.publish_date}
        concrete_model = base_concrete_model(Displayable, self)
        try:
            # Prefer the published() manager method when available.
            queryset = concrete_model.objects.published
        except AttributeError:
            queryset = concrete_model.objects.all
        try:
            return queryset(**kwargs).filter(**lookup).order_by(order)[0]
        except IndexError:
            # No next/previous object - fall through to return None.
            pass

    def get_next_by_publish_date(self, **kwargs):
        """
        Retrieves next object by publish date.
        """
        return self._get_next_or_previous_by_publish_date(True, **kwargs)

    def get_previous_by_publish_date(self, **kwargs):
        """
        Retrieves previous object by publish date.
        """
        return self._get_next_or_previous_by_publish_date(False, **kwargs)
class RichText(models.Model):
    """
    Provides a Rich Text field for managing general content and making
    it searchable.
    """

    content = RichTextField(_("Content"))

    # Included in Mezzanine's keyword search with default weighting.
    search_fields = ("content",)

    class Meta:
        abstract = True
class OrderableBase(ModelBase):
    """
    Metaclass for ``Orderable``. Looks for ``order_with_respect_to``
    on the model's inner ``Meta`` class and, if found, moves it onto
    the model itself (Django's native handling of it breaks with
    ``ForeignKey("self")``). Also supplies a default ``ordering`` of
    ``_order`` when the ``Meta`` class doesn't define one.
    """

    def __new__(cls, name, bases, attrs):
        # Guarantee an inner Meta class to hang attributes off.
        if "Meta" not in attrs:
            class Meta:
                pass
            attrs["Meta"] = Meta
        meta = attrs["Meta"]
        if hasattr(meta, "order_with_respect_to"):
            # Relocate to a model attribute and remove from Meta,
            # where Django would otherwise choke on it.
            attrs["order_with_respect_to"] = meta.order_with_respect_to
            del meta.order_with_respect_to
        if not hasattr(meta, "ordering"):
            meta.ordering = ("_order",)
        return super(OrderableBase, cls).__new__(cls, name, bases, attrs)
class Orderable(with_metaclass(OrderableBase, models.Model)):
    """
    Abstract model that provides a custom ordering integer field
    similar to using Meta's ``order_with_respect_to``, since to
    date (Django 1.2) this doesn't work with ``ForeignKey("self")``,
    or with Generic Relations. We may also want this feature for
    models that aren't ordered with respect to a particular field.
    """

    # Zero-based position among siblings; assigned on first save.
    _order = OrderField(_("Order"), null=True)

    class Meta:
        abstract = True

    def with_respect_to(self):
        """
        Returns a dict to use as a filter for ordering operations
        containing the original ``Meta.order_with_respect_to`` value
        if provided. If the field is a Generic Relation, the dict
        returned contains names and values for looking up the
        relation's ``ct_field`` and ``fk_field`` attributes.
        """
        try:
            name = self.order_with_respect_to
            value = getattr(self, name)
        except AttributeError:
            # No ``order_with_respect_to`` specified on the model.
            return {}
        # Support for generic relations.
        field = getattr(self.__class__, name)
        if isinstance(field, GenericForeignKey):
            names = (field.ct_field, field.fk_field)
            return dict([(n, getattr(self, n)) for n in names])
        return {name: value}

    def save(self, *args, **kwargs):
        """
        Set the initial ordering value: the current number of siblings,
        i.e. append at the end.
        """
        if self._order is None:
            lookup = self.with_respect_to()
            lookup["_order__isnull"] = False
            concrete_model = base_concrete_model(Orderable, self)
            self._order = concrete_model.objects.filter(**lookup).count()
        super(Orderable, self).save(*args, **kwargs)

    def delete(self, *args, **kwargs):
        """
        Update the ordering values for siblings: everything at or after
        this object's position shifts down by one.
        """
        lookup = self.with_respect_to()
        lookup["_order__gte"] = self._order
        concrete_model = base_concrete_model(Orderable, self)
        after = concrete_model.objects.filter(**lookup)
        after.update(_order=models.F("_order") - 1)
        super(Orderable, self).delete(*args, **kwargs)

    def _get_next_or_previous_by_order(self, is_next, **kwargs):
        """
        Retrieves next or previous object by order. We implement our
        own version instead of Django's so we can hook into the
        published manager, concrete subclasses and our custom
        ``with_respect_to`` method. Returns None when no such sibling
        exists.
        """
        lookup = self.with_respect_to()
        lookup["_order"] = self._order + (1 if is_next else -1)
        concrete_model = base_concrete_model(Orderable, self)
        try:
            # Prefer the published() manager method when available.
            queryset = concrete_model.objects.published
        except AttributeError:
            queryset = concrete_model.objects.filter
        try:
            return queryset(**kwargs).get(**lookup)
        except concrete_model.DoesNotExist:
            # No sibling at that position - fall through to None.
            pass

    def get_next_by_order(self, **kwargs):
        """
        Retrieves next object by order.
        """
        return self._get_next_or_previous_by_order(True, **kwargs)

    def get_previous_by_order(self, **kwargs):
        """
        Retrieves previous object by order.
        """
        return self._get_next_or_previous_by_order(False, **kwargs)
class Ownable(models.Model):
    """
    Mixin associating an object with the user who authored it.
    """

    user = models.ForeignKey(user_model_name, verbose_name=_("Author"),
        related_name="%(class)ss")

    class Meta:
        abstract = True

    def is_editable(self, request):
        """
        Restrict in-line editing to the object's owner and superusers.
        """
        editor = request.user
        return editor.is_superuser or editor.id == self.user_id
class SitePermission(models.Model):
    """
    Permission relationship between a user and a site that's
    used instead of ``User.is_staff``, for admin and inline-editing
    access.
    """

    user = models.OneToOneField(user_model_name, verbose_name=_("Author"),
        related_name="%(class)ss")
    # Sites this user may administer; populated automatically for new
    # staff accounts by ``create_site_permission`` below.
    sites = models.ManyToManyField("sites.Site", blank=True,
        verbose_name=_("Sites"))

    class Meta:
        verbose_name = _("Site permission")
        verbose_name_plural = _("Site permissions")
def create_site_permission(sender, **kw):
    """
    ``post_save`` handler that grants staff (non-superuser) accounts a
    ``SitePermission`` for the current site when they don't already
    have one.
    """
    # The handler is connected for every sender, so filter by the
    # user model's name here (see the comment at the connect() call).
    sender_name = "%s.%s" % (sender._meta.app_label, sender._meta.object_name)
    if sender_name.lower() != user_model_name.lower():
        return
    user = kw["instance"]
    if not user.is_staff or user.is_superuser:
        return
    perm, created = SitePermission.objects.get_or_create(user=user)
    if created or perm.sites.count() < 1:
        perm.sites.add(current_site_id())
# We don't specify the user model as the sender here, because with
# Django's custom user models everything explodes at import time. So
# the handler checks the sender's name against ``user_model_name``
# itself.
post_save.connect(create_site_permission)
| {
"content_hash": "b55e89604dd42056e062cee0c4d9c6de",
"timestamp": "",
"source": "github",
"line_count": 523,
"max_line_length": 78,
"avg_line_length": 36.01147227533461,
"alnum_prop": 0.6122438143782521,
"repo_name": "damnfine/mezzanine",
"id": "aab68541fcdd68cc7e665e1c904697ea48035ec2",
"size": "18834",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mezzanine/core/models.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "44023"
},
{
"name": "DIGITAL Command Language",
"bytes": "8315"
},
{
"name": "HTML",
"bytes": "53116"
},
{
"name": "JavaScript",
"bytes": "110072"
},
{
"name": "Python",
"bytes": "466757"
}
],
"symlink_target": ""
} |
from __future__ import with_statement # This isn't required in Python 2.6
import neuroptikon
import wx.glcanvas
from pydispatch import dispatcher
import osg, osgDB, osgGA, osgManipulator, osgText, osgViewer
from math import log, pi
import os.path, platform, sys, cPickle
try:
import xml.etree.cElementTree as ElementTree
except ImportError:
import xml.etree.ElementTree as ElementTree
from gettext import gettext
from pick_handler import PickHandler
from dragger_cull_callback import DraggerCullCallback
from network.object import Object
from network.pathway import Pathway # pylint: disable=E0611,F0401
from network.arborization import Arborization
from network.stimulus import Stimulus
from network.neuron import Neuron
from network.object_list import ObjectList
from network.synapse import Synapse
from visible import Visible
import layout as layout_module
from shape import Shape
from library.texture import Texture
# Navigation modes for mouse interaction with a Display
# (see Display._navigationMode).
PANNING_MODE = 0
ROTATING_MODE = 1
# TODO: DRAG_SELECTING_MODE = 2
# TODO: other modes?
class Display(wx.glcanvas.GLCanvas):
def __init__(self, parent, network = None, wxId = wx.ID_ANY):
    """
    Displays allow the visualization of networks.

    Each display can visualize any number of objects from a single network. By default all objects added to the network are visualized but this can be disabled by setting the display's autoVisualize attribute to False

    Multiple displays can visualize the same network at the same time. By default the selection is synchronized between displays so selecting an object in one display will select the corresponding object in all other displays. This can be disabled by calling setSynchronizeDisplays(False) on the network.

    You should never create an instance of this class directly. Instances are automatically created when you open a new window either via File --> New Network or by calling displayNetwork() in a console or script.
    """

    # Create the OpenGL canvas, requesting multi-sample buffers when
    # the user asked for smoothing and the toolkit supports it.
    style = wx.WANTS_CHARS | wx.FULL_REPAINT_ON_RESIZE | wx.HSCROLL | wx.VSCROLL
    attribList = [wx.glcanvas.WX_GL_RGBA, wx.glcanvas.WX_GL_DOUBLEBUFFER]
    if neuroptikon.config.ReadBool('Smooth All Objects') and hasattr(wx.glcanvas, 'WX_GL_SAMPLE_BUFFERS'):
        attribList += [wx.glcanvas.WX_GL_SAMPLE_BUFFERS, 1, wx.glcanvas.WX_GL_SAMPLES, 4]
    attribList += [wx.glcanvas.WX_GL_DEPTH_SIZE, 16, 0, 0]
    wx.glcanvas.GLCanvas.__init__(self, parent, wxId, attribList = attribList, pos = wx.DefaultPosition, size = (200,200), style = style, name = "")
    self.glContext = wx.glcanvas.GLContext(self)

    self._name = None

    # Attach to the network being visualized, if any.
    self.network = network
    if self.network is not None:
        self.network.addDisplay(self)
    self.displayRules = []
    self.autoVisualize = True

    # Bookkeeping for visualized objects and their selection /
    # highlight / animation state.
    self.visibles = {}
    self._visibleIds = {}
    self.selectedVisibles = set()
    self.highlightedVisibles = set()
    self.animatedVisibles = set()
    self.selectConnectedVisibles = True

    # Display option defaults (mirrored in _toXMLElement /
    # _fromXMLElement and the setShow*/setHide* accessors).
    self._showRegionNames = True
    self._showNeuronNames = False
    self._showNeuronNamesOnSelection = False
    self._printNeuronNamesOnSelection = False
    self._hideUnselectedNeurons = False
    self._hideSynapsesOnConnections = True
    self._labelsFloatOnTop = False
    self._showFlow = False
    self._highlightOnlyWithinSelection = False
    self._useGhosts = True
    self._ghostingOpacity = 0.15
    self._primarySelectionColor = (0, 0, 1, .4)
    self._secondarySelectionColor = (0, 0, 1, .2)
    self._visiblesSelectionColors = {}
    self._selectionHighlightDepth = 3

    self.viewDimensions = 2

    self.console = None

    # Bounds of all visibles, recomputed lazily.
    self._recomputeBounds = True
    self._recomputeBoundsScheduled = False
    self.visiblesMin = [-100, -100, -100]
    self.visiblesMax = [100, 100, 100]
    self.visiblesCenter = [0, 0, 0]
    self.visiblesSize = [200, 200, 200]

    # Camera / navigation state for the 2D orthographic view.
    self._navigationMode = PANNING_MODE
    self._previous3DNavMode = ROTATING_MODE
    self.orthoCenter = (0, 0)
    self.orthoViewPlane = 'xy'
    self.orthoXPlane = 0
    self.orthoYPlane = 1
    self.orthoZoom = 0
    self.zoomScale = 1

    # Root of the OSG scene graph.
    self.rootNode = osg.MatrixTransform()
    self.rootStateSet = self.rootNode.getOrCreateStateSet()
    self.rootNode.setMatrix(osg.Matrixd.identity())
    self.rootStateSet.setMode(osg.GL_NORMALIZE, osg.StateAttribute.ON )

    # Windows reports scroll wheel deltas on a different scale.
    if platform.system() == 'Windows':
        self.scrollWheelScale = 0.1
    else:
        self.scrollWheelScale = 1

    # TODO: only if pref set?
    # Not in osg 3.2.1?
    # osg.DisplaySettings.instance().setNumMultiSamples(4)

    # Set up the OSG viewer embedded in this canvas.
    self.trackball = osgGA.TrackballManipulator()
    self._previousTrackballMatrix = None
    self._previousTrackballCenter = None
    self._pickHandler = PickHandler(self)
    self.viewer = osgViewer.Viewer()
    self.viewer.setThreadingModel(osgViewer.ViewerBase.SingleThreaded) # TODO: investigate multithreaded options
    self.viewer.addEventHandler(osgViewer.StatsHandler())
    self.viewer.setSceneData(self.rootNode)
    self.viewer.addEventHandler(self._pickHandler)
    light = self.viewer.getLight()
    light.setAmbient(osg.Vec4f(0.4, 0.4, 0.4, 1))
    light.setDiffuse(osg.Vec4f(0.5, 0.5, 0.5, 1))
    self.viewer.setLight(light)
    self._first3DView = True

    # Background color comes from the user's preferences.
    self.backgroundColor = None
    clearColor = (neuroptikon.config.ReadFloat("Color/Background/Red", 0.75), \
                  neuroptikon.config.ReadFloat("Color/Background/Green", 0.75), \
                  neuroptikon.config.ReadFloat("Color/Background/Blue", 0.75), \
                  neuroptikon.config.ReadFloat("Color/Background/Alpha", 0.0))
    self.setBackgroundColor(clearColor)

    # Wire up wx event handlers.
    self.Bind(wx.EVT_SIZE, self.onSize)
    self.Bind(wx.EVT_PAINT, self.onPaint)
    self.Bind(wx.EVT_ERASE_BACKGROUND, self.onEraseBackground)
    self.Bind(wx.EVT_KEY_DOWN, self.onKeyDown)
    self.Bind(wx.EVT_KEY_UP, self.onKeyUp)
    self.Bind(wx.EVT_MOUSE_EVENTS, self.onMouseEvent) # TODO: factor this out into individual events
    self.Bind(wx.EVT_MOUSEWHEEL, self.onMouseWheel)
    self.Bind(wx.EVT_SCROLLWIN, self.onScroll)

    # Manipulator (dragger) state for moving visibles in the view.
    self.dragSelection = None
    self.draggerLOD = None
    self.simpleDragger = None
    self.compositeDragger = None
    self.activeDragger = None
    self.commandMgr = None
    self.draggerScale = 1.0
    self.draggerOffset = (0.0, 0.0, 0.0)

    # Selection behavior flags.
    self.selectionShouldExtend = False
    self.findShortestPath = False
    self._selectedShortestPath = False
    self._useMouseOverSelecting = False
    self.hoverSelect = True
    self.hoverSelecting = False
    self.hoverSelected = False # set to True if the current selection was made by hovering

    width, height = self.GetClientSize()
    self.graphicsWindow = self.viewer.setUpViewerAsEmbeddedInWindow(0, 0, width, height)

    self.SetDropTarget(DisplayDropTarget(self))

    self._nextUniqueId = -1

    self._animationTimer = wx.Timer(self)
    self.Bind(wx.EVT_TIMER, self.onAnimate, self._animationTimer)

    self._suppressRefresh = False

    # Load the GLSL flow-animation shaders from disk; their location
    # differs between a source checkout and an installed build.
    if neuroptikon.runningFromSource:
        shaderDir = os.path.join(neuroptikon.rootDir, 'display')
    else:
        shaderDir = neuroptikon.rootDir
    with open(os.path.join(shaderDir, 'flow_shader.vert')) as f:
        flowVertexShader = f.read()
    with open(os.path.join(shaderDir, 'flow_shader.frag')) as f:
        flowFragmentShader = f.read()
    self.flowProgram = osg.Program()
    self.flowProgram.addShader(osg.Shader(osg.Shader.VERTEX, flowVertexShader))
    self.flowProgram.addShader(osg.Shader(osg.Shader.FRAGMENT, flowFragmentShader))

    # Default uniforms for the flow shader, set on the root state set
    # so individual visibles can override them.
    self.defaultFlowColor = (1.0, 1.0, 1.0, 1.0)
    self.defaultFlowToColorUniform = osg.Uniform('flowToColor', osg.Vec4f(*self.defaultFlowColor))
    self.rootStateSet.addUniform(self.defaultFlowToColorUniform)
    self.defaultFlowFromColorUniform = osg.Uniform('flowFromColor', osg.Vec4f(*self.defaultFlowColor))
    self.rootStateSet.addUniform(self.defaultFlowFromColorUniform)
    self.defaultFlowSpacing = 0.4 # Distance between pulses
    self.defaultFlowToSpacingUniform = osg.Uniform('flowToSpacing', self.defaultFlowSpacing)
    self.rootStateSet.addUniform(self.defaultFlowToSpacingUniform)
    self.defaultFlowFromSpacingUniform = osg.Uniform('flowFromSpacing', self.defaultFlowSpacing)
    self.rootStateSet.addUniform(self.defaultFlowFromSpacingUniform)
    self.defaultFlowSpeed = 0.15 # Pulse speed
    self.defaultFlowToSpeedUniform = osg.Uniform('flowToSpeed', self.defaultFlowSpeed)
    self.rootStateSet.addUniform(self.defaultFlowToSpeedUniform)
    self.defaultFlowFromSpeedUniform = osg.Uniform('flowFromSpeed', self.defaultFlowSpeed)
    self.rootStateSet.addUniform(self.defaultFlowFromSpeedUniform)
    self.defaultFlowSpread = 0.9 # The pulse should cover 50% of the path
    self.defaultFlowToSpreadUniform = osg.Uniform('flowToSpread', self.defaultFlowSpread)
    self.rootStateSet.addUniform(self.defaultFlowToSpreadUniform)
    self.defaultFlowFromSpreadUniform = osg.Uniform('flowFromSpread', self.defaultFlowSpread)
    self.rootStateSet.addUniform(self.defaultFlowFromSpreadUniform)

    # Re-evaluate flow animation whenever the selection or the
    # show-flow option changes.
    dispatcher.connect(self._onSelectionOrShowFlowChanged, ('set', 'selection'), self)
    dispatcher.connect(self._onSelectionOrShowFlowChanged, ('set', 'showFlow'), self)

    self.lastUsedLayout = None

    self._closing = False

    self._visibleBeingAdded = None

    self.compassCamera = None
    self._compassDrawables = {}
def _fromXMLElement(self, xmlElement):
    """
    Restore this display's settings and visualized items from the
    given XML element (the inverse of :meth:`_toXMLElement`).

    Refreshing is suppressed while the visibles are rebuilt; the view
    is zoomed/reset once at the end.

    Raises ValueError when a Visible element cannot be reconstructed.
    """
    self._suppressRefresh = True

    name = xmlElement.findtext('Name')
    if name is not None:
        self.setName(name)

    # Older documents used lower-cased element names, so fall back to
    # those whenever the capitalized form isn't present.
    colorElement = xmlElement.find('BackgroundColor')
    if colorElement is None:
        colorElement = xmlElement.find('backgroundColor')
    if colorElement is not None:
        red = float(colorElement.get('r'))
        green = float(colorElement.get('g'))
        blue = float(colorElement.get('b'))
        alpha = float(colorElement.get('a'))
        self.setBackgroundColor((red, green, blue, alpha))

    flowAppearanceElement = xmlElement.find('DefaultFlowAppearance')
    if flowAppearanceElement is None:
        flowAppearanceElement = xmlElement.find('defaultFlowAppearance')
    if flowAppearanceElement is not None:
        colorElement = flowAppearanceElement.find('Color')
        if colorElement is None:
            colorElement = flowAppearanceElement.find('color')
        if colorElement is not None:
            red = float(colorElement.get('r'))
            green = float(colorElement.get('g'))
            blue = float(colorElement.get('b'))
            alpha = float(colorElement.get('a'))
            # NOTE(review): alpha is parsed but not forwarded even
            # though _toXMLElement serializes all four components -
            # confirm whether setDefaultFlowColor() accepts an RGBA
            # tuple before passing alpha along.
            self.setDefaultFlowColor((red, green, blue))
        if flowAppearanceElement.get('spacing') is not None:
            self.setDefaultFlowSpacing(float(flowAppearanceElement.get('spacing')))
        if flowAppearanceElement.get('speed') is not None:
            self.setDefaultFlowSpeed(float(flowAppearanceElement.get('speed')))
        if flowAppearanceElement.get('spread') is not None:
            self.setDefaultFlowSpread(float(flowAppearanceElement.get('spread')))
        if self.defaultFlowSpacing == 1.0 and self.defaultFlowSpeed == 1.0 and self.defaultFlowSpread == 0.2:
            # Switch to new world-space relative defaults.
            self.setDefaultFlowSpacing(0.05)
            self.setDefaultFlowSpeed(0.05)

    visibleElements = xmlElement.findall('Visible')

    # Add all of the nodes
    for visibleElement in visibleElements:
        if visibleElement.find('Path') is None and visibleElement.find('path') is None:
            visible = Visible._fromXMLElement(visibleElement, self)
            if visible is None:
                raise ValueError(gettext('Could not create visualized item'))
            self.addVisible(visible)

    # Add all of the paths (must be done after nodes are added)
    for visibleElement in visibleElements:
        if visibleElement.find('Path') is not None or visibleElement.find('path') is not None:
            visible = Visible._fromXMLElement(visibleElement, self)
            if visible is None:
                raise ValueError(gettext('Could not create visualized item'))
            self.addVisible(visible)

    self.computeVisiblesBound()

    self.setViewDimensions(int(xmlElement.get('dimensions')))

    # Boolean attributes are serialized as strings.
    trueValues = ['true', 'True', 'TRUE', '1']
    if xmlElement.get('showRegionNames') is not None:
        self.setShowRegionNames(xmlElement.get('showRegionNames') in trueValues)
    if xmlElement.get('showNeuronNames') is not None:
        self.setShowNeuronNames(xmlElement.get('showNeuronNames') in trueValues)
    if xmlElement.get('showNeuronNamesOnSelection') is not None:
        self.setShowNeuronNamesOnSelection(xmlElement.get('showNeuronNamesOnSelection') in trueValues)
    if xmlElement.get('printNeuronNamesOnSelection') is not None:
        self.setPrintNeuronNamesOnSelection(xmlElement.get('printNeuronNamesOnSelection') in trueValues)
    if xmlElement.get('hideUnselectedNeurons') is not None:
        self.setHideUnselectedNeurons(xmlElement.get('hideUnselectedNeurons') in trueValues)
    # Restore hideSynapsesOnConnections - _toXMLElement serializes it
    # but it was previously never read back.
    if xmlElement.get('hideSynapsesOnConnections') is not None:
        self.setHideSynapsesOnConnections(xmlElement.get('hideSynapsesOnConnections') in trueValues)
    if xmlElement.get('showFlow') is not None:
        self.setShowFlow(xmlElement.get('showFlow') in trueValues)
    if xmlElement.get('useGhosting') is not None:
        self.setUseGhosts(xmlElement.get('useGhosting') in trueValues)
    if xmlElement.get('ghostingOpacity') is not None:
        self.setGhostingOpacity(float(xmlElement.get('ghostingOpacity')))
    if xmlElement.get('useMouseOverSelecting') is not None:
        self._useMouseOverSelecting = xmlElement.get('useMouseOverSelecting') in trueValues
    if xmlElement.get('autoVisualize') is not None:
        self.autoVisualize = xmlElement.get('autoVisualize') in trueValues
    if xmlElement.get('labelsFloatOnTop') is not None:
        self.setLabelsFloatOnTop(xmlElement.get('labelsFloatOnTop') in trueValues)
    if xmlElement.get('selectionHighlightDepth') is not None:
        self.setSelectionHighlightDepth(int(xmlElement.get('selectionHighlightDepth')))
    if xmlElement.get('highlightOnlyWithinSelection') is not None:
        self.setHighlightOnlyWithinSelection(xmlElement.get('highlightOnlyWithinSelection') in trueValues)
    if xmlElement.get('showCompass') is not None:
        self.setShowCompass(xmlElement.get('showCompass') in trueValues)

    # Re-select whatever was selected when the document was saved.
    selectedVisibleIds = xmlElement.get('selectedVisibleIds')
    visiblesToSelect = []
    if selectedVisibleIds is not None:
        for visibleId in selectedVisibleIds.split(','):
            if visibleId.isdigit() and int(visibleId) in self._visibleIds:
                visiblesToSelect.append(self._visibleIds[int(visibleId)])
    self.selectVisibles(visiblesToSelect)

    self._suppressRefresh = False
    self._recomputeBounds = True
    if self.viewDimensions == 2:
        self.zoomToFit()
    else:
        self.resetView()
    self.Refresh()
def _toXMLElement(self, parentElement):
    """
    Serialize this display's settings, display rules and visibles as a
    'Display' sub-element of the given element, and return it.

    Raises ValueError when a display rule or visible cannot be saved.
    """
    displayElement = ElementTree.SubElement(parentElement, 'Display')
    if self._name:
        ElementTree.SubElement(displayElement, 'Name').text = self._name

    # Add the background color
    colorElement = ElementTree.SubElement(displayElement, 'BackgroundColor')
    colorElement.set('r', str(self.backgroundColor[0]))
    colorElement.set('g', str(self.backgroundColor[1]))
    colorElement.set('b', str(self.backgroundColor[2]))
    colorElement.set('a', str(self.backgroundColor[3]))

    # Add the default flow appearance
    flowAppearanceElement = ElementTree.SubElement(displayElement, 'DefaultFlowAppearance')
    colorElement = ElementTree.SubElement(flowAppearanceElement, 'Color')
    colorElement.set('r', str(self.defaultFlowColor[0]))
    colorElement.set('g', str(self.defaultFlowColor[1]))
    colorElement.set('b', str(self.defaultFlowColor[2]))
    colorElement.set('a', str(self.defaultFlowColor[3]))
    flowAppearanceElement.set('spacing', str(self.defaultFlowSpacing))
    flowAppearanceElement.set('speed', str(self.defaultFlowSpeed))
    flowAppearanceElement.set('spread', str(self.defaultFlowSpread))

    # Add the display rules
    for displayRule in self.displayRules:
        ruleElement = displayRule._toXMLElement(displayElement)
        if ruleElement is None:
            raise ValueError(gettext('Could not save display rule'))

    # Add the visibles; children are serialized by their parents.
    for visibles in self.visibles.itervalues():
        for visible in visibles:
            if visible.parent is None:
                visibleElement = visible._toXMLElement(displayElement)
                if visibleElement is None:
                    raise ValueError(gettext('Could not save visualized item'))

    # Display options, mirrored by _fromXMLElement.
    displayElement.set('dimensions', str(self.viewDimensions))
    displayElement.set('showRegionNames', 'true' if self._showRegionNames else 'false')
    displayElement.set('showNeuronNames', 'true' if self._showNeuronNames else 'false')
    displayElement.set('showNeuronNamesOnSelection', 'true' if self._showNeuronNamesOnSelection else 'false')
    # Previously missing: _fromXMLElement reads this attribute but it
    # was never written, so the setting was lost on save/reload.
    displayElement.set('printNeuronNamesOnSelection', 'true' if self._printNeuronNamesOnSelection else 'false')
    displayElement.set('hideUnselectedNeurons', 'true' if self._hideUnselectedNeurons else 'false')
    displayElement.set('hideSynapsesOnConnections', 'true' if self._hideSynapsesOnConnections else 'false')
    displayElement.set('showFlow', 'true' if self._showFlow else 'false')
    displayElement.set('useGhosting', 'true' if self._useGhosts else 'false')
    displayElement.set('ghostingOpacity', str(self._ghostingOpacity))
    displayElement.set('useMouseOverSelecting', 'true' if self._useMouseOverSelecting else 'false')
    displayElement.set('autoVisualize', 'true' if self.autoVisualize else 'false')
    displayElement.set('labelsFloatOnTop', 'true' if self._labelsFloatOnTop else 'false')
    displayElement.set('selectionHighlightDepth', str(self._selectionHighlightDepth))
    displayElement.set('highlightOnlyWithinSelection', 'true' if self._highlightOnlyWithinSelection else 'false')
    displayElement.set('showCompass', 'true' if self.isShowingCompass() else 'false')

    # Remember the current selection by visible id.
    selectedVisibleIds = []
    for visible in self.selectedVisibles:
        selectedVisibleIds.append(str(visible.displayId))
    displayElement.set('selectedVisibleIds', ','.join(selectedVisibleIds))

    return displayElement
def _toScriptFile(self, scriptFile, scriptRefs, displayRef, savingNetwork):
if self._name != None:
scriptFile.write(displayRef + '.setName(' + repr(self._name) + ')\n')
scriptFile.write(displayRef + '.setBackgroundColor((' + ', '.join([str(component) for component in self.backgroundColor]) + '))\n')
scriptFile.write(displayRef + '.setDefaultFlowColor(' + str(self.defaultFlowColor) + ')\n')
scriptFile.write(displayRef + '.setDefaultFlowSpacing(' + str(self.defaultFlowSpacing) + ')\n')
scriptFile.write(displayRef + '.setDefaultFlowSpeed(' + str(self.defaultFlowSpeed) + ')\n')
scriptFile.write(displayRef + '.setDefaultFlowSpread(' + str(self.defaultFlowSpread) + ')\n')
scriptFile.write(displayRef + '.setViewDimensions(' + str(self.viewDimensions) + ')\n')
scriptFile.write(displayRef + '.setShowCompass(' + str(self.isShowingCompass()) + ')\n')
scriptFile.write(displayRef + '.setShowRegionNames(' + str(self._showRegionNames) + ')\n')
scriptFile.write(displayRef + '.setShowNeuronNames(' + str(self._showNeuronNames) + ')\n')
scriptFile.write(displayRef + '.setShowNeuronNamesOnSelection(' + str(self._showNeuronNamesOnSelection) + ')\n')
scriptFile.write(displayRef + '.setPrintNeuronNamesOnSelection(' + str(self._showNeuronNamesOnSelection) + ')\n')
scriptFile.write(displayRef + '.setHideUnselectedNeurons(' + str(self._hideUnselectedNeurons) + ')\n')
scriptFile.write(displayRef + '.setHideSynapsesOnConnections(' + str(self._hideSynapsesOnConnections) + ')\n')
scriptFile.write(displayRef + '.setShowFlow(' + str(self._showFlow) + ')\n')
scriptFile.write(displayRef + '.setUseGhosts(' + str(self._useGhosts) + ')\n')
scriptFile.write(displayRef + '.setGhostingOpacity(' + str(self._ghostingOpacity) + ')\n')
scriptFile.write(displayRef + '.setUseMouseOverSelecting(' + str(self._useMouseOverSelecting) + ')\n')
scriptFile.write(displayRef + '.setLabelsFloatOnTop(' + str(self._labelsFloatOnTop) + ')\n')
scriptFile.write(displayRef + '.setSelectionHighlightDepth(' + str(self._selectionHighlightDepth) + ')\n')
scriptFile.write(displayRef + '.setHighlightOnlyWithinSelection(' + str(self._highlightOnlyWithinSelection) + ')\n')
scriptFile.write('\n')
# First visualize all of the nodes.
for visibles in self.visibles.itervalues():
for visible in visibles:
if not visible.isPath() and visible.parent is None and not isinstance(visible.client, Stimulus):
visible._toScriptFile(scriptFile, scriptRefs, displayRef, savingNetwork)
# Next visualize all of the connections between the nodes.
for visibles in self.visibles.itervalues():
for visible in visibles:
if visible.isPath():
visible._toScriptFile(scriptFile, scriptRefs, displayRef, savingNetwork)
objectRefs = []
visibleIds = []
for visible in self.selectedVisibles:
if visible.client:
objectRefs.append(scriptRefs[visible.client.networkId])
else:
visibleIds += [visible.displayId]
if any(objectRefs):
scriptFile.write(displayRef + '.selectObjects([' + ', '.join(objectRefs) + '])\n')
for visibleId in visibleIds:
scriptFile.write(displayRef + '.selectVisibles([' + displayRef + '.visibleWithId(' + visibleId + ')], extend = True)')
if self.viewDimensions == 2:
scriptFile.write('\n' + displayRef + '.zoomToFit()\n')
else:
scriptFile.write('\n' + displayRef + '.resetView()\n')
def setName(self, name):
if name != self._name:
self._name = name
dispatcher.send(('set', 'name'), self)
def name(self):
return None if not self._name else str(self._name)
def _generateUniqueId(self):
self._nextUniqueId += 1
return self._nextUniqueId
def setViewDimensions(self, dimensions):
""" Set the number of dimension in which to visualize the network.
The argument must be either 2 or 3.
"""
if dimensions not in (2, 3):
raise ValueError, 'The dimensions argument passed to setViewDimensions() must be 2 or 3.'
if dimensions != self.viewDimensions:
self.viewDimensions = dimensions
width, height = self.GetClientSize()
self._clearDragger()
if self.viewDimensions == 2:
self._previous3DNavMode = self._navigationMode
self.setNavigationMode(PANNING_MODE)
self._previousTrackballMatrix = self.trackball.getMatrix()
self._previousTrackballCenter = self.trackball.getCenter()
self.viewer.setCameraManipulator(None)
self.computeVisiblesBound()
self._resetView()
elif self.viewDimensions == 3:
self.setNavigationMode(self._previous3DNavMode)
# Hide the scroll bars before we get the size of the viewport.
self.SetScrollbar(wx.HORIZONTAL, 0, width, width, True)
self.SetScrollbar(wx.VERTICAL, 0, height, height, True)
width, height = self.GetClientSize()
self.graphicsWindow = self.viewer.setUpViewerAsEmbeddedInWindow(0, 0, width, height)
self.viewer.getCamera().setProjectionMatrixAsPerspective(30.0, float(width)/height, 1.0, 1000.0)
self.viewer.setCameraManipulator(self.trackball)
if self._first3DView:
self.resetView()
self._first3DView = False
else:
self.trackball.computeHomePosition()
self.viewer.home()
self.trackball.setByMatrix(self._previousTrackballMatrix)
#self.trackball.setCenter(self._previousTrackballCenter)
if len(self.selectedVisibles) == 1:
visible = list(self.selectedVisibles)[0]
if visible._isDraggable():
self._addDragger(visible)
# Call _updatePath on all path visibles so parallel edges are drawn correctly.
for visibles in self.visibles.values():
for visible in visibles:
if visible.isPath():
visible._updatePath()
self._updateCompassAxes()
self.Refresh()
dispatcher.send(('set', 'viewDimensions'), self)
def onViewIn2D(self, event_):
self.setViewDimensions(2)
def onViewIn3D(self, event_):
self.setViewDimensions(3)
def setOrthoViewPlane(self, plane):
"""
Set which plane should be viewed in 2D.
The argument must be one of 'xy', 'xz' or 'zy'.
"""
if plane not in ('xy', 'xz', 'zy'):
raise ValueError, "The plane argument passed to setOrthoViewPlane() must be one of 'xy', 'xz' or 'zy'"
if plane != self.orthoViewPlane:
self.orthoViewPlane = plane
if self.orthoViewPlane == 'xy':
self.orthoXPlane = 0
self.orthoYPlane = 1
elif self.orthoViewPlane == 'xz':
self.orthoXPlane = 0
self.orthoYPlane = 2
elif self.orthoViewPlane == 'zy':
self.orthoXPlane = 1
self.orthoYPlane = 2
self._resetView()
# Call _updatePath on all path visibles so parallel edges are drawn correctly.
for visibles in self.visibles.values():
for visible in visibles:
if visible.isPath():
visible._updatePath()
self._updateCompassAxes()
self.Refresh()
dispatcher.send(('set', 'orthoViewPlane'), self)
    def setShowCompass(self, showCompass):
        """
        Show or hide the small orientation compass (X/Y/Z axes) overlaid on the view.
        
        Passing True builds a dedicated post-render camera holding the axis geometry; 
        passing False removes it from the scene graph.
        """
        
        def _addCompassAxis(geode, text, position):
            # Build one compass axis: a short gray line from the origin plus a 
            # screen-aligned text label at the axis tip.  Returns both drawables so 
            # they can later be shown/hidden individually (see _updateCompassAxes).
            
            # Add a line along the axis.
            axis = osg.Geometry()
            axis.setVertexArray(Shape.vectorArrayFromList([(0.0, 0.0, 0.0), (position[0] * 0.75, position[1] * 0.75, position[2] * 0.75)]))
            axis.addPrimitiveSet(Shape.primitiveSetFromList(osg.PrimitiveSet.LINE_STRIP, range(2)))
            axis.setNormalArray(Shape.vectorArrayFromList([(0.0, 0.0, 0.0)]))
            axis.setNormalBinding(osg.Geometry.BIND_OVERALL)
            axis.setColorArray(Shape.vectorArrayFromList([(0.5, 0.5, 0.5)]))
            axis.setColorBinding(osg.Geometry.BIND_OVERALL)
            geode.addDrawable(axis)
            
            # Add the axis label.
            label = osgText.Text()
            label.setCharacterSizeMode(osgText.Text.SCREEN_COORDS)
            if Visible.labelFont is None:
                # No font loaded; fall back to the default at a larger size.
                label.setCharacterSize(48.0)
            else:
                label.setFont(Visible.labelFont)
                label.setCharacterSize(18.0)
            label.setAxisAlignment(osgText.Text.SCREEN)
            label.setAlignment(osgText.Text.CENTER_CENTER)
            label.setColor(osg.Vec4(0.25, 0.25, 0.25, 1.0))
            label.setBackdropColor(osg.Vec4(0.75, 0.75, 0.75, 0.25))
            label.setBackdropType(osgText.Text.OUTLINE)
            label.setPosition(osg.Vec3(*position))
            label.setText(text)
            geode.addDrawable(label)
            
            return (axis, label)
        
        # Only act on an actual state change; self.compassCamera doubles as the flag.
        if showCompass != (self.compassCamera != None):
            if showCompass:
                # A separate camera rendered after the main scene so the compass 
                # stays in a fixed corner of the viewport.
                self.compassCamera = osg.Camera()
                self.compassCamera.setProjectionMatrixAsPerspective(30.0, 1.0, 1.0, 10000.0)
                self.compassCamera.setReferenceFrame(osg.Transform.ABSOLUTE_RF)
                self.compassCamera.setViewMatrixAsLookAt(osg.Vec3d(0, 0, 5), osg.Vec3d(0, 0, 0), osg.Vec3d(0, 1, 0))
                self.compassCamera.setClearMask(osg.GL_DEPTH_BUFFER_BIT)
                self.compassCamera.setRenderOrder(osg.Camera.POST_RENDER)
                self.compassCamera.setAllowEventFocus(False)
                self.compassCamera.setViewport(0, 0, 50, 50)
                # Add the axes
                self._compassGeode = osg.Geode()
                # The transform is counter-rotated against the main camera in _updateCompass().
                self.compassTransform = osg.MatrixTransform()
                self.compassTransform.addChild(self._compassGeode)
                self.compassCamera.addChild(self.compassTransform)
                self._compassDrawables['X'] = _addCompassAxis(self._compassGeode, 'X', (1.0, 0.0, 0.0))
                self._compassDrawables['Y'] = _addCompassAxis(self._compassGeode, 'Y', (0.0, 1.0, 0.0))
                self._compassDrawables['Z'] = _addCompassAxis(self._compassGeode, 'Z', (0.0, 0.0, 1.0))
                self._updateCompassAxes()
                stateSet = self._compassGeode.getOrCreateStateSet()
                stateSet.setMode(osg.GL_LIGHTING, osg.StateAttribute.OFF)
                stateSet.setMode(osg.GL_LINE_SMOOTH, osg.StateAttribute.ON)
                stateSet.setRenderingHint(osg.StateSet.TRANSPARENT_BIN)
                stateSet.setMode(osg.GL_BLEND, osg.StateAttribute.ON)
                self.rootNode.addChild(self.compassCamera)
            else:
                self.rootNode.removeChild(self.compassCamera)
                self._compassGeode = None
                self.compassCamera = None
            self.Refresh()
def isShowingCompass(self):
return self.compassCamera != None
def _updateCompassAxes(self):
# Show/hide the desired axes.
if self.compassCamera:
if self.viewDimensions == 2:
if self.orthoViewPlane == 'xy':
axesToShow = ['X', 'Y']
elif self.orthoViewPlane == 'xz':
axesToShow = ['X', 'Z']
elif self.orthoViewPlane == 'zy':
axesToShow = ['Y', 'Z']
else:
axesToShow = ['X', 'Y', 'Z']
for axis in ['X', 'Y', 'Z']:
for drawable in self._compassDrawables[axis]:
if axis in axesToShow:
if not self._compassGeode.containsDrawable(drawable):
self._compassGeode.addDrawable(drawable)
else:
if self._compassGeode.containsDrawable(drawable):
self._compassGeode.removeDrawable(drawable)
def _updateCompass(self):
if self.viewDimensions == 2:
if self.orthoViewPlane == 'xy':
rotation = osg.Quat(0, osg.Vec3(1, 0, 0))
elif self.orthoViewPlane == 'xz':
rotation = osg.Quat(-pi / 2.0, osg.Vec3(1, 0, 0))
elif self.orthoViewPlane == 'zy':
rotation = osg.Quat(pi / 2.0, osg.Vec3(0, 1, 0))
else:
rotation = self.trackball.getRotation().inverse()
self.compassTransform.setMatrix(osg.Matrixd.rotate(rotation))
def setUseStereo(self, useStereo):
"""
Set whether the visualization should be viewable through red/blue 3D glasses.
The argument should be either True or False.
"""
settings = self.viewer.getDisplaySettings()
if useStereo:
if settings is None:
settings = osg.DisplaySettings()
self.viewer.setDisplaySettings(settings)
settings.setStereo(True)
settings.setStereoMode(osg.DisplaySettings.ANAGLYPHIC)
elif settings is not None:
settings.setStereo(False)
self.Refresh()
    def _resetView(self):
        # Re-derive the 2D orthographic projection and view matrices from the current 
        # center/zoom, and position the scroll bars to match.  No-op in 3D.
        if self.viewDimensions == 2:
            width, height = self.GetClientSize()
            # TODO: if self.orthoZoom just changed to 0 then width and height will be too small by assuming the scroll bars are still there.
            # orthoZoom is in tenths of a power of two, so +10 doubles the magnification.
            zoom = 2.0 ** (self.orthoZoom / 10.0)
            # The +20 padding leaves a margin around the visibles at the base zoom level.
            self.viewer.getCamera().setProjectionMatrixAsOrtho2D(self.orthoCenter[0] - (width + 20) * self.zoomScale / 2.0 / zoom, 
                                                                 self.orthoCenter[0] + (width + 20) * self.zoomScale / 2.0 / zoom, 
                                                                 self.orthoCenter[1] - (height + 20) * self.zoomScale / 2.0 / zoom, 
                                                                 self.orthoCenter[1] + (height + 20) * self.zoomScale / 2.0 / zoom)
            # Look down the axis perpendicular to the chosen ortho plane, placed just 
            # outside the bounds of the visibles.
            if self.orthoViewPlane == 'xy':
                self.viewer.getCamera().setViewMatrix(osg.Matrixd.translate(osg.Vec3d(0.0, 0.0, self.visiblesMin[2] - 2.0)))
            elif self.orthoViewPlane == 'xz':
                self.viewer.getCamera().setViewMatrix(osg.Matrixd.translate(osg.Vec3d(0.0, self.visiblesMax[1] + 2.0, 0.0)) * \
                                                      osg.Matrixd.rotate(osg.Quat(pi / -2.0, osg.Vec3d(1, 0, 0))))
            elif self.orthoViewPlane == 'zy':
                self.viewer.getCamera().setViewMatrix(osg.Matrixd.translate(osg.Vec3d(self.visiblesMax[0] + 2.0, 0.0, 0.0)) * \
                                                      osg.Matrixd.rotate(osg.Quat(pi / 2.0, osg.Vec3d(0, 1, 0))))
            # Thumb position reflects how far the visible window sits within the 
            # world bounds; thumb size shrinks as the zoom increases.
            self.SetScrollbar(wx.HORIZONTAL, (self.orthoCenter[0] - self.visiblesMin[0]) / self.visiblesSize[0] * width - width / zoom / 2.0, width / zoom, width, True)
            self.SetScrollbar(wx.VERTICAL, (self.visiblesMax[1] - self.orthoCenter[1]) / self.visiblesSize[1] * height - height / zoom / 2.0, height / zoom, height, True)
    def computeVisiblesBound(self):
        # Recompute the axis-aligned bounding box of every visualized item, caching 
        # the result until a visible's geometry changes (the _recomputeBounds flag).
        # Also refreshes zoomScale, the world-units-per-pixel ratio used by _resetView.
        if self._recomputeBounds:
            # This:
            #     boundingSphere = node.getBound()
            #     sphereCenter = boundingSphere.center()
            # computes a screwy center.  Because there's no camera?
            # Manually compute the bounding box instead.
            # TODO: figure out how to let the faster C++ code do this
            origBounds = (self.visiblesCenter, self.visiblesSize)
            self.visiblesMin = [100000, 100000, 100000]
            self.visiblesMax = [-100000, -100000, -100000]
            for visibles in self.visibles.values():
                for visible in visibles:
                    # Expand the box by each visible's extent around its center.
                    x, y, z = visible.worldPosition()
                    w, h, d = visible.worldSize()
                    if x - w / 2.0 < self.visiblesMin[0]:
                        self.visiblesMin[0] = x - w / 2.0
                    if x + w / 2.0 > self.visiblesMax[0]:
                        self.visiblesMax[0] = x + w / 2.0
                    if y - h / 2.0 < self.visiblesMin[1]:
                        self.visiblesMin[1] = y - h / 2.0
                    if y + h / 2.0 > self.visiblesMax[1]:
                        self.visiblesMax[1] = y + h / 2.0
                    if z - d / 2.0 < self.visiblesMin[2]:
                        self.visiblesMin[2] = z - d / 2.0
                    if z + d / 2.0 > self.visiblesMax[2]:
                        self.visiblesMax[2] = z + d / 2.0
                    if visible.isPath():
                        # Paths can bow outside of their end points' boxes.
                        for x, y, z in visible.pathMidPoints():
                            if x < self.visiblesMin[0]:
                                self.visiblesMin[0] = x
                            if x > self.visiblesMax[0]:
                                self.visiblesMax[0] = x
                            if y < self.visiblesMin[1]:
                                self.visiblesMin[1] = y
                            if y > self.visiblesMax[1]:
                                self.visiblesMax[1] = y
                            if z < self.visiblesMin[2]:
                                self.visiblesMin[2] = z
                            if z > self.visiblesMax[2]:
                                self.visiblesMax[2] = z
            self.visiblesCenter = ((self.visiblesMin[0] + self.visiblesMax[0]) / 2.0, (self.visiblesMin[1] + self.visiblesMax[1]) / 2.0, (self.visiblesMin[2] + self.visiblesMax[2]) / 2.0)
            self.visiblesSize = (self.visiblesMax[0] - self.visiblesMin[0], self.visiblesMax[1] - self.visiblesMin[1], self.visiblesMax[2] - self.visiblesMin[2])
            self._recomputeBounds = False
            
            if origBounds != (self.visiblesCenter, self.visiblesSize):
                # The size of the glow effect is based on the bounding box of the whole display.
                # This is expensive so only do it if something actually changed.
                for visibles in self.visibles.itervalues():
                    for visible in visibles:
                        visible._updateGlow()
        
        # World-units per pixel at base zoom; take the more constrained axis so 
        # everything fits on screen (the -10 leaves a small margin).
        width, height = self.GetClientSize()
        xZoom = self.visiblesSize[self.orthoXPlane] / (width - 10.0)
        yZoom = self.visiblesSize[self.orthoYPlane] / (height - 10.0)
        if xZoom > yZoom:
            self.zoomScale = xZoom
        else:
            self.zoomScale = yZoom
def centerView(self):
"""
Deprecated, use resetView or zoomToFit instead.
"""
if self.viewDimensions == 2:
self.zoomToFit()
else:
self.resetView()
def resetView(self):
"""
Reset the view point of the 3D view to the default distance and rotation.
"""
if self.viewDimensions == 3:
self.trackball.setNode(self.rootNode)
self.trackball.computeHomePosition()
self.viewer.home()
self.trackball.setRotation(osg.Quat(0, 0, 0, 1))
self.Refresh()
def zoomToFit(self):
"""
Change the magnification of the 2D view so that all objects are visible.
"""
if self.viewDimensions == 2:
self.computeVisiblesBound()
self.orthoCenter = (self.visiblesCenter[self.orthoXPlane], self.visiblesCenter[self.orthoYPlane])
self.orthoZoom = 0
self._resetView()
self.Refresh()
#osgDB.writeNodeFile(self.rootNode, "test.osg");
def zoomToSelection(self):
"""
Change the magnification of the 2D view so that all selected or highlighted objects are visible.
"""
minX, maxX = (1e300, -1e300)
minY, maxY = (1e300, -1e300)
for visible in self.selectedVisibles.union(self.highlightedVisibles).union(self.animatedVisibles):
worldPos = visible.worldPosition()
worldSize = visible.worldSize()
minX = min(minX, worldPos[0] - worldSize[0] / 2.0)
maxX = max(maxX, worldPos[0] + worldSize[0] / 2.0)
minY = min(minY, worldPos[1] - worldSize[1] / 2.0)
maxY = max(maxY, worldPos[1] + worldSize[1] / 2.0)
self.orthoCenter = ((minX + maxX) / 2.0, (minY + maxY) / 2.0)
width, height = self.GetClientSize()
xZoom = (width - 20) * self.zoomScale / (maxX - minX)
yZoom = (height - 20) * self.zoomScale / (maxY - minY)
self.orthoZoom = log(min(xZoom, yZoom), 2) * 10.0
self._resetView()
self.Refresh()
    def _zoom(self, amount):
        # Zoom the view by the given amount (positive zooms in, negative out).
        # 2D adjusts the ortho zoom level and clamps the center so the scroll bars 
        # can still reach everything; 3D dollies the trackball camera.
        if self.viewDimensions == 2:
            self.orthoZoom += 10 * amount
            if self.orthoZoom < 0:
                self.orthoZoom = 0
            
            # Alter orthoCenter if the new zoom level will cause any visibles to fall outside the reach of the scroll bars.
            width, height = self.GetClientSize()
            zoom = 2 ** (self.orthoZoom / 10.0)
            # Same thumb-position formulas as _resetView()/onScroll(); clamp to [0, max].
            horScrollPos = (self.orthoCenter[0] - self.visiblesMin[0]) / self.visiblesSize[0] * width - width / zoom / 2.0
            maxHorScrollPos = width - width / zoom
            if horScrollPos < 0.0:
                self.orthoCenter = ((width / zoom / 2.0) / width * self.visiblesSize[0] + self.visiblesMin[0], self.orthoCenter[1])
            elif horScrollPos > maxHorScrollPos:
                self.orthoCenter = ((maxHorScrollPos + width / zoom / 2.0) / width * self.visiblesSize[0] + self.visiblesMin[0], self.orthoCenter[1])
            vertScrollPos = (self.visiblesMax[1] - self.orthoCenter[1]) / self.visiblesSize[1] * height - height / zoom / 2.0
            maxVertScrollPos = height - height / zoom
            if vertScrollPos < 0.0:
                self.orthoCenter = (self.orthoCenter[0], self.visiblesMax[1] - (height / zoom / 2.0) * self.visiblesSize[1] / height)
            elif vertScrollPos > maxVertScrollPos:
                self.orthoCenter = (self.orthoCenter[0], self.visiblesMax[1] - (maxVertScrollPos + height / zoom / 2.0) * self.visiblesSize[1] / height)
        elif self.viewDimensions == 3:
            # Dolly towards/away from the scene, scaled by the scene's overall size.
            self.computeVisiblesBound()
            self.trackball.setDistance(self.trackball.getDistance() - max(self.visiblesSize) * 0.2 * amount)
        self._resetView()
        self.Refresh()
def zoomIn(self):
"""
Increase the magnification of the view.
"""
self._zoom(1.0)
def zoomOut(self):
"""
Decrease the magnification of the view.
"""
self._zoom(-1.0)
    def onScroll(self, event):
        # Scroll bar handler: convert the new thumb position back into a world-space 
        # view center for the 2D ortho view.
        width, height = self.GetClientSize()
        zoom = 2 ** (self.orthoZoom / 10.0)
        if event.GetOrientation() == wx.HORIZONTAL:
            # Reverse the calculation in _resetView():
            # pos = (self.orthoCenter[0] - self.visiblesMin[0]) / self.visiblesSize[0] * width - width / zoom / 2
            # pos + width / zoom / 2 = (self.orthoCenter[0] - self.visiblesMin[0]) / self.visiblesSize[0] * width
            # (pos + width / zoom / 2) * self.visiblesSize[0] / width = self.orthoCenter[0] - self.visiblesMin[0]
            self.orthoCenter = ((event.GetPosition() + width / zoom / 2.0) / width * self.visiblesSize[0] + self.visiblesMin[0], self.orthoCenter[1])
        else:
            # Reverse the calculation in _resetView():
            # pos = (self.visiblesMax[1] - self.orthoCenter[1]) / self.visiblesSize[1] * height - height / zoom / 2
            # pos + height / zoom / 2 = (self.visiblesMax[1] - self.orthoCenter[1]) / self.visiblesSize[1] * height
            # (pos + height / zoom / 2) * self.visiblesSize[1] / height = self.visiblesMax[1] - self.orthoCenter[1]
            self.orthoCenter = (self.orthoCenter[0], self.visiblesMax[1] - (event.GetPosition() + height / zoom / 2.0) * self.visiblesSize[1] / height)
        # Rebuild the projection around the new center and redraw.
        self._resetView()
        self.Refresh()
def setNavigationMode(self, mode):
if mode != self._navigationMode:
self._navigationMode = mode
def navigationMode(self):
return self._navigationMode
def shiftView(self, dx, dy):
if self.viewDimensions == 3:
self._shiftView(dx, dy)
elif self.orthoZoom > 0:
# At least on the Mac the scroll bars don't update if set immediately. Instead, queue the update to happen after all current events have cleared.
wx.CallAfter(self._shiftView, dx, dy)
    def _shiftView(self, dx, dy):
        # Perform the actual pan: move the 2D ortho center or translate the 3D 
        # trackball center by the given screen-space delta.
        width, height = self.GetClientSize()
        if self.viewDimensions == 2:
            # Convert screen coordinates to world coordinates.
            dx = -dx / (width - 20.0) * width
            dy = -dy / (height - 20.0) * height
            zoom = 2.0 ** (self.orthoZoom / 10.0)
            self.orthoCenter = (self.orthoCenter[0] + dx * self.zoomScale / zoom, self.orthoCenter[1] + dy * self.zoomScale / zoom)
            self._resetView()
        else:
            # Mimic the panning code from OSG's trackball manipulator (in TrackballManipulator::calcMovement()).
            # It expects dx and dy to be normalized (-1.0 ... 1.0).
            dx /= width / 2.0
            dy /= height / 2.0
            # Pan speed scales with the camera's distance from the scene.
            scale = -0.3 * self.trackball.getDistance()
            rotation = osg.Matrixd()
            rotation.makeRotate(self.trackball.getRotation())
            # Move in the camera's plane, not the world plane.
            shiftVector = osg.Vec3d(dx * scale, dy * scale, 0.0)
            center = self.trackball.getCenter()
            center += rotation.preMult(shiftVector)
            self.trackball.setCenter(center)
        self.Refresh()
def setBackgroundColor(self, color):
"""
Set the background color of the entire display.
The color argument should be a tuple or list of four values between 0.0 and 1.0 indicating the red, green, blue and alpha values of the color. For example:
* (0.0, 0.0, 0.0, 1.0) -> black
* (1.0, 0.0, 0.0, 1.0) -> red
* (0.0, 1.0, 0.0, 1.0) -> green
* (0.0, 0.0, 1.0, 1.0) -> blue
* (1.0, 1.0, 1.0, 1.0) -> white
* (1.0, 1.0, 1.0, 0.0) -> white, but clear if saved as image
"""
if not isinstance(color, (list, tuple)) or len(color) != 4:
raise ValueError, 'The color passed to setBackgroundColor() must be a tuple or list of four numbers.'
for colorComponent in color:
if not isinstance(colorComponent, (int, float)) or colorComponent < 0.0 or colorComponent > 1.0:
raise ValueError, 'The components of the color passed to setBackgroundColor() must all be numbers between 0.0 and 1.0, inclusive.'
if color != self.backgroundColor:
self.viewer.getCamera().setClearColor(osg.Vec4f(color[0], color[1], color[2], color[3]))
self.backgroundColor = color
dispatcher.send(('set', 'backgroundColor'), self)
def setUseMouseOverSelecting(self, useIt):
"""
Set whether objects should be automatically selected as the mouse passes over them.
This setting is ignored if a manual selection is already in place.
"""
if useIt != self._useMouseOverSelecting:
self._useMouseOverSelecting = useIt
dispatcher.send(('set', 'useMouseOverSelecting'), self)
def useMouseOverSelecting(self):
return self._useMouseOverSelecting
    def onMouseEvent(self, event):
        # Translate wx mouse events into entries on the OSG event queue, which the 
        # pick/manipulator handlers consume on the next frame.
        if event.ButtonDown():
            # Remember the modifier state so the pick handler knows whether to 
            # extend the selection or find the shortest path.
            self.selectionShouldExtend = event.CmdDown()
            self.findShortestPath = event.ShiftDown()
            self.graphicsWindow.getEventQueue().mouseButtonPress(event.GetX(), event.GetY(), event.GetButton())
        elif event.ButtonUp():
            self.graphicsWindow.getEventQueue().mouseButtonRelease(event.GetX(), event.GetY(), event.GetButton())
        elif event.Dragging():
            self.graphicsWindow.getEventQueue().mouseMotion(event.GetX(), event.GetY())
        elif event.Moving() and ((self._useMouseOverSelecting and self.hoverSelect) or self._visibleBeingAdded is not None):
            # Simulate a click so hovering selects the item under the mouse (or, when 
            # a visible is being added, tracks it under the cursor).
            if self._visibleBeingAdded is None:
                self.hoverSelecting = True
            self.graphicsWindow.getEventQueue().mouseButtonPress(event.GetX(), event.GetY(), wx.MOUSE_BTN_LEFT)
            self.graphicsWindow.getEventQueue().mouseButtonRelease(event.GetX(), event.GetY(), wx.MOUSE_BTN_LEFT)
            self.Refresh()
        event.Skip()
def onMouseWheel(self, event):
if event.ShiftDown():
self._zoom(event.GetWheelRotation() / 100.0 * self.scrollWheelScale)
else:
self._zoom(event.GetWheelRotation() / 10.0 * self.scrollWheelScale)
event.Skip()
    def onEraseBackground(self, event):
        # Intentionally empty -- swallowing EVT_ERASE_BACKGROUND presumably prevents 
        # the background from being cleared behind the GL canvas (flicker); TODO confirm.
        pass
    def onSize(self, event):
        # Keep the embedded OSG graphics window in sync with the wx canvas size.
        w, h = self.GetClientSize()
        
        if self.IsShownOnScreen():
            self.SetCurrent(self.glContext)
            
            if self.graphicsWindow.valid():
                # Tell OSG about the new viewport dimensions.
                self.graphicsWindow.getEventQueue().windowResize(0, 0, w, h)
                self.graphicsWindow.resized(0, 0, w, h)
            
            # Re-derive the projection and scroll bars for the new size.
            self._resetView()
        
        event.Skip()
    def onPaint(self, event_):
        # wx requires a PaintDC to be created during EVT_PAINT even though OSG does 
        # the actual drawing into the GL context.
        wx.PaintDC(self)
        
        if self.IsShownOnScreen(): #self.GetContext() != 0 and self.graphicsWindow.valid():
            self.SetCurrent(self.glContext)
            # Render one OSG frame and then show it.
            self.viewer.frame()
            self.SwapBuffers()
def onAnimate(self, event):
self.Refresh()
event.Skip()
def _getConvertedKeyCode(self, event):
key = event.GetKeyCode()
if key >= ord('A') and key <= ord('Z'):
if not event.ShiftDown():
key += 32
return key
def onKeyDown(self, event):
key = self._getConvertedKeyCode(event)
self.graphicsWindow.getEventQueue().keyPress(key)
event.Skip()
def onKeyUp(self, event):
key = self._getConvertedKeyCode(event)
self.graphicsWindow.getEventQueue().keyRelease(key)
event.Skip()
def visiblesForObject(self, networkObject):
"""
Return the list of :class:`visible proxies <Display.Visible.Visible>` for the given object or an empty list if the object is not visualized.
"""
return list(self.visibles[networkObject.networkId]) if networkObject and networkObject.networkId in self.visibles else []
def Refresh(self, *args, **keywordArgs): # pylint: disable=W0221
if not self._suppressRefresh:
if self.compassCamera:
self._updateCompass()
wx.glcanvas.GLCanvas.Refresh(self, *args, **keywordArgs)
def _visibleChanged(self, signal):
if signal[1] in ('position', 'size', 'rotation', 'path', 'pathMidPoints'):
self._recomputeBounds = True
if not self._recomputeBoundsScheduled:
# Trigger a single recompute of the visibles bounds this pass through the event loop no matter how many visibles are updated.
wx.CallAfter(self._resetViewAfterVisiblesChanged)
self._recomputeBoundsScheduled = True
elif signal[1] in ('positionIsFixed', 'sizeIsFixed') and any(self.selectedVisibles):
self._clearDragger()
visible = list(self.selectedVisibles)[0]
if visible._isDraggable():
self._addDragger(visible)
self.Refresh()
if signal[1] not in ('glowColor'):
self.GetTopLevelParent().setModified(True)
def _resetViewAfterVisiblesChanged(self):
self.computeVisiblesBound()
if self.orthoZoom == 0:
self.orthoCenter = (self.visiblesCenter[self.orthoXPlane], self.visiblesCenter[self.orthoYPlane])
self._resetView()
self._recomputeBoundsScheduled = False
def addVisible(self, visible, parentVisible = None):
clientId = -1 if visible.client == None else visible.client.networkId
if clientId in self.visibles:
self.visibles[clientId].append(visible)
else:
self.visibles[clientId] = [visible]
self._visibleIds[visible.displayId] = visible
if parentVisible is None:
self.rootNode.addChild(visible.sgNode)
else:
parentVisible.addChildVisible(visible)
dispatcher.connect(self._visibleChanged, dispatcher.Any, visible)
def visibleWithId(self, visibleId):
if visibleId in self._visibleIds:
return self._visibleIds[visibleId]
else:
return None
def close(self):
self._closing = True
self.setNetwork(None)
def removeVisible(self, visible):
"""
Remove the indicated :class:`visual proxy <Display.Visible.Visible>` from the visualization.
If the object has any nested objects or connections then they will be removed as well.
"""
if visible.displayId not in self._visibleIds:
raise ValueError, 'The visible passed to removeVisible() is not part of the display.'
# Remove any child visibles before removing this one.
for childVisible in list(visible.children):
self.removeVisible(childVisible)
# Remove any dependent visibles before removing this one. (like an arborization before its region)
for dependentVisible in list(visible.dependentVisibles):
self.removeVisible(dependentVisible)
# Remove the visible from the current selection if needed.
if visible in self.selectedVisibles:
self.selectVisibles([visible], extend = True)
# Remove the visible's node from the scene graph.
if visible.parent:
visible.parent.removeChildVisible(visible)
self.rootNode.removeChild(visible.sgNode)
# Remove any dependencies.
dispatcher.disconnect(self._visibleChanged, dispatcher.Any, visible)
if visible.isPath():
visible.setPathEndPoints(None, None)
# Remove the visible from self._visibleIds and self.visibles.
del self._visibleIds[visible.displayId]
clientId = -1 if visible.client == None else visible.client.networkId
visibles = self.visibles[clientId]
visibles.remove(visible)
if not any(visibles):
del self.visibles[clientId]
visible.display = None
self.Refresh()
def visualizeObject(self, networkObject = None, orphanClass = None, **keywordArgs):
"""
Create a visual representation of the :class:`object <network.object.Object>`.
If you want to have a purely visual object that does not represent any object in the biological network then pass None.
You can customize the visualization of the object by passing additional parameters. The parameters that would be used for automatic visualization can be obtained by calling :meth:`defaultVisualizationParams() <network.object.Object.defaultVisualizationParams>` on the object.
Returns the :class:`visible proxy <Display.Visible.Visible>` of the object.
"""
# TODO: document the list of possible params somewhere.
# TODO: replace this whole block with display rules.
visible = Visible(self, networkObject)
isStimulus = False
# Start with the default params for this object, object class or dummy object and override with any supplied params.
if orphanClass:
visible.setOrphanClass(orphanClass)
params = orphanClass._defaultVisualizationParams()
if orphanClass == Stimulus:
edgeVisible = visible
nodeVisible = Visible(self, None)
target = keywordArgs['target']
del keywordArgs['target']
isStimulus = True
elif networkObject:
params = networkObject.defaultVisualizationParams()
else:
params = Object._defaultVisualizationParams()
for key, value in keywordArgs.iteritems():
params[key] = value
if isinstance(networkObject, Arborization):
dispatcher.connect(self._arborizationChangedFlow, ('set', 'sendsOutput'), networkObject)
dispatcher.connect(self._arborizationChangedFlow, ('set', 'receivesInput'), networkObject)
elif isinstance(networkObject, Pathway):
dispatcher.connect(self._pathwayChangedFlow, ('set', 'region1Projects'), networkObject)
dispatcher.connect(self._pathwayChangedFlow, ('set', 'region2Projects'), networkObject)
elif isinstance(networkObject, Stimulus):
edgeVisible = visible
nodeVisible = Visible(self, networkObject)
target = networkObject.target
isStimulus = True
if 'color' in params:
visible.setColor(params['color'])
if 'shape' in params:
if isinstance(params['shape'], str):
shape = neuroptikon.shapeClass(params['shape'])()
elif isinstance(params['shape'], type(self.__class__)):
shape = params['shape']()
else:
shape = params['shape']
visible.setShape(shape)
if 'opacity' in params:
visible.setOpacity(params['opacity'])
if isStimulus:
nodeVisible.setOpacity(params['opacity'])
if 'sizeIsAbsolute' in params:
visible.setSizeIsAbsolute(params['sizeIsAbsolute'])
if 'texture' in params:
visible.setTexture(params['texture'])
if 'textureScale' in params:
visible.setTextureScale(params['textureScale'])
if 'weight' in params:
visible.setWeight(params['weight'])
# Label and position are applied to the node visible of a stimulus.
if isStimulus:
visible = nodeVisible
if 'size' in params:
visible.setSize(params['size'])
if 'label' in params:
visible.setLabel(params['label'])
if 'labelColor' in params:
visible.setLabelColor(params['labelColor'])
if 'labelPosition' in params:
visible.setLabelPosition(params['labelPosition'])
if 'position' in params:
visible.setPosition(params['position'])
if 'positionIsFixed' in params:
visible.setPositionIsFixed(params['positionIsFixed'])
if 'rotation' in params:
visible.setRotation(params['rotation'])
if 'arrangedAxis' in params:
visible.setArrangedAxis(params['arrangedAxis'])
if 'arrangedSpacing' in params:
visible.setArrangedSpacing(params['arrangedSpacing'])
if 'arrangedWeight' in params:
visible.setArrangedWeight(params['arrangedWeight'])
if 'path' in params:
params['pathMidPoints'] = params['path']
del params['path']
pathStart, pathEnd = params.get('pathEndPoints', (None, None))
pathFlowsTo = params.get('flowTo', None)
pathFlowsFrom = params.get('flowFrom', None)
flowToColor = params.get('flowToColor', None)
flowFromColor = params.get('flowFromColor', None)
parentObject = params.get('parent', None)
if isinstance(parentObject, Object):
parentVisibles = self.visiblesForObject(parentObject)
parentVisible = parentVisibles[0] if len(parentVisibles) == 1 else None
else:
parentVisible = parentObject
self.addVisible(visible, parentVisible)
if isStimulus:
if isinstance(target, Object):
targetVisibles = self.visiblesForObject(target)
if len(targetVisibles) == 1:
target = targetVisibles[0]
if target is not None:
edgeVisible.setPathEndPoints(nodeVisible, target)
edgeVisible.setPathIsFixed(True)
edgeVisible.setFlowTo(True)
if flowToColor:
edgeVisible.setFlowToColor(flowToColor)
if self._showFlow:
edgeVisible.animateFlow()
nodeVisible.setShape(None)
edgeVisible.setPositionIsFixed(True)
self.addVisible(edgeVisible)
else:
if pathStart is not None and pathEnd is not None:
# The path start and end can either be objects or visibles.
if isinstance(pathStart, Object):
pathStartVisibles = self.visiblesForObject(pathStart)
else:
pathStartVisibles = [pathStart]
if isinstance(pathEnd, Object):
pathEndVisibles = self.visiblesForObject(pathEnd)
else:
pathEndVisibles = [pathEnd]
if len(pathStartVisibles) == 1 and len(pathEndVisibles) == 1:
pathStartVisible = pathStartVisibles[0]
# if pathStartVisible.isPath():
# pathStartVisible = pathStartVisible._pathEnd
pathEndVisible = pathEndVisibles[0]
# if pathEndVisible.isPath():
# pathEndVisible = pathEndVisible._pathStart
visible.setPathEndPoints(pathStartVisible, pathEndVisible)
visible.setPathMidPoints(params.get('pathMidPoints', []))
visible.setPathIsFixed(params.get('pathIsFixed', None))
visible.setFlowTo(pathFlowsTo)
if flowToColor:
visible.setFlowToColor(flowToColor)
visible.setFlowFrom(pathFlowsFrom)
if flowFromColor:
visible.setFlowFromColor(flowFromColor)
if self._showFlow:
visible.animateFlow()
childObjects = params.get('children', [])
for childObject in childObjects:
subVisibles = self.visiblesForObject(childObject)
if len(subVisibles) == 1:
# TODO: what if the subVisible is already a child?
self.rootNode.removeChild(subVisibles[0].sgNode)
visible.addChildVisible(subVisibles[0])
# The visible may be outside of the previously computed bounds.
_recomputeBounds = True
return visible
def removeObject(self, networkObject):
"""
Remove the indicated :class:`network object <network.object.Object>` from the visualization.
If the object has any nested objects or connections then they will be removed as well.
"""
while any(self.visiblesForObject(networkObject)):
self.removeVisible(self.visiblesForObject(networkObject)[0])
def clear(self):
"""
Remove every :class:`network object <network.object.Object>` from the visualization.
"""
while any(self.visibles):
self.removeVisible(self.visibles.values()[0][0])
def _arborizationChangedFlow(self, sender):
arborizationVis = self.visiblesForObject(sender)
if len(arborizationVis) == 1:
arborizationVis[0].setFlowTo(sender.sendsOutput)
arborizationVis[0].setFlowFrom(sender.receivesInput)
def _pathwayChangedFlow(self, sender):
pathwayVis = self.visiblesForObject(sender)
if len(pathwayVis) == 1:
pathwayVis[0].setFlowTo(sender.region1Projects)
pathwayVis[0].setFlowFrom(sender.region2Projects)
    def setConsole(self, console):
        # Store the console associated with this display.
        self.console = console
def setNetwork(self, network):
if network != self.network:
if self.network != None:
self.network.removeDisplay(self)
# TBD: are there situations where you wouldn't want to clear anonymous visibles?
self.clear()
# TODO: anything else?
self.network = network
if network is not None:
self.network.addDisplay(self)
if self.autoVisualize:
for networkObject in network.objects:
if not networkObject.parentObject():
if not (isinstance(addedObject, Synapse) and self.hideSynapsesOnConnections()):
self.visualizeObject(networkObject)
dispatcher.connect(receiver=self._networkChanged, signal=dispatcher.Any, sender=self.network)
dispatcher.send(('set', 'network'), self)
    def _networkChanged(self, affectedObjects=None, **arguments):
        # Dispatcher callback invoked when the visualized network changes.
        # 'affectedObjects' lists the network objects the signal applies to.
        signal = arguments['signal']
        if signal == 'addition' and self.autoVisualize:
            # Visualize newly added top-level objects (skipping hidden synapses).
            for addedObject in affectedObjects:
                if not addedObject.parentObject():
                    # TODO if object is synapse and not display synapse is on then don't add to visualize object
                    if not (isinstance(addedObject, Synapse) and self.hideSynapsesOnConnections()):
                        self.visualizeObject(addedObject)
            self.Refresh()
        elif signal == 'deletion':
            for removedObject in affectedObjects:
                self.removeObject(removedObject)
        elif signal == 'hideSynapsesOnConnections':
            # If we hide/show synapses we need to add or delete them from visibles
            if self.autoVisualize:
                for networkObject in self.network.objects:
                    if isinstance(networkObject, Synapse):
                        if self.hideSynapsesOnConnections():
                            self.removeObject(networkObject)
                        else:
                            self.visualizeObject(networkObject)
        else:
            pass    # TODO: anything?
        # Any network change marks the containing document as modified.
        self.GetTopLevelParent().setModified(True)
def _neuronRegionChanged(self, sender):
# TODO: untested method
visible = self.visiblesForObject(sender)
if visible.parent is not None:
visible.parent.removeChildVisible(visible)
if sender.region is not None:
newParent = self.visiblesForObject(sender.region)
if newParent is not None:
newParent.addChildVisible(visible)
def setShowRegionNames(self, show):
"""
Set whether the names of regions should be shown by default in the visualization.
"""
if show != self._showRegionNames:
self._showRegionNames = show
dispatcher.send(('set', 'showRegionNames'), self)
self.Refresh()
    def showRegionNames(self):
        """
        Return whether the names of regions should be shown by default in the visualization.
        """
        # Simple accessor for the preference toggled by setShowRegionNames().
        return self._showRegionNames
def setShowNeuronNames(self, show):
"""
Set whether the names of neurons should be shown by default in the visualization.
"""
if show != self._showNeuronNames:
self._showNeuronNames = show
dispatcher.send(('set', 'showNeuronNames'), self)
self.Refresh()
    def showNeuronNames(self):
        """
        Return whether the names of neurons should be shown by default in the visualization.
        """
        # Simple accessor for the preference toggled by setShowNeuronNames().
        return self._showNeuronNames
    def hideUnselectedNeurons(self):
        """
        Returns whether to hide unselected neurons (when at least one item is selected).
        """
        # Simple accessor for the preference toggled by setHideUnselectedNeurons().
        return self._hideUnselectedNeurons
def setHideUnselectedNeurons(self, value):
"""
Set whether to hide hide unselected neurons when at least one other item is selected.
"""
if value != self._hideUnselectedNeurons:
self._hideUnselectedNeurons = value
dispatcher.send(('set', 'hideUnselectedNeurons'))
self.selectVisibles(self.selectedVisibles, reselect=True)
self.Refresh()
    def hideSynapsesOnConnections(self):
        """
        Return whether synapses should be hidden when visualizing connections.
        """
        # (The previous docstring was a copy-paste from hideUnselectedNeurons().)
        return self._hideSynapsesOnConnections
    def setHideSynapsesOnConnections(self, value):
        """
        Set whether synapses should be hidden when visualizing connections.
        """
        # (The previous docstring was a copy-paste from setHideUnselectedNeurons().)
        if value != self._hideSynapsesOnConnections:
            self._hideSynapsesOnConnections = value
            # The network is the sender so that _networkChanged (connected with
            # sender=self.network) adds or removes the synapse visibles.
            dispatcher.send('hideSynapsesOnConnections', self.network)
            self.Refresh()
def setShowNeuronNamesOnSelection(self, show):
"""
Set whether the names of neurons should be shown by default in the visualization when selected.
"""
if show != self._showNeuronNamesOnSelection:
self._showNeuronNamesOnSelection = show
dispatcher.send(('set', 'showNeuronNamesOnSelection'), self)
self.Refresh()
    def showNeuronNamesOnSelection(self):
        """
        Return whether the names of neurons should be shown by default in the visualization when selected.
        """
        # Simple accessor for the preference toggled by setShowNeuronNamesOnSelection().
        return self._showNeuronNamesOnSelection
def setPrintNeuronNamesOnSelection(self, show):
"""
Set whether the names of neurons should be printed by default in the visualization when selected.
"""
if show != self._printNeuronNamesOnSelection:
self._printNeuronNamesOnSelection = show
dispatcher.send(('set', 'printNeuronNamesOnSelection'), self)
self.Refresh()
    def printNeuronNamesOnSelection(self):
        """
        Return whether the names of neurons should be printed by default in the visualization when selected.
        """
        # Simple accessor for the preference toggled by setPrintNeuronNamesOnSelection().
        return self._printNeuronNamesOnSelection
def setLabelsFloatOnTop(self, floatLabels):
"""
Set whether labels should be rendered on top of all other objects in the visualization.
"""
if floatLabels != self._labelsFloatOnTop:
self._labelsFloatOnTop = floatLabels
dispatcher.send(('set', 'labelsFloatOnTop'), self)
self.Refresh()
    def labelsFloatOnTop(self):
        """
        Return whether labels should be rendered on top of all other objects in the visualization.
        """
        # Simple accessor for the preference toggled by setLabelsFloatOnTop().
        return self._labelsFloatOnTop
def setShowFlow(self, showFlow):
"""
Set whether the flow of information should be shown for all objects in the visualization.
"""
if showFlow != self._showFlow:
self._showFlow = showFlow
dispatcher.send(('set', 'showFlow'), self)
    def showFlow(self):
        """
        Return whether the flow of information should be shown for all objects in the visualization.
        """
        # Simple accessor for the preference toggled by setShowFlow().
        return self._showFlow
def setSelectionHighlightDepth(self, depth):
"""
Set how far away objects connected to the current selection should be highlighted.
"""
if depth != self._selectionHighlightDepth:
self._selectionHighlightDepth = depth
self._onSelectionOrShowFlowChanged()
dispatcher.send(('set', 'selectionHighlightDepth'), self)
    def selectionHighlightDepth(self):
        """
        Return how far away objects connected to the current selection should be highlighted.
        """
        # Simple accessor for the preference toggled by setSelectionHighlightDepth().
        return self._selectionHighlightDepth
def setHighlightOnlyWithinSelection(self, flag):
"""
Set whether connections to objects outside of the selection should be highlighted when more than one object is selected.
"""
if flag != self._highlightOnlyWithinSelection:
self._highlightOnlyWithinSelection = flag
self._onSelectionOrShowFlowChanged()
dispatcher.send(('set', 'highlightOnlyWithinSelection'), self)
    def highlightOnlyWithinSelection(self):
        """
        Return whether connections to objects outside of the selection will be highlighted when more than one object is selected.
        """
        # Simple accessor for the preference toggled by setHighlightOnlyWithinSelection().
        return self._highlightOnlyWithinSelection
def setUseGhosts(self, useGhosts):
"""
Set whether unselected objects should be dimmed in the visualization.
"""
if useGhosts != self._useGhosts:
self._useGhosts = useGhosts
dispatcher.send(('set', 'useGhosts'), self)
self.Refresh()
    def useGhosts(self):
        """
        Return whether unselected objects should be dimmed in the visualization.
        """
        # Simple accessor for the preference toggled by setUseGhosts().
        return self._useGhosts
def setGhostingOpacity(self, opacity):
"""
Set the opacity to be used for unselected objects when ghosting is enabled.
The opacity must be between 0.0 and 1.0, inclusive.
"""
if not isinstance(opacity, (float, int)):
raise TypeError, 'The value passed to setGhostingOpacity() must be a number.'
elif opacity < 0.0 or opacity > 1.0:
raise ValueError, 'The value passed to setGhostingOpacity() must be between 0.0 and 1.0, inclusive.'
if opacity != self._ghostingOpacity:
self._ghostingOpacity = opacity
dispatcher.send(('set', 'ghostingOpacity'), self)
self.Refresh()
    def ghostingOpacity(self):
        """
        Return the opacity to be used for unselected objects when ghosting is enabled.
        """
        # Simple accessor for the preference toggled by setGhostingOpacity().
        return self._ghostingOpacity
def setLabel(self, networkObject, label):
"""
Set the label that adorns the visualization of the indicated :class:`network object <network.object.Object>`.
The label argument should be a string value or None to indicate that the object's abbreviation or name should be used. To have no label pass an empty string.
"""
if not isinstance(networkObject, (Object, Visible)) or (isinstance(networkObject, Object) and networkObject.network != self.network) or (isinstance(networkObject, Visible) and networkObject.display != self):
raise ValueError, 'The object argument passed to setLabel() must be an object from the network being visualized by this display.'
if not isinstance(label, (str, type(None))):
raise TypeError, 'The label argument passed to setLabel() must be a string or None.'
visible = None
if isinstance(networkObject, Object):
visibles = self.visiblesForObject(networkObject)
if len(visibles) == 1:
visible = visibles[0]
elif isinstance(networkObject, Stimulus):
visible = visibles[0 if visibles[1].isPath() else 1]
else:
visible = networkObject
if visible is not None:
visible.setLabel(label)
def setLabelColor(self, networkObject, color):
"""
Set the color of the label of the indicated :class:`network object <network.object.Object>`.
The color argument should be a tuple or list of three values between 0.0 and 1.0 indicating the red, green and blue values of the color. For example:
* (0.0, 0.0, 0.0) -> black
* (1.0, 0.0, 0.0) -> red
* (0.0, 1.0, 0.0) -> green
* (0.0, 0.0, 1.0) -> blue
* (1.0, 1.0, 1.0) -> white
Any alpha value should be set independently using :meth:`setVisibleOpacity <Display.Display.Display.setVisibleOpacity>`.
"""
if not isinstance(networkObject, (Object, Visible)) or (isinstance(networkObject, Object) and networkObject.network != self.network) or (isinstance(networkObject, Visible) and networkObject.display != self):
raise ValueError, 'The object argument passed to setLabelColor() must be an object from the network being visualized by this display .'
if (not isinstance(color, (tuple, list)) or len(color) != 3 or
not isinstance(color[0], (int, float)) or color[0] < 0.0 or color[0] > 1.0 or
not isinstance(color[1], (int, float)) or color[1] < 0.0 or color[1] > 1.0 or
not isinstance(color[2], (int, float)) or color[2] < 0.0 or color[2] > 1.0):
raise ValueError, 'The color argument passed to setLabelColor() should be a tuple or list of three integer or floating point values between 0.0 and 1.0, inclusively.'
visible = None
if isinstance(networkObject, Object):
visibles = self.visiblesForObject(networkObject)
if len(visibles) == 1:
visible = visibles[0]
elif isinstance(networkObject, Stimulus):
visible = visibles[0 if visibles[1].isPath() else 1]
else:
visible = networkObject
if visible is not None:
visible.setLabelColor(color)
def setLabelPosition(self, networkObject, position):
"""
Set the position of the label that adorns the visualization of the indicated :class:`network object <network.object.Object>`.
The position argument should be a tuple or list indicating the position of the label. The coordinates are local to the object with is usually a unit square centered at (0.0, 0.0). For example:
(0.0, 0.0) -> label at center of object
(-0.5, -0.5) -> label at lower left corner of object
(0.0, 0.5) -> label centered at top of object
"""
if not isinstance(networkObject, (Object, Visible)) or (isinstance(networkObject, Object) and networkObject.network != self.network) or (isinstance(networkObject, Visible) and networkObject.display != self):
raise ValueError, 'The object argument passed to setLabelPosition() must be an object from the network being visualized by this display .'
if not isinstance(position, (tuple, list)):
raise TypeError, 'The position argument passed to setLabelPosition() must be a tuple or list of numbers.'
for dim in position:
if not isinstance(dim, (int, float)):
raise TypeError, 'The components of the position argument passed to setLabelPosition() must be numbers.'
visible = None
if isinstance(networkObject, Object):
visibles = self.visiblesForObject(networkObject)
if len(visibles) == 1:
visible = visibles[0]
elif isinstance(networkObject, Stimulus):
visible = visibles[0 if visibles[1].isPath() else 1]
else:
visible = networkObject
if visible is not None:
visible.setLabelPosition(position)
def setVisiblePosition(self, networkObject, position = None, fixed = None):
"""
Set the position of the :class:`network object <network.object.Object>` within the display or within its visual container.
The position parameter should be a tuple or list of numbers. When setting the position of an object within another the coordinates are relative to a unit cube centered at (0.0, 0.0, 0.0).
The fixed parameter indicates whether the user should be given GUI controls to manipulate the position of the object.
"""
if not isinstance(networkObject, (Object, Visible)) or (isinstance(networkObject, Object) and networkObject.network != self.network) or (isinstance(networkObject, Visible) and networkObject.display != self):
raise ValueError, 'The object argument passed to setVisiblePosition() must be an object from the network being visualized by this display .'
if position != None:
if not isinstance(position, (tuple, list)) or len(position) != 3:
raise TypeError, 'The position argument passed to setVisiblePosition() must be a tuple or list of three numbers.'
for dim in position:
if not isinstance(dim, (int, float)):
raise TypeError, 'The components of the position argument passed to setVisiblePosition() must be numbers.'
visible = None
if isinstance(networkObject, Object):
visibles = self.visiblesForObject(networkObject)
if len(visibles) == 1:
visible = visibles[0]
elif isinstance(networkObject, Stimulus):
visible = visibles[0 if visibles[1].isPath() else 1]
else:
visible = networkObject
if visible is not None:
if position is not None:
visible.setPosition(position)
if fixed is not None:
visible.setPositionIsFixed(fixed)
def setVisibleRotation(self, networkObject, rotation):
visibles = self.visiblesForObject(networkObject)
if len(visibles) == 1:
visibles[0].setRotation(rotation)
def setVisibleSize(self, networkObject, size = None, fixed=True, absolute=False):
"""
Set the size of the :class:`network object <network.object.Object>` within the display or within its visual container.
The size parameter should be a tuple or list of numbers. When setting the position of an object within another the coordinates are relative to a unit cube centered at (0.0, 0.0, 0.0).
The fixed parameter indicates whether the user should be given GUI controls to manipulate the size of the object.
The absolute parameter indicates whether the size should be considered relative to the entire display (True) or relative to the visual container (False).
"""
if not isinstance(networkObject, (Object, Visible)) or (isinstance(networkObject, Object) and networkObject.network != self.network) or (isinstance(networkObject, Visible) and networkObject.display != self):
raise ValueError, 'The object argument passed to setVisibleSize() must be an object from the network being visualized by this display .'
if not isinstance(size, (tuple, list)):
raise TypeError, 'The size argument passed to setVisibleSize() must be a tuple or list of numbers.'
for dim in size:
if not isinstance(dim, (int, float)):
raise TypeError, 'The components of the size argument passed to setVisibleSize() must be numbers.'
visible = None
if isinstance(networkObject, Object):
visibles = self.visiblesForObject(networkObject)
if len(visibles) == 1:
visible = visibles[0]
else:
visible = networkObject
if visible is not None:
if size is not None:
visible.setSize(size)
visible.setSizeIsFixed(fixed)
visible.setSizeIsAbsolute(absolute)
def setVisibleColor(self, networkObject, color):
"""
Set the color of the indicated :class:`network object <network.object.Object>`.
The color argument should be a tuple or list of three values between 0.0 and 1.0 indicating the red, green and blue values of the color. For example:
* (0.0, 0.0, 0.0) -> black
* (1.0, 0.0, 0.0) -> red
* (0.0, 1.0, 0.0) -> green
* (0.0, 0.0, 1.0) -> blue
* (1.0, 1.0, 1.0) -> white
Any alpha value should be set independently using :meth:`setVisibleOpacity <Display.Display.Display.setVisibleOpacity>`.
"""
if not isinstance(networkObject, (Object, Visible)) or (isinstance(networkObject, Object) and networkObject.network != self.network) or (isinstance(networkObject, Visible) and networkObject.display != self):
raise TypeError, 'The object argument passed to setVisibleColor() must be an object from the network being visualized by this display.'
if (not isinstance(color, (tuple, list)) or len(color) != 3 or
not isinstance(color[0], (int, float)) or color[0] < 0.0 or color[0] > 1.0 or
not isinstance(color[1], (int, float)) or color[1] < 0.0 or color[1] > 1.0 or
not isinstance(color[2], (int, float)) or color[2] < 0.0 or color[2] > 1.0):
raise ValueError, 'The color argument should be a tuple or list of three integer or floating point values between 0.0 and 1.0, inclusively.'
visible = None
if isinstance(networkObject, Object):
visibles = self.visiblesForObject(networkObject)
if len(visibles) == 1:
visible = visibles[0]
elif isinstance(networkObject, Stimulus):
visible = visibles[0 if visibles[0].isPath() else 1]
else:
visible = networkObject
if visible is not None:
visible.setColor(color)
def setVisibleTexture(self, networkObject, texture, scale = 1.0):
"""
Set the :class:`texture <library.texture.Texture>` used to paint the surface of the visualized :class:`network object <network.object.Object>`.
>>> display.setVisibleTexture(region1, library.texture('Stripes'))
The texture parameter should be a :class:`texture <library.texture.Texture>` instance or None.
The scale parameter can be used to reduce or enlarge the texture relative to the visualized object.
"""
if not isinstance(networkObject, (Object, Visible)) or (isinstance(networkObject, Object) and networkObject.network != self.network) or (isinstance(networkObject, Visible) and networkObject.display != self):
raise TypeError, 'The object argument passed to setVisibleTexture() must be an object from the network being visualized by this display.'
if not isinstance(texture, (Texture, type(None))):
raise TypeError, 'The texture argument passed to setVisibleTexture() must be a texture from the library or None.'
if not isinstance(scale, (float, int)):
raise TypeError, 'The scale argument passed to setVisibleTexture() must be a number.'
visible = None
if isinstance(networkObject, Object):
visibles = self.visiblesForObject(networkObject)
if len(visibles) == 1:
visible = visibles[0]
elif isinstance(networkObject, Stimulus):
visible = visibles[0 if visibles[0].isPath() else 1]
else:
visible = networkObject
if visible is not None:
visible.setTexture(texture)
visible.setTextureScale(scale)
def setVisibleShape(self, networkObject, shape):
"""
Set the shape of the :class:`network object's <network.object.Object>` visualization.
>>> display.setVisibleShape(neuron1, shapes['Ball'])
>>> display.setVisibleShape(muscle1, shapes['Ring'](startAngle = 0.0, endAngle = pi))
The shape parameter should be one of the classes in the shapes dictionary, an instance of one of the classes or None.
"""
if isinstance(shape, str):
# Mapping for pre-0.9.4 scripts.
shapeNameMap = {'ball': 'Ball', 'box': 'Box', 'capsule': 'Capsule', 'cone': 'Cone', 'tube': 'Cylinder'}
if shape in shapeNameMap:
shape = shapeNameMap[shape]
shape = neuroptikon.shapeClass(shape)
if not isinstance(networkObject, (Object, Visible)) or (isinstance(networkObject, Object) and networkObject.network != self.network) or (isinstance(networkObject, Visible) and networkObject.display != self):
raise TypeError, 'The object argument passed to setVisibleShape() must be an object from the network being visualized by this display.'
if shape != None and not isinstance(shape, Shape) and (not type(shape) == type(self.__class__) or not issubclass(shape, Shape)):
raise TypeError, 'The shape parameter must be an instance of one of the classes in the shapes dictionary, an instance of one of the classes or None.'
visible = None
if isinstance(networkObject, Object):
visibles = self.visiblesForObject(networkObject)
if len(visibles) == 1:
visible = visibles[0]
elif isinstance(networkObject, Stimulus):
visible = visibles[0 if visibles[0].isPath() else 1]
else:
visible = networkObject
if visible is not None:
visible.setShape(shape)
def setVisibleOpacity(self, networkObject, opacity):
"""
Set the opacity of the :class:`network object's <network.object.Object>` visualization.
The opacity parameter should be a number from 0.0 (fully transparent) to 1.0 (fully opaque).
"""
if not isinstance(networkObject, (Object, Visible)) or (isinstance(networkObject, Object) and networkObject.network != self.network) or (isinstance(networkObject, Visible) and networkObject.display != self):
raise TypeError, 'The object argument passed to setVisibleOpacity() must be an object from the network being visualized by this display.'
if not isinstance(opacity, (int, float)) or opacity < 0.0 or opacity > 1.0:
raise ValueError, 'The opacity argument passed to setVisibleOpacity() must be an number between 0.0 and 1.0, inclusive.'
visible = None
if isinstance(networkObject, Object):
visibles = self.visiblesForObject(networkObject)
if len(visibles) == 1:
visible = visibles[0]
elif isinstance(networkObject, Stimulus):
visible = visibles[0 if visibles[0].isPath() else 1]
else:
visible = networkObject
if visible is not None:
visible.setOpacity(opacity)
def setVisibleWeight(self, networkObject, weight):
"""
Set the weight of the :class:`network object's <network.object.Object>` visualization.
The weight parameter should be a float value with 1.0 being a neutral weight. Currently this only applies to visualized connections.
"""
if not isinstance(networkObject, (Object, Visible)) or (isinstance(networkObject, Object) and networkObject.network != self.network) or (isinstance(networkObject, Visible) and networkObject.display != self):
raise TypeError, 'The object argument passed to setVisibleWeight() must be an object from the network being visualized by this display.'
if not isinstance(weight, (int, float)):
raise TypeError, 'The weight argument passed to setVisibleWeight() must be an number.'
visible = None
if isinstance(networkObject, Object):
visibles = self.visiblesForObject(networkObject)
if len(visibles) == 1:
visible = visibles[0]
elif isinstance(networkObject, Stimulus):
visible = visibles[0 if visibles[0].isPath() else 1]
else:
visible = networkObject
if visible is not None:
visible.setWeight(weight)
def setVisiblePath(self, networkObject, startObject, endObject, midPoints = None, fixed = None):
"""
Set the start and end points of a connecting :class:`object <network.object.Object>` and any additional mid-points.
The start and end object should be from the same network and the mid-points should be a list of coordinates, e.g. [(0.1, 0.3), (0.1, 0.5), (0.2, 0.5)].
If the start or end objects are moved, resized, etc. then the connecting object's visualization will be adjusted to maintain the connection.
"""
if isinstance(startObject, list):
# Versions 0.9.4 and prior put the midPoints first.
swap = startObject
startObject = endObject
endObject = midPoints
midPoints = swap
if ((not isinstance(networkObject, (Object, Visible)) or (isinstance(networkObject, Object) and networkObject.network != self.network) or (isinstance(networkObject, Visible) and networkObject.display != self)) or
not isinstance(startObject, (Object, Visible)) or (isinstance(startObject, Object) and startObject.network != self.network) or
not isinstance(endObject, (Object, Visible)) or (isinstance(endObject, Object) and endObject.network != self.network)):
raise ValueError, 'The object, startObject and endObject arguments passed to setVisiblePath() must be objects from the network being visualized by this display.'
if midPoints != None:
if not isinstance(midPoints, (list, tuple)):
raise TypeError, 'The midPoints argument passed to setVisiblePath() must be a list, a tuple or None.'
for midPoint in midPoints:
if not isinstance(midPoint, (list, tuple)) or len(midPoint) not in (2, 3):
raise ValueError, 'The mid-points passed to setVisiblePath() must be a list or tuple of numbers.'
for midPointDim in midPoint:
if not isinstance(midPointDim, (int, float)):
raise ValueError, 'Each list or tuple mid-point passed to setVisiblePath() must contain only numbers.'
if fixed != None:
if not isinstance(fixed, bool):
raise TypeError, 'The fixed argument passed to setVisiblePath() must be True, False or None'
visible = None
if isinstance(networkObject, Object):
visibles = self.visiblesForObject(networkObject)
if len(visibles) == 1:
visible = visibles[0]
elif isinstance(networkObject, Stimulus):
visible = visibles[0 if visibles[0].isPath() else 1]
else:
visible = networkObject
if visible is not None:
if isinstance(startObject, Object):
startVisibles = self.visiblesForObject(startObject)
if len(startVisibles) != 1:
raise ValueError, 'The starting object of the path is not visualized.'
else:
startVisibles = [startObject]
if isinstance(endObject, Object):
endVisibles = self.visiblesForObject(endObject)
if len(endVisibles) != 1:
raise ValueError, 'The ending object of the path is not visualized.'
else:
endVisibles = [endObject]
visible.setPathEndPoints(startVisibles[0], endVisibles[0])
if midPoints != None:
visible.setPathMidPoints(midPoints)
if fixed != None:
visible.setPathIsFixed(fixed)
def setVisibleFlowTo(self, networkObject, show = True, color = None, spacing = None, speed = None, spread = None):
"""
Set the visualization style for the flow of information from the :class:`path object <network.object.Object>` start to its end.
The color argument should be a tuple containing red, green and blue values. For example:
* (0.0, 0.0, 0.0) -> black
* (1.0, 0.0, 0.0) -> red
* (0.0, 1.0, 0.0) -> green
* (0.0, 0.0, 1.0) -> blue
* (1.0, 1.0, 1.0) -> white
The spacing argument determines how far apart the pulses are placed and the speed argument determines how fast they move. Both arguments should be in world space coordinates.
The spread argument determines how far the tail of the pulse reaches, from 0.0 (no tail) to 1.0 (the tail reaches all the way to the next pulse).
"""
if not isinstance(networkObject, (Object, Visible)) or (isinstance(networkObject, Object) and networkObject.network != self.network) or (isinstance(networkObject, Visible) and networkObject.display != self):
raise TypeError, 'The object argument passed to setVisibleFlowTo() must be an object from the network being visualized by this display.'
visible = None
if isinstance(networkObject, Object):
visibles = self.visiblesForObject(networkObject)
if len(visibles) == 1:
visible = visibles[0]
elif isinstance(networkObject, Stimulus):
visible = visibles[0 if visibles[0].isPath() else 1]
else:
visible = networkObject
if visible is not None:
visible.setFlowTo(show)
if color is not None:
if len(color) == 3:
color = (color[0], color[1], color[2], 1.0)
visible.setFlowToColor(color)
if spacing is not None:
visible.setFlowToSpacing(spacing)
if speed is not None:
visible.setFlowToSpeed(speed)
if spread is not None:
visible.setFlowToSpread(spread)
def setVisibleFlowFrom(self, networkObject, show = True, color = None, spacing = None, speed = None, spread = None):
"""
Set the visualization style for the flow of information from the :class:`path object's <network.object.Object>` end back to its start.
The color argument should be a tuple containing red, green and blue values. For example:
* (0.0, 0.0, 0.0) -> black
* (1.0, 0.0, 0.0) -> red
* (0.0, 1.0, 0.0) -> green
* (0.0, 0.0, 1.0) -> blue
* (1.0, 1.0, 1.0) -> white
The spacing argument determines how far apart the pulses are placed and the speed argument determines how fast they move. Both arguments should be in world space coordinates.
The spread argument determines how far the tail of the pulse reaches, from 0.0 (no tail) to 1.0 (the tail reaches all the way to the next pulse).
"""
if not isinstance(networkObject, (Object, Visible)) or (isinstance(networkObject, Object) and networkObject.network != self.network) or (isinstance(networkObject, Visible) and networkObject.display != self):
raise TypeError, 'The object argument passed to setVisibleFlowFrom() must be an object from the network being visualized by this display.'
visible = None
if isinstance(networkObject, Object):
visibles = self.visiblesForObject(networkObject)
if len(visibles) == 1:
visible = visibles[0]
elif isinstance(networkObject, Stimulus):
visible = visibles[0 if visibles[0].isPath() else 1]
else:
visible = networkObject
if visible is not None:
visible.setFlowFrom(show)
if color is not None:
if len(color) == 3:
color = (color[0], color[1], color[2], 1.0)
visible.setFlowFromColor(color)
if spacing is not None:
visible.setFlowFromSpacing(spacing)
if speed is not None:
visible.setFlowFromSpeed(speed)
if spread is not None:
visible.setFlowFromSpread(color)
def setArrangedAxis(self, networkObject, axis = 'largest', recurse = False):
"""
Automatically arrange the visible children of the indicated :class:`network object <network.object.Object>` along the specified axis.
The axis value should be one of 'largest', 'X', 'Y', 'Z' or None. When 'largest' is indicated the children will be arranged along whichever axis is longest at any given time. Resizing the parent object therefore can change which axis is used.
If recurse is True then all descendants will have their axes set as well.
"""
if not isinstance(networkObject, (Object, Visible)) or (isinstance(networkObject, Object) and networkObject.network != self.network) or (isinstance(networkObject, Visible) and networkObject.display != self):
raise ValueError, 'The object argument passed to setArrangedAxis() must be an object from the network being visualized by this display .'
if axis not in [None, 'largest', 'X', 'Y', 'Z']:
raise ValueError, 'The axis argument passed to setArrangedAxis() must be one of \'largest\', \'X\', \'Y\', \'Z\' or None.'
visible = None
if isinstance(networkObject, Object):
visibles = self.visiblesForObject(networkObject)
if len(visibles) == 1:
visible = visibles[0]
else:
visible = networkObject
if visible is not None:
visible.setArrangedAxis(axis = axis, recurse = recurse)
def setArrangedSpacing(self, networkObject, spacing = .02, recurse = False):
"""
Set the visible spacing between the children of the indicated :class:`network object <network.object.Object>`.
The spacing is measured as a fraction of the whole. So a value of .02 uses 2% of the parent's size for the spacing between each object.
If recurse is True then all descendants will have their spacing set as well.
"""
if not isinstance(networkObject, (Object, Visible)) or (isinstance(networkObject, Object) and networkObject.network != self.network) or (isinstance(networkObject, Visible) and networkObject.display != self):
raise ValueError, 'The object argument passed to setArrangedSpacing() must be an object from the network being visualized by this display .'
if not isinstance(spacing, (int, float)):
raise TypeError, 'The spacing argument passed to setArrangedSpacing() must be an integer or floating point value.'
visible = None
if isinstance(networkObject, Object):
visibles = self.visiblesForObject(networkObject)
if len(visibles) == 1:
visible = visibles[0]
else:
visible = networkObject
if visible is not None:
visible.setArrangedSpacing(spacing = spacing, recurse = recurse)
def setArrangedWeight(self, networkObject, weight):
    """
    Set the amount of its parent's space the indicated :class:`network object <network.object.Object>` should use compared to its siblings.
    
    Larger weight values will result in more of the parent's space being used.
    """
    # Fix: Python-3-compatible raise syntax; also dropped a stale docstring line
    # about a "recurse" argument that this method does not accept.
    if not isinstance(networkObject, (Object, Visible)) or (isinstance(networkObject, Object) and networkObject.network != self.network) or (isinstance(networkObject, Visible) and networkObject.display != self):
        raise ValueError('The object argument passed to setArrangedWeight() must be an object from the network being visualized by this display .')
    if not isinstance(weight, (int, float)):
        raise TypeError('The weight argument passed to setArrangedWeight() must be an integer or floating point value.')
    # Resolve a network Object to its (unique) visible proxy; a Visible is used as-is.
    visible = None
    if isinstance(networkObject, Object):
        visibles = self.visiblesForObject(networkObject)
        if len(visibles) == 1:
            visible = visibles[0]
    else:
        visible = networkObject
    if visible is not None:
        visible.setArrangedWeight(weight)
def selectObjectsMatching(self, predicate):
    """Select the visibles of every network object that satisfies the given predicate."""
    matches = []
    for candidate in self.network.objects:
        if predicate.matches(candidate):
            matches.extend(self.visiblesForObject(candidate))
    self.selectVisibles(matches)
def selectObjects(self, objects, extend = False, findShortestPath = False, color = None):
    """
    Select the indicated :class:`network objects <network.object.Object>`.
    
    If extend is True then the objects will be added to the current selection, otherwise the objects will replace the current selection.
    
    If findShortestPath is True then the shortest path between the currently selected object(s)s and the indicated object(s) will be found and all will be selected.
    """
    # Fix: Python-3-compatible raise syntax.
    if not isinstance(objects, (list, tuple, set)):
        raise TypeError('The objects argument passed to selectObjects must be a list, tuple or set.')
    # Gather every visible proxy of every requested object.
    visibles = []
    for networkObject in objects:
        visibles.extend(self.visiblesForObject(networkObject))
    if color:
        # Remember the requested selection highlight color per visible.
        for visible in visibles:
            self._visiblesSelectionColors[visible] = color
    self.selectVisibles(visibles, extend, findShortestPath)
def deselectObjects(self, objects):
    """
    Deselect the indicated :class:`network objects <network.object.Object>`.
    
    Objects will be deleted from the current selection.
    """
    # Fix: Python-3-compatible raise syntax, and the error message previously
    # named the wrong function ("selectObjects").
    if not isinstance(objects, (list, tuple, set)):
        raise TypeError('The objects argument passed to deselectObjects must be a list, tuple or set.')
    visibles = []
    for networkObject in objects:
        visibles.extend(self.visiblesForObject(networkObject))
    self.deselectVisibles(visibles)
def deselectObject(self, networkObject):
    """
    Deselect the indicated :class:`network object <network.object.Object>`.
    
    The object's visibles are removed from the current selection one at a time.
    """
    for proxy in self.visiblesForObject(networkObject):
        self.deselectVisibles([proxy])
def selectObject(self, networkObject, extend = False, findShortestPath = False, color = None):
    """
    Select the indicated :class:`network object <network.object.Object>`.
    
    If extend is True then the object will be added to the current selection, otherwise the object will replace the current selection.
    
    If findShortestPath is True then the shortest path between the currently selected object(s)s and the indicated object will be found and all will be selected.
    """
    for proxy in self.visiblesForObject(networkObject):
        if color:
            # Record the requested highlight color before selecting.
            self._visiblesSelectionColors[proxy] = color
        self.selectVisibles([proxy], extend, findShortestPath)
def objectIsSelected(self, networkObject):
    """
    Return whether the indicated :class:`network object <network.object.Object>` is part of the current selection.
    """
    return any(proxy in self.selectedVisibles for proxy in self.visiblesForObject(networkObject))
def selectVisibles(self, visibles, extend = False, findShortestPath = False, fromclick=False, reselect=False):
    """
    Select the indicated :class:`visible proxies <display.visible.Visible>`.
    
    If extend is True then the visible will be added to the current selection, otherwise the visible will replace the current selection.
    
    If findShortestPath is True then the shortest path between the currently selected visible(s) and the indicated visible will be found and all will be selected.
    """
    # Start from the current selection when extending; otherwise start empty.
    if (extend or findShortestPath) and not self.hoverSelected:
        newSelection = set(self.selectedVisibles)
    else:
        newSelection = set()
    # When triggered by a click with hidden neurons, ignore fully transparent visibles.
    if self._hideUnselectedNeurons and fromclick == True and len(visibles):
        visibles = [visible for visible in visibles if visible.getCurrentOpacity() != 0]
    if findShortestPath:
        # Add the visibles that exist along the path to the selection.
        pathWasFound = False
        #TODO Slow
        for visible in visibles:
            for startVisible in self.selectedVisibles:
                for pathObject in self.network.shortestPath(startVisible.client, visible.client):
                    for pathVisible in self.visiblesForObject(pathObject):
                        pathWasFound = True
                        # Propagate the target's selection color along the path.
                        if visible in self._visiblesSelectionColors:
                            self._visiblesSelectionColors[pathVisible] = self._visiblesSelectionColors[visible]
                        newSelection.add(pathVisible)
        if not pathWasFound:
            wx.Bell()
    elif extend and len(visibles) == 1 and visibles[0] in newSelection:
        # Remove the visible from the selection
        newSelection.remove(visibles[0])
    else:
        # Add the visibles to the new selection.
        for visible in visibles:
            # Select the root of the object if appropriate.
            rootObject = visible.client.rootObject()
            if rootObject and not self.objectIsSelected(rootObject) and not self.visiblesForObject(rootObject)[0] in visibles:
                visibles = self.visiblesForObject(rootObject)
                # Highlight root object instead of visible
                if visible in self._visiblesSelectionColors:
                    self._visiblesSelectionColors[visibles[0]] = self._visiblesSelectionColors[visible]
                    del self._visiblesSelectionColors[visible]
            if any(visibles):
                visible = visibles[0]
            newSelection.add(visible)
    self._selectedShortestPath = findShortestPath
    # Only rebuild selection state when something actually changed (or a reselect was forced).
    if newSelection != self.selectedVisibles or (self.hoverSelected and not self.hoverSelecting) or reselect == True:
        self._clearDragger()
        self.selectedVisibles = newSelection
        if len(self.selectedVisibles) == 0:
            # There is no selection so hover selecting should be enabled.
            self.hoverSelecting = False
            self.hoverSelect = True
        elif not self.hoverSelecting:
            # An explicit selection has been made via the GUI or console.
            self.hoverSelect = False # disable hover selecting
            # TODO Dragging doesn't work so this just takes time
            if len(self.selectedVisibles) == 1:
                pass
                # Add a dragger to the selected visible.
                # visible = list(self.selectedVisibles)[0]
                # if visible._isDraggable():
                #     self._addDragger(visible)
        # Notify observers (inspectors, etc.) that the selection changed.
        dispatcher.send(('set', 'selection'), self)
    self.hoverSelected = self.hoverSelecting
    self.hoverSelecting = False
    self.Refresh()
def deselectVisibles(self, visibles):
    """
    Deselect the indicated :class:`visible proxies <display.visible.Visible>`.
    
    The visible will be deleted from the current selection.
    """
    # Work on a copy so the comparison below can detect an actual change.
    newSelection = set(self.selectedVisibles)
    for visible in visibles:
        if visible in newSelection:
            newSelection.remove(visible)
    if newSelection != self.selectedVisibles or (self.hoverSelected and not self.hoverSelecting):
        self._clearDragger()
        self.selectedVisibles = newSelection
        if len(self.selectedVisibles) == 0:
            # There is no selection so hover selecting should be enabled.
            self.hoverSelecting = False
            self.hoverSelect = True
        elif not self.hoverSelecting:
            # An explicit selection has been made via the GUI or console.
            self.hoverSelect = False # disable hover selecting
            # TODO Dragging doesn't work so this just takes time
            if len(self.selectedVisibles) == 1:
                pass
                # Add a dragger to the selected visible.
                # visible = list(self.selectedVisibles)[0]
                # if visible._isDraggable():
                #     self._addDragger(visible)
        # Notify observers (inspectors, etc.) that the selection changed.
        dispatcher.send(('set', 'selection'), self)
    self.hoverSelected = self.hoverSelecting
    self.hoverSelecting = False
    self.Refresh()
def selection(self):
    """Return the currently selected visibles wrapped in an :class:`ObjectList`."""
    return ObjectList(self.selectedVisibles)
def selectedObjects(self):
    """
    Return the list of :class:`network objects <network.object.Object>` that are currently selected.
    """
    # De-duplicate via a set comprehension: multiple visibles can share one client.
    return list({visible.client for visible in self.selectedVisibles if visible.client is not None})
def selectAll(self):
    """
    Select all :class:`network objects <network.object.Object>` in the visualization.
    """
    everything = [visible for visibleList in self.visibles.itervalues() for visible in visibleList]
    self.selectVisibles(everything)
def _onSelectionOrShowFlowChanged(self):
    """Recompute which visibles are highlighted, animated and ghosted for the current selection."""
    # Update the highlighting, animation and ghosting based on the current selection.
    # TODO: this should all be handled by display rules
    refreshWasSupressed = self._suppressRefresh
    self._suppressRefresh = True
    
    def _highlightObject(networkObject, originalObject = None):
        # Add all visibles of networkObject to the highlight/animate sets,
        # then walk up the parent chain.  Returns True if anything new was added.
        highlightedSomething = False
        # Highlight/animate all visibles for this object.
        # If root object's visible in colors, add this visible to colors too.
        originalColors = []
        if originalObject:
            originalVisibles = self.visiblesForObject(originalObject)
            originalColors = [o for o in originalVisibles if o in self._visiblesSelectionColors]
        for visible in self.visiblesForObject(networkObject):
            if visible.isPath():
                if visible not in visiblesToAnimate:
                    visiblesToAnimate.add(visible)
                    visiblesToHighlight.add(visible)
                    highlightedSomething = True
                    if originalColors:
                        self._visiblesSelectionColors[visible] = self._visiblesSelectionColors[originalColors[0]]
            elif visible not in visiblesToHighlight:
                visiblesToHighlight.add(visible)
                highlightedSomething = True
                if originalColors:
                    self._visiblesSelectionColors[visible] = self._visiblesSelectionColors[originalColors[0]]
        # Highlight to the root of the object if appropriate.
        networkObject = networkObject.parentObject()
        while networkObject:
            if _highlightObject(networkObject):
                networkObject = networkObject.parentObject()
            else:
                networkObject = None
        return highlightedSomething
    
    # TODO: selecting neuron X in Morphology.py doesn't highlight neurites
    def _highlightConnectedObjects(rootObjects, maxDepth, highlightWithinSelection):
        # Do a breadth-first search on the graph of objects.
        queue = [[rootObject] for rootObject in rootObjects]
        highlightedObjects = [rootObject.rootObject() for rootObject in rootObjects]
        visitedObjects = highlightedObjects
        while any(queue):
            curPath = queue.pop(0)
            curObject = curPath[-1]
            originalObject = curPath[0]
            visitedObjects.append(curObject)
            curObjectRoot = curObject.rootObject()
            # If we've reached a highlighted object or the maximum depth then highlight the objects in the current path.
            if curObjectRoot in highlightedObjects or (not highlightWithinSelection and len(curPath) == maxDepth + 1):
                for pathObject in curPath:
                    _highlightObject(pathObject, originalObject)
            # If we haven't reached the maximum depth then add the next layer of connections to the end of the queue.
            if len(curPath) <= maxDepth:
                for connectedObject in curObjectRoot.connections():
                    if connectedObject not in curPath and connectedObject.rootObject() not in curPath and connectedObject not in visitedObjects:
                        queue += [curPath + [connectedObject]]
    
    visiblesToHighlight = set()
    visiblesToAnimate = set()
    if self._selectedShortestPath or not self.selectConnectedVisibles:
        isSingleSelection = (len(self.selectedVisibles) == 1) or not self._highlightOnlyWithinSelection
        for selectedVisible in self.selectedVisibles:
            if isinstance(selectedVisible.client, Object):
                _highlightObject(selectedVisible.client)
            else:
                # The selected visible has no network counterpart so highlight/animate connected visibles purely based on connectivity in the visualization.
                visiblesToHighlight.add(selectedVisible)
                if selectedVisible.isPath() and (selectedVisible.flowTo() or selectedVisible.flowFrom()):
                    visiblesToAnimate.add(selectedVisible)
                    visiblesToHighlight.add(selectedVisible)
            if selectedVisible.isPath():
                # Highlight the visibles at each end of the path.
                if selectedVisible.flowTo() or selectedVisible.flowFrom():
                    visiblesToAnimate.add(selectedVisible)
                    visiblesToHighlight.add(selectedVisible)
                [visiblesToHighlight.add(endPoint) for endPoint in selectedVisible.pathEndPoints()]
            elif self.selectConnectedVisibles and not self._selectedShortestPath:
                # Animate paths connecting to this non-path visible and highlight the other end of the paths.
                for pathVisible in selectedVisible.connectedPaths:
                    otherVis = pathVisible._pathCounterpart(selectedVisible)
                    if isSingleSelection or otherVis in self.selectedVisibles:
                        visiblesToAnimate.add(pathVisible)
                        visiblesToHighlight.add(pathVisible)
                        visiblesToHighlight.add(otherVis)
    else:
        # TODO: handle object-less visibles
        # SLOW for selecting object, no time for deselecting objects
        _highlightConnectedObjects(self.selectedObjects(), self._selectionHighlightDepth, len(self.selectedVisibles) > 1 and self._highlightOnlyWithinSelection)
    # With nothing selected but flow display enabled, animate every flowing path.
    if len(self.selectedVisibles) == 0 and self._showFlow:
        for visibles in self.visibles.itervalues():
            for visible in visibles:
                if visible.isPath() and (visible.flowTo() or visible.flowFrom()):
                    visiblesToAnimate.add(visible)
    # Turn off highlighting/animating for visibles that shouldn't have it anymore.
    for highlightedNode in self.highlightedVisibles:
        if highlightedNode not in visiblesToHighlight:
            highlightedNode.setGlowColor(None)
            if highlightedNode in self._visiblesSelectionColors:
                del self._visiblesSelectionColors[highlightedNode]
    for animatedEdge in self.animatedVisibles:
        if animatedEdge not in visiblesToAnimate:
            animatedEdge.animateFlow(False)
            if animatedEdge in self._visiblesSelectionColors:
                del self._visiblesSelectionColors[animatedEdge]
    # Highlight/animate the visibles that should have it now.
    selectedString = ""
    for visibleToHighlight in visiblesToHighlight:
        if visibleToHighlight in self.selectedVisibles:
            if visibleToHighlight in self._visiblesSelectionColors:
                visibleToHighlight.setGlowColor(self._visiblesSelectionColors[visibleToHighlight])
            else:
                visibleToHighlight.setGlowColor(self._primarySelectionColor)
            visibleToHighlight._updateLabel()
            if isinstance(visibleToHighlight.client, Neuron) and visibleToHighlight.client.name:
                selectedString += " " + visibleToHighlight.client.name + ","
        elif visibleToHighlight in self._visiblesSelectionColors:
            visibleToHighlight.setGlowColor(self._visiblesSelectionColors[visibleToHighlight])
        elif not self._useGhosts:
            visibleToHighlight.setGlowColor(self._secondarySelectionColor)
        else:
            visibleToHighlight.setGlowColor(None)
    if self._printNeuronNamesOnSelection and selectedString:
        self.console.run("print 'Selected:" + selectedString[:-1] + "'", False, False)
    # SLOW
    for visibleToAnimate in visiblesToAnimate:
        visibleToAnimate.animateFlow()
    self.highlightedVisibles = visiblesToHighlight
    self.animatedVisibles = visiblesToAnimate
    # SLOWISH not the main culprit
    if self._useGhosts:
        # Dim everything that isn't selected, highlighted or animated.
        for visibles in self.visibles.itervalues():
            for visible in visibles:
                visible._updateOpacity()
    if any(self.animatedVisibles):
        # Start the animation timer and cap the frame rate at 60 fps.
        if not self._animationTimer.IsRunning():
            self._animationTimer.Start(1000.0 / 60.0)
    elif self._animationTimer.IsRunning():
        # Don't need to redraw automatically if nothing is animated.
        self._animationTimer.Stop()
    self._suppressRefresh = refreshWasSupressed
def _addDragger(self, visible):
    """Attach an osgManipulator dragger to the given visible so it can be moved/resized interactively."""
    # Re-parent the visible's scene-graph node under a manipulator Selection node.
    if visible.parent is None:
        rootNode = self.rootNode
    else:
        rootNode = visible.parent.childGroup
    lodBound = visible.sgNode.getBound()
    rootNode.removeChild(visible.sgNode)
    self.dragSelection = osgManipulator.Selection()
    self.dragSelection.addChild(visible.sgNode)
    rootNode.addChild(self.dragSelection)
    self.compositeDragger = None
    # Screen-size threshold (pixels) for switching between simple and composite draggers.
    pixelCutOff = 200.0
    if self.viewDimensions == 2:
        self.draggerScale = 1.0
        self.simpleDragger = osgManipulator.TranslatePlaneDragger()
        if not visible.sizeIsFixed():
            self.compositeDragger = osgManipulator.TabPlaneDragger()
        # Offset the dragger in front of the visible along the axis perpendicular
        # to the current orthographic view plane.
        if self.orthoViewPlane == 'xy':
            if visible.parent is None or not visible.sizeIsAbsolute():
                self.draggerOffset = (0.0, 0.0, visible.size()[2])
            else:
                self.draggerOffset = (0.0, 0.0, visible.size()[2] / visible.parent.worldSize()[2])
                pixelCutOff /= visible.parent.worldSize()[0]
            draggerMatrix = osg.Matrixd.rotate(pi / 2.0, osg.Vec3d(1, 0, 0)) * \
                            visible.sgNode.getMatrix() * \
                            osg.Matrixd.translate(*self.draggerOffset)
        elif self.orthoViewPlane == 'xz':
            if visible.parent is None or not visible.sizeIsAbsolute():
                self.draggerOffset = (0.0, visible.size()[1], 0.0)
            else:
                self.draggerOffset = (0.0, visible.size()[1] / visible.parent.worldSize()[1], 0.0)
                pixelCutOff /= visible.parent.worldSize()[0]
            draggerMatrix = visible.sgNode.getMatrix() * \
                            osg.Matrixd.translate(*self.draggerOffset)
        elif self.orthoViewPlane == 'zy':
            if visible.parent is None or not visible.sizeIsAbsolute():
                self.draggerOffset = (visible.size()[0], 0.0, 0.0)
            else:
                self.draggerOffset = (visible.size()[0] / visible.parent.worldSize()[0], 0.0, 0.0)
                pixelCutOff /= visible.parent.worldSize()[1]
            draggerMatrix = osg.Matrixd.rotate(pi / 2.0, osg.Vec3d(1, 0, 0)) * \
                            osg.Matrixd.rotate(pi / 2.0, osg.Vec3d(0, 1, 0)) * \
                            visible.sgNode.getMatrix() * \
                            osg.Matrixd.translate(*self.draggerOffset)
    elif self.viewDimensions == 3:
        self.draggerOffset = (0.0, 0.0, 0.0)
        # Slightly enlarge the dragger so it surrounds the visible.
        self.draggerScale = 1.02
        self.simpleDragger = osgManipulator.TranslateAxisDragger()
        if not visible.sizeIsFixed():
            self.compositeDragger = osgManipulator.TabBoxDragger()
        if visible.parent is not None and visible.sizeIsAbsolute():
            pixelCutOff /= visible.parent.worldSize()[0]
        draggerMatrix = osg.Matrixd.rotate(pi / 2.0, osg.Vec3d(1, 0, 0)) * \
                        osg.Matrixd.scale(self.draggerScale, self.draggerScale, self.draggerScale) * \
                        visible.sgNode.getMatrix()
    self.simpleDragger.setMatrix(draggerMatrix)
    self.simpleDragger.setupDefaultGeometry()
    self.commandMgr = osgManipulator.CommandManager()
    self.commandMgr.connect(self.simpleDragger, self.dragSelection)
    if visible.sizeIsFixed():
        # Fixed-size visibles only need the translate-only dragger.
        rootNode.addChild(self.simpleDragger)
        self.activeDragger = self.simpleDragger
    else:
        self.commandMgr.connect(self.compositeDragger, self.dragSelection)
        self.compositeDragger.setMatrix(draggerMatrix)
        self.compositeDragger.setupDefaultGeometry()
        # Use an LOD node so the simple dragger shows when the visible is small on
        # screen and the resize-capable composite dragger shows when it is large.
        self.draggerLOD = osg.LOD()
        self.draggerLOD.setRangeMode(osg.LOD.PIXEL_SIZE_ON_SCREEN)
        self.draggerLOD.addChild(self.simpleDragger, 0.0, pixelCutOff)
        self.draggerLOD.addChild(self.compositeDragger, pixelCutOff, 10000.0)
        self.draggerLOD.setCenter(lodBound.center())
        self.draggerLOD.setRadius(lodBound.radius())
        rootNode.addChild(self.draggerLOD)
        # TODO: This is a serious hack. The existing picking code in PickHandler doesn't handle the dragger LOD correctly. It always picks the composite dragger. Cull callbacks are added here so that we can know which dragger was most recently rendered.
        self.activeDragger = None
        self.simpleDragger.setCullCallback(DraggerCullCallback(self, self.simpleDragger).__disown__())
        self.compositeDragger.setCullCallback(DraggerCullCallback(self, self.compositeDragger).__disown__())
    # TODO: observe the visible's 'positionIsFixed' attribute and add/remove the draggers as needed
def _visibleWasDragged(self):
    """Copy the active dragger's transform back onto the (single) selected visible."""
    # TODO: It would be nice to constrain dragging if the visible has a parent. "Resistance" would be added when the child reached the parent border so that dragging slowed or stopped but if dragged far enough the child could force its way through.
    visible = list(self.selectedVisibles)[0]
    if self.activeDragger is not None:
        matrix = self.activeDragger.getMatrix()
        position = matrix.getTrans()
        size = matrix.getScale()
        # Sizes are stored relative to the parent unless the visible uses absolute sizing.
        if visible.parent is None or not visible.sizeIsAbsolute():
            parentSize = (1.0, 1.0, 1.0)
        else:
            parentSize = visible.parent.worldSize()
        # Undo the dragger's offset and scale applied in _addDragger().
        visible.setPosition((position.x() - self.draggerOffset[0], position.y() - self.draggerOffset[1], position.z() - self.draggerOffset[2]))
        visible.setSize((size.x() * parentSize[0] / self.draggerScale, size.y() * parentSize[1] / self.draggerScale, size.z() * parentSize[2] / self.draggerScale))
        visible._updateTransform()
def _clearDragger(self):
    """Tear down any active dragger, committing the drag and restoring the scene graph."""
    if self.dragSelection != None:
        visible = list(self.selectedVisibles)[0]
        if visible.parent is None:
            rootNode = self.rootNode
        else:
            rootNode = visible.parent.childGroup
        # Disconnect the manipulator plumbing before re-parenting nodes.
        self.commandMgr.disconnect(self.simpleDragger)
        if self.compositeDragger is not None:
            self.commandMgr.disconnect(self.compositeDragger)
        self.commandMgr = None
        # Move the visible's node back under its original parent.
        self.dragSelection.removeChild(visible.sgNode)
        rootNode.removeChild(self.dragSelection)
        self.dragSelection = None
        rootNode.addChild(visible.sgNode)
        # Apply the final drag transform to the visible's position/size.
        self._visibleWasDragged()
        # Remove whichever dragger node was attached (LOD wrapper or the simple dragger).
        if self.draggerLOD is not None:
            rootNode.removeChild(self.draggerLOD)
        else:
            rootNode.removeChild(self.simpleDragger)
        self.simpleDragger.setCullCallback(None)
        self.simpleDragger = None
        if self.compositeDragger is not None:
            self.compositeDragger.setCullCallback(None)
            self.compositeDragger = None
        self.draggerLOD = None
def onLayout(self, event):
    """Menu handler: instantiate and run the layout class registered under the event's id."""
    registry = self.GetTopLevelParent().layoutClasses
    chosenId = event.GetId()
    if chosenId in registry:
        chosen = registry[chosenId]()
        self.lastUsedLayout = chosen
    else:
        chosen = None
    self.performLayout(chosen)
def autoLayout(self, method = None):
    # Backwards compatibility method, new code should use performLayout() instead.
    anyMethod = method is None
    if self.viewDimensions == 2 and (anyMethod or method == 'graphviz'):
        from Layouts.force_directed import ForceDirectedLayout
        self.performLayout(ForceDirectedLayout())
    elif self.viewDimensions == 3 and (anyMethod or method == 'spectral'):
        from Layouts.spectral import SpectralLayout
        self.performLayout(SpectralLayout())
def performLayout(self, layout = None, **kwargs):
    """ Perform an automatic layout of the :class:`network objects <network.object.Object>` in the visualization.
    
    >>> display.performLayout(layouts['Force Directed'])
    
    The layout parameter should be one of the classes in layouts, an instance of one of the classes or None to re-execute the previous or default layout.
    """
    # Fix: Python-3-compatible raise syntax.
    if layout != None and not isinstance(layout, layout_module.Layout) and (not type(layout) == type(self.__class__) or not issubclass(layout, layout_module.Layout)):
        raise TypeError('The layout parameter passed to performLayout() should be one of the classes in layouts, an instance of one of the classes or None.')
    self.beginProgress('Laying out the network...')
    # Fix: capture the previous refresh state *before* the try block so the
    # finally clause can never hit a NameError when an early exception fires.
    refreshWasSuppressed = self._suppressRefresh
    try:
        if layout == None:
            # Fall back to the last layout used.
            layout = self.lastUsedLayout
        else:
            # If a layout class was passed in then create a default instance.
            if isinstance(layout, type(self.__class__)):
                layout = layout(**kwargs)
            if not layout.__class__.canLayoutDisplay(self):
                raise ValueError(gettext('The supplied layout cannot be used.'))
        if layout == None or not layout.__class__.canLayoutDisplay(self): # pylint: disable=E1103
            layouts = neuroptikon.scriptLocals()['layouts']
            if 'Graphviz' in layouts:
                layout = layouts['Graphviz'](**kwargs)
            elif 'Force Directed' in layouts:
                layout = layouts['Force Directed'](**kwargs)
            elif 'Spectral' in layouts:
                layout = layouts['Spectral'](**kwargs)
            else:
                # Pick the first layout class capable of laying out the display.
                for layoutClass in layouts.itervalues():
                    if layoutClass.canLayoutDisplay(self):
                        layout = layoutClass(**kwargs)
                        break
        # Suppress intermediate redraws while the layout repositions everything.
        self._suppressRefresh = True
        layout.layoutDisplay(self)
        self.lastUsedLayout = layout
    except:
        (exceptionType, exceptionValue) = sys.exc_info()[0:2]
        wx.MessageBox(str(exceptionValue) + ' (' + exceptionType.__name__ + ')', gettext('An error occurred while performing the layout:'), parent = self, style = wx.ICON_ERROR | wx.OK)
    finally:
        self._suppressRefresh = refreshWasSuppressed
        if self.viewDimensions == 2:
            self.zoomToFit()
        else:
            self.resetView()
        self.endProgress()
def saveViewAsImage(self, path):
    """
    Save a snapshot of the current visualization to an image file.
    
    The path parameter should indicate where the snapshot should be saved. The extension included in the path will determine the format of the image. Currently, bmp, jpg, png and tiff extensions are supported.
    
    If the background color of the display has an alpha value less than 1.0 then the image saved will have a transparent background for formats that support it.
    """
    clientWidth, clientHeight = self.GetClientSize()
    snapshot = osg.Image()
    # Make this display's GL context current before reading the framebuffer.
    self.SetCurrent(self.glContext)
    snapshot.readPixels(0, 0, clientWidth, clientHeight, osg.GL_RGBA, osg.GL_UNSIGNED_BYTE)
    osgDB.writeImageFile(snapshot, path)
def onSaveView(self, event_):
    """Prompt for a destination file and save a snapshot of the current view there."""
    formats = [('JPG', 'jpg'), ('Microsoft BMP', 'bmp'), ('PNG', 'png'), ('TIFF', 'tiff')]
    # Build the wx wildcard string: "desc|ext|desc|ext|...".
    wildcard = '|'.join(description + '|' + extension for description, extension in formats)
    fileDialog = wx.FileDialog(None, gettext('Save As:'), '', '', wildcard, wx.SAVE | wx.FD_OVERWRITE_PROMPT)
    if fileDialog.ShowModal() == wx.ID_OK:
        chosenExtension = formats[fileDialog.GetFilterIndex()][1]
        savePath = str(fileDialog.GetPath())
        if not savePath.endswith('.' + chosenExtension):
            savePath += '.' + chosenExtension
        self.saveViewAsImage(savePath)
    fileDialog.Destroy()
def setDefaultFlowColor(self, color):
    """
    Set the default color of the pulses in paths showing the flow of information.
    
    The color argument should be a tuple or list of three values between 0.0 and 1.0 indicating the red, green and blue values of the color. For example:
    
    * (0.0, 0.0, 0.0) -> black
    * (1.0, 0.0, 0.0) -> red
    * (0.0, 1.0, 0.0) -> green
    * (0.0, 0.0, 1.0) -> blue
    * (1.0, 1.0, 1.0) -> white
    """
    # Fix: Python-3-compatible raise syntax.
    if not isinstance(color, (list, tuple)): # or len(color) != 3:
        raise ValueError('The color passed to setDefaultFlowColor() must be a tuple or list of three numbers.')
    for colorComponent in color:
        if not isinstance(colorComponent, (int, float)) or colorComponent < 0.0 or colorComponent > 1.0:
            raise ValueError('The components of the color passed to setDefaultFlowColor() must all be numbers between 0.0 and 1.0, inclusive.')
    if len(color) == 3:
        # Default to fully opaque when no alpha component is supplied.
        color = (color[0], color[1], color[2], 1.0)
    if color != self.defaultFlowColor:
        self.defaultFlowColor = color
        # Push the new color into both flow-direction shader uniforms.
        vec4color = osg.Vec4f(color[0], color[1], color[2], color[3])
        self.defaultFlowToColorUniform.set(vec4color)
        self.defaultFlowFromColorUniform.set(vec4color)
        dispatcher.send(('set', 'defaultFlowColor'), self)
def setDefaultFlowSpacing(self, spacing):
    """
    Set the default spacing between pulses in paths showing the flow of information.
    
    The spacing argument is measured in world-space coordinates.
    """
    # Fix: Python-3-compatible raise syntax.
    if not isinstance(spacing, (int, float)):
        raise TypeError('The spacing passed to setDefaultFlowSpacing() must be a number.')
    if spacing != self.defaultFlowSpacing:
        self.defaultFlowSpacing = float(spacing)
        # Update both flow-direction shader uniforms and notify observers.
        self.defaultFlowToSpacingUniform.set(self.defaultFlowSpacing)
        self.defaultFlowFromSpacingUniform.set(self.defaultFlowSpacing)
        dispatcher.send(('set', 'defaultFlowSpacing'), self)
def setDefaultFlowSpeed(self, speed):
    """
    Set the default speed of the pulses in paths showing the flow of information.
    
    The speed argument is measured in world-space coordinates per second.
    """
    # Fix: Python-3-compatible raise syntax.
    if not isinstance(speed, (int, float)):
        raise TypeError('The speed passed to setDefaultFlowSpeed() must be a number.')
    if speed != self.defaultFlowSpeed:
        self.defaultFlowSpeed = float(speed)
        # Update both flow-direction shader uniforms and notify observers.
        self.defaultFlowToSpeedUniform.set(self.defaultFlowSpeed)
        self.defaultFlowFromSpeedUniform.set(self.defaultFlowSpeed)
        dispatcher.send(('set', 'defaultFlowSpeed'), self)
def setDefaultFlowSpread(self, spread):
    """
    Set the length of the pulse tails in paths showing the flow of information.
    
    The spread argument should be a number from 0.0 (no tail) to 1.0 (tail extends all the way to the next pulse).
    """
    # Fix: Python-3-compatible raise syntax.
    if not isinstance(spread, (int, float)):
        raise TypeError('The spread passed to setDefaultFlowSpread() must be a number.')
    if spread != self.defaultFlowSpread:
        self.defaultFlowSpread = float(spread)
        # Update both flow-direction shader uniforms and notify observers.
        self.defaultFlowToSpreadUniform.set(self.defaultFlowSpread)
        self.defaultFlowFromSpreadUniform.set(self.defaultFlowSpread)
        dispatcher.send(('set', 'defaultFlowSpread'), self)
def beginProgress(self, message = None, visualDelay = 1.0):
    """
    Display a message that a lengthy task has begun.
    
    Each call to this method must be balanced by a call to :meth:`endProgress <display.display.Display.endProgress>`. Any number of :meth:`updateProgress <display.display.Display.updateProgress>` calls can be made in the interim. Calls to this method can be nested as long as the right number of :meth:`endProgress <display.display.Display.endProgress>` calls are made.
    
    The visualDelay argument indicates how many seconds to wait until the progress user interface is shown. This avoids flashing the interface open and closed for tasks that end up running quickly.
    """
    # Progress reporting is delegated to the top-level frame.
    frame = self.GetTopLevelParent()
    return frame.beginProgress(message, visualDelay)
def updateProgress(self, message = None, fractionComplete = None):
    """
    Update the message and/or completion fraction during a lengthy task.
    
    If the user has pressed the Cancel button then this method will return False and the task should be aborted.
    """
    # Progress reporting is delegated to the top-level frame.
    frame = self.GetTopLevelParent()
    return frame.updateProgress(message, fractionComplete)
def endProgress(self):
    """
    Indicate that the lengthy task has ended.
    """
    # Progress reporting is delegated to the top-level frame.
    frame = self.GetTopLevelParent()
    return frame.endProgress()
def addObjectOfClass(self, objectClass):
    # Begin an interactive "add object" gesture: create a placeholder visible
    # (no network object yet) using the class's default visualization parameters
    # and remember which class is being added.
    self._visibleBeingAdded = self.visualizeObject(None, **objectClass._defaultVisualizationParams())
    self._visibleBeingAdded.objectClass = objectClass
def objectClassBeingAdded(self):
    """Return the class of the object currently being added interactively, or None."""
    if self._visibleBeingAdded:
        return self._visibleBeingAdded.objectClass
    return None
class DisplayDropTarget(wx.PyDropTarget):
    """Drop target that accepts ontology terms dragged onto a display and creates regions for them."""
    
    def __init__(self, display):
        wx.PyDropTarget.__init__(self)
        self.display = display
        # specify the type of data we will accept
        self.dropData = wx.CustomDataObject("Neuroptikon Ontology Term")
        self.SetDataObject(self.dropData)
    
    def OnData(self, x_, y_, dragType):
        if self.GetData():
            # The payload is a pickled dict identifying the ontology and the term.
            payload = cPickle.loads(self.dropData.GetData())
            ontology = neuroptikon.library.ontology(payload['Ontology'])
            if ontology is not None:
                term = ontology[payload['Term']]
                if term is not None:
                    # Holding Alt also creates regions for the term's sub-terms.
                    self.display.network.createRegion(ontologyTerm = term, addSubTerms = wx.GetKeyState(wx.WXK_ALT))
                    if len(self.display.visibles) == 1:
                        self.display.zoomToFit()
        return dragType
| {
"content_hash": "a97c2e0c87f66ef8aeeac835e9f56da5",
"timestamp": "",
"source": "github",
"line_count": 2993,
"max_line_length": 375,
"avg_line_length": 47.75609756097561,
"alnum_prop": 0.606034953195181,
"repo_name": "JaneliaSciComp/Neuroptikon",
"id": "8b3fd4fbf3bbadaed26918c71b9d7c3a8099377a",
"size": "143164",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Source/display/display.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "GLSL",
"bytes": "3048"
},
{
"name": "HTML",
"bytes": "97998"
},
{
"name": "Inno Setup",
"bytes": "2349"
},
{
"name": "Python",
"bytes": "8142986"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.core.validators import MaxLengthValidator
from django_postgres_pgpfields.proxy import EncryptedProxyField
def remove_validators(validators, validator_class):
    """Exclude `validator_class` instances from `validators` list."""
    kept = []
    for validator in validators:
        if not isinstance(validator, validator_class):
            kept.append(validator)
    return kept
class PGPMixin(object):
    """PGP encryption for field's value.

    `PGPMixin` uses 'pgcrypto' to encrypt data in a postgres database.
    """

    # Descriptor installed on the model class; returns decrypted values on access.
    descriptor_class = EncryptedProxyField

    def __init__(self, *args, **kwargs):
        """`max_length` should be set to None as encrypted text size is variable."""
        kwargs['max_length'] = None
        super(PGPMixin, self).__init__(*args, **kwargs)

    def contribute_to_class(self, cls, name, **kwargs):
        """
        Add a decrypted field proxy to the model.

        Add to the field model an `EncryptedProxyField` to get the decrypted
        values of the field.

        The decrypted value can be accessed using the field's name attribute on
        the model instance.
        """
        super(PGPMixin, self).contribute_to_class(cls, name, **kwargs)
        setattr(cls, self.name, self.descriptor_class(field=self))

    def db_type(self, connection=None):
        """Value stored in the database is hexadecimal."""
        # pgcrypto's PGP functions operate on and return bytea columns.
        return 'bytea'

    def get_placeholder(self, value=None, compiler=None, connection=None):
        """
        Tell postgres to encrypt this field using PGP.

        `value`, `compiler`, and `connection` are ignored here as we don't need
        custom operators.
        """
        # `encrypt_sql` is expected to be supplied by the concrete field class — TODO confirm.
        return self.encrypt_sql

    def _check_max_length_attribute(self, **kwargs):
        """Override `_check_max_length_attribute` to remove check on max_length."""
        return []
class RemoveMaxLengthValidatorMixin(object):
    """Strip ``MaxLengthValidator`` from the field's validator list."""

    def __init__(self, *args, **kwargs):
        """Let the parent build its validators, then drop the length check."""
        super(RemoveMaxLengthValidatorMixin, self).__init__(*args, **kwargs)
        self.validators = remove_validators(
            self.validators, MaxLengthValidator)
class EmailPGPPublicKeyFieldMixin(PGPMixin, RemoveMaxLengthValidatorMixin):
    """Email mixin for PGP public key fields.

    Combines PGP encryption of the stored value (``PGPMixin``) with
    removal of the ``MaxLengthValidator`` that Django's email fields
    normally add (``RemoveMaxLengthValidatorMixin``), since the encrypted
    value has no fixed length.
    """
| {
"content_hash": "1af9b4ee31f4a292abad498bf50f4535",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 84,
"avg_line_length": 35.784615384615385,
"alnum_prop": 0.6693895098882201,
"repo_name": "coldmind/django-postgres-pgpfields",
"id": "a154e74ba587fd69bc8f9674e7f026206ac9787e",
"size": "2326",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_postgres_pgpfields/mixins.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "305"
},
{
"name": "Python",
"bytes": "19591"
}
],
"symlink_target": ""
} |
import binascii
import hashlib
import hmac
from disqusapi import compat
from disqusapi.compat import urllib_parse as urlparse
def build_interfaces_by_method(interfaces):
    """
    Re-index the ``INTERFACES`` mapping by HTTP method and dotted
    endpoint name, for use with the method-oriented disqusapi interface
    instead of the endpoint interface. For instance::

        'blacklists': {
            'add': {
                'formats': ['json', 'jsonp'],
                'method': 'POST',
                'required': ['forum']
            }
        }

    becomes::

        'POST': {
            'blacklists.add': {
                'formats': ['json', 'jsonp'],
                'method': 'POST',
                'required': ['forum']
            }
        }
    """
    methods = {}

    def walk(node, parts):
        # A leaf endpoint block is recognised by its 'method' key;
        # anything else is a namespace to descend into.
        if 'method' not in node:
            for name, child in compat.iteritems(node):
                walk(child, parts + [name])
            return
        http_method = node['method'].lower()
        endpoint = '.'.join(parts)
        methods.setdefault(http_method, {})[endpoint] = node

    for name, child in compat.iteritems(interfaces):
        walk(child, [name])
    return methods
def get_normalized_params(params):
    """Sort the given (key, value) pairs and URL-encode them.

    Produces the normalized parameter string used when signing requests.
    """
    ordered = sorted(params)
    return urlparse.urlencode(ordered)
def get_normalized_request_string(method, url, nonce, params, ext='', body_hash=None):
    """Build the normalized request string from the OAuth2 MAC draft.

    http://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-00#section-3.3.1
    """
    parts = urlparse.urlparse(url)

    # Prefer an explicit query string on the URL; otherwise fall back to
    # the normalized form of the supplied params, or just the path.
    if parts.query:
        norm_url = '%s?%s' % (parts.path, parts.query)
    elif params:
        norm_url = '%s?%s' % (parts.path, get_normalized_params(params))
    else:
        norm_url = parts.path

    if not body_hash:
        body_hash = get_body_hash(params)

    port = parts.port
    if not port:
        # Only the two well-known schemes have an implicit default port.
        assert parts.scheme in ('http', 'https')
        port = 80 if parts.scheme == 'http' else 443

    fields = [nonce, method.upper(), norm_url, parts.hostname, port,
              body_hash, ext, '']
    return '\n'.join(map(str, fields))
def get_body_hash(params):
    """
    Return BASE64 ( HASH (text) ) as described in OAuth2 MAC spec.

    http://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-00#section-3.2

    The normalized parameter string is encoded to bytes before hashing:
    on Python 3, ``hashlib`` only accepts bytes (on Python 2 the value is
    already a byte string and is used as-is). The urlencoded output is
    ASCII-safe, so UTF-8 encoding cannot change its content.
    """
    norm_params = get_normalized_params(params)
    if not isinstance(norm_params, bytes):
        norm_params = norm_params.encode('utf-8')
    return binascii.b2a_base64(hashlib.sha1(norm_params).digest())[:-1]
def get_mac_signature(api_secret, norm_request_string):
    """
    Return HMAC-SHA1 (api secret, normalized request string), BASE64-encoded.

    Both arguments are accepted as text or bytes; text is encoded as
    UTF-8, because ``hmac.new`` requires a bytes key and message on
    Python 3 (the original ``str()`` coercion raised ``TypeError`` there).
    """
    key = (api_secret if isinstance(api_secret, bytes)
           else str(api_secret).encode('utf-8'))
    message = (norm_request_string if isinstance(norm_request_string, bytes)
               else norm_request_string.encode('utf-8'))
    hashed = hmac.new(key, message, hashlib.sha1)
    return binascii.b2a_base64(hashed.digest())[:-1]
| {
"content_hash": "f7d12be0ddd4464ff7a91026b67e3bd1",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 91,
"avg_line_length": 27.77358490566038,
"alnum_prop": 0.59375,
"repo_name": "disqus/disqus-python",
"id": "76bb9346e162ddacdb26d7c3031022d59c03f4fe",
"size": "2944",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "disqusapi/utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "238"
},
{
"name": "Python",
"bytes": "23832"
}
],
"symlink_target": ""
} |
from . import TranslationRotation | {
"content_hash": "086bdee570d6667055b59501108db32c",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 34,
"avg_line_length": 34,
"alnum_prop": 0.8529411764705882,
"repo_name": "kastman/occiput",
"id": "080e9c3ecc511f7b30d860d5266c8b884a882dd3",
"size": "188",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "occiput/Registration/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "264798"
}
],
"symlink_target": ""
} |
"""
Helper functions for scrapy project
dupCheck(): Delete duplicate entries.
Called after each reactor.run() in crimelog.py
"""
import sys
# Make the project root importable and point Django at its settings
# before importing any models (required when this module is run by
# scrapy, outside of manage.py).
sys.path.insert(0, '/home/amirkurtovic/crimeline')
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'crimeline.settings'
from storylist.models import Story
# NOTE(review): DjangoItem appears unused in this module — confirm
# before removing the import.
from scrapy.contrib.djangoitem import DjangoItem
def dupCheck():
    '''
    Remove duplicate Story rows sharing the same URL.

    For every distinct URL in the table, the first matching Story is
    kept and every later duplicate is deleted.
    '''
    distinct_urls = Story.objects.values_list('url', flat=True).distinct()
    for url in distinct_urls:
        matching_ids = Story.objects.filter(url=url).values_list('id',
                                                                 flat=True)
        Story.objects.filter(pk__in=matching_ids[1:]).delete()
| {
"content_hash": "b9b691b52c72e7ca52a8f29a8a07d3cb",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 102,
"avg_line_length": 27.5,
"alnum_prop": 0.746969696969697,
"repo_name": "akurtovic/STL-Crimelog",
"id": "ba07ec87ddc6c5ebd5b75ba897590c8bbd8e8dd9",
"size": "660",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "crimelog/crimelog/helpers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "892"
},
{
"name": "JavaScript",
"bytes": "4054"
},
{
"name": "Python",
"bytes": "16695"
}
],
"symlink_target": ""
} |
"""Model script to test TF-TensorRT integration."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
from tensorflow.python.compiler.tensorrt.test import tf_trt_integration_test_base as trt_test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.platform import test
class BinaryTensorWeightBroadcastTest(trt_test.TfTrtIntegrationTestBase):
  """Tests for scale & elementwise layers in TF-TRT.

  Exercises conversion of additions between a dynamic tensor and constant
  weights, covering both TensorRT scale-layer shapes and general
  elementwise broadcast shapes.
  """

  def _ConstOp(self, shape):
    # Random float32 constant used as the weight operand of each addition.
    return constant_op.constant(np.random.randn(*shape), dtype=dtypes.float32)

  def GraphFn(self, x):
    # For every weight shape, add the weight on both sides of the tensor
    # (x + a and a + x) with a TRT-incompatible op in between, so each
    # addition is segmented into its own TensorRT engine.
    for weights_shape in [
        (1,),  # scale
        (24, 1, 1),  # scale
        (24, 24, 20),  # scale
        (20,),  # elementwise
        (1, 24, 1, 1),  # elementwise
        (1, 24, 24, 1),  # elementwise
        (1, 24, 24, 20),  # elementwise
        (24, 20),  # elementwise
    ]:
      a = self._ConstOp(weights_shape)
      f = x + a
      x = self.trt_incompatible_op(f)
      a = self._ConstOp(weights_shape)
      f = a + x
      x = self.trt_incompatible_op(f)
    return gen_array_ops.reshape(x, [5, -1], name="output_0")

  def GetParams(self):
    # TODO(aaroey): test graph with different dtypes.
    # Single input of shape [10, 24, 24, 20]; output flattened to [5, 23040].
    return self.BuildParams(self.GraphFn, dtypes.float32, [[10, 24, 24, 20]],
                            [[5, 23040]])

  def ExpectedEnginesToBuild(self, run_params):
    """Return the expected engines to build."""
    # Two engines per weight shape (one per broadcast order): 8 * 2 = 16.
    return ["TRTEngineOp_%d" % i for i in range(16)]

  # TODO(b/176540862): remove this routine to disallow native segment execution
  # for TensorRT 7+.
  def setUp(self):
    super(trt_test.TfTrtIntegrationTestBase, self).setUp()  # pylint: disable=bad-super-call
    os.environ["TF_TRT_ALLOW_ENGINE_NATIVE_SEGMENT_EXECUTION"] = "True"
# Run the TF-TRT integration test when this module is executed directly.
if __name__ == "__main__":
  test.main()
| {
"content_hash": "0f1afa608d72341dba76787d4f1c8f17",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 93,
"avg_line_length": 33.32203389830509,
"alnum_prop": 0.6536113936927772,
"repo_name": "frreiss/tensorflow-fred",
"id": "7dc76363d5a1a70add921f23281f4cb580926b10",
"size": "2655",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow/python/compiler/tensorrt/test/binary_tensor_weight_broadcast_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "6729"
},
{
"name": "Batchfile",
"bytes": "49527"
},
{
"name": "C",
"bytes": "871761"
},
{
"name": "C#",
"bytes": "8562"
},
{
"name": "C++",
"bytes": "79093233"
},
{
"name": "CMake",
"bytes": "6500"
},
{
"name": "Dockerfile",
"bytes": "110545"
},
{
"name": "Go",
"bytes": "1852128"
},
{
"name": "HTML",
"bytes": "4686483"
},
{
"name": "Java",
"bytes": "961600"
},
{
"name": "Jupyter Notebook",
"bytes": "549457"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "MLIR",
"bytes": "1644156"
},
{
"name": "Makefile",
"bytes": "62398"
},
{
"name": "Objective-C",
"bytes": "116558"
},
{
"name": "Objective-C++",
"bytes": "303063"
},
{
"name": "PHP",
"bytes": "20523"
},
{
"name": "Pascal",
"bytes": "3982"
},
{
"name": "Pawn",
"bytes": "18876"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "40003007"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Roff",
"bytes": "2472"
},
{
"name": "Ruby",
"bytes": "7464"
},
{
"name": "Shell",
"bytes": "681596"
},
{
"name": "Smarty",
"bytes": "34740"
},
{
"name": "Swift",
"bytes": "62814"
},
{
"name": "Vim Snippet",
"bytes": "58"
}
],
"symlink_target": ""
} |
"""Unit tests for `evaluation.py`."""
from absl.testing import absltest
from clrs._src import evaluation
from clrs._src import probing
from clrs._src import specs
import jax
import jax.numpy as jnp
import numpy as np
class EvaluationTest(absltest.TestCase):
  """Tests for fusing permutation predictions into plain pointers."""

  def test_reduce_permutations(self):
    batch_size = 8
    num_nodes = 16
    # One random permutation per batch element, each from its own PRNG key.
    pred = jnp.stack(
        [jax.random.permutation(jax.random.PRNGKey(seed), num_nodes)
         for seed in range(batch_size)])
    heads = jax.random.randint(
        jax.random.PRNGKey(42), (batch_size,), 0, num_nodes)
    perm = probing.DataPoint(
        name='test',
        type_=specs.Type.PERMUTATION_POINTER,
        location=specs.Location.NODE,
        data=jax.nn.one_hot(pred, num_nodes))
    mask = probing.DataPoint(
        name='test_mask',
        type_=specs.Type.MASK_ONE,
        location=specs.Location.NODE,
        data=jax.nn.one_hot(heads, num_nodes))

    output = evaluation.fuse_perm_and_mask(perm=perm, mask=mask)

    # Fusing should yield ordinary pointers where each head node points
    # at itself and every other node keeps its predicted successor.
    expected_output = np.array(pred)
    expected_output[np.arange(batch_size), heads] = heads
    self.assertEqual(output.name, 'test')
    self.assertEqual(output.type_, specs.Type.POINTER)
    self.assertEqual(output.location, specs.Location.NODE)
    np.testing.assert_allclose(output.data, expected_output)
# Run the evaluation unit tests when this module is executed directly.
if __name__ == '__main__':
  absltest.main()
| {
"content_hash": "8ecd8322bf63875b91cd46b1aecafa0a",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 70,
"avg_line_length": 34.175,
"alnum_prop": 0.6100950987564009,
"repo_name": "deepmind/clrs",
"id": "4f83ac399a583ded5ca9f77b717175106846fac1",
"size": "2064",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "clrs/_src/evaluation_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "398809"
}
],
"symlink_target": ""
} |
import tensorflow as tf
import numpy as np
from neupy.utils import tensorflow_session
from neupy.core.properties import BoundedProperty, ProperFractionProperty
from .base import BaseOptimizer
__all__ = ('RPROP', 'IRPROPPlus')
class RPROP(BaseOptimizer):
    """
    Resilient backpropagation (RPROP) is an optimization
    algorithm for supervised learning.

    RPROP algorithm takes into account only direction of the gradient
    and completely ignores its magnitude. Every weight value has a unique
    step size associated with it (by default all of them are equal to
    ``step``).

    The rule is the following: when gradient direction changes (sign of
    the gradient) we decrease step size for the specific weight,
    multiplying it by ``decrease_factor``, and if the sign stays the same
    then we increase step size for this specific weight, multiplying it
    by ``increase_factor``.

    The step size is always bounded by ``minstep`` and ``maxstep``.

    Notes
    -----
    Algorithm doesn't work with mini-batches.

    Parameters
    ----------
    minstep : float
        Minimum possible value for step. Defaults to ``0.001``.

    maxstep : float
        Maximum possible value for step. Defaults to ``10``.

    increase_factor : float
        Increase factor for step in case when gradient doesn't change
        sign compared to previous epoch.

    decrease_factor : float
        Decrease factor for step in case when gradient changes sign
        compared to previous epoch.

    {BaseOptimizer.Parameters}

    Attributes
    ----------
    {BaseOptimizer.Attributes}

    Methods
    -------
    {BaseOptimizer.Methods}

    Examples
    --------
    >>> import numpy as np
    >>> from neupy import algorithms
    >>> from neupy.layers import *
    >>>
    >>> x_train = np.array([[1, 2], [3, 4]])
    >>> y_train = np.array([[1], [0]])
    >>>
    >>> network = Input(2) >> Sigmoid(3) >> Sigmoid(1)
    >>> optimizer = algorithms.RPROP(network)
    >>> optimizer.train(x_train, y_train)

    See Also
    --------
    :network:`IRPROPPlus` : iRPROP+ algorithm.
    :network:`GradientDescent` : GradientDescent algorithm.
    """
    # These properties set upper and lower bounds for the per-weight steps.
    minstep = BoundedProperty(default=0.001, minval=0)
    maxstep = BoundedProperty(default=10, minval=0)

    # These properties control how much the step grows or shrinks when
    # the gradient keeps or flips its sign.
    increase_factor = BoundedProperty(minval=1, default=1.2)
    decrease_factor = ProperFractionProperty(default=0.5)

    def update_prev_delta(self, prev_delta):
        # Hook for subclasses (e.g. iRPROP+) to transform the stored
        # previous delta before it is used for the weight roll-back.
        return prev_delta

    def init_train_updates(self):
        """Build the TensorFlow update ops implementing the RPROP rule."""
        updates = []
        variables = []

        # Collect only the trainable parameters of the network.
        for (_, _), variable in self.network.variables.items():
            if variable.trainable:
                variables.append(variable)

        gradients = tf.gradients(self.variables.loss, variables)

        for parameter, gradient in zip(variables, gradients):
            with tf.variable_scope(parameter.op.name):
                steps = tf.Variable(
                    # Steps will be decreased after the first iteration,
                    # because all previous gradients are equal to zero.
                    # In order to make sure that network will use the same
                    # step per every weight we re-scale step and after the
                    # first iteration it will be multiplied by
                    # ``decrease_factor`` and scaled back to the default
                    # step value.
                    tf.ones_like(parameter) * self.step,
                    name="steps",
                    dtype=tf.float32,
                )
                # Weight update applied in the previous epoch; needed to
                # revert it when the gradient flips sign.
                prev_delta = tf.Variable(
                    tf.zeros(parameter.shape),
                    name="prev-delta",
                    dtype=tf.float32,
                )
                # We collect only signs since it ensures numerical stability
                # after multiplication when we deal with small numbers.
                prev_gradient_sign = tf.Variable(
                    tf.zeros(parameter.shape),
                    name="prev-grad-sign",
                    dtype=tf.float32,
                )

                updated_prev_delta = self.update_prev_delta(prev_delta)
                gradient_sign = tf.sign(gradient)

                # Product of current and previous sign: +1 means same
                # direction, -1 means the gradient changed sign.
                grad_sign_product = gradient_sign * prev_gradient_sign
                gradient_changed_sign = tf.equal(grad_sign_product, -1)

                updated_steps = tf.clip_by_value(
                    tf.where(
                        tf.equal(grad_sign_product, 1),
                        steps * self.increase_factor,
                        tf.where(
                            gradient_changed_sign,
                            steps * self.decrease_factor,
                            steps,
                        )
                    ),
                    self.minstep,
                    self.maxstep,
                )
                parameter_delta = tf.where(
                    gradient_changed_sign,
                    # If we subtract previous negative weight update it means
                    # that we will revert weight update that has been applied
                    # in the previous iteration.
                    -updated_prev_delta,
                    updated_steps * gradient_sign,
                )
                # Making sure that during the next iteration sign, after
                # we multiplied by the new gradient, won't be negative.
                # Otherwise, the same roll back using previous delta
                # won't make much sense.
                clipped_gradient_sign = tf.where(
                    gradient_changed_sign,
                    tf.zeros_like(gradient_sign),
                    gradient_sign,
                )

                updates.extend([
                    (parameter, parameter - parameter_delta),
                    (steps, updated_steps),
                    (prev_gradient_sign, clipped_gradient_sign),
                    (prev_delta, parameter_delta),
                ])

        return updates
class IRPROPPlus(RPROP):
    """
    iRPROP+ is an optimization algorithm for supervised learning.
    This is a variation of the :network:`RPROP` algorithm that only
    reverts a weight update (after a gradient sign flip) when the
    training error has increased.

    Parameters
    ----------
    {RPROP.minstep}

    {RPROP.maxstep}

    {RPROP.increase_factor}

    {RPROP.decrease_factor}

    {BaseOptimizer.regularizer}

    {BaseOptimizer.network}

    {BaseOptimizer.loss}

    {BaseNetwork.show_epoch}

    {BaseNetwork.shuffle_data}

    {BaseNetwork.signals}

    {Verbose.verbose}

    Methods
    -------
    {BaseSkeleton.predict}

    {BaseOptimizer.train}

    {BaseSkeleton.fit}

    Notes
    -----
    {RPROP.Notes}

    Examples
    --------
    >>> import numpy as np
    >>> from neupy import algorithms
    >>> from neupy.layers import *
    >>>
    >>> x_train = np.array([[1, 2], [3, 4]])
    >>> y_train = np.array([[1], [0]])
    >>>
    >>> network = Input(2) >> Sigmoid(3) >> Sigmoid(1)
    >>> optimizer = algorithms.IRPROPPlus(network)
    >>> optimizer.train(x_train, y_train)

    References
    ----------
    [1] Christian Igel, Michael Huesken (2000)
        Improving the Rprop Learning Algorithm

    See Also
    --------
    :network:`RPROP` : RPROP algorithm.
    :network:`GradientDescent` : GradientDescent algorithm.
    """
    def init_functions(self):
        # Two scalar variables hold the errors from the last two epochs;
        # they start as NaN until real errors are loaded during training.
        self.variables.update(
            last_error=tf.Variable(np.nan, name='irprop-plus/last-error'),
            previous_error=tf.Variable(
                np.nan, name='irprop-plus/previous-error'),
        )
        super(IRPROPPlus, self).init_functions()

    def one_training_update(self, X_train, y_train):
        # Push the two most recent training errors into the TF variables
        # so the graph built in ``update_prev_delta`` can compare them.
        if len(self.errors.train) >= 2:
            previous_error, last_error = self.errors.train[-2:]
            session = tensorflow_session()
            self.variables.last_error.load(last_error, session)
            self.variables.previous_error.load(previous_error, session)
        return super(IRPROPPlus, self).one_training_update(X_train, y_train)

    def update_prev_delta(self, prev_delta):
        last_error = self.variables.last_error
        prev_error = self.variables.previous_error

        return tf.where(
            # We revert weight when gradient changed the sign only in
            # cases when error increased. Otherwise we don't apply any
            # update for this weight.
            last_error > prev_error,
            prev_delta,
            tf.zeros_like(prev_delta),
        )
| {
"content_hash": "889417067f42fc623e06b48a2204434b",
"timestamp": "",
"source": "github",
"line_count": 266,
"max_line_length": 79,
"avg_line_length": 31.5,
"alnum_prop": 0.5778732545649838,
"repo_name": "itdxer/neupy",
"id": "ee7cbb9400a908ceaf381c36db898e0213cf68b2",
"size": "8379",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "neupy/algorithms/gd/rprop.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "13847"
},
{
"name": "JavaScript",
"bytes": "7460"
},
{
"name": "Python",
"bytes": "16002521"
},
{
"name": "Shell",
"bytes": "434"
}
],
"symlink_target": ""
} |
"""Test cases for Zinnia's views"""
from datetime import date
from django.test import TestCase
from django.utils import timezone
from django.contrib.sites.models import Site
from django.test.utils import override_settings
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.signals import user_logged_in
from django.contrib.auth.models import update_last_login
from django.contrib.auth.tests.utils import skipIfCustomUser
import django_comments as comments
from zinnia.models.entry import Entry
from zinnia.models.author import Author
from zinnia.models.category import Category
from zinnia.views import quick_entry
from zinnia.managers import DRAFT
from zinnia.managers import PUBLISHED
from zinnia.settings import PAGINATION
from zinnia.tests.utils import datetime
from zinnia.tests.utils import urlEqual
from zinnia.flags import get_user_flagger
from zinnia.signals import connect_discussion_signals
from zinnia.signals import disconnect_entry_signals
from zinnia.signals import disconnect_discussion_signals
from zinnia.url_shortener.backends.default import base36
@skipIfCustomUser
@override_settings(
    TEMPLATE_LOADERS=(
        'zinnia.tests.utils.VoidLoader',
    ),
    TEMPLATE_CONTEXT_PROCESSORS=(
        'django.core.context_processors.request',
    ))
class ViewsBaseCase(TestCase):
    """
    Setup and utility function base case.

    Creates a site, an author, a category and two published entries
    (dated 2010-01-01 and 2010-05-31) that the view tests rely on.
    """

    def setUp(self):
        """Build the fixtures shared by every test."""
        # Disconnect signal handlers so that creating fixtures does not
        # trigger entry/discussion side effects during setup.
        disconnect_entry_signals()
        disconnect_discussion_signals()
        self.site = Site.objects.get_current()
        self.author = Author.objects.create_user(username='admin',
                                                 email='admin@example.com',
                                                 password='password')
        self.category = Category.objects.create(title='Tests', slug='tests')
        # First published entry, dated 2010-01-01.
        params = {'title': 'Test 1',
                  'content': 'First test entry published',
                  'slug': 'test-1',
                  'tags': 'tests',
                  'creation_date': datetime(2010, 1, 1, 23, 00),
                  'status': PUBLISHED}
        entry = Entry.objects.create(**params)
        entry.sites.add(self.site)
        entry.categories.add(self.category)
        entry.authors.add(self.author)
        self.first_entry = entry
        # Second published entry, dated 2010-05-31.
        params = {'title': 'Test 2',
                  'content': 'Second test entry published',
                  'slug': 'test-2',
                  'tags': 'tests',
                  'creation_date': datetime(2010, 5, 31, 23, 00),
                  'status': PUBLISHED}
        entry = Entry.objects.create(**params)
        entry.sites.add(self.site)
        entry.categories.add(self.category)
        entry.authors.add(self.author)
        self.second_entry = entry

    def create_published_entry(self):
        """Create and return one more published entry dated 2010-01-01."""
        params = {'title': 'My test entry',
                  'content': 'My test content',
                  'slug': 'my-test-entry',
                  'tags': 'tests',
                  'creation_date': datetime(2010, 1, 1, 23, 0),
                  'status': PUBLISHED}
        entry = Entry.objects.create(**params)
        entry.sites.add(self.site)
        entry.categories.add(self.category)
        entry.authors.add(self.author)
        return entry

    def check_publishing_context(self, url, first_expected,
                                 second_expected=None,
                                 friendly_context=None,
                                 queries=None):
        """Test the numbers of entries in context of an url.

        ``first_expected`` is the entry count for the initial request.
        If ``second_expected`` is given, one more published entry is
        created and the count is checked again. ``friendly_context``
        names a context alias that must equal ``object_list``;
        ``queries`` optionally asserts the number of SQL queries used
        by the first request.
        """
        if queries is not None:
            with self.assertNumQueries(queries):
                response = self.client.get(url)
        else:
            response = self.client.get(url)
        self.assertEqual(len(response.context['object_list']),
                         first_expected)
        if second_expected:
            self.create_published_entry()
            response = self.client.get(url)
            self.assertEqual(len(response.context['object_list']),
                             second_expected)
        if friendly_context:
            self.assertEqual(
                response.context['object_list'],
                response.context[friendly_context])
        return response

    def check_capabilities(self, url, mimetype, queries=0):
        """Test simple views for the Weblog capabilities.

        Checks the response content type and that the view exposes a
        ``protocol`` context variable, within ``queries`` SQL queries.
        """
        with self.assertNumQueries(queries):
            response = self.client.get(url)
        self.assertEqual(response['Content-Type'], mimetype)
        self.assertTrue('protocol' in response.context)
@override_settings(
ROOT_URLCONF='zinnia.tests.implementations.urls.default'
)
class ViewsTestCase(ViewsBaseCase):
"""
Test cases for generic views used in the application,
for reproducing and correcting issue :
http://github.com/Fantomas42/django-blog-zinnia/issues#issue/3
"""
@override_settings(USE_TZ=False)
def test_zinnia_entry_archive_index_no_timezone(self):
template_name_today = 'zinnia/archives/%s/entry_archive.html' % \
date.today().strftime('%Y/%m/%d')
response = self.check_publishing_context(
'/', 2, 3, 'entry_list', 2)
self.assertTemplateUsed(response, template_name_today)
@override_settings(USE_TZ=True, TIME_ZONE='Europe/Paris')
def test_zinnia_entry_archive_index_with_timezone(self):
template_name_today = 'zinnia/archives/%s/entry_archive.html' % \
timezone.localtime(timezone.now()
).strftime('%Y/%m/%d')
response = self.check_publishing_context(
'/', 2, 3, 'entry_list', 2)
self.assertTemplateUsed(response, template_name_today)
@override_settings(USE_TZ=False)
def test_zinnia_entry_archive_year_no_timezone(self):
response = self.check_publishing_context(
'/2010/', 2, 3, 'entry_list', 3)
self.assertTemplateUsed(
response, 'zinnia/archives/2010/entry_archive_year.html')
self.assertEqual(response.context['previous_year'], None)
self.assertEqual(response.context['next_year'], None)
response = self.client.get('/2011/')
self.assertEqual(response.context['previous_year'], date(2010, 1, 1))
self.assertEqual(response.context['next_year'], None)
@override_settings(USE_TZ=True, TIME_ZONE='Europe/Paris')
def test_zinnia_entry_archive_year_with_timezone(self):
response = self.check_publishing_context(
'/2010/', 2, 3, 'entry_list', 3)
self.assertTemplateUsed(
response, 'zinnia/archives/2010/entry_archive_year.html')
self.assertEqual(response.context['previous_year'], None)
self.assertEqual(response.context['next_year'], None)
response = self.client.get('/2011/')
self.assertEqual(response.context['previous_year'], date(2010, 1, 1))
self.assertEqual(response.context['next_year'], None)
@override_settings(USE_TZ=False)
def test_zinnia_entry_archive_week_no_timezone(self):
response = self.check_publishing_context(
'/2010/week/00/', 1, 2, 'entry_list', 3)
self.assertTemplateUsed(
response, 'zinnia/archives/2010/week/00/entry_archive_week.html')
# All days in a new year preceding the first Monday
# are considered to be in week 0.
self.assertEqual(response.context['week'], date(2009, 12, 28))
self.assertEqual(response.context['week_end_day'], date(2010, 1, 3))
self.assertEqual(response.context['previous_week'], None)
self.assertEqual(response.context['next_week'], date(2010, 5, 31))
self.assertEqual(list(response.context['date_list']),
[datetime(2010, 1, 1)])
response = self.client.get('/2011/week/01/')
self.assertEqual(response.context['week'], date(2011, 1, 3))
self.assertEqual(response.context['week_end_day'], date(2011, 1, 9))
self.assertEqual(response.context['previous_week'], date(2010, 5, 31))
self.assertEqual(response.context['next_week'], None)
self.assertEqual(list(response.context['date_list']), [])
@override_settings(USE_TZ=True, TIME_ZONE='Europe/Paris')
def test_zinnia_entry_archive_week_with_timezone(self):
response = self.check_publishing_context(
'/2010/week/00/', 1, 2, 'entry_list', 3)
self.assertTemplateUsed(
response, 'zinnia/archives/2010/week/00/entry_archive_week.html')
# All days in a new year preceding the first Monday
# are considered to be in week 0.
self.assertEqual(response.context['week'], date(2009, 12, 28))
self.assertEqual(response.context['week_end_day'], date(2010, 1, 3))
self.assertEqual(response.context['previous_week'], None)
self.assertEqual(response.context['next_week'], date(2010, 5, 31))
self.assertEqual(response.context['date_list'][0].date(),
datetime(2010, 1, 2).date())
response = self.client.get('/2011/week/01/')
self.assertEqual(response.context['week'], date(2011, 1, 3))
self.assertEqual(response.context['week_end_day'], date(2011, 1, 9))
self.assertEqual(response.context['previous_week'], date(2010, 5, 31))
self.assertEqual(response.context['next_week'], None)
self.assertEqual(list(response.context['date_list']), [])
@override_settings(USE_TZ=False)
def test_zinnia_entry_archive_month_no_timezone(self):
response = self.check_publishing_context(
'/2010/01/', 1, 2, 'entry_list', 3)
self.assertTemplateUsed(
response, 'zinnia/archives/2010/month/01/entry_archive_month.html')
self.assertEqual(response.context['previous_month'], None)
self.assertEqual(response.context['next_month'], date(2010, 5, 1))
self.assertEqual(list(response.context['date_list']),
[datetime(2010, 1, 1)])
response = self.client.get('/2010/05/')
self.assertEqual(response.context['previous_month'], date(2010, 1, 1))
self.assertEqual(response.context['next_month'], None)
self.assertEqual(list(response.context['date_list']),
[datetime(2010, 5, 31)])
@override_settings(USE_TZ=True, TIME_ZONE='Europe/Paris')
def test_zinnia_entry_archive_month_with_timezone(self):
response = self.check_publishing_context(
'/2010/01/', 1, 2, 'entry_list', 3)
self.assertTemplateUsed(
response, 'zinnia/archives/2010/month/01/entry_archive_month.html')
self.assertEqual(response.context['previous_month'], None)
self.assertEqual(response.context['next_month'], date(2010, 6, 1))
self.assertEqual(response.context['date_list'][0].date(),
datetime(2010, 1, 2).date())
response = self.client.get('/2010/06/')
self.assertEqual(response.context['previous_month'], date(2010, 1, 1))
self.assertEqual(response.context['next_month'], None)
self.assertEqual(response.context['date_list'][0].date(),
datetime(2010, 6, 1).date())
@override_settings(USE_TZ=False)
def test_zinnia_entry_archive_day_no_timezone(self):
response = self.check_publishing_context(
'/2010/01/01/', 1, 2, 'entry_list', 2)
self.assertTemplateUsed(
response, 'zinnia/archives/2010/01/01/entry_archive_day.html')
self.assertEqual(response.context['previous_month'], None)
self.assertEqual(response.context['next_month'], date(2010, 5, 1))
self.assertEqual(response.context['previous_day'], None)
self.assertEqual(response.context['next_day'], date(2010, 5, 31))
response = self.client.get('/2010/05/31/')
self.assertEqual(response.context['previous_month'], date(2010, 1, 1))
self.assertEqual(response.context['next_month'], None)
self.assertEqual(response.context['previous_day'], date(2010, 1, 1))
self.assertEqual(response.context['next_day'], None)
@override_settings(USE_TZ=True, TIME_ZONE='Europe/Paris')
def test_zinnia_entry_archive_day_with_timezone(self):
response = self.check_publishing_context(
'/2010/01/02/', 1, 2, 'entry_list', 2)
self.assertTemplateUsed(
response, 'zinnia/archives/2010/01/02/entry_archive_day.html')
self.assertEqual(response.context['previous_month'], None)
self.assertEqual(response.context['next_month'], date(2010, 6, 1))
self.assertEqual(response.context['previous_day'], None)
self.assertEqual(response.context['next_day'], date(2010, 6, 1))
response = self.client.get('/2010/06/01/')
self.assertEqual(response.context['previous_month'], date(2010, 1, 1))
self.assertEqual(response.context['next_month'], None)
self.assertEqual(response.context['previous_day'], date(2010, 1, 2))
self.assertEqual(response.context['next_day'], None)
@override_settings(USE_TZ=False)
def test_zinnia_entry_archive_today_no_timezone(self):
template_name_today = 'zinnia/archives/%s/entry_archive_today.html' % \
date.today().strftime('%Y/%m/%d')
with self.assertNumQueries(2):
response = self.client.get('/today/')
self.assertTemplateUsed(response, template_name_today)
self.assertEqual(response.context['day'], date.today())
self.assertEqual(response.context['previous_month'], date(2010, 5, 1))
self.assertEqual(response.context['next_month'], None)
self.assertEqual(response.context['previous_day'], date(2010, 5, 31))
self.assertEqual(response.context['next_day'], None)
@override_settings(USE_TZ=True, TIME_ZONE='Europe/Paris')
def test_zinnia_entry_archive_today_with_timezone(self):
template_name_today = 'zinnia/archives/%s/entry_archive_today.html' % \
timezone.localtime(timezone.now()
).strftime('%Y/%m/%d')
with self.assertNumQueries(2):
response = self.client.get('/today/')
self.assertTemplateUsed(response, template_name_today)
self.assertEqual(response.context['day'], timezone.localtime(
timezone.now()).date())
self.assertEqual(response.context['previous_month'], date(2010, 6, 1))
self.assertEqual(response.context['next_month'], None)
self.assertEqual(response.context['previous_day'], date(2010, 6, 1))
self.assertEqual(response.context['next_day'], None)
def test_zinnia_entry_shortlink(self):
with self.assertNumQueries(1):
response = self.client.get('/%s/' % base36(self.first_entry.pk))
self.assertEqual(response.status_code, 301)
self.assertEqual(
response['Location'],
'http://testserver%s' % self.first_entry.get_absolute_url())
def test_zinnia_entry_shortlink_unpublished(self):
"""
https://github.com/Fantomas42/django-blog-zinnia/pull/367
"""
self.first_entry.sites.clear()
with self.assertNumQueries(1):
response = self.client.get('/%s/' % base36(self.first_entry.pk))
self.assertEqual(response.status_code, 404)
def test_zinnia_entry_detail(self):
entry = self.first_entry
with self.assertNumQueries(1):
response = self.client.get(entry.get_absolute_url())
self.assertEqual(response.status_code, 200)
entry.sites.clear()
with self.assertNumQueries(1):
response = self.client.get(entry.get_absolute_url())
self.assertEqual(response.status_code, 404)
entry.sites.add(self.site)
entry.status = DRAFT
entry.save()
with self.assertNumQueries(2):
response = self.client.get(entry.get_absolute_url())
self.assertEqual(response.status_code, 404)
entry.status = PUBLISHED
entry.start_publication = datetime(2020, 1, 1, 12, 0)
entry.save()
with self.assertNumQueries(2):
response = self.client.get(entry.get_absolute_url())
self.assertEqual(response.status_code, 404)
entry.start_publication = None
entry.save()
with self.assertNumQueries(1):
response = self.client.get(entry.get_absolute_url())
self.assertEqual(response.status_code, 200)
@override_settings(USE_TZ=False)
def test_zinnia_entry_detail_no_timezone(self):
entry = self.create_published_entry()
entry.detail_template = 'entry_custom.html'
entry.save()
entry.sites.add(Site.objects.get_current())
with self.assertNumQueries(1):
response = self.client.get(entry.get_absolute_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(
response,
'zinnia/archives/2010/01/01/my-test-entry_entry_custom.html')
    @override_settings(USE_TZ=True, TIME_ZONE='Europe/Paris')
    def test_zinnia_entry_detail_with_timezone(self):
        """With TZ support under Europe/Paris the creation date shifts
        one day forward, so the template path uses 2010/01/02."""
        entry = self.create_published_entry()
        entry.detail_template = 'entry_custom.html'
        entry.save()
        with self.assertNumQueries(1):
            response = self.client.get(entry.get_absolute_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(
            response,
            'zinnia/archives/2010/01/02/my-test-entry_entry_custom.html')
    @override_settings(USE_TZ=False)
    def test_zinnia_entry_detail_login(self):
        """A login_required entry first serves the login template, then
        the real detail template once valid credentials are posted."""
        entry = self.create_published_entry()
        entry.login_required = True
        entry.save()
        with self.assertNumQueries(1):
            response = self.client.get(entry.get_absolute_url())
        self.assertTemplateUsed(response, 'zinnia/login.html')
        response = self.client.post(entry.get_absolute_url(),
                                    {'username': 'admin',
                                     'password': 'password'})
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(
            response,
            'zinnia/archives/2010/01/01/my-test-entry_entry_detail.html')
    @override_settings(USE_TZ=False)
    def test_zinnia_entry_detail_password(self):
        """A password-protected entry serves the password template, flags
        an error on a wrong password, and renders the detail template
        after the correct one is posted."""
        entry = self.create_published_entry()
        entry.password = 'password'
        entry.save()
        with self.assertNumQueries(1):
            response = self.client.get(entry.get_absolute_url())
        self.assertTemplateUsed(response, 'zinnia/password.html')
        self.assertEqual(response.context['error'], False)
        with self.assertNumQueries(1):
            response = self.client.post(entry.get_absolute_url(),
                                        {'entry_password': 'bad_password'})
        self.assertTemplateUsed(response, 'zinnia/password.html')
        self.assertEqual(response.context['error'], True)
        with self.assertNumQueries(7):
            response = self.client.post(entry.get_absolute_url(),
                                        {'entry_password': 'password'})
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(
            response,
            'zinnia/archives/2010/01/01/my-test-entry_entry_detail.html')
    @override_settings(USE_TZ=False)
    def test_zinnia_entry_detail_login_password(self):
        """When an entry requires both login and a password, login is
        checked first, then the entry password."""
        # Detach the last_login update so query counts stay deterministic;
        # reconnected at the end of the test.
        user_logged_in.disconnect(update_last_login)
        entry = self.create_published_entry()
        entry.password = 'password'
        entry.login_required = True
        entry.save()
        with self.assertNumQueries(1):
            response = self.client.get(entry.get_absolute_url())
        self.assertTemplateUsed(response, 'zinnia/login.html')
        with self.assertNumQueries(12):
            response = self.client.post(entry.get_absolute_url(),
                                        {'username': 'admin',
                                         'password': 'password'})
        self.assertEqual(response.status_code, 200)
        # Logged in, but the entry password is still required.
        self.assertTemplateUsed(response, 'zinnia/password.html')
        self.assertEqual(response.context['error'], False)
        with self.assertNumQueries(7):
            response = self.client.post(entry.get_absolute_url(),
                                        {'entry_password': 'password'})
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(
            response,
            'zinnia/archives/2010/01/01/my-test-entry_entry_detail.html')
        user_logged_in.connect(update_last_login)
    def test_zinnia_entry_detail_preview(self):
        """Draft entries 404 for anonymous visitors but can be previewed
        by a superuser and by the entry's own author."""
        self.first_entry.status = DRAFT
        self.first_entry.save()
        url = self.first_entry.get_absolute_url()
        with self.assertNumQueries(2):
            response = self.client.get(url)
        self.assertEqual(response.status_code, 404)
        Author.objects.create_superuser(
            'root', 'root@example.com', 'password')
        self.client.login(username='root', password='password')
        with self.assertNumQueries(3):
            response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.client.login(username=self.author.username, password='password')
        with self.assertNumQueries(6):
            response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
def test_zinnia_entry_channel(self):
self.check_publishing_context(
'/channel-test/', 2, 3, 'entry_list', 1)
    def test_zinnia_category_list(self):
        """The category index lists a category only once it holds a
        published entry; the count grows after linking the new one."""
        category = Category.objects.create(
            title='New category', slug='new-category')
        self.check_publishing_context(
            '/categories/', 1,
            friendly_context='category_list',
            queries=0)
        self.first_entry.categories.add(category)
        self.check_publishing_context('/categories/', 2)
    def test_zinnia_category_detail(self):
        """Category detail renders the per-category template override and
        exposes the category in the context."""
        response = self.check_publishing_context(
            '/categories/tests/', 2, 3, 'entry_list', 2)
        self.assertTemplateUsed(
            response, 'zinnia/category/tests/entry_list.html')
        self.assertEqual(response.context['category'].slug, 'tests')
def test_zinnia_category_detail_paginated(self):
"""Test case reproducing issue #42 on category
detail view paginated"""
for i in range(PAGINATION):
params = {'title': 'My entry %i' % i,
'content': 'My content %i' % i,
'slug': 'my-entry-%i' % i,
'creation_date': datetime(2010, 1, 1),
'status': PUBLISHED}
entry = Entry.objects.create(**params)
entry.sites.add(self.site)
entry.categories.add(self.category)
response = self.client.get('/categories/tests/')
self.assertEqual(len(response.context['object_list']), PAGINATION)
response = self.client.get('/categories/tests/?page=2')
self.assertEqual(len(response.context['object_list']), 2)
response = self.client.get('/categories/tests/page/2/')
self.assertEqual(len(response.context['object_list']), 2)
self.assertEqual(response.context['category'].slug, 'tests')
    def test_zinnia_author_list(self):
        """The author index lists a user only once they author a
        published entry; the count grows after linking the new one."""
        user = Author.objects.create(username='new-user',
                                     email='new_user@example.com')
        self.check_publishing_context(
            '/authors/', 1,
            friendly_context='author_list',
            queries=0)
        self.first_entry.authors.add(user)
        self.check_publishing_context('/authors/', 2)
    def test_zinnia_author_detail(self):
        """Author detail renders the per-author template override and
        exposes the author in the context."""
        response = self.check_publishing_context(
            '/authors/admin/', 2, 3, 'entry_list', 2)
        self.assertTemplateUsed(
            response, 'zinnia/author/admin/entry_list.html')
        self.assertEqual(response.context['author'].username, 'admin')
def test_zinnia_author_detail_paginated(self):
"""Test case reproducing issue #207 on author
detail view paginated"""
for i in range(PAGINATION):
params = {'title': 'My entry %i' % i,
'content': 'My content %i' % i,
'slug': 'my-entry-%i' % i,
'creation_date': datetime(2010, 1, 1),
'status': PUBLISHED}
entry = Entry.objects.create(**params)
entry.sites.add(self.site)
entry.authors.add(self.author)
response = self.client.get('/authors/admin/')
self.assertEqual(len(response.context['object_list']), PAGINATION)
response = self.client.get('/authors/admin/?page=2')
self.assertEqual(len(response.context['object_list']), 2)
response = self.client.get('/authors/admin/page/2/')
self.assertEqual(len(response.context['object_list']), 2)
self.assertEqual(response.context['author'].username, 'admin')
    def test_zinnia_tag_list(self):
        """The tag index grows once an entry gains an extra tag."""
        self.check_publishing_context(
            '/tags/', 1,
            friendly_context='tag_list',
            queries=1)
        self.first_entry.tags = 'tests, tag'
        self.first_entry.save()
        self.check_publishing_context('/tags/', 2)
    def test_zinnia_tag_detail(self):
        """Tag detail renders the per-tag template override, exposes the
        tag in the context, and 404s for an unknown tag."""
        response = self.check_publishing_context(
            '/tags/tests/', 2, 3, 'entry_list', 2)
        self.assertTemplateUsed(
            response, 'zinnia/tag/tests/entry_list.html')
        self.assertEqual(response.context['tag'].name, 'tests')
        response = self.client.get('/tags/404/')
        self.assertEqual(response.status_code, 404)
def test_zinnia_tag_detail_paginated(self):
for i in range(PAGINATION):
params = {'title': 'My entry %i' % i,
'content': 'My content %i' % i,
'slug': 'my-entry-%i' % i,
'tags': 'tests',
'creation_date': datetime(2010, 1, 1),
'status': PUBLISHED}
entry = Entry.objects.create(**params)
entry.sites.add(self.site)
response = self.client.get('/tags/tests/')
self.assertEqual(len(response.context['object_list']), PAGINATION)
response = self.client.get('/tags/tests/?page=2')
self.assertEqual(len(response.context['object_list']), 2)
response = self.client.get('/tags/tests/page/2/')
self.assertEqual(len(response.context['object_list']), 2)
self.assertEqual(response.context['tag'].name, 'tests')
    def test_zinnia_entry_search(self):
        """Search returns matches for a valid pattern and flags errors
        for too-short or missing patterns."""
        self.check_publishing_context(
            '/search/?pattern=test', 2, 3, 'entry_list', 1)
        response = self.client.get('/search/?pattern=ab')
        self.assertEqual(len(response.context['object_list']), 0)
        self.assertEqual(response.context['error'],
                         _('The pattern is too short'))
        response = self.client.get('/search/')
        self.assertEqual(len(response.context['object_list']), 0)
        self.assertEqual(response.context['error'],
                         _('No pattern to search found'))
    def test_zinnia_entry_random(self):
        """The random view redirects (302) to some 2010 entry URL."""
        response = self.client.get('/random/', follow=True)
        self.assertTrue(response.redirect_chain[0][0].startswith(
            'http://testserver/2010/'))
        self.assertEqual(response.redirect_chain[0][1], 302)
    def test_zinnia_sitemap(self):
        """The HTML sitemap lists published entries and their categories,
        and reflects newly published content."""
        with self.assertNumQueries(0):
            response = self.client.get('/sitemap/')
        self.assertEqual(len(response.context['entries']), 2)
        self.assertEqual(len(response.context['categories']), 1)
        entry = self.create_published_entry()
        entry.categories.add(Category.objects.create(title='New category',
                                                     slug='new-category'))
        response = self.client.get('/sitemap/')
        self.assertEqual(len(response.context['entries']), 3)
        self.assertEqual(len(response.context['categories']), 2)
    def test_zinnia_trackback(self):
        """Trackback endpoint: 404 for unknown entries, redirect without
        payload, XML error while disabled, one registered trackback once
        enabled, and a duplicate error on the second registration."""
        # Clear the cache of user flagger to avoid error on MySQL
        get_user_flagger.cache_clear()
        response = self.client.post('/trackback/404/')
        trackback_url = '/trackback/%s/' % self.first_entry.pk
        self.assertEqual(response.status_code, 404)
        self.assertEqual(self.client.post(trackback_url).status_code, 301)
        self.first_entry.trackback_enabled = False
        self.first_entry.save()
        self.assertEqual(self.first_entry.trackback_count, 0)
        response = self.client.post(trackback_url,
                                    {'url': 'http://example.com'})
        self.assertEqual(response['Content-Type'], 'text/xml')
        self.assertEqual(response.context['error'],
                         'Trackback is not enabled for Test 1')
        self.first_entry.trackback_enabled = True
        self.first_entry.save()
        connect_discussion_signals()
        get_user_flagger()  # Memoize user flagger for stable query number
        if comments.get_comment_app_name() == comments.DEFAULT_COMMENTS_APP:
            # If we are using the default comment app,
            # we can count the database queries executed.
            with self.assertNumQueries(8):
                response = self.client.post(trackback_url,
                                            {'url': 'http://example.com'})
        else:
            response = self.client.post(trackback_url,
                                        {'url': 'http://example.com'})
        self.assertEqual(response['Content-Type'], 'text/xml')
        self.assertEqual('error' in response.context, False)
        disconnect_discussion_signals()
        entry = Entry.objects.get(pk=self.first_entry.pk)
        self.assertEqual(entry.trackback_count, 1)
        response = self.client.post(trackback_url,
                                    {'url': 'http://example.com'})
        self.assertEqual(response.context['error'],
                         'Trackback is already registered')
    def test_zinnia_trackback_on_entry_without_author(self):
        """A trackback on an entry with no authors still succeeds."""
        # Clear the cache of user flagger to avoid error on MySQL
        get_user_flagger.cache_clear()
        self.first_entry.authors.clear()
        response = self.client.post('/trackback/%s/' % self.first_entry.pk,
                                    {'url': 'http://example.com'})
        self.assertEqual(response['Content-Type'], 'text/xml')
        self.assertEqual('error' in response.context, False)
    def test_capabilities(self):
        """Capability endpoints serve their expected content types."""
        self.check_capabilities('/humans.txt', 'text/plain', 0)
        self.check_capabilities('/rsd.xml', 'application/rsd+xml', 0)
        self.check_capabilities('/wlwmanifest.xml',
                                'application/wlwmanifest+xml', 0)
        self.check_capabilities('/opensearch.xml',
                                'application/opensearchdescription+xml', 0)
    def test_comment_success(self):
        """The comment-success view shows the posted template for missing
        or private comments and redirects to the commented object once
        the comment is public."""
        with self.assertNumQueries(0):
            response = self.client.get('/comments/success/')
        self.assertTemplateUsed(response, 'comments/zinnia/entry/posted.html')
        self.assertEqual(response.context['comment'], None)
        with self.assertNumQueries(1):
            response = self.client.get('/comments/success/?c=404')
        self.assertEqual(response.context['comment'], None)
        comment = comments.get_model().objects.create(
            submit_date=timezone.now(),
            comment='My Comment 1', content_object=self.category,
            site=self.site, is_public=False)
        success_url = '/comments/success/?c=%s' % comment.pk
        with self.assertNumQueries(1):
            response = self.client.get(success_url)
        self.assertEqual(response.context['comment'], comment)
        comment.is_public = True
        comment.save()
        with self.assertNumQueries(5):
            response = self.client.get(success_url, follow=True)
        self.assertEqual(
            response.redirect_chain[1],
            ('http://example.com/categories/tests/', 302))
    def test_comment_success_invalid_pk_issue_292(self):
        """A non-numeric comment pk does not crash the success view
        (regression for issue #292)."""
        with self.assertNumQueries(0):
            response = self.client.get('/comments/success/?c=file.php')
        self.assertTemplateUsed(response, 'comments/zinnia/entry/posted.html')
        self.assertEqual(response.context['comment'], None)
    def test_quick_entry(self):
        """The quick-entry view requires a superuser, redirects to the
        admin add form on GET or invalid POST (prefilled via the
        querystring), and saves a draft entry on a valid POST."""
        Author.objects.create_superuser(
            'root', 'root@example.com', 'password')
        # Anonymous -> login redirect.
        response = self.client.get('/quick-entry/')
        self.assertEqual(response.status_code, 302)
        self.assertEqual(
            response['Location'],
            'http://testserver/accounts/login/?next=/quick-entry/')
        # Staff-but-not-superuser 'admin' -> still redirected to login.
        self.client.login(username='admin', password='password')
        response = self.client.get('/quick-entry/')
        self.assertEqual(response.status_code, 302)
        self.assertEqual(
            response['Location'],
            'http://testserver/accounts/login/?next=/quick-entry/')
        self.client.logout()
        self.client.login(username='root', password='password')
        response = self.client.get('/quick-entry/')
        self.assertEqual(response.status_code, 302)
        self.assertEqual(
            response['Location'],
            'http://testserver/admin/zinnia/entry/add/')
        # Incomplete POST -> back to the admin form with prefilled data.
        response = self.client.post('/quick-entry/', {'content': 'test'})
        self.assertEqual(response.status_code, 302)
        self.assertTrue(urlEqual(
            response['Location'],
            'http://testserver/admin/zinnia/entry/add/'
            '?tags=&title=&sites=1&content='
            '%3Cp%3Etest%3C%2Fp%3E&authors=2&slug='))
        response = self.client.post('/quick-entry/',
                                    {'title': 'test', 'tags': 'test',
                                     'content': 'Test content',
                                     'save_draft': ''})
        entry = Entry.objects.get(title='test')
        self.assertEqual(response.status_code, 302)
        self.assertEqual(
            response['Location'],
            'http://testserver%s' % entry.get_absolute_url())
        self.assertEqual(entry.status, DRAFT)
        self.assertEqual(entry.title, 'test')
        self.assertEqual(entry.tags, 'test')
        self.assertEqual(entry.content, '<p>Test content</p>')
    def test_quick_entry_non_ascii_title_issue_153(self):
        """A non-ASCII title that slugifies to nothing redirects back to
        the admin form, URL-encoded (regression for issue #153)."""
        Author.objects.create_superuser(
            'root', 'root@example.com', 'password')
        self.client.login(username='root', password='password')
        response = self.client.post('/quick-entry/',
                                    {'title': 'тест', 'tags': 'test-2',
                                     'content': 'Test content',
                                     'save_draft': ''})
        self.assertEqual(response.status_code, 302)
        self.assertTrue(urlEqual(
            response['Location'],
            'http://testserver/admin/zinnia/entry/add/'
            '?tags=test-2&title=%D1%82%D0%B5%D1%81%D1%82'
            '&sites=1&content=%3Cp%3ETest+content%3C%2Fp%3E'
            '&authors=2&slug='))
    def test_quick_entry_markup_language_issue_270(self):
        """With a non-HTML markup language the quick-entry content is
        stored verbatim, not wrapped in <p> tags (issue #270)."""
        original_markup_language = quick_entry.MARKUP_LANGUAGE
        quick_entry.MARKUP_LANGUAGE = 'restructuredtext'
        Author.objects.create_superuser(
            'root', 'root@example.com', 'password')
        self.client.login(username='root', password='password')
        response = self.client.post('/quick-entry/',
                                    {'title': 'Test markup',
                                     'tags': 'test, markup',
                                     'content': 'Hello *World* !',
                                     'save_draft': ''})
        entry = Entry.objects.get(title='Test markup')
        self.assertEqual(response.status_code, 302)
        self.assertEqual(
            response['Location'],
            'http://testserver%s' % entry.get_absolute_url())
        self.assertEqual(
            entry.content,
            'Hello *World* !')
        # Restore the module-level setting for subsequent tests.
        quick_entry.MARKUP_LANGUAGE = original_markup_language
@override_settings(
    ROOT_URLCONF='zinnia.tests.implementations.urls.custom_detail_views'
)
class CustomDetailViewsTestCase(ViewsBaseCase):
    """
    Tests with an alternate urls.py that modifies how author_detail,
    tags_detail and categories_detail views to be called with a custom
    template_name keyword argument and an extra_context.
    """
    def test_custom_category_detail(self):
        """The custom template and extra context reach the category view."""
        response = self.check_publishing_context('/categories/tests/', 2, 3)
        self.assertTemplateUsed(response, 'zinnia/entry_custom_list.html')
        self.assertEqual(response.context['category'].slug, 'tests')
        self.assertEqual(response.context['extra'], 'context')
    def test_custom_author_detail(self):
        """The custom template and extra context reach the author view."""
        response = self.check_publishing_context('/authors/admin/', 2, 3)
        self.assertTemplateUsed(response, 'zinnia/entry_custom_list.html')
        self.assertEqual(response.context['author'].username, 'admin')
        self.assertEqual(response.context['extra'], 'context')
    def test_custom_tag_detail(self):
        """The custom template and extra context reach the tag view."""
        response = self.check_publishing_context('/tags/tests/', 2, 3)
        self.assertTemplateUsed(response, 'zinnia/entry_custom_list.html')
        self.assertEqual(response.context['tag'].name, 'tests')
        self.assertEqual(response.context['extra'], 'context')
| {
"content_hash": "14b86c6457d8bb6db523b9b062954011",
"timestamp": "",
"source": "github",
"line_count": 797,
"max_line_length": 79,
"avg_line_length": 47.27352572145546,
"alnum_prop": 0.607028160416169,
"repo_name": "Maplecroft/django-blog-zinnia",
"id": "5b2d4047a8014f397c9edfa4bf8189efd036979b",
"size": "37696",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "zinnia/tests/test_views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "77438"
},
{
"name": "HTML",
"bytes": "76185"
},
{
"name": "JavaScript",
"bytes": "235617"
},
{
"name": "Makefile",
"bytes": "1789"
},
{
"name": "Python",
"bytes": "519684"
}
],
"symlink_target": ""
} |
import setpath
import functions
def ifthenelse(*args):
    """
    .. function:: ifthenelse(condition, x, y)

    Returns *x* if *condition* is true, else returns *y*.

    .. templateforparams Parameters:

    :condition: value tested for truthiness
    :x: value returned when *condition* is true
    :y: optional value returned when *condition* is false
        (defaults to null)
    :returns: *x* or *y*

    .. note::

        The difference with the *if* construct in most programming languages
        is that *x* and *y* expressions will always be evaluated.

    Examples:

    >>> sql("select ifthenelse(1>0,'yes','no') as answer")
    answer
    ------
    yes
    """
    # The condition and the "then" value are mandatory.
    if len(args) < 2:
        raise functions.OperatorError("ifthenelse",
                                      "operator needs at least two inputs")
    if args[0]:
        return args[1]
    # The "else" value is optional; default to SQL null (None).
    if len(args) > 2:
        return args[2]
    return None


ifthenelse.registered = True
# Self-test harness: only runs when the module is loaded as a top-level
# module (no dot in __name__), and runs the doctests when executed directly.
if not ('.' in __name__):
    """
    This is needed to be able to test the function, put it at the end of every
    new function you create
    """
    import sys
    import setpath
    from functions import *
    testfunction()
    if __name__ == "__main__":
        # NOTE(review): reload() and sys.setdefaultencoding() exist only on
        # Python 2; this branch would fail under Python 3.
        reload(sys)
        sys.setdefaultencoding('utf-8')
        import doctest
        doctest.testmod()
| {
"content_hash": "ae688ac34536ec72f8b89c3322f9062c",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 88,
"avg_line_length": 23.39622641509434,
"alnum_prop": 0.5879032258064516,
"repo_name": "XristosMallios/cache",
"id": "cfca0ecf3cf661e3ab7598b33fd192750b0f63e7",
"size": "1257",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "exareme-tools/madis/src/functions/row/boolean.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "66130"
},
{
"name": "HTML",
"bytes": "1330883"
},
{
"name": "Java",
"bytes": "4022490"
},
{
"name": "JavaScript",
"bytes": "6100034"
},
{
"name": "PHP",
"bytes": "124335"
},
{
"name": "Python",
"bytes": "2478415"
},
{
"name": "R",
"bytes": "671"
},
{
"name": "Shell",
"bytes": "15240"
}
],
"symlink_target": ""
} |
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
from mock.mock import MagicMock, call, patch
import resource_management.core.source
from stacks.utils.RMFTestCase import *
import re
class TestStormBase(RMFTestCase):
  """Shared assertion helpers for the Storm service stack-script tests.

  Subclasses drive a stack command and then call one of the
  assert_configure_* helpers to verify the exact resource sequence the
  scripts are expected to schedule.
  """
  COMMON_SERVICES_PACKAGE_DIR = "STORM/0.9.1/package"
  STACK_VERSION = "2.1"

  def assert_configure_default(self, confDir="/etc/storm/conf", has_metrics=False, legacy=True):
    """Assert the resource sequence of a default (non-secured) configure.

    :param confDir: Storm configuration directory to check against.
    :param has_metrics: also expect the AMS metrics sink wiring.
    :param legacy: expect the legacy metrics sink jar symlink command.
    :returns: the rendered storm.yaml content for further assertions.
    """
    import params
    self.assertResourceCalled('Directory', '/var/log/storm',
                              owner = 'storm',
                              group = 'hadoop',
                              mode = 0777,
                              create_parents = True,
                              cd_access='a',
    )
    self.assertResourceCalled('Directory', '/var/run/storm',
                              owner = 'storm',
                              group = 'hadoop',
                              create_parents = True,
                              cd_access='a',
                              mode=0755,
    )
    self.assertResourceCalled('Directory', '/hadoop/storm',
                              owner = 'storm',
                              group = 'hadoop',
                              create_parents = True,
                              cd_access='a',
                              mode=0755,
    )
    self.assertResourceCalled('Directory', confDir,
                              group = 'hadoop',
                              create_parents = True,
                              cd_access='a'
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/storm.conf',
                              content = Template('storm.conf.j2'),
                              owner = 'root',
                              group = 'root',
                              mode = 0644,
    )
    self.assertResourceCalled('File', confDir + '/config.yaml',
                              owner = 'storm',
                              content = Template('config.yaml.j2'),
                              group = 'hadoop',
    )
    storm_yarn_content = self.call_storm_template_and_assert(confDir=confDir)
    # Without security the JAAS placeholder must have been stripped out.
    self.assertTrue(storm_yarn_content.find('_JAAS_PLACEHOLDER') == -1, 'Placeholder have to be substituted')
    self.assertResourceCalled('File', confDir + '/storm-env.sh',
                              owner = 'storm',
                              content = InlineTemplate(self.getConfig()['configurations']['storm-env']['content'])
    )
    if has_metrics:
      # AMS sink: properties file plus a re-created symlink to whichever
      # sink jar (legacy or current) is present on disk.
      self.assertResourceCalled('File', confDir + '/storm-metrics2.properties',
                                content = Template('storm-metrics2.properties.j2'),
                                owner = 'storm',
                                group = 'hadoop',
      )
      self.assertResourceCalled('Link', '/usr/lib/storm/lib//ambari-metrics-storm-sink.jar',
                                action = ['delete'],
      )
      self.assertResourceCalled('Link', '/usr/lib/storm/lib/ambari-metrics-storm-sink.jar',
                                action = ['delete'],
      )
      if legacy:
        self.assertResourceCalled('Execute', 'ambari-sudo.sh ln -s /usr/lib/storm/lib/ambari-metrics-storm-sink-legacy-with-common-*.jar /usr/lib/storm/lib//ambari-metrics-storm-sink.jar',
            not_if = 'ls /usr/lib/storm/lib//ambari-metrics-storm-sink.jar',
            only_if = 'ls /usr/lib/storm/lib/ambari-metrics-storm-sink-legacy-with-common-*.jar',
        )
      else:
        self.assertResourceCalled('Execute', 'ambari-sudo.sh ln -s /usr/lib/storm/lib/ambari-metrics-storm-sink-with-common-*.jar /usr/lib/storm/lib//ambari-metrics-storm-sink.jar',
            not_if = 'ls /usr/lib/storm/lib//ambari-metrics-storm-sink.jar',
            only_if = 'ls /usr/lib/storm/lib/ambari-metrics-storm-sink-with-common-*.jar',
        )
    # Security disabled: any stale JAAS configs must be removed.
    self.assertResourceCalled('File', confDir + '/storm_jaas.conf',
                              action=['delete'],
    )
    self.assertResourceCalled('File', confDir + '/client_jaas.conf',
                              action=['delete'],
    )
    return storm_yarn_content

  def assert_configure_secured(self, confDir='/etc/storm/conf'):
    """Assert the resource sequence of a Kerberos-secured configure.

    :param confDir: Storm configuration directory to check against.
    :returns: the rendered storm.yaml content for further assertions.
    """
    import params
    self.assertResourceCalled('Directory', '/var/log/storm',
                              owner = 'storm',
                              group = 'hadoop',
                              mode = 0777,
                              create_parents = True,
                              cd_access='a',
    )
    self.assertResourceCalled('Directory', '/var/run/storm',
                              owner = 'storm',
                              group = 'hadoop',
                              create_parents = True,
                              cd_access='a',
                              mode=0755,
    )
    self.assertResourceCalled('Directory', '/hadoop/storm',
                              owner = 'storm',
                              group = 'hadoop',
                              create_parents = True,
                              cd_access='a',
                              mode=0755,
    )
    self.assertResourceCalled('Directory', confDir,
                              group = 'hadoop',
                              create_parents = True,
                              cd_access='a'
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/storm.conf',
                              content = Template('storm.conf.j2'),
                              owner = 'root',
                              group = 'root',
                              mode = 0644,
    )
    self.assertResourceCalled('File', confDir + '/config.yaml',
                              owner = 'storm',
                              content = Template('config.yaml.j2'),
                              group = 'hadoop',
    )
    storm_yarn_content = self.call_storm_template_and_assert(confDir=confDir)
    self.assertTrue(storm_yarn_content.find('_JAAS_PLACEHOLDER') == -1, 'Placeholder have to be substituted')
    self.assertResourceCalled('File', confDir + '/storm-env.sh',
                              owner = 'storm',
                              content = InlineTemplate(self.getConfig()['configurations']['storm-env']['content'])
    )
    # Secured cluster: the JAAS config must be rendered, not deleted.
    self.assertResourceCalled('TemplateConfig', confDir + '/storm_jaas.conf',
                              owner = 'storm',
                              mode = 0644
    )
    return storm_yarn_content

  def call_storm_template_and_assert(self, confDir="/etc/storm/conf"):
    """Render storm.yaml from storm-site, assert the File resource and
    return the rendered content."""
    import storm_yaml_utils
    with RMFTestCase.env as env:
      storm_yarn_temlate = storm_yaml_utils.yaml_config_template(self.getConfig()['configurations']['storm-site'])
      self.assertResourceCalled('File', confDir + '/storm.yaml',
                                owner = 'storm',
                                content= storm_yarn_temlate,
                                group = 'hadoop'
      )
    return storm_yarn_temlate.get_content()
| {
"content_hash": "811dbdd94abb6adf6892b9fa04c854c5",
"timestamp": "",
"source": "github",
"line_count": 173,
"max_line_length": 188,
"avg_line_length": 38.786127167630056,
"alnum_prop": 0.5864381520119225,
"repo_name": "arenadata/ambari",
"id": "dc615aeaff882d4fdcff1d3c47d1f5c1ec76a01c",
"size": "6733",
"binary": false,
"copies": "1",
"ref": "refs/heads/branch-adh-1.6",
"path": "ambari-server/src/test/python/stacks/2.1/STORM/test_storm_base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "46700"
},
{
"name": "C",
"bytes": "331204"
},
{
"name": "C#",
"bytes": "215907"
},
{
"name": "C++",
"bytes": "257"
},
{
"name": "CSS",
"bytes": "343739"
},
{
"name": "CoffeeScript",
"bytes": "8465"
},
{
"name": "Dockerfile",
"bytes": "6387"
},
{
"name": "EJS",
"bytes": "777"
},
{
"name": "FreeMarker",
"bytes": "2654"
},
{
"name": "Gherkin",
"bytes": "990"
},
{
"name": "Groovy",
"bytes": "15882"
},
{
"name": "HTML",
"bytes": "717983"
},
{
"name": "Handlebars",
"bytes": "1819641"
},
{
"name": "Java",
"bytes": "29172298"
},
{
"name": "JavaScript",
"bytes": "18571926"
},
{
"name": "Jinja",
"bytes": "1490416"
},
{
"name": "Less",
"bytes": "412933"
},
{
"name": "Makefile",
"bytes": "11111"
},
{
"name": "PHP",
"bytes": "149648"
},
{
"name": "PLpgSQL",
"bytes": "287501"
},
{
"name": "PowerShell",
"bytes": "2090340"
},
{
"name": "Python",
"bytes": "18507704"
},
{
"name": "R",
"bytes": "3943"
},
{
"name": "Ruby",
"bytes": "38590"
},
{
"name": "SCSS",
"bytes": "40072"
},
{
"name": "Shell",
"bytes": "924115"
},
{
"name": "Stylus",
"bytes": "820"
},
{
"name": "TSQL",
"bytes": "42351"
},
{
"name": "Vim script",
"bytes": "5813"
},
{
"name": "sed",
"bytes": "2303"
}
],
"symlink_target": ""
} |
from .._baseManager import BaseManager
from .PluginHandlers import PluginDescription, _Plugin
from .RunnableClass import MAINFILENAME, MAINCLASSNAME, Runnable
from sys import version_info
from os import path
from shutil import rmtree
from tempfile import mkdtemp
# Interpreter compatibility shim: provide unpack_archive and the module
# loading API appropriate to the running Python version.
if version_info[0] == 2:
    import imp

    def unpack_archive(filename, extract_dir=None, format=None):
        """Minimal Python 2 backport of :func:`shutil.unpack_archive`.

        `filename` is the name of the archive.
        `extract_dir` is the target directory; the current working
        directory is used when it is not provided.
        `format` is one of "zip", "tar", "gztar" or "bztar"; when omitted
        it is detected from the archive itself. A ValueError is raised
        for unknown formats.
        """
        # The previous copy was lifted from the Python 3 shutil source and
        # referenced shutil internals (_UNPACK_FORMATS, _find_unpack_format,
        # ReadError) plus an unimported `os` module, so it raised NameError
        # as soon as it was called. Reimplemented on top of tarfile/zipfile,
        # which cover every format this project uses ('gztar').
        import os
        import tarfile
        import zipfile
        if extract_dir is None:
            extract_dir = os.getcwd()
        if format is None:
            if zipfile.is_zipfile(filename):
                format = 'zip'
            elif tarfile.is_tarfile(filename):
                format = 'tar'
            else:
                raise ValueError(
                    "Unknown archive format '{0}'".format(filename))
        if format == 'zip':
            archive = zipfile.ZipFile(filename)
        elif format in ('tar', 'gztar', 'bztar'):
            # tarfile transparently handles plain and compressed tars.
            archive = tarfile.open(filename)
        else:
            raise ValueError("Unknown unpack format '{0}'".format(format))
        try:
            archive.extractall(extract_dir)
        finally:
            archive.close()
else:
    from shutil import unpack_archive
    if version_info[1] < 5:
        from importlib.machinery import SourceFileLoader
    else:
        from importlib import util
class PluginsManager(BaseManager):
    """Loads, tracks and tears down engine plugins.

    Plugins are shipped as gztar archives. Each added plugin is unpacked
    into a private temporary directory, its main module is imported with
    the loading API matching the running interpreter, and its main class
    (which must inherit from Runnable) is instantiated with the engine.
    """
    def __init__(self):
        super(PluginsManager, self).__init__()
        self._pluginPaths = {}      # ID -> path of the plugin archive
        self._tempPluginPaths = {}  # ID -> temp dir the archive was unpacked into
        self._enabled = {}          # ID -> enable flag
        self._plugins = {}          # ID -> loaded _Plugin wrapper
    def addPlugin(self, ID, pluginPath, isEnabled=True):
        """Register and immediately load the plugin archive at *pluginPath*.

        Raises RuntimeError when *ID* is already registered.
        """
        if ID in self._pluginPaths.keys():
            raise RuntimeError('the specified ID ({}) already exist.'.format(ID))
        self._pluginPaths[ID] = pluginPath
        self._enabled[ID] = isEnabled
        self._plugins[ID] = self._injectPlugin(ID, pluginPath)
    def removePlugin(self, ID):
        """Forget the plugin registered under *ID*.

        Raises RuntimeError when *ID* is unknown.
        """
        # NOTE(review): the unpacked temp dir stays in _tempPluginPaths and
        # on disk until terminate() runs - confirm this is intended.
        if ID not in self._pluginPaths.keys():
            raise RuntimeError('the specified ID ({}) does not exist.'.format(ID))
        self._enabled.pop(ID)
        self._pluginPaths.pop(ID)
        self._plugins.pop(ID)
    def setPluginEnableState(self, ID, stateBool):
        """Set the enabled flag of plugin *ID* (coerced to bool)."""
        if ID not in self._pluginPaths.keys():
            raise RuntimeError('the specified ID ({}) does not exist.'.format(ID))
        self._enabled[ID] = bool(stateBool)
    def _injectPlugin(self, ID, pluginPath):
        """Unpack the archive, import its main module and instantiate its
        main class; returns the _Plugin wrapper."""
        tempDir = mkdtemp(prefix='e3d_' + ID + '_')
        self._tempPluginPaths[ID] = tempDir
        unpack_archive(pluginPath, tempDir, 'gztar')
        plugDesc = PluginDescription.fromDisk(tempDir)
        # Derive a module name from the declared plugin name.
        module_name = plugDesc.name.lower().replace(' ', '_')
        mainFilePath = path.join(tempDir, MAINFILENAME)
        plugin_module = self._loadModule(mainFilePath, module_name)
        # _engine is presumably provided by BaseManager - TODO confirm.
        mainClass = getattr(plugin_module, MAINCLASSNAME)(self._engine)
        if not issubclass(type(mainClass), Runnable):
            raise TypeError('main class of plugin \'{}\' must inherith from Runnable'.format(plugDesc.name))
        data = {'name': ID, 'path': pluginPath}
        mainClass.preparePlugin(data)
        return _Plugin(plugDesc, mainClass, pluginPath)
    def _loadModule(self, mainFilePath, module_name):
        """Import *mainFilePath* as *module_name* using the API available
        on this interpreter (imp / SourceFileLoader / importlib.util)."""
        if version_info[0] == 2:
            plugin_module = imp.load_source(module_name, mainFilePath)
        else:
            if version_info[1] < 5:
                plugin_module = SourceFileLoader(module_name, mainFilePath).load_module()
            else:
                spec = util.spec_from_file_location(module_name, mainFilePath)
                plugin_module = util.module_from_spec(spec)
                spec.loader.exec_module(plugin_module)
        return plugin_module
    def preUpdatePlugins(self):
        """Run every plugin's pre-update hook."""
        for p in self._plugins.values():
            p.mainClass.onPreUpdate()
    def postUpdatePlugins(self):
        """Run every plugin's post-update hook."""
        for p in self._plugins.values():
            p.mainClass.onPostUpdate()
    def terminate(self):
        """Terminate every plugin and delete its unpacked temp directory,
        logging (not raising) any failure so teardown always completes."""
        for ID, p in self._plugins.items():
            try:
                p.mainClass.terminate()
            except Exception as ex:
                self._engine.log('plugin \'{}\' failed to terminate:\n\t{}'.format(p.description.name, str(ex)))
            try:
                rmtree(self._tempPluginPaths[ID])
            except Exception as ex:
                self._engine.log('error removing plugin \'{}\' folder:\n\t{}'.format(p.description.name, str(ex)))
| {
"content_hash": "fae81b633961f69b8994e6c0af80f2ca",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 114,
"avg_line_length": 38.48091603053435,
"alnum_prop": 0.6141638563777029,
"repo_name": "jr-garcia/Engendro3D",
"id": "5b82b2b0ddda34aebcf4ea8197122d9d4ff9c6c5",
"size": "5041",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "e3d/plugin_management/PluginsManagerClass.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "GLSL",
"bytes": "17621"
},
{
"name": "Python",
"bytes": "456046"
}
],
"symlink_target": ""
} |
import math
import torch
from torch._six import inf
from torch.distributions import constraints
from torch.distributions.transforms import AbsTransform
from torch.distributions.normal import Normal
from torch.distributions.transformed_distribution import TransformedDistribution
class HalfNormal(TransformedDistribution):
    r"""
    Half-normal distribution parameterized by `scale`::

        X ~ Normal(0, scale)
        Y = |X| ~ HalfNormal(scale)

    Example::

        >>> m = HalfNormal(torch.tensor([1.0]))
        >>> m.sample() # half-normal distributed with scale=1
        tensor([ 0.1046])

    Args:
        scale (float or Tensor): scale of the full Normal distribution
    """
    arg_constraints = {'scale': constraints.positive}
    support = constraints.positive
    has_rsample = True

    def __init__(self, scale, validate_args=None):
        # Fold a zero-mean Normal through |x| to obtain the half-normal.
        full_normal = Normal(0, scale)
        super(HalfNormal, self).__init__(full_normal, AbsTransform(),
                                         validate_args=validate_args)

    def expand(self, batch_shape, _instance=None):
        inst = self._get_checked_instance(HalfNormal, _instance)
        return super(HalfNormal, self).expand(batch_shape, _instance=inst)

    @property
    def scale(self):
        # The scale lives on the underlying Normal.
        return self.base_dist.scale

    @property
    def mean(self):
        # E[|X|] = scale * sqrt(2 / pi)
        return math.sqrt(2 / math.pi) * self.scale

    @property
    def variance(self):
        # Var[|X|] = scale^2 * (1 - 2 / pi)
        return (1 - 2 / math.pi) * self.scale.pow(2)

    def log_prob(self, value):
        # Twice the Normal density on [0, inf); -inf below the support.
        result = math.log(2) + self.base_dist.log_prob(value)
        result[value.expand(result.shape) < 0] = -inf
        return result

    def cdf(self, value):
        # P(|X| <= v) = 2 * Phi(v) - 1 for v >= 0.
        return 2 * self.base_dist.cdf(value) - 1

    def icdf(self, prob):
        # Inverse of the cdf above.
        return self.base_dist.icdf(0.5 * (prob + 1))

    def entropy(self):
        # Halving the support removes log(2) nats from the Normal entropy.
        return self.base_dist.entropy() - math.log(2)
| {
"content_hash": "4d924cb77eaf2c74a7cfa4cfbf77f3e2",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 80,
"avg_line_length": 29.75,
"alnum_prop": 0.6355042016806722,
"repo_name": "ryfeus/lambda-packs",
"id": "00d0015231c2e6f5784311939a631cc2e89e2bb0",
"size": "1904",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pytorch/source/torch/distributions/half_normal.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "9768343"
},
{
"name": "C++",
"bytes": "76566960"
},
{
"name": "CMake",
"bytes": "191097"
},
{
"name": "CSS",
"bytes": "153538"
},
{
"name": "Cuda",
"bytes": "61768"
},
{
"name": "Cython",
"bytes": "3110222"
},
{
"name": "Fortran",
"bytes": "110284"
},
{
"name": "HTML",
"bytes": "248658"
},
{
"name": "JavaScript",
"bytes": "62920"
},
{
"name": "MATLAB",
"bytes": "17384"
},
{
"name": "Makefile",
"bytes": "152150"
},
{
"name": "Python",
"bytes": "549307737"
},
{
"name": "Roff",
"bytes": "26398"
},
{
"name": "SWIG",
"bytes": "142"
},
{
"name": "Shell",
"bytes": "7790"
},
{
"name": "Smarty",
"bytes": "4090"
},
{
"name": "TeX",
"bytes": "152062"
},
{
"name": "XSLT",
"bytes": "305540"
}
],
"symlink_target": ""
} |
"""
Base classes and methods used by all kernels
"""
__author__ = 'lejlot'
import numpy as np
from abc import abstractmethod, ABCMeta
class Kernel(object):
    """
    Base, abstract kernel class.

    Subclasses must implement ``_compute`` (the kernel matrix between two
    datasets) and ``dim`` (feature-space dimension). Kernel objects support
    arithmetic composition: ``k1 + k2`` (KernelSum), ``k1 * k2``
    (KernelProduct), ``a * k`` / ``k / a`` for a scalar ``a``
    (ScaledKernel) and ``k ** d`` (KernelPower).
    """
    __metaclass__ = ABCMeta  # Python 2 ABC mechanism; inert (harmless) on Python 3

    def __call__(self, data_1, data_2):
        """Evaluate the kernel matrix between data_1 and data_2."""
        return self._compute(data_1, data_2)

    @abstractmethod
    def _compute(self, data_1, data_2):
        """
        Main method which given two lists data_1 and data_2, with
        N and M elements respectively should return a kernel matrix
        of size N x M where K_{ij} = K(data_1_i, data_2_j)
        """
        raise NotImplementedError('This is an abstract class')

    def gram(self, data):
        """
        Returns a Gramian, kernel matrix of matrix and itself
        """
        return self._compute(data, data)

    @abstractmethod
    def dim(self):
        """
        Returns dimension of the feature space
        """
        raise NotImplementedError('This is an abstract class')

    def __str__(self):
        return self.__class__.__name__

    def __repr__(self):
        return str(self)

    def __add__(self, kernel):
        return KernelSum(self, kernel)

    def __mul__(self, value):
        # Kernel * Kernel is a pointwise product; Kernel * scalar is scaling.
        if isinstance(value, Kernel):
            return KernelProduct(self, value)
        if isinstance(self, ScaledKernel):
            # Collapse nested scalings: (a * K) * b == (a * b) * K.
            return ScaledKernel(self._kernel, self._scale * value)
        return ScaledKernel(self, value)

    def __rmul__(self, value):
        return self.__mul__(value)

    def __div__(self, scale):
        # Python 2 classic-division operator.
        return ScaledKernel(self, 1. / scale)

    # FIX: also bind true division so ``kernel / scale`` works under
    # Python 3 (and Python 2 with ``from __future__ import division``),
    # where the ``/`` operator dispatches to __truediv__, not __div__.
    __truediv__ = __div__

    def __pow__(self, value):
        return KernelPower(self, value)
class KernelSum(Kernel):
    """
    Pointwise sum of a pair of kernels: K(x, y) = K1(x, y) + K2(x, y).
    """

    def __init__(self, kernel_1, kernel_2):
        self._kernel_1 = kernel_1
        self._kernel_2 = kernel_2

    def _compute(self, data_1, data_2):
        first = self._kernel_1._compute(data_1, data_2)
        second = self._kernel_2._compute(data_1, data_2)
        return first + second

    def dim(self):
        # It is too complex to analyze combined dimensionality, so we give a lower bound
        return max(self._kernel_1.dim(), self._kernel_2.dim())

    def __str__(self):
        return '(%s + %s)' % (self._kernel_1, self._kernel_2)
class KernelProduct(Kernel):
    """
    Pointwise product of a pair of kernels: K(x, y) = K1(x, y) * K2(x, y).
    """

    def __init__(self, kernel_1, kernel_2):
        self._kernel_1 = kernel_1
        self._kernel_2 = kernel_2

    def _compute(self, data_1, data_2):
        first = self._kernel_1._compute(data_1, data_2)
        second = self._kernel_2._compute(data_1, data_2)
        return first * second

    def dim(self):
        # It is too complex to analyze combined dimensionality, so we give a lower bound
        return max(self._kernel_1.dim(), self._kernel_2.dim())

    def __str__(self):
        return '(%s * %s)' % (self._kernel_1, self._kernel_2)
class KernelPower(Kernel):
    """
    A kernel raised to a natural (non-negative integer) power:
    K'(x, y) = K(x, y) ** d.
    """

    def __init__(self, kernel, d):
        self._kernel = kernel
        self._d = d
        # Only non-negative integer exponents are guaranteed to yield a
        # valid (positive semi-definite) kernel.
        if not isinstance(d, int) or d < 0:
            raise Exception('Kernel power is only defined for non-negative integer degrees')

    def _compute(self, data_1, data_2):
        return self._kernel._compute(data_1, data_2) ** self._d

    def dim(self):
        # It is too complex to analyze combined dimensionality, so we give a lower bound
        return self._kernel.dim()

    def __str__(self):
        return '%s^%s' % (self._kernel, self._d)
class ScaledKernel(Kernel):
    """
    A kernel scaled by a non-negative float: K'(x, y) = scale * K(x, y).
    """

    def __init__(self, kernel, scale):
        self._kernel = kernel
        self._scale = scale
        # A negative scale would break positive semi-definiteness.
        if scale < 0:
            raise Exception('Negation of the kernel is not a kernel!')

    def _compute(self, data_1, data_2):
        return self._scale * self._kernel._compute(data_1, data_2)

    def dim(self):
        return self._kernel.dim()

    def __str__(self):
        # Hide the factor when it is the identity scaling.
        if self._scale == 1.0:
            return str(self._kernel)
        return '%s %s' % (self._scale, self._kernel)
class GraphKernel(Kernel):
    """
    Base, abstract GraphKernel kernel class.

    Marker base class for kernels operating on graph-structured data;
    adds no behaviour of its own — presumably it exists so graph kernels
    can be distinguished from vectorial ones via isinstance checks.
    """
    pass
| {
"content_hash": "6e90130759686d94ad39c98ba07398f1",
"timestamp": "",
"source": "github",
"line_count": 160,
"max_line_length": 92,
"avg_line_length": 26.875,
"alnum_prop": 0.5704651162790698,
"repo_name": "gmum/pykernels",
"id": "d45ba126bf12a6017e3a7fb953244d20e4019af3",
"size": "4300",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pykernels/base.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "52220"
}
],
"symlink_target": ""
} |
"""
A module of deep feature selection based on stacked restricted Boltzman machine (deep belief net).
This module applies a deep structure with many hidden layers.
Thus, greedy layer-wise pretraining and supervised funetuning are used in optimization.
Copyright (c) 2008-2013, Theano Development Team All rights reserved.
Yifeng Li
CMMT, UBC, Vancouver
Sep 23, 2014
Contact: yifeng.li.cn@gmail.com
"""
from __future__ import division
import time
import math
import copy
import numpy
import theano
import theano.tensor as T
from logistic_sgd import LogisticRegression
from mlp import HiddenLayer
from deep_feat_select_mlp import InputLayer
from rbm import RBM
import classification as cl
class DFS(object):
    """Deep feature selection class.
    This structure is input_layer + stacked RBM.

    A one-to-one feature-weighting input layer precedes a stack of sigmoid
    hidden layers; each hidden layer shares its weights with an RBM so the
    stack can be greedily pretrained, then the whole network (plus a softmax
    output layer) is finetuned supervised.
    """
    def __init__(self, rng, n_in=784, n_hidden=[500, 500],
                 n_out=10, lambda1=0, lambda2=0, alpha1=0, alpha2=0):
        """This class is made to support a variable number of layers.
        :type rng: numpy.random.RandomState
        :param rng: numpy random number generator used to draw initial
        weights
        :type n_in: int
        :param n_in: dimension of the input to the DFS
        :type n_hidden: list of ints
        :param n_hidden: intermediate layers size, must contain
        at least one value
        :type n_out: int
        :param n_out: dimension of the output of the network
        lambda1: float scalar, control the sparsity of the input weights.
        The regularization term is lambda1( (1-lambda2)/2 * ||w||_2^2 + lambda2 * ||w||_1 ).
        Thus, the larger lambda1 is, the sparser the input weights are.
        lambda2: float scalar, control the smoothness of the input weights.
        The regularization term is lambda1( (1-lambda2)/2 * ||w||_2^2 + lambda2 * ||w||_1 ).
        Thus, the larger lambda2 is, the smoother the input weights are.
        alpha1: float scalar, control the sparsity of the weight matrices in MLP.
        The regularization term is alpha1( (1-alpha2)/2 * \sum||W_i||_2^2 + alpha2 \sum||W_i||_1 ).
        Thus, the larger alpha1 is, the sparser the MLP weights are.
        alpha2: float scalar, control the smoothness of the weight matrices in MLP.
        The regularization term is alpha1( (1-alpha2)/2 * \sum||W_i||_2^2 + alpha2 \sum||W_i||_1 ).
        Thus, the larger alpha2 is, the smoother the MLP weights are.

        NOTE(review): per the cost expression built below, lambda2/alpha2
        actually weight the L1 term, so LARGER lambda2/alpha2 means SPARSER
        (not smoother) weights; the two "smoother" sentences above appear to
        have the direction inverted.
        """
        # NOTE(review): n_hidden has a mutable default argument; it is only
        # read here, but passing None and defaulting inside would be safer.
        self.hidden_layers = []
        self.rbm_layers = []
        self.params = []
        self.n_layers = len(n_hidden)
        assert self.n_layers > 0
        # allocate symbolic variables for the data
        self.x = T.matrix('x')   # the data is presented as rasterized images
        self.y = T.ivector('y')  # the labels are presented as 1D vector of
                                 # [int] labels
        # input layer: one weight per input feature (see use of .w below for
        # the feature-selection regularizers)
        input_layer = InputLayer(input=self.x, n_in=n_in)
        self.params.extend(input_layer.params)
        self.input_layer = input_layer
        # hidden layers: chain each layer onto the previous one's output
        for i in range(len(n_hidden)):
            if i == 0:
                input_hidden = self.input_layer.output
                n_in_hidden = n_in
            else:
                input_hidden = self.hidden_layers[i - 1].output
                n_in_hidden = n_hidden[i - 1]
            hd = HiddenLayer(rng=rng, input=input_hidden, n_in=n_in_hidden, n_out=n_hidden[i],
                             activation=T.nnet.sigmoid)
            self.hidden_layers.append(hd)
            self.params.extend(hd.params)
            # Construct an RBM that shared weights with this layer, so that
            # pretraining the RBM updates the hidden layer's W and b in place.
            rbm_layer = RBM(numpy_rng=rng,
                            theano_rng=None,
                            input=input_hidden,
                            n_visible=n_in_hidden,
                            n_hidden=n_hidden[i],
                            W=hd.W,
                            hbias=hd.b)
            self.rbm_layers.append(rbm_layer)
        # The logistic regression layer gets as input the hidden units
        # of the hidden layer (or the input layer directly if there are
        # no hidden layers; unreachable here given the assert above).
        if len(n_hidden) <= 0:
            self.logRegressionLayer = LogisticRegression(
                input=self.input_layer.output,
                n_in=n_in,
                n_out=n_out)
        else:
            self.logRegressionLayer = LogisticRegression(
                input=self.hidden_layers[-1].output,
                n_in=n_hidden[-1],
                n_out=n_out)
        self.params.extend(self.logRegressionLayer.params)
        # regularization terms on coefficients of input layer
        self.L1_input = abs(self.input_layer.w).sum()
        self.L2_input = (self.input_layer.w ** 2).sum()
        #self.hinge_loss_neg = (T.maximum(0, -self.input_layer.w)).sum()  # penalize negative values
        #self.hinge_loss_pos = (T.maximum(0, self.input_layer.w)).sum()   # penalize positive values
        # regularization terms on weights of hidden layers (and the softmax layer)
        L1s = []
        L2_sqrs = []
        for i in range(len(n_hidden)):
            L1s.append(abs(self.hidden_layers[i].W).sum())
            L2_sqrs.append((self.hidden_layers[i].W ** 2).sum())
        L1s.append(abs(self.logRegressionLayer.W).sum())
        L2_sqrs.append((self.logRegressionLayer.W ** 2).sum())
        self.L1 = T.sum(L1s)
        self.L2_sqr = T.sum(L2_sqrs)
        # negative log likelihood of the MLP is given by the negative
        # log likelihood of the output of the model, computed in the
        # logistic regression layer
        self.negative_log_likelihood = self.logRegressionLayer.negative_log_likelihood
        # same holds for the function computing the number of errors
        self.errors = self.logRegressionLayer.errors(self.y)
        # self.cost = self.negative_log_likelihood(self.y) \
        # + lambda1*(1.0-lambda2)*0.5*self.L2_input \
        # + lambda1*lambda2*(1.0-lambda3)*self.hinge_loss_pos \
        # + lambda1*lambda2*lambda3*self.hinge_loss_neg \
        # + alpha1*(1.0-alpha2)*0.5 * self.L2_sqr + alpha1*alpha2 * self.L1
        # Total cost: data term + elastic-net penalty on the input (feature
        # selection) weights + elastic-net penalty on the network weights.
        self.cost = self.negative_log_likelihood(self.y) \
            + lambda1*(1.0-lambda2)*0.5*self.L2_input \
            + lambda1*lambda2*self.L1_input \
            + alpha1*(1.0-alpha2)*0.5 * self.L2_sqr + alpha1*alpha2 * self.L1
        self.y_pred = self.logRegressionLayer.y_pred
        self.y_pred_prob = self.logRegressionLayer.y_pred_prob

    def get_params(self):
        # Deep copy so the returned snapshot is not mutated by further training.
        return copy.deepcopy(self.params)

    def set_params(self, given_params):
        self.params = given_params

    def print_params(self):
        # Debug helper: dump the current value of every shared parameter.
        for param in self.params:
            print param.get_value(borrow=True)

    def pretraining_functions(self, train_set_x, batch_size, persistent_k=15):
        '''
        Build the symbolic pretraining functions to update the parameter in one iteration.

        Returns one compiled theano function per RBM layer; each takes a
        minibatch index (and optional learning_rate) and performs one CD-k
        update on that layer, returning its cost.
        '''
        # index to a [mini]batch
        index = T.lscalar('index')                 # index to a minibatch
        learning_rate = T.scalar('learning_rate')  # learning rate to use
        # number of batches
        #n_batches = int(math.ceil(train_set_x.get_value(borrow=True).shape[0] / batch_size))
        # begining of a batch, given `index`
        batch_begin = index * batch_size
        # ending of a batch given `index`
        batch_end = batch_begin + batch_size
        pretrain_fns = []
        for rbm_layer in self.rbm_layers:
            # get the cost and the updates list
            # (persistent=None means plain CD-k rather than PCD)
            cost, updates = rbm_layer.get_cost_updates(learning_rate, persistent=None, k=persistent_k)
            # compile the theano function
            # NOTE(review): theano.Param was deprecated in favour of theano.In
            # in later Theano releases -- confirm against the pinned version.
            fn = theano.function(inputs=[index,
                                         theano.Param(learning_rate, default=0.1)],
                                 outputs=cost,
                                 updates=updates,
                                 givens={self.x: train_set_x[batch_begin:batch_end]})
            # append `fn` to the list of functions
            pretrain_fns.append(fn)
        return pretrain_fns

    def build_finetune_functions(self, train_set_x, train_set_y, valid_set_x, valid_set_y, batch_size, learning_rate_shared):
        '''
        Build symbolic funetuning functions for training and validating.

        Returns (train_fn, valid_score): train_fn(minibatch_index) performs
        one SGD step on the full cost; valid_score() returns the per-batch
        zero-one error over the whole validation set.
        '''
        # compute number of minibatches for training, validation and testing
        n_valid_batches = int(math.ceil(valid_set_x.get_value(borrow=True).shape[0] / batch_size))
        index = T.lscalar('index')  # index to a [mini]batch
        # compute the gradients with respect to the model parameters
        gparams = T.grad(self.cost, self.params)
        # compute list of fine-tuning updates (vanilla SGD; the learning rate
        # lives in a shared variable so it can be decayed externally)
        updates = []
        for param, gparam in zip(self.params, gparams):
            updates.append((param, param - gparam * learning_rate_shared))
        train_fn = theano.function(inputs=[index],
                outputs=self.cost,
                updates=updates,
                givens={
                    self.x: train_set_x[index * batch_size:
                                        (index + 1) * batch_size],
                    self.y: train_set_y[index * batch_size:
                                        (index + 1) * batch_size]},
                name='train')
        # test_score_i = theano.function([index], self.errors,
        #          givens={
        #            self.x: test_set_x[index * batch_size:
        #                                (index + 1) * batch_size],
        #            self.y: test_set_y[index * batch_size:
        #                                (index + 1) * batch_size]},
        #                  name='test')
        valid_score_i = theano.function([index], self.errors,
                givens={
                    self.x: valid_set_x[index * batch_size:
                                        (index + 1) * batch_size],
                    self.y: valid_set_y[index * batch_size:
                                        (index + 1) * batch_size]},
                name='valid')
        # Create a function that scans the entire validation set
        def valid_score():
            return [valid_score_i(i) for i in xrange(n_valid_batches)]
        # Create a function that scans the entire test set
        # def test_score():
        #     return [test_score_i(i) for i in xrange(n_test_batches)]
        return train_fn, valid_score

    def build_test_function(self, test_set_x, batch_size):
        """
        Build a symbolic test function.

        Returns a closure test_score() that runs batched prediction over the
        whole of test_set_x and returns (labels, probabilities) as lists.
        """
        n_test_batches = int(math.ceil(test_set_x.get_value(borrow=True).shape[0] / batch_size))
        index = T.lscalar('index')  # index to a [mini]batch
        test_score_i = theano.function([index], [self.y_pred, self.y_pred_prob],
                givens={self.x: test_set_x[index * batch_size : (index + 1) * batch_size]},
                name='test')
        # Create a function that scans the entire test set
        def test_score():
            y_pred = []
            y_pred_prob = []
            for i in xrange(n_test_batches):
                label, prob = test_score_i(i)
                y_pred.extend(label)
                y_pred_prob.extend(prob)
            return y_pred, y_pred_prob
        return test_score
def pretrain_model(model, train_set_x=None,
                   pretrain_lr=0.1, pretraining_epochs=100,
                   batch_size=100, persistent_k=15):
    """
    Pretrain the model given training data.

    Greedy layer-wise RBM pretraining: each layer is trained for
    `pretraining_epochs` epochs over all minibatches before moving on to
    the next layer. `model` is modified in place (its RBMs share weights
    with the hidden layers), so nothing is returned.
    """
    # get the pretraining functions for each layer
    pretraining_fns = model.pretraining_functions(train_set_x=train_set_x,
                                                  batch_size=batch_size,
                                                  persistent_k=persistent_k)
    # true division is active (from __future__ import division at file top),
    # hence the math.ceil to include a final partial batch
    n_train_batches = int(math.ceil(train_set_x.get_value(borrow=True).shape[0] / batch_size))
    #n_train_batches = train_set_x.get_value(borrow=True).shape[0] // batch_size
    print '... pretraining the model'
    # pretrain each layer
    for i in xrange(model.n_layers):
        # go through pretraining epochs
        for epoch in xrange(pretraining_epochs):
            # go through the training set
            c = []
            for batch_index in xrange(n_train_batches):
                c_batch = pretraining_fns[i](index=batch_index,
                                             learning_rate=pretrain_lr)
                c.append(c_batch)
            print 'Pre-training layer %i, epoch %d, cost %f' % (i, epoch, numpy.mean(c))
    # no need to return model, as it is passed by reference
def finetune_model(classifier=None,
                   train_set_x=None, train_set_y=None, valid_set_x=None, valid_set_y=None,
                   learning_rate=0.1, alpha=0.01,
                   n_hidden=[256,128,16], n_cl=2,
                   n_epochs=1000, batch_size=100, rng=numpy.random.RandomState(100)):
    """
    Finetune the model by training and validation sets.

    Supervised SGD with early stopping (patience) and a learning rate that
    decays whenever training runs too long without improvement. On exit the
    classifier's parameters are reset to the best ones seen on validation.

    NOTE(review): alpha, n_hidden, n_cl and rng are accepted but never used
    in this function body.
    """
    # compute number of minibatches for training, validation and testing
    n_train_batches = int(math.ceil(train_set_x.get_value(borrow=True).shape[0] / batch_size))
    #n_valid_batches = valid_set_x.get_value(borrow=True).shape[0] / batch_size
    # shared variable to reduce the learning rate
    learning_rate_shared = theano.shared(learning_rate, name='learn_rate_shared')
    decay_rate = T.scalar(name='decay_rate', dtype=theano.config.floatX)
    # compiled function: multiplies the shared learning rate by decay_rate in place
    reduce_learning_rate = theano.function([decay_rate], learning_rate_shared,
                                           updates=[(learning_rate_shared, learning_rate_shared*decay_rate)])
    train_model_one_iteration, validate_model = classifier.build_finetune_functions(train_set_x, train_set_y,
                                                                                    valid_set_x, valid_set_y,
                                                                                    batch_size, learning_rate_shared)
    print '... finetuning'
    # early-stopping parameters
    patience = 5000                # look as this many examples regardless
    patience_increase = 2          # wait this much longer when a new best is
                                   # found
    improvement_threshold = 0.995  # a relative improvement of this much is
                                   # considered significant
    validation_frequency = min(n_train_batches, patience // 2)
                                   # go through this many
                                   # minibatche before checking the network
                                   # on the validation set; in this case we
                                   # check every epoch
    best_validation_loss = numpy.inf
    # decay the learning rate every max_num_epoch_change_learning_rate epochs,
    # shortening that window each time by max_num_epoch_change_rate
    max_num_epoch_change_learning_rate = 100
    max_num_epoch_not_improve = 3*max_num_epoch_change_learning_rate
    max_num_epoch_change_rate = 0.8
    learning_rate_decay_rate = 0.8
    epoch_change_count = 0
    # NOTE(review): time.clock() is deprecated/removed in Python >= 3.3/3.8;
    # fine for the Python 2 environment this file targets.
    start_time = time.clock()
    done_looping = False
    epoch = 0
    while (epoch < n_epochs) and (not done_looping):
        epoch = epoch + 1
        epoch_change_count = epoch_change_count + 1
        if epoch_change_count % max_num_epoch_change_learning_rate == 0:
            reduce_learning_rate(learning_rate_decay_rate)
            max_num_epoch_change_learning_rate = \
                cl.change_max_num_epoch_change_learning_rate(max_num_epoch_change_learning_rate, max_num_epoch_change_rate)
            max_num_epoch_not_improve = 3*max_num_epoch_change_learning_rate
            epoch_change_count = 0
        for minibatch_index in xrange(n_train_batches):
            minibatch_avg_cost = train_model_one_iteration(minibatch_index)
            # iteration number
            iter = (epoch - 1) * n_train_batches + minibatch_index
            if (iter + 1) % validation_frequency == 0:
                # compute zero-one loss on validation set
                validation_losses = validate_model()
                this_validation_loss = numpy.mean(validation_losses)
                print('epoch %i, minibatch %i/%i, validation error %f %%' % \
                      (epoch, minibatch_index + 1, n_train_batches,
                       this_validation_loss * 100.))
                # if we got the best validation score until now
                if this_validation_loss < best_validation_loss:
                    num_epoch_not_improve = 0
                    # (inner check is redundant with the outer one; kept as-is)
                    if this_validation_loss < best_validation_loss:
                        #improve patience if loss improvement is good enough
                        if this_validation_loss < best_validation_loss * \
                           improvement_threshold:
                            patience = max(patience, iter * patience_increase)
                        best_validation_loss = this_validation_loss
                        # save a copy of the currently best model parameter
                        best_model_params = classifier.get_params()
                if patience <= iter:
                    done_looping = True
                    break
                if this_validation_loss >= best_validation_loss:
                    num_epoch_not_improve = num_epoch_not_improve + 1
                if num_epoch_not_improve >= max_num_epoch_not_improve:
                    done_looping = True
                    break
    # set the best model parameters
    classifier.set_params(best_model_params)
    end_time = time.clock()
    training_time = end_time - start_time
    print 'Training time: %f' %(training_time/60)
    print 'Optimization complete with best validation score of %f,' %(best_validation_loss * 100.)
#def test_model(classifier, test_set_x_org):
# """
# test or prediction
# """
# test_set_x=theano.shared(numpy.asarray(test_set_x_org,dtype=theano.config.floatX),borrow=True)
# index = T.lscalar() # index to a [mini]batch
# data = T.matrix('data') # the data is presented as rasterized images
# get_y_pred=classifier.get_predicted(data)
# test_model_func = theano.function(inputs=[data], outputs=get_y_pred)
# y_predicted=test_model_func(test_set_x.get_value(borrow=True))
# return y_predicted
def train_model(train_set_x_org=None, train_set_y_org=None,
                valid_set_x_org=None, valid_set_y_org=None,
                pretrain_lr=0.1, finetune_lr=0.1, alpha=0.01,
                lambda1=0, lambda2=0, alpha1=0, alpha2=0,
                n_hidden=[256,256], persistent_k=15,
                pretraining_epochs=20, training_epochs=1000,
                batch_size=100, rng=numpy.random.RandomState(100)):
    """
    Train the model using training and validation data.
    INPUTS:
    train_set_x_org: numpy 2d array, each row is a training sample.
    train_set_y_org: numpy vector of type int {0,1,...,C-1}, class labels of training samples.
    valid_set_x_org: numpy 2d array, each row is a validation sample.
    This set is to monitor the convergence of optimization.
    valid_set_y_org: numpy vector of type int {0,1,...,C-1}, class labels of validation samples.
    pretrain_lr: float scalar, the learning rate of pretraining phase.
    finetune_lr: float scalar, the initial learning rate of finetuning phase.
    alpha: float, parameter to trade off the momentum term.
    lambda1: float scalar, control the sparsity of the input weights.
    The regularization term is lambda1( (1-lambda2)/2 * ||w||_2^2 + lambda2 * ||w||_1 ).
    Thus, the larger lambda1 is, the sparser the input weights are.
    lambda2: float scalar, control the smoothness of the input weights.
    The regularization term is lambda1( (1-lambda2)/2 * ||w||_2^2 + lambda2 * ||w||_1 ).
    Thus, the larger lambda2 is, the smoother the input weights are.
    alpha1: float scalar, control the sparsity of the weight matrices in MLP.
    The regularization term is alpha1( (1-alpha2)/2 * \sum||W_i||_2^2 + alpha2 \sum||W_i||_1 ).
    Thus, the larger alpha1 is, the sparser the MLP weights are.
    alpha2: float scalar, control the smoothness of the weight matrices in MLP.
    The regularization term is alpha1( (1-alpha2)/2 * \sum||W_i||_2^2 + alpha2 \sum||W_i||_1 ).
    Thus, the larger alpha2 is, the smoother the MLP weights are.
    n_hidden, vector of int, n_hidden[i]: number of hidden units of the i-th layer.
    persistent_chain_k: length of persistent chain from the last sampling to new sampling.
    pretraining_epochs: int scalar, maximal number of epochs in the pretraining phase.
    training_epochs: int scalar, maximal number of epochs in the finetuning phase.
    batch_size: int scalar, minibatch size.
    rng: numpy random number state.
    OUTPUTS:
    dfs: object of DFS, the model learned, returned for testing.
    training_time: float, training time in seconds.
    """
    # Move data into theano shared variables (GPU-friendly); labels are
    # stored as floatX then cast back to int32 as theano convention requires.
    train_set_x = theano.shared(numpy.asarray(train_set_x_org,dtype=theano.config.floatX),borrow=True)
    train_set_y = T.cast(theano.shared(numpy.asarray(train_set_y_org,dtype=theano.config.floatX),borrow=True),'int32')
    valid_set_x = theano.shared(numpy.asarray(valid_set_x_org,dtype=theano.config.floatX),borrow=True)
    valid_set_y = T.cast(theano.shared(numpy.asarray(valid_set_y_org,dtype=theano.config.floatX),borrow=True),'int32')
    # build the model (class count inferred from the distinct training labels)
    n_feat = train_set_x.get_value(borrow=True).shape[1]
    n_cl = len(numpy.unique(train_set_y_org))
    dfs = DFS(rng=rng, n_in=n_feat,
              n_hidden=n_hidden, n_out=n_cl,
              lambda1=lambda1, lambda2=lambda2, alpha1=alpha1, alpha2=alpha2)
    # pretrain the model (greedy layer-wise RBM training; updates dfs in place)
    start_time = time.clock()
    pretrain_model(dfs, train_set_x, pretrain_lr=pretrain_lr, pretraining_epochs=pretraining_epochs,
                   persistent_k=persistent_k,
                   batch_size=batch_size)
    # finetune (supervised, with early stopping on the validation set)
    finetune_model(dfs, train_set_x=train_set_x, train_set_y=train_set_y,
                   valid_set_x=valid_set_x, valid_set_y=valid_set_y,
                   learning_rate=finetune_lr, alpha=alpha,
                   n_hidden=n_hidden, n_cl=n_cl,
                   n_epochs=training_epochs, batch_size=batch_size, rng=rng)
    end_time = time.clock()
    training_time = end_time - start_time
    return dfs, training_time
def test_model(classifier, test_set_x_org, batch_size=200):
    """
    Predict class labels of given data using the model learned.
    INPUTS:
    classifier_trained: object of DFS, the model learned by function "train_model".
    test_set_x_org: numpy 2d array, each row is a sample whose label to be predicted.
    batch_size: int scalar, batch size, efficient for a very large number of test samples.
    OUTPUTS:
    test_set_y_predicted: numpy int vector, the class labels predicted.
    test_set_y_predicted_prob: numpy float vector, the probabilities.
    test_time: test time in seconds.
    """
    start_time = time.clock()
    # Share the test data so the compiled theano function can slice it by batch.
    test_set_x = theano.shared(numpy.asarray(test_set_x_org,dtype=theano.config.floatX),borrow=True)
    # Build and run the batched prediction closure over the whole test set.
    test_score = classifier.build_test_function(test_set_x, batch_size=batch_size)
    test_set_y_predicted, test_set_y_predicted_prob = test_score()
    end_time = time.clock()
    test_time = end_time - start_time
    return test_set_y_predicted, test_set_y_predicted_prob, test_time
| {
"content_hash": "7702f9ecf3da3d11fc09b86b87f23c1f",
"timestamp": "",
"source": "github",
"line_count": 506,
"max_line_length": 144,
"avg_line_length": 45.547430830039524,
"alnum_prop": 0.5947845706599557,
"repo_name": "yifeng-li/DECRES",
"id": "e6487b73c8e05cf9793f9cae94e120e9f496a33a",
"size": "23047",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "deep_feat_select_DBN.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "628827"
}
],
"symlink_target": ""
} |
from django.db import models
from perfil.models import Persona
from especialidad import Especialidad
class Profesional(Persona):
    """A professional (Persona subtype) holding a set of specialties."""

    especialidades = models.ManyToManyField(Especialidad)

    @classmethod
    def create(cls, profesional_id=0):
        """Fetch an existing Profesional by primary key, or build a new,
        unsaved one when profesional_id is 0."""
        if profesional_id != 0:
            profesional = Profesional.objects.get(pk=profesional_id)
        else:
            profesional = Profesional()
        # Scratch list attached to the instance (not an ORM field).
        profesional._restricciones = []
        return profesional

    @property
    def restricciones(self):
        """Active ("ON") restrictions, ordered by weekday then start hour."""
        # Imported locally -- presumably to avoid a circular import; confirm.
        from restriccion import Restriccion
        activas = Restriccion.objects.filter(profesional=self, estado="ON")
        return activas.order_by('dia_semana', 'hora_desde')
| {
"content_hash": "f247f75879bca6454ddb97b20abd29db",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 77,
"avg_line_length": 26.8,
"alnum_prop": 0.5932835820895522,
"repo_name": "yo-alan/TrabajoFinalIntegrador",
"id": "93fa0b89401cf6cfddc6d74996cc4f10ad8f4ba9",
"size": "828",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "calendario/objects/profesional.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "149806"
},
{
"name": "HTML",
"bytes": "197886"
},
{
"name": "JavaScript",
"bytes": "667431"
},
{
"name": "Python",
"bytes": "94091"
}
],
"symlink_target": ""
} |
"""Component builders test for software construction tookit (LARGE test)."""
import sys
import TestFramework
def TestSConstruct(scons_globals):
  """Test SConstruct file.

  Exercises the seven_zip tool by routing SEVEN_ZIP through the fake7z.py
  stub (driven by the current Python interpreter) and invoking the
  Extract7zip, Compress7zip and Archive7zip builders.

  Args:
    scons_globals: Global variables dict from the SConscript file.
  """
  # Get globals from SCons
  Environment = scons_globals['Environment']
  env = Environment(tools=['component_setup', 'seven_zip'])
  # Make sure the python running this test is the one found on PATH.
  env.PrependENVPath('PATH', env.File(sys.executable).dir.abspath)
  # Replace the real 7-Zip binary with the fake stub.
  env.Replace(SEVEN_ZIP='python $FAKE7Z',
              FAKE7Z=env.File('fake7z.py').abspath)
  # Test extract
  print 'Will extract:', env.Extract7zip('outdir/dummy_file', 'foodir/foo.7z')
  # Test archive
  env.Compress7zip('comp.7z', ['bardir/bar1'])
  env.Archive7zip('arch.7z', [env.Dir('bardir')])
foo7z_contents = """
7-Zip 4.23 Copyright (c) 1999-2005 Igor Pavlov 2005-06-29
Listing archive: test.zip
Date Time Attr Size Compressed Name
------------------- ----- ------------ ------------ ------------
2009-02-06 16:59:30 .R..A 46 44 apple
2009-03-23 13:58:16 D...A 30590 3198 berry
2009-03-30 13:17:18 .R..A 443 139 cherry
2009-03-30 13:16:50 DR..A 443 139 daquiri
2009-02-06 16:59:30 .R..A 81 75 eggplant
------------------- ----- ------------ ------------ ------------
31603 3595 5 files
"""
fake7z_contents = """#!/usr/bin/python2.4
import sys
if sys.argv[1] == 'l':
f = open(sys.argv[2], 'rt')
for l in f:
print l.strip()
f.close()
"""
expect_stdout = r"""scons: Reading SConscript files ...
Will extract: ['outdir\\apple', 'outdir\\cherry', 'outdir\\eggplant']
scons: done reading SConscript files.
scons: Building targets ...
cd bardir && python WORKDIR\test\fake7z.py a -t7z -mx0 WORKDIR\test\arch.7z ./
cd bardir && python WORKDIR\test\fake7z.py a -t7z -mx9 WORKDIR\test\comp.7z bar1
Delete("outdir")
python WORKDIR\test\fake7z.py x foodir\foo.7z -o"outdir"
scons: done building targets.
"""
def main():
  """Set up the test fixture tree, run scons, and compare its output."""
  test = TestFramework.TestFramework()
  # The expected output uses backslashed Windows paths, so skip elsewhere.
  if sys.platform not in ['win32', 'cygwin']:
    test.skip_test('This test is only for windows.\n')
    return
  base = 'test/'
  test.subdir(base)
  test.WriteSConscript(base + 'SConstruct', TestSConstruct)
  # Input archive (fake listing) for the extract test.
  test.subdir(base + 'foodir/')
  test.write(base + 'foodir/foo.7z', foo7z_contents)
  # Input files for the compress/archive tests.
  test.subdir(base + 'bardir/')
  test.write(base + 'bardir/bar1', 'Sample input file 1')
  test.write(base + 'bardir/bar2', 'Sample input file 2')
  test.write(base + 'fake7z.py', fake7z_contents)
  test.run(chdir=base, options='arch.7z comp.7z outdir',
           stdout=expect_stdout.replace('WORKDIR', test.workdir))
  test.pass_test()


if __name__ == '__main__':
  main()
| {
"content_hash": "8e3acdd73e67e3b109c985f45359f171",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 80,
"avg_line_length": 28.408163265306122,
"alnum_prop": 0.6102729885057471,
"repo_name": "nguyentran/openviber",
"id": "e6dbb2f7fe83434e9ea36a99b4f1721eaa6e9383",
"size": "4334",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "tools/swtoolkit/test/seven_zip_test.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
from proto import arp_responder
def launch():
    """POX component entry point: delegates to proto.arp_responder.launch().

    Takes and forwards no options of its own.
    """
    arp_responder.launch()
| {
"content_hash": "7dda6c804c2f3aa2bfde00105d3e466e",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 31,
"avg_line_length": 18.5,
"alnum_prop": 0.7297297297297297,
"repo_name": "anrl/gini3",
"id": "5be8152e16d136144f3d5bb477c50594bf36d044",
"size": "94",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "backend/src/pox/ext/gini/core/proto_arp_responder.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1048407"
},
{
"name": "C++",
"bytes": "8774"
},
{
"name": "HTML",
"bytes": "12085"
},
{
"name": "Makefile",
"bytes": "1533"
},
{
"name": "Python",
"bytes": "2067569"
},
{
"name": "Roff",
"bytes": "12445"
},
{
"name": "Shell",
"bytes": "1261"
}
],
"symlink_target": ""
} |
from flask import render_template, request
def object_list(template_name, query, paginate_by=20, **context):
    """Render *template_name* with one page of *query* results.

    The page number is read from the ``page`` query-string argument;
    a missing or non-numeric value falls back to page 1. The paginated
    results are exposed to the template as ``object_list``; any extra
    keyword arguments are passed through to the template context.
    """
    raw_page = request.args.get('page')
    page = int(raw_page) if raw_page and raw_page.isdigit() else 1
    paginated = query.paginate(page, paginate_by)
    return render_template(template_name, object_list=paginated, **context)
| {
"content_hash": "a740aa48a70a3e507d2509d09908649c",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 66,
"avg_line_length": 30.416666666666668,
"alnum_prop": 0.6657534246575343,
"repo_name": "abacuspix/NFV_project",
"id": "a018e7f6354ec0562801805d7dc58ef7689fcd01",
"size": "365",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Learning_Flask_Framework/Learning_Flask_Framework/app/helpers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6037"
},
{
"name": "Gherkin",
"bytes": "419"
},
{
"name": "HTML",
"bytes": "342352"
},
{
"name": "JavaScript",
"bytes": "8828"
},
{
"name": "Mako",
"bytes": "2224"
},
{
"name": "Nginx",
"bytes": "231"
},
{
"name": "Python",
"bytes": "706126"
}
],
"symlink_target": ""
} |
from Axon.Component import component
from Axon.Ipc import producerFinished, shutdownMicroprocess
import time
from AlignTSPackets import AlignTSPackets
class ExtractPCR(component):
    """
    Kamaelia component: parses MPEG-2 transport stream packets arriving on
    "inbox" and, for each packet whose adaptation field carries a Programme
    Clock Reference, emits a (pid, pcr) tuple on "outbox", where pcr is the
    full 27MHz clock value (base * 300 + extension).
    """
    Inboxes = { "inbox" : "Individual tranport stream packets",
                "control" : "Shutdown signalling",
              }
    Outboxes = { "outbox" : "",
                 "signal" : "Shutdown signalling",
               }

    def shutdown(self):
        """Forward control messages; return True once told to shut down."""
        while self.dataReady("control"):
            msg = self.recv("control")
            self.send(msg,"signal")
            if isinstance(msg,(producerFinished,shutdownMicroprocess)):
                return True
        return False

    def main(self):
        """Generator main loop: parse every received packet until shutdown."""
        shutdown = False
        while not shutdown:
            while self.dataReady("inbox"):
                self.parse(self.recv("inbox"))
            shutdown = shutdown or self.shutdown()
            if not shutdown and not self.anyReady():
                self.pause()
            yield 1

    def parse(self, tspacket):
        """Extract the PCR from one 188-byte TS packet (byte string), if present."""
        # 13-bit PID: low 5 bits of byte 1 plus all of byte 2.
        pid = ( (ord(tspacket[1])<<8) + ord(tspacket[2]) ) & 0x1fff
        # adaptation_field_control bits: 2 = adaptation only, 3 = adaptation+payload
        adaptionflag = (ord(tspacket[3]) & 0x30) >> 4
        if adaptionflag == 2 or adaptionflag == 3:
            # adaption field starts at byte 4
            start=4
            af_len = ord(tspacket[start+0])
            # need at least 7 bytes in the adaption field for there to be PCR
            # data (1 flags byte + 6 byte PCR field)
            if af_len >= 7:
                flags = ord(tspacket[start+1])
                if (flags & 16):
                    # PCR flag set; the 48-bit PCR field occupies bytes
                    # start+2 .. start+7.
                    # BUG FIX: the original read tspacket[start+5] three times
                    # for the low 24 bits, never touching bytes start+6 and
                    # start+7, which corrupted every extracted PCR value.
                    pcr = (ord(tspacket[start+2]) << 40) + \
                          (ord(tspacket[start+3]) << 32) + \
                          (ord(tspacket[start+4]) << 24) + \
                          (ord(tspacket[start+5]) << 16) + \
                          (ord(tspacket[start+6]) << 8) + \
                          ord(tspacket[start+7])
                    pcr_base = pcr>>15     # top 33 bits: 90kHz base
                    # middle 6 bits reserved
                    pcr_ext = pcr & 0x1ff  # bottom 9 bits: 27MHz extension
                    real_pcr = pcr_base * 300 + pcr_ext
                    self.send( (pid, real_pcr), "outbox")
                    # self.send( "pid %4d : pcr = %10d . %3d\n" % (pid,pcr_base,pcr_ext), "outbox")
                    # print pid, pcr_base, pcr_ext
class MeasurePCRs(component):
    """Receives (pid, pcr) tuples on "inbox" and prints an estimate of each
    PID's clock rate in MHz, computed against the first PCR seen for that PID.
    """
    def shutdown(self):
        # Forward control messages; report True once told to shut down.
        while self.dataReady("control"):
            msg = self.recv("control")
            self.send(msg,"signal")
            if isinstance(msg,(producerFinished,shutdownMicroprocess)):
                return True
        return False
    def main(self):
        shutdown = False
        # pid -> (first pcr seen, wall-clock time it arrived)
        pcrs = {}
        while not shutdown:
            while self.dataReady("inbox"):
                now = time.time()
                pid,pcr = self.recv("inbox")
                # NOTE: dict.has_key is Python 2 only ("pid in pcrs" in py3).
                if not pcrs.has_key(pid):
                    pcrs[pid] = pcr, now
                else:
                    # The stored baseline is never updated, so each estimate
                    # averages over the whole run since the first PCR.
                    old_pcr, then = pcrs[pid]
                    rate = float(pcr-old_pcr) / float(now-then) / 1000000.0
                    print "pid %4d : approximating ... rate about %.5f MHz" % (pid, rate)
            shutdown = shutdown or self.shutdown()
            if not shutdown and not self.anyReady():
                self.pause()
            yield 1
if __name__ == "__main__":
    from Kamaelia.Chassis.Pipeline import Pipeline
    from Kamaelia.Util.Console import ConsoleEchoer
    from Kamaelia.Device.DVB.Core import DVB_Multiplex
    import dvb3.frontend

    # Tuning frequency for the multiplex (presumably MHz -- confirm
    # against DVB_Multiplex's expectations).
    FREQUENCY = 505.833330
    # DVB-T front end parameters for the receiver.
    FE_PARAMS = {
        "inversion": dvb3.frontend.INVERSION_AUTO,
        "constellation": dvb3.frontend.QAM_16,
        "coderate_HP": dvb3.frontend.FEC_3_4,
        "coderate_LP": dvb3.frontend.FEC_3_4,
    }

    # Capture pid 0x2000, align to TS packet boundaries, then extract and
    # report PCR clock rates.
    Pipeline(
        DVB_Multiplex(FREQUENCY, [0x2000], FE_PARAMS),
        AlignTSPackets(),
        ExtractPCR(),
        MeasurePCRs(),
    ).run()
| {
"content_hash": "6e620ec837e70c5ddb4a07445026c0a2",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 98,
"avg_line_length": 34.29457364341085,
"alnum_prop": 0.4588607594936709,
"repo_name": "sparkslabs/kamaelia",
"id": "170ebb4fd95849849742f668ba0cfc19bd2774c8",
"size": "5590",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "Sketches/MH/DVB_Remuxing/ExtractPCR.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3814"
},
{
"name": "C",
"bytes": "212854"
},
{
"name": "C++",
"bytes": "327546"
},
{
"name": "CSS",
"bytes": "114434"
},
{
"name": "ChucK",
"bytes": "422"
},
{
"name": "HTML",
"bytes": "1288960"
},
{
"name": "Java",
"bytes": "31832"
},
{
"name": "JavaScript",
"bytes": "829491"
},
{
"name": "M4",
"bytes": "12224"
},
{
"name": "Makefile",
"bytes": "150947"
},
{
"name": "NSIS",
"bytes": "18867"
},
{
"name": "OCaml",
"bytes": "643"
},
{
"name": "PHP",
"bytes": "49059"
},
{
"name": "Perl",
"bytes": "504"
},
{
"name": "Processing",
"bytes": "2885"
},
{
"name": "Python",
"bytes": "18900785"
},
{
"name": "Ruby",
"bytes": "4165"
},
{
"name": "Shell",
"bytes": "707588"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os.path
import os
import pandas as pd
from .exceptions import (DatasetNotFound, InitException, NotFound, WrongParameter)
def _list_filenames(data_dir, dir_pattern=None, file_pattern=None):
""" List all files in a data_dir"""
import re
# parse all files in the folder
filenames = []
for root, subdirs, files in os.walk(data_dir):
if dir_pattern is None or re.match(dir_pattern, root):
for fname in files:
if file_pattern is None or re.match(file_pattern, fname):
filenames.append(os.path.normpath(os.path.join(root, fname)))
# make sure that sorting order is deterministic
return sorted(filenames)
class DocumentIndex(object):
    """Index of ingested documents.

    Maps between ``internal_id`` (a contiguous integer), optional
    ``document_id`` / ``rendition_id`` pairs, and file paths relative to
    ``data_dir``.

    Parameters
    ----------
    data_dir : str
        root folder under which all indexed files live
    data : pandas.DataFrame
        one row per document; always has 'internal_id' and 'file_path'
        columns, optionally 'document_id' / 'rendition_id'
    filenames : list of str
        absolute file paths, in the same order as ``data``
    """

    def __init__(self, data_dir, data, filenames):
        self.data_dir = data_dir
        self.data = data
        self.filenames = filenames

    def _check_index(self, keys=None):
        """ Given a list of keys check which keys will be used for indexing
        and whether these keys could be used for an index

        Parameters
        ----------
        keys : list
          one or multiple choices among "internal_id", "document_id",
          "rendition_id", "file_path". default=["internal_id"]

        Returns
        -------
        index_cols : {list, str}
          a subset of keys that would be used for an index (a bare string
          when a single column is selected)
        """
        if keys is None:
            keys = ['internal_id']

        # Preference order: internal_id, then the (document_id, rendition_id)
        # pair, then document_id alone (only if unique), then file_path.
        if "internal_id" in keys:
            index_cols = ['internal_id', ]
        elif "document_id" in keys and \
                "document_id" in self.data.columns and \
                "rendition_id" in keys and \
                "rendition_id" in self.data.columns:
            index_cols = ['document_id', 'rendition_id']
        elif "document_id" in keys and \
                "document_id" in self.data.columns:
            if self.data.document_id.is_unique:
                index_cols = ['document_id', ]
            else:
                raise ValueError('document_id cannot be used as an index, since it has duplicates'
                                 ' (and rendition_id has duplicates)')
        elif "file_path" in keys and \
                "file_path" in self.data.columns:
            index_cols = ['file_path']
        else:
            raise ValueError('The query columns {} cannot be used as an index'.format(list(keys)))
        if len(index_cols) == 1:
            index_cols = index_cols[0]

        # make sure we can use the selected columns as an index:
        # set_index(verify_integrity=True) raises on duplicates; the result
        # is deliberately discarded -- this is validation only.
        self.data.set_index(index_cols, verify_integrity=True)
        return index_cols

    def search(self, query, strict=True, drop=True):
        """Search the filenames given by some user query

        Parameters
        ----------
        query : pandas.DataFrame
           a DataFrame with one of the following fields "internal_id",
           ("document_id", "rendition_id"), "document_id", "file_path"
        strict : bool
           raise an error if some documents are not found
        drop : bool
           drop columns not in the dataset

        Returns
        -------
        df : pd.DataFrame
           the response dataframe with fields
           "internal_id", "file_path" and optionally "document_id" and "rendition_id"
        """
        if not isinstance(query, pd.DataFrame):
            # BUGFIX: the '{}' placeholder was previously never filled in
            raise ValueError('The query {} must be a pandas DataFrame'.format(type(query)))
        if not query.shape[0]:
            raise ValueError('Query has zero element!')
        index_cols = self._check_index(query.columns)

        # BUGFIX: operate on a copy so the caller's DataFrame is not
        # mutated by the temporary 'sort_order' column added below.
        query = query.copy()
        query['sort_order'] = query.index.values

        res = self.data.merge(query, on=index_cols, how='inner', suffixes=('', '_query'))
        # make sure we preserve the original order in the query
        res.sort_values(by='sort_order', inplace=True)
        del res['sort_order']
        if res.shape[0] != query.shape[0]:
            # some documents were not found
            msg = ['Query elements not found:']
            for index, row in query.iterrows():
                if row[index_cols] not in self.data[index_cols].values:
                    msg.append(' * {}'.format(row.to_dict()))

            if strict:
                raise NotFound('\n'.join(msg))
            else:
                print('Warning: '+ '\n'.join(msg))

        if drop:
            # ignore all additional columns
            res = res[self.data.columns]
        return res

    def _search_filenames(self, filenames):
        """ A helper function that reproduces the previous behaviour in FeaturesVectorizer"""
        query = pd.DataFrame(filenames, columns=['file_path'])

        res = self.search(query)
        # only the internal ids are needed by the legacy callers
        return res.internal_id.values

    def render_dict(self, res=None, return_file_path=False):
        """Render a pandas dataframe as a list of dicts

        Parameters
        ----------
        res : {pandas.DataFrame, None}
            some dataset with additional data that must contain the 'internal_id' key
        return_file_path : bool
            return the file paths, default: False

        Returns
        -------
        out : list of dicts
            one dict per row, merging the index columns with any extra
            columns provided in ``res``
        """
        if res is not None:
            res = res.set_index('internal_id', drop=False)
        db = self.data.set_index('internal_id', drop=False)
        if not return_file_path:
            base_keys = [key for key in self.data.columns if key != 'file_path']
        else:
            base_keys = list(self.data.columns)
        if res is not None:
            res_keys = [key for key in res if key not in base_keys]
            if not return_file_path and 'file_path' in res_keys:
                res_keys.remove('file_path')
        db = db[base_keys]

        out = []
        if res is not None:
            for index, row in res[res_keys].iterrows():
                row_dict = row.to_dict()
                # pull in the index columns for the same internal_id
                db_sel = db.loc[index]
                row_dict.update(db_sel.to_dict())
                out.append(row_dict)
        else:
            for index, row in db.iterrows():
                row_dict = row.to_dict()
                out.append(row_dict)
        return out

    def render_list(self, res=None, return_file_path=False):
        """Render a pandas dataframe as a dict of lists

        Parameters
        ----------
        res : {pandas.DataFrame, None}
            some dataset with additional data that must contain the 'internal_id' key
        return_file_path : bool
            return the file paths, default: False

        Returns
        -------
        out : dict of lists
            column-oriented view: one list per column name
        """
        if res is not None:
            res = res.set_index('internal_id', drop=False)
        db = self.data.set_index('internal_id', drop=False)
        if not return_file_path:
            base_keys = [key for key in self.data.columns if key != 'file_path']
        else:
            base_keys = list(self.data.columns)
        db = db[base_keys]

        if res is not None:
            res_keys = [key for key in res if key not in base_keys]
            if not return_file_path:
                if 'file_path' in res_keys:
                    res_keys.remove('file_path')
        else:
            res_keys = []

        out = {}
        for key in base_keys + res_keys:
            out[key] = []
        if res is not None:
            for index, row in res[res_keys].iterrows():
                db_sel_dict = db.loc[index].to_dict()
                for key, val in db_sel_dict.items():
                    out[key].append(val)
                for key, val in row.to_dict().items():
                    out[key].append(val)
        else:
            for index, row in db.iterrows():
                row_dict = row.to_dict()
                for key, val in row_dict.items():
                    out[key].append(val)
        return out

    @classmethod
    def from_list(cls, metadata):
        r"""Create a DocumentIndex from a list of dictionaries, for instance

        {
            document_id: 1,
            rendition_id: 4,
            file_path: "c:\dev\1.txt"
        }

        Parameters
        ----------
        metadata : list of dicts
            a list of dictionaries with keys ['file_path', 'document_id', 'rendition_id']
            describing the data ingestion (this overwrites data_dir)

        Returns
        -------
        result : DocumentIndex
            a DocumentIndex object
        """
        # Sort by path so internal_id assignment is deterministic.
        metadata = sorted(metadata, key=lambda x: x['file_path'])
        filenames = [el['file_path'] for el in metadata]

        data_dir = cls._detect_data_dir(filenames)

        if not filenames:  # no files were found
            raise WrongParameter('No files to process were found!')
        filenames_rel = [os.path.relpath(el, data_dir) for el in filenames]

        # modify the metadata list inplace: paths become relative and each
        # entry receives a contiguous internal_id
        for idx, (db_el, file_path) in enumerate(zip(metadata, filenames_rel)):
            db_el['file_path'] = file_path
            db_el['internal_id'] = idx
        db = pd.DataFrame(metadata)

        return cls(data_dir, db, filenames)

    @staticmethod
    def _detect_data_dir(filenames):
        """Return the common parent directory of ``filenames``.

        ``os.path.commonprefix`` operates on characters, not on path
        components, so the prefix may end mid-filename; in that case fall
        back to its parent directory.
        """
        data_dir = os.path.commonprefix(filenames)
        data_dir = os.path.normpath(data_dir)

        if os.path.exists(data_dir):
            return data_dir
        elif os.path.exists(os.path.dirname(data_dir)):
            return os.path.dirname(data_dir)
        else:
            raise IOError('data_dir={} does not exist!'.format(data_dir))

    @classmethod
    def from_folder(cls, data_dir, file_pattern=None, dir_pattern=None):
        """ Create a DocumentIndex from files in data_dir

        Parameters
        ----------
        data_dir : str
            path to the data directory (used only if metadata not provided), default: None
        file_pattern : str, optional
            regexp that file names must match
        dir_pattern : str, optional
            regexp that directory paths must match

        Returns
        -------
        result : DocumentIndex
            a DocumentIndex object
        """
        data_dir = os.path.normpath(data_dir)

        if not os.path.exists(data_dir):
            raise NotFound('data_dir={} does not exist'.format(data_dir))
        filenames = _list_filenames(data_dir, dir_pattern, file_pattern)
        filenames_rel = [os.path.relpath(el, data_dir) for el in filenames]
        db = [{'file_path': file_path, 'internal_id': idx}
              for idx, file_path in enumerate(filenames_rel)]
        db = pd.DataFrame(db)

        return cls(data_dir, db, filenames)
| {
"content_hash": "7a00bc0fe6a0ec5cf8fe7e0b22f3fe96",
"timestamp": "",
"source": "github",
"line_count": 311,
"max_line_length": 98,
"avg_line_length": 33.71382636655949,
"alnum_prop": 0.554411063423939,
"repo_name": "kcompher/FreeDiscovUI",
"id": "4ac920546ad8b11fd346d2f95ab93ed896b5d68e",
"size": "10510",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "freediscovery/ingestion.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "404"
},
{
"name": "Makefile",
"bytes": "598"
},
{
"name": "Nginx",
"bytes": "451"
},
{
"name": "Python",
"bytes": "333007"
},
{
"name": "Shell",
"bytes": "3721"
}
],
"symlink_target": ""
} |
import time
import random
from six.moves import filter
from dpark.utils.log import get_logger
# Window (seconds) after which per-host success/failure records are purged;
# also used as the base interval when masking a host after repeated failures
# of the same task (see HostStatus.should_forbit).
PURGE_ELAPSED = 60 * 5
# NOTE(review): appears unused within this module -- confirm before removing.
FAILED_TIMES = 2

logger = get_logger(__name__)
class HostStatus:
    """Tracks recent task successes and failures on one host.

    Records older than ``purge_elapsed`` seconds are dropped whenever the
    statistics are recomputed, so all rates are over a sliding window.
    """
    def __init__(self, hostname, purge_elapsed=PURGE_ELAPSED):
        self.hostname = hostname
        self.failed_log = []        # timestamps of recent failures
        self.succeeded_log = []     # timestamps of recent successes
        self.failed_cnt = 0
        self.succeed_cnt = 0
        self.start_point = 0        # earliest timestamp still in the logs
        self.failed_tasks = {}      # task_id -> list of failure timestamps
        self.purge_elapsed = purge_elapsed

    def task_succeed(self, task_id):
        # Record the success and clear any failure history for this task.
        self.succeeded_log.append(time.time())
        self.failed_tasks.pop(task_id, None)

    def task_failed(self, task_id):
        now = time.time()
        self.failed_log.append(now)
        self.failed_tasks.setdefault(task_id, []).append(now)

    def purge_old(self):
        """Drop log entries older than the purge window and refresh counters."""
        cutoff = time.time() - self.purge_elapsed
        self.failed_log = [ts for ts in self.failed_log if ts > cutoff]
        self.succeeded_log = [ts for ts in self.succeeded_log if ts > cutoff]
        self.failed_cnt = len(self.failed_log)
        self.succeed_cnt = len(self.succeeded_log)
        self.start_point = self._begin_log_ts()

    def recent_succeed_rate(self):
        """Fraction of recent runs that succeeded; 1 when there is no data."""
        self.purge_old()
        total = self.succeed_cnt + self.failed_cnt
        if total < 1:
            return 1
        return self.succeed_cnt * 1.0 / total

    def failed_on(self, task_id):
        return task_id in self.failed_tasks

    def should_forbit(self, task_id):
        """Whether this task is still masked on this host.

        The mask interval doubles with every recorded failure of the task
        (exponential backoff based on the purge window).
        """
        self.purge_old()
        failures = self.failed_tasks.get(task_id)
        if not failures:
            return False
        masked_for = self.purge_elapsed * pow(2, len(failures))
        return time.time() - failures[-1] < masked_for

    def _begin_log_ts(self):
        # Earliest timestamp still present in either log (now if both empty).
        candidates = [time.time()]
        if self.failed_log:
            candidates.append(self.failed_log[0])
        if self.succeeded_log:
            candidates.append(self.succeeded_log[0])
        return min(candidates)

    def total_recent_task_run(self):
        return self.succeed_cnt + self.failed_cnt

    def erase_failed_task(self, task_id):
        self.failed_tasks.pop(task_id, None)
class TaskHostManager:
    """Keeps per-host statistics and chooses hosts for new task attempts."""
    def __init__(self):
        self.host_dict = {}               # hostname -> HostStatus
        self.task_host_failed_dict = {}   # task_id -> set of failing hosts

    def register_host(self, hostname, purge_elapsed=PURGE_ELAPSED):
        if hostname not in self.host_dict:
            self.host_dict[hostname] = HostStatus(hostname, purge_elapsed=purge_elapsed)

    def task_failed_on_host(self, task_id, host):
        status = self.host_dict.get(host)
        if status is None:
            return False
        return status.failed_on(task_id)

    def offer_choice(self, tid, host_offers, blacklist):
        """Pick an offer for task *tid*, preferring hosts without failures.

        Hosts are split into: ordinary candidates, hosts where the task
        failed before, and masked/blacklisted hosts.  A weighted random
        choice (by recent success rate) is made within the best non-empty
        group; returns (None, None) when every host is masked.
        """
        usable = []        # no failure history for this task
        once_failed = []   # the task failed here before
        masked = []        # blacklisted or temporarily forbidden
        for host in host_offers:
            status = self.host_dict[host]
            if host in blacklist or status.should_forbit(tid):
                masked.append(host)
            elif self.task_failed_on_host(tid, host):
                once_failed.append((host, status.recent_succeed_rate()))
            else:
                usable.append((host, status.recent_succeed_rate()))
        logger.debug('split the offer in to three parts \n '
                     'ordinary %s \nonce failed %s blacklist host %s',
                     str(usable), str(once_failed), str(masked))
        if usable:
            return host_offers[self._random_weighted_choice(usable)]
        if once_failed:
            return host_offers[self._random_weighted_choice(once_failed)]
        return None, None

    @staticmethod
    def _random_weighted_choice(w_list):
        # Roulette-wheel selection over (host, weight) pairs.
        total = sum(weight for _host, weight in w_list)
        threshold = random.uniform(0, total)
        running = 0
        for host, weight in w_list:
            if running + weight >= threshold:
                return host
            running += weight
        assert False, 'Should not get here'

    def task_succeed(self, task_id, hostname, reason):
        logger.debug('task %s %s', task_id, str(reason))
        if hostname in self.host_dict:
            self.host_dict[hostname].task_succeed(task_id)
        if task_id in self.task_host_failed_dict:
            # The task finally succeeded: wipe its failure history everywhere.
            for host in self.task_host_failed_dict[task_id]:
                self.host_dict[host].erase_failed_task(task_id)
            logger.debug('the failed hosts %s for task %s',
                         str(self.task_host_failed_dict[task_id]), task_id)
            del self.task_host_failed_dict[task_id]

    def task_failed(self, task_id, hostname, reason):
        logger.debug('task %s failed with message %s', task_id, str(reason))
        if hostname in self.host_dict:
            self.host_dict[hostname].task_failed(task_id)
        self.task_host_failed_dict.setdefault(task_id, set()).add(hostname)

    def is_unhealthy_host(self, host):
        """Heuristic: a host that ran many recent tasks over a long enough
        window with a very low success rate is considered unhealthy."""
        status = self.host_dict.get(host)
        if status is None:
            return False
        succeed_rate = status.recent_succeed_rate()
        duration = time.time() - status.start_point
        total_tasks = status.total_recent_task_run()
        if duration > 30 and total_tasks > 20 and succeed_rate < 0.1:
            logger.debug('the host %s will be judge unhealthy for '
                         'succeed rate %.1f%% with %d tasks in '
                         'duration more than %.3fs',
                         host, succeed_rate, total_tasks, duration)
            return True
| {
"content_hash": "9444631945ea5f718a9e06cf9528d046",
"timestamp": "",
"source": "github",
"line_count": 160,
"max_line_length": 88,
"avg_line_length": 37.91875,
"alnum_prop": 0.5801879017636393,
"repo_name": "douban/dpark",
"id": "1be697631c1623a9f1e5edac20555bd087b0af37",
"size": "6067",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dpark/hostatus.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "12283"
},
{
"name": "CSS",
"bytes": "2638"
},
{
"name": "Dockerfile",
"bytes": "1378"
},
{
"name": "HTML",
"bytes": "9696"
},
{
"name": "JavaScript",
"bytes": "25347"
},
{
"name": "Python",
"bytes": "672082"
},
{
"name": "Shell",
"bytes": "1865"
}
],
"symlink_target": ""
} |
"""Render the blog indexes."""
from __future__ import unicode_literals
from collections import defaultdict
import os
try:
from urlparse import urljoin
except ImportError:
from urllib.parse import urljoin # NOQA
from nikola.plugin_categories import Task
from nikola import utils
class Indexes(Task):
    """Render the blog indexes.

    Produces three families of output:

    * the paginated main blog index,
    * per-section indexes (paginated indexes or plain post lists depending
      on POSTS_SECTION_ARE_INDEXES),
    * ``index.html`` listings for story/page folders when STORY_INDEX is set.

    Also registers the path handlers used to build links to these pages.
    """
    name = "render_indexes"
    def set_site(self, site):
        """Set Nikola site."""
        # Register URL resolvers (HTML and Atom variants) so other plugins
        # can link to the generated index pages.
        site.register_path_handler('index', self.index_path)
        site.register_path_handler('index_atom', self.index_atom_path)
        site.register_path_handler('section_index', self.index_section_path)
        site.register_path_handler('section_index_atom', self.index_section_atom_path)
        return super(Indexes, self).set_site(site)
    def gen_tasks(self):
        """Render the blog indexes."""
        self.site.scan_posts()
        yield self.group_task()
        # Configuration snapshot; also used for task up-to-date checks.
        kw = {
            "translations": self.site.config['TRANSLATIONS'],
            "messages": self.site.MESSAGES,
            "output_folder": self.site.config['OUTPUT_FOLDER'],
            "filters": self.site.config['FILTERS'],
            "index_file": self.site.config['INDEX_FILE'],
            "show_untranslated_posts": self.site.config['SHOW_UNTRANSLATED_POSTS'],
            "index_display_post_count": self.site.config['INDEX_DISPLAY_POST_COUNT'],
            "indexes_title": self.site.config['INDEXES_TITLE'],
            "strip_indexes": self.site.config['STRIP_INDEXES'],
            "blog_title": self.site.config["BLOG_TITLE"],
            "generate_atom": self.site.config["GENERATE_ATOM"],
        }
        template_name = "index.tmpl"
        posts = self.site.posts
        # Page counts per language (and per section), consumed later by the
        # path handlers below.
        self.number_of_pages = dict()
        self.number_of_pages_section = dict()
        for lang in kw["translations"]:
            # Link/path factories handed to the generic index renderer.
            def page_link(i, displayed_i, num_pages, force_addition, extension=None):
                feed = "_atom" if extension == ".atom" else ""
                return utils.adjust_name_for_index_link(self.site.link("index" + feed, None, lang), i, displayed_i,
                                                        lang, self.site, force_addition, extension)
            def page_path(i, displayed_i, num_pages, force_addition, extension=None):
                feed = "_atom" if extension == ".atom" else ""
                return utils.adjust_name_for_index_path(self.site.path("index" + feed, None, lang), i, displayed_i,
                                                        lang, self.site, force_addition, extension)
            if kw["show_untranslated_posts"]:
                filtered_posts = posts
            else:
                filtered_posts = [x for x in posts if x.is_translation_available(lang)]
            indexes_title = kw['indexes_title'](lang) or kw['blog_title'](lang)
            # Ceiling division: number of index pages for this language.
            self.number_of_pages[lang] = (len(filtered_posts) + kw['index_display_post_count'] - 1) // kw['index_display_post_count']
            context = {}
            context["pagekind"] = ["main_index", "index"]
            yield self.site.generic_index_renderer(lang, filtered_posts, indexes_title, template_name, context, kw, 'render_indexes', page_link, page_path)
            if self.site.config['POSTS_SECTIONS']:
                kw["posts_section_are_indexes"] = self.site.config['POSTS_SECTION_ARE_INDEXES']
                index_len = len(kw['index_file'])
                # Bucket the posts by section slug.
                groups = defaultdict(list)
                for p in filtered_posts:
                    groups[p.section_slug(lang)].append(p)
                # don't build sections when there is only one, aka. default setups
                if not len(groups.items()) > 1:
                    continue
                for section_slug, post_list in groups.items():
                    if lang not in self.number_of_pages_section:
                        self.number_of_pages_section[lang] = dict()
                    self.number_of_pages_section[lang][section_slug] = (len(post_list) + kw['index_display_post_count'] - 1) // kw['index_display_post_count']
                    # NOTE(review): cat_link/cat_path close over the loop
                    # variable section_slug; this is safe only if the
                    # renderer uses them before the next iteration -- confirm
                    # they are not stored for deferred invocation.
                    def cat_link(i, displayed_i, num_pages, force_addition, extension=None):
                        feed = "_atom" if extension == ".atom" else ""
                        return utils.adjust_name_for_index_link(self.site.link("section_index" + feed, section_slug, lang), i, displayed_i,
                                                                lang, self.site, force_addition, extension)
                    def cat_path(i, displayed_i, num_pages, force_addition, extension=None):
                        feed = "_atom" if extension == ".atom" else ""
                        return utils.adjust_name_for_index_path(self.site.path("section_index" + feed, section_slug, lang), i, displayed_i,
                                                                lang, self.site, force_addition, extension)
                    context = {}
                    short_destination = os.path.join(section_slug, kw['index_file'])
                    # Permalinks always use forward slashes.
                    link = short_destination.replace('\\', '/')
                    if kw['strip_indexes'] and link[-(1 + index_len):] == '/' + kw['index_file']:
                        link = link[:-index_len]
                    context["permalink"] = link
                    context["pagekind"] = ["section_page"]
                    context["description"] = self.site.config['POSTS_SECTION_DESCRIPTIONS'](lang)[section_slug] if section_slug in self.site.config['POSTS_SECTION_DESCRIPTIONS'](lang) else ""
                    if kw["posts_section_are_indexes"]:
                        context["pagekind"].append("index")
                        kw["posts_section_title"] = self.site.config['POSTS_SECTION_TITLE'](lang)
                        # POSTS_SECTION_TITLE may be a per-section dict or a
                        # single template string; fall back to the section name.
                        section_title = None
                        if type(kw["posts_section_title"]) is dict:
                            if section_slug in kw["posts_section_title"]:
                                section_title = kw["posts_section_title"][section_slug]
                        elif type(kw["posts_section_title"]) is str:
                            section_title = kw["posts_section_title"]
                        if not section_title:
                            section_title = post_list[0].section_name(lang)
                        section_title = section_title.format(name=post_list[0].section_name(lang))
                        task = self.site.generic_index_renderer(lang, post_list, section_title, "sectionindex.tmpl", context, kw, self.name, cat_link, cat_path)
                    else:
                        context["pagekind"].append("list")
                        output_name = os.path.join(kw['output_folder'], section_slug, kw['index_file'])
                        task = self.site.generic_post_list_renderer(lang, post_list, output_name, "list.tmpl", kw['filters'], context)
                    # Applied to both branches above.
                    task['uptodate'] = [utils.config_changed(kw, 'nikola.plugins.task.indexes')]
                    task['basename'] = self.name
                    yield task
        if not self.site.config["STORY_INDEX"]:
            return
        # Second phase: folder index listings for stories/pages.
        kw = {
            "translations": self.site.config['TRANSLATIONS'],
            "post_pages": self.site.config["post_pages"],
            "output_folder": self.site.config['OUTPUT_FOLDER'],
            "filters": self.site.config['FILTERS'],
            "index_file": self.site.config['INDEX_FILE'],
            "strip_indexes": self.site.config['STRIP_INDEXES'],
        }
        template_name = "list.tmpl"
        index_len = len(kw['index_file'])
        for lang in kw["translations"]:
            # Need to group by folder to avoid duplicated tasks (Issue #758)
            # Group all pages by path prefix
            groups = defaultdict(list)
            for p in self.site.timeline:
                if not p.is_post:
                    destpath = p.destination_path(lang)
                    # strip a trailing '/<index_file>' so pages in the same
                    # folder share one group
                    if destpath[-(1 + index_len):] == '/' + kw['index_file']:
                        destpath = destpath[:-(1 + index_len)]
                    dirname = os.path.dirname(destpath)
                    groups[dirname].append(p)
            for dirname, post_list in groups.items():
                context = {}
                context["items"] = []
                should_render = True
                output_name = os.path.join(kw['output_folder'], dirname, kw['index_file'])
                short_destination = os.path.join(dirname, kw['index_file'])
                link = short_destination.replace('\\', '/')
                if kw['strip_indexes'] and link[-(1 + index_len):] == '/' + kw['index_file']:
                    link = link[:-index_len]
                context["permalink"] = link
                context["pagekind"] = ["list"]
                if dirname == "/":
                    context["pagekind"].append("front_page")
                for post in post_list:
                    # If there is an index.html pending to be created from
                    # a story, do not generate the STORY_INDEX
                    if post.destination_path(lang) == short_destination:
                        should_render = False
                    else:
                        context["items"].append((post.title(lang),
                                                 post.permalink(lang)))
                if should_render:
                    task = self.site.generic_post_list_renderer(lang, post_list,
                                                                output_name,
                                                                template_name,
                                                                kw['filters'],
                                                                context)
                    task['uptodate'] = task['uptodate'] + [utils.config_changed(kw, 'nikola.plugins.task.indexes')]
                    task['basename'] = self.name
                    yield task
    def index_path(self, name, lang, is_feed=False):
        """Return path to an index."""
        extension = None
        if is_feed:
            extension = ".atom"
            index_file = os.path.splitext(self.site.config['INDEX_FILE'])[0] + extension
        else:
            index_file = self.site.config['INDEX_FILE']
        return utils.adjust_name_for_index_path_list([_f for _f in [self.site.config['TRANSLATIONS'][lang],
                                                                    self.site.config['INDEX_PATH'],
                                                                    index_file] if _f],
                                                     name,
                                                     utils.get_displayed_page_number(name, self.number_of_pages[lang], self.site),
                                                     lang,
                                                     self.site,
                                                     extension=extension)
    def index_section_path(self, name, lang, is_feed=False):
        """Return path to an index for sections."""
        extension = None
        if is_feed:
            extension = ".atom"
            index_file = os.path.splitext(self.site.config['INDEX_FILE'])[0] + extension
        else:
            index_file = self.site.config['INDEX_FILE']
        return utils.adjust_name_for_index_path_list([_f for _f in [self.site.config['TRANSLATIONS'][lang],
                                                                    name,
                                                                    index_file] if _f],
                                                     None,
                                                     utils.get_displayed_page_number(None, self.number_of_pages_section[lang][name], self.site),
                                                     lang,
                                                     self.site,
                                                     extension=extension)
    def index_atom_path(self, name, lang):
        """Return path to an Atom index."""
        return self.index_path(name, lang, is_feed=True)
    def index_section_atom_path(self, name, lang):
        """Return path to an Atom index for sections."""
        return self.index_section_path(name, lang, is_feed=True)
| {
"content_hash": "8f05173d89620dc3726f53ba207e30cf",
"timestamp": "",
"source": "github",
"line_count": 234,
"max_line_length": 191,
"avg_line_length": 53.23504273504273,
"alnum_prop": 0.49120976157983465,
"repo_name": "jjconti/nikola",
"id": "616670f5f53488a716f5dbf8a028e9db6f2cd625",
"size": "13599",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nikola/plugins/task/indexes.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "18265"
},
{
"name": "JavaScript",
"bytes": "19228"
},
{
"name": "Python",
"bytes": "967917"
},
{
"name": "Shell",
"bytes": "3057"
},
{
"name": "XSLT",
"bytes": "3527"
}
],
"symlink_target": ""
} |
"""
Given a digit string, return all possible letter combinations that the number could represent.
A mapping of digit to letters (just like on the telephone buttons) is given below.
Input:Digit string "23"
Output: ["ad", "ae", "af", "bd", "be", "bf", "cd", "ce", "cf"].
Note:
Although the above answer is in lexicographical order, your answer could be in any order you want.
"""
class Solution:
    # @return a list of strings, [s1, s2]
    def letterCombinations(self, digits):
        """Return every letter string the digit sequence could spell.

        Fix: an empty digit string now yields [] (the original returned
        ['']), matching the expected output for LeetCode 17.
        """
        self.digit_map = {'2': 'abc',
                          '3': 'def',
                          '4': 'ghi',
                          '5': 'jkl',
                          '6': 'mno',
                          '7': 'pqrs',
                          '8': 'tuv',
                          '9': 'wxyz'
                          }
        if not digits:
            return []
        return self.letterCombinations_2(digits)

    def letterCombinations_1(self, digits):
        """Iterative variant: grow every partial combination one digit at a time."""
        ret = ['']
        for digit in digits:
            ret = [comb + ch for comb in ret for ch in self.digit_map[digit]]
        return ret

    # Recursion way to do this
    def letterCombinations_2(self, digits):
        """Recursive (DFS) variant; delegates to letterCombinations_rec."""
        ret = []
        self.letterCombinations_rec(0, digits, '', ret)
        return ret

    def letterCombinations_rec(self, i, digits, res, ret):
        """Append to ``ret`` all completions of the partial string ``res``
        using digits[i:]."""
        if i == len(digits):
            # One complete combination built (strings are immutable, so no
            # copy is needed).
            ret.append(res)
            return
        for char in self.digit_map[digits[i]]:
            self.letterCombinations_rec(i + 1, digits, res + char, ret)
| {
"content_hash": "a4afacef8f46fd4e77269eb47910ab42",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 98,
"avg_line_length": 32.40816326530612,
"alnum_prop": 0.5,
"repo_name": "cyandterry/Python-Study",
"id": "12f5be5839d3525f49e368234d945030fc6188cc",
"size": "1588",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Ninja/Leetcode/17_Letter_Combinations_of_a_Phone_Number.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1211176"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import argparse
import numpy as np
import mdtraj as md
import multiprocessing as mp
import AdaptivePELE.analysis.trajectory_processing as tp
# Python 2/3 compatibility: ``basestring`` does not exist on Python 3, so
# alias it to ``str`` for the isinstance() check performed in radifilter.
try:
    basestring
except NameError:
    basestring = str
def parseArguments():
    """Parse the command line options for the trajectory filter.

    Returns
    -------
    tuple
        (trajectories, topology, resname, radi, center_point,
        minimum_length, processors, filter_larger, append_results),
        in the order the ``__main__`` block unpacks them.

    Notes
    -----
    BUGFIX: ``--minimum_length`` was parsed but missing from the return
    statement, so the 9-name unpacking in ``__main__`` raised ValueError.
    It is now returned in the position the caller expects.
    """
    desc = "Program that filters the trajectories outside or inside an sphere defined by a radi and a point"
    parser = argparse.ArgumentParser(description=desc)
    parser.add_argument("--append_results", action="store_true", help="Flag to set whether all the filtered points should be merged together")
    parser.add_argument("--filter_larger", action="store_false", help="Flag to set whether we should filter the points outside or inside the radi (default, ie not specified, is outside)")
    parser.add_argument("--processors", type=int, default=4, help="Number of cpus to use")
    parser.add_argument("--minimum_length", type=int, default=300, help="Minimum steps for the filtered traj to be considered valid")
    # BUGFIX: help text was a copy-paste of the --processors help.
    parser.add_argument("radi", type=int, default=2, help="Radius of the sphere used for filtering")
    parser.add_argument("--center_point", type=int, nargs='+', help="Point to filter", required=True)
    parser.add_argument("resname", help="Ligand resname")
    parser.add_argument("topology", help="Glob string for the topology")
    parser.add_argument("trajectories", help="Glob string for the trajectories")
    args = parser.parse_args()
    return (args.trajectories, args.topology, args.resname, args.radi,
            args.center_point, args.minimum_length, args.processors,
            args.filter_larger, args.append_results)
def radifilter(num, traj, top, ligand, radi, center_point, minimum_length, Larger, Append_Results):
    """Extract the stretches of trajectory ``num`` whose ligand centre of
    mass lies inside (or, with ``Larger``, outside) a sphere of radius
    ``radi`` around ``center_point``, and write them out as .xtc files.

    ``center_point`` may be a coordinate triplet, or a residue name string
    in which case the sphere follows that residue's centre of mass
    frame-by-frame.  With ``Append_Results`` all qualifying intervals are
    concatenated into a single output trajectory; otherwise each interval
    longer than ``minimum_length`` is saved separately.
    """
    valid_frames = []       # list of (first_frame, last_frame) inclusive intervals
    NOT_CORD = False        # True when center_point is a residue name, not coordinates
    interval_start = None
    interval_final = None
    final_traj = None       # accumulator used only in the Append_Results branch
    off_count = 0
    md_traj = md.load(traj, top=top)
    if isinstance(center_point, basestring):
        # The sphere centre is the per-frame centre of mass of this residue.
        NOT_CORD = True
        DUM_traj = md_traj.atom_slice(tp.extract_ligand_indexes(md_traj, center_point))
        # NOTE(review): the *10 presumably converts mdtraj nm to Angstrom,
        # implying radi is given in Angstrom -- confirm.
        DUM_center = md.compute_center_of_mass(DUM_traj) * 10
    else:
        center_point = np.array(center_point)
    ligand_traj = md_traj.atom_slice(tp.extract_ligand_indexes(md_traj, ligand))
    center_traj = md.compute_center_of_mass(ligand_traj) * 10
    # Sweep the frames, accumulating maximal contiguous in-sphere intervals.
    for i, frame in enumerate(center_traj):
        if NOT_CORD:
            center_point = DUM_center[i]
        distance = np.linalg.norm(frame-center_point)
        if (distance <= radi and not Larger) or (distance >= radi and Larger):
            if interval_start is None:
                interval_start = i
            interval_final = i
        elif interval_start is not None:
            # Interval just ended; record it and reset.
            valid_frames.append((interval_start, interval_final))
            interval_start = None
            interval_final = None
    # Close an interval that reaches the end of the trajectory.
    if interval_start is not None:
        valid_frames.append((interval_start, interval_final))
    if len(valid_frames) == 0:
        print("traj %s does not have any frame inside the defined sphere" % num)
    for interval in valid_frames:
        if not Append_Results:
            # Save each sufficiently long interval as its own file; a
            # numeric suffix disambiguates multiple intervals per traj.
            if (interval[1] - interval[0]) < minimum_length:
                print("Interval from %s %s of traj %s not long enough" % (interval[0], interval[1], num))
            else:
                if off_count:
                    name = "%s.%s" % (num, off_count)
                else:
                    name = num
                md_traj[interval[0]:interval[1]+1].save_xtc("trajectory_radi_%s_filtered_%s.xtc" % (radi, name))
                print("trajectory_radi_%s_filtered_%s.xtc with length %s" % (radi, name, (interval[1] - interval[0])))
                off_count += 1
        else:
            # Concatenate every interval; minimum_length is not applied here.
            if off_count:
                print("%s.%s" % (num, off_count))
            if final_traj:
                final_traj = final_traj + md_traj[interval[0]:interval[1]+1]
            else:
                final_traj = md_traj[interval[0]:interval[1]+1]
            off_count += 1
    if Append_Results:
        # NOTE(review): if no interval qualified, final_traj is still None
        # here and save_xtc will raise AttributeError -- confirm intended.
        final_traj.save_xtc("trajectory_radi_%s_filtered_%s.xtc" % (radi, num))
def main(trajectory_template, topology, ligand, radi, center_point, minimum_length, processors, Larger, Append_Results):
    """Dispatch one radifilter job per trajectory across a worker pool and
    wait for all of them to finish."""
    pool = mp.Pool(processors)
    jobs = []
    trajs = tp.load_trajs(trajectory_template, topology, PELE_order=False)
    for index, (traj_file, top_file) in enumerate(trajs):
        print("Procesing %s num %s" % (traj_file, index))
        job_args = (index, traj_file, top_file, ligand, radi, center_point,
                    minimum_length, Larger, Append_Results)
        jobs.append(pool.apply_async(radifilter, args=job_args))
    # Block on each result so worker exceptions are re-raised here.
    for job in jobs:
        job.get()
    print("FINISHED")
if __name__ == "__main__":
    # parseArguments returns its values in the exact positional order that
    # main() expects, so unpack them straight through.
    main(*parseArguments())
| {
"content_hash": "1ccf3d54eac6c5d75544c71576ef2288",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 187,
"avg_line_length": 49.17171717171717,
"alnum_prop": 0.6454396055875102,
"repo_name": "AdaptivePELE/AdaptivePELE",
"id": "f374375a339f858350da6753da5b101479de3f77",
"size": "4868",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "AdaptivePELE/analysis/filter_by_radius.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "5090"
},
{
"name": "MATLAB",
"bytes": "39265"
},
{
"name": "Makefile",
"bytes": "5601"
},
{
"name": "Python",
"bytes": "1078513"
},
{
"name": "R",
"bytes": "13841"
},
{
"name": "Shell",
"bytes": "97160"
}
],
"symlink_target": ""
} |
__source__ = 'https://leetcode.com/problems/random-point-in-non-overlapping-rectangles/'
# Time: O(N)
# Space: O(N)
#
# Description: Leetcode # 497. Random Point in Non-overlapping Rectangles
#
# Given a list of non-overlapping axis-aligned rectangles rects,
# write a function pick which randomly and uniformly picks an integer point in the space covered by the rectangles.
#
# Note:
#
# An integer point is a point that has integer coordinates.
# A point on the perimeter of a rectangle is included in the space covered by the rectangles.
# ith rectangle = rects[i] = [x1,y1,x2,y2], where [x1, y1] are the integer coordinates of the bottom-left corner,
# and [x2, y2] are the integer coordinates of the top-right corner.
# length and width of each rectangle does not exceed 2000.
# 1 <= rects.length <= 100
# pick return a point as an array of integer coordinates [p_x, p_y]
# pick is called at most 10000 times.
#
# Example 1:
#
# Input:
# ["Solution","pick","pick","pick"]
# [[[[1,1,5,5]]],[],[],[]]
# Output:
# [null,[4,1],[4,1],[3,3]]
#
# Example 2:
#
# Input:
# ["Solution","pick","pick","pick","pick","pick"]
# [[[[-2,-2,-1,-1],[1,0,3,0]]],[],[],[],[],[]]
# Output:
# [null,[-1,-2],[2,0],[-2,-1],[3,0],[-2,-2]]
#
# Explanation of Input Syntax:
#
# The input is two lists: the subroutines called and their arguments. Solution's constructor has one argument,
# the array of rectangles rects. pick has no arguments.
# Arguments are always wrapped with a list, even if there aren't any.
#
import bisect
import random
import unittest
class Solution:
    """Uniformly pick an integer point from non-overlapping rectangles.

    Mirrors the prefix-sum + binary-search approach of the reference
    solution: O(N) preprocessing, O(log N) per pick.

    rects: list of [x1, y1, x2, y2] with inclusive integer corners,
    bottom-left (x1, y1) and top-right (x2, y2).
    """

    def __init__(self, rects):
        self.rects = rects
        self.prefix = []  # cumulative count of integer points per rectangle
        total = 0
        for x1, y1, x2, y2 in rects:
            # +1 on each side: corners/perimeter points are included.
            total += (x2 - x1 + 1) * (y2 - y1 + 1)
            self.prefix.append(total)

    def pick(self):
        """Return [x, y], uniform over all covered integer points."""
        target = random.randrange(self.prefix[-1])
        # First rectangle whose cumulative count exceeds target owns it.
        i = bisect.bisect_right(self.prefix, target)
        x1, y1, x2, y2 = self.rects[i]
        width = x2 - x1 + 1
        height = y2 - y1 + 1
        offset = target - (self.prefix[i] - width * height)
        # Row-major mapping of the local offset inside rectangle i.
        return [x1 + offset % width, y1 + offset // width]
class TestMethods(unittest.TestCase):
    """Placeholder local test; extend once Solution is implemented."""

    def test_Local(self):
        # Trivial sanity assertion so the suite always has one passing case.
        self.assertEqual(1, 1)
# Allow running this module's tests directly from the command line.
if __name__ == '__main__':
    unittest.main()
Java = '''
# Thought: https://leetcode.com/problems/random-point-in-non-overlapping-rectangles/solution/
#
Approach 1: Prefix Sum and Binary Search
Complexity Analysis
Time Complexity: O(N) preprocessing. O(log(N)) pick.
Space Complexity: O(N)
# 132ms 87.74%
class Solution {
int[][] rects;
List<Integer> psum = new ArrayList<>();
int tot = 0;
Random rand = new Random();
public Solution(int[][] rects) {
this.rects = rects;
for (int[] x : rects) {
//number of points for each rectangle
//for ex -2 ~-1, there are 2 points can be chose
tot += (x[2] - x[0] + 1) * (x[3] - x[1] + 1);
psum.add(tot);
}
}
public int[] pick() {
int targ = rand.nextInt(tot);
int lo = 0;
int hi = rects.length - 1;
//find the closest number of target, pick the ceiling one
while (lo != hi) {
int mid = lo + (hi - lo) / 2;
if (targ >= psum.get(mid)) lo = mid + 1;
else hi = mid;
}
int[] x = rects[lo];
int width = x[2] - x[0] + 1;
int height = x[3] - x[1] + 1;
int base = psum.get(lo) - width * height;
//int base = lo-1 >=0 ? psum.get(lo-1) : 0;
//can use random method here, but use %&/ is more efficient
return new int[]{x[0] + (targ - base) % width, x[1] + (targ - base) / width};
}
}
/**
* Your Solution object will be instantiated and called as such:
* Solution obj = new Solution(rects);
* int[] param_1 = obj.pick();
*/
'''
| {
"content_hash": "ae9827a9077fb64ce5b2d7bdd2f05b1a",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 117,
"avg_line_length": 30.574074074074073,
"alnum_prop": 0.5941853422168383,
"repo_name": "JulyKikuAkita/PythonPrac",
"id": "81ea31410669c1492102b488c3ca3a49cdf3bf5b",
"size": "3302",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cs15211/RandomPointinNon-overlappingRectangles.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "DIGITAL Command Language",
"bytes": "191608"
},
{
"name": "HTML",
"bytes": "647778"
},
{
"name": "Python",
"bytes": "5429558"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, print_function
import os.path
from unittest.case import expectedFailure
from commoncode.testcase import FileBasedTesting
from cluecode_assert_utils import check_detection
"""
This test suite is based on a rather large subset of Android ICS, providing a
rather diversified sample of a typical Linux-based user space environment.
"""
class TestCopyright(FileBasedTesting):
test_data_dir = os.path.join(os.path.dirname(__file__), 'data')
def test_ics_android_mock_android_mk(self):
test_file = self.get_test_loc('ics/android-mock/Android.mk')
expected = [
u'Copyright (c) 2010 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_android_mock_notice(self):
test_file = self.get_test_loc('ics/android-mock/NOTICE')
expected = [
u'Copyright (c) 2005-2008, The Android Open Source Project',
]
check_detection(expected, test_file)
    def test_ics_android_mock_regenerate_from_source_sh(self):
        # Expect exactly one AOSP statement in the regeneration shell script.
        test_file = self.get_test_loc('ics/android-mock/regenerate_from_source.sh')
        expected = [
            u'Copyright (c) 2011 The Android Open Source Project.',
        ]
        check_detection(expected, test_file)
def test_ics_android_mock_livetests_com_google_android_testing_mocking_test_androidmanifest_xml(self):
test_file = self.get_test_loc('ics/android-mock-livetests-com-google-android-testing-mocking-test/AndroidManifest.xml')
expected = [
u'Copyright 2010 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_android_mock_src_com_google_android_testing_mocking_androidmock_java(self):
test_file = self.get_test_loc('ics/android-mock-src-com-google-android-testing-mocking/AndroidMock.java')
expected = [
u'Copyright 2010 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_android_mock_src_com_google_android_testing_mocking_generatedmockjar_readme(self):
test_file = self.get_test_loc('ics/android-mock-src-com-google-android-testing-mocking/GeneratedMockJar.readme')
expected = [
u'Copyright 2010 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_antlr_android_mk(self):
test_file = self.get_test_loc('ics/antlr/Android.mk')
expected = [
u'Copyright (c) 2011 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_antlr_src_org_antlr_runtime_antlrfilestream_java(self):
test_file = self.get_test_loc('ics/antlr-src-org-antlr-runtime/ANTLRFileStream.java')
expected = [
u'Copyright (c) 2005-2009 Terence Parr',
]
check_detection(expected, test_file)
def test_ics_apache_harmony_notice(self):
test_file = self.get_test_loc('ics/apache-harmony/NOTICE')
expected = [
u'Copyright 2001-2004 The Apache Software Foundation.',
u'Copyright 2001-2006 The Apache Software Foundation.',
u'Copyright 2003-2004 The Apache Software Foundation.',
u'Copyright 2004 The Apache Software Foundation.',
]
check_detection(expected, test_file)
def test_ics_apache_http_cleanspec_mk(self):
test_file = self.get_test_loc('ics/apache-http/CleanSpec.mk')
expected = [
u'Copyright (c) 2007 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_apache_http_thirdpartyproject_prop(self):
test_file = self.get_test_loc('ics/apache-http/ThirdPartyProject.prop')
expected = [
u'Copyright 2010 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_apache_http_src_org_apache_commons_codec_binarydecoder_java(self):
test_file = self.get_test_loc('ics/apache-http-src-org-apache-commons-codec/BinaryDecoder.java')
expected = [
u'Copyright 2001-2004 The Apache Software Foundation.',
]
check_detection(expected, test_file)
def test_ics_apache_http_src_org_apache_commons_codec_overview_html(self):
test_file = self.get_test_loc('ics/apache-http-src-org-apache-commons-codec/overview.html')
expected = [
u'Copyright 2003-2004 The Apache Software Foundation.',
]
check_detection(expected, test_file)
def test_ics_apache_http_src_org_apache_commons_logging_logfactory_java(self):
test_file = self.get_test_loc('ics/apache-http-src-org-apache-commons-logging/LogFactory.java')
expected = [
u'Copyright 2001-2006 The Apache Software Foundation.',
]
check_detection(expected, test_file)
def test_ics_apache_http_src_org_apache_commons_logging_package_html(self):
test_file = self.get_test_loc('ics/apache-http-src-org-apache-commons-logging/package.html')
expected = [
u'Copyright 2001-2004 The Apache Software Foundation.',
]
check_detection(expected, test_file)
def test_ics_apache_http_src_org_apache_commons_logging_impl_weakhashtable_java(self):
test_file = self.get_test_loc('ics/apache-http-src-org-apache-commons-logging-impl/WeakHashtable.java')
expected = [
u'Copyright 2004 The Apache Software Foundation.',
]
check_detection(expected, test_file)
def test_ics_apache_xml_notice(self):
test_file = self.get_test_loc('ics/apache-xml/NOTICE')
expected = [
u'Copyright 1999-2006 The Apache Software Foundation',
u'Copyright 1999-2006 The Apache Software Foundation',
u'copyright (c) 1999-2002, Lotus Development Corporation., http://www.lotus.com.',
u'copyright (c) 2001-2002, Sun Microsystems., http://www.sun.com.',
u'copyright (c) 2003, IBM Corporation., http://www.ibm.com.',
u'Copyright 1999-2006 The Apache Software Foundation',
u'copyright (c) 1999, IBM Corporation., http://www.ibm.com.',
u'copyright (c) 1999, Sun Microsystems., http://www.sun.com.',
u'at iClick, Inc., software copyright (c) 1999.',
u'Copyright 2001-2003,2006 The Apache Software Foundation.',
u'copyright (c) 1999, IBM Corporation., http://www.ibm.com.',
u'copyright (c) 1999, Sun Microsystems., http://www.sun.com.',
u'copyright (c) 2000 World Wide Web Consortium, http://www.w3.org',
]
check_detection(expected, test_file)
def test_ics_apache_xml_src_main_java_org_apache_xpath_domapi_xpathstylesheetdom3exception_java(self):
test_file = self.get_test_loc('ics/apache-xml-src-main-java-org-apache-xpath-domapi/XPathStylesheetDOM3Exception.java')
expected = [
u'Copyright (c) 2002 World Wide Web Consortium, Massachusetts Institute of Technology, Institut National de Recherche en Informatique',
]
check_detection(expected, test_file)
    @expectedFailure
    def test_ics_apache_xml_src_main_java_org_apache_xpath_domapi_xpathstylesheetdom3exception_java_trail_name(self):
        # Known failure: the passing variant of this test (same fixture,
        # directly above) only detects the statement truncated after
        # "Institut National de Recherche en Informatique"; the full trailing
        # holder list asserted here is not yet recovered by the detector.
        test_file = self.get_test_loc('ics/apache-xml-src-main-java-org-apache-xpath-domapi/XPathStylesheetDOM3Exception.java')
        expected = [
            u'Copyright (c) 2002 World Wide Web Consortium, '
            u'(Massachusetts Institute of Technology, '
            u'Institut National de Recherche en Informatique et en Automatique, '
            u'Keio University).',
        ]
        check_detection(expected, test_file)
def test_ics_astl_android_mk(self):
test_file = self.get_test_loc('ics/astl/Android.mk')
expected = [
u'Copyright (c) 2009 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_astl_notice(self):
test_file = self.get_test_loc('ics/astl/NOTICE')
expected = [
u'Copyright (c) 2009 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_astl_include_algorithm(self):
test_file = self.get_test_loc('ics/astl-include/algorithm')
expected = [
u'Copyright (c) 2009 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_astl_include_basic_ios_h(self):
test_file = self.get_test_loc('ics/astl-include/basic_ios.h')
expected = [
u'Copyright (c) 2010 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_astl_include_streambuf(self):
test_file = self.get_test_loc('ics/astl-include/streambuf')
expected = [
u'Copyright (c) 2010 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_astl_include_string(self):
test_file = self.get_test_loc('ics/astl-include/string')
expected = [
u'Copyright (c) 2009 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_astl_src_ostream_cpp(self):
test_file = self.get_test_loc('ics/astl-src/ostream.cpp')
expected = [
u'Copyright (c) 2010 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_astl_tests_test_vector_cpp(self):
test_file = self.get_test_loc('ics/astl-tests/test_vector.cpp')
expected = [
u'Copyright (c) 2009 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_bison_aclocal_m4(self):
test_file = self.get_test_loc('ics/bison/aclocal.m4')
expected = [
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 1999, 2000, 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1997, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1997, 1999, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_android_mk(self):
test_file = self.get_test_loc('ics/bison/Android.mk')
expected = [
u'Copyright 2006 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_bison_changelog(self):
test_file = self.get_test_loc('ics/bison/ChangeLog')
expected = [
u'Copyright (c) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_config_log(self):
test_file = self.get_test_loc('ics/bison/config.log')
expected = [
u'Copyright (c) 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_config_status(self):
test_file = self.get_test_loc('ics/bison/config.status')
expected = [
u'Copyright (c) 2003 Free Software Foundation, Inc.',
u'Copyright (c) 2000, 2001, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_configure(self):
test_file = self.get_test_loc('ics/bison/configure')
expected = [
u'Copyright (c) 2003 Free Software Foundation, Inc.',
u'Copyright (c) 2003 Free Software Foundation, Inc.',
u'Copyright (c) 2003 Free Software Foundation, Inc.',
u'Copyright (c) 2000, 2001, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_configure_ac(self):
test_file = self.get_test_loc('ics/bison/configure.ac')
expected = [
u'Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_copying(self):
test_file = self.get_test_loc('ics/bison/COPYING')
expected = [
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_bison_gnumakefile(self):
test_file = self.get_test_loc('ics/bison/GNUmakefile')
expected = [
u'Copyright (c) 2001, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_install(self):
test_file = self.get_test_loc('ics/bison/INSTALL')
expected = [
u'Copyright (c) 1994, 1995, 1996, 1999, 2000, 2001, 2002, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_makefile(self):
test_file = self.get_test_loc('ics/bison/Makefile')
expected = [
u'Copyright (c) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_makefile_am(self):
test_file = self.get_test_loc('ics/bison/Makefile.am')
expected = [
u'Copyright (c) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_makefile_cfg(self):
test_file = self.get_test_loc('ics/bison/Makefile.cfg')
expected = [
u'Copyright (c) 2003, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_makefile_maint(self):
test_file = self.get_test_loc('ics/bison/Makefile.maint')
expected = [
u'Copyright (c) 2001-2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_news(self):
test_file = self.get_test_loc('ics/bison/NEWS')
expected = [
u'Copyright (c) 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_notice(self):
test_file = self.get_test_loc('ics/bison/NOTICE')
expected = [
u'Copyright (c) 1992-2006 Free Software Foundation, Inc.',
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_bison_packaging(self):
test_file = self.get_test_loc('ics/bison/PACKAGING')
expected = [
u'Copyright (c) 2002, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_readme(self):
test_file = self.get_test_loc('ics/bison/README')
expected = [
u'Copyright (c) 1992, 1998, 1999, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_todo(self):
test_file = self.get_test_loc('ics/bison/TODO')
expected = [
u'Copyright (c) 2001, 2002, 2003, 2004, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_build_aux_config_guess(self):
test_file = self.get_test_loc('ics/bison-build-aux/config.guess')
expected = [
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_build_aux_config_rpath(self):
test_file = self.get_test_loc('ics/bison-build-aux/config.rpath')
expected = [
u'Copyright 1996-2006 Free Software Foundation, Inc.',
u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
]
check_detection(expected, test_file)
def test_ics_bison_build_aux_depcomp(self):
test_file = self.get_test_loc('ics/bison-build-aux/depcomp')
expected = [
u'Copyright (c) 1999, 2000, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_build_aux_install_sh(self):
test_file = self.get_test_loc('ics/bison-build-aux/install-sh')
expected = [
u'Copyright (c) 1994 X Consortium',
]
check_detection(expected, test_file)
def test_ics_bison_build_aux_mdate_sh(self):
test_file = self.get_test_loc('ics/bison-build-aux/mdate-sh')
expected = [
u'Copyright (c) 1995, 1996, 1997, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_build_aux_missing(self):
test_file = self.get_test_loc('ics/bison-build-aux/missing')
expected = [
u'Copyright (c) 1996, 1997, 1999, 2000, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_build_aux_texinfo_tex(self):
test_file = self.get_test_loc('ics/bison-build-aux/texinfo.tex')
expected = [
u'Copyright (c) 1985, 1986, 1988, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_build_aux_ylwrap(self):
test_file = self.get_test_loc('ics/bison-build-aux/ylwrap')
expected = [
u'Copyright (c) 1996, 1997, 1998, 1999, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_data_c_m4(self):
test_file = self.get_test_loc('ics/bison-data/c.m4')
expected = [
u'Copyright (c) 2002, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) $2 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_data_c_m4_2(self):
test_file = self.get_test_loc('ics/bison-data/c++.m4')
expected = [
u'Copyright (c) 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_data_makefile_am(self):
test_file = self.get_test_loc('ics/bison-data/Makefile.am')
expected = [
u'Copyright (c) 2002, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_data_readme(self):
test_file = self.get_test_loc('ics/bison-data/README')
expected = [
u'Copyright (c) 2002 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_data_m4sugar_m4sugar_m4(self):
test_file = self.get_test_loc('ics/bison-data-m4sugar/m4sugar.m4')
expected = [
u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_djgpp_config_bat(self):
test_file = self.get_test_loc('ics/bison-djgpp/config.bat')
expected = [
u'Copyright (c) 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_djgpp_config_sed(self):
test_file = self.get_test_loc('ics/bison-djgpp/config.sed')
expected = [
u'Copyright (c) 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_djgpp_makefile_maint(self):
test_file = self.get_test_loc('ics/bison-djgpp/Makefile.maint')
expected = [
u'Copyright (c) 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_djgpp_readme_in(self):
test_file = self.get_test_loc('ics/bison-djgpp/README.in')
expected = [
u'Copyright (c) 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_djgpp_subpipe_h(self):
test_file = self.get_test_loc('ics/bison-djgpp/subpipe.h')
expected = [
u'Copyright (c) 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_doc_bison_texinfo(self):
test_file = self.get_test_loc('ics/bison-doc/bison.texinfo')
expected = [
u'Copyright 1988, 1989, 1990, 1991, 1992, 1993, 1995, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_doc_fdl_texi(self):
test_file = self.get_test_loc('ics/bison-doc/fdl.texi')
expected = [
u'Copyright 2000,2001,2002 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_doc_gpl_texi(self):
test_file = self.get_test_loc('ics/bison-doc/gpl.texi')
expected = [
u'Copyright 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_bison_doc_makefile_am(self):
test_file = self.get_test_loc('ics/bison-doc/Makefile.am')
expected = [
u'Copyright (c) 2001, 2002, 2003, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_doc_refcard_tex(self):
test_file = self.get_test_loc('ics/bison-doc/refcard.tex')
expected = [
u'Copyright (c) 1998, 2001 Free Software Foundation, Inc.',
u'Copyright \\copyright\\ \\year\\ Free Software Foundation, Inc.',
u'Copyright \\copyright\\ \\year\\ Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_examples_extexi(self):
test_file = self.get_test_loc('ics/bison-examples/extexi')
expected = [
u'Copyright 1992, 2000, 2001, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_examples_makefile_am(self):
test_file = self.get_test_loc('ics/bison-examples/Makefile.am')
expected = [
u'Copyright (c) 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_abitset_c(self):
test_file = self.get_test_loc('ics/bison-lib/abitset.c')
expected = [
u'Copyright (c) 2002, 2003, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_abitset_h(self):
test_file = self.get_test_loc('ics/bison-lib/abitset.h')
expected = [
u'Copyright (c) 2002, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_argmatch_c(self):
test_file = self.get_test_loc('ics/bison-lib/argmatch.c')
expected = [
u'Copyright (c) 1990, 1998, 1999, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_argmatch_h(self):
test_file = self.get_test_loc('ics/bison-lib/argmatch.h')
expected = [
u'Copyright (c) 1990, 1998, 1999, 2001, 2002, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_basename_c(self):
test_file = self.get_test_loc('ics/bison-lib/basename.c')
expected = [
u'Copyright (c) 1990, 1998, 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_bbitset_h(self):
test_file = self.get_test_loc('ics/bison-lib/bbitset.h')
expected = [
u'Copyright (c) 2002, 2003, 2004, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_bitset_c(self):
test_file = self.get_test_loc('ics/bison-lib/bitset.c')
expected = [
u'Copyright (c) 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_bitset_h(self):
test_file = self.get_test_loc('ics/bison-lib/bitset.h')
expected = [
u'Copyright (c) 2002, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_bitsetv_c(self):
test_file = self.get_test_loc('ics/bison-lib/bitsetv.c')
expected = [
u'Copyright (c) 2001, 2002, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_bitsetv_print_c(self):
test_file = self.get_test_loc('ics/bison-lib/bitsetv-print.c')
expected = [
u'Copyright (c) 2001, 2002, 2004, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_dirname_c(self):
test_file = self.get_test_loc('ics/bison-lib/dirname.c')
expected = [
u'Copyright (c) 1990, 1998, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_dirname_h(self):
test_file = self.get_test_loc('ics/bison-lib/dirname.h')
expected = [
u'Copyright (c) 1998, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_dup_safer_c(self):
test_file = self.get_test_loc('ics/bison-lib/dup-safer.c')
expected = [
u'Copyright (c) 2001, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_error_c(self):
test_file = self.get_test_loc('ics/bison-lib/error.c')
expected = [
u'Copyright (c) 1990-1998, 2000-2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_error_h(self):
test_file = self.get_test_loc('ics/bison-lib/error.h')
expected = [
u'Copyright (c) 1995, 1996, 1997, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_exit_h(self):
test_file = self.get_test_loc('ics/bison-lib/exit.h')
expected = [
u'Copyright (c) 1995, 2001 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_exitfail_c(self):
test_file = self.get_test_loc('ics/bison-lib/exitfail.c')
expected = [
u'Copyright (c) 2002, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_get_errno_c(self):
test_file = self.get_test_loc('ics/bison-lib/get-errno.c')
expected = [
u'Copyright (c) 2002, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_getopt_c(self):
test_file = self.get_test_loc('ics/bison-lib/getopt.c')
expected = [
u'Copyright (c) 1987,88,89,90,91,92,93,94,95,96,98,99,2000,2001,2002,2003,2004,2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_getopt_h(self):
test_file = self.get_test_loc('ics/bison-lib/getopt_.h')
expected = [
u'Copyright (c) 1989-1994,1996-1999,2001,2003,2004,2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_getopt_int_h(self):
test_file = self.get_test_loc('ics/bison-lib/getopt_int.h')
expected = [
u'Copyright (c) 1989-1994,1996-1999,2001,2003,2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_getopt1_c(self):
test_file = self.get_test_loc('ics/bison-lib/getopt1.c')
expected = [
u'Copyright (c) 1987,88,89,90,91,92,93,94,96,97,98,2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_gettext_h(self):
test_file = self.get_test_loc('ics/bison-lib/gettext.h')
expected = [
u'Copyright (c) 1995-1998, 2000-2002, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_hard_locale_c(self):
test_file = self.get_test_loc('ics/bison-lib/hard-locale.c')
expected = [
u'Copyright (c) 1997, 1998, 1999, 2002, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_hard_locale_h(self):
test_file = self.get_test_loc('ics/bison-lib/hard-locale.h')
expected = [
u'Copyright (c) 1999, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_hash_c(self):
test_file = self.get_test_loc('ics/bison-lib/hash.c')
expected = [
u'Copyright (c) 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_hash_h(self):
test_file = self.get_test_loc('ics/bison-lib/hash.h')
expected = [
u'Copyright (c) 1998, 1999, 2001, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_makefile_am(self):
test_file = self.get_test_loc('ics/bison-lib/Makefile.am')
expected = [
u'Copyright (c) 2001, 2002, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_malloc_c(self):
test_file = self.get_test_loc('ics/bison-lib/malloc.c')
expected = [
u'Copyright (c) 1997, 1998 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_mbswidth_c(self):
test_file = self.get_test_loc('ics/bison-lib/mbswidth.c')
expected = [
u'Copyright (c) 2000-2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_mbswidth_h(self):
test_file = self.get_test_loc('ics/bison-lib/mbswidth.h')
expected = [
u'Copyright (c) 2000-2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_obstack_c(self):
test_file = self.get_test_loc('ics/bison-lib/obstack.c')
expected = [
u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_obstack_h(self):
test_file = self.get_test_loc('ics/bison-lib/obstack.h')
expected = [
u'Copyright (c) 1988-1994,1996-1999,2003,2004,2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_quote_c(self):
test_file = self.get_test_loc('ics/bison-lib/quote.c')
expected = [
u'Copyright (c) 1998, 1999, 2000, 2001, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_quote_h(self):
test_file = self.get_test_loc('ics/bison-lib/quote.h')
expected = [
u'Copyright (c) 1998, 1999, 2000, 2001, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_quotearg_c(self):
test_file = self.get_test_loc('ics/bison-lib/quotearg.c')
expected = [
u'Copyright (c) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_quotearg_h(self):
test_file = self.get_test_loc('ics/bison-lib/quotearg.h')
expected = [
u'Copyright (c) 1998, 1999, 2000, 2001, 2002, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_stdbool_h(self):
test_file = self.get_test_loc('ics/bison-lib/stdbool_.h')
expected = [
u'Copyright (c) 2001, 2002, 2003, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_stdio_safer_h(self):
test_file = self.get_test_loc('ics/bison-lib/stdio-safer.h')
expected = [
u'Copyright (c) 2001, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_stpcpy_c(self):
test_file = self.get_test_loc('ics/bison-lib/stpcpy.c')
expected = [
u'Copyright (c) 1992, 1995, 1997, 1998 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_stpcpy_h(self):
test_file = self.get_test_loc('ics/bison-lib/stpcpy.h')
expected = [
u'Copyright (c) 1995, 2001, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_strdup_c(self):
test_file = self.get_test_loc('ics/bison-lib/strdup.c')
expected = [
u'Copyright (c) 1991, 1996, 1997, 1998, 2002, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_strdup_h(self):
test_file = self.get_test_loc('ics/bison-lib/strdup.h')
expected = [
u'Copyright (c) 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_strerror_c(self):
test_file = self.get_test_loc('ics/bison-lib/strerror.c')
expected = [
u'Copyright (c) 1986, 1988, 1989, 1991, 2002, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_stripslash_c(self):
test_file = self.get_test_loc('ics/bison-lib/stripslash.c')
expected = [
u'Copyright (c) 1990, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_strndup_c(self):
test_file = self.get_test_loc('ics/bison-lib/strndup.c')
expected = [
u'Copyright (c) 1996, 1997, 1998, 2001, 2002, 2003, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_strndup_h(self):
test_file = self.get_test_loc('ics/bison-lib/strndup.h')
expected = [
u'Copyright (c) 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_strtol_c(self):
test_file = self.get_test_loc('ics/bison-lib/strtol.c')
expected = [
u'Copyright (c) 1991, 1992, 1994, 1995, 1996, 1997, 1998, 1999, 2003, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_strtoul_c(self):
test_file = self.get_test_loc('ics/bison-lib/strtoul.c')
expected = [
u'Copyright (c) 1991, 1997 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_strverscmp_c(self):
test_file = self.get_test_loc('ics/bison-lib/strverscmp.c')
expected = [
u'Copyright (c) 1997, 2000, 2002, 2004 Free Software Foundation, Inc.',
u'Jean-Francois Bignolles <bignolle@ecoledoc.ibp.fr>, 1997.'
]
check_detection(expected, test_file)
def test_ics_bison_lib_strverscmp_h(self):
test_file = self.get_test_loc('ics/bison-lib/strverscmp.h')
expected = [
u'Copyright (c) 1997, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_subpipe_c(self):
test_file = self.get_test_loc('ics/bison-lib/subpipe.c')
expected = [
u'Copyright (c) 2002, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_subpipe_h(self):
test_file = self.get_test_loc('ics/bison-lib/subpipe.h')
expected = [
u'Copyright (c) 2002, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_timevar_c(self):
test_file = self.get_test_loc('ics/bison-lib/timevar.c')
expected = [
u'Copyright (c) 2000, 2002, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_timevar_h(self):
test_file = self.get_test_loc('ics/bison-lib/timevar.h')
expected = [
u'Copyright (c) 2000, 2002, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_unistd_safer_h(self):
test_file = self.get_test_loc('ics/bison-lib/unistd-safer.h')
expected = [
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_unlocked_io_h(self):
test_file = self.get_test_loc('ics/bison-lib/unlocked-io.h')
expected = [
u'Copyright (c) 2001, 2002, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_xalloc_h(self):
test_file = self.get_test_loc('ics/bison-lib/xalloc.h')
expected = [
u'Copyright (c) 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_xalloc_die_c(self):
test_file = self.get_test_loc('ics/bison-lib/xalloc-die.c')
expected = [
u'Copyright (c) 1997, 1998, 1999, 2000, 2002, 2003, 2004, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_bison_i18n_m4(self):
test_file = self.get_test_loc('ics/bison-m4/bison-i18n.m4')
expected = [
u'Copyright (c) 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_c_working_m4(self):
test_file = self.get_test_loc('ics/bison-m4/c-working.m4')
expected = [
u'Copyright (c) 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_cxx_m4(self):
test_file = self.get_test_loc('ics/bison-m4/cxx.m4')
expected = [
u'Copyright (c) 2004, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_dirname_m4(self):
test_file = self.get_test_loc('ics/bison-m4/dirname.m4')
expected = [
u'Copyright (c) 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_dos_m4(self):
test_file = self.get_test_loc('ics/bison-m4/dos.m4')
expected = [
u'Copyright (c) 2000, 2001, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_error_m4(self):
test_file = self.get_test_loc('ics/bison-m4/error.m4')
expected = [
u'Copyright (c) 1996, 1997, 1998, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_exitfail_m4(self):
test_file = self.get_test_loc('ics/bison-m4/exitfail.m4')
expected = [
u'Copyright (c) 2002, 2003, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_extensions_m4(self):
test_file = self.get_test_loc('ics/bison-m4/extensions.m4')
expected = [
u'Copyright (c) 2003, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_gettext_gl_m4(self):
test_file = self.get_test_loc('ics/bison-m4/gettext_gl.m4')
expected = [
u'Copyright (c) 1995-2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_iconv_m4(self):
test_file = self.get_test_loc('ics/bison-m4/iconv.m4')
expected = [
u'Copyright (c) 2000-2002 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_inttypes_h_gl_m4(self):
test_file = self.get_test_loc('ics/bison-m4/inttypes_h_gl.m4')
expected = [
u'Copyright (c) 1997-2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_lib_ld_gl_m4(self):
test_file = self.get_test_loc('ics/bison-m4/lib-ld_gl.m4')
expected = [
u'Copyright (c) 1996-2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_lib_link_m4(self):
test_file = self.get_test_loc('ics/bison-m4/lib-link.m4')
expected = [
u'Copyright (c) 2001-2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_m4_m4(self):
test_file = self.get_test_loc('ics/bison-m4/m4.m4')
expected = [
u'Copyright 2000 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_mbrtowc_m4(self):
test_file = self.get_test_loc('ics/bison-m4/mbrtowc.m4')
expected = [
u'Copyright (c) 2001-2002, 2004-2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_mbstate_t_m4(self):
test_file = self.get_test_loc('ics/bison-m4/mbstate_t.m4')
expected = [
u'Copyright (c) 2000, 2001, 2002 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_mbswidth_m4(self):
test_file = self.get_test_loc('ics/bison-m4/mbswidth.m4')
expected = [
u'Copyright (c) 2000-2002, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_nls_m4(self):
test_file = self.get_test_loc('ics/bison-m4/nls.m4')
expected = [
u'Copyright (c) 1995-2003, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_obstack_m4(self):
test_file = self.get_test_loc('ics/bison-m4/obstack.m4')
expected = [
u'Copyright (c) 2002, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_onceonly_m4(self):
test_file = self.get_test_loc('ics/bison-m4/onceonly.m4')
expected = [
u'Copyright (c) 2002-2003, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_progtest_m4(self):
test_file = self.get_test_loc('ics/bison-m4/progtest.m4')
expected = [
u'Copyright (c) 1996-2003, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_quotearg_m4(self):
test_file = self.get_test_loc('ics/bison-m4/quotearg.m4')
expected = [
u'Copyright (c) 2002, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_stdbool_m4(self):
test_file = self.get_test_loc('ics/bison-m4/stdbool.m4')
expected = [
u'Copyright (c) 2002-2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_stdio_safer_m4(self):
test_file = self.get_test_loc('ics/bison-m4/stdio-safer.m4')
expected = [
u'Copyright (c) 2002, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_stpcpy_m4(self):
test_file = self.get_test_loc('ics/bison-m4/stpcpy.m4')
expected = [
u'Copyright (c) 2002 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_strndup_m4(self):
test_file = self.get_test_loc('ics/bison-m4/strndup.m4')
expected = [
u'Copyright (c) 2002-2003, 2005-2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_strtol_m4(self):
test_file = self.get_test_loc('ics/bison-m4/strtol.m4')
expected = [
u'Copyright (c) 2002, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_ulonglong_gl_m4(self):
test_file = self.get_test_loc('ics/bison-m4/ulonglong_gl.m4')
expected = [
u'Copyright (c) 1999-2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_unlocked_io_m4(self):
test_file = self.get_test_loc('ics/bison-m4/unlocked-io.m4')
expected = [
u'Copyright (c) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_warning_m4(self):
test_file = self.get_test_loc('ics/bison-m4/warning.m4')
expected = [
u'Copyright (c) 2001, 2002 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_xstrndup_m4(self):
test_file = self.get_test_loc('ics/bison-m4/xstrndup.m4')
expected = [
u'Copyright (c) 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_assoc_c(self):
test_file = self.get_test_loc('ics/bison-src/assoc.c')
expected = [
u'Copyright (c) 2002, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_closure_c(self):
test_file = self.get_test_loc('ics/bison-src/closure.c')
expected = [
u'Copyright (c) 1984, 1989, 2000, 2001, 2002, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_closure_h(self):
test_file = self.get_test_loc('ics/bison-src/closure.h')
expected = [
u'Copyright (c) 1984, 1989, 2000, 2001, 2002 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_complain_c(self):
test_file = self.get_test_loc('ics/bison-src/complain.c')
expected = [
u'Copyright (c) 2000, 2001, 2002, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_complain_h(self):
test_file = self.get_test_loc('ics/bison-src/complain.h')
expected = [
u'Copyright (c) 2000, 2001, 2002 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_conflicts_c(self):
test_file = self.get_test_loc('ics/bison-src/conflicts.c')
expected = [
u'Copyright (c) 1984, 1989, 1992, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_conflicts_h(self):
test_file = self.get_test_loc('ics/bison-src/conflicts.h')
expected = [
u'Copyright (c) 2000, 2001, 2002, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_derives_c(self):
test_file = self.get_test_loc('ics/bison-src/derives.c')
expected = [
u'Copyright (c) 1984, 1989, 2000, 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_files_c(self):
test_file = self.get_test_loc('ics/bison-src/files.c')
expected = [
u'Copyright (c) 1984, 1986, 1989, 1992, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_getargs_c(self):
test_file = self.get_test_loc('ics/bison-src/getargs.c')
expected = [
u'Copyright (c) 1984, 1986, 1989, 1992, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'(c) d Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_bison_src_getargs_c_lead_copy(self):
test_file = self.get_test_loc('ics/bison-src/getargs.c')
expected = [
u'Copyright (c) 1984, 1986, 1989, 1992, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) d Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_getargs_h(self):
test_file = self.get_test_loc('ics/bison-src/getargs.h')
expected = [
u'Copyright (c) 1984, 1986, 1989, 1992, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_gram_c(self):
test_file = self.get_test_loc('ics/bison-src/gram.c')
expected = [
u'Copyright (c) 1984, 1986, 1989, 2001, 2002, 2003, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_gram_h(self):
test_file = self.get_test_loc('ics/bison-src/gram.h')
expected = [
u'Copyright (c) 1984, 1986, 1989, 1992, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_lalr_c(self):
test_file = self.get_test_loc('ics/bison-src/lalr.c')
expected = [
u'Copyright (c) 1984, 1986, 1989, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_lalr_h(self):
test_file = self.get_test_loc('ics/bison-src/lalr.h')
expected = [
u'Copyright (c) 1984, 1986, 1989, 2000, 2002, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_lr0_c(self):
test_file = self.get_test_loc('ics/bison-src/LR0.c')
expected = [
u'Copyright (c) 1984, 1986, 1989, 2000, 2001, 2002, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_lr0_h(self):
test_file = self.get_test_loc('ics/bison-src/LR0.h')
expected = [
u'Copyright 1984, 1986, 1989, 2000, 2001, 2002 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_main_c(self):
test_file = self.get_test_loc('ics/bison-src/main.c')
expected = [
u'Copyright (c) 1984, 1986, 1989, 1992, 1995, 2000, 2001, 2002, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_muscle_tab_c(self):
test_file = self.get_test_loc('ics/bison-src/muscle_tab.c')
expected = [
u'Copyright (c) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_muscle_tab_h(self):
test_file = self.get_test_loc('ics/bison-src/muscle_tab.h')
expected = [
u'Copyright (c) 2001, 2002, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_nullable_c(self):
test_file = self.get_test_loc('ics/bison-src/nullable.c')
expected = [
u'Copyright (c) 1984, 1989, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_nullable_h(self):
test_file = self.get_test_loc('ics/bison-src/nullable.h')
expected = [
u'Copyright (c) 2000, 2002 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_output_c(self):
test_file = self.get_test_loc('ics/bison-src/output.c')
expected = [
u'Copyright (c) 1984, 1986, 1989, 1992, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_output_h(self):
test_file = self.get_test_loc('ics/bison-src/output.h')
expected = [
u'Copyright (c) 2000, 2001, 2002, 2003, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_parse_gram_c(self):
test_file = self.get_test_loc('ics/bison-src/parse-gram.c')
expected = [
u'Copyright (c) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_parse_gram_h(self):
test_file = self.get_test_loc('ics/bison-src/parse-gram.h')
expected = [
u'Copyright (c) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_print_c(self):
test_file = self.get_test_loc('ics/bison-src/print.c')
expected = [
u'Copyright (c) 1984, 1986, 1989, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_print_h(self):
test_file = self.get_test_loc('ics/bison-src/print.h')
expected = [
u'Copyright 2000 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_reader_c(self):
test_file = self.get_test_loc('ics/bison-src/reader.c')
expected = [
u'Copyright (c) 1984, 1986, 1989, 1992, 1998, 2000, 2001, 2002, 2003, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_reader_h(self):
test_file = self.get_test_loc('ics/bison-src/reader.h')
expected = [
u'Copyright (c) 2000, 2001, 2002, 2003, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_reduce_c(self):
test_file = self.get_test_loc('ics/bison-src/reduce.c')
expected = [
u'Copyright (c) 1988, 1989, 2000, 2001, 2002, 2003, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_scan_skel_c(self):
test_file = self.get_test_loc('ics/bison-src/scan-skel.c')
expected = [
u'Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_scan_skel_l(self):
test_file = self.get_test_loc('ics/bison-src/scan-skel.l')
expected = [
u'Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_state_h(self):
test_file = self.get_test_loc('ics/bison-src/state.h')
expected = [
u'Copyright (c) 1984, 1989, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_symtab_c(self):
test_file = self.get_test_loc('ics/bison-src/symtab.c')
expected = [
u'Copyright (c) 1984, 1989, 2000, 2001, 2002, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_symtab_h(self):
test_file = self.get_test_loc('ics/bison-src/symtab.h')
expected = [
u'Copyright (c) 1984, 1989, 1992, 2000, 2001, 2002, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_system_h(self):
test_file = self.get_test_loc('ics/bison-src/system.h')
expected = [
u'Copyright (c) 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_uniqstr_c(self):
test_file = self.get_test_loc('ics/bison-src/uniqstr.c')
expected = [
u'Copyright (c) 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_vcg_h(self):
test_file = self.get_test_loc('ics/bison-src/vcg.h')
expected = [
u'Copyright (c) 2001, 2002, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_actions_at(self):
test_file = self.get_test_loc('ics/bison-tests/actions.at')
expected = [
u'Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_atconfig(self):
test_file = self.get_test_loc('ics/bison-tests/atconfig')
expected = [
u'Copyright (c) 2000, 2001, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_atlocal(self):
test_file = self.get_test_loc('ics/bison-tests/atlocal')
expected = [
u'Copyright (c) 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_c_at(self):
test_file = self.get_test_loc('ics/bison-tests/c++.at')
expected = [
u'Copyright (c) 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_calc_at(self):
test_file = self.get_test_loc('ics/bison-tests/calc.at')
expected = [
u'Copyright (c) 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_conflicts_at(self):
test_file = self.get_test_loc('ics/bison-tests/conflicts.at')
expected = [
u'Copyright (c) 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_cxx_type_at(self):
test_file = self.get_test_loc('ics/bison-tests/cxx-type.at')
expected = [
u'Copyright (c) 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_existing_at(self):
test_file = self.get_test_loc('ics/bison-tests/existing.at')
expected = [
u'Copyright (c) 1989, 1990, 1991, 1992, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_glr_regression_at(self):
test_file = self.get_test_loc('ics/bison-tests/glr-regression.at')
expected = [
u'Copyright (c) 2002, 2003, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_headers_at(self):
test_file = self.get_test_loc('ics/bison-tests/headers.at')
expected = [
u'Copyright (c) 2001, 2002, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_local_at(self):
test_file = self.get_test_loc('ics/bison-tests/local.at')
expected = [
u'Copyright (c) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_makefile_am(self):
test_file = self.get_test_loc('ics/bison-tests/Makefile.am')
expected = [
u'Copyright (c) 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_output_at(self):
test_file = self.get_test_loc('ics/bison-tests/output.at')
expected = [
u'Copyright (c) 2000, 2001, 2002, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_sets_at(self):
test_file = self.get_test_loc('ics/bison-tests/sets.at')
expected = [
u'Copyright (c) 2001, 2002, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_synclines_at(self):
test_file = self.get_test_loc('ics/bison-tests/synclines.at')
expected = [
u'Copyright (c) 2002, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_testsuite_at(self):
test_file = self.get_test_loc('ics/bison-tests/testsuite.at')
expected = [
u'Copyright (c) 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_torture_at(self):
test_file = self.get_test_loc('ics/bison-tests/torture.at')
expected = [
u'Copyright (c) 2001, 2002, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_blktrace_blkiomon_c(self):
test_file = self.get_test_loc('ics/blktrace/blkiomon.c')
expected = [
u'Copyright IBM Corp. 2008',
]
check_detection(expected, test_file)
def test_ics_blktrace_blkiomon_h(self):
test_file = self.get_test_loc('ics/blktrace/blkiomon.h')
expected = [
u'Copyright IBM Corp. 2008',
]
check_detection(expected, test_file)
def test_ics_blktrace_blkparse_c(self):
test_file = self.get_test_loc('ics/blktrace/blkparse.c')
expected = [
u'Copyright (c) 2005 Jens Axboe <axboe@suse.de>',
u'Copyright (c) 2006 Jens Axboe <axboe@kernel.dk>',
]
check_detection(expected, test_file)
def test_ics_blktrace_blkrawverify_c(self):
test_file = self.get_test_loc('ics/blktrace/blkrawverify.c')
expected = [
u'Copyright (c) 2006 Alan D. Brunelle <Alan.Brunelle@hp.com>',
]
check_detection(expected, test_file)
def test_ics_blktrace_btrace(self):
test_file = self.get_test_loc('ics/blktrace/btrace')
expected = [
u'Copyright (c) 2005 Silicon Graphics, Inc.',
]
check_detection(expected, test_file)
def test_ics_blktrace_btrace_spec(self):
test_file = self.get_test_loc('ics/blktrace/btrace.spec')
expected = [
u'Copyright (c) 2005 SUSE LINUX Products GmbH, Nuernberg, Germany.',
]
check_detection(expected, test_file)
def test_ics_blktrace_jhash_h(self):
test_file = self.get_test_loc('ics/blktrace/jhash.h')
expected = [
u'Copyright (c) 2006. Bob Jenkins (bob_jenkins@burtleburtle.net)',
u'Copyright (c) 2009 Jozsef Kadlecsik (kadlec@blackhole.kfki.hu)',
]
check_detection(expected, test_file)
def test_ics_blktrace_notice(self):
test_file = self.get_test_loc('ics/blktrace/NOTICE')
expected = [
u'Copyright (c) 1997, 2002, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2005 Jens Axboe <axboe@suse.de>',
u'Copyright (c) 2006 Alan D. Brunelle <Alan.Brunelle@hp.com>',
u'Copyright (c) 2006 Jens Axboe <axboe@kernel.dk>',
u'Copyright (c) 2006. Bob Jenkins (bob_jenkins@burtleburtle.net)',
u'Copyright (c) 2009 Jozsef Kadlecsik (kadlec@blackhole.kfki.hu)',
u'Copyright IBM Corp. 2008',
u'Copyright (c) 2005 SUSE LINUX Products GmbH, Nuernberg, Germany.',
u'Copyright (c) 2005 Silicon Graphics, Inc.',
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_blktrace_rbtree_c(self):
test_file = self.get_test_loc('ics/blktrace/rbtree.c')
expected = [
u'(c) 1999 Andrea Arcangeli <andrea@suse.de>',
u'(c) 2002 David Woodhouse <dwmw2@infradead.org>',
]
check_detection(expected, test_file)
def test_ics_blktrace_rbtree_h(self):
test_file = self.get_test_loc('ics/blktrace/rbtree.h')
expected = [
u'(c) 1999 Andrea Arcangeli <andrea@suse.de>',
]
check_detection(expected, test_file)
def test_ics_blktrace_strverscmp_c(self):
test_file = self.get_test_loc('ics/blktrace/strverscmp.c')
expected = [
u'Copyright (c) 1997, 2002, 2005 Free Software Foundation, Inc.',
u'Jean-Francois Bignolles <bignolle@ecoledoc.ibp.fr>, 1997.'
]
check_detection(expected, test_file)
def test_ics_blktrace_btreplay_btrecord_c(self):
test_file = self.get_test_loc('ics/blktrace-btreplay/btrecord.c')
expected = [
u'Copyright (c) 2007 Alan D. Brunelle <Alan.Brunelle@hp.com>',
]
check_detection(expected, test_file)
def test_ics_blktrace_btreplay_btrecord_h(self):
test_file = self.get_test_loc('ics/blktrace-btreplay/btrecord.h')
expected = [
u'Copyright (c) 2007 Alan D. Brunelle <Alan.Brunelle@hp.com>',
]
check_detection(expected, test_file)
def test_ics_blktrace_btreplay_doc_abstract_tex(self):
test_file = self.get_test_loc('ics/blktrace-btreplay-doc/abstract.tex')
expected = [
u'Copyright (c) 2007 Alan D. Brunelle <Alan.Brunelle@hp.com>',
]
check_detection(expected, test_file)
def test_ics_blktrace_btt_bno_plot_py(self):
test_file = self.get_test_loc('ics/blktrace-btt/bno_plot.py')
expected = [
u'(c) Copyright 2008 Hewlett-Packard Development Company, L.P.',
]
check_detection(expected, test_file)
def test_ics_blktrace_btt_btt_plot_py(self):
test_file = self.get_test_loc('ics/blktrace-btt/btt_plot.py')
expected = [
u'(c) Copyright 2009 Hewlett-Packard Development Company, L.P.',
]
check_detection(expected, test_file)
def test_ics_blktrace_btt_notice(self):
test_file = self.get_test_loc('ics/blktrace-btt/NOTICE')
expected = [
u'(c) Copyright 2007 Hewlett-Packard Development Company, L.P.',
u'(c) Copyright 2008 Hewlett-Packard Development Company, L.P.',
u'Copyright (c) 2006 Alan D. Brunelle <Alan.Brunelle@hp.com>',
u'Copyright (c) 2007 Alan D. Brunelle <Alan.Brunelle@hp.com>',
u'(c) Copyright 2008 Hewlett-Packard Development Company, L.P.',
u'(c) Copyright 2009 Hewlett-Packard Development Company, L.P.',
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_blktrace_btt_plat_c(self):
test_file = self.get_test_loc('ics/blktrace-btt/plat.c')
expected = [
u'(c) Copyright 2008 Hewlett-Packard Development Company, L.P. Alan D. Brunelle <alan.brunelle@hp.com>',
]
check_detection(expected, test_file)
def test_ics_blktrace_btt_q2d_c(self):
test_file = self.get_test_loc('ics/blktrace-btt/q2d.c')
expected = [
u'(c) Copyright 2007 Hewlett-Packard Development Company, L.P.',
]
check_detection(expected, test_file)
def test_ics_blktrace_doc_blktrace_tex(self):
test_file = self.get_test_loc('ics/blktrace-doc/blktrace.tex')
expected = [
u'Copyright (c) 2005, 2006 Alan D. Brunelle <Alan.Brunelle@hp.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_android_mk(self):
test_file = self.get_test_loc('ics/bluetooth-bluez/Android.mk')
expected = [
u'Copyright (c) 2008 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_copying_lib(self):
test_file = self.get_test_loc('ics/bluetooth-bluez/COPYING.LIB')
expected = [
u'Copyright (c) 1991, 1999 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_notice(self):
test_file = self.get_test_loc('ics/bluetooth-bluez/NOTICE')
expected = [
u'Copyright (c) 2004-2008 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2006-2007 Nokia Corporation',
u'Copyright (c) 2006-2009 Nokia Corporation',
u'Copyright (c) 2008 Joao Paulo Rechi Vita',
u'Copyright (c) 2008-2009 Leonid Movshovich <event.riga@gmail.org>',
u'Copyright (c) 2008-2009 Nokia Corporation',
u'Copyright (c) 2009 Lennart Poettering',
u'Copyright (c) 2009 Intel Corporation',
u'Copyright (c) 2009 Joao Paulo Rechi Vita',
u'Copyright (c) 2009-2010 Motorola Inc.',
u'Copyright (c) 2004-2005 Henryk Ploetz <henryk@ploetzli.ch>',
u'Copyright (c) 2004-2008 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2005-2006 Brad Midgley <bmidgley@xmission.com>',
u'Copyright (c) 2005-2008 Brad Midgley <bmidgley@xmission.com>',
u'Copyright (c) 2006-2007 Nokia Corporation',
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
u'Copyright (c) 1991, 1999 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_readme(self):
test_file = self.get_test_loc('ics/bluetooth-bluez/README')
expected = [
u'Copyright (c) 2000-2001 Qualcomm Incorporated',
u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2002-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_attrib_att_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-attrib/att.c')
expected = [
u'Copyright (c) 2010 Nokia Corporation',
u'Copyright (c) 2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_attrib_gatttool_h(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-attrib/gatttool.h')
expected = [
u'Copyright (c) 2011 Nokia Corporation',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_audio_a2dp_codecs_h(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-audio/a2dp-codecs.h')
expected = [
u'Copyright (c) 2006-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_audio_android_audio_hw_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-audio/android_audio_hw.c')
expected = [
u'Copyright (c) 2008-2011 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_audio_ctl_bluetooth_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-audio/ctl_bluetooth.c')
expected = [
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_audio_gateway_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-audio/gateway.c')
expected = [
u'Copyright (c) 2006-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2008-2009 Leonid Movshovich <event.riga@gmail.org>',
u'Copyright (c) 2010 ProFUSION',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_bluetooth_bluez_audio_gateway_c_trail_name(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-audio/gateway.c')
expected = [
u'Copyright (c) 2006-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2008-2009 Leonid Movshovich <event.riga@gmail.org>',
u'Copyright (c) 2010 ProFUSION embedded systems',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_audio_liba2dp_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-audio/liba2dp.c')
expected = [
u'Copyright (c) 2006-2007 Nokia Corporation',
u'Copyright (c) 2004-2008 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_audio_media_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-audio/media.c')
expected = [
u'Copyright (c) 2006-2007 Nokia Corporation',
u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_audio_sink_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-audio/sink.c')
expected = [
u'Copyright (c) 2006-2007 Nokia Corporation',
u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2009-2010 Motorola Inc.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_audio_source_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-audio/source.c')
expected = [
u'Copyright (c) 2006-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2009 Joao Paulo Rechi Vita',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_audio_telephony_maemo5_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-audio/telephony-maemo5.c')
expected = [
u'Copyright (c) 2008-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_audio_telephony_ofono_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-audio/telephony-ofono.c')
expected = [
u'Copyright (c) 2009-2010 Intel Corporation',
u'Copyright (c) 2006-2009 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_btio_btio_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-btio/btio.c')
expected = [
u'Copyright (c) 2009-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2009-2010 Nokia Corporation',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_common_android_bluez_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-common/android_bluez.c')
expected = [
u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2009 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_compat_bnep_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-compat/bnep.c')
expected = [
u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2002-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_compat_fakehid_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-compat/fakehid.c')
expected = [
u'Copyright (c) 2003-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_doc_adapter_api_txt(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-doc/adapter-api.txt')
expected = [
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2005-2006 Johan Hedberg <johan.hedberg@nokia.com>',
u'Copyright (c) 2005-2006 Claudio Takahasi <claudio.takahasi@indt.org.br>',
u'Copyright (c) 2006-2007 Luiz von Dentz',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_doc_agent_api_txt(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-doc/agent-api.txt')
expected = [
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2005-2006 Johan Hedberg <johan.hedberg@nokia.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_doc_attribute_api_txt(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-doc/attribute-api.txt')
expected = [
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_doc_audio_api_txt(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-doc/audio-api.txt')
expected = [
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2005-2007 Johan Hedberg <johan.hedberg@nokia.com>',
u'Copyright (c) 2005-2006 Brad Midgley <bmidgley@xmission.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_doc_control_api_txt(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-doc/control-api.txt')
expected = [
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2007-2008 David Stockwell <dstockwell@frequency-one.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_doc_mgmt_api_txt(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-doc/mgmt-api.txt')
expected = [
u'Copyright (c) 2008-2009 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_doc_oob_api_txt(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-doc/oob-api.txt')
expected = [
u'Copyright (c) 2011 Szymon Janc <szymon.janc@tieto.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_doc_sap_api_txt(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-doc/sap-api.txt')
expected = [
u'Copyright (c) 2010 ST-Ericsson SA',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_gdbus_gdbus_h(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-gdbus/gdbus.h')
expected = [
u'Copyright (c) 2004-2011 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_gdbus_notice(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-gdbus/NOTICE')
expected = [
u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_health_hdp_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-health/hdp.c')
expected = [
u'Copyright (c) 2010 GSyC/LibreSoft, Universidad Rey Juan Carlos. Authors Santiago Carot Nemesio',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_bluetooth_bluez_health_hdp_c_extra_author(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-health/hdp.c')
expected = [
u'Copyright (c) 2010 GSyC/LibreSoft, Universidad Rey Juan Carlos.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_health_mcap_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-health/mcap.c')
expected = [
u'Copyright (c) 2010 GSyC/LibreSoft, Universidad Rey Juan Carlos.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_health_mcap_h(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-health/mcap.h')
expected = [
u'Copyright (c) 2010 GSyC/LibreSoft, Universidad Rey Juan Carlos.',
u'Copyright (c) 2010 Signove',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_lib_bluetooth_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-lib/bluetooth.c')
expected = [
u'Copyright (c) 2000-2001 Qualcomm Incorporated',
u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2002-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_lib_notice(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-lib/NOTICE')
expected = [
u'Copyright (c) 2000-2001 Qualcomm Incorporated',
u'Copyright (c) 2001-2002 Nokia Corporation',
u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2002-2003 Stephen Crane <steve.crane@rococosoft.com>',
u'Copyright (c) 2002-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2001-2002 Nokia Corporation',
u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2002-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2002-2003 Stephen Crane <steve.crane@rococosoft.com>',
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_lib_sdp_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-lib/sdp.c')
expected = [
u'Copyright (c) 2001-2002 Nokia Corporation',
u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2002-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2002-2003 Stephen Crane <steve.crane@rococosoft.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_lib_uuid_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-lib/uuid.c')
expected = [
u'Copyright (c) 2011 Nokia Corporation',
u'Copyright (c) 2011 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_lib_bluetooth_cmtp_h(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-lib-bluetooth/cmtp.h')
expected = [
u'Copyright (c) 2002-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_plugins_builtin_h(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-plugins/builtin.h')
expected = [
u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_plugins_dbusoob_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-plugins/dbusoob.c')
expected = [
u'Copyright (c) 2011 ST-Ericsson SA',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_sap_main_c_trail_institut(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sap/main.c')
expected = [
u'Copyright (c) 2010 Instituto Nokia de Tecnologia - INdT',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_sap_sap_h_trail_institut(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sap/sap.h')
expected = [
u'Copyright (c) 2010 Instituto Nokia de Tecnologia - INdT',
u'Copyright (c) 2010 ST-Ericsson SA',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_sap_sap_dummy_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sap/sap-dummy.c')
expected = [
u'Copyright (c) 2010 ST-Ericsson SA',
u'Copyright (c) 2011 Tieto Poland',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_sap_server_c_trail_institut(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sap/server.c')
expected = [
u'Copyright (c) 2010 Instituto Nokia de Tecnologia - INdT',
u'Copyright (c) 2010 ST-Ericsson SA',
u'Copyright (c) 2011 Tieto Poland',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_sap_server_h(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sap/server.h')
expected = [
u'Copyright (c) 2010 ST-Ericsson SA',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_sbc_formats_h(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sbc/formats.h')
expected = [
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_sbc_sbc_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sbc/sbc.c')
expected = [
u'Copyright (c) 2008-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2004-2005 Henryk Ploetz <henryk@ploetzli.ch>',
u'Copyright (c) 2005-2008 Brad Midgley <bmidgley@xmission.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_sbc_sbc_h(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sbc/sbc.h')
expected = [
u'Copyright (c) 2008-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2004-2005 Henryk Ploetz <henryk@ploetzli.ch>',
u'Copyright (c) 2005-2006 Brad Midgley <bmidgley@xmission.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_sbc_sbc_primitives_iwmmxt_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sbc/sbc_primitives_iwmmxt.c')
expected = [
u'Copyright (c) 2010 Keith Mok <ek9852@gmail.com>',
u'Copyright (c) 2008-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2004-2005 Henryk Ploetz <henryk@ploetzli.ch>',
u'Copyright (c) 2005-2006 Brad Midgley <bmidgley@xmission.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_sbc_sbcdec_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sbc/sbcdec.c')
expected = [
u'Copyright (c) 2008-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'(c) 2004-2010 Marcel Holtmann',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_bluetooth_bluez_sbc_sbcdec_c_lead_copy(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sbc/sbcdec.c')
expected = [
u'Copyright (c) 2008-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2004-2010 Marcel Holtmann',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_sbc_sbctester_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sbc/sbctester.c')
expected = [
u'Copyright (c) 2008-2010 Nokia Corporation',
u'Copyright (c) 2007-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2007-2008 Frederic Dalleau <fdalleau@free.fr>',
u'(c) 2007-2010 Marcel Holtmann',
u'(c) 2007-2008 Frederic Dalleau',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_bluetooth_bluez_sbc_sbctester_c_lead_copy_lead_copy(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sbc/sbctester.c')
expected = [
u'Copyright (c) 2008-2010 Nokia Corporation',
u'Copyright (c) 2007-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2007-2008 Frederic Dalleau <fdalleau@free.fr>',
u'Copyright (c) 2007-2010 Marcel Holtmann',
u'Copyright (c) 2007-2008 Frederic Dalleau',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_src_dbus_common_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-src/dbus-common.c')
expected = [
u'Copyright (c) 2006-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2005-2007 Johan Hedberg <johan.hedberg@nokia.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_src_error_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-src/error.c')
expected = [
u'Copyright (c) 2006-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2007-2008 Fabien Chevalier <fabchevalier@free.fr>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_src_notice(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-src/NOTICE')
expected = [
u'Copyright (c) 2000-2001 Qualcomm Incorporated',
u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2002-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_src_sdp_xml_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-src/sdp-xml.c')
expected = [
u'Copyright (c) 2005-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_test_attest_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-test/attest.c')
expected = [
u'Copyright (c) 2001-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_test_avtest_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-test/avtest.c')
expected = [
u'Copyright (c) 2007-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2009-2010 Nokia Corporation',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_test_gaptest_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-test/gaptest.c')
expected = [
u'Copyright (c) 2007-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_test_hciemu_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-test/hciemu.c')
expected = [
u'Copyright (c) 2000-2002 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2003-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_test_ipctest_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-test/ipctest.c')
expected = [
u'Copyright (c) 2006-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2009 Lennart Poettering',
u'Copyright (c) 2008 Joao Paulo Rechi Vita',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_test_notice(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-test/NOTICE')
expected = [
u'Copyright (c) 2000-2001 Qualcomm Incorporated',
u'Copyright (c) 2000-2002 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2001-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2002-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2003-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2005-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2007-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2009 Nokia Corporation',
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_tools_hciattach_ath3k_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-tools/hciattach_ath3k.c')
expected = [
u'Copyright (c) 2009-2010 Atheros Communications Inc.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_tools_hciattach_qualcomm_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-tools/hciattach_qualcomm.c')
expected = [
u'Copyright (c) 2005-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2010, Code Aurora Forum.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_tools_hciattach_ti_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-tools/hciattach_ti.c')
expected = [
u'Copyright (c) 2007-2008 Texas Instruments, Inc.',
u'Copyright (c) 2005-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_tools_hid2hci_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-tools/hid2hci.c')
expected = [
u'Copyright (c) 2003-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2008-2009 Mario Limonciello <mario_limonciello@dell.com>',
u'Copyright (c) 2009-2011 Kay Sievers <kay.sievers@vrfy.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_tools_lexer_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-tools/lexer.c')
expected = [
u'Copyright (c) 2002-2008 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_tools_notice(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-tools/NOTICE')
expected = [
u'Copyright (c) 2000-2001 Qualcomm Incorporated',
u'Copyright (c) 2001-2002 Nokia Corporation',
u'Copyright (c) 2002-2003 Jean Tourrilhes <jt@hpl.hp.com>',
u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2002-2003 Stephen Crane <steve.crane@rococosoft.com>',
u'Copyright (c) 2002-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2003-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2005-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2006-2007 Nokia Corporation',
u'Copyright (c) 2007-2008 Texas Instruments, Inc.',
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_tools_sdptool_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-tools/sdptool.c')
expected = [
u'Copyright (c) 2001-2002 Nokia Corporation',
u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2002-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2002-2003 Stephen Crane <steve.crane@rococosoft.com>',
u'Copyright (c) 2002-2003 Jean Tourrilhes <jt@hpl.hp.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_tools_ubcsp_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-tools/ubcsp.c')
expected = [
u'Copyright (c) 2000-2005 CSR Ltd.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_acinclude_m4(self):
test_file = self.get_test_loc('ics/bluetooth-glib/acinclude.m4')
expected = [
u'Copyright (c) 2001-2002 Free Software Foundation, Inc.',
u'Copyright (c) 1999-2003 Free Software Foundation, Inc.',
u'Copyright (c) 2002 Free Software Foundation, Inc.',
u'Copyright (c) 2002 Free Software Foundation, Inc.',
u'Copyright (c) 2003 Free Software Foundation, Inc.',
u'Copyright (c) 1997-2002 Free Software Foundation, Inc.',
u'Copyright (c) 1997-2002 Free Software Foundation, Inc.',
u'Copyright (c) 1997-2002 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_copying(self):
test_file = self.get_test_loc('ics/bluetooth-glib/COPYING')
expected = [
u'Copyright (c) 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_glib_h(self):
test_file = self.get_test_loc('ics/bluetooth-glib/glib.h')
expected = [
u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_glib_gettextize_in(self):
test_file = self.get_test_loc('ics/bluetooth-glib/glib-gettextize.in')
expected = [
u'Copyright (c) 1995-1998, 2000, 2001 Free Software Foundation, Inc.',
u'Copyright (c) 1995-1998, 2000, 2001 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_docs_reference_glib_regex_syntax_sgml(self):
test_file = self.get_test_loc('ics/bluetooth-glib-docs-reference-glib/regex-syntax.sgml')
expected = [
u'Copyright (c) 1997-2006 University of Cambridge.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_gappinfo_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio/gappinfo.c')
expected = [
u'Copyright (c) 2006-2007 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_gbufferedinputstream_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio/gbufferedinputstream.c')
expected = [
u'Copyright (c) 2006-2007 Red Hat, Inc.',
u'Copyright (c) 2007 Jurg Billeter',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_gdatainputstream_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio/gdatainputstream.c')
expected = [
u'Copyright (c) 2006-2007 Red Hat, Inc.',
u'Copyright (c) 2007 Jurg Billeter',
u'Copyright (c) 2009 Codethink Limited',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_gdesktopappinfo_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio/gdesktopappinfo.c')
expected = [
u'Copyright (c) 2006-2007 Red Hat, Inc.',
u'Copyright (c) 2007 Ryan Lortie',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_gemblem_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio/gemblem.c')
expected = [
u'Copyright (c) 2008 Clemens N. Buss <cebuzz@gmail.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_gmount_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio/gmount.c')
expected = [
u'Copyright (c) 2006-2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_gwin32mount_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio/gwin32mount.c')
expected = [
u'Copyright (c) 2006-2007 Red Hat, Inc.',
u'Copyright (c) 2008 Hans Breuer',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_fam_fam_module_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-fam/fam-module.c')
expected = [
u'Copyright (c) 2006-2007 Red Hat, Inc.',
u'Copyright (c) 2007 Sebastian Droge.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_fen_fen_data_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-fen/fen-data.c')
expected = [
u'Copyright (c) 2008 Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_fen_gfendirectorymonitor_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-fen/gfendirectorymonitor.c')
expected = [
u'Copyright (c) 2006-2007 Red Hat, Inc.',
u'Copyright (c) 2007 Sebastian Droge.',
u'Copyright (c) 2008 Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_inotify_inotify_diag_c(self):
    expected = [u'Copyright (c) 2005 John McCutchan']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gio-inotify/inotify-diag.c'))

def test_ics_bluetooth_glib_gio_inotify_inotify_diag_h(self):
    expected = [u'Copyright (c) 2006 John McCutchan <john@johnmccutchan.com>']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gio-inotify/inotify-diag.h'))

def test_ics_bluetooth_glib_gio_inotify_inotify_helper_c(self):
    expected = [u'Copyright (c) 2007 John McCutchan']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gio-inotify/inotify-helper.c'))

def test_ics_bluetooth_glib_gio_inotify_inotify_path_c(self):
    expected = [u'Copyright (c) 2006 John McCutchan']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gio-inotify/inotify-path.c'))

def test_ics_bluetooth_glib_gio_tests_buffered_input_stream_c(self):
    expected = [u'Copyright (c) 2008 Red Hat, Inc. Authors']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gio-tests/buffered-input-stream.c'))

@expectedFailure
def test_ics_bluetooth_glib_gio_tests_buffered_input_stream_c_extra_author(self):
    expected = [u'Copyright (c) 2008 Red Hat, Inc.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gio-tests/buffered-input-stream.c'))

def test_ics_bluetooth_glib_gio_tests_desktop_app_info_c(self):
    expected = [u'Copyright (c) 2008 Red Hat, Inc']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gio-tests/desktop-app-info.c'))

def test_ics_bluetooth_glib_gio_tests_filter_streams_c(self):
    expected = [u'Copyright (c) 2009 Codethink Limited']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gio-tests/filter-streams.c'))

def test_ics_bluetooth_glib_gio_tests_memory_input_stream_c(self):
    expected = [u'Copyright (c) 2007 Imendio AB Authors Tim Janik']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gio-tests/memory-input-stream.c'))

@expectedFailure
def test_ics_bluetooth_glib_gio_tests_memory_input_stream_c_extra_author(self):
    expected = [u'Copyright (c) 2007 Imendio AB']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gio-tests/memory-input-stream.c'))

def test_ics_bluetooth_glib_gio_tests_simple_async_result_c(self):
    expected = [u'Copyright (c) 2009 Ryan Lortie']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gio-tests/simple-async-result.c'))

def test_ics_bluetooth_glib_gio_win32_gwinhttpfile_c(self):
    expected = [
        u'Copyright (c) 2006-2007 Red Hat, Inc.',
        u'Copyright (c) 2008 Novell, Inc.',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gio-win32/gwinhttpfile.c'))
def test_ics_bluetooth_glib_gio_win32_winhttp_h(self):
    expected = [u'Copyright (c) 2007 Francois Gouget']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gio-win32/winhttp.h'))

def test_ics_bluetooth_glib_gio_xdgmime_test_mime_c(self):
    expected = [
        u'Copyright (c) 2003,2004 Red Hat, Inc.',
        u'Copyright (c) 2003,2004 Jonathan Blandford <jrb@alum.mit.edu>',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gio-xdgmime/test-mime.c'))

def test_ics_bluetooth_glib_gio_xdgmime_xdgmime_h(self):
    expected = [
        u'Copyright (c) 2003 Red Hat, Inc.',
        u'Copyright (c) 2003 Jonathan Blandford <jrb@alum.mit.edu>',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gio-xdgmime/xdgmime.h'))

def test_ics_bluetooth_glib_gio_xdgmime_xdgmimealias_c(self):
    expected = [
        u'Copyright (c) 2004 Red Hat, Inc.',
        u'Copyright (c) 2004 Matthias Clasen <mclasen@redhat.com>',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gio-xdgmime/xdgmimealias.c'))

def test_ics_bluetooth_glib_gio_xdgmime_xdgmimealias_h(self):
    expected = [u'Copyright (c) 2004 Red Hat, Inc.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gio-xdgmime/xdgmimealias.h'))

def test_ics_bluetooth_glib_gio_xdgmime_xdgmimecache_c(self):
    expected = [u'Copyright (c) 2005 Matthias Clasen <mclasen@redhat.com>']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gio-xdgmime/xdgmimecache.c'))

def test_ics_bluetooth_glib_gio_xdgmime_xdgmimeicon_c(self):
    expected = [u'Copyright (c) 2008 Red Hat, Inc.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gio-xdgmime/xdgmimeicon.c'))

def test_ics_bluetooth_glib_gio_xdgmime_xdgmimemagic_c(self):
    expected = [
        u'Copyright (c) 2003 Red Hat, Inc.',
        u'Copyright (c) 2003 Jonathan Blandford <jrb@alum.mit.edu>',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gio-xdgmime/xdgmimemagic.c'))

def test_ics_bluetooth_glib_glib_gatomic_c(self):
    expected = [
        u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
        u'Copyright (c) 2003 Sebastian Wilhelmi',
        u'Copyright (c) 2007 Nokia Corporation',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gatomic.c'))

def test_ics_bluetooth_glib_glib_gatomic_h(self):
    expected = [
        u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
        u'Copyright (c) 2003 Sebastian Wilhelmi',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gatomic.h'))

def test_ics_bluetooth_glib_glib_gbase64_h(self):
    expected = [u'Copyright (c) 2005 Alexander Larsson <alexl@redhat.com>']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gbase64.h'))

def test_ics_bluetooth_glib_glib_gbookmarkfile_h(self):
    expected = [u'Copyright (c) 2005-2006 Emmanuele Bassi']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gbookmarkfile.h'))

def test_ics_bluetooth_glib_glib_gbsearcharray_h(self):
    expected = [u'Copyright (c) 2000-2003 Tim Janik']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gbsearcharray.h'))

def test_ics_bluetooth_glib_glib_gchecksum_h(self):
    expected = [u'Copyright (c) 2007 Emmanuele Bassi <ebassi@gnome.org>']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gchecksum.h'))
def test_ics_bluetooth_glib_glib_gconvert_c(self):
    expected = [u'Copyright Red Hat Inc., 2000']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gconvert.c'))

def test_ics_bluetooth_glib_glib_gdataset_c(self):
    expected = [
        u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
        u'Copyright (c) 1998 Tim Janik',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gdataset.c'))

def test_ics_bluetooth_glib_glib_gdatasetprivate_h(self):
    expected = [u'Copyright (c) 2005 Red Hat']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gdatasetprivate.h'))

def test_ics_bluetooth_glib_glib_gdir_c(self):
    expected = [
        u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
        u'Copyright 2001 Hans Breuer',
        u'Copyright 2004 Tor Lillqvist',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gdir.c'))

def test_ics_bluetooth_glib_glib_gdir_h(self):
    expected = [
        u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
        u'Copyright 2001 Hans Breuer',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gdir.h'))

def test_ics_bluetooth_glib_glib_gerror_h(self):
    expected = [u'Copyright 2000 Red Hat, Inc.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gerror.h'))

def test_ics_bluetooth_glib_glib_gfileutils_c(self):
    expected = [
        u'Copyright 2000 Red Hat, Inc.',
        u'Copyright (c) 1991,92,93,94,95,96,97,98,99 Free Software Foundation, Inc.',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gfileutils.c'))

def test_ics_bluetooth_glib_glib_gi18n_lib_h(self):
    expected = [u'Copyright (c) 1995-1997, 2002 Peter Mattis, Red Hat, Inc.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gi18n-lib.h'))

def test_ics_bluetooth_glib_glib_giochannel_c(self):
    expected = [
        u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
        u'Copyright 1998 Owen Taylor',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/giochannel.c'))

def test_ics_bluetooth_glib_glib_gkeyfile_c(self):
    expected = [u'Copyright 2004 Red Hat, Inc.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gkeyfile.c'))

def test_ics_bluetooth_glib_glib_gkeyfile_h(self):
    expected = [u'Copyright 2004 Red Hat, Inc.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gkeyfile.h'))

def test_ics_bluetooth_glib_glib_glib_object_h(self):
    expected = [u'Copyright (c) 1998, 1999, 2000 Tim Janik and Red Hat, Inc.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/glib-object.h'))

def test_ics_bluetooth_glib_glib_gmain_h(self):
    expected = [u'Copyright (c) 1998-2000 Red Hat, Inc.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gmain.h'))

def test_ics_bluetooth_glib_glib_gmappedfile_h(self):
    expected = [u'Copyright 2005 Matthias Clasen']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gmappedfile.h'))

def test_ics_bluetooth_glib_glib_goption_c(self):
    expected = [
        u'Copyright (c) 1999, 2003 Red Hat Software',
        u'Copyright (c) 2004 Anders Carlsson <andersca@gnome.org>',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/goption.c'))
def test_ics_bluetooth_glib_glib_goption_h(self):
    expected = [u'Copyright (c) 2004 Anders Carlsson <andersca@gnome.org>']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/goption.h'))

def test_ics_bluetooth_glib_glib_gpattern_c(self):
    expected = [u'Copyright (c) 1995-1997, 1999 Peter Mattis, Red Hat, Inc.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gpattern.c'))

def test_ics_bluetooth_glib_glib_gpoll_c(self):
    expected = [
        u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
        u'Copyright 1998 Owen Taylor',
        u'Copyright 2008 Red Hat, Inc.',
        u'Copyright (c) 1994, 1996, 1997 Free Software Foundation, Inc.',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gpoll.c'))

def test_ics_bluetooth_glib_glib_gqsort_c(self):
    expected = [
        u'Copyright (c) 1991, 1992, 1996, 1997,1999,2004 Free Software Foundation, Inc.',
        u'Copyright (c) 2000 Eazel, Inc.',
        u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gqsort.c'))

def test_ics_bluetooth_glib_glib_gregex_h(self):
    expected = [
        u'Copyright (c) 1999, 2000 Scott Wimer',
        u'Copyright (c) 2004, Matthias Clasen <mclasen@redhat.com>',
        u'Copyright (c) 2005 - 2007, Marco Barisione <marco@barisione.org>',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gregex.h'))

def test_ics_bluetooth_glib_glib_gsequence_h(self):
    expected = [u'Copyright (c) 2002, 2003, 2004, 2005, 2006, 2007 Soeren Sandmann (sandmann@daimi.au.dk)']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gsequence.h'))

def test_ics_bluetooth_glib_glib_gslice_c(self):
    expected = [u'Copyright (c) 2005 Tim Janik']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gslice.c'))

def test_ics_bluetooth_glib_glib_gstdio_c(self):
    expected = [u'Copyright 2004 Tor Lillqvist']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gstdio.c'))

def test_ics_bluetooth_glib_glib_gstrfuncs_c(self):
    expected = [
        u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
        u'Copyright (c) 1991,92,94,95,96,97,98,99,2000,01,02 Free Software Foundation, Inc.',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gstrfuncs.c'))

def test_ics_bluetooth_glib_glib_gstring_c(self):
    expected = [u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gstring.c'))

def test_ics_bluetooth_glib_glib_gtestutils_c(self):
    expected = [u'Copyright (c) 2007 Imendio AB Authors Tim Janik, Sven Herzberg']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gtestutils.c'))

@expectedFailure
def test_ics_bluetooth_glib_glib_gtestutils_c_extra_author(self):
    expected = [u'Copyright (c) 2007 Imendio AB']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gtestutils.c'))

def test_ics_bluetooth_glib_glib_gthread_c(self):
    expected = [
        u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
        u'Copyright 1998 Sebastian Wilhelmi University of Karlsruhe Owen Taylor',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gthread.c'))

def test_ics_bluetooth_glib_glib_gthreadprivate_h(self):
    expected = [u'Copyright (c) 2003 Sebastian Wilhelmi']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gthreadprivate.h'))

def test_ics_bluetooth_glib_glib_gunicode_h(self):
    expected = [
        u'Copyright (c) 1999, 2000 Tom Tromey',
        u'Copyright 2000, 2005 Red Hat, Inc.',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gunicode.h'))
def test_ics_bluetooth_glib_glib_gunicodeprivate_h(self):
    expected = [u'Copyright (c) 2003 Noah Levitt']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gunicodeprivate.h'))

def test_ics_bluetooth_glib_glib_gunidecomp_c(self):
    expected = [
        u'Copyright (c) 1999, 2000 Tom Tromey',
        u'Copyright 2000 Red Hat, Inc.',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gunidecomp.c'))

def test_ics_bluetooth_glib_glib_guniprop_c(self):
    expected = [
        u'Copyright (c) 1999 Tom Tromey',
        u'Copyright (c) 2000 Red Hat, Inc.',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/guniprop.c'))

def test_ics_bluetooth_glib_glib_gutils_c(self):
    expected = [
        u'Copyright (c) 1995-1998 Peter Mattis, Spencer Kimball and Josh MacDonald',
        u'Copyright (c) 2007 Red Hat Inc.',
        u'Copyright (c) 1995, 1996, 1997, 1998 Free Software Foundation, Inc.',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib/gutils.c'))

def test_ics_bluetooth_glib_glib_gnulib_asnprintf_c(self):
    expected = [u'Copyright (c) 1999, 2002 Free Software Foundation, Inc.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib-gnulib/asnprintf.c'))

def test_ics_bluetooth_glib_glib_gnulib_g_gnulib_h(self):
    expected = [u'Copyright (c) 2003 Matthias Clasen']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib-gnulib/g-gnulib.h'))

def test_ics_bluetooth_glib_glib_gnulib_printf_c(self):
    expected = [u'Copyright (c) 2003 Matthias Clasen']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib-gnulib/printf.c'))

def test_ics_bluetooth_glib_glib_gnulib_printf_args_c(self):
    expected = [u'Copyright (c) 1999, 2002-2003 Free Software Foundation, Inc.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib-gnulib/printf-args.c'))

def test_ics_bluetooth_glib_glib_gnulib_printf_parse_c(self):
    expected = [u'Copyright (c) 1999-2000, 2002-2003 Free Software Foundation, Inc.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib-gnulib/printf-parse.c'))

def test_ics_bluetooth_glib_glib_gnulib_vasnprintf_h(self):
    expected = [u'Copyright (c) 2002-2003 Free Software Foundation, Inc.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-glib-gnulib/vasnprintf.h'))

def test_ics_bluetooth_glib_gmodule_gmodule_c(self):
    expected = [u'Copyright (c) 1998 Tim Janik']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gmodule/gmodule.c'))

def test_ics_bluetooth_glib_gmodule_gmodule_rc_in(self):
    expected = [u'Copyright (c) 1998-2000 Tim Janik.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gmodule/gmodule.rc.in'))

def test_ics_bluetooth_glib_gmodule_gmodule_ar_c(self):
    expected = [u'Copyright (c) 1998, 2000 Tim Janik']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gmodule/gmodule-ar.c'))

def test_ics_bluetooth_glib_gmodule_gmodule_beos_c(self):
    expected = [
        u'Copyright (c) 1998, 2000 Tim Janik',
        u'Copyright (c) 1999 Richard',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gmodule/gmodule-beos.c'))

@expectedFailure
def test_ics_bluetooth_glib_gmodule_gmodule_beos_c_trail_name(self):
    expected = [
        u'Copyright (c) 1998, 2000 Tim Janik',
        u'Copyright (C) 1999 Richard Offer and Shawn T. Amundson (amundson@gtk.org)',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gmodule/gmodule-beos.c'))
def test_ics_bluetooth_glib_gmodule_gmodule_dyld_c(self):
    expected = [
        u'Copyright (c) 1998, 2000 Tim Janik',
        u'Copyright (c) 2001 Dan Winship',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gmodule/gmodule-dyld.c'))

def test_ics_bluetooth_glib_gmodule_gmodule_win32_c(self):
    expected = [
        u'Copyright (c) 1998, 2000 Tim Janik',
        u'Copyright (c) 1998 Tor Lillqvist',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gmodule/gmodule-win32.c'))

def test_ics_bluetooth_glib_gobject_gboxed_c(self):
    expected = [u'Copyright (c) 2000-2001 Red Hat, Inc.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gobject/gboxed.c'))

def test_ics_bluetooth_glib_gobject_gclosure_c(self):
    expected = [
        u'Copyright (c) 2000-2001 Red Hat, Inc.',
        u'Copyright (c) 2005 Imendio AB',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gobject/gclosure.c'))

def test_ics_bluetooth_glib_gobject_genums_c(self):
    expected = [u'Copyright (c) 1998-1999, 2000-2001 Tim Janik and Red Hat, Inc.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gobject/genums.c'))

def test_ics_bluetooth_glib_gobject_gobject_rc_in(self):
    expected = [u'Copyright (c) 1998-2004 Tim Janik and Red Hat, Inc.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gobject/gobject.rc.in'))

def test_ics_bluetooth_glib_gobject_gparam_c(self):
    expected = [u'Copyright (c) 1997-1999, 2000-2001 Tim Janik and Red Hat, Inc.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gobject/gparam.c'))

def test_ics_bluetooth_glib_gobject_gsourceclosure_c(self):
    expected = [u'Copyright (c) 2001 Red Hat, Inc.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gobject/gsourceclosure.c'))

def test_ics_bluetooth_glib_gobject_gtypemodule_c(self):
    expected = [u'Copyright (c) 2000 Red Hat, Inc.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gobject/gtypemodule.c'))

def test_ics_bluetooth_glib_gobject_makefile_am(self):
    expected = [u'Copyright (c) 1997,98,99,2000 Tim Janik and Red Hat, Inc.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gobject/Makefile.am'))

def test_ics_bluetooth_glib_gobject_tests_threadtests_c(self):
    expected = [u'Copyright (c) 2008 Imendio AB Authors Tim Janik']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gobject-tests/threadtests.c'))

@expectedFailure
def test_ics_bluetooth_glib_gobject_tests_threadtests_c_extra_author(self):
    expected = [u'Copyright (c) 2008 Imendio AB']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gobject-tests/threadtests.c'))

def test_ics_bluetooth_glib_gthread_gthread_rc_in(self):
    expected = [
        u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball',
        u'Copyright (c) 1998 Sebastian Wilhelmi.',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gthread/gthread.rc.in'))

@expectedFailure
def test_ics_bluetooth_glib_gthread_gthread_rc_in_trail_name(self):
    expected = [
        u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald.',
        u'Copyright (c) 1998 Sebastian Wilhelmi.',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gthread/gthread.rc.in'))
def test_ics_bluetooth_glib_gthread_gthread_win32_c(self):
    expected = [
        u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
        u'Copyright 1998-2001 Sebastian Wilhelmi University of Karlsruhe',
        u'Copyright 2001 Hans Breuer',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-gthread/gthread-win32.c'))

def test_ics_bluetooth_glib_m4macros_glib_gettext_m4(self):
    expected = [
        u'Copyright (c) 1995-2002 Free Software Foundation, Inc.',
        u'Copyright (c) 2001-2003,2004 Red Hat, Inc.',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-m4macros/glib-gettext.m4'))

def test_ics_bluetooth_glib_po_makefile_in_in(self):
    expected = [u'Copyright (c) 1995, 1996, 1997 by Ulrich Drepper <drepper@gnu.ai.mit.edu>']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-po/Makefile.in.in'))

@expectedFailure
def test_ics_bluetooth_glib_po_po2tbl_sed_in(self):
    expected = [u'Copyright (c) 1995 Free Software Foundation, Inc. Ulrich Drepper <drepper@gnu.ai.mit.edu>, 1995.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-po/po2tbl.sed.in'))

def test_ics_bluetooth_glib_tests_gio_test_c(self):
    expected = [u'Copyright (c) 2000 Tor Lillqvist']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-tests/gio-test.c'))

def test_ics_bluetooth_glib_tests_hash_test_c(self):
    expected = [
        u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
        u'Copyright (c) 1999 The Free Software Foundation',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-tests/hash-test.c'))

def test_ics_bluetooth_glib_tests_mapping_test_c(self):
    expected = [u'Copyright (c) 2005 Matthias Clasen']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-tests/mapping-test.c'))

def test_ics_bluetooth_glib_tests_markup_collect_c(self):
    expected = [u'Copyright (c) 2007 Ryan Lortie']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-tests/markup-collect.c'))

def test_ics_bluetooth_glib_tests_onceinit_c(self):
    expected = [u'Copyright (c) 2007 Tim Janik']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-tests/onceinit.c'))

def test_ics_bluetooth_glib_tests_patterntest_c(self):
    expected = [u'Copyright (c) 2001 Matthias Clasen <matthiasc@poet.de>']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-tests/patterntest.c'))

def test_ics_bluetooth_glib_tests_regex_test_c(self):
    expected = [u'Copyright (c) 2005 - 2006, Marco Barisione <marco@barisione.org>']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-tests/regex-test.c'))

def test_ics_bluetooth_glib_tests_scannerapi_c(self):
    expected = [
        u'Copyright (c) 2007 Patrick Hulin',
        u'Copyright (c) 2007 Imendio AB Authors Tim Janik',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-tests/scannerapi.c'))

@expectedFailure
def test_ics_bluetooth_glib_tests_scannerapi_c_extra_author(self):
    expected = [
        u'Copyright (c) 2007 Patrick Hulin',
        u'Copyright (c) 2007 Imendio AB',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-tests/scannerapi.c'))

def test_ics_bluetooth_glib_tests_slice_concurrent_c(self):
    expected = [
        u'Copyright (c) 2006 Stefan Westerfeld',
        u'Copyright (c) 2007 Tim Janik',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-tests/slice-concurrent.c'))

def test_ics_bluetooth_glib_tests_testingbase64_c(self):
    expected = [u'Copyright (c) 2008 Asbjoern Pettersen']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-tests/testingbase64.c'))
def test_ics_bluetooth_glib_tests_gobject_accumulator_c(self):
    expected = [u'Copyright (c) 2001, 2003 Red Hat, Inc.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-tests-gobject/accumulator.c'))

def test_ics_bluetooth_glib_tests_gobject_deftype_c(self):
    expected = [u'Copyright (c) 2006 Behdad Esfahbod']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-tests-gobject/deftype.c'))

def test_ics_bluetooth_glib_tests_gobject_override_c(self):
    expected = [
        u'Copyright (c) 2001, James Henstridge',
        u'Copyright (c) 2003, Red Hat, Inc.',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-tests-gobject/override.c'))

def test_ics_bluetooth_glib_tests_gobject_references_c(self):
    expected = [u'Copyright (c) 2005 Red Hat, Inc.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-tests-gobject/references.c'))

def test_ics_bluetooth_glib_tests_gobject_singleton_c(self):
    expected = [u'Copyright (c) 2006 Imendio AB']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-tests-gobject/singleton.c'))

def test_ics_bluetooth_glib_tests_gobject_testcommon_h(self):
    expected = [u'Copyright (c) 2003 Red Hat, Inc.']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-tests-gobject/testcommon.h'))

def test_ics_bluetooth_glib_tests_refcount_closures_c(self):
    expected = [u'Copyright (c) 2005 Imendio AB']
    check_detection(expected, self.get_test_loc('ics/bluetooth-glib-tests-refcount/closures.c'))

def test_ics_bluetooth_hcidump_readme(self):
    expected = [
        u'Copyright (c) 2000-2002 Maxim Krasnyansky <maxk@qualcomm.com>',
        u'Copyright (c) 2003-2011 Marcel Holtmann <marcel@holtmann.org>',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-hcidump/README'))

def test_ics_bluetooth_hcidump_parser_att_c(self):
    expected = [u'Copyright (c) 2011 Andre Dieb Martins <andre.dieb@gmail.com>']
    check_detection(expected, self.get_test_loc('ics/bluetooth-hcidump-parser/att.c'))

def test_ics_bluetooth_hcidump_parser_bnep_c(self):
    expected = [
        u'Copyright (c) 2002-2003 Takashi Sasai <sasai@sm.sony.co.jp>',
        u'Copyright (c) 2003-2011 Marcel Holtmann <marcel@holtmann.org>',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-hcidump-parser/bnep.c'))

def test_ics_bluetooth_hcidump_parser_cmtp_c(self):
    expected = [u'Copyright (c) 2002-2011 Marcel Holtmann <marcel@holtmann.org>']
    check_detection(expected, self.get_test_loc('ics/bluetooth-hcidump-parser/cmtp.c'))

def test_ics_bluetooth_hcidump_parser_hci_c(self):
    expected = [
        u'Copyright (c) 2000-2002 Maxim Krasnyansky <maxk@qualcomm.com>',
        u'Copyright (c) 2003-2011 Marcel Holtmann <marcel@holtmann.org>',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-hcidump-parser/hci.c'))

def test_ics_bluetooth_hcidump_parser_hidp_c(self):
    expected = [u'Copyright (c) 2003-2011 Marcel Holtmann <marcel@holtmann.org>']
    check_detection(expected, self.get_test_loc('ics/bluetooth-hcidump-parser/hidp.c'))

def test_ics_bluetooth_hcidump_parser_rfcomm_c(self):
    expected = [
        u'Copyright (c) 2001-2002 Wayne Lee <waynelee@qualcomm.com>',
        u'Copyright (c) 2003-2011 Marcel Holtmann <marcel@holtmann.org>',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-hcidump-parser/rfcomm.c'))

def test_ics_bluetooth_hcidump_parser_sdp_c(self):
    expected = [
        u'Copyright (c) 2001-2002 Ricky Yuen <ryuen@qualcomm.com>',
        u'Copyright (c) 2003-2011 Marcel Holtmann <marcel@holtmann.org>',
    ]
    check_detection(expected, self.get_test_loc('ics/bluetooth-hcidump-parser/sdp.c'))
def test_ics_bouncycastle_notice(self):
    expected = [u'Copyright (c) 2000-2010 The Legion Of The Bouncy Castle']
    check_detection(expected, self.get_test_loc('ics/bouncycastle/NOTICE'))

def test_ics_bouncycastle_src_main_java_org_bouncycastle_crypto_digests_openssldigest_java(self):
    expected = [u'Copyright (c) 2008 The Android Open Source Project']
    check_detection(expected, self.get_test_loc('ics/bouncycastle-src-main-java-org-bouncycastle-crypto-digests/OpenSSLDigest.java'))

def test_ics_bsdiff_bsdiff_1(self):
    expected = [u'Copyright 2003-2005 Colin Percival']
    check_detection(expected, self.get_test_loc('ics/bsdiff/bsdiff.1'))

def test_ics_bsdiff_bsdiff_c(self):
    expected = [u'Copyright 2003-2005 Colin Percival']
    check_detection(expected, self.get_test_loc('ics/bsdiff/bsdiff.c'))

def test_ics_bzip2_blocksort_c(self):
    expected = [u'Copyright (c) 1996-2010 Julian Seward <jseward@bzip.org>']
    check_detection(expected, self.get_test_loc('ics/bzip2/blocksort.c'))

def test_ics_bzip2_bzip2_c(self):
    expected = [
        u'Copyright (c) 1996-2010 Julian Seward <jseward@bzip.org>',
        u'Copyright (c) 1996-2010 by Julian Seward.',
    ]
    check_detection(expected, self.get_test_loc('ics/bzip2/bzip2.c'))

def test_ics_bzip2_license(self):
    expected = [u'copyright (c) 1996-2010 Julian R Seward.']
    check_detection(expected, self.get_test_loc('ics/bzip2/LICENSE'))

def test_ics_bzip2_makefile(self):
    expected = [u'Copyright (c) 1996-2010 Julian Seward <jseward@bzip.org>']
    check_detection(expected, self.get_test_loc('ics/bzip2/Makefile'))

def test_ics_bzip2_manual_html(self):
    expected = [
        u'Copyright (c) 1996-2010 Julian Seward',
        u'copyright (c) 1996-2010 Julian Seward.',
    ]
    check_detection(expected, self.get_test_loc('ics/bzip2/manual.html'))

def test_ics_bzip2_xmlproc_sh(self):
    expected = [u'Copyright (c) 1996-2010 Julian Seward <jseward@bzip.org>']
    check_detection(expected, self.get_test_loc('ics/bzip2/xmlproc.sh'))

def test_ics_chromium_license(self):
    expected = [u'Copyright (c) 2010 The Chromium Authors.']
    check_detection(expected, self.get_test_loc('ics/chromium/LICENSE'))

def test_ics_chromium_android_execinfo_cc(self):
    expected = [u'Copyright (c) 2011 The Chromium Authors.']
    check_detection(expected, self.get_test_loc('ics/chromium-android/execinfo.cc'))

def test_ics_chromium_android_prefix_h(self):
    expected = [u'Copyright 2010, The Android Open Source Project']
    check_detection(expected, self.get_test_loc('ics/chromium-android/prefix.h'))

def test_ics_chromium_android_jni_jni_utils_cc(self):
    expected = [u'Copyright (c) 2010 The Chromium Authors.']
    check_detection(expected, self.get_test_loc('ics/chromium-android-jni/jni_utils.cc'))
def test_ics_chromium_android_ui_base_l10n_l10n_util_cc(self):
test_file = self.get_test_loc('ics/chromium-android-ui-base-l10n/l10n_util.cc')
expected = [
u'Copyright 2010, The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_chromium_app_sql_init_status_h(self):
test_file = self.get_test_loc('ics/chromium-app-sql/init_status.h')
expected = [
u'Copyright (c) 2009 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_base_atomicops_internals_x86_gcc_cc(self):
test_file = self.get_test_loc('ics/chromium-base/atomicops_internals_x86_gcc.cc')
expected = [
u'Copyright (c) 2006-2008 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_base_atomicops_internals_x86_gcc_h(self):
test_file = self.get_test_loc('ics/chromium-base/atomicops_internals_x86_gcc.h')
expected = [
u'Copyright (c) 2006-2008 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_base_base_gyp(self):
test_file = self.get_test_loc('ics/chromium-base/base.gyp')
expected = [
u'Copyright (c) 2011 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_base_compat_execinfo_h(self):
test_file = self.get_test_loc('ics/chromium-base/compat_execinfo.h')
expected = [
u'Copyright (c) 2006-2009 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_base_file_version_info_h(self):
test_file = self.get_test_loc('ics/chromium-base/file_version_info.h')
expected = [
u'Copyright (c) 2011 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_base_file_version_info_mac_mm(self):
    """Expect one copyright statement in file_version_info_mac.mm."""
    path = self.get_test_loc('ics/chromium-base/file_version_info_mac.mm')
    check_detection([u'Copyright (c) 2011 The Chromium Authors.'], path)
def test_ics_chromium_base_foundation_utils_mac_h(self):
    """Expect one copyright statement in foundation_utils_mac.h."""
    path = self.get_test_loc('ics/chromium-base/foundation_utils_mac.h')
    check_detection([u'Copyright (c) 2008 The Chromium Authors.'], path)
def test_ics_chromium_base_md5_cc(self):
    """Expect one copyright statement in md5.cc."""
    path = self.get_test_loc('ics/chromium-base/md5.cc')
    check_detection([u'Copyright 2006 Google Inc.'], path)
def test_ics_chromium_base_string_tokenizer_h(self):
    """Expect one copyright statement in string_tokenizer.h."""
    path = self.get_test_loc('ics/chromium-base/string_tokenizer.h')
    check_detection([u'Copyright (c) 2010 The Chromium Authors.'], path)
def test_ics_chromium_base_allocator_allocator_gyp(self):
    """Expect one copyright statement in allocator.gyp."""
    path = self.get_test_loc('ics/chromium-base-allocator/allocator.gyp')
    check_detection([u'Copyright (c) 2009 The Chromium Authors.'], path)
def test_ics_chromium_base_i18n_icu_string_conversions_cc(self):
    """Expect two copyright statements in icu_string_conversions.cc."""
    path = self.get_test_loc('ics/chromium-base-i18n/icu_string_conversions.cc')
    copyrights = [
        u'Copyright (c) 2009 The Chromium Authors.',
        u'Copyright (c) 1995-2006 International Business Machines Corporation and others',
    ]
    check_detection(copyrights, path)
def test_ics_chromium_base_third_party_dmg_fp_dtoa_cc(self):
    """Expect one copyright statement in dtoa.cc."""
    path = self.get_test_loc('ics/chromium-base-third_party-dmg_fp/dtoa.cc')
    check_detection([u'Copyright (c) 1991, 2000, 2001 by Lucent Technologies.'], path)
def test_ics_chromium_base_third_party_dmg_fp_g_fmt_cc(self):
    """Expect one copyright statement in g_fmt.cc."""
    path = self.get_test_loc('ics/chromium-base-third_party-dmg_fp/g_fmt.cc')
    check_detection([u'Copyright (c) 1991, 1996 by Lucent Technologies.'], path)
def test_ics_chromium_base_third_party_dmg_fp_license(self):
    """Expect one copyright statement in the dmg_fp LICENSE."""
    path = self.get_test_loc('ics/chromium-base-third_party-dmg_fp/LICENSE')
    check_detection([u'Copyright (c) 1991, 2000, 2001 by Lucent Technologies.'], path)
def test_ics_chromium_base_third_party_dmg_fp_thirdpartyproject_prop(self):
    """Expect one copyright statement in ThirdPartyProject.prop."""
    path = self.get_test_loc('ics/chromium-base-third_party-dmg_fp/ThirdPartyProject.prop')
    check_detection([u'Copyright 2011 Google Inc.'], path)
def test_ics_chromium_base_third_party_dynamic_annotations_dynamic_annotations_c(self):
    """Expect one copyright statement in dynamic_annotations.c."""
    path = self.get_test_loc('ics/chromium-base-third_party-dynamic_annotations/dynamic_annotations.c')
    check_detection([u'Copyright (c) 2008-2009, Google Inc.'], path)
def test_ics_chromium_base_third_party_dynamic_annotations_dynamic_annotations_gyp(self):
    """Expect one copyright statement in dynamic_annotations.gyp."""
    path = self.get_test_loc('ics/chromium-base-third_party-dynamic_annotations/dynamic_annotations.gyp')
    check_detection([u'Copyright (c) 2010 The Chromium Authors.'], path)
def test_ics_chromium_base_third_party_icu_icu_utf_cc_trail_other(self):
    """Expect one copyright statement in icu_utf.cc."""
    path = self.get_test_loc('ics/chromium-base-third_party-icu/icu_utf.cc')
    check_detection([u'Copyright (c) 1999-2006, International Business Machines Corporation and others.'], path)
def test_ics_chromium_base_third_party_icu_icu_utf_h(self):
    """Expect one copyright statement in icu_utf.h."""
    path = self.get_test_loc('ics/chromium-base-third_party-icu/icu_utf.h')
    check_detection([u'Copyright (c) 1999-2004, International Business Machines Corporation and others.'], path)
def test_ics_chromium_base_third_party_icu_license_trail_other(self):
    """Expect one copyright statement in the ICU LICENSE."""
    path = self.get_test_loc('ics/chromium-base-third_party-icu/LICENSE')
    check_detection([u'Copyright (c) 1995-2009 International Business Machines Corporation and others'], path)
def test_ics_chromium_base_third_party_nspr_license(self):
    """Expect one copyright statement in the NSPR LICENSE."""
    path = self.get_test_loc('ics/chromium-base-third_party-nspr/LICENSE')
    check_detection([u'Copyright (c) 1998-2000 the Initial Developer.'], path)
def test_ics_chromium_base_third_party_nspr_prcpucfg_h(self):
    """Expect one copyright statement in prcpucfg.h."""
    path = self.get_test_loc('ics/chromium-base-third_party-nspr/prcpucfg.h')
    check_detection([u'Copyright 2008, Google Inc.'], path)
def test_ics_chromium_base_third_party_nspr_prtime_cc(self):
    """Expect two copyright statements in prtime.cc."""
    path = self.get_test_loc('ics/chromium-base-third_party-nspr/prtime.cc')
    copyrights = [
        u'Copyright (c) 2011 Google Inc',
        u'Copyright (c) 1998-2000 the Initial Developer.',
    ]
    check_detection(copyrights, path)
def test_ics_chromium_build_branding_value_sh(self):
    """Expect one copyright statement in branding_value.sh."""
    path = self.get_test_loc('ics/chromium-build/branding_value.sh')
    check_detection([u'Copyright (c) 2008 The Chromium Authors.'], path)
def test_ics_chromium_build_install_build_deps_sh(self):
    """Expect five copyright statements in install-build-deps.sh.

    The FSF notices legitimately appear twice each in the fixture.
    """
    path = self.get_test_loc('ics/chromium-build/install-build-deps.sh')
    copyrights = [
        u'Copyright (c) 2011 The Chromium Authors.',
        u'Copyright 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.',
        u'Copyright 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.',
        u'Copyright 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.',
        u'Copyright 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.',
    ]
    check_detection(copyrights, path)
def test_ics_chromium_build_whitespace_file_txt(self):
    """Expect one copyright statement in whitespace_file.txt."""
    path = self.get_test_loc('ics/chromium-build/whitespace_file.txt')
    check_detection([u'Copyright (c) 2011 The Chromium Authors.'], path)
def test_ics_chromium_build_mac_strip_from_xcode(self):
    """Expect one copyright statement in strip_from_xcode."""
    path = self.get_test_loc('ics/chromium-build-mac/strip_from_xcode')
    check_detection([u'Copyright (c) 2008 The Chromium Authors.'], path)
def test_ics_chromium_chrome_browser_nacl_loader_sb(self):
    """Expect one copyright statement in nacl_loader.sb."""
    path = self.get_test_loc('ics/chromium-chrome-browser/nacl_loader.sb')
    check_detection([u'Copyright (c) 2011 The Chromium Authors.'], path)
def test_ics_chromium_chrome_browser_chromeos_panels_panel_scroller_container_cc(self):
    """Expect one copyright statement in panel_scroller_container.cc."""
    path = self.get_test_loc('ics/chromium-chrome-browser-chromeos-panels/panel_scroller_container.cc')
    check_detection([u'Copyright (c) 2009 The Chromium Authors.'], path)
def test_ics_chromium_chrome_browser_cocoa_authorization_util_mm(self):
    """Expect one copyright statement in authorization_util.mm."""
    path = self.get_test_loc('ics/chromium-chrome-browser-cocoa/authorization_util.mm')
    check_detection([u'Copyright (c) 2009 The Chromium Authors.'], path)
def test_ics_chromium_chrome_browser_download_download_extensions_cc(self):
    """Expect two copyright statements in download_extensions.cc."""
    path = self.get_test_loc('ics/chromium-chrome-browser-download/download_extensions.cc')
    copyrights = [
        u'Copyright (c) 2010 The Chromium Authors.',
        u'Copyright (c) 1998-1999 the Initial Developer.',
    ]
    check_detection(copyrights, path)
def test_ics_chromium_chrome_browser_importer_firefox_profile_lock_cc(self):
    """Expect two copyright statements in firefox_profile_lock.cc."""
    path = self.get_test_loc('ics/chromium-chrome-browser-importer/firefox_profile_lock.cc')
    copyrights = [
        u'Copyright (c) 2010 The Chromium Authors.',
        u'Copyright (c) 2002 the Initial Developer.',
    ]
    check_detection(copyrights, path)
def test_ics_chromium_chrome_browser_importer_firefox_profile_lock_posix_cc(self):
    """Expect two copyright statements in firefox_profile_lock_posix.cc."""
    path = self.get_test_loc('ics/chromium-chrome-browser-importer/firefox_profile_lock_posix.cc')
    copyrights = [
        u'Copyright (c) 2009 The Chromium Authors.',
        u'Copyright (c) 2002 the Initial Developer.',
    ]
    check_detection(copyrights, path)
def test_ics_chromium_chrome_browser_importer_firefox_profile_lock_win_cc(self):
    """Expect two copyright statements in firefox_profile_lock_win.cc."""
    path = self.get_test_loc('ics/chromium-chrome-browser-importer/firefox_profile_lock_win.cc')
    copyrights = [
        u'Copyright (c) 2008 The Chromium Authors.',
        u'Copyright (c) 2002 the Initial Developer.',
    ]
    check_detection(copyrights, path)
def test_ics_chromium_chrome_browser_importer_mork_reader_cc(self):
    """Expect one copyright statement in mork_reader.cc."""
    path = self.get_test_loc('ics/chromium-chrome-browser-importer/mork_reader.cc')
    check_detection([u'Copyright (c) 2006 the Initial Developer.'], path)
def test_ics_chromium_chrome_browser_importer_nss_decryptor_cc(self):
    """Expect two copyright statements in nss_decryptor.cc."""
    path = self.get_test_loc('ics/chromium-chrome-browser-importer/nss_decryptor.cc')
    copyrights = [
        u'Copyright (c) 2011 The Chromium Authors.',
        u'Copyright (c) 1994-2000 the Initial Developer.',
    ]
    check_detection(copyrights, path)
def test_ics_chromium_chrome_browser_importer_nss_decryptor_mac_h(self):
    """Expect two copyright statements in nss_decryptor_mac.h."""
    path = self.get_test_loc('ics/chromium-chrome-browser-importer/nss_decryptor_mac.h')
    copyrights = [
        u'Copyright (c) 2010 The Chromium Authors.',
        u'Copyright (c) 1994-2000 the Initial Developer.',
    ]
    check_detection(copyrights, path)
def test_ics_chromium_chrome_browser_importer_nss_decryptor_win_h(self):
    """Expect two copyright statements in nss_decryptor_win.h."""
    path = self.get_test_loc('ics/chromium-chrome-browser-importer/nss_decryptor_win.h')
    copyrights = [
        u'Copyright (c) 2009 The Chromium Authors.',
        u'Copyright (c) 1994-2000 the Initial Developer.',
    ]
    check_detection(copyrights, path)
def test_ics_chromium_chrome_browser_metrics_system_metrics_proto(self):
    """Expect one copyright statement in system_metrics.proto."""
    path = self.get_test_loc('ics/chromium-chrome-browser-metrics/system_metrics.proto')
    check_detection([u'Copyright (c) 2009 The Chromium Authors.'], path)
def test_ics_chromium_chrome_browser_renderer_host_render_widget_host_view_mac_mm(self):
    """Expect three copyright statements in render_widget_host_view_mac.mm."""
    path = self.get_test_loc('ics/chromium-chrome-browser-renderer_host/render_widget_host_view_mac.mm')
    copyrights = [
        u'Copyright (c) 2011 The Chromium Authors.',
        u'Copyright (c) 2005, 2006, 2007, 2008, 2009 Apple Inc.',
        u'(c) 2006, 2007 Graham Dennis (graham.dennis@gmail.com)',
    ]
    check_detection(copyrights, path)
def test_ics_chromium_chrome_browser_resources_about_credits_html(self):
    """Expect the full, ordered list of copyright statements aggregated in
    about_credits.html.

    NOTE(review): several entries repeat verbatim (e.g. the FSF and
    'Copyright (c) 2010, Google Inc.' lines) — presumably because the
    fixture contains multiple occurrences of the same notice; verify
    against the fixture before deduplicating.
    """
    test_file = self.get_test_loc('ics/chromium-chrome-browser-resources/about_credits.html')
    expected = [
        u'Copyright (c) 1991, 2000, 2001 by Lucent Technologies.',
        u'Copyright (c) 2008-2009, Google Inc.',
        u'Copyright (c) 1998-2000 the Initial Developer.',
        u'Copyright (c) 1994-2000 the Initial Developer.',
        u'(c) Copyright IBM Corporation. 2006, 2006.',
        u'Copyright (c) 2006, Google Inc.',
        u'Copyright (c) 2000-2008 Julian Seward.',
        u'Copyright (c) 2007 Red Hat, inc',
        u'Copyright 2003-2005 Colin Percival',
        u'Copyright (c) 2000 the Initial Developer.',
        u'Copyright 1993 by OpenVision Technologies, Inc.',
        u'Copyright 2007 Google Inc.',
        u'Copyright (c) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Alexander Kellett, Alexey Proskuryakov, Alex Mathews, Allan Sandfeld Jensen, Alp Toker, Anders Carlsson, Andrew Wellington, Antti',
        u'Copyright (c) 1991 Free Software Foundation, Inc.',
        u'copyrighted by the Free Software Foundation',
        u'Copyright (c) 1991, 1999 Free Software Foundation, Inc.',
        u'copyrighted by the Free Software Foundation',
        u'Copyright (c) 2002-2010 The ANGLE Project',
        u'Copyright (c) 2009 Apple Inc.',
        u'Portions Copyright (c) 1999-2007 Apple Inc.',
        u'copyright (c) 1996-2010 Julian R Seward.',
        u'Copyright (c) 2010 The Chromium Authors.',
        u'Copyright (c) 1998-1999 Netscape Communications Corporation.',
        u'Copyright (c) 1998, 1999, 2000 Thai Open Source Software Center Ltd and Clark Cooper',
        u'Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Expat maintainers.',
        u'Copyright (c) 2008 The Khronos Group Inc.',
        u'Copyright (c) 1997, 1998, 1999, 2000, 2001, 2002 by Remco Treffkorn, and others',
        u'Copyright (c) 2005 by Eric S. Raymond.',
        u'Copyright (c) 2007, 2010 Linux Foundation',
        u'Copyright (c) 2006 IBM Corporation',
        u'Copyright (c) 2000, 2006 Sun Microsystems, Inc.',
        u'copyright (c) 1991-1998, Thomas G. Lane.',
        u'Copyright (c) 1995-2009 International Business Machines Corporation and others',
        u'(c) 1999 TaBE Project.',
        u'Copyright (c) 1999 Pai-Hsiang Hsiao.',
        u'Copyright (c) 1999 Computer Systems and Communication Lab, Institute of Information Science, Academia Sinica.',
        u'Copyright 1996 Chih-Hao Tsai Beckman Institute, University of Illinois',
        u'Copyright 2000, 2001, 2002, 2003 Nara Institute of Science and Technology.',
        u'Copyright (c) 2002 the Initial Developer.',
        u'Copyright (c) 2006-2008 Jason Evans',
        u'Copyright (c) International Business Machines Corp., 2002,2007',
        u'Copyright 2000-2007 Niels Provos',
        u'Copyright 2007-2009 Niels Provos and Nick Mathewson',
        u'Copyright (c) 2004 2005, Google Inc.',
        u'copyright (c) 1991-1998, Thomas G. Lane.',
        u'copyright by the Free Software Foundation',
        u'Copyright (c) 1998-2005 Julian Smart, Robert Roebling',
        u'Copyright (c) 2004, 2006-2009 Glenn Randers-Pehrson',
        u'Copyright (c) 2000-2002 Glenn Randers-Pehrson',
        u'Copyright (c) 1998, 1999 Glenn Randers-Pehrson',
        u'Copyright (c) 1996, 1997 Andreas Dilger',
        u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.',
        u'Copyright (c) 2001-2006 Cisco Systems, Inc.',
        u'Copyright (c) 2010, Google Inc.',
        u'Copyright (c) 2010, Google Inc.',
        u'Copyright (c) 1998-2003 Daniel Veillard.',
        u'Copyright (c) 2001-2002 Daniel Veillard.',
        u'Copyright (c) 2001-2002 Thomas Broyer, Charlie Bozeman and Daniel Veillard.',
        u'Copyright (c) 1991 Free Software Foundation, Inc.',
        u'copyrighted by the Free Software Foundation',
        u'Copyright (c) 2005, 2006 Nick Galbreath',
        u'Copyright 2008 MolokoCacao',
        u'Copyright (c) 2004-2009 Sergey Lyubka',
        u'Portions Copyright (c) 2009 Gilbert Wellisch',
        u'Copyright (c) 2002 the Initial Developer.',
        u'Copyright (c) 1998 the Initial Developer.',
        u'Copyright (c) 2004-2009 by Mulle Kybernetik.',
        u'Copyright (c) 2008 The Khronos Group Inc.',
        u'Copyright (c) 1998-2008 The OpenSSL Project.',
        u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
        u'Copyright (c) 2009 The Chromium Authors.',
        u'Copyright 2007 Google Inc.',
        u'Copyright (c) 2010 The Chromium Authors.',
        u'Copyright 2008, Google Inc.',
        u'Copyright (c) 2007 Giampaolo Rodola',
        u'Copyright 2009, Google Inc.',
        u'Copyright (c) 2009 Mozilla Corporation',
        u'Copyright (c) 1998-2007 Marti Maria',
        u'Copyright (c) 1994-1996 SunSoft, Inc.',
        u'Copyright 2009 Google Inc.',
        u'Copyright (c) 2006 Bob Ippolito',
        u'Copyright 2002-2008 Xiph.org Foundation',
        u'Copyright 2002-2008 Jean-Marc Valin',
        u'Copyright 2005-2007 Analog Devices Inc.',
        u'Copyright 2005-2008 Commonwealth Scientific and Industrial Research Organisation (CSIRO)',
        u'Copyright 1993, 2002, 2006 David Rowe',
        u'Copyright 2003 EpicGames',
        u'Copyright 1992-1994 Jutta Degener, Carsten Bormann',
        u'Copyright (c) 1995-1998 The University of Utah and the Regents of the University of California',
        u'Copyright (c) 1998-2005 University of Chicago.',
        u'Copyright (c) 2005-2006 Arizona Board of Regents (University of Arizona).',
        u'Copyright (c) Andrew Tridgell 2004-2005',
        u'Copyright (c) Stefan Metzmacher 2006',
        u'Copyright (c) 2005, Google Inc.',
        u'Copyright (c) 2007 Free Software Foundation, Inc.',
        u'Copyright (c) 1998-1999 Netscape Communications Corporation.',
        u'Copyright (c) 2001-2010 Peter Johnson and other Yasm developers.',
        u'Copyright (c) 1995-2010 Jean-loup Gailly and Mark Adler',
        u'Copyright (c) 1994-2006 Sun Microsystems Inc.',
    ]
    check_detection(expected, test_file)
def test_ics_chromium_chrome_browser_resources_gpu_internals_html(self):
    """Expect one copyright statement in gpu_internals.html."""
    path = self.get_test_loc('ics/chromium-chrome-browser-resources/gpu_internals.html')
    check_detection([u'Copyright (c) 2010 The Chromium Authors.'], path)
def test_ics_chromium_chrome_browser_resources_keyboard_overlay_js(self):
    """Expect one copyright statement in keyboard_overlay.js."""
    path = self.get_test_loc('ics/chromium-chrome-browser-resources/keyboard_overlay.js')
    check_detection([u'Copyright (c) 2011 The Chromium Authors.'], path)
def test_ics_chromium_chrome_browser_resources_file_manager_harness_html(self):
    """Expect one copyright statement in harness.html."""
    path = self.get_test_loc('ics/chromium-chrome-browser-resources-file_manager/harness.html')
    check_detection([u'Copyright (c) 2011 The Chromium Authors.'], path)
def test_ics_chromium_chrome_browser_resources_file_manager_css_file_manager_css(self):
    """Expect one copyright statement in file_manager.css."""
    path = self.get_test_loc('ics/chromium-chrome-browser-resources-file_manager-css/file_manager.css')
    check_detection([u'Copyright (c) 2011 The Chromium Authors.'], path)
def test_ics_chromium_chrome_browser_sync_engine_change_reorder_buffer_cc(self):
    """Expect one copyright statement in change_reorder_buffer.cc."""
    path = self.get_test_loc('ics/chromium-chrome-browser-sync-engine/change_reorder_buffer.cc')
    check_detection([u'Copyright (c) 2006-2009 The Chromium Authors.'], path)
def test_ics_chromium_chrome_browser_sync_engine_clear_data_command_h(self):
    """Expect one copyright statement in clear_data_command.h."""
    path = self.get_test_loc('ics/chromium-chrome-browser-sync-engine/clear_data_command.h')
    check_detection([u'Copyright (c) 2006-2010 The Chromium Authors.'], path)
def test_ics_chromium_chrome_browser_ui_cocoa_applescript_examples_advanced_tab_manipulation_applescript(self):
    """Expect one copyright statement in advanced_tab_manipulation.applescript."""
    path = self.get_test_loc('ics/chromium-chrome-browser-ui-cocoa-applescript-examples/advanced_tab_manipulation.applescript')
    check_detection([u'Copyright (c) 2010 The Chromium Authors.'], path)
def test_ics_chromium_chrome_browser_userfeedback_proto_annotations_proto(self):
    """Expect one copyright statement in annotations.proto."""
    path = self.get_test_loc('ics/chromium-chrome-browser-userfeedback-proto/annotations.proto')
    check_detection([u'Copyright 2009 Google Inc.'], path)
def test_ics_chromium_chrome_browser_userfeedback_proto_chrome_proto(self):
    """Expect one copyright statement in chrome.proto."""
    path = self.get_test_loc('ics/chromium-chrome-browser-userfeedback-proto/chrome.proto')
    check_detection([u'Copyright 2010 Google Inc.'], path)
def test_ics_chromium_chrome_common_extensions_docs_examples_api_i18n_cld_background_html(self):
    """Expect one copyright statement in the cld example background.html."""
    path = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-api-i18n-cld/background.html')
    check_detection([u'Copyright (c) 2009 The Chromium Authors.'], path)
def test_ics_chromium_chrome_common_extensions_docs_examples_api_notifications_background_html(self):
    """Expect one copyright statement in the notifications example background.html."""
    path = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-api-notifications/background.html')
    check_detection([u'Copyright 2010 the Chromium Authors'], path)
def test_ics_chromium_chrome_common_extensions_docs_examples_apps_hello_java_hellolicenseservlet_java(self):
    """Expect one copyright statement in HelloLicenseServlet.java."""
    path = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-apps-hello-java/HelloLicenseServlet.java')
    check_detection([u'Copyright 2010 the Chromium Authors'], path)
def test_ics_chromium_chrome_common_extensions_docs_examples_apps_hello_php_notice(self):
    """Expect four copyright statements in the hello-php NOTICE."""
    path = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-apps-hello-php/NOTICE')
    copyrights = [
        u'Copyright 2009 Google Inc.',
        u'Copyright (c) 2010 John Resig',
        u'Copyright (c) 2007 Andy Smith',
        u'Copyright (c) 2010, Mewp',
    ]
    check_detection(copyrights, path)
def test_ics_chromium_chrome_common_extensions_docs_examples_apps_hello_php_popuplib_js(self):
    """Expect one copyright statement in popuplib.js."""
    path = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-apps-hello-php/popuplib.js')
    check_detection([u'Copyright 2009 Google Inc.'], path)
def test_ics_chromium_chrome_common_extensions_docs_examples_apps_hello_php_lib_oauth_license_txt(self):
    """Expect one copyright statement in the oauth LICENSE.txt."""
    path = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-apps-hello-php-lib-oauth/LICENSE.txt')
    check_detection([u'Copyright (c) 2007 Andy Smith'], path)
def test_ics_chromium_chrome_common_extensions_docs_examples_apps_hello_python_notice(self):
    """Expect one copyright statement in the hello-python NOTICE."""
    path = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-apps-hello-python/NOTICE')
    check_detection([u'Copyright (c) 2007 Leah Culver'], path)
def test_ics_chromium_chrome_common_extensions_docs_examples_apps_hello_python_httplib2_init_py(self):
    """Expect one copyright statement in the httplib2 __init__.py."""
    path = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-apps-hello-python-httplib2/__init__.py')
    check_detection([u'Copyright 2006, Joe Gregorio contributors'], path)
@expectedFailure
def test_ics_chromium_chrome_common_extensions_docs_examples_apps_hello_python_httplib2_init_py_extra_contributors(self):
    """Known failure: 'contributors' should not be kept in the holder name."""
    path = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-apps-hello-python-httplib2/__init__.py')
    check_detection([u'Copyright 2006, Joe Gregorio'], path)
def test_ics_chromium_chrome_common_extensions_docs_examples_apps_hello_python_oauth2_init_py(self):
    """Expect one copyright statement in the oauth2 __init__.py."""
    path = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-apps-hello-python-oauth2/__init__.py')
    check_detection([u'Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel'], path)
def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_benchmark_jquery_jquery_1_4_2_min_js(self):
    """Expect two copyright statements in jquery-1.4.2.min.js."""
    path = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-benchmark-jquery/jquery-1.4.2.min.js')
    copyrights = [
        u'Copyright 2010, John Resig',
        u'Copyright 2010, The Dojo Foundation',
    ]
    check_detection(copyrights, path)
def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_benchmark_jst_jsevalcontext_js(self):
    """Expect one copyright statement in jsevalcontext.js."""
    path = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-benchmark-jst/jsevalcontext.js')
    check_detection([u'Copyright 2006 Google Inc.'], path)
def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_benchmark_util_sorttable_js(self):
    """Expect one copyright statement in sorttable.js."""
    path = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-benchmark-util/sorttable.js')
    check_detection([u'Copyright 2006, Dean Edwards'], path)
def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_gdocs_chrome_ex_oauthsimple_js(self):
    """Expect two copyright statements in chrome_ex_oauthsimple.js."""
    path = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-gdocs/chrome_ex_oauthsimple.js')
    copyrights = [
        u'copyright unitedHeroes.net',
        u'Copyright (c) 2009, unitedHeroes.net',
    ]
    check_detection(copyrights, path)
def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_imageinfo_notice(self):
    """Expect one copyright statement in the imageinfo NOTICE."""
    path = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-imageinfo/NOTICE')
    check_detection([u'Copyright (c) 2008 Jacob Seidelin, jseidelin@nihilogic.dk, http://blog.nihilogic.dk'], path)
def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_imageinfo_imageinfo_binaryajax_js(self):
    """Expect one copyright statement in binaryajax.js."""
    path = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-imageinfo-imageinfo/binaryajax.js')
    check_detection([u'Copyright (c) 2008 Jacob Seidelin, cupboy@gmail.com, http://blog.nihilogic.dk'], path)
def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_imageinfo_imageinfo_imageinfo_js(self):
    """Expect one copyright statement in imageinfo.js."""
    path = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-imageinfo-imageinfo/imageinfo.js')
    check_detection([u'Copyright (c) 2008 Jacob Seidelin, jseidelin@nihilogic.dk, http://blog.nihilogic.dk'], path)
def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_oauth_contacts_notice(self):
    """Expect three copyright statements in the oauth_contacts NOTICE."""
    path = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-oauth_contacts/NOTICE')
    copyrights = [
        u'copyright unitedHeroes.net',
        u'Copyright (c) 2009, unitedHeroes.net',
        u'Copyright Paul Johnston 2000 - 2002.',
    ]
    check_detection(copyrights, path)
def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_proxy_configuration_test_jsunittest_js(self):
    """Expect one copyright statement in jsunittest.js."""
    path = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-proxy_configuration-test/jsunittest.js')
    check_detection([u'(c) 2008 Dr Nic Williams'], path)
def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_wave_background_html(self):
    """Expect one copyright statement in the wave example background.html."""
    path = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-wave/background.html')
    check_detection([u'Copyright 2010 Google'], path)
def test_ics_chromium_crypto_third_party_nss_blapi_h(self):
    """Expect one copyright statement in blapi.h."""
    path = self.get_test_loc('ics/chromium-crypto-third_party-nss/blapi.h')
    check_detection([u'Copyright (c) 1994-2000 the Initial Developer.'], path)
def test_ics_chromium_crypto_third_party_nss_sha256_h(self):
    """Expect one copyright statement in sha256.h."""
    path = self.get_test_loc('ics/chromium-crypto-third_party-nss/sha256.h')
    check_detection([u'Copyright (c) 2002 the Initial Developer.'], path)
def test_ics_chromium_googleurl_license_txt(self):
    """Expect two copyright statements in the googleurl LICENSE.txt."""
    path = self.get_test_loc('ics/chromium-googleurl/LICENSE.txt')
    copyrights = [
        u'Copyright 2007, Google Inc.',
        u'Copyright (c) 1998 the Initial Developer.',
    ]
    check_detection(copyrights, path)
def test_ics_chromium_googleurl_base_basictypes_h(self):
    """Expect one copyright statement in basictypes.h."""
    path = self.get_test_loc('ics/chromium-googleurl-base/basictypes.h')
    check_detection([u'Copyright 2001 - 2003 Google Inc.'], path)
def test_ics_chromium_googleurl_base_logging_cc(self):
    """Expect one copyright statement in logging.cc."""
    path = self.get_test_loc('ics/chromium-googleurl-base/logging.cc')
    check_detection([u'Copyright 2007, Google Inc.'], path)
def test_ics_chromium_googleurl_base_logging_h(self):
    """Expect one copyright statement in logging.h."""
    path = self.get_test_loc('ics/chromium-googleurl-base/logging.h')
    check_detection([u'Copyright 2006 Google Inc.'], path)
def test_ics_chromium_googleurl_base_scoped_ptr_h(self):
    """Expect two copyright statements in scoped_ptr.h."""
    path = self.get_test_loc('ics/chromium-googleurl-base/scoped_ptr.h')
    copyrights = [
        u'(c) Copyright Greg Colvin and Beman Dawes 1998, 1999.',
        u'Copyright (c) 2001, 2002 Peter Dimov',
    ]
    check_detection(copyrights, path)
def test_ics_chromium_googleurl_src_gurl_unittest_cc(self):
    """Expect one copyright statement in gurl_unittest.cc."""
    path = self.get_test_loc('ics/chromium-googleurl-src/gurl_unittest.cc')
    check_detection([u'Copyright 2007 Google Inc.'], path)
def test_ics_chromium_googleurl_src_url_canon_ip_cc(self):
    """Expect one copyright statement in url_canon_ip.cc."""
    path = self.get_test_loc('ics/chromium-googleurl-src/url_canon_ip.cc')
    check_detection([u'Copyright 2009, Google Inc.'], path)
def test_ics_chromium_googleurl_src_url_common_h(self):
    """Expect one copyright statement in url_common.h."""
    path = self.get_test_loc('ics/chromium-googleurl-src/url_common.h')
    check_detection([u'Copyright 2010, Google Inc.'], path)
def test_ics_chromium_googleurl_src_url_parse_cc(self):
    """Expect one copyright statement in url_parse.cc."""
    path = self.get_test_loc('ics/chromium-googleurl-src/url_parse.cc')
    check_detection([u'Copyright (c) 1998 the Initial Developer.'], path)
def test_ics_chromium_googleurl_src_url_test_utils_h(self):
    """Expect one copyright statement in url_test_utils.h."""
    path = self.get_test_loc('ics/chromium-googleurl-src/url_test_utils.h')
    check_detection([u'Copyright 2007 Google Inc.'], path)
def test_ics_chromium_net_base_cookie_monster_cc(self):
    """Expect two copyright statements in cookie_monster.cc."""
    path = self.get_test_loc('ics/chromium-net-base/cookie_monster.cc')
    copyrights = [
        u'Copyright (c) 2011 The Chromium Authors.',
        u'Copyright (c) 2003 the Initial Developer.',
    ]
    check_detection(copyrights, path)
def test_ics_chromium_net_base_effective_tld_names_dat(self):
    """Expect one copyright statement in effective_tld_names.dat."""
    path = self.get_test_loc('ics/chromium-net-base/effective_tld_names.dat')
    check_detection([u'Copyright (c) 2007 the Initial Developer.'], path)
def test_ics_chromium_net_base_ssl_false_start_blacklist_process_cc(self):
    """Expect the same Chromium notice to be detected twice in
    ssl_false_start_blacklist_process.cc."""
    path = self.get_test_loc('ics/chromium-net-base/ssl_false_start_blacklist_process.cc')
    chromium = u'Copyright (c) 2010 The Chromium Authors.'
    check_detection([chromium, chromium], path)
def test_ics_chromium_net_base_x509_cert_types_mac_unittest_cc(self):
    """Expect five copyright statements in x509_cert_types_mac_unittest.cc,
    with the Kasm/Entrust pair appearing twice."""
    path = self.get_test_loc('ics/chromium-net-base/x509_cert_types_mac_unittest.cc')
    copyrights = [
        u'Copyright (c) 2010 The Chromium Authors.',
        u'(c) Kasm 2005',
        u'(c) 1999 Entrust.net Limited',
        u'(c) Kasm 2005',
        u'(c) 1999 Entrust.net Limited',
    ]
    check_detection(copyrights, path)
def test_ics_chromium_net_base_x509_certificate_unittest_cc(self):
    """Expect one copyright statement in x509_certificate_unittest.cc."""
    path = self.get_test_loc('ics/chromium-net-base/x509_certificate_unittest.cc')
    check_detection([u'Copyright (c) 2011 The Chromium Authors.'], path)
def test_ics_chromium_net_data_proxy_resolver_perftest_no_ads_pac(self):
    """Expect a truncated holder name in no-ads.pac (see the companion
    expected-failure test for the full name)."""
    path = self.get_test_loc('ics/chromium-net-data-proxy_resolver_perftest/no-ads.pac')
    check_detection([u'Copyright 1996-2004, John'], path)
@expectedFailure
def test_ics_chromium_net_data_proxy_resolver_perftest_no_ads_pac_trail_name(self):
    """Known failure: the trailing 'LoVerso.' part of the holder name in
    no-ads.pac is not yet detected."""
    path = self.get_test_loc('ics/chromium-net-data-proxy_resolver_perftest/no-ads.pac')
    check_detection([u'Copyright 1996-2004, John LoVerso.'], path)
def test_ics_chromium_net_disk_cache_sparse_control_cc(self):
    """Expect one copyright statement in sparse_control.cc."""
    path = self.get_test_loc('ics/chromium-net-disk_cache/sparse_control.cc')
    check_detection([u'Copyright (c) 2009-2010 The Chromium Authors.'], path)
def test_ics_chromium_net_ftp_ftp_network_layer_cc(self):
    """Expect one copyright statement in ftp_network_layer.cc."""
    path = self.get_test_loc('ics/chromium-net-ftp/ftp_network_layer.cc')
    check_detection([u'Copyright (c) 2008 The Chromium Authors.'], path)
def test_ics_chromium_net_http_des_cc(self):
    """Expect two copyright statements in des.cc."""
    path = self.get_test_loc('ics/chromium-net-http/des.cc')
    copyrights = [
        u'Copyright (c) 2011 The Chromium Authors.',
        u'Copyright (c) 2003 IBM Corporation.',
    ]
    check_detection(copyrights, path)
def test_ics_chromium_net_http_http_auth_handler_ntlm_portable_cc(self):
    """Expect two copyright statements in http_auth_handler_ntlm_portable.cc."""
    path = self.get_test_loc('ics/chromium-net-http/http_auth_handler_ntlm_portable.cc')
    copyrights = [
        u'Copyright (c) 2010 The Chromium Authors.',
        u'Copyright (c) 2003 IBM Corporation.',
    ]
    check_detection(copyrights, path)
def test_ics_chromium_net_http_http_chunked_decoder_cc(self):
    """Expect two copyright statements in http_chunked_decoder.cc."""
    path = self.get_test_loc('ics/chromium-net-http/http_chunked_decoder.cc')
    copyrights = [
        u'Copyright (c) 2010 The Chromium Authors.',
        u'Copyright (c) 2001 the Initial Developer.',
    ]
    check_detection(copyrights, path)
def test_ics_chromium_net_http_md4_cc(self):
    """Expect one copyright statement in md4.cc."""
    path = self.get_test_loc('ics/chromium-net-http/md4.cc')
    check_detection([u'Copyright (c) 2003 IBM Corporation.'], path)
def test_ics_chromium_net_socket_ssl_client_socket_nss_cc(self):
    """Expect two copyright statements in ssl_client_socket_nss.cc."""
    path = self.get_test_loc('ics/chromium-net-socket/ssl_client_socket_nss.cc')
    copyrights = [
        u'Copyright (c) 2011 The Chromium Authors.',
        u'Copyright (c) 2000 the Initial Developer.',
    ]
    check_detection(copyrights, path)
def test_ics_chromium_net_third_party_gssapi_gssapi_h(self):
    """Expect one copyright statement in gssapi.h."""
    path = self.get_test_loc('ics/chromium-net-third_party-gssapi/gssapi.h')
    check_detection([u'Copyright 1993 by OpenVision Technologies, Inc.'], path)
def test_ics_chromium_net_third_party_gssapi_license(self):
    """Expect one copyright statement in the gssapi LICENSE."""
    path = self.get_test_loc('ics/chromium-net-third_party-gssapi/LICENSE')
    check_detection([u'Copyright 1993 by OpenVision Technologies, Inc.'], path)
def test_ics_chromium_net_tools_spdyshark_makefile_am(self):
    """Expect one copyright statement in the spdyshark Makefile.am."""
    path = self.get_test_loc('ics/chromium-net-tools-spdyshark/Makefile.am')
    check_detection([u'Copyright 1998 Gerald Combs'], path)
def test_ics_chromium_net_tools_spdyshark_packet_spdy_c(self):
    """Expect two copyright statements in packet-spdy.c."""
    path = self.get_test_loc('ics/chromium-net-tools-spdyshark/packet-spdy.c')
    copyrights = [
        u'Copyright 2010, Google Inc. Eric Shienbrood <ers@google.com>',
        u'Copyright 1998 Gerald Combs',
    ]
    check_detection(copyrights, path)
def test_ics_chromium_net_tools_spdyshark_plugin_rc_in(self):
    """Expect one copyright statement in plugin.rc.in."""
    path = self.get_test_loc('ics/chromium-net-tools-spdyshark/plugin.rc.in')
    check_detection([u'Copyright (c) 1998 Gerald Combs <gerald@wireshark.org>, Gilbert Ramirez <gram@alumni.rice.edu> and others'], path)
def test_ics_chromium_net_tools_testserver_chromiumsync_py(self):
test_file = self.get_test_loc('ics/chromium-net-tools-testserver/chromiumsync.py')
expected = [
u'Copyright (c) 2010 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_net_tools_tld_cleanup_tld_cleanup_cc(self):
test_file = self.get_test_loc('ics/chromium-net-tools-tld_cleanup/tld_cleanup.cc')
expected = [
u'Copyright (c) 2006-2008 The Chromium Authors.',
u'Copyright (c) 2009 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_aclocal_m4(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff/aclocal.m4')
expected = [
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
u'Copyright (c) 2002, 2003, 2005, 2006, 2007 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1997, 2000, 2001, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2008 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1997, 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_compile(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff/compile')
expected = [
u'Copyright 1999, 2000 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_configure(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff/configure')
expected = [
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
u'Copyright (c) 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_copying(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff/COPYING')
expected = [
u'Copyright (c) 2008, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_depcomp(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff/depcomp')
expected = [
u'Copyright (c) 1999, 2000, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_install(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff/INSTALL')
expected = [
u'Copyright (c) 1994, 1995, 1996, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_ltmain_sh(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff/ltmain.sh')
expected = [
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
u'Copyright (c) 2008 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_missing(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff/missing')
expected = [
u'Copyright (c) 1996, 1997, 1999, 2000, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_man_vcdiff_1(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-man/vcdiff.1')
expected = [
u'Copyright (c) 2008, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_addrcache_cc(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/addrcache.cc')
expected = [
u'Copyright 2007 Google Inc. Author Lincoln Smith',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_chromium_sdch_open_vcdiff_src_addrcache_cc_extra_author(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/addrcache.cc')
expected = [
u'Copyright 2007 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_adler32_c(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/adler32.c')
expected = [
u'Copyright (c) 1995-2004 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_blockhash_cc(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/blockhash.cc')
expected = [
u'Copyright 2006, 2008 Google Inc. Authors Chandra Chereddi, Lincoln Smith',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_chromium_sdch_open_vcdiff_src_blockhash_cc_extra_author(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/blockhash.cc')
expected = [
u'Copyright 2006, 2008 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_blockhash_test_cc(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/blockhash_test.cc')
expected = [
u'Copyright 2008 Google Inc. Author Lincoln Smith',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_chromium_sdch_open_vcdiff_src_blockhash_test_cc_extra_author(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/blockhash_test.cc')
expected = [
u'Copyright 2008 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_codetablewriter_interface_h(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/codetablewriter_interface.h')
expected = [
u'Copyright 2008 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_gflags_cc(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/gflags.cc')
expected = [
u'Copyright (c) 2006, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_mutex_h(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/mutex.h')
expected = [
u'Copyright (c) 2007, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_rolling_hash_h(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/rolling_hash.h')
expected = [
u'Copyright 2007, 2008 Google Inc. Authors Jeff Dean, Sanjay Ghemawat, Lincoln Smith',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_chromium_sdch_open_vcdiff_src_rolling_hash_h_extra_author(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/rolling_hash.h')
expected = [
u'Copyright 2007, 2008 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_vcdiff_test_sh(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/vcdiff_test.sh')
expected = [
u'Copyright 2008 Google Inc. Author Lincoln Smith',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_chromium_sdch_open_vcdiff_src_vcdiff_test_sh_extra_author(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/vcdiff_test.sh')
expected = [
u'Copyright 2008 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_zconf_h(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/zconf.h')
expected = [
u'Copyright (c) 1995-2005 Jean-loup Gailly.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_zlib_h(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/zlib.h')
expected = [
u'Copyright (c) 1995-2005 Jean-loup Gailly and Mark Adler',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_google_output_string_h(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src-google/output_string.h')
expected = [
u'Copyright 2008 Google Inc. Author Lincoln Smith',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_chromium_sdch_open_vcdiff_src_google_output_string_h_extra_author(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src-google/output_string.h')
expected = [
u'Copyright 2008 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_gtest_gtest_cc(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src-gtest/gtest.cc')
expected = [
u'Copyright 2005, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_gtest_gtest_main_cc(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src-gtest/gtest_main.cc')
expected = [
u'Copyright 2006, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_vsprojects_vcdiff_test_bat(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-vsprojects/vcdiff_test.bat')
expected = [
u'Copyright 2008 Google Inc. Author Lincoln Smith',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_chromium_sdch_open_vcdiff_vsprojects_vcdiff_test_bat_extra_author(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-vsprojects/vcdiff_test.bat')
expected = [
u'Copyright 2008 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_generate_gmock_mutant_py(self):
test_file = self.get_test_loc('ics/chromium-testing/generate_gmock_mutant.py')
expected = [
u'Copyright (c) 2009 The Chromium Authors.',
u'Copyright (c) 2009 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gmock_copying(self):
test_file = self.get_test_loc('ics/chromium-testing-gmock/COPYING')
expected = [
u'Copyright 2008, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gmock_include_gmock_gmock_cardinalities_h(self):
test_file = self.get_test_loc('ics/chromium-testing-gmock-include-gmock/gmock-cardinalities.h')
expected = [
u'Copyright 2007, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gmock_scripts_fuse_gmock_files_py(self):
test_file = self.get_test_loc('ics/chromium-testing-gmock-scripts/fuse_gmock_files.py')
expected = [
u'Copyright 2009, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gmock_scripts_gmock_doctor_py(self):
test_file = self.get_test_loc('ics/chromium-testing-gmock-scripts/gmock_doctor.py')
expected = [
u'Copyright 2008, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gmock_scripts_upload_py(self):
test_file = self.get_test_loc('ics/chromium-testing-gmock-scripts/upload.py')
expected = [
u'Copyright 2007 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gmock_scripts_generator_gmock_gen_py(self):
test_file = self.get_test_loc('ics/chromium-testing-gmock-scripts-generator/gmock_gen.py')
expected = [
u'Copyright 2008 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gmock_scripts_generator_cpp_ast_py(self):
test_file = self.get_test_loc('ics/chromium-testing-gmock-scripts-generator-cpp/ast.py')
expected = [
u'Copyright 2007 Neal Norwitz',
u'Portions Copyright 2007 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gmock_scripts_generator_cpp_gmock_class_test_py(self):
test_file = self.get_test_loc('ics/chromium-testing-gmock-scripts-generator-cpp/gmock_class_test.py')
expected = [
u'Copyright 2009 Neal Norwitz',
u'Portions Copyright 2009 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gmock_test_gmock_test_utils_py(self):
test_file = self.get_test_loc('ics/chromium-testing-gmock-test/gmock_test_utils.py')
expected = [
u'Copyright 2006, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gtest_include_gtest_internal_gtest_linked_ptr_h(self):
test_file = self.get_test_loc('ics/chromium-testing-gtest-include-gtest-internal/gtest-linked_ptr.h')
expected = [
u'Copyright 2003 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gtest_include_gtest_internal_gtest_tuple_h(self):
test_file = self.get_test_loc('ics/chromium-testing-gtest-include-gtest-internal/gtest-tuple.h')
expected = [
u'Copyright 2009 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gtest_samples_sample10_unittest_cc(self):
test_file = self.get_test_loc('ics/chromium-testing-gtest-samples/sample10_unittest.cc')
expected = [
u'Copyright 2009 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gtest_scripts_gen_gtest_pred_impl_py(self):
test_file = self.get_test_loc('ics/chromium-testing-gtest-scripts/gen_gtest_pred_impl.py')
expected = [
u'Copyright 2006, Google Inc.',
u'Copyright 2006, Google Inc.',
u'Copyright 2006, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gtest_src_gtest_port_cc(self):
test_file = self.get_test_loc('ics/chromium-testing-gtest-src/gtest-port.cc')
expected = [
u'Copyright 2008, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gtest_test_gtest_catch_exceptions_test_py(self):
test_file = self.get_test_loc('ics/chromium-testing-gtest-test/gtest_catch_exceptions_test.py')
expected = [
u'Copyright 2010 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gtest_test_gtest_filter_unittest_py(self):
test_file = self.get_test_loc('ics/chromium-testing-gtest-test/gtest_filter_unittest.py')
expected = [
u'Copyright 2005 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gtest_test_gtest_shuffle_test_py(self):
test_file = self.get_test_loc('ics/chromium-testing-gtest-test/gtest_shuffle_test.py')
expected = [
u'Copyright 2009 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gtest_test_gtest_linked_ptr_test_cc(self):
test_file = self.get_test_loc('ics/chromium-testing-gtest-test/gtest-linked_ptr_test.cc')
expected = [
u'Copyright 2003, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_buffer_c(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/buffer.c')
expected = [
u'Copyright (c) 2002, 2003 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_config_guess(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/config.guess')
expected = [
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_configure(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/configure')
expected = [
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
u'Copyright (c) 2008 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_devpoll_c(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/devpoll.c')
expected = [
u'Copyright 2000-2004 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_epoll_c(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/epoll.c')
expected = [
u'Copyright 2000-2003 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_epoll_sub_c(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/epoll_sub.c')
expected = [
u'Copyright 2003 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_evbuffer_c(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/evbuffer.c')
expected = [
u'Copyright (c) 2002-2004 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_evdns_3(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/evdns.3')
expected = [
u'Copyright (c) 2006 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_evdns_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/evdns.h')
expected = [
u'Copyright (c) 2006 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_event_3(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/event.3')
expected = [
u'Copyright (c) 2000 Artur Grabowski <art@openbsd.org>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_event_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/event.h')
expected = [
u'Copyright (c) 2000-2007 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_event_rpcgen_py(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/event_rpcgen.py')
expected = [
u'Copyright (c) 2005 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_event_tagging_c(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/event_tagging.c')
expected = [
u'Copyright (c) 2003, 2004 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_event_internal_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/event-internal.h')
expected = [
u'Copyright (c) 2000-2004 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_evport_c(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/evport.c')
expected = [
u'Copyright (c) 2007 Sun Microsystems.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_evsignal_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/evsignal.h')
expected = [
u'Copyright 2000-2002 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_evutil_c(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/evutil.c')
expected = [
u'Copyright (c) 2007 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_http_c(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/http.c')
expected = [
u'Copyright (c) 2002-2006 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_http_internal_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/http-internal.h')
expected = [
u'Copyright 2001 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_license(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/LICENSE')
expected = [
u'Copyright 2000-2007 Niels Provos <provos@citi.umich.edu>',
u'Copyright 2007-2009 Niels Provos and Nick Mathewson',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_log_c(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/log.c')
expected = [
u'Copyright (c) 2005 Nick Mathewson <nickm@freehaven.net>',
u'Copyright (c) 2000 Dug Song <dugsong@monkey.org>',
u'Copyright (c) 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_min_heap_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/min_heap.h')
expected = [
u'Copyright (c) 2006 Maxim Yegorushkin <maxim.yegorushkin@gmail.com>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_missing(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/missing')
expected = [
u'Copyright (c) 1996, 1997, 1999, 2000, 2002, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_strlcpy_c(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/strlcpy.c')
expected = [
u'Copyright (c) 1998 Todd C. Miller <Todd.Miller@courtesan.com>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_compat_sys_libevent_time_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent-compat-sys/_libevent_time.h')
expected = [
u'Copyright (c) 1982, 1986, 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_compat_sys_queue_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent-compat-sys/queue.h')
expected = [
u'Copyright (c) 1991, 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_test_regress_dns_c(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent-test/regress_dns.c')
expected = [
u'Copyright (c) 2003-2006 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_overrides_talk_base_logging_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-overrides-talk-base/logging.h')
expected = [
u'Copyright 2004 2005, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_copying(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source/COPYING')
expected = [
u'Copyright (c) 2004 2005, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_asyncfile_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/asyncfile.cc')
expected = [
u'Copyright 2010, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_asyncfile_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/asyncfile.h')
expected = [
u'Copyright 2004 2010, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_base64_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/base64.cc')
expected = [
u'Copyright (c) 1999, Bob Withers',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_base64_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/base64.h')
expected = [
u'Copyright (c) 1999, Bob Withers',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_basicpacketsocketfactory_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/basicpacketsocketfactory.cc')
expected = [
u'Copyright 2011, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_buffer_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/buffer.h')
expected = [
u'Copyright 2004-2010, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_event_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/event.cc')
expected = [
u'Copyright 2004 2008, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_fileutils_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/fileutils.cc')
expected = [
u'Copyright 2004 2006, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_httpbase_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/httpbase.cc')
expected = [
u'Copyright 2004 2005, Google Inc.',
u'Copyright 2005 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_macconversion_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/macconversion.cc')
expected = [
u'Copyright 2004 2009, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_macutils_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/macutils.cc')
expected = [
u'Copyright 2007 2009, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_socketstream_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/socketstream.h')
expected = [
u'Copyright 2005 2010, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_stringutils_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/stringutils.cc')
expected = [
u'Copyright 2004 2005, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_session_phone_call_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-session-phone/call.cc')
expected = [
u'Copyright 2004 2007, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_session_phone_codec_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-session-phone/codec.h')
expected = [
u'Copyright 2004 2007, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_session_phone_mediamonitor_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-session-phone/mediamonitor.cc')
expected = [
u'Copyright 2005 2007, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_session_phone_mediamonitor_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-session-phone/mediamonitor.h')
expected = [
u'Copyright 2005 2008, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_session_phone_srtpfilter_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-session-phone/srtpfilter.h')
expected = [
u'Copyright 2009, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_session_phone_v4llookup_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-session-phone/v4llookup.cc')
expected = [
u'Copyright 2009, Google Inc. Author lexnikitin@google.com',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_chromium_third_party_libjingle_source_talk_session_phone_v4llookup_cc_extra_author(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-session-phone/v4llookup.cc')
expected = [
u'Copyright 2009, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_session_phone_videocommon_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-session-phone/videocommon.h')
expected = [
u'Copyright 2011, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_third_party_libudev_libudev_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-third_party-libudev/libudev.h')
expected = [
u'Copyright (c) 2008-2010 Kay Sievers <kay.sievers@vrfy.org>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_modp_b64_license(self):
test_file = self.get_test_loc('ics/chromium-third_party-modp_b64/LICENSE')
expected = [
u'Copyright (c) 2005, 2006 Nick Galbreath',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_modp_b64_modp_b64_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-modp_b64/modp_b64.cc')
expected = [
u'Copyright (c) 2005, 2006 Nick Galbreath',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_modp_b64_modp_b64_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-modp_b64/modp_b64.h')
expected = [
u'Copyright (c) 2005, 2006, Nick Galbreath',
]
check_detection(expected, test_file)
def test_ics_chromium_webkit_glue_inspector_strings_grd(self):
test_file = self.get_test_loc('ics/chromium-webkit-glue/inspector_strings.grd')
expected = [
u'Copyright (c) 2007, 2008 Apple Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_webkit_glue_multipart_response_delegate_h(self):
test_file = self.get_test_loc('ics/chromium-webkit-glue/multipart_response_delegate.h')
expected = [
u'Copyright (c) 2006-2009 The Chromium Authors.',
u'Copyright (c) 1998 the Initial Developer.',
]
check_detection(expected, test_file)
def test_ics_chromium_webkit_glue_webcursor_gtk_data_h(self):
test_file = self.get_test_loc('ics/chromium-webkit-glue/webcursor_gtk_data.h')
expected = [
u'Copyright (c) 2001 Tim Copperfield <timecop@network.email.ne.jp>',
u'Copyright (c) 2007 Christian Dywan <christian@twotoasts.de>',
]
check_detection(expected, test_file)
def test_ics_chromium_webkit_glue_webkit_strings_grd(self):
test_file = self.get_test_loc('ics/chromium-webkit-glue/webkit_strings.grd')
expected = [
u'Copyright (c) 2007 Apple Inc.',
u'Copyright (c) 2001 the Initial Developer.',
]
check_detection(expected, test_file)
def test_ics_chromium_webkit_glue_resources_readme_txt(self):
test_file = self.get_test_loc('ics/chromium-webkit-glue-resources/README.txt')
expected = [
u'Copyright (c) 1998 the Initial Developer.',
u'Copyright (c) 2005 Apple Computer, Inc.',
]
check_detection(expected, test_file)
def test_ics_clang_notice_trail_place(self):
test_file = self.get_test_loc('ics/clang/NOTICE')
expected = [
u'Copyright (c) 2007-2011 University of Illinois at Urbana-Champaign.',
]
check_detection(expected, test_file)
def test_ics_clang_docs_block_abi_apple_txt(self):
test_file = self.get_test_loc('ics/clang-docs/Block-ABI-Apple.txt')
expected = [
u'Copyright 2008-2010 Apple, Inc.',
]
check_detection(expected, test_file)
def test_ics_clang_docs_blocklanguagespec_txt(self):
test_file = self.get_test_loc('ics/clang-docs/BlockLanguageSpec.txt')
expected = [
u'Copyright 2008-2009 Apple, Inc.',
]
check_detection(expected, test_file)
def test_ics_clang_include_clang_basic_convertutf_h(self):
test_file = self.get_test_loc('ics/clang-include-clang-Basic/ConvertUTF.h')
expected = [
u'Copyright 2001-2004 Unicode, Inc.',
]
check_detection(expected, test_file)
def test_ics_clang_lib_headers_iso646_h(self):
test_file = self.get_test_loc('ics/clang-lib-Headers/iso646.h')
expected = [
u'Copyright (c) 2008 Eli Friedman',
]
check_detection(expected, test_file)
def test_ics_clang_lib_headers_limits_h(self):
test_file = self.get_test_loc('ics/clang-lib-Headers/limits.h')
expected = [
u'Copyright (c) 2009 Chris Lattner',
]
check_detection(expected, test_file)
def test_ics_clang_lib_headers_tgmath_h(self):
test_file = self.get_test_loc('ics/clang-lib-Headers/tgmath.h')
expected = [
u'Copyright (c) 2009 Howard Hinnant',
]
check_detection(expected, test_file)
def test_ics_collada_license_txt(self):
test_file = self.get_test_loc('ics/collada/license.txt')
expected = [
u'Copyright 2006 Sony Computer Entertainment Inc.',
]
check_detection(expected, test_file)
def test_ics_collada_include_dae_h(self):
test_file = self.get_test_loc('ics/collada-include/dae.h')
expected = [
u'Copyright 2006 Sony Computer Entertainment Inc.',
]
check_detection(expected, test_file)
def test_ics_collada_include_dae_daezaeuncompresshandler_h(self):
test_file = self.get_test_loc('ics/collada-include-dae/daeZAEUncompressHandler.h')
expected = [
u'Copyright 2008 Netallied Systems GmbH.',
]
check_detection(expected, test_file)
def test_ics_collada_src_1_4_dom_domasset_cpp(self):
test_file = self.get_test_loc('ics/collada-src-1.4-dom/domAsset.cpp')
expected = [
u'Copyright 2006 Sony Computer Entertainment Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_acinclude_m4(self):
test_file = self.get_test_loc('ics/dbus/acinclude.m4')
expected = [
u'Copyright (c) 2004 Scott James Remnant <scott@netsplit.com>.',
u'(c) 2003, 2004, 2005 Thomas Vander Stichele',
]
check_detection(expected, test_file)
def test_ics_dbus_configure_in(self):
test_file = self.get_test_loc('ics/dbus/configure.in')
expected = [
u'Copyright (c) 2000-2002, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_copying(self):
test_file = self.get_test_loc('ics/dbus/COPYING')
expected = [
u'Copyright (c) 2003-2004 Lawrence E. Rosen.',
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_dbus_bus_activation_c(self):
test_file = self.get_test_loc('ics/dbus-bus/activation.c')
expected = [
u'Copyright (c) 2003 CodeFactory AB',
u'Copyright (c) 2003 Red Hat, Inc.',
u'Copyright (c) 2004 Imendio HB',
]
check_detection(expected, test_file)
def test_ics_dbus_bus_activation_h(self):
test_file = self.get_test_loc('ics/dbus-bus/activation.h')
expected = [
u'Copyright (c) 2003 CodeFactory AB',
]
check_detection(expected, test_file)
def test_ics_dbus_bus_activation_exit_codes_h(self):
test_file = self.get_test_loc('ics/dbus-bus/activation-exit-codes.h')
expected = [
u'Copyright (c) 2007 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_bus_bus_c(self):
test_file = self.get_test_loc('ics/dbus-bus/bus.c')
expected = [
u'Copyright (c) 2003, 2004 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_bus_config_parser_trivial_c(self):
test_file = self.get_test_loc('ics/dbus-bus/config-parser-trivial.c')
expected = [
u'Copyright (c) 2003, 2004, 2007 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_bus_connection_c(self):
test_file = self.get_test_loc('ics/dbus-bus/connection.c')
expected = [
u'Copyright (c) 2003 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_bus_connection_h(self):
test_file = self.get_test_loc('ics/dbus-bus/connection.h')
expected = [
u'Copyright (c) 2003, 2004 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_bus_dbus_daemon_1_in(self):
test_file = self.get_test_loc('ics/dbus-bus/dbus-daemon.1.in')
expected = [
u'Copyright (c) 2003,2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_bus_desktop_file_c(self):
test_file = self.get_test_loc('ics/dbus-bus/desktop-file.c')
expected = [
u'Copyright (c) 2003 CodeFactory AB',
u'Copyright (c) 2003 Red Hat Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_bus_dir_watch_inotify_c(self):
test_file = self.get_test_loc('ics/dbus-bus/dir-watch-inotify.c')
expected = [
u'Copyright (c) 2003 Red Hat, Inc.',
u'(c) 2006 Mandriva',
]
check_detection(expected, test_file)
def test_ics_dbus_bus_dispatch_c(self):
test_file = self.get_test_loc('ics/dbus-bus/dispatch.c')
expected = [
u'Copyright (c) 2003 CodeFactory AB',
u'Copyright (c) 2003, 2004, 2005 Red Hat, Inc.',
u'Copyright (c) 2004 Imendio HB',
]
check_detection(expected, test_file)
def test_ics_dbus_bus_driver_c(self):
test_file = self.get_test_loc('ics/dbus-bus/driver.c')
expected = [
u'Copyright (c) 2003 CodeFactory AB',
u'Copyright (c) 2003, 2004, 2005 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_bus_main_c(self):
test_file = self.get_test_loc('ics/dbus-bus/main.c')
expected = [
u'Copyright (c) 2003 Red Hat, Inc.',
u'Copyright (c) 2002, 2003 Red Hat, Inc., CodeFactory AB, and others',
]
check_detection(expected, test_file)
def test_ics_dbus_bus_messagebus_config_in(self):
test_file = self.get_test_loc('ics/dbus-bus/messagebus-config.in')
expected = [
u'Copyright 2009 Yaakov Selkowitz',
]
check_detection(expected, test_file)
def test_ics_dbus_bus_services_c(self):
test_file = self.get_test_loc('ics/dbus-bus/services.c')
expected = [
u'Copyright (c) 2003 Red Hat, Inc.',
u'Copyright (c) 2003 CodeFactory AB',
]
check_detection(expected, test_file)
def test_ics_dbus_bus_signals_c(self):
test_file = self.get_test_loc('ics/dbus-bus/signals.c')
expected = [
u'Copyright (c) 2003, 2005 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_bus_utils_c(self):
test_file = self.get_test_loc('ics/dbus-bus/utils.c')
expected = [
u'Copyright (c) 2003 CodeFactory AB',
u'Copyright (c) 2003 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_cmake_bus_dbus_daemon_xml(self):
test_file = self.get_test_loc('ics/dbus-cmake-bus/dbus-daemon.xml')
expected = [
u'Copyright (c) 2003 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_cmake_modules_win32macros_cmake(self):
test_file = self.get_test_loc('ics/dbus-cmake-modules/Win32Macros.cmake')
expected = [
u'Copyright (c) 2006-2007, Ralf Habacker',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_address_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-address.c')
expected = [
u'Copyright (c) 2003 CodeFactory AB',
u'Copyright (c) 2004,2005 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_auth_h(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-auth.h')
expected = [
u'Copyright (c) 2002 Red Hat Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_auth_util_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-auth-util.c')
expected = [
u'Copyright (c) 2002, 2003, 2004 Red Hat Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_connection_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-connection.c')
expected = [
u'Copyright (c) 2002-2006 Red Hat Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_connection_h(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-connection.h')
expected = [
u'Copyright (c) 2002, 2003 Red Hat Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_credentials_util_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-credentials-util.c')
expected = [
u'Copyright (c) 2007 Red Hat Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_errors_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-errors.c')
expected = [
u'Copyright (c) 2002, 2004 Red Hat Inc.',
u'Copyright (c) 2003 CodeFactory AB',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_errors_h(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-errors.h')
expected = [
u'Copyright (c) 2002 Red Hat Inc.',
u'Copyright (c) 2003 CodeFactory AB',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_file_h(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-file.h')
expected = [
u'Copyright (c) 2002, 2003 Red Hat, Inc.',
u'Copyright (c) 2003 CodeFactory AB',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_file_unix_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-file-unix.c')
expected = [
u'Copyright (c) 2002, 2003, 2006 Red Hat, Inc.',
u'Copyright (c) 2003 CodeFactory AB',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_hash_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-hash.c')
expected = [
u'Copyright (c) 2002 Red Hat, Inc.',
u'Copyright (c) 1991-1993 The Regents of the University of California.',
u'Copyright (c) 1994 Sun Microsystems, Inc.',
u'Copyright (c) 1991-1993 The Regents of the University of California.',
u'Copyright (c) 1994 Sun Microsystems, Inc.',
u'copyrighted by the Regents of the University of California, Sun Microsystems, Inc., Scriptics Corporation'
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_hash_h(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-hash.h')
expected = [
u'Copyright (c) 2002 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_internals_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-internals.c')
expected = [
u'Copyright (c) 2002, 2003 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_internals_h(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-internals.h')
expected = [
u'Copyright (c) 2002, 2003 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_keyring_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-keyring.c')
expected = [
u'Copyright (c) 2003, 2004 Red Hat Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_keyring_h(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-keyring.h')
expected = [
u'Copyright (c) 2003 Red Hat Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_marshal_basic_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-marshal-basic.c')
expected = [
u'Copyright (c) 2002 CodeFactory AB',
u'Copyright (c) 2003, 2004, 2005 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_marshal_basic_h(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-marshal-basic.h')
expected = [
u'Copyright (c) 2002 CodeFactory AB',
u'Copyright (c) 2004, 2005 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_marshal_recursive_util_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-marshal-recursive-util.c')
expected = [
u'Copyright (c) 2004, 2005 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_md5_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-md5.c')
expected = [
u'Copyright (c) 2003 Red Hat Inc.',
u'Copyright (c) 1999, 2000 Aladdin Enterprises.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_memory_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-memory.c')
expected = [
u'Copyright (c) 2002, 2003 Red Hat Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_message_h(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-message.h')
expected = [
u'Copyright (c) 2002, 2003, 2005 Red Hat Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_message_factory_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-message-factory.c')
expected = [
u'Copyright (c) 2005 Red Hat Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_message_private_h(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-message-private.h')
expected = [
u'Copyright (c) 2005 Red Hat Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_message_util_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-message-util.c')
expected = [
u'Copyright (c) 2002, 2003, 2004, 2005 Red Hat Inc.',
u'Copyright (c) 2002, 2003 CodeFactory AB',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_misc_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-misc.c')
expected = [
u'Copyright (c) 2006 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_nonce_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-nonce.c')
expected = [
u'Copyright (c) 2009 Klaralvdalens Datakonsult AB, a KDAB Group company, info@kdab.net',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_nonce_h(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-nonce.h')
expected = [
u'Copyright (c) 2009 Klaralvdalens Datakonsult AB, a KDAB Group company, info@kdab.net',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_object_tree_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-object-tree.c')
expected = [
u'Copyright (c) 2003, 2005 Red Hat Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_protocol_h(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-protocol.h')
expected = [
u'Copyright (c) 2002, 2003 CodeFactory AB',
u'Copyright (c) 2004, 2005 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_server_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-server.c')
expected = [
u'Copyright (c) 2002, 2003, 2004, 2005 Red Hat Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_server_debug_pipe_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-server-debug-pipe.c')
expected = [
u'Copyright (c) 2003 CodeFactory AB',
u'Copyright (c) 2003, 2004 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_server_socket_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-server-socket.c')
expected = [
u'Copyright (c) 2002, 2003, 2004, 2006 Red Hat Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_server_socket_h(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-server-socket.h')
expected = [
u'Copyright (c) 2002, 2006 Red Hat Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_server_win_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-server-win.c')
expected = [
u'Copyright (c) 2002, 2003, 2004 Red Hat Inc.',
u'Copyright (c) 2007 Ralf Habacker <ralf.habacker@freenet.de>',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_server_win_h(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-server-win.h')
expected = [
u'Copyright (c) 2002 Red Hat Inc.',
u'Copyright (c) 2007 Ralf Habacker <ralf.habacker@freenet.de>',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_sha_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-sha.c')
expected = [
u'Copyright (c) 2003 Red Hat Inc.',
u'Copyright (c) 1995 A. M. Kuchling',
u'Copyright (c) 1995, A.M.',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_dbus_dbus_dbus_sha_c_trail_name(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-sha.c')
expected = [
u'Copyright (c) 2003 Red Hat Inc.',
u'Copyright (c) 1995 A. M. Kuchling',
u'Copyright (c) 1995 A. M. Kuchling',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_sockets_win_h(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-sockets-win.h')
expected = [
u'Copyright (c) 2005 Novell, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_spawn_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-spawn.c')
expected = [
u'Copyright (c) 2002, 2003, 2004 Red Hat, Inc.',
u'Copyright (c) 2003 CodeFactory AB',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_spawn_win_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-spawn-win.c')
expected = [
u'Copyright (c) 2002, 2003, 2004 Red Hat, Inc.',
u'Copyright (c) 2003 CodeFactory AB',
u'Copyright (c) 2005 Novell, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_string_h(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-string.h')
expected = [
u'Copyright (c) 2002, 2003 Red Hat, Inc.',
u'Copyright (c) 2006 Ralf Habacker <ralf.habacker@freenet.de>',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_string_util_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-string-util.c')
expected = [
u'Copyright (c) 2002, 2003, 2004, 2005 Red Hat, Inc.',
u'Copyright (c) 2006 Ralf Habacker <ralf.habacker@freenet.de>',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_sysdeps_pthread_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-sysdeps-pthread.c')
expected = [
u'Copyright (c) 2002, 2003, 2006 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_sysdeps_util_unix_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-sysdeps-util-unix.c')
expected = [
u'Copyright (c) 2002, 2003, 2004, 2005 Red Hat, Inc.',
u'Copyright (c) 2003 CodeFactory AB',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_sysdeps_util_win_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-sysdeps-util-win.c')
expected = [
u'Copyright (c) 2002, 2003, 2004, 2005 Red Hat, Inc.',
u'Copyright (c) 2003 CodeFactory AB',
u'Copyright (c) 2000 Werner Almesberger',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_sysdeps_win_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-sysdeps-win.c')
expected = [
u'Copyright (c) 2002, 2003 Red Hat, Inc.',
u'Copyright (c) 2003 CodeFactory AB',
u'Copyright (c) 2005 Novell, Inc.',
u'Copyright (c) 2006 Ralf Habacker <ralf.habacker@freenet.de>',
u'Copyright (c) 2006 Peter Kummel <syntheticpp@gmx.net>',
u'Copyright (c) 2006 Christian Ehrlicher <ch.ehrlicher@gmx.de>',
u'Copyright (c) 2002, 2003 Red Hat, Inc.',
u'Copyright (c) 2003 CodeFactory AB',
u'Copyright (c) 2005 Novell, Inc.',
u'Copyright 2004 Eric Poech',
u'Copyright 2004 Robert Shearman',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_sysdeps_win_h(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-sysdeps-win.h')
expected = [
u'Copyright (c) 2002, 2003 Red Hat, Inc.',
u'Copyright (c) 2003 CodeFactory AB',
u'Copyright (c) 2005 Novell, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_sysdeps_wince_glue_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-sysdeps-wince-glue.c')
expected = [
u'Copyright (c) 2002, 2003 Red Hat, Inc.',
u'Copyright (c) 2003 CodeFactory AB',
u'Copyright (c) 2005 Novell, Inc.',
u'Copyright (c) 2006 Ralf Habacker <ralf.habacker@freenet.de>',
u'Copyright (c) 2006 Peter Kummel <syntheticpp@gmx.net>',
u'Copyright (c) 2006 Christian Ehrlicher <ch.ehrlicher@gmx.de>',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_threads_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-threads.c')
expected = [
u'Copyright (c) 2002, 2003, 2006 Red Hat Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_threads_internal_h(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-threads-internal.h')
expected = [
u'Copyright (c) 2002, 2005 Red Hat Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_transport_protected_h(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-transport-protected.h')
expected = [
u'Copyright (c) 2002, 2004 Red Hat Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_dbus_userdb_util_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/dbus-userdb-util.c')
expected = [
u'Copyright (c) 2003, 2004, 2005 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_sd_daemon_c(self):
test_file = self.get_test_loc('ics/dbus-dbus/sd-daemon.c')
expected = [
u'Copyright 2010 Lennart Poettering',
]
check_detection(expected, test_file)
def test_ics_dbus_dbus_versioninfo_rc_in(self):
test_file = self.get_test_loc('ics/dbus-dbus/versioninfo.rc.in')
expected = [
u'Copyright (c) 2005 g10 Code GmbH',
u'Copyright (c) 2009 FreeDesktop.org',
]
check_detection(expected, test_file)
def test_ics_dbus_doc_introspect_dtd(self):
test_file = self.get_test_loc('ics/dbus-doc/introspect.dtd')
expected = [
u'(c) 2005-02-02 David A. Wheeler',
]
check_detection(expected, test_file)
def test_ics_dbus_doc_introspect_xsl(self):
test_file = self.get_test_loc('ics/dbus-doc/introspect.xsl')
expected = [
u'Copyright (c) 2005 Lennart Poettering.',
]
check_detection(expected, test_file)
def test_ics_dbus_test_decode_gcov_c(self):
test_file = self.get_test_loc('ics/dbus-test/decode-gcov.c')
expected = [
u'Copyright (c) 2003 Red Hat Inc.',
u'Copyright (c) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_tools_dbus_cleanup_sockets_1(self):
test_file = self.get_test_loc('ics/dbus-tools/dbus-cleanup-sockets.1')
expected = [
u'Copyright (c) 2003 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_tools_dbus_cleanup_sockets_c(self):
test_file = self.get_test_loc('ics/dbus-tools/dbus-cleanup-sockets.c')
expected = [
u'Copyright (c) 2003 Red Hat, Inc.',
u'Copyright (c) 2002 Michael Meeks',
u'Copyright (c) 2003 Red Hat, Inc.',
u'Copyright (c) 2002 Michael Meeks',
]
check_detection(expected, test_file)
def test_ics_dbus_tools_dbus_launch_c(self):
test_file = self.get_test_loc('ics/dbus-tools/dbus-launch.c')
expected = [
u'Copyright (c) 2003, 2006 Red Hat, Inc.',
u'Copyright (c) 2006 Thiago Macieira <thiago@kde.org>',
u'Copyright (c) 2003 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_tools_dbus_launch_win_c(self):
test_file = self.get_test_loc('ics/dbus-tools/dbus-launch-win.c')
expected = [
u'Copyright (c) 2007 Ralf Habacker <ralf.habacker@freenet.de>',
]
check_detection(expected, test_file)
def test_ics_dbus_tools_dbus_launch_x11_c(self):
test_file = self.get_test_loc('ics/dbus-tools/dbus-launch-x11.c')
expected = [
u'Copyright (c) 2006 Thiago Macieira <thiago@kde.org>',
]
check_detection(expected, test_file)
def test_ics_dbus_tools_dbus_monitor_c(self):
test_file = self.get_test_loc('ics/dbus-tools/dbus-monitor.c')
expected = [
u'Copyright (c) 2003 Philip Blundell <philb@gnu.org>',
]
check_detection(expected, test_file)
def test_ics_dbus_tools_dbus_print_message_c(self):
test_file = self.get_test_loc('ics/dbus-tools/dbus-print-message.c')
expected = [
u'Copyright (c) 2003 Philip Blundell <philb@gnu.org>',
u'Copyright (c) 2003 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_tools_dbus_uuidgen_1(self):
test_file = self.get_test_loc('ics/dbus-tools/dbus-uuidgen.1')
expected = [
u'Copyright (c) 2006 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_tools_dbus_uuidgen_c(self):
test_file = self.get_test_loc('ics/dbus-tools/dbus-uuidgen.c')
expected = [
u'Copyright (c) 2006 Red Hat, Inc.',
u'Copyright (c) 2006 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_tools_dbus_viewer_c(self):
test_file = self.get_test_loc('ics/dbus-tools/dbus-viewer.c')
expected = [
u'Copyright (c) 2003 Red Hat, Inc.',
u'Copyright (c) 2003 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_tools_strtoll_c(self):
test_file = self.get_test_loc('ics/dbus-tools/strtoll.c')
expected = [
u'Copyright (c) 1992, 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_dhcpcd_arp_c(self):
test_file = self.get_test_loc('ics/dhcpcd/arp.c')
expected = [
u'Copyright (c) 2006-2008 Roy Marples <roy@marples.name>',
]
check_detection(expected, test_file)
def test_ics_dhcpcd_bind_c(self):
test_file = self.get_test_loc('ics/dhcpcd/bind.c')
expected = [
u'Copyright (c) 2006-2010 Roy Marples <roy@marples.name>',
]
check_detection(expected, test_file)
def test_ics_dhcpcd_bpf_filter_h(self):
test_file = self.get_test_loc('ics/dhcpcd/bpf-filter.h')
expected = [
u'Copyright (c) 2006-2008 Roy Marples <roy@marples.name>',
u'Copyright (c) 2004,2007 by Internet Systems Consortium, Inc.',
u'Copyright (c) 1996-2003 by Internet Software Consortium',
]
check_detection(expected, test_file)
def test_ics_dhcpcd_client_c(self):
test_file = self.get_test_loc('ics/dhcpcd/client.c')
expected = [
u'Copyright 2006-2008 Roy Marples <roy@marples.name>',
]
check_detection(expected, test_file)
def test_ics_dhcpcd_common_c(self):
test_file = self.get_test_loc('ics/dhcpcd/common.c')
expected = [
u'Copyright (c) 2006-2009 Roy Marples <roy@marples.name>',
]
check_detection(expected, test_file)
def test_ics_dhcpcd_dhcpcd_8(self):
test_file = self.get_test_loc('ics/dhcpcd/dhcpcd.8')
expected = [
u'Copyright (c) 2006-2010 Roy Marples',
]
check_detection(expected, test_file)
def test_ics_dhcpcd_dhcpcd_c(self):
test_file = self.get_test_loc('ics/dhcpcd/dhcpcd.c')
expected = [
u'Copyright (c) 2006-2010 Roy Marples <roy@marples.name>',
u'Copyright (c) 2006-2010 Roy Marples',
]
check_detection(expected, test_file)
def test_ics_dhcpcd_ifaddrs_c(self):
test_file = self.get_test_loc('ics/dhcpcd/ifaddrs.c')
expected = [
u'Copyright 2011, The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_dhcpcd_if_linux_wireless_c(self):
test_file = self.get_test_loc('ics/dhcpcd/if-linux-wireless.c')
expected = [
u'Copyright (c) 2009-2010 Roy Marples <roy@marples.name>',
]
check_detection(expected, test_file)
def test_ics_dhcpcd_notice(self):
test_file = self.get_test_loc('ics/dhcpcd/NOTICE')
expected = [
u'Copyright 2006-2008 Roy Marples <roy@marples.name>',
u'Copyright (c) 2004,2007 by Internet Systems Consortium, Inc.',
u'Copyright (c) 1996-2003 by Internet Software Consortium',
]
check_detection(expected, test_file)
def test_ics_dhcpcd_readme(self):
test_file = self.get_test_loc('ics/dhcpcd/README')
expected = [
u'Copyright (c) 2006-2010 Roy Marples <roy@marples.name>',
]
check_detection(expected, test_file)
def test_ics_dhcpcd_compat_arc4random_c(self):
test_file = self.get_test_loc('ics/dhcpcd-compat/arc4random.c')
expected = [
u'Copyright 1996 David Mazieres <dm@lcs.mit.edu>.',
]
check_detection(expected, test_file)
def test_ics_dhcpcd_compat_linkaddr_c(self):
test_file = self.get_test_loc('ics/dhcpcd-compat/linkaddr.c')
expected = [
u'Copyright (c) 1990, 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_dhcpcd_mk_cc_mk(self):
test_file = self.get_test_loc('ics/dhcpcd-mk/cc.mk')
expected = [
u'Copyright 2008 Roy Marples <roy@marples.name>',
]
check_detection(expected, test_file)
def test_ics_dhcpcd_mk_dist_mk(self):
test_file = self.get_test_loc('ics/dhcpcd-mk/dist.mk')
expected = [
u'Copyright 2008-2009 Roy Marples <roy@marples.name>',
]
check_detection(expected, test_file)
def test_ics_dnsmasq_copying_v3(self):
test_file = self.get_test_loc('ics/dnsmasq/COPYING-v3')
expected = [
u'Copyright (c) 2007 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_dnsmasq_makefile(self):
test_file = self.get_test_loc('ics/dnsmasq/Makefile')
expected = [
u'Copyright (c) 2000-2009 Simon Kelley',
]
check_detection(expected, test_file)
def test_ics_dnsmasq_contrib_suse_dnsmasq_suse_spec(self):
test_file = self.get_test_loc('ics/dnsmasq-contrib-Suse/dnsmasq-suse.spec')
expected = [
u'Copyright GPL Group',
]
check_detection(expected, test_file)
def test_ics_dnsmasq_contrib_wrt_dhcp_lease_time_c(self):
test_file = self.get_test_loc('ics/dnsmasq-contrib-wrt/dhcp_lease_time.c')
expected = [
u'Copyright (c) 2007 Simon Kelley',
]
check_detection(expected, test_file)
def test_ics_dnsmasq_contrib_wrt_dhcp_release_c(self):
test_file = self.get_test_loc('ics/dnsmasq-contrib-wrt/dhcp_release.c')
expected = [
u'Copyright (c) 2006 Simon Kelley',
]
check_detection(expected, test_file)
def test_ics_dnsmasq_contrib_wrt_lease_update_sh(self):
test_file = self.get_test_loc('ics/dnsmasq-contrib-wrt/lease_update.sh')
expected = [
u'Copyright (c) 2006 Simon Kelley',
]
check_detection(expected, test_file)
def test_ics_dnsmasq_src_bpf_c(self):
test_file = self.get_test_loc('ics/dnsmasq-src/bpf.c')
expected = [
u'Copyright (c) 2000-2009 Simon Kelley',
]
check_detection(expected, test_file)
def test_ics_dnsmasq_src_dnsmasq_h(self):
test_file = self.get_test_loc('ics/dnsmasq-src/dnsmasq.h')
expected = [
u'Copyright (c) 2000-2009 Simon Kelley',
u'Copyright (c) 2000-2009 Simon Kelley',
]
check_detection(expected, test_file)
def test_ics_dnsmasq_src_nameser_h(self):
test_file = self.get_test_loc('ics/dnsmasq-src/nameser.h')
expected = [
u'Copyright (c) 1983, 1989, 1993 The Regents of the University of California.',
u'Portions Copyright (c) 1993 by Digital Equipment Corporation.',
u'Portions Copyright (c) 1995 by International Business Machines, Inc.',
]
check_detection(expected, test_file)
def test_ics_doclava_notice(self):
test_file = self.get_test_loc('ics/doclava/NOTICE')
expected = [
u'Copyright (c) 2010 Google Inc.',
u'Copyright (c) 2008 John Resig (jquery.com)',
u'Copyright (c) 2009 John Resig, http://jquery.com',
]
check_detection(expected, test_file)
def test_ics_doclava_res_assets_templates_assets_jquery_history_js(self):
test_file = self.get_test_loc('ics/doclava-res-assets-templates-assets/jquery-history.js')
expected = [
u'Copyright (c) 2008 Tom Rodenberg',
]
check_detection(expected, test_file)
def test_ics_doclava_res_assets_templates_assets_jquery_resizable_min_js(self):
test_file = self.get_test_loc('ics/doclava-res-assets-templates-assets/jquery-resizable.min.js')
expected = [
u'Copyright (c) 2009 John Resig',
u'Copyright 2009, The Dojo Foundation',
u'Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)',
u'Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)',
]
check_detection(expected, test_file)
def test_ics_doclava_src_com_google_doclava_annotationinstanceinfo_java(self):
test_file = self.get_test_loc('ics/doclava-src-com-google-doclava/AnnotationInstanceInfo.java')
expected = [
u'Copyright (c) 2010 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_doclava_src_com_google_doclava_doclava2_java(self):
test_file = self.get_test_loc('ics/doclava-src-com-google-doclava/Doclava2.java')
expected = [
u'Copyright (c) 2011 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_doclava_src_com_google_doclava_parser_java_g(self):
test_file = self.get_test_loc('ics/doclava-src-com-google-doclava-parser/Java.g')
expected = [
u'Copyright (c) 2007-2008 Terence Parr',
]
check_detection(expected, test_file)
def test_ics_dropbear_agentfwd_h(self):
test_file = self.get_test_loc('ics/dropbear/agentfwd.h')
expected = [
u'Copyright (c) 2002,2003 Matt Johnston',
]
check_detection(expected, test_file)
def test_ics_dropbear_atomicio_c(self):
test_file = self.get_test_loc('ics/dropbear/atomicio.c')
expected = [
u'Copyright (c) 1995,1999 Theo de Raadt.',
]
check_detection(expected, test_file)
def test_ics_dropbear_circbuffer_c(self):
test_file = self.get_test_loc('ics/dropbear/circbuffer.c')
expected = [
u'Copyright (c) 2002-2004 Matt Johnston',
]
check_detection(expected, test_file)
def test_ics_dropbear_cli_algo_c(self):
test_file = self.get_test_loc('ics/dropbear/cli-algo.c')
expected = [
u'Copyright (c) 2002,2003 Matt Johnston',
u'Copyright (c) 2004 by Mihnea Stoenescu',
]
check_detection(expected, test_file)
def test_ics_dropbear_cli_authinteract_c(self):
test_file = self.get_test_loc('ics/dropbear/cli-authinteract.c')
expected = [
u'Copyright (c) 2005 Matt Johnston',
]
check_detection(expected, test_file)
def test_ics_dropbear_cli_kex_c(self):
test_file = self.get_test_loc('ics/dropbear/cli-kex.c')
expected = [
u'Copyright (c) 2002-2004 Matt Johnston',
u'Copyright (c) 2004 by Mihnea Stoenescu',
]
check_detection(expected, test_file)
def test_ics_dropbear_common_kex_c(self):
test_file = self.get_test_loc('ics/dropbear/common-kex.c')
expected = [
u'Copyright (c) 2002-2004 Matt Johnston',
u'Portions Copyright (c) 2004 by Mihnea Stoenescu',
]
check_detection(expected, test_file)
def test_ics_dropbear_compat_c(self):
test_file = self.get_test_loc('ics/dropbear/compat.c')
expected = [
u'Copyright (c) 2002,2003 Matt Johnston',
u'Copyright (c) 1998 Todd C. Miller <Todd.Miller@courtesan.com>',
u'Copyright (c) 1990, 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_dropbear_configure(self):
test_file = self.get_test_loc('ics/dropbear/configure')
expected = [
u'Copyright (c) 2003 Free Software Foundation, Inc.',
u'Copyright (c) 2003 Free Software Foundation, Inc.',
u'Copyright (c) 2003 Free Software Foundation, Inc.',
u'Copyright (c) 2003 Free Software Foundation, Inc.',
u'Copyright (c) 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_dropbear_dbutil_c(self):
test_file = self.get_test_loc('ics/dropbear/dbutil.c')
expected = [
u'Copyright (c) 2002,2003 Matt Johnston',
u'Copyright (c) 1998 Todd C. Miller <Todd.Miller@courtesan.com>',
]
check_detection(expected, test_file)
def test_ics_dropbear_fake_rfc2553_c(self):
test_file = self.get_test_loc('ics/dropbear/fake-rfc2553.c')
expected = [
u'Copyright (c) 2000-2003 Damien Miller.',
u'Copyright (c) 1999 WIDE Project.',
]
check_detection(expected, test_file)
def test_ics_dropbear_install_sh(self):
test_file = self.get_test_loc('ics/dropbear/install-sh')
expected = [
u'Copyright 1991 by the Massachusetts Institute of Technology',
]
check_detection(expected, test_file)
def test_ics_dropbear_keyimport_c_trail_name(self):
test_file = self.get_test_loc('ics/dropbear/keyimport.c')
expected = [
u'copyright 2003 Matt Johnston',
u'copyright 1997-2003 Simon Tatham.',
u'Portions copyright Robert de Bath, Joris van Rantwijk, Delian Delchev, Andreas Schultz, Jeroen Massar, Wez Furlong, Nicolas Barry, Justin Bradford, and CORE SDI S.A.',
]
check_detection(expected, test_file)
def test_ics_dropbear_license_extra_portion_trail_name(self):
test_file = self.get_test_loc('ics/dropbear/LICENSE')
expected = [
u'(c) 2004 Mihnea Stoenescu',
u'Copyright (c) 2002-2006 Matt Johnston',
u'Portions copyright (c) 2004 Mihnea Stoenescu',
u'Copyright (c) 1995 Tatu Ylonen <ylo@cs.hut.fi>, Espoo, Finland',
u'(c) Todd C. Miller',
u'copyright 1997-2003 Simon Tatham.',
u'Portions copyright Robert de Bath, Joris van Rantwijk, Delian Delchev, Andreas Schultz, Jeroen Massar, Wez Furlong, Nicolas Barry, Justin Bradford, and CORE SDI S.A.',
]
check_detection(expected, test_file)
def test_ics_dropbear_loginrec_c_extra_portion_extra_portion(self):
test_file = self.get_test_loc('ics/dropbear/loginrec.c')
expected = [
u'Copyright (c) 2000 Andre Lucas.',
u'Portions copyright (c) 1998 Todd C. Miller',
u'Portions copyright (c) 1996 Jason Downs',
u'Portions copyright (c) 1996 Theo de Raadt',
]
check_detection(expected, test_file)
def test_ics_dropbear_loginrec_h(self):
test_file = self.get_test_loc('ics/dropbear/loginrec.h')
expected = [
u'Copyright (c) 2000 Andre Lucas.',
]
check_detection(expected, test_file)
def test_ics_dropbear_netbsd_getpass_c(self):
test_file = self.get_test_loc('ics/dropbear/netbsd_getpass.c')
expected = [
u'Copyright (c) 1988, 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_dropbear_progressmeter_c(self):
test_file = self.get_test_loc('ics/dropbear/progressmeter.c')
expected = [
u'Copyright (c) 2003 Nils Nordman.',
]
check_detection(expected, test_file)
def test_ics_dropbear_progressmeter_h(self):
test_file = self.get_test_loc('ics/dropbear/progressmeter.h')
expected = [
u'Copyright (c) 2002 Nils Nordman.',
]
check_detection(expected, test_file)
def test_ics_dropbear_scp_c(self):
test_file = self.get_test_loc('ics/dropbear/scp.c')
expected = [
u'Copyright (c) 1999 Theo de Raadt.',
u'Copyright (c) 1999 Aaron Campbell.',
u'Copyright (c) 1983, 1990, 1992, 1993, 1995 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_dropbear_scpmisc_c(self):
test_file = self.get_test_loc('ics/dropbear/scpmisc.c')
expected = [
u'Copyright (c) 2000 Markus Friedl.',
u'Copyright (c) 1995 Tatu Ylonen <ylo@cs.hut.fi>, Espoo, Finland',
]
check_detection(expected, test_file)
def test_ics_dropbear_scpmisc_h(self):
test_file = self.get_test_loc('ics/dropbear/scpmisc.h')
expected = [
u'Copyright (c) 1995 Tatu Ylonen <ylo@cs.hut.fi>, Espoo, Finland',
]
check_detection(expected, test_file)
def test_ics_dropbear_svr_authpam_c(self):
test_file = self.get_test_loc('ics/dropbear/svr-authpam.c')
expected = [
u'Copyright (c) 2004 Martin Carlsson',
u'Portions (c) 2004 Matt Johnston',
]
check_detection(expected, test_file)
def test_ics_dropbear_svr_main_c(self):
test_file = self.get_test_loc('ics/dropbear/svr-main.c')
expected = [
u'Copyright (c) 2002-2006 Matt Johnston',
]
check_detection(expected, test_file)
def test_ics_dropbear_libtommath_mtest_mpi_c(self):
test_file = self.get_test_loc('ics/dropbear-libtommath-mtest/mpi.c')
expected = [
u'Copyright (c) 1998 Michael J. Fromberger',
]
check_detection(expected, test_file)
def test_ics_dropbear_libtommath_mtest_mpi_h(self):
test_file = self.get_test_loc('ics/dropbear-libtommath-mtest/mpi.h')
expected = [
u'Copyright (c) 1998 Michael J. Fromberger',
]
check_detection(expected, test_file)
def test_ics_easymock_src_org_easymock_abstractmatcher_java_trail_name(self):
test_file = self.get_test_loc('ics/easymock-src-org-easymock/AbstractMatcher.java')
expected = [
u'Copyright 2001-2009 OFFIS, Tammo Freese',
]
check_detection(expected, test_file)
def test_ics_easymock_src_org_easymock_capture_java_trail_name(self):
test_file = self.get_test_loc('ics/easymock-src-org-easymock/Capture.java')
expected = [
u'Copyright 2003-2009 OFFIS, Henri Tremblay',
]
check_detection(expected, test_file)
def test_ics_easymock_src_org_easymock_iargumentmatcher_java_trail_name(self):
test_file = self.get_test_loc('ics/easymock-src-org-easymock/IArgumentMatcher.java')
expected = [
u'Copyright 2001-2006 OFFIS, Tammo Freese',
]
check_detection(expected, test_file)
def test_ics_embunit_inc_assertimpl_h(self):
test_file = self.get_test_loc('ics/embunit-inc/AssertImpl.h')
expected = [
u'Copyright (c) 2003 Embedded Unit Project',
]
check_detection(expected, test_file)
def test_ics_embunit_src_stdimpl_c(self):
test_file = self.get_test_loc('ics/embunit-src/stdImpl.c')
expected = [
u'Copyright (c) 2003 Embedded Unit Project',
]
check_detection(expected, test_file)
def test_ics_emma_android_mk(self):
test_file = self.get_test_loc('ics/emma/Android.mk')
expected = [
u'Copyright 2008 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_emma_build_txt(self):
test_file = self.get_test_loc('ics/emma/BUILD.txt')
expected = [
u'Copyright (c) 2003-2004 Vlad Roubtsov.',
]
check_detection(expected, test_file)
def test_ics_emma_test_sh(self):
test_file = self.get_test_loc('ics/emma/test.sh')
expected = [
u'Copyright 2009 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_emma_ant_ant14_com_vladium_emma_antmain_java(self):
test_file = self.get_test_loc('ics/emma-ant-ant14-com-vladium-emma/ANTMain.java')
expected = [
u'Copyright (c) 2003 Vladimir Roubtsov.',
u'Vlad Roubtsov, (c) 2004',
]
check_detection(expected, test_file)
def test_ics_emma_ant_ant14_com_vladium_emma_emmajavatask_java(self):
test_file = self.get_test_loc('ics/emma-ant-ant14-com-vladium-emma/emmajavaTask.java')
expected = [
u'Copyright (c) 2003 Vladimir Roubtsov.',
u'Vlad Roubtsov, (c) 2003',
]
check_detection(expected, test_file)
def test_ics_emma_core_data_manifest_mf_extra_implementation(self):
test_file = self.get_test_loc('ics/emma-core-data/MANIFEST.MF')
expected = [
u'(c) Vladimir Roubtsov',
]
check_detection(expected, test_file)
def test_ics_emma_core_java12_com_vladium_emma_iappconstants_java_extra_string(self):
test_file = self.get_test_loc('ics/emma-core-java12-com-vladium-emma/IAppConstants.java')
expected = [
u'Copyright (c) 2003 Vladimir Roubtsov.',
u'Vlad Roubtsov, (c) 2003',
u'(c) Vladimir Roubtsov',
]
check_detection(expected, test_file)
def test_ics_emma_core_java12_com_vladium_emma_processor_java(self):
test_file = self.get_test_loc('ics/emma-core-java12-com-vladium-emma/Processor.java')
expected = [
u'Copyright (c) 2004 Vladimir Roubtsov.',
u'Vlad Roubtsov, (c) 2004',
]
check_detection(expected, test_file)
def test_ics_emma_core_java12_com_vladium_emma_data_imetadataconstants_java(self):
test_file = self.get_test_loc('ics/emma-core-java12-com-vladium-emma-data/IMetadataConstants.java')
expected = [
u'Copyright (c) 2003 Vladimir Roubtsov.',
]
check_detection(expected, test_file)
def test_ics_emma_core_java12_com_vladium_emma_report_lcov_reportgenerator_java(self):
test_file = self.get_test_loc('ics/emma-core-java12-com-vladium-emma-report-lcov/ReportGenerator.java')
expected = [
u'Copyright 2009 Google Inc.',
u'Copyright (c) 2003 Vladimir Roubtsov.',
u'Vlad Roubtsov, (c) 2003',
u'Tim Baverstock, (c) 2009',
]
check_detection(expected, test_file)
def test_ics_emma_core_java12_com_vladium_jcd_cls_abstractclassdefvisitor_java(self):
test_file = self.get_test_loc('ics/emma-core-java12-com-vladium-jcd-cls/AbstractClassDefVisitor.java')
expected = [
u'Copyright (c) 2003 Vladimir Roubtsov.',
u'(c) 2001, Vlad Roubtsov',
]
check_detection(expected, test_file)
def test_ics_emma_core_java12_com_vladium_jcd_cls_constantcollection_java(self):
test_file = self.get_test_loc('ics/emma-core-java12-com-vladium-jcd-cls/ConstantCollection.java')
expected = [
u'Copyright (c) 2003 Vladimir Roubtsov.',
u'(c) 2001, Vladimir Roubtsov',
]
check_detection(expected, test_file)
def test_ics_emma_core_java12_com_vladium_logging_iloglevels_java(self):
test_file = self.get_test_loc('ics/emma-core-java12-com-vladium-logging/ILogLevels.java')
expected = [
u'Copyright (c) 2003 Vladimir Roubtsov.',
u'Vlad Roubtsov, (c) 2001',
]
check_detection(expected, test_file)
def test_ics_emma_core_java12_com_vladium_util_softvaluemap_java(self):
test_file = self.get_test_loc('ics/emma-core-java12-com-vladium-util/SoftValueMap.java')
expected = [
u'Copyright (c) 2003 Vladimir Roubtsov.',
u'(c) 2002, Vlad Roubtsov',
]
check_detection(expected, test_file)
def test_ics_emma_core_java12_com_vladium_util_wcmatcher_java(self):
test_file = self.get_test_loc('ics/emma-core-java12-com-vladium-util/WCMatcher.java')
expected = [
u'Copyright (c) 2003 Vladimir Roubtsov.',
u'Vlad Roubtsov, (c) 2002',
]
check_detection(expected, test_file)
def test_ics_esd_include_audiofile_h(self):
test_file = self.get_test_loc('ics/esd-include/audiofile.h')
expected = [
u'Copyright (c) 1998-2000, Michael Pruett <michael@68k.org>',
]
check_detection(expected, test_file)
def test_ics_expat_configure(self):
test_file = self.get_test_loc('ics/expat/configure')
expected = [
u'Copyright (c) 2003 Free Software Foundation, Inc.',
u'Copyright (c) 2003 Free Software Foundation, Inc.',
u'Copyright 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.',
u'Copyright (c) 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_expat_configure_in(self):
test_file = self.get_test_loc('ics/expat/configure.in')
expected = [
u'Copyright 2000 Clark Cooper',
]
check_detection(expected, test_file)
def test_ics_expat_notice(self):
test_file = self.get_test_loc('ics/expat/NOTICE')
expected = [
u'Copyright (c) 1998, 1999, 2000 Thai Open Source Software Center Ltd and Clark Cooper',
u'Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Expat maintainers.'
]
check_detection(expected, test_file)
def test_ics_expat_amiga_expat_lib_c_trail_maint(self):
test_file = self.get_test_loc('ics/expat-amiga/expat_lib.c')
expected = [
u'Copyright (c) 2001-2007 Expat maintainers.',
]
check_detection(expected, test_file)
def test_ics_expat_conftools_libtool_m4(self):
test_file = self.get_test_loc('ics/expat-conftools/libtool.m4')
expected = [
u'Copyright 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_expat_conftools_ltmain_sh(self):
test_file = self.get_test_loc('ics/expat-conftools/ltmain.sh')
expected = [
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
u'Copyright (c) 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_expat_doc_reference_html(self):
test_file = self.get_test_loc('ics/expat-doc/reference.html')
expected = [
u'Copyright 1999,2000 Clark Cooper <coopercc@netheaven.com>',
]
check_detection(expected, test_file)
def test_ics_expat_examples_outline_c(self):
test_file = self.get_test_loc('ics/expat-examples/outline.c')
expected = [
u'Copyright 1999, Clark Cooper',
]
check_detection(expected, test_file)
def test_ics_expat_lib_ascii_h(self):
test_file = self.get_test_loc('ics/expat-lib/ascii.h')
expected = [
u'Copyright (c) 1998, 1999 Thai Open Source Software Center Ltd',
]
check_detection(expected, test_file)
def test_ics_expat_lib_expat_h(self):
test_file = self.get_test_loc('ics/expat-lib/expat.h')
expected = [
u'Copyright (c) 1998, 1999, 2000 Thai Open Source Software Center Ltd',
]
check_detection(expected, test_file)
def test_ics_expat_lib_macconfig_h(self):
test_file = self.get_test_loc('ics/expat-lib/macconfig.h')
expected = [
u'Copyright 2000, Clark Cooper',
]
check_detection(expected, test_file)
def test_ics_expat_lib_makefile_mpw_extra_portion(self):
test_file = self.get_test_loc('ics/expat-lib/Makefile.MPW')
expected = [
u'Copyright (c) 2002 Daryle Walker',
u'Portions Copyright (c) 2002 Thomas Wegner',
]
check_detection(expected, test_file)
def test_ics_expat_lib_xmlparse_c(self):
test_file = self.get_test_loc('ics/expat-lib/xmlparse.c')
expected = [
u'Copyright (c) 1998, 1999, 2000 Thai Open Source Software Center Ltd',
]
check_detection(expected, test_file)
def test_ics_expat_lib_xmltok_c(self):
test_file = self.get_test_loc('ics/expat-lib/xmltok.c')
expected = [
u'Copyright (c) 1998, 1999 Thai Open Source Software Center Ltd',
]
check_detection(expected, test_file)
def test_ics_expat_tests_chardata_c(self):
test_file = self.get_test_loc('ics/expat-tests/chardata.c')
expected = [
u'Copyright (c) 1998-2003 Thai Open Source Software Center Ltd',
]
check_detection(expected, test_file)
def test_ics_expat_win32_expat_iss(self):
test_file = self.get_test_loc('ics/expat-win32/expat.iss')
expected = [
u'Copyright (c) 1998-2006 Thai Open Source Software Center, Clark Cooper, and the Expat maintainers',
]
check_detection(expected, test_file)
def test_ics_eyes_free_notice(self):
test_file = self.get_test_loc('ics/eyes-free/NOTICE')
expected = [
u'Copyright 2010 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_fdlibm_configure(self):
test_file = self.get_test_loc('ics/fdlibm/configure')
expected = [
u'Copyright (c) 2003 Free Software Foundation, Inc.',
u'Copyright (c) 2003 Free Software Foundation, Inc.',
u'Copyright (c) 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_fdlibm_e_acos_c(self):
test_file = self.get_test_loc('ics/fdlibm/e_acos.c')
expected = [
u'Copyright (c) 1993 by Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_fdlibm_e_exp_c(self):
test_file = self.get_test_loc('ics/fdlibm/e_exp.c')
expected = [
u'Copyright (c) 2004 by Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_fdlibm_k_tan_c(self):
test_file = self.get_test_loc('ics/fdlibm/k_tan.c')
expected = [
u'Copyright 2004 Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_fdlibm_makefile_in(self):
test_file = self.get_test_loc('ics/fdlibm/makefile.in')
expected = [
u'Copyright (c) 1993 by Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_fdlibm_notice(self):
test_file = self.get_test_loc('ics/fdlibm/NOTICE')
expected = [
u'Copyright (c) 1993 by Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_flac_notice(self):
test_file = self.get_test_loc('ics/flac/NOTICE')
expected = [
u'Copyright (c) 2000,2001,2002,2003,2004,2005,2006,2007 Josh Coalson',
]
check_detection(expected, test_file)
def test_ics_flac_include_flac_all_h(self):
test_file = self.get_test_loc('ics/flac-include-FLAC/all.h')
expected = [
u'Copyright (c) 2000,2001,2002,2003,2004,2005,2006,2007 Josh Coalson',
]
check_detection(expected, test_file)
def test_ics_flac_include_flac_assert_h(self):
test_file = self.get_test_loc('ics/flac-include-FLAC/assert.h')
expected = [
u'Copyright (c) 2001,2002,2003,2004,2005,2006,2007 Josh Coalson',
]
check_detection(expected, test_file)
def test_ics_flac_include_flac_callback_h(self):
test_file = self.get_test_loc('ics/flac-include-FLAC/callback.h')
expected = [
u'Copyright (c) 2004,2005,2006,2007 Josh Coalson',
]
check_detection(expected, test_file)
def test_ics_flac_include_share_alloc_h(self):
test_file = self.get_test_loc('ics/flac-include-share/alloc.h')
expected = [
u'Copyright (c) 2011 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_flac_libflac_makefile_am(self):
test_file = self.get_test_loc('ics/flac-libFLAC/Makefile.am')
expected = [
u'Copyright (c) 2001,2002,2003,2004,2005,2006,2007 Josh Coalson',
]
check_detection(expected, test_file)
def test_ics_flac_libflac_makefile_in(self):
test_file = self.get_test_loc('ics/flac-libFLAC/Makefile.in')
expected = [
u'Copyright 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.',
u'Copyright (c) 2001,2002,2003,2004,2005,2006,2007 Josh Coalson',
]
check_detection(expected, test_file)
def test_ics_flac_libflac_ogg_decoder_aspect_c(self):
test_file = self.get_test_loc('ics/flac-libFLAC/ogg_decoder_aspect.c')
expected = [
u'Copyright (c) 2002,2003,2004,2005,2006,2007 Josh Coalson',
]
check_detection(expected, test_file)
def test_ics_flac_libflac_window_c(self):
test_file = self.get_test_loc('ics/flac-libFLAC/window.c')
expected = [
u'Copyright (c) 2006,2007 Josh Coalson',
]
check_detection(expected, test_file)
def test_ics_flac_libflac_ia32_bitreader_asm_nasm(self):
test_file = self.get_test_loc('ics/flac-libFLAC-ia32/bitreader_asm.nasm')
expected = [
u'Copyright (c) 2001,2002,2003,2004,2005,2006,2007 Josh Coalson',
]
check_detection(expected, test_file)
def test_ics_flac_libflac_ppc_makefile_am(self):
test_file = self.get_test_loc('ics/flac-libFLAC-ppc/Makefile.am')
expected = [
u'Copyright (c) 2004,2005,2006,2007 Josh Coalson',
]
check_detection(expected, test_file)
def test_ics_flac_libflac_ppc_makefile_in(self):
test_file = self.get_test_loc('ics/flac-libFLAC-ppc/Makefile.in')
expected = [
u'Copyright 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.',
u'Copyright (c) 2004,2005,2006,2007 Josh Coalson',
]
check_detection(expected, test_file)
def test_ics_freetype_notice(self):
test_file = self.get_test_loc('ics/freetype/NOTICE')
expected = [
u'Copyright 1996-2002, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg',
u'copyright (c) The FreeType Project (www.freetype.org).',
u'copyright (c) 1996-2000 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_builds_ft2unix_h(self):
test_file = self.get_test_loc('ics/freetype-builds/ft2unix.h')
expected = [
u'Copyright 1996-2001, 2003, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_ft2build_h(self):
test_file = self.get_test_loc('ics/freetype-include/ft2build.h')
expected = [
u'Copyright 1996-2001, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_freetype_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/freetype.h')
expected = [
u'Copyright 1996-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftadvanc_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftadvanc.h')
expected = [
u'Copyright 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftbbox_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftbbox.h')
expected = [
u'Copyright 1996-2001, 2003, 2007, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftbdf_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftbdf.h')
expected = [
u'Copyright 2002, 2003, 2004, 2006, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftbitmap_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftbitmap.h')
expected = [
u'Copyright 2004, 2005, 2006, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftcache_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftcache.h')
expected = [
u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftcid_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftcid.h')
expected = [
u'Copyright 2007, 2009 by Dereg Clegg, Michael Toftdal.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_fterrdef_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/fterrdef.h')
expected = [
u'Copyright 2002, 2004, 2006, 2007, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_fterrors_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/fterrors.h')
expected = [
u'Copyright 1996-2001, 2002, 2004, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftgasp_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftgasp.h')
expected = [
u'Copyright 2007, 2008, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftglyph_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftglyph.h')
expected = [
u'Copyright 1996-2003, 2006, 2008, 2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftgxval_h_trail_name(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftgxval.h')
expected = [
u'Copyright 2004, 2005, 2006 by Masatake YAMATO, Redhat K.K, David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftgzip_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftgzip.h')
expected = [
u'Copyright 2002, 2003, 2004, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftimage_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftimage.h')
expected = [
u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftincrem_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftincrem.h')
expected = [
u'Copyright 2002, 2003, 2006, 2007, 2008, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftlcdfil_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftlcdfil.h')
expected = [
u'Copyright 2006, 2007, 2008, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftlist_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftlist.h')
expected = [
u'Copyright 1996-2001, 2003, 2007, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftlzw_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftlzw.h')
expected = [
u'Copyright 2004, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftmac_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftmac.h')
expected = [
u'Copyright 1996-2001, 2004, 2006, 2007 by Just van Rossum, David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftmm_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftmm.h')
expected = [
u'Copyright 1996-2001, 2003, 2004, 2006, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftmodapi_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftmodapi.h')
expected = [
u'Copyright 1996-2001, 2002, 2003, 2006, 2008, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftmoderr_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftmoderr.h')
expected = [
u'Copyright 2001, 2002, 2003, 2004, 2005, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftotval_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftotval.h')
expected = [
u'Copyright 2004, 2005, 2006, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftoutln_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftoutln.h')
expected = [
u'Copyright 1996-2003, 2005-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftpfr_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftpfr.h')
expected = [
u'Copyright 2002, 2003, 2004, 2006, 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftrender_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftrender.h')
expected = [
u'Copyright 1996-2001, 2005, 2006, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftsnames_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftsnames.h')
expected = [
u'Copyright 1996-2001, 2002, 2003, 2006, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftstroke_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftstroke.h')
expected = [
u'Copyright 2002-2006, 2008, 2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftsynth_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftsynth.h')
expected = [
u'Copyright 2000-2001, 2003, 2006, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftsystem_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftsystem.h')
expected = [
u'Copyright 1996-2001, 2002, 2005, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_fttrigon_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/fttrigon.h')
expected = [
u'Copyright 2001, 2003, 2005, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_fttypes_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/fttypes.h')
expected = [
u'Copyright 1996-2001, 2002, 2004, 2006, 2007, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftwinfnt_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftwinfnt.h')
expected = [
u'Copyright 2003, 2004, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_ftxf86_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype/ftxf86.h')
expected = [
u'Copyright 2002, 2003, 2004, 2006, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
# Copyright-detection checks for FreeType public and internal headers
# shipped under ics/freetype-include*/ in the test data.
def test_ics_freetype_include_freetype_t1tables_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype/t1tables.h')
    check_detection([u'Copyright 1996-2001, 2002, 2003, 2004, 2006, 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_ttnameid_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype/ttnameid.h')
    check_detection([u'Copyright 1996-2002, 2003, 2004, 2006, 2007, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_tttables_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype/tttables.h')
    check_detection([u'Copyright 1996-2005, 2008-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_tttags_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype/tttags.h')
    check_detection([u'Copyright 1996-2001, 2004, 2005, 2007, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_ttunpat_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype/ttunpat.h')
    check_detection([u'Copyright 2003, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_config_ftconfig_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-config/ftconfig.h')
    check_detection([u'Copyright 1996-2004, 2006-2008, 2010-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_config_ftstdlib_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-config/ftstdlib.h')
    check_detection([u'Copyright 2002, 2003, 2004, 2005, 2006, 2007, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_internal_autohint_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal/autohint.h')
    check_detection([u'Copyright 1996-2001, 2002, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_internal_ftcalc_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal/ftcalc.h')
    check_detection([u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_internal_ftdebug_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal/ftdebug.h')
    check_detection([u'Copyright 1996-2001, 2002, 2004, 2006, 2007, 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_internal_ftdriver_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal/ftdriver.h')
    check_detection([u'Copyright 1996-2001, 2002, 2003, 2006, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_internal_ftgloadr_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal/ftgloadr.h')
    # NOTE: expected statement has no trailing period in the source file.
    check_detection([u'Copyright 2002, 2003, 2005, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg'], loc)
def test_ics_freetype_include_freetype_internal_ftmemory_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal/ftmemory.h')
    check_detection([u'Copyright 1996-2001, 2002, 2004, 2005, 2006, 2007, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg'], loc)
def test_ics_freetype_include_freetype_internal_ftobjs_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal/ftobjs.h')
    check_detection([u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2008, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_internal_ftpic_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal/ftpic.h')
    check_detection([u'Copyright 2009 by Oran Agra and Mickey Gabel.'], loc)
def test_ics_freetype_include_freetype_internal_ftrfork_h_trail_name(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal/ftrfork.h')
    check_detection([u'Copyright 2004, 2006, 2007 by Masatake YAMATO and Redhat K.K.'], loc)
def test_ics_freetype_include_freetype_internal_ftserv_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal/ftserv.h')
    check_detection([u'Copyright 2003, 2004, 2005, 2006, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_internal_ftstream_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal/ftstream.h')
    check_detection([u'Copyright 1996-2002, 2004-2006, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_internal_fttrace_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal/fttrace.h')
    check_detection([u'Copyright 2002, 2004-2007, 2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_internal_ftvalid_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal/ftvalid.h')
    check_detection([u'Copyright 2004 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_internal_internal_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal/internal.h')
    check_detection([u'Copyright 1996-2001, 2002, 2003, 2004 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_internal_pcftypes_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal/pcftypes.h')
    check_detection([u'Copyright (c) 2000, 2001, 2002 by Francesco Zappa Nardelli'], loc)
def test_ics_freetype_include_freetype_internal_pshints_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal/pshints.h')
    check_detection([u'Copyright 2001, 2002, 2003, 2005, 2006, 2007, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_internal_sfnt_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal/sfnt.h')
    check_detection([u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_internal_tttypes_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal/tttypes.h')
    check_detection([u'Copyright 1996-2001, 2002, 2004, 2005, 2006, 2007, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_internal_services_svbdf_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal-services/svbdf.h')
    check_detection([u'Copyright 2003 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_internal_services_svcid_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal-services/svcid.h')
    check_detection([u'Copyright 2007, 2009 by Derek Clegg, Michael Toftdal.'], loc)
def test_ics_freetype_include_freetype_internal_services_svgxval_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal-services/svgxval.h')
    check_detection([u'Copyright 2004, 2005 by Masatake YAMATO, Red Hat K.K., David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_internal_services_svkern_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal-services/svkern.h')
    check_detection([u'Copyright 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_internal_services_svmm_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal-services/svmm.h')
    check_detection([u'Copyright 2003, 2004 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_internal_services_svpostnm_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal-services/svpostnm.h')
    check_detection([u'Copyright 2003, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_internal_services_svpsinfo_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal-services/svpsinfo.h')
    check_detection([u'Copyright 2003, 2004, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_include_freetype_internal_services_svttcmap_h_trail_name(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal-services/svttcmap.h')
    check_detection(
        [u'Copyright 2003 by Masatake YAMATO, Redhat K.K.',
         u'Copyright 2003, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        loc)
def test_ics_freetype_include_freetype_internal_services_svttglyf_h(self):
    loc = self.get_test_loc('ics/freetype-include-freetype-internal-services/svttglyf.h')
    check_detection([u'Copyright 2007 by David Turner.'], loc)
# Copyright-detection checks for the FreeType autofit sources
# under ics/freetype-src-autofit/ in the test data.
def test_ics_freetype_src_autofit_afangles_c(self):
    loc = self.get_test_loc('ics/freetype-src-autofit/afangles.c')
    check_detection([u'Copyright 2003-2006, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_autofit_afcjk_c(self):
    loc = self.get_test_loc('ics/freetype-src-autofit/afcjk.c')
    check_detection([u'Copyright 2006-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_autofit_afcjk_h(self):
    loc = self.get_test_loc('ics/freetype-src-autofit/afcjk.h')
    check_detection([u'Copyright 2006, 2007, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_autofit_afdummy_c(self):
    loc = self.get_test_loc('ics/freetype-src-autofit/afdummy.c')
    check_detection([u'Copyright 2003-2005, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_autofit_aferrors_h(self):
    loc = self.get_test_loc('ics/freetype-src-autofit/aferrors.h')
    check_detection([u'Copyright 2005 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_autofit_afglobal_c(self):
    loc = self.get_test_loc('ics/freetype-src-autofit/afglobal.c')
    check_detection([u'Copyright 2003-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_autofit_afglobal_h(self):
    loc = self.get_test_loc('ics/freetype-src-autofit/afglobal.h')
    check_detection([u'Copyright 2003-2005, 2007, 2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_autofit_afhints_c(self):
    loc = self.get_test_loc('ics/freetype-src-autofit/afhints.c')
    check_detection([u'Copyright 2003-2007, 2009-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_autofit_afhints_h(self):
    loc = self.get_test_loc('ics/freetype-src-autofit/afhints.h')
    check_detection([u'Copyright 2003-2008, 2010-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_autofit_afindic_c(self):
    loc = self.get_test_loc('ics/freetype-src-autofit/afindic.c')
    check_detection([u'Copyright 2007, 2011 by Rahul Bhalerao <rahul.bhalerao@redhat.com>, <b.rahul.pm@gmail.com>.'], loc)
def test_ics_freetype_src_autofit_afindic_h(self):
    loc = self.get_test_loc('ics/freetype-src-autofit/afindic.h')
    check_detection([u'Copyright 2007 by Rahul Bhalerao <rahul.bhalerao@redhat.com>, <b.rahul.pm@gmail.com>.'], loc)
def test_ics_freetype_src_autofit_aflatin_h(self):
    loc = self.get_test_loc('ics/freetype-src-autofit/aflatin.h')
    check_detection([u'Copyright 2003-2007, 2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_autofit_afloader_c(self):
    loc = self.get_test_loc('ics/freetype-src-autofit/afloader.c')
    check_detection([u'Copyright 2003-2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_autofit_afmodule_c(self):
    loc = self.get_test_loc('ics/freetype-src-autofit/afmodule.c')
    check_detection([u'Copyright 2003-2006, 2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_autofit_afmodule_h(self):
    loc = self.get_test_loc('ics/freetype-src-autofit/afmodule.h')
    check_detection([u'Copyright 2003, 2004, 2005 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_autofit_afpic_c(self):
    loc = self.get_test_loc('ics/freetype-src-autofit/afpic.c')
    check_detection([u'Copyright 2009, 2010, 2011 by Oran Agra and Mickey Gabel.'], loc)
def test_ics_freetype_src_autofit_afpic_h(self):
    loc = self.get_test_loc('ics/freetype-src-autofit/afpic.h')
    check_detection([u'Copyright 2009, 2011 by Oran Agra and Mickey Gabel.'], loc)
def test_ics_freetype_src_autofit_afwarp_h(self):
    loc = self.get_test_loc('ics/freetype-src-autofit/afwarp.h')
    check_detection([u'Copyright 2006, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_autofit_autofit_c(self):
    loc = self.get_test_loc('ics/freetype-src-autofit/autofit.c')
    check_detection([u'Copyright 2003-2007, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
# Copyright-detection checks for the FreeType base sources
# under ics/freetype-src-base/ in the test data.
def test_ics_freetype_src_base_ftadvanc_c(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftadvanc.c')
    check_detection([u'Copyright 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_base_ftapi_c(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftapi.c')
    check_detection([u'Copyright 2002 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_base_ftbase_c(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftbase.c')
    check_detection([u'Copyright 1996-2001, 2002, 2003, 2004, 2006, 2007, 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_base_ftbase_h(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftbase.h')
    # Only the truncated holder list is expected here; the full list with the
    # trailing name is the known-failing variant below.
    check_detection([u'Copyright 2008, 2010 by David Turner, Robert Wilhelm, Werner Lemberg'], loc)
@expectedFailure
def test_ics_freetype_src_base_ftbase_h_trail_name(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftbase.h')
    check_detection([u'Copyright 2008, 2010 by David Turner, Robert Wilhelm, Werner Lemberg and suzuki toshiya.'], loc)
def test_ics_freetype_src_base_ftbbox_c(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftbbox.c')
    check_detection([u'Copyright 1996-2001, 2002, 2004, 2006, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_base_ftbitmap_c(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftbitmap.c')
    check_detection([u'Copyright 2004, 2005, 2006, 2007, 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_base_ftcalc_c(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftcalc.c')
    check_detection([u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_base_ftdbgmem_c(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftdbgmem.c')
    check_detection([u'Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_base_ftdebug_c(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftdebug.c')
    check_detection([u'Copyright 1996-2001, 2002, 2004, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_base_ftgloadr_c(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftgloadr.c')
    check_detection([u'Copyright 2002, 2003, 2004, 2005, 2006, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg'], loc)
def test_ics_freetype_src_base_ftglyph_c(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftglyph.c')
    check_detection([u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2007, 2008, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_base_ftinit_c(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftinit.c')
    check_detection([u'Copyright 1996-2001, 2002, 2005, 2007, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_base_ftlcdfil_c(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftlcdfil.c')
    check_detection([u'Copyright 2006, 2008, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_base_ftmm_c(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftmm.c')
    check_detection([u'Copyright 1996-2001, 2003, 2004, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_base_ftobjs_c(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftobjs.c')
    check_detection([u'Copyright 1996-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_base_ftpatent_c(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftpatent.c')
    check_detection([u'Copyright 2007, 2008, 2010 by David Turner.'], loc)
def test_ics_freetype_src_base_ftrfork_c_trail_name(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftrfork.c')
    check_detection([u'Copyright 2004, 2005, 2006, 2007, 2008, 2009, 2010 by Masatake YAMATO and Redhat K.K.'], loc)
def test_ics_freetype_src_base_ftsnames_c(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftsnames.c')
    check_detection([u'Copyright 1996-2001, 2002, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_base_ftstream_c(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftstream.c')
    check_detection([u'Copyright 2000-2002, 2004-2006, 2008-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_base_ftstroke_c(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftstroke.c')
    check_detection([u'Copyright 2002-2006, 2008-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_base_ftsynth_c(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftsynth.c')
    check_detection([u'Copyright 2000-2001, 2002, 2003, 2004, 2005, 2006, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_base_ftsystem_c(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftsystem.c')
    check_detection([u'Copyright 1996-2002, 2006, 2008-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_base_fttrigon_c(self):
    loc = self.get_test_loc('ics/freetype-src-base/fttrigon.c')
    check_detection([u'Copyright 2001, 2002, 2003, 2004, 2005 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_base_ftutil_c(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftutil.c')
    check_detection([u'Copyright 2002, 2004, 2005, 2006, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_base_ftxf86_c(self):
    loc = self.get_test_loc('ics/freetype-src-base/ftxf86.c')
    check_detection([u'Copyright 2002, 2003, 2004 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
# Copyright-detection checks for the FreeType CFF driver sources
# under ics/freetype-src-cff/ in the test data.
def test_ics_freetype_src_cff_cff_c(self):
    loc = self.get_test_loc('ics/freetype-src-cff/cff.c')
    check_detection([u'Copyright 1996-2001, 2002 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_cff_cffcmap_c(self):
    loc = self.get_test_loc('ics/freetype-src-cff/cffcmap.c')
    check_detection([u'Copyright 2002, 2003, 2004, 2005, 2006, 2007, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_cff_cffcmap_h(self):
    loc = self.get_test_loc('ics/freetype-src-cff/cffcmap.h')
    check_detection([u'Copyright 2002, 2003, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_cff_cfferrs_h(self):
    loc = self.get_test_loc('ics/freetype-src-cff/cfferrs.h')
    check_detection([u'Copyright 2001 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_cff_cffload_c(self):
    loc = self.get_test_loc('ics/freetype-src-cff/cffload.c')
    check_detection([u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_cff_cffload_h(self):
    loc = self.get_test_loc('ics/freetype-src-cff/cffload.h')
    check_detection([u'Copyright 1996-2001, 2002, 2003, 2007, 2008, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_cff_cffobjs_h(self):
    loc = self.get_test_loc('ics/freetype-src-cff/cffobjs.h')
    check_detection([u'Copyright 1996-2001, 2002, 2003, 2004, 2006, 2007, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_cff_cffparse_c(self):
    loc = self.get_test_loc('ics/freetype-src-cff/cffparse.c')
    check_detection([u'Copyright 1996-2004, 2007-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_cff_cffparse_h(self):
    loc = self.get_test_loc('ics/freetype-src-cff/cffparse.h')
    check_detection([u'Copyright 1996-2001, 2002, 2003 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_cff_cffpic_c(self):
    loc = self.get_test_loc('ics/freetype-src-cff/cffpic.c')
    check_detection([u'Copyright 2009, 2010 by Oran Agra and Mickey Gabel.'], loc)
def test_ics_freetype_src_cff_cfftypes_h(self):
    loc = self.get_test_loc('ics/freetype-src-cff/cfftypes.h')
    check_detection([u'Copyright 1996-2003, 2006-2008, 2010-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
# Copyright-detection checks for the FreeType psaux sources
# under ics/freetype-src-psaux/ in the test data.
def test_ics_freetype_src_psaux_afmparse_c(self):
    loc = self.get_test_loc('ics/freetype-src-psaux/afmparse.c')
    check_detection([u'Copyright 2006, 2007, 2008, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_psaux_psaux_c(self):
    loc = self.get_test_loc('ics/freetype-src-psaux/psaux.c')
    check_detection([u'Copyright 1996-2001, 2002, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_psaux_psauxmod_c(self):
    loc = self.get_test_loc('ics/freetype-src-psaux/psauxmod.c')
    check_detection([u'Copyright 2000-2001, 2002, 2003, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_psaux_psauxmod_h(self):
    loc = self.get_test_loc('ics/freetype-src-psaux/psauxmod.h')
    check_detection([u'Copyright 2000-2001 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_psaux_psconv_c(self):
    loc = self.get_test_loc('ics/freetype-src-psaux/psconv.c')
    check_detection([u'Copyright 2006, 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_psaux_t1cmap_c(self):
    loc = self.get_test_loc('ics/freetype-src-psaux/t1cmap.c')
    check_detection([u'Copyright 2002, 2003, 2006, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_psaux_t1decode_c(self):
    loc = self.get_test_loc('ics/freetype-src-psaux/t1decode.c')
    check_detection([u'Copyright 2000-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_psaux_t1decode_h(self):
    loc = self.get_test_loc('ics/freetype-src-psaux/t1decode.h')
    check_detection([u'Copyright 2000-2001, 2002, 2003 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
# Copyright-detection checks for the FreeType pshinter sources
# under ics/freetype-src-pshinter/ in the test data.
def test_ics_freetype_src_pshinter_pshalgo_c(self):
    loc = self.get_test_loc('ics/freetype-src-pshinter/pshalgo.c')
    check_detection([u'Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_pshinter_pshalgo_h(self):
    loc = self.get_test_loc('ics/freetype-src-pshinter/pshalgo.h')
    check_detection([u'Copyright 2001, 2002, 2003, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_pshinter_pshglob_c(self):
    loc = self.get_test_loc('ics/freetype-src-pshinter/pshglob.c')
    check_detection([u'Copyright 2001, 2002, 2003, 2004, 2006, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_pshinter_pshglob_h(self):
    loc = self.get_test_loc('ics/freetype-src-pshinter/pshglob.h')
    check_detection([u'Copyright 2001, 2002, 2003 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_pshinter_pshinter_c(self):
    loc = self.get_test_loc('ics/freetype-src-pshinter/pshinter.c')
    check_detection([u'Copyright 2001, 2003 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_pshinter_pshmod_c(self):
    loc = self.get_test_loc('ics/freetype-src-pshinter/pshmod.c')
    check_detection([u'Copyright 2001, 2002, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_pshinter_pshrec_c(self):
    loc = self.get_test_loc('ics/freetype-src-pshinter/pshrec.c')
    check_detection([u'Copyright 2001, 2002, 2003, 2004, 2007, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_pshinter_pshrec_h(self):
    loc = self.get_test_loc('ics/freetype-src-pshinter/pshrec.h')
    check_detection([u'Copyright 2001, 2002, 2003, 2006, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
# Copyright-detection checks for the FreeType psnames sources
# under ics/freetype-src-psnames/ in the test data.
def test_ics_freetype_src_psnames_psmodule_c(self):
    loc = self.get_test_loc('ics/freetype-src-psnames/psmodule.c')
    check_detection([u'Copyright 1996-2001, 2002, 2003, 2005, 2006, 2007, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_psnames_psmodule_h(self):
    loc = self.get_test_loc('ics/freetype-src-psnames/psmodule.h')
    check_detection([u'Copyright 1996-2001 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_psnames_pstables_h(self):
    loc = self.get_test_loc('ics/freetype-src-psnames/pstables.h')
    check_detection([u'Copyright 2005, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
# Copyright-detection checks for the FreeType raster sources
# under ics/freetype-src-raster/ in the test data.
def test_ics_freetype_src_raster_ftmisc_h(self):
    loc = self.get_test_loc('ics/freetype-src-raster/ftmisc.h')
    check_detection([u'Copyright 2005, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_raster_ftraster_c(self):
    loc = self.get_test_loc('ics/freetype-src-raster/ftraster.c')
    check_detection([u'Copyright 1996-2001, 2002, 2003, 2005, 2007, 2008, 2009, 2010, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_raster_ftrend1_c(self):
    loc = self.get_test_loc('ics/freetype-src-raster/ftrend1.c')
    check_detection([u'Copyright 1996-2001, 2002, 2003, 2005, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
# Copyright-detection checks for the FreeType sfnt sources
# under ics/freetype-src-sfnt/ in the test data.
def test_ics_freetype_src_sfnt_sfdriver_c(self):
    loc = self.get_test_loc('ics/freetype-src-sfnt/sfdriver.c')
    check_detection([u'Copyright 1996-2007, 2009-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_sfnt_sferrors_h(self):
    loc = self.get_test_loc('ics/freetype-src-sfnt/sferrors.h')
    check_detection([u'Copyright 2001, 2004 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_sfnt_sfobjs_c(self):
    loc = self.get_test_loc('ics/freetype-src-sfnt/sfobjs.c')
    check_detection([u'Copyright 1996-2008, 2010-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_sfnt_ttbdf_c(self):
    loc = self.get_test_loc('ics/freetype-src-sfnt/ttbdf.c')
    check_detection([u'Copyright 2005, 2006, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_sfnt_ttcmap_c(self):
    loc = self.get_test_loc('ics/freetype-src-sfnt/ttcmap.c')
    check_detection([u'Copyright 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_sfnt_ttcmap_h(self):
    loc = self.get_test_loc('ics/freetype-src-sfnt/ttcmap.h')
    check_detection([u'Copyright 2002, 2003, 2004, 2005 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_sfnt_ttkern_c(self):
    loc = self.get_test_loc('ics/freetype-src-sfnt/ttkern.c')
    check_detection([u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2007, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_sfnt_ttkern_h(self):
    loc = self.get_test_loc('ics/freetype-src-sfnt/ttkern.h')
    check_detection([u'Copyright 1996-2001, 2002, 2005, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_sfnt_ttload_h(self):
    loc = self.get_test_loc('ics/freetype-src-sfnt/ttload.h')
    check_detection([u'Copyright 1996-2001, 2002, 2005, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_sfnt_ttmtx_c(self):
    loc = self.get_test_loc('ics/freetype-src-sfnt/ttmtx.c')
    check_detection([u'Copyright 2006-2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_sfnt_ttpost_c(self):
    loc = self.get_test_loc('ics/freetype-src-sfnt/ttpost.c')
    check_detection([u'Copyright 1996-2001, 2002, 2003, 2006, 2007, 2008, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_sfnt_ttsbit_h(self):
    loc = self.get_test_loc('ics/freetype-src-sfnt/ttsbit.h')
    check_detection([u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_sfnt_ttsbit0_c(self):
    loc = self.get_test_loc('ics/freetype-src-sfnt/ttsbit0.c')
    check_detection([u'Copyright 2005, 2006, 2007, 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
# Copyright-detection checks for the FreeType smooth rasterizer sources
# under ics/freetype-src-smooth/ in the test data.
def test_ics_freetype_src_smooth_ftgrays_c(self):
    loc = self.get_test_loc('ics/freetype-src-smooth/ftgrays.c')
    check_detection([u'Copyright 2000-2003, 2005-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_smooth_ftsmooth_c(self):
    loc = self.get_test_loc('ics/freetype-src-smooth/ftsmooth.c')
    check_detection([u'Copyright 2000-2006, 2009-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'], loc)
def test_ics_freetype_src_truetype_truetype_c(self):
test_file = self.get_test_loc('ics/freetype-src-truetype/truetype.c')
expected = [
u'Copyright 1996-2001, 2004, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_src_truetype_ttgload_c(self):
test_file = self.get_test_loc('ics/freetype-src-truetype/ttgload.c')
expected = [
u'Copyright 1996-2011 David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_src_truetype_ttgload_h(self):
test_file = self.get_test_loc('ics/freetype-src-truetype/ttgload.h')
expected = [
u'Copyright 1996-2006, 2008, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_src_truetype_ttinterp_h(self):
test_file = self.get_test_loc('ics/freetype-src-truetype/ttinterp.h')
expected = [
u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2007, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_src_truetype_ttobjs_h(self):
test_file = self.get_test_loc('ics/freetype-src-truetype/ttobjs.h')
expected = [
u'Copyright 1996-2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_src_truetype_ttpload_c(self):
test_file = self.get_test_loc('ics/freetype-src-truetype/ttpload.c')
expected = [
u'Copyright 1996-2002, 2004-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_fsck_msdos_boot_c(self):
test_file = self.get_test_loc('ics/fsck_msdos/boot.c')
expected = [
u'Copyright (c) 1995, 1997 Wolfgang Solfrank',
u'Copyright (c) 1995 Martin Husemann',
]
check_detection(expected, test_file)
def test_ics_fsck_msdos_check_c(self):
test_file = self.get_test_loc('ics/fsck_msdos/check.c')
expected = [
u'Copyright (c) 1995, 1996, 1997 Wolfgang Solfrank',
u'Copyright (c) 1995 Martin Husemann',
]
check_detection(expected, test_file)
def test_ics_fsck_msdos_main_c(self):
test_file = self.get_test_loc('ics/fsck_msdos/main.c')
expected = [
u'Copyright (c) 1995 Wolfgang Solfrank',
u'Copyright (c) 1995 Martin Husemann',
]
check_detection(expected, test_file)
def test_ics_fsck_msdos_notice(self):
test_file = self.get_test_loc('ics/fsck_msdos/NOTICE')
expected = [
u'Copyright (c) 1995, 1997 Wolfgang Solfrank',
u'Copyright (c) 1995 Martin Husemann',
u'Copyright (c) 1995, 1996, 1997 Wolfgang Solfrank',
u'Copyright (c) 1995 Martin Husemann',
u'Copyright (c) 1995 Wolfgang Solfrank',
u'Copyright (c) 1995 Martin Husemann',
]
check_detection(expected, test_file)
def test_ics_genext2fs_aclocal_m4(self):
test_file = self.get_test_loc('ics/genext2fs/aclocal.m4')
expected = [
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1997, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1997, 1999, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_genext2fs_configure(self):
test_file = self.get_test_loc('ics/genext2fs/configure')
expected = [
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_genext2fs_genext2fs_c(self):
test_file = self.get_test_loc('ics/genext2fs/genext2fs.c')
expected = [
u'Copyright (c) 2000 Xavier Bestel <xavier.bestel@free.fr>',
u'Copyright (c) 1999,2000 by Lineo, inc. and John Beppu',
u'Copyright (c) 1999,2000,2001 by John Beppu <beppu@codepoet.org>',
u'Copyright (c) 2002 Edward Betts <edward@debian.org>',
u'Copyright (c) 2002 Ixia',
u'Copyright (c) 2002 Ixia',
u'Copyright (c) 2002 Ixia',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_genext2fs_genext2fs_c_trail_name_trail_name_trail_name_trail_name(self):
test_file = self.get_test_loc('ics/genext2fs/genext2fs.c')
expected = [
u'Copyright (c) 2000 Xavier Bestel <xavier.bestel@free.fr>',
u'Copyright (c) 1999,2000 by Lineo, inc. and John Beppu',
u'Beppu Copyright (c) 1999,2000,2001 by John Beppu <beppu@codepoet.org>',
u'Copyright (c) 2002 Edward Betts <edward@debian.org>',
u'Copyright (c) 2002 Ixia communications',
u'Copyright (c) 2002 Ixia communications',
u'Copyright (c) 2002 Ixia communications',
]
check_detection(expected, test_file)
def test_ics_genext2fs_m4_ac_func_scanf_can_malloc_m4(self):
test_file = self.get_test_loc('ics/genext2fs-m4/ac_func_scanf_can_malloc.m4')
expected = [
u'(c) Finn Thain 2006',
]
check_detection(expected, test_file)
def test_ics_giflib_gif_lib_private_h(self):
test_file = self.get_test_loc('ics/giflib/gif_lib_private.h')
expected = [
u'(c) Copyright 1997 Eric S. Raymond',
u'(c) Copyright 1997 Eric S. Raymond',
]
check_detection(expected, test_file)
def test_ics_giflib_notice(self):
test_file = self.get_test_loc('ics/giflib/NOTICE')
expected = [
u'Copyright (c) 1997 Eric S. Raymond',
]
check_detection(expected, test_file)
def test_ics_google_diff_match_patch_name_fraser_neil_plaintext_diff_match_patch_java(self):
test_file = self.get_test_loc('ics/google-diff-match-patch-name-fraser-neil-plaintext/diff_match_patch.java')
expected = [
u'Copyright 2006 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_gtest_test_gtest_filter_unittest_py(self):
test_file = self.get_test_loc('ics/gtest-test/gtest_filter_unittest.py')
expected = [
u'Copyright 2005, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_gtest_test_gtest_nc_test_py(self):
test_file = self.get_test_loc('ics/gtest-test/gtest_nc_test.py')
expected = [
u'Copyright 2007, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_guava_guava_ipr(self):
test_file = self.get_test_loc('ics/guava/guava.ipr')
expected = []
check_detection(expected, test_file)
@expectedFailure
def test_ics_guava_guava_ipr_markup(self):
test_file = self.get_test_loc('ics/guava/guava.ipr')
expected = [
u'Copyright (c) today.year Google Inc.',
]
check_detection(expected, test_file)
def test_ics_guava_src_com_google_common_annotations_gwtcompatible_java(self):
test_file = self.get_test_loc('ics/guava-src-com-google-common-annotations/GwtCompatible.java')
expected = [
u'Copyright (c) 2009 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_guava_src_com_google_common_annotations_visiblefortesting_java(self):
test_file = self.get_test_loc('ics/guava-src-com-google-common-annotations/VisibleForTesting.java')
expected = [
u'Copyright (c) 2006 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_guava_src_com_google_common_base_charmatcher_java(self):
test_file = self.get_test_loc('ics/guava-src-com-google-common-base/CharMatcher.java')
expected = [
u'Copyright (c) 2008 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_guava_src_com_google_common_base_charsets_java(self):
test_file = self.get_test_loc('ics/guava-src-com-google-common-base/Charsets.java')
expected = [
u'Copyright (c) 2007 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_guava_src_com_google_common_io_nulloutputstream_java(self):
test_file = self.get_test_loc('ics/guava-src-com-google-common-io/NullOutputStream.java')
expected = [
u'Copyright (c) 2004 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_harfbuzz_contrib_harfbuzz_unicode_icu_c(self):
test_file = self.get_test_loc('ics/harfbuzz-contrib/harfbuzz-unicode-icu.c')
expected = [
u'Copyright 2010, The Android Open Source Project',
u'Copyright 2010, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_harfbuzz_contrib_tables_bidimirroring_txt(self):
test_file = self.get_test_loc('ics/harfbuzz-contrib-tables/BidiMirroring.txt')
expected = [
u'Copyright (c) 1991-2008 Unicode, Inc.',
]
check_detection(expected, test_file)
def test_ics_harfbuzz_src_harfbuzz_arabic_c(self):
test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-arabic.c')
expected = [
u'Copyright (c) 2008 Nokia Corporation',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_harfbuzz_src_harfbuzz_arabic_c_trail_misc(self):
test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-arabic.c')
expected = [
u'Copyright (C) 2008 Nokia Corporation and/or its subsidiary(-ies)',
]
check_detection(expected, test_file)
def test_ics_harfbuzz_src_harfbuzz_buffer_private_h(self):
test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-buffer-private.h')
expected = [
u'Copyright (c) 1998-2004 David Turner and Werner Lemberg',
u'Copyright (c) 2004,2007 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_harfbuzz_src_harfbuzz_dump_c(self):
test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-dump.c')
expected = [
u'Copyright (c) 2000, 2007 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_harfbuzz_src_harfbuzz_external_h(self):
test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-external.h')
expected = [
u'Copyright (c) 2008 Nokia Corporation',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_harfbuzz_src_harfbuzz_external_h_trail_misc(self):
test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-external.h')
expected = [
u'Copyright (C) 2008 Nokia Corporation and/or its subsidiary(-ies)',
]
check_detection(expected, test_file)
def test_ics_harfbuzz_src_harfbuzz_gdef_private_h(self):
test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-gdef-private.h')
expected = [
u'Copyright (c) 1998-2004 David Turner and Werner Lemberg',
u'Copyright (c) 2006 Behdad Esfahbod',
]
check_detection(expected, test_file)
def test_ics_harfbuzz_src_harfbuzz_global_h(self):
test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-global.h')
expected = [
u'Copyright (c) 2008 Nokia Corporation',
u'Copyright (c) 2007 Red Hat, Inc.',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_harfbuzz_src_harfbuzz_global_h_trail_misc(self):
test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-global.h')
expected = [
u'Copyright (C) 2008 Nokia Corporation and/or its subsidiary(-ies)',
u'Copyright (c) 2007 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_harfbuzz_src_harfbuzz_gpos_c(self):
test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-gpos.c')
expected = [
u'Copyright (c) 1998-2004 David Turner and Werner Lemberg',
u'Copyright (c) 2006 Behdad Esfahbod',
u'Copyright (c) 2007 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_harfbuzz_src_harfbuzz_greek_c(self):
test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-greek.c')
expected = [
u'Copyright (c) 2010 Nokia Corporation',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_harfbuzz_src_harfbuzz_greek_c_trail_misc(self):
test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-greek.c')
expected = [
u'Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies)',
]
check_detection(expected, test_file)
def test_ics_harfbuzz_src_harfbuzz_impl_c(self):
test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-impl.c')
expected = [
u'Copyright (c) 1998-2004 David Turner and Werner Lemberg',
u'Copyright (c) 2008 Nokia Corporation',
u'Copyright (c) 2007 Red Hat, Inc.',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_harfbuzz_src_harfbuzz_impl_c_trail_misc(self):
test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-impl.c')
expected = [
u'Copyright (c) 1998-2004 David Turner and Werner Lemberg',
u'Copyright (C) 2008 Nokia Corporation and/or its subsidiary(-ies)',
u'Copyright (c) 2007 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_harfbuzz_src_harfbuzz_shape_h(self):
test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-shape.h')
expected = [
u'Copyright (c) 2006 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_harfbuzz_src_harfbuzz_stream_c(self):
test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-stream.c')
expected = [
u'Copyright (c) 2005 David Turner',
u'Copyright (c) 2008 Nokia Corporation',
u'Copyright (c) 2007 Red Hat, Inc.',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_harfbuzz_src_harfbuzz_stream_c_trail_misc(self):
test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-stream.c')
expected = [
u'Copyright (c) 2005 David Turner',
u'Copyright (C) 2008 Nokia Corporation and/or its subsidiary(-ies)',
u'Copyright (c) 2007 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_harfbuzz_src_harfbuzz_stream_h(self):
test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-stream.h')
expected = [
u'Copyright (c) 2005 David Turner',
u'Copyright (c) 2008 Nokia Corporation',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_harfbuzz_src_harfbuzz_stream_h_trail_misc(self):
test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-stream.h')
expected = [
u'Copyright (c) 2005 David Turner',
u'Copyright (C) 2008 Nokia Corporation and/or its subsidiary(-ies)',
]
check_detection(expected, test_file)
def test_ics_hyphenation_hnjalloc_c(self):
test_file = self.get_test_loc('ics/hyphenation/hnjalloc.c')
expected = [
u'Copyright (c) 1998 Raph Levien',
u'(c) 2001 ALTLinux, Moscow',
]
check_detection(expected, test_file)
def test_ics_hyphenation_hnjalloc_h(self):
test_file = self.get_test_loc('ics/hyphenation/hnjalloc.h')
expected = [
u'Copyright (c) 1998 Raph Levien',
]
check_detection(expected, test_file)
def test_ics_hyphenation_hyphen_c(self):
test_file = self.get_test_loc('ics/hyphenation/hyphen.c')
expected = [
u'Copyright (c) 1998 Raph Levien',
u'(c) 2001 ALTLinux, Moscow',
u'(c) 2001 Peter Novodvorsky (nidd@cs.msu.su)',
u'(c) 2006, 2007, 2008 Laszlo Nemeth',
]
check_detection(expected, test_file)
def test_ics_hyphenation_hyphen_h(self):
test_file = self.get_test_loc('ics/hyphenation/hyphen.h')
expected = [
u'(c) 1998 Raph Levien',
u'(c) 2001 ALTLinux, Moscow',
u'(c) 2006, 2007, 2008 Laszlo Nemeth',
u'Copyright (c) 1998 Raph Levien',
]
check_detection(expected, test_file)
def test_ics_hyphenation_readme(self):
test_file = self.get_test_loc('ics/hyphenation/README')
expected = [
u'(c) 1998 Raph Levien',
u'(c) 2001 ALTLinux, Moscow',
u'(c) 2006, 2007, 2008 Laszlo Nemeth',
]
check_detection(expected, test_file)
def test_ics_iproute2_readme_lnstat(self):
test_file = self.get_test_loc('ics/iproute2/README.lnstat')
expected = [
u'(c) 2004 Harald Welte laforge@gnumonks.org',
]
check_detection(expected, test_file)
def test_ics_iproute2_include_linux_if_addrlabel_h(self):
test_file = self.get_test_loc('ics/iproute2-include-linux/if_addrlabel.h')
expected = [
u'Copyright (c) 2007 USAGI/WIDE Project',
]
check_detection(expected, test_file)
def test_ics_iproute2_include_linux_if_arp_h(self):
test_file = self.get_test_loc('ics/iproute2-include-linux/if_arp.h')
expected = [
u'(c) UCB 1986-1988',
]
check_detection(expected, test_file)
def test_ics_iproute2_include_linux_if_tun_h(self):
test_file = self.get_test_loc('ics/iproute2-include-linux/if_tun.h')
expected = [
u'Copyright (c) 1999-2000 Maxim Krasnyansky <max_mk@yahoo.com>',
]
check_detection(expected, test_file)
def test_ics_iproute2_include_linux_netfilter_ipv4_h(self):
test_file = self.get_test_loc('ics/iproute2-include-linux/netfilter_ipv4.h')
expected = [
u'(c) 1998 Rusty Russell',
]
check_detection(expected, test_file)
def test_ics_iproute2_include_linux_can_netlink_h(self):
test_file = self.get_test_loc('ics/iproute2-include-linux-can/netlink.h')
expected = [
u'Copyright (c) 2009 Wolfgang Grandegger <wg@grandegger.com>',
]
check_detection(expected, test_file)
def test_ics_iproute2_include_linux_tc_act_tc_skbedit_h(self):
test_file = self.get_test_loc('ics/iproute2-include-linux-tc_act/tc_skbedit.h')
expected = [
u'Copyright (c) 2008, Intel Corporation.',
]
check_detection(expected, test_file)
def test_ics_iproute2_include_netinet_icmp6_h(self):
test_file = self.get_test_loc('ics/iproute2-include-netinet/icmp6.h')
expected = [
u'Copyright (c) 1991-1997,2000,2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_iproute2_ip_ip6tunnel_c(self):
test_file = self.get_test_loc('ics/iproute2-ip/ip6tunnel.c')
expected = [
u'Copyright (c) 2006 USAGI/WIDE Project',
]
check_detection(expected, test_file)
def test_ics_iproute2_ip_ipaddrlabel_c(self):
test_file = self.get_test_loc('ics/iproute2-ip/ipaddrlabel.c')
expected = [
u'Copyright (c) 2007 USAGI/WIDE Project',
]
check_detection(expected, test_file)
def test_ics_iproute2_ip_ipprefix_c(self):
test_file = self.get_test_loc('ics/iproute2-ip/ipprefix.c')
expected = [
u'Copyright (c) 2005 USAGI/WIDE Project',
]
check_detection(expected, test_file)
def test_ics_iproute2_ip_ipxfrm_c(self):
test_file = self.get_test_loc('ics/iproute2-ip/ipxfrm.c')
expected = [
u'Copyright (c) 2004 USAGI/WIDE Project',
]
check_detection(expected, test_file)
def test_ics_iproute2_misc_lnstat_c(self):
test_file = self.get_test_loc('ics/iproute2-misc/lnstat.c')
expected = [
u'Copyright (c) 2004 by Harald Welte <laforge@gnumonks.org>',
u'Copyright 2001 by Robert Olsson <robert.olsson@its.uu.se> Uppsala University, Sweden',
u'Copyright (c) 2004 by Harald Welte <laforge@gnumonks.org>',
]
check_detection(expected, test_file)
def test_ics_iproute2_misc_lnstat_util_c(self):
test_file = self.get_test_loc('ics/iproute2-misc/lnstat_util.c')
expected = [
u'Copyright (c) 2004 by Harald Welte <laforge@gnumonks.org>',
u'Copyright 2001 by Robert Olsson <robert.olsson@its.uu.se> Uppsala University, Sweden',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_notice_extra_contributed(self):
test_file = self.get_test_loc('ics/ipsec-tools/NOTICE')
expected = [
u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
u'Copyright (c) 2004 Emmanuel Dreyfus',
u'Copyright (c) 2004-2006 Emmanuel Dreyfus',
u'Copyright (c) 2000 WIDE Project.',
u'Copyright (c) 2004-2005 Emmanuel Dreyfus',
u'Copyright (c) 2000, 2001 WIDE Project.',
u'Copyright (c) 2004 SuSE Linux AG, Nuernberg, Germany.',
u'Copyright (c) 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002 and 2003 WIDE Project.',
u'Copyright 2000 Wasabi Systems, Inc.',
u'Copyright (c) 2005 International Business Machines Corporation',
u'Copyright (c) 2005 by Trusted Computer Solutions, Inc.',
u'Copyright 2000 Aaron D. Gifford.',
u'Copyright (c) 1995, 1996, 1997, 1998, and 1999 WIDE Project.',
u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
u'Copyright (c) 1991, 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_include_glibc_notice(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-include-glibc/NOTICE')
expected = [
u'Copyright (c) 1991, 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_libipsec_ipsec_dump_policy_c(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-libipsec/ipsec_dump_policy.c')
expected = [
u'Copyright (c) 1995, 1996, 1997, 1998, and 1999 WIDE Project.',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_libipsec_ipsec_set_policy_3(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-libipsec/ipsec_set_policy.3')
expected = [
u'Copyright (c) 1995, 1996, 1997, 1998, and 1999 WIDE Project.',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_libipsec_key_debug_c(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-libipsec/key_debug.c')
expected = [
u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_libipsec_notice(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-libipsec/NOTICE')
expected = [
u'Copyright (c) 1995, 1996, 1997, 1998, and 1999 WIDE Project.',
u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_libipsec_policy_parse_y(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-libipsec/policy_parse.y')
expected = [
u'Copyright (c) 1995, 1996, 1997, 1998, and 1999 WIDE Project.',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_racoon_cfparse_y(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/cfparse.y')
expected = [
u'Copyright (c) 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002 and 2003 WIDE Project.',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_racoon_dump_h(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/dump.h')
expected = [
u'Copyright (c) 2000 WIDE Project.',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_racoon_evt_c(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/evt.c')
expected = [
u'Copyright (c) 2004 Emmanuel Dreyfus',
u'Copyright (c) 2008 Timo Teras',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_racoon_gcmalloc_h(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/gcmalloc.h')
expected = [
u'Copyright (c) 2000, 2001 WIDE Project.',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_racoon_genlist_c_extra_contributed(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/genlist.c')
expected = [
u'Copyright (c) 2004 SuSE Linux AG, Nuernberg, Germany.',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_racoon_grabmyaddr_c(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/grabmyaddr.c')
expected = [
u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
u'Copyright (c) 2008 Timo Teras <timo.teras@iki.fi>.',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_racoon_gssapi_c(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/gssapi.c')
expected = [
u'Copyright 2000 Wasabi Systems, Inc.',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_racoon_handler_h(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/handler.h')
expected = [
u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_racoon_isakmp_cfg_c(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/isakmp_cfg.c')
expected = [
u'Copyright (c) 2004-2006 Emmanuel Dreyfus',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_racoon_isakmp_cfg_h(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/isakmp_cfg.h')
expected = [
u'Copyright (c) 2004 Emmanuel Dreyfus',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_racoon_isakmp_xauth_c(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/isakmp_xauth.c')
expected = [
u'Copyright (c) 2004-2005 Emmanuel Dreyfus',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_racoon_notice_extra_contributed(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/NOTICE')
expected = [
u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
u'Copyright (c) 2004 Emmanuel Dreyfus',
u'Copyright (c) 2004-2006 Emmanuel Dreyfus',
u'Copyright (c) 2000 WIDE Project.',
u'Copyright (c) 2004-2005 Emmanuel Dreyfus',
u'Copyright (c) 2000, 2001 WIDE Project.',
u'Copyright (c) 2004 SuSE Linux AG, Nuernberg, Germany.',
u'Copyright (c) 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002 and 2003 WIDE Project.',
u'Copyright 2000 Wasabi Systems, Inc.',
u'Copyright (c) 2005 International Business Machines Corporation',
u'Copyright (c) 2005 by Trusted Computer Solutions, Inc.',
u'Copyright 2000 Aaron D. Gifford.',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_racoon_plainrsa_gen_8(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/plainrsa-gen.8')
expected = [
u'Copyright (c) 2004 SuSE Linux AG, Nuernberg, Germany.',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_racoon_racoon_8(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/racoon.8')
expected = [
u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_racoon_racoonctl_8(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/racoonctl.8')
expected = [
u'Copyright (c) 2004 Emmanuel Dreyfus',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_racoon_racoonctl_c(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/racoonctl.c')
expected = [
u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
u'Copyright (c) 2008 Timo Teras.',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_racoon_security_c(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/security.c')
expected = [
u'Copyright (c) 2005 International Business Machines Corporation',
u'Copyright (c) 2005 by Trusted Computer Solutions, Inc.',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_racoon_missing_crypto_sha2_sha2_c(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-racoon-missing-crypto-sha2/sha2.c')
expected = [
u'Copyright 2000 Aaron D. Gifford.',
]
check_detection(expected, test_file)
def test_ics_ipsec_tools_src_racoon_missing_crypto_sha2_sha2_h(self):
test_file = self.get_test_loc('ics/ipsec-tools-src-racoon-missing-crypto-sha2/sha2.h')
expected = [
u'Copyright 2000 Aaron D. Gifford.',
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libip6t_reject_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libip6t_REJECT.c')
expected = [
u'(c) 2000 Jozsef Kadlecsik <kadlec@blackhole.kfki.hu>',
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libipt_clusterip_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libipt_CLUSTERIP.c')
expected = [
u'(c) 2003 by Harald Welte <laforge@gnumonks.org>',
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libipt_ecn_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libipt_ECN.c')
expected = [
u'(c) 2002 by Harald Welte <laforge@gnumonks.org>',
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libipt_ttl_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libipt_TTL.c')
expected = [
u'(c) 2000 by Harald Welte <laforge@gnumonks.org>',
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libxt_audit_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libxt_AUDIT.c')
expected = [
u'(c) 2010-2011, Thomas Graf <tgraf@redhat.com>',
u'(c) 2010-2011, Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libxt_checksum_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libxt_CHECKSUM.c')
expected = [
u'(c) 2002 by Harald Welte <laforge@gnumonks.org>',
u'(c) 2010 by Red Hat, Inc',
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libxt_cluster_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libxt_cluster.c')
expected = [
u'(c) 2009 by Pablo Neira Ayuso <pablo@netfilter.org>',
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libxt_connmark_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libxt_connmark.c')
expected = [
u'(c) 2002,2004 MARA Systems AB',
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libxt_connsecmark_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libxt_CONNSECMARK.c')
expected = [
u'Copyright (c) 2006 Red Hat, Inc., James Morris <jmorris@redhat.com>',
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libxt_conntrack_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libxt_conntrack.c')
expected = [
u'(c) 2001 Marc Boucher (marc@mbsi.ca).',
u'Copyright (c) CC Computer Consultants GmbH, 2007 - 2008 Jan Engelhardt <jengelh@computergmbh.de>',
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libxt_dccp_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libxt_dccp.c')
expected = [
u'(c) 2005 by Harald Welte <laforge@netfilter.org>',
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libxt_devgroup_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libxt_devgroup.c')
expected = [
u'Copyright (c) 2011 Patrick McHardy <kaber@trash.net>',
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libxt_hashlimit_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libxt_hashlimit.c')
expected = [
u'(c) 2003-2004 by Harald Welte <laforge@netfilter.org>',
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libxt_idletimer_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libxt_IDLETIMER.c')
expected = [
u'Copyright (c) 2010 Nokia Corporation.',
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libxt_led_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libxt_LED.c')
expected = [
u'(c) 2008 Adam Nielsen <a.nielsen@shikadi.net>',
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libxt_osf_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libxt_osf.c')
expected = [
u'Copyright (c) 2003+ Evgeniy Polyakov <zbr@ioremap.net>',
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libxt_owner_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libxt_owner.c')
expected = [
u'Copyright (c) CC Computer Consultants GmbH, 2007 - 2008 Jan Engelhardt <jengelh@computergmbh.de>'
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libxt_set_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libxt_SET.c')
expected = [
u'Copyright (c) 2000-2002 Joakim Axelsson <gozem@linux.nu> Patrick Schaaf <bof@bof.de> Martin Josefsson <gandalf@wlug.westbo.se>',
u'Copyright (c) 2003-2010 Jozsef Kadlecsik <kadlec@blackhole.kfki.hu>',
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libxt_socket_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libxt_socket.c')
expected = [
u'Copyright (c) 2007 BalaBit IT Ltd.',
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libxt_string_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libxt_string.c')
expected = [
u'Copyright (c) 2000 Emmanuel Roger <winfield@freegates.be>',
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libxt_tcpoptstrip_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libxt_TCPOPTSTRIP.c')
expected = [
u'Copyright (c) 2007 Sven Schnelle <svens@bitebene.org>',
u'Copyright (c) CC Computer Consultants GmbH, 2007 Jan Engelhardt <jengelh@computergmbh.de>',
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libxt_tee_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libxt_TEE.c')
expected = [
u'Copyright (c) Sebastian Claen , 2007 Jan Engelhardt',
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libxt_time_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libxt_time.c')
expected = [
u'Copyright (c) CC Computer Consultants GmbH, 2007',
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libxt_tproxy_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libxt_TPROXY.c')
expected = [
u'Copyright (c) 2002-2008 BalaBit IT Ltd.',
]
check_detection(expected, test_file)
def test_ics_iptables_extensions_libxt_u32_c(self):
test_file = self.get_test_loc('ics/iptables-extensions/libxt_u32.c')
expected = [
u'(c) 2002 by Don Cohen <don-netf@isis.cs3-inc.com>',
u'Copyright (c) CC Computer Consultants GmbH, 2007',
]
check_detection(expected, test_file)
def test_ics_iptables_include_libipq_libipq_h(self):
test_file = self.get_test_loc('ics/iptables-include-libipq/libipq.h')
expected = [
u'Copyright (c) 2000-2001 Netfilter Core Team',
]
check_detection(expected, test_file)
def test_ics_iptables_include_linux_netfilter_ipv6_h(self):
test_file = self.get_test_loc('ics/iptables-include-linux/netfilter_ipv6.h')
expected = [
u'(c) 1998 Rusty Russell',
u'(c) 1999 David Jeffery',
]
check_detection(expected, test_file)
def test_ics_iptables_include_linux_netfilter_xt_audit_h(self):
test_file = self.get_test_loc('ics/iptables-include-linux-netfilter/xt_AUDIT.h')
expected = [
u'(c) 2010-2011 Thomas Graf <tgraf@redhat.com>',
u'(c) 2010-2011 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_iptables_include_linux_netfilter_xt_checksum_h(self):
test_file = self.get_test_loc('ics/iptables-include-linux-netfilter/xt_CHECKSUM.h')
expected = [
u'(c) 2002 by Harald Welte <laforge@gnumonks.org>',
u'(c) 2010 Red Hat Inc',
]
check_detection(expected, test_file)
def test_ics_iptables_include_linux_netfilter_xt_conntrack_h(self):
test_file = self.get_test_loc('ics/iptables-include-linux-netfilter/xt_conntrack.h')
expected = [
u'(c) 2001 Marc Boucher (marc@mbsi.ca).',
]
check_detection(expected, test_file)
def test_ics_iptables_include_linux_netfilter_xt_dscp_h(self):
test_file = self.get_test_loc('ics/iptables-include-linux-netfilter/xt_DSCP.h')
expected = [
u'(c) 2002 Harald Welte <laforge@gnumonks.org>',
]
check_detection(expected, test_file)
def test_ics_iptables_include_linux_netfilter_xt_idletimer_h(self):
test_file = self.get_test_loc('ics/iptables-include-linux-netfilter/xt_IDLETIMER.h')
expected = [
u'Copyright (c) 2004, 2010 Nokia Corporation',
]
check_detection(expected, test_file)
def test_ics_iptables_include_linux_netfilter_xt_nfqueue_h(self):
test_file = self.get_test_loc('ics/iptables-include-linux-netfilter/xt_NFQUEUE.h')
expected = [
u'(c) 2005 Harald Welte <laforge@netfilter.org>',
]
check_detection(expected, test_file)
def test_ics_iptables_include_linux_netfilter_xt_osf_h(self):
test_file = self.get_test_loc('ics/iptables-include-linux-netfilter/xt_osf.h')
expected = [
u'Copyright (c) 2003+ Evgeniy Polyakov <johnpol@2ka.mxt.ru>',
]
check_detection(expected, test_file)
def test_ics_iptables_include_linux_netfilter_ipv4_ipt_ttl_h(self):
test_file = self.get_test_loc('ics/iptables-include-linux-netfilter_ipv4/ipt_ttl.h')
expected = [
u'(c) 2000 by Harald Welte <laforge@gnumonks.org>',
]
check_detection(expected, test_file)
def test_ics_iptables_include_linux_netfilter_ipv4_ipt_ulog_h(self):
test_file = self.get_test_loc('ics/iptables-include-linux-netfilter_ipv4/ipt_ULOG.h')
expected = [
u'(c) 2000-2002 by Harald Welte <laforge@gnumonks.org>',
]
check_detection(expected, test_file)
def test_ics_iptables_iptables_ip6tables_standalone_c(self):
test_file = self.get_test_loc('ics/iptables-iptables/ip6tables-standalone.c')
expected = [
u'(c) 2000-2002 by the netfilter coreteam <coreteam@netfilter.org> Paul Rusty Russell <rusty@rustcorp.com.au> Marc Boucher <marc+nf@mbsi.ca>',
]
check_detection(expected, test_file)
def test_ics_iptables_iptables_iptables_xslt(self):
test_file = self.get_test_loc('ics/iptables-iptables/iptables.xslt')
expected = [
u'Copyright 2006 UfoMechanic Author azez@ufomechanic.net',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_iptables_iptables_iptables_xslt_extra_author(self):
test_file = self.get_test_loc('ics/iptables-iptables/iptables.xslt')
expected = [
u'Copyright 2006 UfoMechanic',
]
check_detection(expected, test_file)
def test_ics_iptables_iptables_iptables_apply(self):
test_file = self.get_test_loc('ics/iptables-iptables/iptables-apply')
expected = [
u'Copyright (c) Martin F. Krafft <madduck@madduck.net>',
]
check_detection(expected, test_file)
def test_ics_iptables_iptables_iptables_apply_8(self):
test_file = self.get_test_loc('ics/iptables-iptables/iptables-apply.8')
expected = [
u'copyright by Martin F. Krafft.',
]
check_detection(expected, test_file)
def test_ics_iptables_iptables_iptables_restore_c(self):
test_file = self.get_test_loc('ics/iptables-iptables/iptables-restore.c')
expected = [
u'(c) 2000-2002 by Harald Welte <laforge@gnumonks.org>',
]
check_detection(expected, test_file)
def test_ics_iptables_iptables_iptables_save_c(self):
test_file = self.get_test_loc('ics/iptables-iptables/iptables-save.c')
expected = [
u'(c) 1999 by Paul Rusty Russell <rusty@rustcorp.com.au>',
u'(c) 2000-2002 by Harald Welte <laforge@gnumonks.org>',
]
check_detection(expected, test_file)
def test_ics_iptables_iptables_iptables_xml_c(self):
test_file = self.get_test_loc('ics/iptables-iptables/iptables-xml.c')
expected = [
u'(c) 2006 Ufo Mechanic <azez@ufomechanic.net>',
u'(c) 2000-2002 by Harald Welte <laforge@gnumonks.org>',
]
check_detection(expected, test_file)
def test_ics_iptables_iptables_xtables_c_trail_name(self):
test_file = self.get_test_loc('ics/iptables-iptables/xtables.c')
expected = [
u'(c) 2000-2006 by the netfilter coreteam <coreteam@netfilter.org>',
]
check_detection(expected, test_file)
def test_ics_iptables_iptables_xtoptions_c(self):
test_file = self.get_test_loc('ics/iptables-iptables/xtoptions.c')
expected = [
u'Copyright (c) Jan Engelhardt, 2011',
]
check_detection(expected, test_file)
def test_ics_iptables_libipq_ipq_create_handle_3(self):
test_file = self.get_test_loc('ics/iptables-libipq/ipq_create_handle.3')
expected = [
u'Copyright (c) 2000-2001 Netfilter Core Team',
u'Copyright (c) 2000-2001 Netfilter Core Team.',
]
check_detection(expected, test_file)
def test_ics_iptables_libipq_ipq_errstr_3(self):
test_file = self.get_test_loc('ics/iptables-libipq/ipq_errstr.3')
expected = [
u'Copyright (c) 2000 Netfilter Core Team',
u'Copyright (c) 2000-2001 Netfilter Core Team.',
]
check_detection(expected, test_file)
def test_ics_iptables_libiptc_libip4tc_c(self):
test_file = self.get_test_loc('ics/iptables-libiptc/libip4tc.c')
expected = [
u'(c) 1999 Paul Rusty Russell',
]
check_detection(expected, test_file)
def test_ics_iptables_libiptc_libiptc_c(self):
test_file = self.get_test_loc('ics/iptables-libiptc/libiptc.c')
expected = [
u'(c) 1999 Paul Rusty Russell',
u'(c) 2000-2004 by the Netfilter Core Team <coreteam@netfilter.org>',
]
check_detection(expected, test_file)
def test_ics_iptables_m4_ax_check_linker_flags_m4(self):
test_file = self.get_test_loc('ics/iptables-m4/ax_check_linker_flags.m4')
expected = [
u'Copyright (c) 2009 Mike Frysinger <vapier@gentoo.org>',
u'Copyright (c) 2009 Steven G. Johnson <stevenj@alum.mit.edu>',
u'Copyright (c) 2009 Matteo Frigo',
]
check_detection(expected, test_file)
def test_ics_iptables_utils_nfnl_osf_c(self):
test_file = self.get_test_loc('ics/iptables-utils/nfnl_osf.c')
expected = [
u'Copyright (c) 2005 Evgeniy Polyakov <johnpol@2ka.mxt.ru>',
]
check_detection(expected, test_file)
def test_ics_iptables_utils_pf_os(self):
test_file = self.get_test_loc('ics/iptables-utils/pf.os')
expected = [
u'(c) Copyright 2000-2003 by Michal Zalewski <lcamtuf@coredump.cx>',
u'(c) Copyright 2003 by Mike Frantzen <frantzen@w4g.org>',
]
check_detection(expected, test_file)
def test_ics_javasqlite_src_main_native_sqlite_jni_defs_h(self):
test_file = self.get_test_loc('ics/javasqlite-src-main-native/sqlite_jni_defs.h')
expected = [
u'Copyright 2007, The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_javassist_license_html(self):
test_file = self.get_test_loc('ics/javassist/License.html')
expected = [
u'Copyright (c) 1999-2010 Shigeru Chiba.',
]
check_detection(expected, test_file)
def test_ics_javassist_notice(self):
test_file = self.get_test_loc('ics/javassist/NOTICE')
expected = [
u'Copyright (c) 1999-2010 Shigeru Chiba.',
]
check_detection(expected, test_file)
def test_ics_javassist_readme_html(self):
test_file = self.get_test_loc('ics/javassist/Readme.html')
expected = [
u'Copyright (c) 1999-2010 by Shigeru Chiba',
u'Copyright (c) 1999-2010 Shigeru Chiba.',
u'Copyright (c) 1999-2010 Shigeru Chiba.',
]
check_detection(expected, test_file)
def test_ics_javassist_sample_preproc_assistant_java(self):
test_file = self.get_test_loc('ics/javassist-sample-preproc/Assistant.java')
expected = [
u'Copyright (c) 1999-2005 Shigeru Chiba.',
]
check_detection(expected, test_file)
def test_ics_javassist_src_main_javassist_bytearrayclasspath_java(self):
test_file = self.get_test_loc('ics/javassist-src-main-javassist/ByteArrayClassPath.java')
expected = [
u'Copyright (c) 1999-2007 Shigeru Chiba.',
]
check_detection(expected, test_file)
def test_ics_javassist_src_main_javassist_ctclass_java(self):
test_file = self.get_test_loc('ics/javassist-src-main-javassist/CtClass.java')
expected = [
u'Copyright (c) 1999-2007 Shigeru Chiba.',
u'(c) 1999-2010 Shigeru Chiba.',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_javassist_src_main_javassist_ctclass_java_lead_copy(self):
test_file = self.get_test_loc('ics/javassist-src-main-javassist/CtClass.java')
expected = [
u'Copyright (c) 1999-2007 Shigeru Chiba.',
u'Copyright (c) 1999-2010 Shigeru Chiba.',
]
check_detection(expected, test_file)
def test_ics_javassist_src_main_javassist_bytecode_bytestream_java(self):
test_file = self.get_test_loc('ics/javassist-src-main-javassist-bytecode/ByteStream.java')
expected = [
u'Copyright (c) 1999-2010 Shigeru Chiba.',
]
check_detection(expected, test_file)
def test_ics_javassist_src_main_javassist_bytecode_instructionprinter_java(self):
test_file = self.get_test_loc('ics/javassist-src-main-javassist-bytecode/InstructionPrinter.java')
expected = [
u'Copyright (c) 1999-2007 Shigeru Chiba, and others.',
]
check_detection(expected, test_file)
def test_ics_javassist_src_main_javassist_bytecode_annotation_annotation_java(self):
test_file = self.get_test_loc('ics/javassist-src-main-javassist-bytecode-annotation/Annotation.java')
expected = [
u'Copyright (c) 2004 Bill Burke.',
]
check_detection(expected, test_file)
def test_ics_javassist_src_main_javassist_bytecode_annotation_nosuchclasserror_java(self):
test_file = self.get_test_loc('ics/javassist-src-main-javassist-bytecode-annotation/NoSuchClassError.java')
expected = [
u'Copyright (c) 1999-2009 Shigeru Chiba.',
]
check_detection(expected, test_file)
def test_ics_javassist_tutorial_tutorial_html(self):
test_file = self.get_test_loc('ics/javassist-tutorial/tutorial.html')
expected = [
u'Copyright (c) 2000-2010 by Shigeru Chiba',
]
check_detection(expected, test_file)
def test_ics_jdiff_src_jdiff_diffmyers_java(self):
test_file = self.get_test_loc('ics/jdiff-src-jdiff/DiffMyers.java')
expected = [
u'Copyright (c) 2000 Business Management Systems, Inc.',
]
check_detection(expected, test_file)
def test_ics_jhead_main_c(self):
test_file = self.get_test_loc('ics/jhead/main.c')
expected = [
u'Copyright (c) 2008, The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_jpeg_ansi2knr_c(self):
test_file = self.get_test_loc('ics/jpeg/ansi2knr.c')
expected = [
u'Copyright (c) 1988 Richard M. Stallman',
u'Copyright (c) 1989 Aladdin Enterprises.',
]
check_detection(expected, test_file)
def test_ics_jpeg_cderror_h(self):
test_file = self.get_test_loc('ics/jpeg/cderror.h')
expected = [
u'Copyright (c) 1994-1997, Thomas G. Lane.',
]
check_detection(expected, test_file)
def test_ics_jpeg_cdjpeg_c(self):
test_file = self.get_test_loc('ics/jpeg/cdjpeg.c')
expected = [
u'Copyright (c) 1991-1997, Thomas G. Lane.',
]
check_detection(expected, test_file)
def test_ics_jpeg_cjpeg_c(self):
test_file = self.get_test_loc('ics/jpeg/cjpeg.c')
expected = [
u'Copyright (c) 1991-1998, Thomas G. Lane.',
]
check_detection(expected, test_file)
def test_ics_jpeg_ckconfig_c(self):
test_file = self.get_test_loc('ics/jpeg/ckconfig.c')
expected = [
u'Copyright (c) 1991-1994, Thomas G. Lane.',
]
check_detection(expected, test_file)
def test_ics_jpeg_coderules_doc(self):
test_file = self.get_test_loc('ics/jpeg/coderules.doc')
expected = [
u'Copyright (c) 1991-1996, Thomas G. Lane.',
]
check_detection(expected, test_file)
def test_ics_jpeg_config_guess(self):
test_file = self.get_test_loc('ics/jpeg/config.guess')
expected = [
u'Copyright (c) 1992, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_jpeg_config_sub(self):
test_file = self.get_test_loc('ics/jpeg/config.sub')
expected = [
u'Copyright (c) 1991, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_jpeg_configure(self):
test_file = self.get_test_loc('ics/jpeg/configure')
expected = [
u'Copyright (c) 1992, 93, 94, 95, 96 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_jpeg_filelist_doc(self):
test_file = self.get_test_loc('ics/jpeg/filelist.doc')
expected = [
u'Copyright (c) 1994-1998, Thomas G. Lane.',
]
check_detection(expected, test_file)
def test_ics_jpeg_install_doc(self):
test_file = self.get_test_loc('ics/jpeg/install.doc')
expected = [
u'Copyright (c) 1991-1998, Thomas G. Lane.',
]
check_detection(expected, test_file)
def test_ics_jpeg_jcapimin_c(self):
test_file = self.get_test_loc('ics/jpeg/jcapimin.c')
expected = [
u'Copyright (c) 1994-1998, Thomas G. Lane.',
]
check_detection(expected, test_file)
def test_ics_jpeg_jcapistd_c(self):
test_file = self.get_test_loc('ics/jpeg/jcapistd.c')
expected = [
u'Copyright (c) 1994-1996, Thomas G. Lane.',
]
check_detection(expected, test_file)
def test_ics_jpeg_jccolor_c(self):
test_file = self.get_test_loc('ics/jpeg/jccolor.c')
expected = [
u'Copyright (c) 1991-1996, Thomas G. Lane.',
]
check_detection(expected, test_file)
def test_ics_jpeg_jcphuff_c(self):
test_file = self.get_test_loc('ics/jpeg/jcphuff.c')
expected = [
u'Copyright (c) 1995-1997, Thomas G. Lane.',
]
check_detection(expected, test_file)
def test_ics_jpeg_jctrans_c(self):
test_file = self.get_test_loc('ics/jpeg/jctrans.c')
expected = [
u'Copyright (c) 1995-1998, Thomas G. Lane.',
]
check_detection(expected, test_file)
def test_ics_jpeg_jmem_android_c(self):
test_file = self.get_test_loc('ics/jpeg/jmem-android.c')
expected = [
u'Copyright (c) 2007-2008 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_jpeg_jmemansi_c(self):
test_file = self.get_test_loc('ics/jpeg/jmemansi.c')
expected = [
u'Copyright (c) 1992-1996, Thomas G. Lane.',
]
check_detection(expected, test_file)
def test_ics_jpeg_jmemdos_c(self):
test_file = self.get_test_loc('ics/jpeg/jmemdos.c')
expected = [
u'Copyright (c) 1992-1997, Thomas G. Lane.',
]
check_detection(expected, test_file)
def test_ics_jpeg_jversion_h(self):
test_file = self.get_test_loc('ics/jpeg/jversion.h')
expected = [
u'Copyright (c) 1991-1998, Thomas G. Lane.',
u'Copyright (c) 1998, Thomas G. Lane',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_jpeg_ltconfig(self):
test_file = self.get_test_loc('ics/jpeg/ltconfig')
expected = [
u'Copyright (c) 1996-1998 Free Software Foundation, Inc. Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
u'Copyright (c) 1996-1998 Free Software Foundation, Inc. Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_jpeg_ltmain_sh(self):
test_file = self.get_test_loc('ics/jpeg/ltmain.sh')
expected = [
u'Copyright (c) 1996-1998 Free Software Foundation, Inc. Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
]
check_detection(expected, test_file)
def test_ics_jpeg_notice(self):
test_file = self.get_test_loc('ics/jpeg/NOTICE')
expected = [
u'copyright (c) 1991-1998, Thomas G. Lane.',
]
check_detection(expected, test_file)
def test_ics_jpeg_rdcolmap_c(self):
test_file = self.get_test_loc('ics/jpeg/rdcolmap.c')
expected = [
u'Copyright (c) 1994-1996, Thomas G. Lane.',
u'Copyright (c) 1988 by Jef Poskanzer.',
]
check_detection(expected, test_file)
def test_ics_jpeg_rdppm_c(self):
test_file = self.get_test_loc('ics/jpeg/rdppm.c')
expected = [
u'Copyright (c) 1991-1997, Thomas G. Lane.',
u'Copyright (c) 1988 by Jef Poskanzer.',
]
check_detection(expected, test_file)
def test_ics_jpeg_readme(self):
test_file = self.get_test_loc('ics/jpeg/README')
expected = [
u'copyright (c) 1991-1998, Thomas G. Lane.',
u'copyright by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_jpeg_structure_doc(self):
test_file = self.get_test_loc('ics/jpeg/structure.doc')
expected = [
u'Copyright (c) 1991-1995, Thomas G. Lane.',
]
check_detection(expected, test_file)
def test_ics_jpeg_transupp_c(self):
test_file = self.get_test_loc('ics/jpeg/transupp.c')
expected = [
u'Copyright (c) 1997, Thomas G. Lane.',
]
check_detection(expected, test_file)
def test_ics_jpeg_wrgif_c(self):
test_file = self.get_test_loc('ics/jpeg/wrgif.c')
expected = [
u'Copyright (c) 1991-1997, Thomas G. Lane.',
u'Copyright (c) 1989 by Jef Poskanzer.',
]
check_detection(expected, test_file)
def test_ics_jpeg_wrjpgcom_c(self):
test_file = self.get_test_loc('ics/jpeg/wrjpgcom.c')
expected = [
u'Copyright (c) 1994-1997, Thomas G. Lane.',
]
check_detection(expected, test_file)
def test_ics_jsr305_notice_trail_name(self):
test_file = self.get_test_loc('ics/jsr305/NOTICE')
expected = [
u'Copyright (c) 2007-2009, JSR305 expert group',
]
check_detection(expected, test_file)
def test_ics_jsr305_ri_src_main_java_javax_annotation_concurrent_guardedby_java(self):
test_file = self.get_test_loc('ics/jsr305-ri-src-main-java-javax-annotation-concurrent/GuardedBy.java')
expected = [
u'Copyright (c) 2005 Brian Goetz',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_atomic_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/atomic.h')
expected = [
u'Copyright (c) 1996 Russell King.',
u'Copyright (c) 2002 Deep Blue Solutions Ltd.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_bitops_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/bitops.h')
expected = [
u'Copyright 1995, Russell King. Various',
u'Copyright 2001, Nicolas Pitre',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_kernel_headers_original_asm_arm_bitops_h_extra_various(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/bitops.h')
expected = [
u'Copyright 1995, Russell King.',
u'Copyright 2001, Nicolas Pitre',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_cacheflush_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/cacheflush.h')
expected = [
u'Copyright (c) 1999-2002 Russell King',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_delay_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/delay.h')
expected = [
u'Copyright (c) 1995-2004 Russell King',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_domain_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/domain.h')
expected = [
u'Copyright (c) 1999 Russell King.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_fpstate_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/fpstate.h')
expected = [
u'Copyright (c) 1995 Russell King',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_glue_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/glue.h')
expected = [
u'Copyright (c) 1997-1999 Russell King',
u'Copyright (c) 2000-2002 Deep Blue Solutions Ltd.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_hardware_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/hardware.h')
expected = [
u'Copyright (c) 1996 Russell King',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_ide_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/ide.h')
expected = [
u'Copyright (c) 1994-1996 Linus Torvalds',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_io_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/io.h')
expected = [
u'Copyright (c) 1996-2000 Russell King',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_locks_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/locks.h')
expected = [
u'Copyright (c) 2000 Russell King',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_memory_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/memory.h')
expected = [
u'Copyright (c) 2000-2002 Russell King',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_mtd_xip_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/mtd-xip.h')
expected = [
u'Copyright (c) 2004 MontaVista Software, Inc.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_page_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/page.h')
expected = [
u'Copyright (c) 1995-2003 Russell King',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_param_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/param.h')
expected = [
u'Copyright (c) 1995-1999 Russell King',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_pgalloc_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/pgalloc.h')
expected = [
u'Copyright (c) 2000-2001 Russell King',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_pgtable_hwdef_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/pgtable-hwdef.h')
expected = [
u'Copyright (c) 1995-2002 Russell King',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_posix_types_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/posix_types.h')
expected = [
u'Copyright (c) 1996-1998 Russell King.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_proc_fns_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/proc-fns.h')
expected = [
u'Copyright (c) 1997-1999 Russell King',
u'Copyright (c) 2000 Deep Blue Solutions Ltd',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_procinfo_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/procinfo.h')
expected = [
u'Copyright (c) 1996-1999 Russell King',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_ptrace_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/ptrace.h')
expected = [
u'Copyright (c) 1996-2003 Russell King',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_sizes_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/sizes.h')
expected = [
u'Copyright (c) ARM Limited 1998.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_smp_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/smp.h')
expected = [
u'Copyright (c) 2004-2005 ARM Ltd.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_thread_info_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/thread_info.h')
expected = [
u'Copyright (c) 2002 Russell King.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_timex_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/timex.h')
expected = [
u'Copyright (c) 1997,1998 Russell King',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_tlbflush_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/tlbflush.h')
expected = [
u'Copyright (c) 1999-2003 Russell King',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_unistd_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/unistd.h')
expected = [
u'Copyright (c) 2001-2005 Russell King',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_arch_board_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/board.h')
expected = [
u'Copyright (c) 2004 Nokia Corporation',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_arch_board_perseus2_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/board-perseus2.h')
expected = [
u'Copyright 2003 by Texas Instruments Incorporated OMAP730 / Perseus2',
u'Copyright (c) 2001 RidgeRun, Inc.',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_kernel_headers_original_asm_arm_arch_board_perseus2_h_extra_name(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/board-perseus2.h')
expected = [
u'Copyright 2003 by Texas Instruments Incorporated',
u'Copyright (c) 2001 RidgeRun, Inc. (http://www.ridgerun.com)',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_arch_dma_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/dma.h')
expected = [
u'Copyright (c) 2003 Nokia Corporation',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_arch_fpga_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/fpga.h')
expected = [
u'Copyright (c) 2001 RidgeRun, Inc.',
u'Copyright (c) 2002 MontaVista Software, Inc.',
u'Copyright (c) 2004 Nokia Corporation',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_arch_gpio_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/gpio.h')
expected = [
u'Copyright (c) 2003-2005 Nokia Corporation',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_arch_gpio_switch_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/gpio-switch.h')
expected = [
u'Copyright (c) 2006 Nokia Corporation',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_arch_hardware_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/hardware.h')
expected = [
u'Copyright (c) 2001 RidgeRun, Inc. Author RidgeRun, Inc.',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_kernel_headers_original_asm_arm_arch_hardware_h_extra_author(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/hardware.h')
expected = [
u'Copyright (c) 2001 RidgeRun, Inc.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_arch_io_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/io.h')
expected = [
u'Copyright (c) 1997-1999 Russell King',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_arch_irqs_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/irqs.h')
expected = [
u'Copyright (c) Greg Lonnon 2001',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_arch_mcbsp_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/mcbsp.h')
expected = [
u'Copyright (c) 2002 RidgeRun, Inc. Author Steve Johnson',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_kernel_headers_original_asm_arm_arch_mcbsp_h_extra_author(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/mcbsp.h')
expected = [
u'Copyright (c) 2002 RidgeRun, Inc.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_arch_memory_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/memory.h')
expected = [
u'Copyright (c) 2000 RidgeRun, Inc.',
u'Copyright (c) 1999 ARM Limited',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_arch_mtd_xip_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/mtd-xip.h')
expected = [
u'(c) 2005 MontaVista Software, Inc.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_arch_mux_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/mux.h')
expected = [
u'Copyright (c) 2003 - 2005 Nokia Corporation',
u'Copyright (c) 2004 Texas Instruments',
u'Copyright (c) 2004 Texas Instruments',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_arch_timex_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/timex.h')
expected = [
u'Copyright (c) 2000 RidgeRun, Inc.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_arm_arch_vmalloc_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/vmalloc.h')
expected = [
u'Copyright (c) 2000 Russell King.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_generic_tlb_h_trail_other(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-generic/tlb.h')
expected = [
u'Copyright 2001 Red Hat, Inc.',
u'Copyright Linus Torvalds and others.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_asm_generic_topology_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-asm-generic/topology.h')
expected = [
u'Copyright (c) 2002, IBM Corp.',
]
check_detection(expected, test_file)
    # --- Test data: ics/kernel-headers-original-asm-x86/ (and asm-x86-xen/) ---
    def test_ics_kernel_headers_original_asm_x86_acpi_32_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/acpi_32.h')
        expected = [
            u'Copyright (c) 2001 Paul Diefenbaugh <paul.s.diefenbaugh@intel.com>',
            u'Copyright (c) 2001 Patrick Mochel <mochel@osdl.org>',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_asm_x86_bitops_32_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/bitops_32.h')
        expected = [
            u'Copyright 1992, Linus Torvalds.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_asm_x86_delay_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/delay.h')
        expected = [
            u'Copyright (c) 1993 Linus Torvalds',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_asm_x86_fixmap_32_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/fixmap_32.h')
        expected = [
            u'Copyright (c) 1998 Ingo Molnar',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_asm_x86_genapic_32_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/genapic_32.h')
        expected = [
            u'Copyright 2003 Andi Kleen, SuSE Labs.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_asm_x86_highmem_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/highmem.h')
        expected = [
            u'Copyright (c) 1999 Gerhard Wichert, Siemens AG',
            u'Copyright (c) 1999 Ingo Molnar <mingo@redhat.com>',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_asm_x86_hw_irq_32_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/hw_irq_32.h')
        expected = [
            u'(c) 1992, 1993 Linus Torvalds',
            u'(c) 1997 Ingo Molnar',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_asm_x86_i387_32_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/i387_32.h')
        expected = [
            u'Copyright (c) 1994 Linus Torvalds',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_asm_x86_io_apic_32_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/io_apic_32.h')
        expected = [
            u'Copyright (c) 1997, 1998, 1999, 2000 Ingo Molnar',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_asm_x86_ist_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/ist.h')
        expected = [
            u'Copyright 2002 Andy Grover <andrew.grover@intel.com>',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_asm_x86_semaphore_32_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/semaphore_32.h')
        expected = [
            u'(c) Copyright 1996 Linus Torvalds',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_asm_x86_thread_info_32_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/thread_info_32.h')
        expected = [
            u'Copyright (c) 2002 David Howells (dhowells@redhat.com)',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_asm_x86_voyager_h(self):
        # NOTE(review): expected value is a bare year range with no holder;
        # this pins current detection output for this fixture as-is.
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/voyager.h')
        expected = [
            u'Copyright (c) 1999,2001',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_asm_x86_xen_hypercall_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86-xen/hypercall.h')
        expected = [
            u'Copyright (c) 2002-2004, K A Fraser',
        ]
        check_detection(expected, test_file)
    # --- Test data: ics/kernel-headers-original-linux/ (part 1: a1026.h .. loop.h) ---
    def test_ics_kernel_headers_original_linux_a1026_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/a1026.h')
        expected = [
            u'Copyright (c) 2009 HTC Corporation.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_aio_abi_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/aio_abi.h')
        expected = [
            u'Copyright 2000,2001,2002 Red Hat.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_android_alarm_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/android_alarm.h')
        expected = [
            u'Copyright 2006, The Android Open Source Project',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_android_pmem_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/android_pmem.h')
        expected = [
            u'Copyright (c) 2007 Google, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_android_power_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/android_power.h')
        expected = [
            u'Copyright 2005-2006, The Android Open Source Project',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_apm_bios_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/apm_bios.h')
        expected = [
            u'Copyright 1994-2001 Stephen Rothwell (sfr@canb.auug.org.au)',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_ashmem_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/ashmem.h')
        expected = [
            u'Copyright 2008 The Android Open Source Project',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_ata_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/ata.h')
        expected = [
            u'Copyright 2003-2004 Red Hat, Inc.',
            u'Copyright 2003-2004 Jeff Garzik',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_attribute_container_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/attribute_container.h')
        expected = [
            u'Copyright (c) 2005 - James Bottomley <James.Bottomley@steeleye.com>',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_auto_fs_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/auto_fs.h')
        expected = [
            u'Copyright 1997 Transmeta Corporation',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_binder_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/binder.h')
        expected = [
            u'Copyright (c) 2008 The Android Open Source Project',
            u'Copyright (c) 2005 Palmsource, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_bio_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/bio.h')
        expected = [
            u'Copyright (c) 2001 Jens Axboe <axboe@suse.de>',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_bmp085_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/bmp085.h')
        expected = [
            u'Copyright (c) 2010 Motorola, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_capella_cm3602_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/capella_cm3602.h')
        expected = [
            u'Copyright (c) 2009 Google, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_capi_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/capi.h')
        expected = [
            u'Copyright 1997 by Carsten Paeth (calle@calle.in-berlin.de)',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_cdrom_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/cdrom.h')
        expected = [
            u'Copyright (c) 1992 David Giller, rafetmad@oxy.edu 1994, 1995 Eberhard Moenkeberg, emoenke@gwdg.de 1996 David van Leeuwen',
        ]
        check_detection(expected, test_file)
    # Expected to fail: documents the desired fuller output (with the
    # final trailing email) that detection does not yet produce; the
    # passing test above pins the current, truncated output.
    @expectedFailure
    def test_ics_kernel_headers_original_linux_cdrom_h_trail_email(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/cdrom.h')
        expected = [
            u'Copyright (c) 1992 David Giller, rafetmad@oxy.edu 1994, 1995 Eberhard Moenkeberg, emoenke@gwdg.de 1996 David van Leeuwen, david@tm.tno.nl',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_clk_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/clk.h')
        expected = [
            u'Copyright (c) 2004 ARM Limited.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_coda_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/coda.h')
        expected = [
            u'Copyright (c) 1987-1999 Carnegie Mellon University',
            u'Copyright (c) 1987-1999 Carnegie Mellon University',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_coda_fs_i_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/coda_fs_i.h')
        expected = [
            u'Copyright (c) 1998 Carnegie Mellon University',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_completion_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/completion.h')
        expected = [
            u'(c) Copyright 2001 Linus Torvalds',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_cpcap_audio_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/cpcap_audio.h')
        expected = [
            u'Copyright (c) 2010 Google, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_device_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/device.h')
        expected = [
            u'Copyright (c) 2001-2003 Patrick Mochel <mochel@osdl.org>',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_dmaengine_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/dmaengine.h')
        expected = [
            u'Copyright (c) 2004 - 2006 Intel Corporation.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_dm_ioctl_h_trail_name(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/dm-ioctl.h')
        expected = [
            u'Copyright (c) 2001 - 2003 Sistina Software (UK) Limited.',
            u'Copyright (c) 2004 - 2005 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_efs_dir_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/efs_dir.h')
        expected = [
            u'Copyright (c) 1999 Al Smith',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_efs_fs_i_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/efs_fs_i.h')
        expected = [
            u'Copyright (c) 1999 Al Smith',
            u'(c) 1988 Silicon Graphics',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_ethtool_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/ethtool.h')
        expected = [
            u'Copyright (c) 1998 David S. Miller (davem@redhat.com)',
            u'Copyright 2001 Jeff Garzik <jgarzik@pobox.com>',
            u'Portions Copyright 2001 Sun Microsystems',
            u'Portions Copyright 2002 Intel',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_ext2_fs_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/ext2_fs.h')
        expected = [
            u'Copyright (c) 1992, 1993, 1994, 1995 Remy Card (card@masi.ibp.fr) Laboratoire MASI',
            u'Copyright (c) 1991, 1992 Linus Torvalds',
        ]
        check_detection(expected, test_file)
    # Expected to fail: desired output keeps the full trailing
    # institution name ("- Institut Blaise Pascal"); detection currently
    # truncates it (see the passing test above).
    @expectedFailure
    def test_ics_kernel_headers_original_linux_ext2_fs_h_trail_name(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/ext2_fs.h')
        expected = [
            u'Copyright (C) 1992, 1993, 1994, 1995 Remy Card (card@masi.ibp.fr) Laboratoire MASI - Institut Blaise Pascal',
            u'Copyright (c) 1991, 1992 Linus Torvalds',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_ext3_fs_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/ext3_fs.h')
        expected = [
            u'Copyright (c) 1992, 1993, 1994, 1995 Remy Card (card@masi.ibp.fr) Laboratoire MASI',
            u'Copyright (c) 1991, 1992 Linus Torvalds',
            u'(c) Daniel Phillips, 2001',
        ]
        check_detection(expected, test_file)
    # Expected to fail: same trailing-name truncation as the ext2 case.
    @expectedFailure
    def test_ics_kernel_headers_original_linux_ext3_fs_h_trail_name(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/ext3_fs.h')
        expected = [
            u'Copyright (C) 1992, 1993, 1994, 1995 Remy Card (card@masi.ibp.fr) Laboratoire MASI - Institut Blaise Pascal',
            u'Copyright (c) 1991, 1992 Linus Torvalds',
            u'(c) Daniel Phillips, 2001',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_ftape_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/ftape.h')
        expected = [
            u'Copyright (c) 1994-1996 Bas Laarhoven',
            u'(c) 1996-1997 Claus-Justus Heine.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_genhd_h_extra_generic(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/genhd.h')
        expected = [
            u'Copyright (c) 1992 Drew Eckhardt',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_hdsmart_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/hdsmart.h')
        expected = [
            u'Copyright (c) 1999-2000 Michael Cornwell <cornwell@acm.org>',
            u'Copyright (c) 2000 Andre Hedrick <andre@linux-ide.org>',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_hid_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/hid.h')
        expected = [
            u'Copyright (c) 1999 Andreas Gal',
            u'Copyright (c) 2000-2001 Vojtech Pavlik',
            u'Copyright (c) 2006-2007 Jiri Kosina',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_hidraw_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/hidraw.h')
        expected = [
            u'Copyright (c) 2007 Jiri Kosina',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_hil_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/hil.h')
        expected = [
            u'Copyright (c) 2001 Brian S. Julin',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_i2c_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/i2c.h')
        expected = [
            u'Copyright (c) 1995-2000 Simon G. Vogl',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_if_ppp_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/if_ppp.h')
        expected = [
            u'Copyright (c) 1989 Carnegie Mellon University.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_inotify_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/inotify.h')
        expected = [
            u'Copyright (c) 2005 John McCutchan',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_input_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/input.h')
        expected = [
            u'Copyright (c) 1999-2002 Vojtech Pavlik',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_ion_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/ion.h')
        expected = [
            u'Copyright (c) 2011 Google, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_ipmi_msgdefs_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/ipmi_msgdefs.h')
        expected = [
            u'Copyright 2002 MontaVista Software Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_jbd_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/jbd.h')
        expected = [
            u'Copyright 1998-2000 Red Hat, Inc',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_kernelcapi_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/kernelcapi.h')
        expected = [
            u'(c) Copyright 1997 by Carsten Paeth (calle@calle.in-berlin.de)',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_keychord_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/keychord.h')
        expected = [
            u'Copyright (c) 2008 Google, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_klist_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/klist.h')
        expected = [
            u'Copyright (c) 2005 Patrick Mochel',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_kobject_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/kobject.h')
        expected = [
            u'Copyright (c) 2002-2003 Patrick Mochel',
            u'Copyright (c) 2002-2003 Open Source Development Labs',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_kref_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/kref.h')
        expected = [
            u'Copyright (c) 2004 Greg Kroah-Hartman <greg@kroah.com>',
            u'Copyright (c) 2004 IBM Corp.',
            u'Copyright (c) 2002-2003 Patrick Mochel <mochel@osdl.org>',
            u'Copyright (c) 2002-2003 Open Source Development Labs',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_ktime_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/ktime.h')
        expected = [
            u'Copyright (c) 2005, Thomas Gleixner <tglx@linutronix.de>',
            u'Copyright (c) 2005, Red Hat, Inc., Ingo Molnar',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_kxtf9_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/kxtf9.h')
        expected = [
            u'Copyright (c) 2008-2009, Kionix, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_leds_an30259a_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/leds-an30259a.h')
        expected = [
            u'Copyright (c) 2011 Samsung Electronics Co. Ltd.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_lis331dlh_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/lis331dlh.h')
        expected = [
            u'Copyright (c) 2008-2009, Motorola',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_lockdep_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/lockdep.h')
        expected = [
            u'Copyright (c) 2006 Red Hat, Inc., Ingo Molnar <mingo@redhat.com>',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_loop_h_trail_name(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/loop.h')
        # Double-quoted literal: the holder name contains an apostrophe.
        expected = [
            u"Copyright 1993 by Theodore Ts'o.",
        ]
        check_detection(expected, test_file)
    # --- Test data: ics/kernel-headers-original-linux/ (part 2: mc146818rtc.h .. zconf.h) ---
    def test_ics_kernel_headers_original_linux_mc146818rtc_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/mc146818rtc.h')
        expected = [
            u'Copyright Torsten Duwe <duwe@informatik.uni-erlangen.de> 1993',
            u'Copyright Motorola 1984',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_mempolicy_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/mempolicy.h')
        expected = [
            u'Copyright 2003,2004 Andi Kleen SuSE Labs',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_moduleparam_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/moduleparam.h')
        expected = [
            u'(c) Copyright 2001, 2002 Rusty Russell IBM Corporation',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_msm_kgsl_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/msm_kgsl.h')
        expected = [
            u'(c) Copyright Advanced Micro Devices, Inc. 2002, 2007',
            u'Copyright (c) 2008-2009 QUALCOMM USA, INC.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_msm_mdp_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/msm_mdp.h')
        expected = [
            u'Copyright (c) 2007 Google Incorporated',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_msm_q6vdec_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/msm_q6vdec.h')
        expected = [
            u'Copyright (c) 2008-2009, Code Aurora Forum.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_msm_vidc_dec_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/msm_vidc_dec.h')
        expected = [
            u'Copyright (c) 2010, Code Aurora Forum.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_msm_vidc_enc_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/msm_vidc_enc.h')
        expected = [
            u'Copyright (c) 2009, Code Aurora Forum.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_mt9t013_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/mt9t013.h')
        expected = [
            u'Copyright (c) 2007, 2008 HTC, Inc',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_mutex_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/mutex.h')
        expected = [
            u'Copyright (c) 2004, 2005, 2006 Red Hat, Inc., Ingo Molnar <mingo@redhat.com>',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_ncp_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/ncp.h')
        expected = [
            u'Copyright (c) 1995 by Volker Lendecke',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_ncp_mount_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/ncp_mount.h')
        expected = [
            u'Copyright (c) 1995, 1996 by Volker Lendecke',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_netfilter_arp_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/netfilter_arp.h')
        expected = [
            u'(c) 2002 Rusty Russell',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_nfs4_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/nfs4.h')
        expected = [
            u'Copyright (c) 2002 The Regents of the University of Michigan.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_nfsacl_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/nfsacl.h')
        expected = [
            u'(c) 2003 Andreas Gruenbacher <agruen@suse.de>',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_nvhdcp_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/nvhdcp.h')
        expected = [
            u'Copyright (c) 2010-2011, NVIDIA Corporation.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_pagemap_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/pagemap.h')
        expected = [
            u'Copyright 1995 Linus Torvalds',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_patchkey_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/patchkey.h')
        expected = [
            u'Copyright (c) 2005 Stuart Brady',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_pci_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/pci.h')
        expected = [
            u'Copyright 1994, Drew Eckhardt',
            u'Copyright 1997 1999 Martin Mares <mj@ucw.cz>',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_perf_event_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/perf_event.h')
        expected = [
            u'Copyright (c) 2008-2009, Thomas Gleixner <tglx@linutronix.de>',
            u'Copyright (c) 2008-2009, Red Hat, Inc., Ingo Molnar',
            u'Copyright (c) 2008-2009, Red Hat, Inc., Peter Zijlstra',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_plist_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/plist.h')
        expected = [
            u'(c) 2002-2003 Intel Corp Inaky Perez-Gonzalez <inaky.perez-gonzalez@intel.com>.',
            u'(c) MontaVista Software, Inc.',
            u'(c) 2005 Thomas Gleixner <tglx@linutronix.de>',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_pm_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/pm.h')
        expected = [
            u'Copyright (c) 2000 Andrew Henroid',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_pn544_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/pn544.h')
        expected = [
            u'Copyright (c) 2010 Trusted Logic S.A.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_posix_acl_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/posix_acl.h')
        expected = [
            u'(c) 2002 Andreas Gruenbacher, <a.gruenbacher@computer.org>',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_ppdev_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/ppdev.h')
        expected = [
            u'Copyright (c) 1998-9 Tim Waugh <tim@cyberelk.demon.co.uk>',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_ppp_defs_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/ppp_defs.h')
        expected = [
            u'Copyright (c) 1994 The Australian National University.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_qic117_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/qic117.h')
        expected = [
            u'Copyright (c) 1993-1996 Bas Laarhoven',
            u'(c) 1997 Claus-Justus Heine.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_quota_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/quota.h')
        expected = [
            u'Copyright (c) 1982, 1986 Regents of the University of California.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_rcupdate_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/rcupdate.h')
        expected = [
            u'Copyright (c) IBM Corporation, 2001',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_relay_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/relay.h')
        expected = [
            u'Copyright (c) 2002, 2003 - Tom Zanussi (zanussi@us.ibm.com), IBM Corp',
            u'Copyright (c) 1999, 2000, 2001, 2002 - Karim Yaghmour (karim@opersys.com)',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_rpmsg_omx_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/rpmsg_omx.h')
        expected = [
            u'Copyright (c) 2011 Texas Instruments.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_rtc_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/rtc.h')
        expected = [
            u'Copyright (c) 1999 Hewlett-Packard Co.',
            u'Copyright (c) 1999 Stephane Eranian <eranian@hpl.hp.com>',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_serial_core_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/serial_core.h')
        expected = [
            u'Copyright (c) 2000 Deep Blue Solutions Ltd.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_serial_reg_h_trail_name(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/serial_reg.h')
        # Double-quoted literal: the holder name contains an apostrophe.
        expected = [
            u"Copyright (c) 1992, 1994 by Theodore Ts'o.",
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_sfh7743_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/sfh7743.h')
        expected = [
            u'Copyright (c) 2009 Motorola, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_smb_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/smb.h')
        expected = [
            u'Copyright (c) 1995, 1996 by Paal-Kr. Engstad and Volker Lendecke',
            u'Copyright (c) 1997 by Volker Lendecke',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_soundcard_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/soundcard.h')
        expected = [
            u'Copyright by Hannu Savolainen 1993-1997',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_spinlock_api_smp_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/spinlock_api_smp.h')
        expected = [
            u'portions Copyright 2005, Red Hat, Inc., Ingo Molnar',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_sysfs_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/sysfs.h')
        expected = [
            u'Copyright (c) 2001,2002 Patrick Mochel',
            u'Copyright (c) 2004 Silicon Graphics, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_taskstats_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/taskstats.h')
        expected = [
            u'Copyright (c) Shailabh Nagar, IBM Corp. 2006',
            u'(c) Balbir Singh, IBM Corp. 2006',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_telephony_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/telephony.h')
        expected = [
            u'(c) Copyright 1999-2001 Quicknet Technologies, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_timex_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/timex.h')
        expected = [
            u'Copyright (c) David L. Mills 1993',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_ufs_fs_i_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/ufs_fs_i.h')
        expected = [
            u'Copyright (c) 1996 Adrian Rodriguez (adrian@franklins-tower.rutgers.edu) Laboratory for Computer Science Research Computing Facility',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_usbdevice_fs_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/usbdevice_fs.h')
        expected = [
            u'Copyright (c) 2000 Thomas Sailer (sailer@ife.ee.ethz.ch)',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_videodev2_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/videodev2.h')
        expected = [
            u'Copyright (c) 1999-2007 the contributors',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_vt_buffer_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/vt_buffer.h')
        expected = [
            u'(c) 1998 Martin Mares <mj@ucw.cz>',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_wanrouter_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/wanrouter.h')
        expected = [
            u'Copyright (c) 1995-2000 Sangoma Technologies Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_wireless_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/wireless.h')
        expected = [
            u'Copyright (c) 1997-2006 Jean Tourrilhes',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_xattr_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/xattr.h')
        expected = [
            u'Copyright (c) 2001 by Andreas Gruenbacher <a.gruenbacher@computer.org>',
            u'Copyright (c) 2001-2002 Silicon Graphics, Inc.',
            u'Copyright (c) 2004 Red Hat, Inc., James Morris <jmorris@redhat.com>',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_zconf_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/zconf.h')
        expected = [
            u'Copyright (c) 1995-1998 Jean-loup Gailly.',
        ]
        check_detection(expected, test_file)
    # --- Test data: ics/kernel-headers-original-linux-lockd/ ---
    def test_ics_kernel_headers_original_linux_lockd_nlm_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux-lockd/nlm.h')
        expected = [
            u'Copyright (c) 1996, Olaf Kirch <okir@monad.swb.de>',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_lockd_xdr_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux-lockd/xdr.h')
        expected = [
            u'Copyright (c) 1996 Olaf Kirch <okir@monad.swb.de>',
        ]
        check_detection(expected, test_file)
    # --- Test data: ics/kernel-headers-original-linux-mtd/ ---
    def test_ics_kernel_headers_original_linux_mtd_bbm_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/bbm.h')
        expected = [
            u'Copyright (c) 2005 Samsung Electronics Kyungmin Park <kyungmin.park@samsung.com>',
            u'Copyright (c) 2000-2005 Thomas Gleixner <tglx@linuxtronix.de>',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_mtd_blktrans_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/blktrans.h')
        expected = [
            u'(c) 2003 David Woodhouse <dwmw2@infradead.org>',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_mtd_flashchip_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/flashchip.h')
        expected = [
            u'(c) 2000 Red Hat.',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_mtd_mtd_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/mtd.h')
        expected = [
            u'Copyright (c) 1999-2003 David Woodhouse <dwmw2@infradead.org>',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_mtd_nand_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/nand.h')
        expected = [
            u'Copyright (c) 2000 David Woodhouse <dwmw2@mvhi.com> Steven J. Hill <sjhill@realitydiluted.com> Thomas Gleixner <tglx@linutronix.de>',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_mtd_nand_ecc_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/nand_ecc.h')
        expected = [
            u'Copyright (c) 2000 Steven J. Hill (sjhill@realitydiluted.com)',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_mtd_nftl_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/nftl.h')
        expected = [
            u'(c) 1999-2003 David Woodhouse <dwmw2@infradead.org>',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_mtd_onenand_regs_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/onenand_regs.h')
        expected = [
            u'Copyright (c) 2005 Samsung Electronics',
        ]
        check_detection(expected, test_file)
    def test_ics_kernel_headers_original_linux_mtd_partitions_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/partitions.h')
        expected = [
            u'(c) 2000 Nicolas Pitre <nico@cam.org>',
        ]
        check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_netfilter_xt_connmark_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-netfilter/xt_CONNMARK.h')
expected = [
u'Copyright (c) 2002,2004 MARA Systems AB',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_kernel_headers_original_linux_netfilter_xt_connmark_h_trail_url(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-netfilter/xt_CONNMARK.h')
expected = [
u'Copyright (c) 2002,2004 MARA Systems AB <http://www.marasystems.com>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_netfilter_ipv4_ip_queue_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-netfilter_ipv4/ip_queue.h')
expected = [
u'(c) 2000 James Morris',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_netfilter_ipv4_ipt_dscp_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-netfilter_ipv4/ipt_DSCP.h')
expected = [
u'(c) 2002 Harald Welte <laforge@gnumonks.org>',
u'(c) 2000 by Matthew G. Marsh <mgm@paktronix.com>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_netfilter_ipv4_ipt_ttl_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-netfilter_ipv4/ipt_TTL.h')
expected = [
u'(c) 2000 by Harald Welte <laforge@netfilter.org>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_nfsd_auth_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-nfsd/auth.h')
expected = [
u'Copyright (c) 1995, 1996 Olaf Kirch <okir@monad.swb.de>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_nfsd_const_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-nfsd/const.h')
expected = [
u'Copyright (c) 1995-1997 Olaf Kirch <okir@monad.swb.de>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_nfsd_debug_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-nfsd/debug.h')
expected = [
u'Copyright (c) 1995 Olaf Kirch <okir@monad.swb.de>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_nfsd_interface_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-nfsd/interface.h')
expected = [
u'Copyright (c) 2000 Neil Brown <neilb@cse.unsw.edu.au>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_nfsd_nfsfh_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-nfsd/nfsfh.h')
expected = [
u'Copyright (c) 1995, 1996, 1997 Olaf Kirch <okir@monad.swb.de>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_raid_md_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-raid/md.h')
expected = [
u'Copyright (c) 1996-98 Ingo Molnar, Gadi Oxman',
u'Copyright (c) 1994-96 Marc ZYNGIER <zyngier@ufr-info-p7.ibp.fr>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_raid_md_k_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-raid/md_k.h')
expected = [
u'Copyright (c) 1996-98 Ingo Molnar, Gadi Oxman',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_sunrpc_auth_gss_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-sunrpc/auth_gss.h')
expected = [
u'Copyright (c) 2000 The Regents of the University of Michigan',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_sunrpc_clnt_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-sunrpc/clnt.h')
expected = [
u'Copyright (c) 1995, 1996, Olaf Kirch <okir@monad.swb.de>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_sunrpc_gss_asn1_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-sunrpc/gss_asn1.h')
expected = [
u'Copyright (c) 2000 The Regents of the University of Michigan.',
u'Copyright 1995 by the Massachusetts Institute of Technology.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_sunrpc_gss_err_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-sunrpc/gss_err.h')
expected = [
u'Copyright (c) 2002 The Regents of the University of Michigan.',
u'Copyright 1993 by OpenVision Technologies, Inc.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_sunrpc_timer_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-sunrpc/timer.h')
expected = [
u'Copyright (c) 2002 Trond Myklebust <trond.myklebust@fys.uio.no>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_sound_asound_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-sound/asound.h')
expected = [
u'Copyright (c) 1994-2003 by Jaroslav Kysela <perex@perex.cz>, Abramo Bagnara <abramo@alsa-project.org>',
]
check_detection(expected, test_file)
def test_ics_libffi_aclocal_m4(self):
test_file = self.get_test_loc('ics/libffi/aclocal.m4')
expected = [
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.',
u'Copyright (c) 2002, 2003, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1997, 2000, 2001, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1998, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1997, 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_android_mk(self):
test_file = self.get_test_loc('ics/libffi/Android.mk')
expected = [
u'Copyright 2007 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_libffi_configure(self):
test_file = self.get_test_loc('ics/libffi/configure')
expected = [
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.',
u'Copyright (c) 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_depcomp(self):
test_file = self.get_test_loc('ics/libffi/depcomp')
expected = [
u'Copyright (c) 1999, 2000, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_license(self):
test_file = self.get_test_loc('ics/libffi/LICENSE')
expected = [
u'Copyright (c) 1996-2008 Red Hat, Inc and others.',
]
check_detection(expected, test_file)
def test_ics_libffi_ltcf_c_sh(self):
test_file = self.get_test_loc('ics/libffi/ltcf-c.sh')
expected = [
u'Copyright (c) 1996-2000, 2001 Free Software Foundation, Inc.',
u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
]
check_detection(expected, test_file)
def test_ics_libffi_ltcf_cxx_sh(self):
test_file = self.get_test_loc('ics/libffi/ltcf-cxx.sh')
expected = [
u'Copyright (c) 1996-1999, 2000, 2001, 2003 Free Software Foundation, Inc.',
u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
]
check_detection(expected, test_file)
def test_ics_libffi_ltconfig(self):
test_file = self.get_test_loc('ics/libffi/ltconfig')
expected = [
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.',
u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
u'Copyright (c) 1996-2000 Free Software Foundation, Inc.',
u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
u'Copyright (c) 1999-2000 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_ltmain_sh(self):
test_file = self.get_test_loc('ics/libffi/ltmain.sh')
expected = [
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.',
u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
u'Copyright (c) 2007 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_missing(self):
test_file = self.get_test_loc('ics/libffi/missing')
expected = [
u'Copyright (c) 1996, 1997, 1999, 2000, 2002, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_darwin_x86_ffi_h(self):
test_file = self.get_test_loc('ics/libffi-darwin-x86/ffi.h')
expected = [
u'Copyright (c) 1996-2003, 2007, 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_darwin_x86_ffitarget_h(self):
test_file = self.get_test_loc('ics/libffi-darwin-x86/ffitarget.h')
expected = [
u'Copyright (c) 1996-2003 Red Hat, Inc.',
u'Copyright (c) 2008 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_doc_libffi_texi(self):
test_file = self.get_test_loc('ics/libffi-doc/libffi.texi')
expected = [
u'Copyright 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_include_ffi_h_in(self):
test_file = self.get_test_loc('ics/libffi-include/ffi.h.in')
expected = [
u'Copyright (c) 1996-2003, 2007, 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_include_ffi_common_h(self):
test_file = self.get_test_loc('ics/libffi-include/ffi_common.h')
expected = [
u'Copyright (c) 1996 Red Hat, Inc.',
u'Copyright (c) 2007 Free Software Foundation, Inc',
]
check_detection(expected, test_file)
def test_ics_libffi_src_closures_c(self):
test_file = self.get_test_loc('ics/libffi-src/closures.c')
expected = [
u'Copyright (c) 2007 Red Hat, Inc.',
u'Copyright (c) 2007 Free Software Foundation, Inc',
]
check_detection(expected, test_file)
def test_ics_libffi_src_debug_c(self):
test_file = self.get_test_loc('ics/libffi-src/debug.c')
expected = [
u'Copyright (c) 1996 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_java_raw_api_c(self):
test_file = self.get_test_loc('ics/libffi-src/java_raw_api.c')
expected = [
u'Copyright (c) 1999, 2007, 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_prep_cif_c(self):
test_file = self.get_test_loc('ics/libffi-src/prep_cif.c')
expected = [
u'Copyright (c) 1996, 1998, 2007 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_raw_api_c(self):
test_file = self.get_test_loc('ics/libffi-src/raw_api.c')
expected = [
u'Copyright (c) 1999, 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_types_c(self):
test_file = self.get_test_loc('ics/libffi-src/types.c')
expected = [
u'Copyright (c) 1996, 1998 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_alpha_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-alpha/ffi.c')
expected = [
u'Copyright (c) 1998, 2001, 2007, 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_alpha_ffitarget_h(self):
test_file = self.get_test_loc('ics/libffi-src-alpha/ffitarget.h')
expected = [
u'Copyright (c) 1996-2003 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_arm_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-arm/ffi.c')
expected = [
u'Copyright (c) 1998, 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_cris_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-cris/ffi.c')
expected = [
u'Copyright (c) 1998 Cygnus Solutions',
u'Copyright (c) 2004 Simon Posnjak',
u'Copyright (c) 2005 Axis Communications AB',
u'Copyright (c) 2007 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_frv_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-frv/ffi.c')
expected = [
u'Copyright (c) 2004 Anthony Green',
u'Copyright (c) 2007 Free Software Foundation, Inc.',
u'Copyright (c) 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_frv_ffitarget_h(self):
test_file = self.get_test_loc('ics/libffi-src-frv/ffitarget.h')
expected = [
u'Copyright (c) 1996-2004 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_ia64_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-ia64/ffi.c')
expected = [
u'Copyright (c) 1998, 2007, 2008 Red Hat, Inc.',
u'Copyright (c) 2000 Hewlett Packard Company',
]
check_detection(expected, test_file)
def test_ics_libffi_src_ia64_ia64_flags_h(self):
test_file = self.get_test_loc('ics/libffi-src-ia64/ia64_flags.h')
expected = [
u'Copyright (c) 2000 Hewlett Packard Company',
]
check_detection(expected, test_file)
def test_ics_libffi_src_m32r_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-m32r/ffi.c')
expected = [
u'Copyright (c) 2004 Renesas Technology',
u'Copyright (c) 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_m32r_ffitarget_h(self):
test_file = self.get_test_loc('ics/libffi-src-m32r/ffitarget.h')
expected = [
u'Copyright (c) 2004 Renesas Technology.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_mips_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-mips/ffi.c')
expected = [
u'Copyright (c) 1996, 2007, 2008 Red Hat, Inc.',
u'Copyright (c) 2008 David Daney',
]
check_detection(expected, test_file)
def test_ics_libffi_src_pa_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-pa/ffi.c')
expected = [
u'(c) 2003-2004 Randolph Chung <tausq@debian.org>',
u'(c) 2008 Red Hat, Inc.',
u'(c) 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_powerpc_asm_h(self):
test_file = self.get_test_loc('ics/libffi-src-powerpc/asm.h')
expected = [
u'Copyright (c) 1998 Geoffrey Keating',
]
check_detection(expected, test_file)
def test_ics_libffi_src_powerpc_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-powerpc/ffi.c')
expected = [
u'Copyright (c) 1998 Geoffrey Keating',
u'Copyright (c) 2007 Free Software Foundation, Inc',
u'Copyright (c) 2008 Red Hat, Inc',
]
check_detection(expected, test_file)
def test_ics_libffi_src_powerpc_ffi_darwin_c(self):
test_file = self.get_test_loc('ics/libffi-src-powerpc/ffi_darwin.c')
expected = [
u'Copyright (c) 1998 Geoffrey Keating',
u'Copyright (c) 2001 John Hornkvist',
u'Copyright (c) 2002, 2006, 2007 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_libffi_src_powerpc_ffitarget_h(self):
test_file = self.get_test_loc('ics/libffi-src-powerpc/ffitarget.h')
expected = [
u'Copyright (c) 1996-2003 Red Hat, Inc.',
u'Copyright (c) 2007 Free Software Foundation, Inc',
]
check_detection(expected, test_file)
def test_ics_libffi_src_s390_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-s390/ffi.c')
expected = [
u'Copyright (c) 2000, 2007 Software AG',
u'Copyright (c) 2008 Red Hat, Inc',
]
check_detection(expected, test_file)
def test_ics_libffi_src_sh_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-sh/ffi.c')
expected = [
u'Copyright (c) 2002, 2003, 2004, 2005, 2006, 2007, 2008 Kaz Kojima',
u'Copyright (c) 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_sh64_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-sh64/ffi.c')
expected = [
u'Copyright (c) 2003, 2004 Kaz Kojima',
u'Copyright (c) 2008 Anthony Green',
]
check_detection(expected, test_file)
def test_ics_libffi_src_sparc_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-sparc/ffi.c')
expected = [
u'Copyright (c) 1996, 2003, 2004, 2007, 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_x86_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-x86/ffi.c')
expected = [
u'Copyright (c) 1996, 1998, 1999, 2001, 2007, 2008 Red Hat, Inc.',
u'Copyright (c) 2002 Ranjit Mathew',
u'Copyright (c) 2002 Bo Thorsen',
u'Copyright (c) 2002 Roger Sayle',
u'Copyright (c) 2008 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_x86_ffi64_c(self):
test_file = self.get_test_loc('ics/libffi-src-x86/ffi64.c')
expected = [
u'Copyright (c) 2002, 2007 Bo Thorsen <bo@suse.de>',
u'Copyright (c) 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_testsuite_run_all_tests(self):
test_file = self.get_test_loc('ics/libffi-testsuite/run-all-tests')
expected = [
u'Copyright 2009 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_libffi_testsuite_lib_libffi_dg_exp(self):
test_file = self.get_test_loc('ics/libffi-testsuite-lib/libffi-dg.exp')
expected = [
u'Copyright (c) 2003, 2005, 2008 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_testsuite_lib_target_libpath_exp(self):
test_file = self.get_test_loc('ics/libffi-testsuite-lib/target-libpath.exp')
expected = [
u'Copyright (c) 2004, 2005, 2007 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_testsuite_lib_wrapper_exp(self):
test_file = self.get_test_loc('ics/libffi-testsuite-lib/wrapper.exp')
expected = [
u'Copyright (c) 2004, 2007 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libgsm_changelog(self):
test_file = self.get_test_loc('ics/libgsm/ChangeLog')
expected = [
u'Copyright 1992 by Jutta Degener and Carsten Bormann, Technische Universitaet Berlin.',
]
check_detection(expected, test_file)
def test_ics_libgsm_notice(self):
test_file = self.get_test_loc('ics/libgsm/NOTICE')
expected = [
u'Copyright 1992, 1993, 1994 by Jutta Degener and Carsten Bormann, Technische Universitaet Berlin',
]
check_detection(expected, test_file)
def test_ics_libgsm_readme(self):
test_file = self.get_test_loc('ics/libgsm/README')
expected = [
u'Copyright 1992 by Jutta Degener and Carsten Bormann, Technische Universitaet Berlin.',
]
check_detection(expected, test_file)
def test_ics_libgsm_inc_config_h(self):
test_file = self.get_test_loc('ics/libgsm-inc/config.h')
expected = [
u'Copyright 1992 by Jutta Degener and Carsten Bormann, Technische Universitaet Berlin.',
]
check_detection(expected, test_file)
def test_ics_libgsm_man_gsm_3(self):
test_file = self.get_test_loc('ics/libgsm-man/gsm.3')
expected = [
u'Copyright 1992 by Jutta Degener and Carsten Bormann, Technische Universitaet Berlin.',
]
check_detection(expected, test_file)
def test_ics_libgsm_man_gsm_option_3(self):
test_file = self.get_test_loc('ics/libgsm-man/gsm_option.3')
expected = [
u'Copyright 1992-1995 by Jutta Degener and Carsten Bormann, Technische Universitaet Berlin.',
]
check_detection(expected, test_file)
def test_ics_liblzf_license(self):
test_file = self.get_test_loc('ics/liblzf/LICENSE')
expected = [
u'Copyright (c) 2000-2009 Marc Alexander Lehmann <schmorp@schmorp.de>',
]
check_detection(expected, test_file)
def test_ics_liblzf_lzf_c(self):
test_file = self.get_test_loc('ics/liblzf/lzf.c')
expected = [
u'Copyright (c) 2006 Stefan Traby <stefan@hello-penguin.com>',
]
check_detection(expected, test_file)
def test_ics_liblzf_lzf_h(self):
test_file = self.get_test_loc('ics/liblzf/lzf.h')
expected = [
u'Copyright (c) 2000-2008 Marc Alexander Lehmann <schmorp@schmorp.de>',
]
check_detection(expected, test_file)
def test_ics_liblzf_lzf_c_c(self):
test_file = self.get_test_loc('ics/liblzf/lzf_c.c')
expected = [
u'Copyright (c) 2000-2010 Marc Alexander Lehmann <schmorp@schmorp.de>',
]
check_detection(expected, test_file)
def test_ics_liblzf_lzfp_h(self):
test_file = self.get_test_loc('ics/liblzf/lzfP.h')
expected = [
u'Copyright (c) 2000-2007 Marc Alexander Lehmann <schmorp@schmorp.de>',
]
check_detection(expected, test_file)
def test_ics_liblzf_cs_clzf_cs(self):
test_file = self.get_test_loc('ics/liblzf-cs/CLZF.cs')
expected = [
u'Copyright (c) 2005 Oren J. Maurice <oymaurice@hazorea.org.il>',
]
check_detection(expected, test_file)
def test_ics_libnfc_nxp_inc_nfc_custom_config_h(self):
test_file = self.get_test_loc('ics/libnfc-nxp-inc/nfc_custom_config.h')
expected = [
u'Copyright (c) 2010 NXP Semiconductors',
]
check_detection(expected, test_file)
def test_ics_libnl_headers_netlink_generic_h(self):
test_file = self.get_test_loc('ics/libnl-headers/netlink-generic.h')
expected = [
u'Copyright (c) 2003-2006 Thomas Graf <tgraf@suug.ch>',
]
check_detection(expected, test_file)
def test_ics_libnl_headers_netlink_local_h(self):
test_file = self.get_test_loc('ics/libnl-headers/netlink-local.h')
expected = [
u'Copyright (c) 2003-2008 Thomas Graf <tgraf@suug.ch>',
]
check_detection(expected, test_file)
def test_ics_libnl_headers_netlink_errno_h(self):
test_file = self.get_test_loc('ics/libnl-headers-netlink/errno.h')
expected = [
u'Copyright (c) 2008 Thomas Graf <tgraf@suug.ch>',
]
check_detection(expected, test_file)
def test_ics_libnl_headers_netlink_object_api_h(self):
test_file = self.get_test_loc('ics/libnl-headers-netlink/object-api.h')
expected = [
u'Copyright (c) 2003-2007 Thomas Graf <tgraf@suug.ch>',
]
check_detection(expected, test_file)
def test_ics_libnl_headers_netlink_cli_utils_h(self):
test_file = self.get_test_loc('ics/libnl-headers-netlink-cli/utils.h')
expected = [
u'Copyright (c) 2003-2009 Thomas Graf <tgraf@suug.ch>',
]
check_detection(expected, test_file)
def test_ics_libnl_headers_netlink_netfilter_ct_h(self):
test_file = self.get_test_loc('ics/libnl-headers-netlink-netfilter/ct.h')
expected = [
u'Copyright (c) 2003-2008 Thomas Graf <tgraf@suug.ch>',
u'Copyright (c) 2007 Philip Craig <philipc@snapgear.com>',
u'Copyright (c) 2007 Secure Computing Corporation',
]
check_detection(expected, test_file)
def test_ics_libnl_headers_netlink_route_addr_h(self):
test_file = self.get_test_loc('ics/libnl-headers-netlink-route/addr.h')
expected = [
u'Copyright (c) 2003-2008 Thomas Graf <tgraf@suug.ch>',
u'Copyright (c) 2003-2006 Baruch Even <baruch@ev-en.org>, Mediatrix Telecom, inc.',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_libnl_headers_netlink_route_addr_h_trail_email(self):
test_file = self.get_test_loc('ics/libnl-headers-netlink-route/addr.h')
expected = [
u'Copyright (c) 2003-2008 Thomas Graf <tgraf@suug.ch>',
u'Copyright (c) 2003-2006 Baruch Even <baruch@ev-en.org>, Mediatrix Telecom, inc. <ericb@mediatrix.com>',
]
check_detection(expected, test_file)
def test_ics_libpcap_aclocal_m4_trail_name(self):
test_file = self.get_test_loc('ics/libpcap/aclocal.m4')
expected = [
u'Copyright (c) 1995, 1996, 1997, 1998 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_atmuni31_h(self):
test_file = self.get_test_loc('ics/libpcap/atmuni31.h')
expected = [
u'Copyright (c) 1997 Yen Yen Lim and North Dakota State University',
]
check_detection(expected, test_file)
def test_ics_libpcap_bpf_dump_c(self):
test_file = self.get_test_loc('ics/libpcap/bpf_dump.c')
expected = [
u'Copyright (c) 1992, 1993, 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_bpf_image_c(self):
test_file = self.get_test_loc('ics/libpcap/bpf_image.c')
expected = [
u'Copyright (c) 1990, 1991, 1992, 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_config_guess(self):
test_file = self.get_test_loc('ics/libpcap/config.guess')
expected = [
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.',
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libpcap_configure_in_trail_name(self):
test_file = self.get_test_loc('ics/libpcap/configure.in')
expected = [
u'Copyright (c) 1994, 1995, 1996, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_etherent_c(self):
test_file = self.get_test_loc('ics/libpcap/etherent.c')
expected = [
u'Copyright (c) 1990, 1993, 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_ethertype_h(self):
test_file = self.get_test_loc('ics/libpcap/ethertype.h')
expected = [
u'Copyright (c) 1993, 1994, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_fad_getad_c(self):
test_file = self.get_test_loc('ics/libpcap/fad-getad.c')
expected = [
u'Copyright (c) 1994, 1995, 1996, 1997, 1998 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_fad_win32_c(self):
test_file = self.get_test_loc('ics/libpcap/fad-win32.c')
expected = [
u'Copyright (c) 2002 - 2005 NetGroup, Politecnico di Torino (Italy)',
u'Copyright (c) 2005 - 2006 CACE Technologies, Davis (California)',
]
check_detection(expected, test_file)
def test_ics_libpcap_gencode_c(self):
test_file = self.get_test_loc('ics/libpcap/gencode.c')
expected = [
u'Copyright (c) 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_gencode_h(self):
test_file = self.get_test_loc('ics/libpcap/gencode.h')
expected = [
u'Copyright (c) 1990, 1991, 1992, 1993, 1994, 1995, 1996 The Regents of the University of California.',
u'Copyright (c) 1997 Yen Yen Lim and North Dakota State University',
]
check_detection(expected, test_file)
def test_ics_libpcap_grammar_c(self):
test_file = self.get_test_loc('ics/libpcap/grammar.c')
expected = [
u'Copyright (c) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_llc_h(self):
test_file = self.get_test_loc('ics/libpcap/llc.h')
expected = [
u'Copyright (c) 1993, 1994, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_makefile_in(self):
test_file = self.get_test_loc('ics/libpcap/Makefile.in')
expected = [
u'Copyright (c) 1993, 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_mkdep(self):
test_file = self.get_test_loc('ics/libpcap/mkdep')
expected = [
u'Copyright (c) 1994, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_nlpid_h(self):
test_file = self.get_test_loc('ics/libpcap/nlpid.h')
expected = [
u'Copyright (c) 1996 Juniper Networks, Inc.',
]
check_detection(expected, test_file)
def test_ics_libpcap_optimize_c(self):
test_file = self.get_test_loc('ics/libpcap/optimize.c')
expected = [
u'Copyright (c) 1988, 1989, 1990, 1991, 1993, 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_3_trail_name(self):
test_file = self.get_test_loc('ics/libpcap/pcap.3')
expected = [
u'Copyright (c) 1994, 1996, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_c(self):
test_file = self.get_test_loc('ics/libpcap/pcap.c')
expected = [
u'Copyright (c) 1993, 1994, 1995, 1996, 1997, 1998 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_h(self):
test_file = self.get_test_loc('ics/libpcap/pcap.h')
expected = [
u'Copyright (c) 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_bpf_c(self):
test_file = self.get_test_loc('ics/libpcap/pcap-bpf.c')
expected = [
u'Copyright (c) 1993, 1994, 1995, 1996, 1998 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_bpf_h(self):
test_file = self.get_test_loc('ics/libpcap/pcap-bpf.h')
expected = [
u'Copyright (c) 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_dlpi_c(self):
test_file = self.get_test_loc('ics/libpcap/pcap-dlpi.c')
expected = [
u'Copyright (c) 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_int_h(self):
test_file = self.get_test_loc('ics/libpcap/pcap-int.h')
expected = [
u'Copyright (c) 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_linux_c(self):
test_file = self.get_test_loc('ics/libpcap/pcap-linux.c')
expected = [
u'Copyright (c) 2000 Torsten Landschoff <torsten@debian.org> Sebastian Krahmer <krahmer@cs.uni-potsdam.de>',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_namedb_h(self):
test_file = self.get_test_loc('ics/libpcap/pcap-namedb.h')
expected = [
u'Copyright (c) 1994, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_nit_c(self):
test_file = self.get_test_loc('ics/libpcap/pcap-nit.c')
expected = [
u'Copyright (c) 1990, 1991, 1992, 1993, 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_nit_h(self):
test_file = self.get_test_loc('ics/libpcap/pcap-nit.h')
expected = [
u'Copyright (c) 1990, 1994 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_null_c(self):
test_file = self.get_test_loc('ics/libpcap/pcap-null.c')
expected = [
u'Copyright (c) 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_stdinc_h_trail_name(self):
test_file = self.get_test_loc('ics/libpcap/pcap-stdinc.h')
expected = [
u'Copyright (c) 2002 - 2003 NetGroup, Politecnico di Torino (Italy)',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_win32_c(self):
test_file = self.get_test_loc('ics/libpcap/pcap-win32.c')
expected = [
u'Copyright (c) 1999 - 2005 NetGroup, Politecnico di Torino (Italy)',
u'Copyright (c) 2005 - 2007 CACE Technologies, Davis (California)',
]
check_detection(expected, test_file)
def test_ics_libpcap_ppp_h(self):
    loc = self.get_test_loc('ics/libpcap/ppp.h')
    check_detection([u'Copyright 1989 by Carnegie Mellon.'], loc)
def test_ics_libpcap_scanner_c(self):
    loc = self.get_test_loc('ics/libpcap/scanner.c')
    check_detection([u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.'], loc)
def test_ics_libpcap_tokdefs_h(self):
    loc = self.get_test_loc('ics/libpcap/tokdefs.h')
    check_detection([u'Copyright (c) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.'], loc)
def test_ics_libpcap_doc_pcap_html(self):
    loc = self.get_test_loc('ics/libpcap-doc/pcap.html')
    check_detection(
        [u'Copyright (c) The Internet Society (2004).',
         u'Copyright (c) The Internet Society (2004).'],
        loc)
def test_ics_libpcap_doc_pcap_txt(self):
    loc = self.get_test_loc('ics/libpcap-doc/pcap.txt')
    check_detection(
        [u'Copyright (c) The Internet Society (2004).',
         u'Full Copyright Statement',
         u'Copyright (c) The Internet Society (2004).'],
        loc)
def test_ics_libpcap_lbl_os_sunos4_h(self):
    loc = self.get_test_loc('ics/libpcap-lbl/os-sunos4.h')
    check_detection([u'Copyright (c) 1989, 1990, 1993, 1994, 1995, 1996 The Regents of the University of California.'], loc)
def test_ics_libpcap_lbl_os_ultrix4_h(self):
    loc = self.get_test_loc('ics/libpcap-lbl/os-ultrix4.h')
    check_detection([u'Copyright (c) 1990, 1993, 1994, 1995, 1996 The Regents of the University of California.'], loc)
def test_ics_libpcap_missing_snprintf_c(self):
    loc = self.get_test_loc('ics/libpcap-missing/snprintf.c')
    check_detection([u'Copyright (c) 1995-1999 Kungliga Tekniska Hogskolan (Royal Institute of Technology, Stockholm, Sweden).'], loc)
def test_ics_libvpx_args_c(self):
    loc = self.get_test_loc('ics/libvpx/args.c')
    check_detection([u'Copyright (c) 2010 The WebM project'], loc)
def test_ics_libvpx_docs_mk(self):
    loc = self.get_test_loc('ics/libvpx/docs.mk')
    check_detection([u'Copyright (c) 2010 The WebM project'], loc)
def test_ics_libvpx_license(self):
    loc = self.get_test_loc('ics/libvpx/LICENSE')
    check_detection([u'Copyright (c) 2010, Google Inc.'], loc)
def test_ics_libvpx_y4minput_c(self):
    loc = self.get_test_loc('ics/libvpx/y4minput.c')
    check_detection(
        [u'Copyright (c) 2010 The WebM project',
         u'Copyright (c) 2002-2010 The Xiph.Org Foundation and contributors.'],
        loc)
def test_ics_libvpx_build_x86_msvs_obj_int_extract_bat(self):
    loc = self.get_test_loc('ics/libvpx-build-x86-msvs/obj_int_extract.bat')
    check_detection([u'Copyright (c) 2011 The WebM project'], loc)
def test_ics_libvpx_examples_includes_asciimathphp_2_0_htmlmathml_js(self):
    loc = self.get_test_loc('ics/libvpx-examples-includes-ASCIIMathPHP-2.0/htmlMathML.js')
    check_detection([u'(c) Peter Jipsen'], loc)
def test_ics_libvpx_examples_includes_geshi_docs_geshi_doc_html(self):
    loc = self.get_test_loc('ics/libvpx-examples-includes-geshi-docs/geshi-doc.html')
    check_detection(
        [u'(c) 2004 - 2007 Nigel McNie, 2007 - 2008 Benny Baumann',
         u'(c) 2004 - 2007 Nigel McNie, 2007 - 2008 Benny Baumann',
         u'(c) 2004 - 2007 Nigel McNie, 2007 - 2008 Benny Baumann',
         u'(c) 2004 - 2007 Nigel McNie, 2007 - 2008 Benny Baumann',
         u'(c) 2004 - 2007 Nigel McNie, 2007 - 2008 Benny Baumann',
         u'Copyright (c) 2004 Nigel McNie',
         u"Copyright (c) 2008 < name> (< website URL> ) <span class coMULTI'> "],
        loc)
def test_ics_libvpx_examples_includes_geshi_docs_geshi_doc_txt(self):
    loc = self.get_test_loc('ics/libvpx-examples-includes-geshi-docs/geshi-doc.txt')
    check_detection(
        [u'Copyright (c) 2004 - 2007 Nigel McNie, 2007 - 2008 Benny Baumann Email nigel@geshi.org',
         u'Copyright (c) 2004 Nigel McNie',
         u'Copyright (c) 2004 ( )'],
        loc)
@expectedFailure
def test_ics_libvpx_examples_includes_geshi_docs_geshi_doc_txt_trail_email_trail_url_misc(self):
    loc = self.get_test_loc('ics/libvpx-examples-includes-geshi-docs/geshi-doc.txt')
    check_detection(
        [u'Copyright (c) 2004 - 2007 Nigel McNie, 2007 - 2008 Benny Baumann Email nigel@geshi.org, BenBE@omorphia.de',
         u'Copyright: (c) 2004 Nigel McNie (http://qbnz.com/highlighter/)',
         u'Copyright: (c) 2004 <name> (<website URL>)'],
        loc)
def test_ics_libvpx_examples_includes_geshi_docs_phpdoc_ini(self):
    loc = self.get_test_loc('ics/libvpx-examples-includes-geshi-docs/phpdoc.ini')
    check_detection([u'Copyright 2002, Greg Beaver <cellog@users.sourceforge.net>'], loc)
def test_ics_libvpx_examples_includes_html_toc_0_91_toc_pod(self):
    loc = self.get_test_loc('ics/libvpx-examples-includes-HTML-Toc-0.91/Toc.pod')
    check_detection([u'Copyright (c) 2001 Freddy Vulto.'], loc)
def test_ics_libvpx_examples_includes_php_markdown_extra_1_2_3_license_text(self):
    loc = self.get_test_loc('ics/libvpx-examples-includes-PHP-Markdown-Extra-1.2.3/License.text')
    check_detection(
        [u'Copyright (c) 2004-2008 Michel Fortin',
         u'Copyright (c) 2003-2006 John Gruber'],
        loc)
def test_ics_libvpx_examples_includes_php_markdown_extra_1_2_3_markdown_php(self):
    loc = self.get_test_loc('ics/libvpx-examples-includes-PHP-Markdown-Extra-1.2.3/markdown.php')
    check_detection(
        [u'Copyright (c) 2004-2008 Michel Fortin',
         u'Copyright (c) 2004-2006 John Gruber',
         u'Copyright (c) 2004-2008 Michel Fortin',
         u'Copyright (c) 2003-2006 John Gruber'],
        loc)
def test_ics_libvpx_examples_includes_php_markdown_extra_1_2_3_php_markdown_extra_readme_text(self):
    loc = self.get_test_loc('ics/libvpx-examples-includes-PHP-Markdown-Extra-1.2.3/PHP Markdown Extra Readme.text')
    check_detection(
        [u'Copyright (c) 2004-2005 Michel Fortin',
         u'Copyright (c) 2003-2005 John Gruber'],
        loc)
def test_ics_libvpx_examples_includes_php_smartypants_1_5_1e_php_smartypants_readme_txt(self):
    loc = self.get_test_loc('ics/libvpx-examples-includes-PHP-SmartyPants-1.5.1e/PHP SmartyPants Readme.txt')
    check_detection(
        [u'Copyright (c) 2005 Michel Fortin',
         u'Copyright (c) 2003-2004 John Gruber'],
        loc)
def test_ics_libvpx_examples_includes_php_smartypants_1_5_1e_smartypants_php(self):
    loc = self.get_test_loc('ics/libvpx-examples-includes-PHP-SmartyPants-1.5.1e/smartypants.php')
    check_detection(
        [u'Copyright (c) 2003-2004 John Gruber',
         u'Copyright (c) 2004-2005 Michel Fortin',
         u'Copyright (c) 2003 John Gruber',
         u'Copyright (c) 2004-2005 Michel Fortin'],
        loc)
def test_ics_libvpx_libmkv_ebmlids_h(self):
    loc = self.get_test_loc('ics/libvpx-libmkv/EbmlIDs.h')
    check_detection([u'Copyright (c) 2010 The WebM project'], loc)
def test_ics_libvpx_nestegg_license(self):
    loc = self.get_test_loc('ics/libvpx-nestegg/LICENSE')
    check_detection([u'Copyright (c) 2010 Mozilla Foundation'], loc)
def test_ics_libvpx_nestegg_halloc_halloc_h(self):
    loc = self.get_test_loc('ics/libvpx-nestegg-halloc/halloc.h')
    check_detection([u'Copyright (c) 2004-2010 Alex Pankratov.'], loc)
def test_ics_libvpx_nestegg_halloc_readme(self):
    loc = self.get_test_loc('ics/libvpx-nestegg-halloc/README')
    check_detection([u'Copyright (c) 2004-2010, Alex Pankratov (ap@swapped.cc).'], loc)
def test_ics_libvpx_nestegg_halloc_src_halloc_c(self):
    loc = self.get_test_loc('ics/libvpx-nestegg-halloc-src/halloc.c')
    # NOTE: '2004i-2010' reproduces a literal typo in the fixture file.
    check_detection([u'Copyright (c) 2004i-2010 Alex Pankratov.'], loc)
def test_ics_libvpx_nestegg_include_nestegg_nestegg_h(self):
    loc = self.get_test_loc('ics/libvpx-nestegg-include-nestegg/nestegg.h')
    check_detection([u'Copyright (c) 2010 Mozilla Foundation'], loc)
def test_ics_libvpx_nestegg_m4_pkg_m4(self):
    loc = self.get_test_loc('ics/libvpx-nestegg-m4/pkg.m4')
    check_detection([u'Copyright (c) 2004 Scott James Remnant <scott@netsplit.com>.'], loc)
def test_ics_libvpx_vp8_common_asm_com_offsets_c(self):
    loc = self.get_test_loc('ics/libvpx-vp8-common/asm_com_offsets.c')
    check_detection([u'Copyright (c) 2011 The WebM project'], loc)
def test_ics_libxml2_dict_c(self):
    loc = self.get_test_loc('ics/libxml2/dict.c')
    check_detection([u'Copyright (c) 2003 Daniel Veillard.'], loc)
def test_ics_libxml2_hash_c(self):
    loc = self.get_test_loc('ics/libxml2/hash.c')
    check_detection([u'Copyright (c) 2000 Bjorn Reese and Daniel Veillard.'], loc)
def test_ics_libxml2_list_c(self):
    loc = self.get_test_loc('ics/libxml2/list.c')
    check_detection([u'Copyright (c) 2000 Gary Pennington and Daniel Veillard.'], loc)
def test_ics_libxml2_notice(self):
    loc = self.get_test_loc('ics/libxml2/NOTICE')
    check_detection([u'Copyright (c) 1998-2003 Daniel Veillard.'], loc)
def test_ics_libxml2_trio_c(self):
    loc = self.get_test_loc('ics/libxml2/trio.c')
    check_detection([u'Copyright (c) 1998 Bjorn Reese and Daniel Stenberg.'], loc)
def test_ics_libxml2_triodef_h(self):
    loc = self.get_test_loc('ics/libxml2/triodef.h')
    check_detection([u'Copyright (c) 2001 Bjorn Reese <breese@users.sourceforge.net>'], loc)
def test_ics_libxml2_triop_h(self):
    loc = self.get_test_loc('ics/libxml2/triop.h')
    check_detection([u'Copyright (c) 2000 Bjorn Reese and Daniel Stenberg.'], loc)
def test_ics_libxml2_triostr_c(self):
    loc = self.get_test_loc('ics/libxml2/triostr.c')
    check_detection([u'Copyright (c) 2001 Bjorn Reese and Daniel Stenberg.'], loc)
def test_ics_libxslt_copyright(self):
    loc = self.get_test_loc('ics/libxslt/Copyright')
    check_detection(
        [u'Copyright (c) 2001-2002 Daniel Veillard.',
         u'Copyright (c) 2001-2002 Thomas Broyer, Charlie Bozeman and Daniel Veillard.'],
        loc)
def test_ics_lohit_fonts_notice(self):
    loc = self.get_test_loc('ics/lohit-fonts/NOTICE')
    check_detection([u'Copyright 2011 Lohit Fonts Project contributors'], loc)
@expectedFailure
def test_ics_lohit_fonts_notice_trail_url(self):
    loc = self.get_test_loc('ics/lohit-fonts/NOTICE')
    check_detection([u'Copyright 2011 Lohit Fonts Project contributors <http://fedorahosted.org/lohit>'], loc)
def test_ics_lohit_fonts_lohit_bengali_ttf_copyright(self):
    loc = self.get_test_loc('ics/lohit-fonts-lohit-bengali-ttf/COPYRIGHT')
    check_detection([u'Copyright 2011 Lohit Fonts Project contributors.'], loc)
@expectedFailure
def test_ics_lohit_fonts_lohit_bengali_ttf_copyright_trail_url(self):
    loc = self.get_test_loc('ics/lohit-fonts-lohit-bengali-ttf/COPYRIGHT')
    check_detection([u'Copyright 2011 Lohit Fonts Project contributors. <http://fedorahosted.org/lohit>'], loc)
def test_ics_markdown_notice(self):
    loc = self.get_test_loc('ics/markdown/NOTICE')
    check_detection(
        [u'Copyright 2007, 2008 The Python Markdown Project',
         u'Copyright 2004, 2005, 2006 Yuri Takhteyev',
         u'Copyright 2004 Manfred Stienstra'],
        loc)
def test_ics_markdown_bin_markdown(self):
    loc = self.get_test_loc('ics/markdown-bin/markdown')
    check_detection(
        [u'Copyright 2007, 2008 The Python Markdown Project',
         u'Copyright 2004, 2005, 2006 Yuri Takhteyev',
         u'Copyright 2004 Manfred Stienstra'],
        loc)
def test_ics_markdown_markdown_html4_py(self):
    loc = self.get_test_loc('ics/markdown-markdown/html4.py')
    check_detection(
        [u'Copyright (c) 1999-2007 by Fredrik Lundh.',
         u'Copyright (c) 1999-2007 by Fredrik Lundh'],
        loc)
def test_ics_markdown_markdown_extensions_abbr_py(self):
    loc = self.get_test_loc('ics/markdown-markdown-extensions/abbr.py')
    check_detection([u'Copyright 2007-2008 Waylan Limberg'], loc)
@expectedFailure
def test_ics_markdown_markdown_extensions_abbr_py_trail_url(self):
    loc = self.get_test_loc('ics/markdown-markdown-extensions/abbr.py')
    check_detection([u'Copyright 2007-2008 [Waylan Limberg](http://achinghead.com/) [Seemant Kulleen](http://www.kulleen.org/)'], loc)
def test_ics_markdown_markdown_extensions_codehilite_py(self):
    loc = self.get_test_loc('ics/markdown-markdown-extensions/codehilite.py')
    check_detection([u'Copyright 2006-2008 Waylan Limberg'], loc)
@expectedFailure
def test_ics_markdown_markdown_extensions_codehilite_py_trail_url(self):
    loc = self.get_test_loc('ics/markdown-markdown-extensions/codehilite.py')
    check_detection([u'Copyright 2006-2008 [Waylan Limberg](http://achinghead.com/).'], loc)
def test_ics_markdown_markdown_extensions_def_list_py(self):
    loc = self.get_test_loc('ics/markdown-markdown-extensions/def_list.py')
    check_detection([u'Copyright 2008 - Waylan Limberg'], loc)
@expectedFailure
def test_ics_markdown_markdown_extensions_def_list_py_trail_url(self):
    loc = self.get_test_loc('ics/markdown-markdown-extensions/def_list.py')
    check_detection([u'Copyright 2008 - [Waylan Limberg](http://achinghead.com)'], loc)
def test_ics_markdown_markdown_extensions_html_tidy_py(self):
    loc = self.get_test_loc('ics/markdown-markdown-extensions/html_tidy.py')
    check_detection([u'Copyright (c) 2008 Waylan Limberg'], loc)
@expectedFailure
def test_ics_markdown_markdown_extensions_html_tidy_py_trail_url(self):
    loc = self.get_test_loc('ics/markdown-markdown-extensions/html_tidy.py')
    check_detection([u'Copyright (c)2008 [Waylan Limberg](http://achinghead.com)'], loc)
def test_ics_markdown_markdown_extensions_tables_py(self):
    loc = self.get_test_loc('ics/markdown-markdown-extensions/tables.py')
    check_detection([u'Copyright 2009 - Waylan Limberg'], loc)
@expectedFailure
def test_ics_markdown_markdown_extensions_tables_py_trail_url(self):
    loc = self.get_test_loc('ics/markdown-markdown-extensions/tables.py')
    check_detection([u'Copyright 2009 - [Waylan Limberg](http://achinghead.com)'], loc)
def test_ics_markdown_markdown_extensions_toc_py(self):
    loc = self.get_test_loc('ics/markdown-markdown-extensions/toc.py')
    check_detection([u'(c) 2008 Jack Miller'], loc)
@expectedFailure
def test_ics_markdown_markdown_extensions_toc_py_trail_url(self):
    loc = self.get_test_loc('ics/markdown-markdown-extensions/toc.py')
    check_detection([u'(c) 2008 [Jack Miller](http://codezen.org)'], loc)
def test_ics_mesa3d_notice(self):
    loc = self.get_test_loc('ics/mesa3d/NOTICE')
    # NOTE: '2008-1010' reproduces a literal typo in the fixture file.
    check_detection(
        [u'Copyright (c) 1999-2008 Brian Paul',
         u'Copyright (c) 2008-1010 Intel Corporation',
         u'Copyright (c) 2007-2010 VMware, Inc.',
         u'Copyright (c) 2010 Luca Barbieri',
         u'Copyright (c) 2006 Alexander Chemeris',
         u'Copyright 2007,2010,2011 The Android Open Source Project'],
        loc)
def test_ics_mesa3d_docs_license_html(self):
    loc = self.get_test_loc('ics/mesa3d-docs/license.html')
    check_detection(
        [u'copyrighted by Mark Kilgard',
         u'Copyright (c) 1999-2007 Brian Paul'],
        loc)
def test_ics_mesa3d_docs_subset_a_html(self):
    loc = self.get_test_loc('ics/mesa3d-docs/subset-A.html')
    check_detection([u'Copyright (c) 2002-2003 by Tungsten Graphics, Inc., Cedar Park, Texas.'], loc)
def test_ics_mesa3d_include_c99_inttypes_h(self):
    loc = self.get_test_loc('ics/mesa3d-include-c99/inttypes.h')
    check_detection([u'Copyright (c) 2006 Alexander Chemeris'], loc)
def test_ics_mesa3d_include_c99_stdbool_h(self):
    loc = self.get_test_loc('ics/mesa3d-include-c99/stdbool.h')
    check_detection([u'Copyright 2007-2010 VMware, Inc.'], loc)
def test_ics_mesa3d_include_c99_stdint_h(self):
    loc = self.get_test_loc('ics/mesa3d-include-c99/stdint.h')
    check_detection([u'Copyright (c) 2006-2008 Alexander Chemeris'], loc)
def test_ics_mesa3d_include_pixelflinger2_pixelflinger2_interface_h(self):
    loc = self.get_test_loc('ics/mesa3d-include-pixelflinger2/pixelflinger2_interface.h')
    check_detection([u'Copyright 2010, The Android Open Source Project'], loc)
def test_ics_mesa3d_src_glsl_ast_h(self):
    loc = self.get_test_loc('ics/mesa3d-src-glsl/ast.h')
    check_detection([u'Copyright (c) 2009 Intel Corporation'], loc)
def test_ics_mesa3d_src_glsl_ast_expr_cpp(self):
    loc = self.get_test_loc('ics/mesa3d-src-glsl/ast_expr.cpp')
    check_detection([u'Copyright (c) 2010 Intel Corporation'], loc)
def test_ics_mesa3d_src_glsl_glsl_compiler_cpp(self):
    loc = self.get_test_loc('ics/mesa3d-src-glsl/glsl_compiler.cpp')
    check_detection([u'Copyright (c) 2008, 2009 Intel Corporation'], loc)
def test_ics_mesa3d_src_glsl_glsl_parser_cpp(self):
    loc = self.get_test_loc('ics/mesa3d-src-glsl/glsl_parser.cpp')
    check_detection(
        [u'Copyright (c) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2009, 2010 Free Software Foundation, Inc.',
         u'Copyright (c) 2008, 2009 Intel Corporation'],
        loc)
def test_ics_mesa3d_src_glsl_glsl_parser_h(self):
    loc = self.get_test_loc('ics/mesa3d-src-glsl/glsl_parser.h')
    check_detection([u'Copyright (c) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2009, 2010 Free Software Foundation, Inc.'], loc)
def test_ics_mesa3d_src_glsl_ir_to_llvm_cpp(self):
    loc = self.get_test_loc('ics/mesa3d-src-glsl/ir_to_llvm.cpp')
    check_detection(
        [u'Copyright (c) 2005-2007 Brian Paul',
         u'Copyright (c) 2008 VMware, Inc.',
         u'Copyright (c) 2010 Intel Corporation',
         u'Copyright (c) 2010 Luca Barbieri'],
        loc)
def test_ics_mesa3d_src_glsl_list_h(self):
    loc = self.get_test_loc('ics/mesa3d-src-glsl/list.h')
    check_detection([u'Copyright (c) 2008, 2010 Intel Corporation'], loc)
def test_ics_mesa3d_src_glsl_lower_jumps_cpp(self):
    loc = self.get_test_loc('ics/mesa3d-src-glsl/lower_jumps.cpp')
    check_detection([u'Copyright (c) 2010 Luca Barbieri'], loc)
def test_ics_mesa3d_src_glsl_program_h(self):
    loc = self.get_test_loc('ics/mesa3d-src-glsl/program.h')
    check_detection(
        [u'Copyright (c) 1999-2008 Brian Paul',
         u'Copyright (c) 2009 VMware, Inc.',
         u'Copyright (c) 2010 Intel Corporation'],
        loc)
def test_ics_mesa3d_src_glsl_strtod_c(self):
    loc = self.get_test_loc('ics/mesa3d-src-glsl/strtod.c')
    check_detection([u'Copyright 2010 VMware, Inc.'], loc)
def test_ics_mesa3d_src_glsl_glcpp_glcpp_lex_c(self):
    loc = self.get_test_loc('ics/mesa3d-src-glsl-glcpp/glcpp-lex.c')
    check_detection([u'Copyright (c) 2010 Intel Corporation'], loc)
def test_ics_mesa3d_src_glsl_glcpp_glcpp_parse_c(self):
    loc = self.get_test_loc('ics/mesa3d-src-glsl-glcpp/glcpp-parse.c')
    check_detection(
        [u'Copyright (c) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2009, 2010 Free Software Foundation, Inc.',
         u'Copyright (c) 2010 Intel Corporation'],
        loc)
def test_ics_mesa3d_src_glsl_glcpp_makefile_am(self):
    loc = self.get_test_loc('ics/mesa3d-src-glsl-glcpp/Makefile.am')
    check_detection([u'Copyright (c) 2010 Intel Corporation'], loc)
def test_ics_mesa3d_src_mesa_main_compiler_h(self):
    loc = self.get_test_loc('ics/mesa3d-src-mesa-main/compiler.h')
    check_detection(
        [u'Copyright (c) 1999-2008 Brian Paul',
         u'Copyright (c) 2009 VMware, Inc.'],
        loc)
def test_ics_mesa3d_src_mesa_main_config_h(self):
    loc = self.get_test_loc('ics/mesa3d-src-mesa-main/config.h')
    check_detection(
        [u'Copyright (c) 1999-2007 Brian Paul',
         u'Copyright (c) 2008 VMware, Inc.'],
        loc)
def test_ics_mesa3d_src_mesa_main_core_h(self):
    loc = self.get_test_loc('ics/mesa3d-src-mesa-main/core.h')
    check_detection([u'Copyright (c) 2010 LunarG Inc.'], loc)
def test_ics_mesa3d_src_mesa_main_debug_h(self):
    loc = self.get_test_loc('ics/mesa3d-src-mesa-main/debug.h')
    check_detection([u'Copyright (c) 1999-2004 Brian Paul'], loc)
def test_ics_mesa3d_src_mesa_main_get_h(self):
    loc = self.get_test_loc('ics/mesa3d-src-mesa-main/get.h')
    check_detection([u'Copyright (c) 1999-2001 Brian Paul'], loc)
def test_ics_mesa3d_src_mesa_main_glheader_h(self):
    loc = self.get_test_loc('ics/mesa3d-src-mesa-main/glheader.h')
    check_detection([u'Copyright (c) 1999-2008 Brian Paul'], loc)
def test_ics_mesa3d_src_mesa_main_hash_h(self):
    loc = self.get_test_loc('ics/mesa3d-src-mesa-main/hash.h')
    check_detection([u'Copyright (c) 1999-2006 Brian Paul'], loc)
def test_ics_mesa3d_src_mesa_main_shaderobj_h(self):
    loc = self.get_test_loc('ics/mesa3d-src-mesa-main/shaderobj.h')
    check_detection([u'Copyright (c) 2004-2007 Brian Paul'], loc)
def test_ics_mesa3d_src_mesa_main_simple_list_h(self):
    loc = self.get_test_loc('ics/mesa3d-src-mesa-main/simple_list.h')
    check_detection(
        [u'(c) 1997, Keith Whitwell',
         u'Copyright (c) 1999-2001 Brian Paul'],
        loc)
def test_ics_mesa3d_src_mesa_program_hash_table_c(self):
    loc = self.get_test_loc('ics/mesa3d-src-mesa-program/hash_table.c')
    check_detection([u'Copyright (c) 2008 Intel Corporation'], loc)
def test_ics_mesa3d_src_mesa_program_prog_statevars_h(self):
    loc = self.get_test_loc('ics/mesa3d-src-mesa-program/prog_statevars.h')
    check_detection([u'Copyright (c) 1999-2007 Brian Paul'], loc)
def test_ics_mesa3d_src_pixelflinger2_pixelflinger2_cpp(self):
    loc = self.get_test_loc('ics/mesa3d-src-pixelflinger2/pixelflinger2.cpp')
    check_detection([u'Copyright 2010, The Android Open Source Project'], loc)
def test_ics_mesa3d_test_egl_cpp(self):
    loc = self.get_test_loc('ics/mesa3d-test/egl.cpp')
    check_detection([u'Copyright 2007 The Android Open Source Project'], loc)
def test_ics_mesa3d_test_m_matrix_c(self):
    loc = self.get_test_loc('ics/mesa3d-test/m_matrix.c')
    check_detection([u'Copyright (c) 1999-2005 Brian Paul'], loc)
def test_ics_mesa3d_test_m_matrix_h(self):
    loc = self.get_test_loc('ics/mesa3d-test/m_matrix.h')
    check_detection([u'Copyright (c) 1999-2005 Brian Paul'], loc)
def test_ics_mksh_android_mk(self):
    loc = self.get_test_loc('ics/mksh/Android.mk')
    check_detection([u'Copyright (c) 2010 Thorsten Glaser <t.glaser@tarent.de>'], loc)
def test_ics_mksh_mkshrc(self):
    loc = self.get_test_loc('ics/mksh/mkshrc')
    check_detection([u'Copyright (c) 2010 Thorsten Glaser <t.glaser@tarent.de>'], loc)
def test_ics_mksh_notice(self):
    loc = self.get_test_loc('ics/mksh/NOTICE')
    check_detection([u'Copyright (c) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Thorsten Glaser <tg@mirbsd.org>'], loc)
def test_ics_mksh_src_build_sh(self):
    loc = self.get_test_loc('ics/mksh-src/Build.sh')
    check_detection([u'Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Thorsten Glaser <tg@mirbsd.org>'], loc)
def test_ics_mksh_src_edit_c(self):
    loc = self.get_test_loc('ics/mksh-src/edit.c')
    check_detection([u'Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Thorsten Glaser <tg@mirbsd.org>'], loc)
def test_ics_mksh_src_funcs_c(self):
    loc = self.get_test_loc('ics/mksh-src/funcs.c')
    check_detection([u'Copyright (c) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Thorsten Glaser <tg@mirbsd.org>'], loc)
def test_ics_mksh_src_jobs_c(self):
    loc = self.get_test_loc('ics/mksh-src/jobs.c')
    check_detection([u'Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Thorsten Glaser <tg@mirbsd.org>'], loc)
def test_ics_mksh_src_lalloc_c(self):
    loc = self.get_test_loc('ics/mksh-src/lalloc.c')
    check_detection([u'Copyright (c) 2009 Thorsten Glaser <tg@mirbsd.org>'], loc)
def test_ics_mksh_src_sh_h(self):
    loc = self.get_test_loc('ics/mksh-src/sh.h')
    check_detection([u'Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Thorsten Glaser <tg@mirbsd.org>'], loc)
def test_ics_mtpd_l2tp_c(self):
    loc = self.get_test_loc('ics/mtpd/l2tp.c')
    check_detection([u'Copyright (c) 2009 The Android Open Source Project'], loc)
def test_ics_mtpd_notice(self):
    loc = self.get_test_loc('ics/mtpd/NOTICE')
    check_detection([u'Copyright (c) 2009, The Android Open Source Project'], loc)
def test_ics_netperf_module_license_hp(self):
    loc = self.get_test_loc('ics/netperf/MODULE_LICENSE_HP')
    check_detection([u'Copyright (c) 1993 Hewlett-Packard Company'], loc)
def test_ics_netperf_netcpu_kstat10_c(self):
    loc = self.get_test_loc('ics/netperf/netcpu_kstat10.c')
    check_detection([u'(c) Copyright 2005-2007, Hewlett-Packard Company'], loc)
def test_ics_netperf_netcpu_looper_c(self):
    loc = self.get_test_loc('ics/netperf/netcpu_looper.c')
    check_detection([u'(c) Copyright 2005-2007. version 2.4.3'], loc)
def test_ics_netperf_netcpu_none_c(self):
    loc = self.get_test_loc('ics/netperf/netcpu_none.c')
    check_detection([u'(c) Copyright 2005, Hewlett-Packard Company'], loc)
def test_ics_netperf_netcpu_procstat_c(self):
    loc = self.get_test_loc('ics/netperf/netcpu_procstat.c')
    check_detection([u'(c) Copyright 2005-2007 version 2.4.3'], loc)
def test_ics_netperf_netlib_c(self):
    loc = self.get_test_loc('ics/netperf/netlib.c')
    check_detection([u'(c) Copyright 1993-2007 Hewlett-Packard Company.'], loc)
def test_ics_netperf_netlib_h(self):
    loc = self.get_test_loc('ics/netperf/netlib.h')
    check_detection([u'Copyright (c) 1993-2005 Hewlett-Packard Company'], loc)
def test_ics_netperf_netperf_c(self):
    loc = self.get_test_loc('ics/netperf/netperf.c')
    check_detection(
        [u'Copyright (c) 1993-2007 Hewlett-Packard Company',
         u'(c) Copyright 1993-2007 Hewlett-Packard Company.'],
        loc)
def test_ics_netperf_netserver_c(self):
    loc = self.get_test_loc('ics/netperf/netserver.c')
    check_detection(
        [u'Copyright (c) 1993-2007 Hewlett-Packard Company',
         u'(c) Copyright 1993-2007 Hewlett-Packard Co.'],
        loc)
def test_ics_netperf_netsh_h(self):
    loc = self.get_test_loc('ics/netperf/netsh.h')
    check_detection([u'Copyright (c) 1993,1995 Hewlett-Packard Company'], loc)
def test_ics_netperf_nettest_bsd_c(self):
    loc = self.get_test_loc('ics/netperf/nettest_bsd.c')
    check_detection([u'(c) Copyright 1993-2004 Hewlett-Packard Co.'], loc)
def test_ics_netperf_nettest_bsd_h(self):
    loc = self.get_test_loc('ics/netperf/nettest_bsd.h')
    check_detection([u'Copyright (c) 1993-2004 Hewlett-Packard Company'], loc)
def test_ics_netperf_nettest_dlpi_c(self):
    loc = self.get_test_loc('ics/netperf/nettest_dlpi.c')
    check_detection([u'(c) Copyright 1993,1995,2004 Hewlett-Packard Co.'], loc)
def test_ics_netperf_nettest_dlpi_h(self):
    loc = self.get_test_loc('ics/netperf/nettest_dlpi.h')
    check_detection([u'Copyright (c) 1993, Hewlett-Packard Company'], loc)
def test_ics_netperf_nettest_sctp_c(self):
    loc = self.get_test_loc('ics/netperf/nettest_sctp.c')
    check_detection([u'(c) Copyright 2005-2007 Hewlett-Packard Co.'], loc)
def test_ics_netperf_nettest_sctp_h(self):
    loc = self.get_test_loc('ics/netperf/nettest_sctp.h')
    check_detection([u'Copyright (c) 1993-2003 Hewlett-Packard Company'], loc)
def test_ics_netperf_nettest_sdp_c(self):
    loc = self.get_test_loc('ics/netperf/nettest_sdp.c')
    check_detection([u'(c) Copyright 2007 Hewlett-Packard Co.'], loc)
def test_ics_netperf_nettest_sdp_h(self):
    loc = self.get_test_loc('ics/netperf/nettest_sdp.h')
    check_detection([u'Copyright (c) 2007 Hewlett-Packard Company'], loc)
def test_ics_netperf_nettest_unix_c(self):
    loc = self.get_test_loc('ics/netperf/nettest_unix.c')
    check_detection([u'(c) Copyright 1994-2007 Hewlett-Packard Co.'], loc)
def test_ics_netperf_nettest_xti_c(self):
    loc = self.get_test_loc('ics/netperf/nettest_xti.c')
    check_detection([u'(c) Copyright 1995-2007 Hewlett-Packard Co.'], loc)
def test_ics_netperf_nettest_xti_h(self):
    loc = self.get_test_loc('ics/netperf/nettest_xti.h')
    check_detection([u'Copyright (c) 1995,2004 Hewlett-Packard Company'], loc)
def test_ics_neven_facedetector_jni_cpp(self):
    loc = self.get_test_loc('ics/neven/FaceDetector_jni.cpp')
    check_detection([u'Copyright (c) 2006 The Android Open Source Project'], loc)
def test_ics_neven_notice(self):
    loc = self.get_test_loc('ics/neven/NOTICE')
    check_detection([u'Copyright (c) 2008 The Android Open Source Project'], loc)
def test_ics_nist_sip_java_gov_nist_core_multimap_java(self):
    loc = self.get_test_loc('ics/nist-sip-java-gov-nist-core/MultiMap.java')
    check_detection([u'Copyright 1999-2004 The Apache Software Foundation'], loc)
def test_ics_oauth_core_src_main_java_net_oauth_consumerproperties_java(self):
    loc = self.get_test_loc('ics/oauth-core-src-main-java-net-oauth/ConsumerProperties.java')
    check_detection([u'Copyright 2007 Netflix, Inc.'], loc)
def test_ics_oauth_core_src_main_java_net_oauth_oauthexception_java(self):
    loc = self.get_test_loc('ics/oauth-core-src-main-java-net-oauth/OAuthException.java')
    check_detection([u'Copyright 2008 Google, Inc.'], loc)
def test_ics_oauth_core_src_main_java_net_oauth_oauthmessage_java(self):
    loc = self.get_test_loc('ics/oauth-core-src-main-java-net-oauth/OAuthMessage.java')
    check_detection([u'Copyright 2007, 2008 Netflix, Inc.'], loc)
def test_ics_oauth_core_src_main_java_net_oauth_client_oauthresponsemessage_java(self):
test_file = self.get_test_loc('ics/oauth-core-src-main-java-net-oauth-client/OAuthResponseMessage.java')
expected = [
u'Copyright 2008 Netflix, Inc.',
]
check_detection(expected, test_file)
def test_ics_oauth_core_src_main_java_net_oauth_client_httpclient4_httpclient4_java(self):
test_file = self.get_test_loc('ics/oauth-core-src-main-java-net-oauth-client-httpclient4/HttpClient4.java')
expected = [
u'Copyright 2008 Sean Sullivan',
]
check_detection(expected, test_file)
def test_ics_oauth_core_src_main_java_net_oauth_signature_rsa_sha1_java(self):
test_file = self.get_test_loc('ics/oauth-core-src-main-java-net-oauth-signature/RSA_SHA1.java')
expected = [
u'Copyright 2007 Google, Inc.',
]
check_detection(expected, test_file)
def test_ics_opencv_cvjni_cpp(self):
test_file = self.get_test_loc('ics/opencv/cvjni.cpp')
expected = [
u'Copyright (c) 2006-2009 SIProp Project http://www.siprop.org',
]
check_detection(expected, test_file)
def test_ics_opencv_license_opencv(self):
test_file = self.get_test_loc('ics/opencv/LICENSE_OpenCV')
expected = [
u'Copyright (c) 2000-2006, Intel Corporation',
]
check_detection(expected, test_file)
def test_ics_opencv_notice(self):
test_file = self.get_test_loc('ics/opencv/NOTICE')
expected = [
u'Copyright (c) 2000-2006, Intel Corporation',
u'Copyright (c) 2006-2009 SIProp Project http://www.siprop.org',
u'Copyright (c) 1992, 1993 The Regents of the University of California.',
u'Copyright (c) 2008, Liu Liu',
u'Copyright (c) 2008, Google',
u'Copyright (c) 1992, 1993 The Regents of the University of California.',
u'Copyright (c) 2002, MD-Mathematische Dienste GmbH Im Defdahl',
u'Copyright (c) 2000-2003 Chih-Chung Chang and Chih-Jen Lin',
u'Copyright (c) 2000, Intel Corporation',
u'Copyright (c) 2000, Intel Corporation',
u'Copyright (c) 2002, Intel Corporation',
u'Copyright( C) 2000, Intel Corporation',
u'Copyright (c) 2008, Xavier Delacour',
u'Copyright( C) 2000, Intel Corporation',
u'Copyright (c) 2000, Intel Corporation',
u'Copyright (c) 2008, Nils Hasler',
u'Copyright (c) 2000, Intel Corporation',
u'Copyright (c) 2000, Intel Corporation',
u'Copyright (c) 2000, Intel Corporation',
u'Copyright (c) 2000, Intel Corporation',
u'Copyright (c) 1978-1999 Ken Turkowski. <turk@computer.org>',
u'Copyright (c) 1981-1999 Ken Turkowski. <turk@computer.org>',
u'Copyright (c) 1998 Yossi Rubner Computer Science Department, Stanford University',
u'Copyright (c) 2006 Simon Perreault',
u'Copyright (c) 1995 Intel Corporation.',
]
check_detection(expected, test_file)
def test_ics_opencv_cvaux_src_cv3dtracker_cpp(self):
test_file = self.get_test_loc('ics/opencv-cvaux-src/cv3dtracker.cpp')
expected = [
u'Copyright (c) 2002, Intel Corporation',
]
check_detection(expected, test_file)
def test_ics_opencv_cvaux_src_cvdpstereo_cpp(self):
test_file = self.get_test_loc('ics/opencv-cvaux-src/cvdpstereo.cpp')
expected = [
u'Copyright (c) 2000, Intel Corporation',
]
check_detection(expected, test_file)
def test_ics_opencv_cv_include_cv_h(self):
test_file = self.get_test_loc('ics/opencv-cv-include/cv.h')
expected = [
u'Copyright (c) 2000, Intel Corporation',
]
check_detection(expected, test_file)
def test_ics_opencv_cv_src_cvkdtree_hpp(self):
test_file = self.get_test_loc('ics/opencv-cv-src/_cvkdtree.hpp')
expected = [
u'Copyright (c) 2008, Xavier Delacour',
]
check_detection(expected, test_file)
def test_ics_opencv_cv_src_cvcolor_cpp(self):
test_file = self.get_test_loc('ics/opencv-cv-src/cvcolor.cpp')
expected = [
u'Copyright (c) 2000, Intel Corporation',
u'Copyright (c) 2002, MD-Mathematische Dienste GmbH Im Defdahl',
]
check_detection(expected, test_file)
def test_ics_opencv_cv_src_cvdistransform_cpp(self):
test_file = self.get_test_loc('ics/opencv-cv-src/cvdistransform.cpp')
expected = [
u'Copyright (c) 2000, Intel Corporation',
u'(c) 2006 by Jay Stavinzky.',
]
check_detection(expected, test_file)
def test_ics_opencv_cv_src_cvemd_cpp(self):
test_file = self.get_test_loc('ics/opencv-cv-src/cvemd.cpp')
expected = [
u'Copyright (c) 2000, Intel Corporation',
u'Copyright (c) 1998 Yossi Rubner Computer Science Department, Stanford University',
]
check_detection(expected, test_file)
def test_ics_opencv_cv_src_cvkdtree_cpp(self):
test_file = self.get_test_loc('ics/opencv-cv-src/cvkdtree.cpp')
expected = [
u'Copyright (c) 2008, Xavier Delacour',
]
check_detection(expected, test_file)
def test_ics_opencv_cv_src_cvsmooth_cpp(self):
test_file = self.get_test_loc('ics/opencv-cv-src/cvsmooth.cpp')
expected = [
u'Copyright (c) 2000, Intel Corporation',
u'Copyright (c) 2006 Simon Perreault',
]
check_detection(expected, test_file)
def test_ics_opencv_cv_src_cvsurf_cpp(self):
test_file = self.get_test_loc('ics/opencv-cv-src/cvsurf.cpp')
expected = [
u'Copyright (c) 2008, Liu Liu',
]
check_detection(expected, test_file)
def test_ics_opencv_cxcore_include_cvwimage_h(self):
test_file = self.get_test_loc('ics/opencv-cxcore-include/cvwimage.h')
expected = [
u'Copyright (c) 2008, Google',
]
check_detection(expected, test_file)
def test_ics_opencv_cxcore_include_cxmisc_h(self):
test_file = self.get_test_loc('ics/opencv-cxcore-include/cxmisc.h')
expected = [
u'Copyright (c) 2000, Intel Corporation',
u'Copyright (c) 1992, 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_opencv_cxcore_include_cxtypes_h(self):
test_file = self.get_test_loc('ics/opencv-cxcore-include/cxtypes.h')
expected = [
u'Copyright (c) 2000, Intel Corporation',
u'Copyright (c) 1995 Intel Corporation.',
]
check_detection(expected, test_file)
def test_ics_opencv_cxcore_src_cxdatastructs_cpp(self):
test_file = self.get_test_loc('ics/opencv-cxcore-src/cxdatastructs.cpp')
expected = [
u'Copyright (c) 2000, Intel Corporation',
u'Copyright (c) 1992, 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_opencv_cxcore_src_cxutils_cpp(self):
test_file = self.get_test_loc('ics/opencv-cxcore-src/cxutils.cpp')
expected = [
u'Copyright (c) 2000, Intel Corporation',
u'Copyright (c) 1978-1999 Ken Turkowski. <turk@computer.org>',
u'Copyright (c) 1981-1999 Ken Turkowski. <turk@computer.org>',
]
check_detection(expected, test_file)
def test_ics_opencv_ml_src_mlsvm_cpp(self):
test_file = self.get_test_loc('ics/opencv-ml-src/mlsvm.cpp')
expected = [
u'Copyright (c) 2000, Intel Corporation',
u'Copyright (c) 2000-2003 Chih-Chung Chang and Chih-Jen Lin',
]
check_detection(expected, test_file)
def test_ics_opencv_otherlibs_highgui_cvcap_socket_cpp(self):
test_file = self.get_test_loc('ics/opencv-otherlibs-highgui/cvcap_socket.cpp')
expected = [
u'Copyright (c) 2008, Nils Hasler',
]
check_detection(expected, test_file)
def test_ics_opencv_otherlibs_highgui_grfmt_png_cpp(self):
test_file = self.get_test_loc('ics/opencv-otherlibs-highgui/grfmt_png.cpp')
expected = [
u'Copyright (c) 2000, Intel Corporation',
u'(Copyright (c) 1999-2001 MIYASAKA Masaru)',
]
check_detection(expected, test_file)
def test_ics_openssl_e_os_h(self):
test_file = self.get_test_loc('ics/openssl/e_os.h')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
]
check_detection(expected, test_file)
def test_ics_openssl_e_os2_h(self):
test_file = self.get_test_loc('ics/openssl/e_os2.h')
expected = [
u'Copyright (c) 1998-2000 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_notice(self):
test_file = self.get_test_loc('ics/openssl/NOTICE')
expected = [
u'Copyright (c) 1998-2011 The OpenSSL Project.',
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
]
check_detection(expected, test_file)
def test_ics_openssl_apps_app_rand_c(self):
test_file = self.get_test_loc('ics/openssl-apps/app_rand.c')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright (c) 1998-2000 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_apps_apps_c(self):
test_file = self.get_test_loc('ics/openssl-apps/apps.c')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright (c) 1998-2001 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_apps_apps_h(self):
test_file = self.get_test_loc('ics/openssl-apps/apps.h')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright (c) 1998-2001 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_apps_asn1pars_c(self):
test_file = self.get_test_loc('ics/openssl-apps/asn1pars.c')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
]
check_detection(expected, test_file)
def test_ics_openssl_apps_cms_c(self):
test_file = self.get_test_loc('ics/openssl-apps/cms.c')
expected = [
u'Copyright (c) 2008 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_apps_ec_c(self):
test_file = self.get_test_loc('ics/openssl-apps/ec.c')
expected = [
u'Copyright (c) 1998-2005 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_apps_ecparam_c(self):
test_file = self.get_test_loc('ics/openssl-apps/ecparam.c')
expected = [
u'Copyright (c) 1998-2005 The OpenSSL Project.',
u'Copyright 2002 Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_openssl_apps_engine_c(self):
test_file = self.get_test_loc('ics/openssl-apps/engine.c')
expected = [
u'Copyright (c) 2000 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_apps_genpkey_c(self):
test_file = self.get_test_loc('ics/openssl-apps/genpkey.c')
expected = [
u'Copyright (c) 2006 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_apps_nseq_c(self):
test_file = self.get_test_loc('ics/openssl-apps/nseq.c')
expected = [
u'Copyright (c) 1999 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_apps_openssl_c(self):
test_file = self.get_test_loc('ics/openssl-apps/openssl.c')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright (c) 1998-2006 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_apps_pkcs12_c(self):
test_file = self.get_test_loc('ics/openssl-apps/pkcs12.c')
expected = [
u'Copyright (c) 1999-2006 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_apps_prime_c(self):
test_file = self.get_test_loc('ics/openssl-apps/prime.c')
expected = [
u'Copyright (c) 2004 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_apps_rand_c(self):
test_file = self.get_test_loc('ics/openssl-apps/rand.c')
expected = [
u'Copyright (c) 1998-2001 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_apps_s_client_c(self):
test_file = self.get_test_loc('ics/openssl-apps/s_client.c')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright (c) 1998-2006 The OpenSSL Project.',
u'Copyright 2005 Nokia.',
]
check_detection(expected, test_file)
def test_ics_openssl_apps_s_server_c(self):
test_file = self.get_test_loc('ics/openssl-apps/s_server.c')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright (c) 1998-2006 The OpenSSL Project.',
u'Copyright 2002 Sun Microsystems, Inc.',
u'Copyright 2005 Nokia.',
]
check_detection(expected, test_file)
def test_ics_openssl_apps_smime_c(self):
test_file = self.get_test_loc('ics/openssl-apps/smime.c')
expected = [
u'Copyright (c) 1999-2004 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_apps_speed_c(self):
test_file = self.get_test_loc('ics/openssl-apps/speed.c')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright 2002 Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_openssl_apps_timeouts_h(self):
test_file = self.get_test_loc('ics/openssl-apps/timeouts.h')
expected = [
u'Copyright (c) 1999-2005 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_cryptlib_c(self):
test_file = self.get_test_loc('ics/openssl-crypto/cryptlib.c')
expected = [
u'Copyright (c) 1998-2006 The OpenSSL Project.',
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright 2002 Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_lpdir_nyi_c(self):
test_file = self.get_test_loc('ics/openssl-crypto/LPdir_nyi.c')
expected = [
u'Copyright (c) 2004, Richard Levitte <richard@levitte.org>',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_md32_common_h(self):
test_file = self.get_test_loc('ics/openssl-crypto/md32_common.h')
expected = [
u'Copyright (c) 1999-2007 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_mem_clr_c(self):
test_file = self.get_test_loc('ics/openssl-crypto/mem_clr.c')
expected = [
u'Copyright (c) 2001 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_o_str_c(self):
test_file = self.get_test_loc('ics/openssl-crypto/o_str.c')
expected = [
u'Copyright (c) 2003 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_aes_aes_h(self):
test_file = self.get_test_loc('ics/openssl-crypto-aes/aes.h')
expected = [
u'Copyright (c) 1998-2002 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_aes_aes_cfb_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-aes/aes_cfb.c')
expected = [
u'Copyright (c) 2002-2006 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_asn1_a_sign_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-asn1/a_sign.c')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright (c) 1998-2003 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_asn1_asn_mime_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-asn1/asn_mime.c')
expected = [
u'Copyright (c) 1999-2008 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_asn1_asn_moid_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-asn1/asn_moid.c')
expected = [
u'Copyright (c) 2001-2004 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_asn1_asn1_err_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-asn1/asn1_err.c')
expected = [
u'Copyright (c) 1999-2009 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_asn1_asn1_gen_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-asn1/asn1_gen.c')
expected = [
u'Copyright (c) 2002 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_asn1_asn1t_h(self):
test_file = self.get_test_loc('ics/openssl-crypto-asn1/asn1t.h')
expected = [
u'Copyright (c) 2000-2005 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_asn1_tasn_dec_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-asn1/tasn_dec.c')
expected = [
u'Copyright (c) 2000-2005 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_asn1_tasn_enc_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-asn1/tasn_enc.c')
expected = [
u'Copyright (c) 2000-2004 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_asn1_tasn_prn_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-asn1/tasn_prn.c')
expected = [
u'Copyright (c) 2000,2005 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_asn1_x_nx509_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-asn1/x_nx509.c')
expected = [
u'Copyright (c) 2005 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_bf_bf_locl_h(self):
test_file = self.get_test_loc('ics/openssl-crypto-bf/bf_locl.h')
expected = [
u'Copyright (c) 1995-1997 Eric Young (eay@cryptsoft.com)',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_bf_copyright(self):
test_file = self.get_test_loc('ics/openssl-crypto-bf/COPYRIGHT')
expected = [
u'Copyright (c) 1995-1997 Eric Young (eay@cryptsoft.com)',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_bio_b_print_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-bio/b_print.c')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright Patrick Powell 1995',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_bio_bss_bio_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-bio/bss_bio.c')
expected = [
u'Copyright (c) 1998-2003 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_bn_bn_h(self):
test_file = self.get_test_loc('ics/openssl-crypto-bn/bn.h')
expected = [
u'Copyright (c) 1995-1997 Eric Young (eay@cryptsoft.com)',
u'Copyright (c) 1998-2006 The OpenSSL Project.',
u'Copyright 2002 Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_bn_bn_blind_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-bn/bn_blind.c')
expected = [
u'Copyright (c) 1998-2006 The OpenSSL Project.',
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_bn_bn_ctx_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-bn/bn_ctx.c')
expected = [
u'Copyright (c) 1998-2004 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_bn_bn_err_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-bn/bn_err.c')
expected = [
u'Copyright (c) 1999-2007 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_bn_bn_exp_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-bn/bn_exp.c')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright (c) 1998-2005 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_bn_bn_gf2m_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-bn/bn_gf2m.c')
expected = [
u'Copyright 2002 Sun Microsystems, Inc.',
u'Copyright (c) 1998-2002 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_bn_bn_lcl_h(self):
test_file = self.get_test_loc('ics/openssl-crypto-bn/bn_lcl.h')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright (c) 1998-2000 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_bn_bn_mod_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-bn/bn_mod.c')
expected = [
u'Copyright (c) 1998-2000 The OpenSSL Project.',
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_des_read2pwd_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-des/read2pwd.c')
expected = [
u'Copyright (c) 2001-2002 The OpenSSL Project.',
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_des_readme(self):
test_file = self.get_test_loc('ics/openssl-crypto-des/README')
expected = [
u'Copyright (c) 1997, Eric Young',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_des_rpc_des_h(self):
test_file = self.get_test_loc('ics/openssl-crypto-des/rpc_des.h')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright (c) 1986 by Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_des_asm_des_enc_m4(self):
test_file = self.get_test_loc('ics/openssl-crypto-des-asm/des_enc.m4')
expected = [
u'Copyright Svend Olaf Mikkelsen.',
u'Copyright Eric A. Young.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_dsa_dsa_locl_h(self):
test_file = self.get_test_loc('ics/openssl-crypto-dsa/dsa_locl.h')
expected = [
u'Copyright (c) 2007 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_ec_ec_h(self):
test_file = self.get_test_loc('ics/openssl-crypto-ec/ec.h')
expected = [
u'Copyright (c) 1998-2005 The OpenSSL Project.',
u'Copyright 2002 Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_ec_ec_asn1_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-ec/ec_asn1.c')
expected = [
u'Copyright (c) 2000-2003 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_ec_ec_curve_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-ec/ec_curve.c')
expected = [
u'Copyright (c) 1998-2004 The OpenSSL Project.',
u'Copyright 2002 Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_ec_ec_mult_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-ec/ec_mult.c')
expected = [
u'Copyright (c) 1998-2007 The OpenSSL Project.',
u'Copyright 2002 Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_ec_ec2_mult_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-ec/ec2_mult.c')
expected = [
u'Copyright 2002 Sun Microsystems, Inc.',
u'Copyright (c) 1998-2003 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_ec_ec2_smpl_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-ec/ec2_smpl.c')
expected = [
u'Copyright 2002 Sun Microsystems, Inc.',
u'Copyright (c) 1998-2005 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_ec_ecp_mont_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-ec/ecp_mont.c')
expected = [
u'Copyright (c) 1998-2001 The OpenSSL Project.',
u'Copyright 2002 Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_ec_ecp_nist_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-ec/ecp_nist.c')
expected = [
u'Copyright (c) 1998-2003 The OpenSSL Project.',
u'Copyright 2002 Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_ec_ecp_smpl_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-ec/ecp_smpl.c')
expected = [
u'Copyright (c) 1998-2002 The OpenSSL Project.',
u'Copyright 2002 Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_ecdh_ecdh_h(self):
test_file = self.get_test_loc('ics/openssl-crypto-ecdh/ecdh.h')
expected = [
u'Copyright 2002 Sun Microsystems, Inc.',
u'Copyright (c) 2000-2002 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_ecdsa_ecdsatest_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-ecdsa/ecdsatest.c')
expected = [
u'Copyright (c) 2000-2005 The OpenSSL Project.',
u'Copyright 2002 Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_ecdsa_ecs_asn1_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-ecdsa/ecs_asn1.c')
expected = [
u'Copyright (c) 2000-2002 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_engine_eng_all_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-engine/eng_all.c')
expected = [
u'Copyright (c) 2000-2001 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_engine_eng_cryptodev_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-engine/eng_cryptodev.c')
expected = [
u'Copyright (c) 2002 Bob Beck <beck@openbsd.org>',
u'Copyright (c) 2002 Theo de Raadt',
u'Copyright (c) 2002 Markus Friedl',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_engine_eng_dyn_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-engine/eng_dyn.c')
expected = [
u'Copyright (c) 1999-2001 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_engine_eng_err_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-engine/eng_err.c')
expected = [
u'Copyright (c) 1999-2010 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_engine_eng_fat_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-engine/eng_fat.c')
expected = [
u'Copyright (c) 1999-2001 The OpenSSL Project.',
u'Copyright 2002 Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_engine_engine_h(self):
test_file = self.get_test_loc('ics/openssl-crypto-engine/engine.h')
expected = [
u'Copyright (c) 1999-2004 The OpenSSL Project.',
u'Copyright 2002 Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_evp_m_ecdsa_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-evp/m_ecdsa.c')
expected = [
u'Copyright (c) 1998-2002 The OpenSSL Project.',
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_evp_m_sigver_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-evp/m_sigver.c')
expected = [
u'Copyright (c) 2006,2007 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_pem_pem_all_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-pem/pem_all.c')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright (c) 1998-2002 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_pkcs12_p12_crt_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-pkcs12/p12_crt.c')
expected = [
u'Copyright (c) 1999-2002 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_rand_rand_win_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-rand/rand_win.c')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright (c) 1998-2000 The OpenSSL Project.',
u'(c) Copyright Microsoft Corp. 1993.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_rc4_asm_rc4_ia64_pl(self):
test_file = self.get_test_loc('ics/openssl-crypto-rc4-asm/rc4-ia64.pl')
expected = [
u'Copyright (c) 2005 Hewlett-Packard Development Company, L.P.',
u'(c) 2005 Hewlett-Packard Development Company',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_ui_ui_compat_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-ui/ui_compat.c')
expected = [
u'Copyright (c) 2001-2002 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_ui_ui_openssl_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-ui/ui_openssl.c')
expected = [
u'Copyright (c) 2001 The OpenSSL Project.',
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_x509_x509_h(self):
test_file = self.get_test_loc('ics/openssl-crypto-x509/x509.h')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright 2002 Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_x509v3_v3_alt_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-x509v3/v3_alt.c')
expected = [
u'Copyright (c) 1999-2003 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_crypto_x509v3_v3_pci_c(self):
test_file = self.get_test_loc('ics/openssl-crypto-x509v3/v3_pci.c')
expected = [
u'Copyright (c) 2004 Kungliga Tekniska Hogskolan (Royal Institute of Technology, Stockholm, Sweden).',
]
check_detection(expected, test_file)
def test_ics_openssl_include_openssl_modes_h(self):
test_file = self.get_test_loc('ics/openssl-include-openssl/modes.h')
expected = [
u'Copyright (c) 2008 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_include_openssl_ssl_h(self):
test_file = self.get_test_loc('ics/openssl-include-openssl/ssl.h')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright (c) 1998-2007 The OpenSSL Project.',
u'Copyright 2002 Sun Microsystems, Inc.',
u'Copyright 2005 Nokia.',
]
check_detection(expected, test_file)
def test_ics_openssl_include_openssl_ssl3_h(self):
test_file = self.get_test_loc('ics/openssl-include-openssl/ssl3.h')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright (c) 1998-2002 The OpenSSL Project.',
u'Copyright 2002 Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_openssl_include_openssl_tls1_h(self):
test_file = self.get_test_loc('ics/openssl-include-openssl/tls1.h')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright (c) 1998-2006 The OpenSSL Project.',
u'Copyright 2002 Sun Microsystems, Inc.',
u'Copyright 2005 Nokia.',
]
check_detection(expected, test_file)
def test_ics_openssl_ssl_d1_both_c(self):
test_file = self.get_test_loc('ics/openssl-ssl/d1_both.c')
expected = [
u'Copyright (c) 1998-2005 The OpenSSL Project.',
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
]
check_detection(expected, test_file)
def test_ics_openssl_ssl_d1_clnt_c(self):
test_file = self.get_test_loc('ics/openssl-ssl/d1_clnt.c')
expected = [
u'Copyright (c) 1999-2007 The OpenSSL Project.',
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
]
check_detection(expected, test_file)
def test_ics_openssl_ssl_s2_lib_c(self):
test_file = self.get_test_loc('ics/openssl-ssl/s2_lib.c')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright (c) 1998-2007 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_openssl_ssl_s3_enc_c(self):
test_file = self.get_test_loc('ics/openssl-ssl/s3_enc.c')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright (c) 1998-2007 The OpenSSL Project.',
u'Copyright 2005 Nokia.',
]
check_detection(expected, test_file)
def test_ics_openssl_ssl_s3_lib_c(self):
test_file = self.get_test_loc('ics/openssl-ssl/s3_lib.c')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright (c) 1998-2007 The OpenSSL Project.',
u'Copyright 2002 Sun Microsystems, Inc.',
u'Copyright 2005 Nokia.',
]
check_detection(expected, test_file)
def test_ics_openssl_ssl_ssl_asn1_c(self):
test_file = self.get_test_loc('ics/openssl-ssl/ssl_asn1.c')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright 2005 Nokia.',
]
check_detection(expected, test_file)
def test_ics_openssl_ssl_ssl_cert_c(self):
test_file = self.get_test_loc('ics/openssl-ssl/ssl_cert.c')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright (c) 1998-2007 The OpenSSL Project.',
u'Copyright 2002 Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_openssl_ssl_ssltest_c(self):
test_file = self.get_test_loc('ics/openssl-ssl/ssltest.c')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright (c) 1998-2000 The OpenSSL Project.',
u'Copyright 2002 Sun Microsystems, Inc.',
u'Copyright 2005 Nokia.',
]
check_detection(expected, test_file)
def test_ics_openssl_ssl_t1_reneg_c(self):
test_file = self.get_test_loc('ics/openssl-ssl/t1_reneg.c')
expected = [
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright (c) 1998-2009 The OpenSSL Project.',
]
check_detection(expected, test_file)
def test_ics_oprofile_changelog_2002(self):
test_file = self.get_test_loc('ics/oprofile/ChangeLog-2002')
expected = [
u'copyright for 2002',
]
check_detection(expected, test_file)
def test_ics_oprofile_configure_in(self):
test_file = self.get_test_loc('ics/oprofile/configure.in')
expected = [
u'Copyright 1999 Olaf Titz <olaf@bigred.inka.de>',
]
check_detection(expected, test_file)
def test_ics_oprofile_popt_h(self):
test_file = self.get_test_loc('ics/oprofile/popt.h')
expected = [
u'(c) 1998-2000 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_oprofile_agents_jvmpi_jvmpi_oprofile_cpp(self):
test_file = self.get_test_loc('ics/oprofile-agents-jvmpi/jvmpi_oprofile.cpp')
expected = [
u'Copyright 2007 OProfile authors',
u'Copyright IBM Corporation 2007',
]
check_detection(expected, test_file)
def test_ics_oprofile_daemon_init_c(self):
test_file = self.get_test_loc('ics/oprofile-daemon/init.c')
expected = [
u'Copyright 2002 OProfile authors',
u'Copyright (c) 2005 Hewlett-Packard Co.',
]
check_detection(expected, test_file)
def test_ics_oprofile_daemon_opd_anon_c(self):
test_file = self.get_test_loc('ics/oprofile-daemon/opd_anon.c')
expected = [
u'Copyright 2005 OProfile authors',
]
check_detection(expected, test_file)
def test_ics_oprofile_daemon_opd_cookie_c(self):
test_file = self.get_test_loc('ics/oprofile-daemon/opd_cookie.c')
expected = [
u'Copyright 2002, 2005 OProfile authors',
]
check_detection(expected, test_file)
def test_ics_oprofile_daemon_opd_events_c(self):
test_file = self.get_test_loc('ics/oprofile-daemon/opd_events.c')
expected = [
u'Copyright 2002, 2003 OProfile authors',
]
check_detection(expected, test_file)
def test_ics_oprofile_daemon_opd_extended_c(self):
test_file = self.get_test_loc('ics/oprofile-daemon/opd_extended.c')
expected = [
u'Copyright 2007-2009 OProfile authors',
u'Copyright (c) 2009 Advanced Micro Devices, Inc.',
]
check_detection(expected, test_file)
def test_ics_oprofile_daemon_opd_ibs_c(self):
test_file = self.get_test_loc('ics/oprofile-daemon/opd_ibs.c')
expected = [
u'Copyright 2007-2010 OProfile authors',
u'Copyright (c) 2008 Advanced Micro Devices, Inc.',
]
check_detection(expected, test_file)
def test_ics_oprofile_daemon_opd_ibs_h(self):
test_file = self.get_test_loc('ics/oprofile-daemon/opd_ibs.h')
expected = [
u'Copyright 2008-2010 OProfile authors',
u'Copyright (c) 2008 Advanced Micro Devices, Inc.',
]
check_detection(expected, test_file)
def test_ics_oprofile_daemon_opd_ibs_trans_c(self):
test_file = self.get_test_loc('ics/oprofile-daemon/opd_ibs_trans.c')
expected = [
u'Copyright 2008 - 2010 OProfile authors',
u'Copyright (c) 2008 Advanced Micro Devices, Inc.',
]
check_detection(expected, test_file)
def test_ics_oprofile_daemon_opd_ibs_trans_h(self):
test_file = self.get_test_loc('ics/oprofile-daemon/opd_ibs_trans.h')
expected = [
u'Copyright 2008 OProfile authors',
u'Copyright (c) 2008 Advanced Micro Devices, Inc.',
]
check_detection(expected, test_file)
def test_ics_oprofile_daemon_opd_mangling_c(self):
test_file = self.get_test_loc('ics/oprofile-daemon/opd_mangling.c')
expected = [
u'Copyright 2002 OProfile authors',
]
check_detection(expected, test_file)
def test_ics_oprofile_daemon_opd_perfmon_c(self):
test_file = self.get_test_loc('ics/oprofile-daemon/opd_perfmon.c')
expected = [
u'Copyright 2003 OProfile authors',
]
check_detection(expected, test_file)
def test_ics_oprofile_daemon_opd_pipe_c(self):
test_file = self.get_test_loc('ics/oprofile-daemon/opd_pipe.c')
expected = [
u'Copyright 2008 OProfile authors',
]
check_detection(expected, test_file)
def test_ics_oprofile_daemon_opd_spu_c(self):
test_file = self.get_test_loc('ics/oprofile-daemon/opd_spu.c')
expected = [
u'Copyright 2007 OProfile authors',
u'(c) Copyright IBM Corporation 2007',
]
check_detection(expected, test_file)
def test_ics_oprofile_daemon_opd_trans_c(self):
test_file = self.get_test_loc('ics/oprofile-daemon/opd_trans.c')
expected = [
u'Copyright 2002 OProfile authors',
u'Copyright (c) 2005 Hewlett-Packard Co.',
u'(c) Copyright IBM Corporation 2007',
]
check_detection(expected, test_file)
def test_ics_oprofile_daemon_opd_trans_h(self):
test_file = self.get_test_loc('ics/oprofile-daemon/opd_trans.h')
expected = [
u'Copyright 2002 OProfile authors',
u'(c) Copyright IBM Corporation 2007',
]
check_detection(expected, test_file)
def test_ics_oprofile_daemon_oprofiled_c(self):
test_file = self.get_test_loc('ics/oprofile-daemon/oprofiled.c')
expected = [
u'Copyright 2002, 2003 OProfile authors',
u'Copyright (c) 2005 Hewlett-Packard Co.',
]
check_detection(expected, test_file)
def test_ics_oprofile_daemon_liblegacy_p_module_h(self):
test_file = self.get_test_loc('ics/oprofile-daemon-liblegacy/p_module.h')
expected = [
u'Copyright 1996, 1997 Linux International.',
]
check_detection(expected, test_file)
def test_ics_oprofile_doc_oprofile_1_in(self):
test_file = self.get_test_loc('ics/oprofile-doc/oprofile.1.in')
expected = [
u'Copyright (c) 1998-2004 University of Manchester, UK, John Levon, and others.',
]
check_detection(expected, test_file)
def test_ics_oprofile_events_ppc64_970mp_events(self):
test_file = self.get_test_loc('ics/oprofile-events-ppc64-970MP/events')
expected = [
u'Copyright OProfile authors',
u'Copyright (c) International Business Machines, 2007.',
]
check_detection(expected, test_file)
def test_ics_oprofile_events_ppc64_970mp_unit_masks(self):
test_file = self.get_test_loc('ics/oprofile-events-ppc64-970MP/unit_masks')
expected = [
u'Copyright OProfile authors',
u'Copyright (c) International Business Machines, 2006.',
]
check_detection(expected, test_file)
def test_ics_oprofile_events_ppc64_cell_be_events(self):
test_file = self.get_test_loc('ics/oprofile-events-ppc64-cell-be/events')
expected = [
u'Copyright OProfile authors',
u'(c) COPYRIGHT International Business Machines Corp. 2006',
]
check_detection(expected, test_file)
def test_ics_oprofile_events_ppc64_ibm_compat_v1_events(self):
test_file = self.get_test_loc('ics/oprofile-events-ppc64-ibm-compat-v1/events')
expected = [
u'Copyright OProfile authors',
u'Copyright (c) International Business Machines, 2009.',
]
check_detection(expected, test_file)
def test_ics_oprofile_events_x86_64_family10_events_extra_contributed(self):
test_file = self.get_test_loc('ics/oprofile-events-x86-64-family10/events')
expected = [
u'Copyright OProfile authors',
u'Copyright (c) 2006-2008 Advanced Micro Devices',
]
check_detection(expected, test_file)
def test_ics_oprofile_events_x86_64_family11h_unit_masks(self):
test_file = self.get_test_loc('ics/oprofile-events-x86-64-family11h/unit_masks')
expected = [
u'Copyright OProfile authors',
u'Copyright (c) Advanced Micro Devices, 2006-2008',
]
check_detection(expected, test_file)
def test_ics_oprofile_events_x86_64_family12h_events_extra_contributed(self):
test_file = self.get_test_loc('ics/oprofile-events-x86-64-family12h/events')
expected = [
u'Copyright OProfile authors',
u'Copyright (c) 2006-2010 Advanced Micro Devices',
]
check_detection(expected, test_file)
def test_ics_oprofile_include_sstream(self):
test_file = self.get_test_loc('ics/oprofile-include/sstream')
expected = [
u'Copyright (c) 2000 Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_oprofile_libop_op_hw_specific_h(self):
test_file = self.get_test_loc('ics/oprofile-libop/op_hw_specific.h')
expected = [
u'Copyright 2008 Intel Corporation',
]
check_detection(expected, test_file)
def test_ics_oprofile_libpopt_findme_c(self):
test_file = self.get_test_loc('ics/oprofile-libpopt/findme.c')
expected = [
u'(c) 1998-2002 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_oprofile_libpp_callgraph_container_cpp(self):
test_file = self.get_test_loc('ics/oprofile-libpp/callgraph_container.cpp')
expected = [
u'Copyright 2004 OProfile authors',
]
check_detection(expected, test_file)
def test_ics_oprofile_libpp_format_output_h(self):
test_file = self.get_test_loc('ics/oprofile-libpp/format_output.h')
expected = [
u'Copyright 2002 OProfile authors',
]
check_detection(expected, test_file)
def test_ics_oprofile_libpp_populate_cpp(self):
test_file = self.get_test_loc('ics/oprofile-libpp/populate.cpp')
expected = [
u'Copyright 2003 OProfile authors',
u'(c) Copyright IBM Corporation 2007',
]
check_detection(expected, test_file)
def test_ics_oprofile_libpp_symbol_cpp(self):
test_file = self.get_test_loc('ics/oprofile-libpp/symbol.cpp')
expected = [
u'Copyright 2002, 2004 OProfile authors',
]
check_detection(expected, test_file)
def test_ics_oprofile_libpp_xml_utils_cpp(self):
test_file = self.get_test_loc('ics/oprofile-libpp/xml_utils.cpp')
expected = [
u'Copyright 2006 OProfile authors',
]
check_detection(expected, test_file)
def test_ics_oprofile_libregex_demangle_java_symbol_cpp(self):
test_file = self.get_test_loc('ics/oprofile-libregex/demangle_java_symbol.cpp')
expected = [
u'Copyright 2007 OProfile authors',
]
check_detection(expected, test_file)
def test_ics_oprofile_libutil_sparse_array_h(self):
test_file = self.get_test_loc('ics/oprofile-libutil++/sparse_array.h')
expected = [
u'Copyright 2007 OProfile authors',
u'Copyright (c) International Business Machines, 2007.',
]
check_detection(expected, test_file)
def test_ics_oprofile_libutil_string_manip_cpp(self):
test_file = self.get_test_loc('ics/oprofile-libutil++/string_manip.cpp')
expected = [
u'Copyright 2002 OProfile authors',
]
check_detection(expected, test_file)
def test_ics_oprofile_libutil_utility_h(self):
test_file = self.get_test_loc('ics/oprofile-libutil++/utility.h')
expected = [
u'Copyright 2002 OProfile authors',
u'(c) Copyright boost.org 1999.',
]
check_detection(expected, test_file)
def test_ics_oprofile_module_ia64_op_pmu_c(self):
test_file = self.get_test_loc('ics/oprofile-module-ia64/op_pmu.c')
expected = [
u'Copyright 2002 OProfile authors',
u'Copyright (c) 1999 Ganesh Venkitachalam <venkitac@us.ibm.com>',
u'Copyright (c) 1999-2002 Hewlett Packard Co Stephane Eranian <eranian@hpl.hp.com> David Mosberger-Tang <davidm@hpl.hp.com>',
]
check_detection(expected, test_file)
def test_ics_oprofile_opcontrol_opcontrol_cpp(self):
test_file = self.get_test_loc('ics/oprofile-opcontrol/opcontrol.cpp')
expected = [
u'Copyright 2008, The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_oprofile_opjitconv_conversion_c(self):
test_file = self.get_test_loc('ics/oprofile-opjitconv/conversion.c')
expected = [
u'Copyright 2008 OProfile authors',
u'Copyright IBM Corporation 2008',
]
check_detection(expected, test_file)
def test_ics_oprofile_pp_oparchive_cpp(self):
test_file = self.get_test_loc('ics/oprofile-pp/oparchive.cpp')
expected = [
u'Copyright 2003, 2004 OProfile authors',
]
check_detection(expected, test_file)
def test_ics_oprofile_pp_oparchive_options_cpp(self):
test_file = self.get_test_loc('ics/oprofile-pp/oparchive_options.cpp')
expected = [
u'Copyright 2002, 2003, 2004 OProfile authors',
]
check_detection(expected, test_file)
def test_ics_oprofile_utils_opcontrol(self):
test_file = self.get_test_loc('ics/oprofile-utils/opcontrol')
expected = [
u'Copyright 2002 Read',
u'Copyright IBM Corporation 2007',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_oprofile_utils_opcontrol_misc(self):
test_file = self.get_test_loc('ics/oprofile-utils/opcontrol')
expected = [
u'Copyright 2002 Read the file COPYING',
u'Copyright IBM Corporation 2007',
]
check_detection(expected, test_file)
def test_ics_ping_notice(self):
test_file = self.get_test_loc('ics/ping/NOTICE')
expected = [
u'Copyright (c) 1989 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_ping_ping_c(self):
test_file = self.get_test_loc('ics/ping/ping.c')
expected = [
u'Copyright (c) 1989 The Regents of the University of California.',
u'Copyright (c) 1989 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_ping6_notice(self):
test_file = self.get_test_loc('ics/ping6/NOTICE')
expected = [
u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
u'Copyright (c) 1989, 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_ping6_ping6_c(self):
test_file = self.get_test_loc('ics/ping6/ping6.c')
expected = [
u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
u'Copyright (c) 1989, 1993 The Regents of the University of California.',
u'Copyright (c) 1989, 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_auth_c(self):
test_file = self.get_test_loc('ics/ppp-pppd/auth.c')
expected = [
u'Copyright (c) 1993-2002 Paul Mackerras.',
u'Copyright (c) 1984-2000 Carnegie Mellon University.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_cbcp_c(self):
test_file = self.get_test_loc('ics/ppp-pppd/cbcp.c')
expected = [
u'Copyright (c) 1995 Pedro Roque Marques.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_ccp_c(self):
test_file = self.get_test_loc('ics/ppp-pppd/ccp.c')
expected = [
u'Copyright (c) 1994-2002 Paul Mackerras.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_chap_ms_c(self):
test_file = self.get_test_loc('ics/ppp-pppd/chap_ms.c')
expected = [
u'Copyright (c) 1995 Eric Rosenquist.',
u'Copyright (c) 2002 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_chap_ms_h(self):
test_file = self.get_test_loc('ics/ppp-pppd/chap_ms.h')
expected = [
u'Copyright (c) 1995 Eric Rosenquist.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_chap_md5_c(self):
test_file = self.get_test_loc('ics/ppp-pppd/chap-md5.c')
expected = [
u'Copyright (c) 2003 Paul Mackerras.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_demand_c(self):
test_file = self.get_test_loc('ics/ppp-pppd/demand.c')
expected = [
u'Copyright (c) 1996-2002 Paul Mackerras.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_eap_c(self):
test_file = self.get_test_loc('ics/ppp-pppd/eap.c')
expected = [
u'Copyright (c) 2001 by Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_ecp_c(self):
test_file = self.get_test_loc('ics/ppp-pppd/ecp.c')
expected = [
u'Copyright (c) 2002 The Android Open Source Project',
u'Copyright (c) 1994-2002 Paul Mackerras.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_ecp_h(self):
test_file = self.get_test_loc('ics/ppp-pppd/ecp.h')
expected = [
u'Copyright (c) 2002 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_eui64_c(self):
test_file = self.get_test_loc('ics/ppp-pppd/eui64.c')
expected = [
u'Copyright (c) 1999 Tommi Komulainen.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_fsm_c(self):
test_file = self.get_test_loc('ics/ppp-pppd/fsm.c')
expected = [
u'Copyright (c) 1984-2000 Carnegie Mellon University.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_ipv6cp_c(self):
test_file = self.get_test_loc('ics/ppp-pppd/ipv6cp.c')
expected = [
u'Copyright (c) 1999 Tommi Komulainen.',
u'Copyright (c) 1995, 1996, 1997 Francis.Dupont@inria.fr, INRIA',
u'Copyright (c) 1998, 1999 Francis.Dupont@inria.fr',
u'Copyright (c) 1984-2000 Carnegie Mellon University.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_main_c(self):
test_file = self.get_test_loc('ics/ppp-pppd/main.c')
expected = [
u'Copyright (c) 1984-2000 Carnegie Mellon University.',
u'Copyright (c) 1999-2004 Paul Mackerras.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_md4_c(self):
test_file = self.get_test_loc('ics/ppp-pppd/md4.c')
expected = [
u'(c) 1990 RSA Data Security, Inc.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_md5_c(self):
test_file = self.get_test_loc('ics/ppp-pppd/md5.c')
expected = [
u'Copyright (c) 1990, RSA Data Security, Inc.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_md5_h(self):
test_file = self.get_test_loc('ics/ppp-pppd/md5.h')
expected = [
u'Copyright (c) 1990, RSA Data Security, Inc.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_notice(self):
test_file = self.get_test_loc('ics/ppp-pppd/NOTICE')
expected = [
u'Copyright (c) 1984-2000 Carnegie Mellon University.',
u'Copyright (c) 1995 Pedro Roque Marques.',
u'Copyright (c) 2000-2004 Paul Mackerras.',
u'Copyright (c) 1994-2002 Paul Mackerras.',
u'Copyright (c) 2003 Paul Mackerras.',
u'Copyright (c) 1996-2002 Paul Mackerras.',
u'Copyright (c) 1999-2004 Paul Mackerras.',
u'Copyright (c) 2000-2002 Paul Mackerras.',
u'Copyright (c) 1999-2002 Paul Mackerras.',
u'Copyright (c) 1995 Eric Rosenquist.',
u'Copyright (c) 2002 The Android Open Source Project',
u'Copyright (c) 1990, RSA Data Security, Inc.',
u'Copyright (c) 2001 by Sun Microsystems, Inc.',
u'Copyright (c) 1999 Tommi Komulainen.',
u'Copyright (c) 1995, 1996, 1997 Francis.Dupont@inria.fr, INRIA',
u'Copyright (c) 1998, 1999 Francis.Dupont@inria.fr',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_pppd_8(self):
test_file = self.get_test_loc('ics/ppp-pppd/pppd.8')
expected = [
u'Copyright (c) 1993-2003 Paul Mackerras <paulus@samba.org>',
u'Copyright (c) 1984-2000 Carnegie Mellon University.',
u'Copyright (c) 1993-2004 Paul Mackerras.',
u'Copyright (c) 1995 Pedro Roque Marques.',
u'Copyright (c) 1995 Eric Rosenquist.',
u'Copyright (c) 1999 Tommi Komulainen.',
u'Copyright (c) Andrew Tridgell 1999',
u'Copyright (c) 2000 by Sun Microsystems, Inc.',
u'Copyright (c) 2001 by Sun Microsystems, Inc.',
u'Copyright (c) 2002 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_pppd_h(self):
test_file = self.get_test_loc('ics/ppp-pppd/pppd.h')
expected = [
u'Copyright (c) 1984-2000 Carnegie Mellon University.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_sys_linux_c(self):
test_file = self.get_test_loc('ics/ppp-pppd/sys-linux.c')
expected = [
u'Copyright (c) 1994-2004 Paul Mackerras.',
u'Copyright (c) 1984-2000 Carnegie Mellon University.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_sys_solaris_c(self):
test_file = self.get_test_loc('ics/ppp-pppd/sys-solaris.c')
expected = [
u'Copyright (c) 2000 by Sun Microsystems, Inc.',
u'Copyright (c) 1995-2002 Paul Mackerras.',
u'Copyright (c) 1984-2000 Carnegie Mellon University.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_tty_c(self):
test_file = self.get_test_loc('ics/ppp-pppd/tty.c')
expected = [
u'Copyright (c) 2000-2004 Paul Mackerras.',
u'Copyright (c) 1984-2000 Carnegie Mellon University.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_utils_c(self):
test_file = self.get_test_loc('ics/ppp-pppd/utils.c')
expected = [
u'Copyright (c) 1999-2002 Paul Mackerras.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_include_net_ppp_defs_h(self):
test_file = self.get_test_loc('ics/ppp-pppd-include-net/ppp_defs.h')
expected = [
u'Copyright (c) 1984 Paul Mackerras.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_include_net_pppio_h(self):
test_file = self.get_test_loc('ics/ppp-pppd-include-net/pppio.h')
expected = [
u'Copyright (c) 1994 Paul Mackerras.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_include_net_slcompress_h(self):
test_file = self.get_test_loc('ics/ppp-pppd-include-net/slcompress.h')
expected = [
u'Copyright (c) 1989 Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_minconn_c(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins/minconn.c')
expected = [
u'Copyright (c) 1999 Paul Mackerras.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_passprompt_c(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins/passprompt.c')
expected = [
u'Copyright 1999 Paul Mackerras, Alan Curry.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_winbind_c(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins/winbind.c')
expected = [
u'Copyright (c) 2003 Andrew Bartlet <abartlet@samba.org>',
u'Copyright 1999 Paul Mackerras, Alan Curry.',
u'Copyright (c) 2002 Roaring Penguin Software Inc.',
u'Copyright (c) 1996, Matjaz Godec <gody@elgo.si>',
u'Copyright (c) 1996, Lars Fenneberg <in5y050@public.uni-hamburg.de>',
u'Copyright (c) 1997, Miguel A.L. Paraz <map@iphil.net>',
u'Copyright (c) 1995,1996,1997,1998 Lars Fenneberg <lf@elemental.net>',
u'Copyright (c) 2002 Roaring Penguin Software Inc.',
u'Copyright (c) 2003, Sean E. Millichamp',
u'Copyright (c) Andrew Tridgell 1992-2001',
u'Copyright (c) Simo Sorce 2001-2002',
u'Copyright (c) Martin Pool 2003',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_pppoatm_copying(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins-pppoatm/COPYING')
expected = [
u'Copyright 1995-2000 EPFL-LRC/ICA',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_pppoatm_pppoatm_c(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins-pppoatm/pppoatm.c')
expected = [
u'Copyright 2000 Mitchell Blank Jr.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_radius_avpair_c(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/avpair.c')
expected = [
u'Copyright (c) 1995 Lars Fenneberg',
u'Copyright 1992 Livingston Enterprises, Inc.',
u'Copyright 1992,1993, 1994,1995 The Regents of the University of Michigan and Merit Network, Inc.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_radius_buildreq_c(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/buildreq.c')
expected = [
u'Copyright (c) 1995,1997 Lars Fenneberg',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_radius_clientid_c(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/clientid.c')
expected = [
u'Copyright (c) 1995,1996,1997 Lars Fenneberg',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_radius_config_c(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/config.c')
expected = [
u'Copyright (c) 1995,1996,1997 Lars Fenneberg',
u'Copyright 1992 Livingston Enterprises, Inc.',
u'Copyright 1992,1993, 1994,1995 The Regents of the University of Michigan and Merit Network, Inc.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_radius_copyright(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/COPYRIGHT')
expected = [
u'Copyright (c) 2002 Roaring Penguin Software Inc.',
u'Copyright (c) 1995,1996,1997,1998 Lars Fenneberg <lf@elemental.net>',
u'Copyright 1992 Livingston Enterprises, Inc. Livingston Enterprises, Inc.',
u'Copyright (c) 1991-2, RSA Data Security, Inc.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_radius_dict_c(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/dict.c')
expected = [
u'Copyright (c) 2002 Roaring Penguin Software Inc.',
u'Copyright (c) 1995,1996,1997 Lars Fenneberg',
u'Copyright 1992 Livingston Enterprises, Inc.',
u'Copyright 1992,1993, 1994,1995 The Regents of the University of Michigan and Merit Network, Inc.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_radius_includes_h(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/includes.h')
expected = [
u'Copyright (c) 1997 Lars Fenneberg',
u'Copyright 1992 Livingston Enterprises, Inc.',
u'Copyright 1992,1993, 1994,1995 The Regents of the University of Michigan and Merit Network, Inc.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_radius_lock_c(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/lock.c')
expected = [
u'Copyright (c) 1997 Lars Fenneberg',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_radius_makefile_linux(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/Makefile.linux')
expected = [
u'Copyright 2002 Roaring Penguin Software Inc.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_radius_options_h(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/options.h')
expected = [
u'Copyright (c) 1996 Lars Fenneberg',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_radius_pathnames_h(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/pathnames.h')
expected = [
u'Copyright (c) 1995,1996 Lars Fenneberg',
u'Copyright 1992 Livingston Enterprises, Inc.',
u'Copyright 1992,1993, 1994,1995 The Regents of the University of Michigan and Merit Network, Inc.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_radius_radattr_c(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/radattr.c')
expected = [
u'Copyright (c) 2002 Roaring Penguin Software Inc.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_radius_radius_c(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/radius.c')
expected = [
u'Copyright (c) 2002 Roaring Penguin Software Inc.',
u'Copyright (c) 1996, Matjaz Godec <gody@elgo.si>',
u'Copyright (c) 1996, Lars Fenneberg <in5y050@public.uni-hamburg.de>',
u'Copyright (c) 1997, Miguel A.L. Paraz <map@iphil.net>',
u'Copyright (c) 1995,1996,1997,1998 Lars Fenneberg <lf@elemental.net>',
u'Copyright (c) 2002 Roaring Penguin Software Inc.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_radius_radiusclient_h(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/radiusclient.h')
expected = [
u'Copyright (c) 1995,1996,1997,1998 Lars Fenneberg',
u'Copyright 1992 Livingston Enterprises, Inc.',
u'Copyright 1992,1993, 1994,1995 The Regents of the University of Michigan and Merit Network, Inc.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_radius_radrealms_c(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/radrealms.c')
expected = [
u'Copyright (c) 2002 Netservers',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_rp_pppoe_common_c(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins-rp-pppoe/common.c')
expected = [
u'Copyright (c) 2000 by Roaring Penguin Software Inc.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_rp_pppoe_discovery_c(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins-rp-pppoe/discovery.c')
expected = [
u'Copyright (c) 1999 by Roaring Penguin Software Inc.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_rp_pppoe_makefile_linux(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins-rp-pppoe/Makefile.linux')
expected = [
u'Copyright (c) 2001 Roaring Penguin Software Inc.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_rp_pppoe_plugin_c(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins-rp-pppoe/plugin.c')
expected = [
u'Copyright (c) 2001 by Roaring Penguin Software Inc., Michal Ostrowski and Jamal Hadi Salim.',
u'Copyright 2000 Michal Ostrowski <mostrows@styx.uwaterloo.ca>, Jamal Hadi Salim <hadi@cyberus.ca>',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_rp_pppoe_pppoe_h(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins-rp-pppoe/pppoe.h')
expected = [
u'Copyright (c) 2000 Roaring Penguin Software Inc.',
]
check_detection(expected, test_file)
def test_ics_ppp_pppd_plugins_rp_pppoe_pppoe_discovery_c_trail_name(self):
test_file = self.get_test_loc('ics/ppp-pppd-plugins-rp-pppoe/pppoe-discovery.c')
expected = [
u'Copyright (c) 2000-2001 by Roaring Penguin Software Inc.',
u"Copyright (c) 2004 Marco d'Itri <md@linux.it>",
]
check_detection(expected, test_file)
def test_ics_proguard_notice(self):
test_file = self.get_test_loc('ics/proguard/NOTICE')
expected = [
u'Copyright (c) 2002-2009 Eric Lafortune.',
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_proguard_readme(self):
test_file = self.get_test_loc('ics/proguard/README')
expected = [
u'Copyright (c) 2002-2009 Eric Lafortune (eric@graphics.cornell.edu)',
]
check_detection(expected, test_file)
def test_ics_proguard_docs_acknowledgements_html(self):
test_file = self.get_test_loc('ics/proguard-docs/acknowledgements.html')
expected = [
u"Copyright (c) 2002-2009 <a href http://www.graphics.cornell.edu/~eric/'>Eric",
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_proguard_docs_acknowledgements_html_markup(self):
test_file = self.get_test_loc('ics/proguard-docs/acknowledgements.html')
expected = [
u'Copyright (c) 2002-2009 http://www.graphics.cornell.edu/~eric/ Eric Lafortune',
]
check_detection(expected, test_file)
def test_ics_proguard_docs_gpl_html(self):
test_file = self.get_test_loc('ics/proguard-docs/GPL.html')
expected = [
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_proguard_docs_gpl_exception_html(self):
test_file = self.get_test_loc('ics/proguard-docs/GPL_exception.html')
expected = [
u'Copyright (c) 2002-2009 Eric Lafortune',
]
check_detection(expected, test_file)
def test_ics_proguard_examples_annotations_src_proguard_annotation_keep_java(self):
test_file = self.get_test_loc('ics/proguard-examples-annotations-src-proguard-annotation/Keep.java')
expected = [
u'Copyright (c) 2002-2007 Eric Lafortune (eric@graphics.cornell.edu)',
]
check_detection(expected, test_file)
def test_ics_proguard_src_proguard_argumentwordreader_java(self):
test_file = self.get_test_loc('ics/proguard-src-proguard/ArgumentWordReader.java')
expected = [
u'Copyright (c) 2002-2009 Eric Lafortune (eric@graphics.cornell.edu)',
]
check_detection(expected, test_file)
def test_ics_proguard_src_proguard_gui_guiresources_properties(self):
test_file = self.get_test_loc('ics/proguard-src-proguard-gui/GUIResources.properties')
expected = [
u'Copyright (c) 2002-2009 Eric Lafortune (eric@graphics.cornell.edu)',
u'Copyright (c) 2002-2009.',
]
check_detection(expected, test_file)
def test_ics_protobuf_aclocal_m4(self):
test_file = self.get_test_loc('ics/protobuf/aclocal.m4')
expected = [
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
u'Copyright (c) 2002, 2003, 2005, 2006, 2007 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1997, 2000, 2001, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2008 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1997, 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_protobuf_configure(self):
test_file = self.get_test_loc('ics/protobuf/configure')
expected = [
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_protobuf_install_txt(self):
test_file = self.get_test_loc('ics/protobuf/INSTALL.txt')
expected = [
u'Copyright 1994, 1995, 1996, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_protobuf_ltmain_sh(self):
test_file = self.get_test_loc('ics/protobuf/ltmain.sh')
expected = [
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007 2008 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_protobuf_readme_txt(self):
test_file = self.get_test_loc('ics/protobuf/README.txt')
expected = [
u'Copyright 2008 Google Inc.',
]
check_detection(expected, test_file)
    def test_ics_protobuf_editors_proto_vim(self):
        """Verify copyright detection in the ICS protobuf-editors proto.vim."""
        test_file = self.get_test_loc('ics/protobuf-editors/proto.vim')
        expected = [
            u'Copyright 2008 Google Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_gtest_aclocal_m4(self):
        """Verify detection of the many FSF copyrights in protobuf-gtest aclocal.m4."""
        test_file = self.get_test_loc('ics/protobuf-gtest/aclocal.m4')
        expected = [
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 2004, 2005, 2007 Free Software Foundation, Inc.',
            u'Copyright (c) 2004 Free Software Foundation, Inc.',
            u'Copyright (c) 2004, 2005, 2007 Free Software Foundation, Inc.',
            u'Copyright (c) 2002, 2003, 2005, 2006, 2007 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1997, 2000, 2001, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1997, 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 2004, 2005 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_gtest_scons_sconscript(self):
        """Verify copyright detection in the protobuf-gtest SCons script."""
        test_file = self.get_test_loc('ics/protobuf-gtest-scons/SConscript')
        expected = [
            u'Copyright 2008 Google Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_java_src_main_java_com_google_protobuf_abstractmessage_java(self):
        """Verify copyright detection in protobuf's AbstractMessage.java."""
        test_file = self.get_test_loc('ics/protobuf-java-src-main-java-com-google-protobuf/AbstractMessage.java')
        expected = [
            u'Copyright 2008 Google Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_m4_libtool_m4(self):
        """Verify copyright detection in protobuf's m4/libtool.m4."""
        test_file = self.get_test_loc('ics/protobuf-m4/libtool.m4')
        expected = [
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 2008 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_m4_ltoptions_m4(self):
        """Verify copyright detection in protobuf's m4/ltoptions.m4."""
        test_file = self.get_test_loc('ics/protobuf-m4/ltoptions.m4')
        expected = [
            u'Copyright (c) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_m4_ltsugar_m4(self):
        """Verify copyright detection in protobuf's m4/ltsugar.m4."""
        test_file = self.get_test_loc('ics/protobuf-m4/ltsugar.m4')
        expected = [
            u'Copyright (c) 2004, 2005, 2007 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_m4_ltversion_m4(self):
        """Verify copyright detection in protobuf's m4/ltversion.m4."""
        test_file = self.get_test_loc('ics/protobuf-m4/ltversion.m4')
        expected = [
            u'Copyright (c) 2004 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_src_google_protobuf_compiler_javamicro_javamicro_params_h(self):
        """Verify copyright detection in protobuf's javamicro_params.h."""
        test_file = self.get_test_loc('ics/protobuf-src-google-protobuf-compiler-javamicro/javamicro_params.h')
        expected = [
            u'Copyright 2010 Google Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_src_google_protobuf_io_tokenizer_cc(self):
        """Verify copyright detection in protobuf's io/tokenizer.cc."""
        test_file = self.get_test_loc('ics/protobuf-src-google-protobuf-io/tokenizer.cc')
        expected = [
            u'Copyright 2008 Google Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_src_google_protobuf_stubs_structurally_valid_cc(self):
        """Verify detection of a year-range copyright in structurally_valid.cc."""
        test_file = self.get_test_loc('ics/protobuf-src-google-protobuf-stubs/structurally_valid.cc')
        expected = [
            u'Copyright 2005-2008 Google Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_a_out_h(self):
        """Verify copyright detection in qemu's a.out.h."""
        test_file = self.get_test_loc('ics/qemu/a.out.h')
        expected = [
            u'Copyright 1997, 1998, 1999, 2001 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_acl_c(self):
        """Verify copyright detection in qemu's acl.c."""
        test_file = self.get_test_loc('ics/qemu/acl.c')
        expected = [
            u'Copyright (c) 2009 Red Hat, Inc',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_aio_android_c(self):
        """Verify detection of a trailing-year IBM copyright in aio-android.c."""
        test_file = self.get_test_loc('ics/qemu/aio-android.c')
        expected = [
            u'Copyright IBM, Corp. 2008',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_android_trace_h(self):
        """Verify copyright detection in qemu's android-trace.h."""
        test_file = self.get_test_loc('ics/qemu/android-trace.h')
        expected = [
            u'Copyright (c) 2006-2007 The Android Open Source Project',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_arch_init_c(self):
        """Verify copyright detection in qemu's arch_init.c."""
        test_file = self.get_test_loc('ics/qemu/arch_init.c')
        expected = [
            u'Copyright (c) 2003-2008 Fabrice Bellard',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_arm_dis_c(self):
        """Verify detection of a long multi-year FSF copyright in arm-dis.c."""
        test_file = self.get_test_loc('ics/qemu/arm-dis.c')
        expected = [
            u'Copyright 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004 2007, Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_arm_semi_c(self):
        """Verify copyright detection in qemu's arm-semi.c."""
        test_file = self.get_test_loc('ics/qemu/arm-semi.c')
        expected = [
            u'Copyright (c) 2005, 2007 CodeSourcery.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_block_c(self):
        """Verify copyright detection in qemu's block.c."""
        test_file = self.get_test_loc('ics/qemu/block.c')
        expected = [
            u'Copyright (c) 2003 Fabrice Bellard',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_bt_host_c(self):
        """Verify detection of a copyright with a trailing email in bt-host.c."""
        test_file = self.get_test_loc('ics/qemu/bt-host.c')
        expected = [
            u'Copyright (c) 2008 Andrzej Zaborowski <balrog@zabor.org>',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_console_c(self):
        """Verify copyright detection in qemu's console.c."""
        test_file = self.get_test_loc('ics/qemu/console.c')
        expected = [
            u'Copyright (c) 2004 Fabrice Bellard',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_cpu_all_h(self):
        """Verify copyright detection in qemu's cpu-all.h."""
        test_file = self.get_test_loc('ics/qemu/cpu-all.h')
        expected = [
            u'Copyright (c) 2003 Fabrice Bellard',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_cpu_exec_c(self):
        """Verify copyright detection in qemu's cpu-exec.c."""
        test_file = self.get_test_loc('ics/qemu/cpu-exec.c')
        expected = [
            u'Copyright (c) 2003-2005 Fabrice Bellard',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_curses_c(self):
        """Verify copyright detection in qemu's curses.c."""
        test_file = self.get_test_loc('ics/qemu/curses.c')
        expected = [
            u'Copyright (c) 2005 Andrzej Zaborowski <balrog@zabor.org>',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_curses_keys_h(self):
        """Verify copyright detection in qemu's curses_keys.h."""
        test_file = self.get_test_loc('ics/qemu/curses_keys.h')
        expected = [
            u'Copyright (c) 2005 Andrzej Zaborowski <balrog@zabor.org>',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_cutils_c(self):
        """Verify copyright detection in qemu's cutils.c."""
        test_file = self.get_test_loc('ics/qemu/cutils.c')
        expected = [
            u'Copyright (c) 2006 Fabrice Bellard',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_d3des_c(self):
        """Verify detection of multiple copyrights (comma-run years) in d3des.c."""
        test_file = self.get_test_loc('ics/qemu/d3des.c')
        expected = [
            u'Copyright (c) 1999 AT&T Laboratories Cambridge.',
            u'Copyright (c) 1988,1989,1990,1991,1992 by Richard Outerbridge.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_d3des_h(self):
        """Verify detection of multiple copyrights in d3des.h."""
        test_file = self.get_test_loc('ics/qemu/d3des.h')
        expected = [
            u'Copyright (c) 1999 AT&T Laboratories Cambridge.',
            u'Copyright (c) 1988,1989,1990,1991,1992 by Richard Outerbridge',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_device_tree_c(self):
        """Verify copyright detection in device_tree.c (authors currently kept in the statement)."""
        test_file = self.get_test_loc('ics/qemu/device_tree.c')
        expected = [
            u'Copyright 2008 IBM Corporation. Authors Jerone Young <jyoung5@us.ibm.com> Hollis Blanchard <hollisb@us.ibm.com>',
        ]
        check_detection(expected, test_file)
    @expectedFailure
    def test_ics_qemu_device_tree_c_extra_author(self):
        """Known failure: the author names should be excluded from the detected statement."""
        test_file = self.get_test_loc('ics/qemu/device_tree.c')
        expected = [
            u'Copyright 2008 IBM Corporation.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_dma_helpers_c(self):
        """Verify copyright detection in qemu's dma-helpers.c."""
        test_file = self.get_test_loc('ics/qemu/dma-helpers.c')
        expected = [
            u'Copyright (c) 2009 Red Hat',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_dynlink_h(self):
        """Verify copyright detection in qemu's dynlink.h."""
        test_file = self.get_test_loc('ics/qemu/dynlink.h')
        expected = [
            u'Copyright (c) 2008 The Android Open Source Project',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_dynlink_static_c(self):
        """Verify copyright detection in qemu's dynlink-static.c."""
        test_file = self.get_test_loc('ics/qemu/dynlink-static.c')
        expected = [
            u'Copyright (c) 2010 The Android Open Source Project',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_feature_to_c_sh(self):
        """Verify copyright detection in qemu's feature_to_c.sh."""
        test_file = self.get_test_loc('ics/qemu/feature_to_c.sh')
        expected = [
            u'Copyright (c) 2007 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_hostregs_helper_h(self):
        """Verify copyright detection in qemu's hostregs_helper.h."""
        test_file = self.get_test_loc('ics/qemu/hostregs_helper.h')
        expected = [
            u'Copyright (c) 2007 CodeSourcery',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_host_utils_c(self):
        """Verify detection of two distinct copyrights in host-utils.c."""
        test_file = self.get_test_loc('ics/qemu/host-utils.c')
        expected = [
            u'Copyright (c) 2003 Fabrice Bellard',
            u'Copyright (c) 2007 Aurelien Jarno',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_host_utils_h(self):
        """Verify detection of two distinct copyrights in host-utils.h."""
        test_file = self.get_test_loc('ics/qemu/host-utils.h')
        expected = [
            u'Copyright (c) 2007 Thiemo Seufer',
            u'Copyright (c) 2007 Jocelyn Mayer',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_i386_dis_c(self):
        """Verify detection of long multi-year FSF copyrights in i386-dis.c."""
        test_file = self.get_test_loc('ics/qemu/i386-dis.c')
        expected = [
            u'Copyright 1988, 1989, 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
            u'Copyright 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_json_lexer_c(self):
        """Verify detection of a trailing-year IBM copyright in json-lexer.c."""
        test_file = self.get_test_loc('ics/qemu/json-lexer.c')
        expected = [
            u'Copyright IBM, Corp. 2009',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_keymaps_c(self):
        """Verify copyright detection in qemu's keymaps.c."""
        test_file = self.get_test_loc('ics/qemu/keymaps.c')
        expected = [
            u'Copyright (c) 2004 Johannes Schindelin',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_kqemu_c(self):
        """Verify copyright detection in qemu's kqemu.c."""
        test_file = self.get_test_loc('ics/qemu/kqemu.c')
        expected = [
            u'Copyright (c) 2005-2008 Fabrice Bellard',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_kqemu_h(self):
        """Verify copyright detection in qemu's kqemu.h."""
        test_file = self.get_test_loc('ics/qemu/kqemu.h')
        expected = [
            u'Copyright (c) 2004-2008 Fabrice Bellard',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_loader_c(self):
        """Verify detection of leading '(c) Copyright' statements in loader.c."""
        test_file = self.get_test_loc('ics/qemu/loader.c')
        expected = [
            u'Copyright (c) 2006 Fabrice Bellard',
            u'(c) Copyright 2008 Semihalf',
            u'(c) Copyright 2000-2005 Wolfgang Denk',
        ]
        check_detection(expected, test_file)
    @expectedFailure
    def test_ics_qemu_loader_c_trail_name(self):
        """Known failure: the trailing company name and email after Wolfgang Denk are dropped."""
        test_file = self.get_test_loc('ics/qemu/loader.c')
        expected = [
            u'Copyright (c) 2006 Fabrice Bellard',
            u'(c) Copyright 2008 Semihalf',
            u'(C) Copyright 2000-2005 Wolfgang Denk, DENX Software Engineering, wd@denx.de.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_migration_exec_c(self):
        """Verify detection of two trailing-year copyrights in migration-exec.c."""
        test_file = self.get_test_loc('ics/qemu/migration-exec.c')
        expected = [
            u'Copyright IBM, Corp. 2008',
            u'Copyright Dell MessageOne 2008',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_monitor_c(self):
        """Verify copyright detection in qemu's monitor.c."""
        test_file = self.get_test_loc('ics/qemu/monitor.c')
        expected = [
            u'Copyright (c) 2003-2004 Fabrice Bellard',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_net_checksum_c(self):
        """Verify detection of a bare '(c) year name' statement in net-checksum.c."""
        test_file = self.get_test_loc('ics/qemu/net-checksum.c')
        expected = [
            u'(c) 2008 Gerd Hoffmann <kraxel@redhat.com>',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_notify_c(self):
        """Verify detection of a trailing-year IBM copyright in notify.c."""
        test_file = self.get_test_loc('ics/qemu/notify.c')
        expected = [
            u'Copyright IBM, Corp. 2010',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_os_posix_c(self):
        """Verify detection of two distinct copyrights in os-posix.c."""
        test_file = self.get_test_loc('ics/qemu/os-posix.c')
        expected = [
            u'Copyright (c) 2003-2008 Fabrice Bellard',
            u'Copyright (c) 2010 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_ppc_dis_c(self):
        """Verify detection of several multi-year FSF copyrights in ppc-dis.c."""
        test_file = self.get_test_loc('ics/qemu/ppc-dis.c')
        expected = [
            u'Copyright 1994, 1995, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.',
            u'Copyright 1994, 1995, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.',
            u'Copyright 1994, 1995, 1996, 1997, 1998, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_qdict_c(self):
        """Verify copyright detection in qemu's qdict.c."""
        test_file = self.get_test_loc('ics/qemu/qdict.c')
        expected = [
            u'Copyright (c) 2009 Red Hat Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_qemu_error_c(self):
        """Verify copyright detection in qemu's qemu-error.c."""
        test_file = self.get_test_loc('ics/qemu/qemu-error.c')
        expected = [
            u'Copyright (c) 2010 Red Hat Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_qemu_io_c(self):
        """Verify detection of two distinct copyrights in qemu-io.c."""
        test_file = self.get_test_loc('ics/qemu/qemu-io.c')
        expected = [
            u'Copyright (c) 2009 Red Hat, Inc.',
            u'Copyright (c) 2003-2005 Silicon Graphics, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_qemu_option_c(self):
        """Verify detection of two distinct copyrights in qemu-option.c."""
        test_file = self.get_test_loc('ics/qemu/qemu-option.c')
        expected = [
            u'Copyright (c) 2003-2008 Fabrice Bellard',
            u'Copyright (c) 2009 Kevin Wolf <kwolf@redhat.com>',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_qemu_options_h(self):
        """Verify detection of two distinct copyrights in qemu-options.h."""
        test_file = self.get_test_loc('ics/qemu/qemu-options.h')
        expected = [
            u'Copyright (c) 2003-2008 Fabrice Bellard',
            u'Copyright (c) 2010 Jes Sorensen <Jes.Sorensen@redhat.com>',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_qemu_thread_c(self):
        """Verify detection of a trailing-year Red Hat copyright in qemu-thread.c."""
        test_file = self.get_test_loc('ics/qemu/qemu-thread.c')
        expected = [
            u'Copyright Red Hat, Inc. 2009',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_softmmu_outside_jit_c(self):
        """Verify copyright detection in qemu's softmmu_outside_jit.c."""
        test_file = self.get_test_loc('ics/qemu/softmmu_outside_jit.c')
        expected = [
            u'Copyright (c) 2007-2009 The Android Open Source Project',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_softmmu_semi_h(self):
        """Verify copyright detection in qemu's softmmu-semi.h."""
        test_file = self.get_test_loc('ics/qemu/softmmu-semi.h')
        expected = [
            u'Copyright (c) 2007 CodeSourcery.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_sys_tree_h(self):
        """Verify copyright detection in qemu's sys-tree.h."""
        test_file = self.get_test_loc('ics/qemu/sys-tree.h')
        expected = [
            u'Copyright 2002 Niels Provos <provos@citi.umich.edu>',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_tap_win32_c(self):
        """Verify detection of 'name, year' ordered copyrights in tap-win32.c."""
        test_file = self.get_test_loc('ics/qemu/tap-win32.c')
        expected = [
            u'Copyright (c) Damion K. Wilson, 2003',
            u'Copyright (c) James Yonan, 2003-2004',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_tcpdump_c(self):
        """Verify copyright detection in qemu's tcpdump.c."""
        test_file = self.get_test_loc('ics/qemu/tcpdump.c')
        expected = [
            u'Copyright (c) 2008 The Android Open Source Project',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_uboot_image_h(self):
        """Verify detection of a leading '(c) Copyright' statement in uboot_image.h."""
        test_file = self.get_test_loc('ics/qemu/uboot_image.h')
        expected = [
            u'(c) Copyright 2000-2005 Wolfgang Denk',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_usb_linux_c(self):
        """Verify copyright detection in usb-linux.c (current, imperfect output)."""
        test_file = self.get_test_loc('ics/qemu/usb-linux.c')
        expected = [
            u'Copyright (c) 2005 Fabrice Bellard',
            u'Copyright (c) 2008 Max Krasnyansky Support',
            u'Copyright 2008 TJ',
        ]
        check_detection(expected, test_file)
    @expectedFailure
    def test_ics_qemu_usb_linux_c_extra_support_trail_email(self):
        """Known failure: 'Support' should not be appended and the trailing email should be kept."""
        test_file = self.get_test_loc('ics/qemu/usb-linux.c')
        expected = [
            u'Copyright (c) 2005 Fabrice Bellard',
            u'Copyright (c) 2008 Max Krasnyansky',
            u'Copyright 2008 TJ <linux@tjworld.net>',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_vl_android_c(self):
        """Verify that a repeated copyright in vl-android.c is reported twice."""
        test_file = self.get_test_loc('ics/qemu/vl-android.c')
        expected = [
            u'Copyright (c) 2003-2008 Fabrice Bellard',
            u'Copyright (c) 2003-2008 Fabrice Bellard',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_vnc_android_c(self):
        """Verify detection of three distinct copyrights in vnc-android.c."""
        test_file = self.get_test_loc('ics/qemu/vnc-android.c')
        expected = [
            u'Copyright (c) 2006 Anthony Liguori <anthony@codemonkey.ws>',
            u'Copyright (c) 2006 Fabrice Bellard',
            u'Copyright (c) 2009 Red Hat, Inc',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_android_android_h(self):
        """Verify copyright detection in qemu-android's android.h."""
        test_file = self.get_test_loc('ics/qemu-android/android.h')
        expected = [
            u'Copyright (c) 2007 The Android Open Source Project',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_android_main_c(self):
        """Verify detection of two Android Open Source Project copyrights in main.c."""
        test_file = self.get_test_loc('ics/qemu-android/main.c')
        expected = [
            u'Copyright (c) 2006-2008 The Android Open Source Project',
            u'Copyright (c) 2006-2011 The Android Open Source Project',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_android_main_common_c(self):
        """Verify copyright detection in qemu-android's main-common.c."""
        test_file = self.get_test_loc('ics/qemu-android/main-common.c')
        expected = [
            u'Copyright (c) 2011 The Android Open Source Project',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_android_qemu_setup_c(self):
        """Verify copyright detection in qemu-android's qemu-setup.c."""
        test_file = self.get_test_loc('ics/qemu-android/qemu-setup.c')
        expected = [
            u'Copyright (c) 2006-2010 The Android Open Source Project',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_android_snapshot_c(self):
        """Verify detection including a lowercase 'copyright' statement in snapshot.c."""
        test_file = self.get_test_loc('ics/qemu-android/snapshot.c')
        expected = [
            u'Copyright (c) 2010 The Android Open Source Project',
            u'copyright (c) 2003 Fabrice Bellard',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_android_utils_mapfile_c(self):
        """Verify copyright detection in qemu-android-utils' mapfile.c."""
        test_file = self.get_test_loc('ics/qemu-android-utils/mapfile.c')
        expected = [
            u'Copyright (c) 2007-2010 The Android Open Source Project',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_audio_alsaaudio_c(self):
        """Verify detection of two distinct copyrights in qemu-audio's alsaaudio.c."""
        test_file = self.get_test_loc('ics/qemu-audio/alsaaudio.c')
        expected = [
            u'Copyright (c) 2008-2010 The Android Open Source Project',
            u'Copyright (c) 2005 Vassili Karpov',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_audio_audio_c(self):
        """Verify detection of two distinct copyrights in qemu-audio's audio.c."""
        test_file = self.get_test_loc('ics/qemu-audio/audio.c')
        expected = [
            u'Copyright (c) 2007-2008 The Android Open Source Project',
            u'Copyright (c) 2003-2005 Vassili Karpov',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_audio_audio_h(self):
        """Verify copyright detection in qemu-audio's audio.h."""
        test_file = self.get_test_loc('ics/qemu-audio/audio.h')
        expected = [
            u'Copyright (c) 2003-2005 Vassili Karpov',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_audio_audio_template_h(self):
        """Verify copyright detection in qemu-audio's audio_template.h."""
        test_file = self.get_test_loc('ics/qemu-audio/audio_template.h')
        expected = [
            u'Copyright (c) 2005 Vassili Karpov',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_audio_coreaudio_c(self):
        """Verify detection of two distinct copyrights in qemu-audio's coreaudio.c."""
        test_file = self.get_test_loc('ics/qemu-audio/coreaudio.c')
        expected = [
            u'Copyright (c) 2008 The Android Open Source Project',
            u'Copyright (c) 2005 Mike Kronenberg',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_audio_esdaudio_c(self):
        """Verify detection of two distinct copyrights in qemu-audio's esdaudio.c."""
        test_file = self.get_test_loc('ics/qemu-audio/esdaudio.c')
        expected = [
            u'Copyright (c) 2008-2009 The Android Open Source Project',
            u'Copyright (c) 2006 Frederick Reeve',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_audio_fmodaudio_c(self):
        """Verify copyright detection in qemu-audio's fmodaudio.c."""
        test_file = self.get_test_loc('ics/qemu-audio/fmodaudio.c')
        expected = [
            u'Copyright (c) 2004-2005 Vassili Karpov',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_audio_mixeng_c(self):
        """Verify detection of three copyright variants (with and without '(c)') in mixeng.c."""
        test_file = self.get_test_loc('ics/qemu-audio/mixeng.c')
        expected = [
            u'Copyright (c) 2004-2005 Vassili Karpov',
            u'Copyright (c) 1998 Fabrice Bellard',
            u'Copyright 1998 Fabrice Bellard.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_audio_rate_template_h(self):
        """Verify detection of two distinct copyrights in rate_template.h."""
        test_file = self.get_test_loc('ics/qemu-audio/rate_template.h')
        expected = [
            u'Copyright (c) 2004-2005 Vassili Karpov',
            u'Copyright (c) 1998 Fabrice Bellard',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_audio_wavaudio_c(self):
        """Verify detection of two distinct copyrights in qemu-audio's wavaudio.c."""
        test_file = self.get_test_loc('ics/qemu-audio/wavaudio.c')
        expected = [
            u'Copyright (c) 2007 The Android Open Source Project',
            u'Copyright (c) 2004-2005 Vassili Karpov',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_audio_winaudio_c(self):
        """Verify copyright detection in qemu-audio's winaudio.c."""
        test_file = self.get_test_loc('ics/qemu-audio/winaudio.c')
        expected = [
            u'Copyright (c) 2007 The Android Open Source Project',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_block_bochs_c(self):
        """Verify copyright detection in qemu-block's bochs.c."""
        test_file = self.get_test_loc('ics/qemu-block/bochs.c')
        expected = [
            u'Copyright (c) 2005 Alex Beregszaszi',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_block_cloop_c(self):
        """Verify copyright detection in qemu-block's cloop.c."""
        test_file = self.get_test_loc('ics/qemu-block/cloop.c')
        expected = [
            u'Copyright (c) 2004 Johannes E. Schindelin',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_block_nbd_c(self):
        """Verify detection of two distinct copyrights in qemu-block's nbd.c."""
        test_file = self.get_test_loc('ics/qemu-block/nbd.c')
        expected = [
            u'Copyright (c) 2008 Bull S.A.S.',
            u'Copyright (c) 2007 Anthony Liguori <anthony@codemonkey.ws>',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_block_parallels_c(self):
        """Verify copyright detection in qemu-block's parallels.c."""
        test_file = self.get_test_loc('ics/qemu-block/parallels.c')
        expected = [
            u'Copyright (c) 2007 Alex Beregszaszi',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_block_qcow_c(self):
        """Verify copyright detection in qemu-block's qcow.c."""
        test_file = self.get_test_loc('ics/qemu-block/qcow.c')
        expected = [
            u'Copyright (c) 2004-2006 Fabrice Bellard',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_block_vmdk_c(self):
        """Verify detection of two distinct copyrights in qemu-block's vmdk.c."""
        test_file = self.get_test_loc('ics/qemu-block/vmdk.c')
        expected = [
            u'Copyright (c) 2004 Fabrice Bellard',
            u'Copyright (c) 2005 Filip Navara',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_block_vpc_c(self):
        """Verify detection of two distinct copyrights in qemu-block's vpc.c."""
        test_file = self.get_test_loc('ics/qemu-block/vpc.c')
        expected = [
            u'Copyright (c) 2005 Alex Beregszaszi',
            u'Copyright (c) 2009 Kevin Wolf <kwolf@suse.de>',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_block_vvfat_c(self):
        """Verify detection of a comma-separated two-year copyright in vvfat.c."""
        test_file = self.get_test_loc('ics/qemu-block/vvfat.c')
        expected = [
            u'Copyright (c) 2004,2005 Johannes E. Schindelin',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_libpng_1_2_19_png_c(self):
        """Verify detection of the repeated libpng copyright trio in png.c."""
        test_file = self.get_test_loc('ics/qemu-distrib-libpng-1.2.19/png.c')
        expected = [
            u'Copyright (c) 1998-2007 Glenn Randers-Pehrson',
            u'Copyright (c) 1996, 1997 Andreas Dilger',
            u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.',
            u'Copyright (c) 1998-2007 Glenn Randers-Pehrson',
            u'Copyright (c) 1996-1997 Andreas Dilger',
            u'Copyright (c) 1995-1996 Guy Eric Schalnat, Group 42, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_libpng_1_2_19_png_h(self):
        """Verify detection of the many libpng history copyrights in png.h."""
        test_file = self.get_test_loc('ics/qemu-distrib-libpng-1.2.19/png.h')
        expected = [
            u'Copyright (c) 1998-2007 Glenn Randers-Pehrson',
            u'Copyright (c) 1996, 1997 Andreas Dilger',
            u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.',
            u'Copyright (c) 2004, 2006-2007 Glenn Randers-Pehrson',
            u'Copyright (c) 2000-2002 Glenn Randers-Pehrson',
            u'Copyright (c) 1998, 1999, 2000 Glenn Randers-Pehrson',
            u'Copyright (c) 1996, 1997 Andreas Dilger',
            u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_libpng_1_2_19_pngconf_h(self):
        """Verify detection of the libpng copyright trio in pngconf.h."""
        test_file = self.get_test_loc('ics/qemu-distrib-libpng-1.2.19/pngconf.h')
        expected = [
            u'Copyright (c) 1998-2007 Glenn Randers-Pehrson',
            u'Copyright (c) 1996, 1997 Andreas Dilger',
            u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_libpng_1_2_19_pngerror_c(self):
        """Verify detection of the libpng copyright trio in pngerror.c."""
        test_file = self.get_test_loc('ics/qemu-distrib-libpng-1.2.19/pngerror.c')
        expected = [
            u'Copyright (c) 1998-2007 Glenn Randers-Pehrson',
            u'Copyright (c) 1996, 1997 Andreas Dilger',
            u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_libpng_1_2_19_pnggccrd_c(self):
        """Verify detection of Intel, Greg Roelofs and libpng copyrights in pnggccrd.c."""
        test_file = self.get_test_loc('ics/qemu-distrib-libpng-1.2.19/pnggccrd.c')
        expected = [
            u'Copyright (c) 1998 Intel Corporation',
            u'Copyright (c) 1999-2002,2007 Greg Roelofs',
            u'Copyright (c) 1998-2007 Glenn Randers-Pehrson',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_libpng_1_2_19_pngmem_c(self):
        """Verify detection of the libpng copyright trio in pngmem.c."""
        test_file = self.get_test_loc('ics/qemu-distrib-libpng-1.2.19/pngmem.c')
        expected = [
            u'Copyright (c) 1998-2006 Glenn Randers-Pehrson',
            u'Copyright (c) 1996, 1997 Andreas Dilger',
            u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_libpng_1_2_19_pngrtran_c(self):
        """Verify detection in pngrtran.c, including an unusual date-like year form."""
        test_file = self.get_test_loc('ics/qemu-distrib-libpng-1.2.19/pngrtran.c')
        expected = [
            u'Copyright (c) 1998-2007 Glenn Randers-Pehrson',
            u'Copyright (c) 1996, 1997 Andreas Dilger',
            u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.',
            u'Copyright (c) 1998-01-04 Charles Poynton poynton at inforamp.net',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_libpng_1_2_19_pngvcrd_c(self):
        """Verify detection of libpng and Intel copyrights in pngvcrd.c."""
        test_file = self.get_test_loc('ics/qemu-distrib-libpng-1.2.19/pngvcrd.c')
        expected = [
            u'Copyright (c) 1998-2007 Glenn Randers-Pehrson',
            u'Copyright (c) 1998, Intel Corporation',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_sdl_1_2_12_copying(self):
        """Verify detection in the SDL COPYING (LGPL) text, including a 'copyrighted by' form."""
        test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12/COPYING')
        expected = [
            u'Copyright (c) 1991, 1999 Free Software Foundation, Inc.',
            u'copyrighted by the Free Software Foundation',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_sdl_1_2_12_include_begin_code_h(self):
        """Verify copyright detection in SDL's begin_code.h."""
        test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-include/begin_code.h')
        expected = [
            u'Copyright (c) 1997-2004 Sam Lantinga',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_sdl_1_2_12_include_sdl_h(self):
        """Verify copyright detection in SDL's SDL.h."""
        test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-include/SDL.h')
        expected = [
            u'Copyright (c) 1997-2006 Sam Lantinga',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_sdl_1_2_12_include_sdl_opengl_h(self):
        """Verify detection of SDL and SGI copyrights in SDL_opengl.h."""
        test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-include/SDL_opengl.h')
        expected = [
            u'Copyright (c) 1997-2006 Sam Lantinga',
            u'Copyright (c) 1991-2004 Silicon Graphics, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_sdl_1_2_12_src_audio_sdl_mixer_mmx_c(self):
        """Verify detection of a copyright with a parenthesized email in SDL_mixer_MMX.c."""
        test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-audio/SDL_mixer_MMX.c')
        expected = [
            u'Copyright (c) 1997-2006 Sam Lantinga',
            u'Copyright 2002 Stephane Marchesin (stephane.marchesin@wanadoo.fr)',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_sdl_1_2_12_src_audio_sdl_mixer_mmx_h(self):
        """Verify detection of a copyright with a parenthesized email in SDL_mixer_MMX.h."""
        test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-audio/SDL_mixer_MMX.h')
        expected = [
            u'Copyright 2002 Stephane Marchesin (stephane.marchesin@wanadoo.fr)',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_sdl_1_2_12_src_audio_dc_aica_c(self):
        """Verify detection of a minimal '(c) year name' statement in aica.c."""
        test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-audio-dc/aica.c')
        expected = [
            u'(c) 2000 Dan Potter',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_sdl_1_2_12_src_audio_sun_sdl_sunaudio_c_trail_name(self):
        """Verify detection of a 'by <person> and <company>' copyright in SDL_sunaudio.c."""
        test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-audio-sun/SDL_sunaudio.c')
        expected = [
            u'Copyright (c) 1997-2006 Sam Lantinga',
            u'Copyright 1989 by Rich Gopstein and Harris Corporation',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_sdl_1_2_12_src_cdrom_macosx_audiofileplayer_c(self):
        """Verify detection of a long comma-year SDL copyright in AudioFilePlayer.c."""
        test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-cdrom-macosx/AudioFilePlayer.c')
        expected = [
            u'Copyright (c) 1997, 1998, 1999, 2000, 2001, 2002 Sam Lantinga',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_sdl_1_2_12_src_cdrom_macosx_sdlosxcaguard_c(self):
        """Verify detection of SDL and Apple '(c) Copyright' statements in SDLOSXCAGuard.c."""
        test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-cdrom-macosx/SDLOSXCAGuard.c')
        expected = [
            u'Copyright (c) 1997, 1998, 1999, 2000, 2001, 2002 Sam Lantinga',
            u'(c) Copyright 2002 Apple Computer, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_sdl_1_2_12_src_cdrom_macosx_sdlosxcaguard_h(self):
        """Verify detection of SDL and Apple '(c) Copyright' statements in SDLOSXCAGuard.h."""
        test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-cdrom-macosx/SDLOSXCAGuard.h')
        expected = [
            u'Copyright (c) 1997-2004 Sam Lantinga',
            u'(c) Copyright 2002 Apple Computer, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_sdl_1_2_12_src_cdrom_osf_sdl_syscdrom_c(self):
        """Verify detection of an oddly phrased statement in the OSF SDL_syscdrom.c."""
        test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-cdrom-osf/SDL_syscdrom.c')
        expected = [
            u'DirectMedia Layer Copyright (c) 2003',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_sdl_1_2_12_src_hermes_copying_lib(self):
        """Verify detection in the Hermes COPYING.LIB, including a 'copyrighted by' form."""
        test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-hermes/COPYING.LIB')
        expected = [
            u'Copyright (c) 1991 Free Software Foundation, Inc.',
            u'copyrighted by the Free Software Foundation',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_sdl_1_2_12_src_hermes_headmmx_h(self):
        """Verify detection of a copyright with a parenthesized email in HeadMMX.h."""
        test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-hermes/HeadMMX.h')
        expected = [
            u'Copyright (c) 1998 Christian Nentwich (c.nentwich@cs.ucl.ac.uk)',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_sdl_1_2_12_src_hermes_headx86_h(self):
        """Verify detection of a copyright with a parenthesized email in HeadX86.h."""
        test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-hermes/HeadX86.h')
        expected = [
            u'Copyright (c) 1998 Christian Nentwich (brn@eleet.mcb.at)',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_sdl_1_2_12_src_hermes_readme(self):
        """Verify detection of minimal '(c)' statements in the Hermes README."""
        test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-hermes/README')
        expected = [
            u'(c) 1998 Christian Nentwich',
            u'(c) Glenn Fielder (gaffer@gaffer.org)',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_sdl_1_2_12_src_joystick_os2_joyos2_h(self):
        """Verify copyright detection in SDL's OS/2 joyos2.h."""
        test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-joystick-os2/joyos2.h')
        expected = [
            u'Copyright (c) 1995 IBM Corporation',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_sdl_1_2_12_src_loadso_macosx_sdl_dlcompat_c(self):
        """Verify detection of a two-holder '&'-joined copyright in SDL_dlcompat.c."""
        test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-loadso-macosx/SDL_dlcompat.c')
        expected = [
            u'Copyright (c) 1997-2006 Sam Lantinga',
            u"Copyright (c) 2002 Jorge Acereda <jacereda@users.sourceforge.net> & Peter O'Gorman <ogorman@users.sourceforge.net>",
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_sdl_1_2_12_src_main_win32_version_rc(self):
        """Verify copyright detection in SDL's win32 version.rc resource file."""
        test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-main-win32/version.rc')
        expected = [
            u'Copyright (c) 2007 Sam Lantinga',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_sdl_1_2_12_src_stdlib_sdl_qsort_c(self):
        """Verify detection of a minimal '(c) year name' statement in SDL_qsort.c."""
        test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-stdlib/SDL_qsort.c')
        expected = [
            u'(c) 1998 Gareth McCaughan',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_sdl_1_2_12_src_thread_win32_win_ce_semaphore_c(self):
        """Verify copyright detection in win_ce_semaphore.c."""
        test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-thread-win32/win_ce_semaphore.c')
        expected = [
            u'Copyright (c) 1998, Johnson M. Hart',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_sdl_1_2_12_src_timer_macos_fasttimes_c(self):
        """Verify detection of a 'name, years' ordered copyright in FastTimes.c."""
        test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-timer-macos/FastTimes.c')
        expected = [
            u'Copyright (c) Matt Slot, 1999-2000.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_distrib_sdl_1_2_12_src_video_sdl_yuv_sw_c(self):
        """Verify detection of four distinct copyrights in SDL_yuv_sw.c."""
        test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video/SDL_yuv_sw.c')
        expected = [
            u'Copyright (c) 1997-2006 Sam Lantinga',
            u'Copyright (c) 1995 The Regents of the University of California.',
            u'Copyright (c) 1995 Erik Corry',
            u'Copyright (c) 1995 Brown University.',
        ]
        check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_fbcon_matrox_regs_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-fbcon/matrox_regs.h')
expected = [
u'Copyright 1996 The XFree86 Project, Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_fbcon_riva_mmio_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-fbcon/riva_mmio.h')
expected = [
u'Copyright 1993-1999 NVIDIA, Corporation.',
u'Copyright 1993-1999 NVIDIA, Corporation.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_maccommon_sdl_macwm_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-maccommon/SDL_macwm.c')
expected = [
u'Copyright (c) 1997-2006 Sam Lantinga',
u'Copyright (c) 1999 Apple Computer, Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_nanox_sdl_nxevents_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-nanox/SDL_nxevents.c')
expected = [
u'Copyright (c) 1997-2004 Sam Lantinga',
u'Copyright (c) 2001 Hsieh-Fu Tsai',
u'Copyright (c) 2002 Greg Haerr <greg@censoft.com>',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_nanox_sdl_nxevents_c_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-nanox/SDL_nxevents_c.h')
expected = [
u'Copyright (c) 1997-2004 Sam Lantinga',
u'Copyright (c) 2001 Hsieh-Fu Tsai',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_quartz_cgs_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-quartz/CGS.h')
expected = [
u'Copyright (c) 1997-2003 Sam Lantinga',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_extutil_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/extutil.h')
expected = [
u'Copyright 1989, 1998 The Open Group',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_panoramixext_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/panoramiXext.h')
expected = [
u'Copyright (c) 1991, 1997 Digital Equipment Corporation, Maynard, Massachusetts.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_xf86dga_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/xf86dga.h')
expected = [
u'Copyright (c) 1999 XFree86 Inc',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_xf86dga1_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/xf86dga1.h')
expected = [
u'Copyright (c) 1995 Jon Tombs',
u'Copyright (c) 1995 XFree86 Inc',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_xf86dga1str_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/xf86dga1str.h')
expected = [
u'Copyright (c) 1995 Jon Tombs',
u'Copyright (c) 1995 XFree86 Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_xf86vmode_h_trail_caps(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/xf86vmode.h')
expected = [
u'Copyright 1995 Kaleb S. KEITHLEY',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_xme_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/xme.h')
expected = [
u'Copyright 1993-2001 by Xi Graphics, Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_xv_h_trail_name(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/Xv.h')
expected = [
u'Copyright 1991 by Digital Equipment Corporation, Maynard, Massachusetts, and the Massachusetts Institute of Technology, Cambridge, Massachusetts.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_xv_xvlibint_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-Xv/Xvlibint.h')
expected = [
u'Copyright 1987 by Digital Equipment Corporation, Maynard, Massachusetts, and the Massachusetts Institute of Technology, Cambridge, Massachusetts.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_xxf86dga_xf86dga_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-Xxf86dga/XF86DGA.c')
expected = [
u'Copyright (c) 1995 Jon Tombs',
u'Copyright (c) 1995,1996 The XFree86 Project, Inc',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_xxf86vm_xf86vmode_c_trail_caps(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-Xxf86vm/XF86VMode.c')
expected = [
u'Copyright (c) 1995 Kaleb S. KEITHLEY',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_zlib_1_2_3_compress_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-zlib-1.2.3/compress.c')
expected = [
u'Copyright (c) 1995-2003 Jean-loup Gailly.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_zlib_1_2_3_crc32_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-zlib-1.2.3/crc32.c')
expected = [
u'Copyright (c) 1995-2005 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_zlib_1_2_3_deflate_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-zlib-1.2.3/deflate.c')
expected = [
u'Copyright (c) 1995-2005 Jean-loup Gailly.',
u'Copyright 1995-2005 Jean-loup Gailly',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_zlib_1_2_3_deflate_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-zlib-1.2.3/deflate.h')
expected = [
u'Copyright (c) 1995-2004 Jean-loup Gailly',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_zlib_1_2_3_gzio_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-zlib-1.2.3/gzio.c')
expected = [
u'Copyright (c) 1995-2005 Jean-loup Gailly.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_zlib_1_2_3_inffast_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-zlib-1.2.3/inffast.h')
expected = [
u'Copyright (c) 1995-2003 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_zlib_1_2_3_inftrees_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-zlib-1.2.3/inftrees.c')
expected = [
u'Copyright (c) 1995-2005 Mark Adler',
u'Copyright 1995-2005 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_zlib_1_2_3_trees_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-zlib-1.2.3/trees.c')
expected = [
u'Copyright (c) 1995-2005 Jean-loup Gailly',
]
check_detection(expected, test_file)
def test_ics_qemu_elff_dwarf_h(self):
test_file = self.get_test_loc('ics/qemu-elff/dwarf.h')
expected = [
u'Copyright (c) 2000,2001,2003,2004,2005,2006 Silicon Graphics, Inc.',
u'Portions Copyright 2002,2007 Sun Microsystems, Inc.',
u'Portions Copyright 2007-2009 David Anderson.',
]
check_detection(expected, test_file)
def test_ics_qemu_gdb_xml_arm_core_xml(self):
test_file = self.get_test_loc('ics/qemu-gdb-xml/arm-core.xml')
expected = [
u'Copyright (c) 2008 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_gdb_xml_power_altivec_xml(self):
test_file = self.get_test_loc('ics/qemu-gdb-xml/power-altivec.xml')
expected = [
u'Copyright (c) 2007, 2008 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_hw_apic_c(self):
test_file = self.get_test_loc('ics/qemu-hw/apic.c')
expected = [
u'Copyright (c) 2004-2005 Fabrice Bellard',
]
check_detection(expected, test_file)
def test_ics_qemu_hw_arm_misc_h(self):
test_file = self.get_test_loc('ics/qemu-hw/arm-misc.h')
expected = [
u'Copyright (c) 2006 CodeSourcery.',
]
check_detection(expected, test_file)
def test_ics_qemu_hw_armv7m_c(self):
test_file = self.get_test_loc('ics/qemu-hw/armv7m.c')
expected = [
u'Copyright (c) 2006-2007 CodeSourcery.',
]
check_detection(expected, test_file)
def test_ics_qemu_hw_baum_h(self):
test_file = self.get_test_loc('ics/qemu-hw/baum.h')
expected = [
u'Copyright (c) 2008 Samuel Thibault',
]
check_detection(expected, test_file)
def test_ics_qemu_hw_bt_h(self):
test_file = self.get_test_loc('ics/qemu-hw/bt.h')
expected = [
u'Copyright (c) 2007 OpenMoko, Inc.',
u'Copyright (c) 2000-2001 Qualcomm Incorporated',
u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2002-2006 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_qemu_hw_bt_hci_c(self):
test_file = self.get_test_loc('ics/qemu-hw/bt-hci.c')
expected = [
u'Copyright (c) 2007 OpenMoko, Inc.',
u'Copyright (c) 2008 Andrzej Zaborowski <balrog@zabor.org>',
]
check_detection(expected, test_file)
def test_ics_qemu_hw_bt_hid_c(self):
test_file = self.get_test_loc('ics/qemu-hw/bt-hid.c')
expected = [
u'Copyright (c) 2007-2008 OpenMoko, Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_hw_dma_c(self):
test_file = self.get_test_loc('ics/qemu-hw/dma.c')
expected = [
u'Copyright (c) 2003-2004 Vassili Karpov',
]
check_detection(expected, test_file)
def test_ics_qemu_hw_fw_cfg_c(self):
test_file = self.get_test_loc('ics/qemu-hw/fw_cfg.c')
expected = [
u'Copyright (c) 2008 Gleb Natapov',
]
check_detection(expected, test_file)
def test_ics_qemu_hw_irq_c(self):
test_file = self.get_test_loc('ics/qemu-hw/irq.c')
expected = [
u'Copyright (c) 2007 CodeSourcery.',
]
check_detection(expected, test_file)
def test_ics_qemu_hw_mmc_h(self):
test_file = self.get_test_loc('ics/qemu-hw/mmc.h')
expected = [
u'Copyright 2002 Hewlett-Packard Company',
]
check_detection(expected, test_file)
def test_ics_qemu_hw_msmouse_c(self):
test_file = self.get_test_loc('ics/qemu-hw/msmouse.c')
expected = [
u'Copyright (c) 2008 Lubomir Rintel',
]
check_detection(expected, test_file)
def test_ics_qemu_hw_power_supply_h(self):
test_file = self.get_test_loc('ics/qemu-hw/power_supply.h')
expected = [
u'Copyright (c) 2007 Anton Vorontsov <cbou@mail.ru>',
u'Copyright (c) 2004 Szabolcs Gyurko',
u'Copyright (c) 2003 Ian Molton <spyro@f2s.com>',
]
check_detection(expected, test_file)
def test_ics_qemu_hw_pxa_h(self):
test_file = self.get_test_loc('ics/qemu-hw/pxa.h')
expected = [
u'Copyright (c) 2006 Openedhand Ltd.',
]
check_detection(expected, test_file)
def test_ics_qemu_hw_qdev_c(self):
test_file = self.get_test_loc('ics/qemu-hw/qdev.c')
expected = [
u'Copyright (c) 2009 CodeSourcery',
]
check_detection(expected, test_file)
def test_ics_qemu_hw_sd_h(self):
test_file = self.get_test_loc('ics/qemu-hw/sd.h')
expected = [
u'Copyright (c) 2005-2007 Pierre Ossman',
]
check_detection(expected, test_file)
def test_ics_qemu_hw_smbios_c(self):
test_file = self.get_test_loc('ics/qemu-hw/smbios.c')
expected = [
u'Copyright (c) 2009 Hewlett-Packard Development Company, L.P.',
]
check_detection(expected, test_file)
def test_ics_qemu_hw_smc91c111_c(self):
test_file = self.get_test_loc('ics/qemu-hw/smc91c111.c')
expected = [
u'Copyright (c) 2005 CodeSourcery, LLC.',
]
check_detection(expected, test_file)
def test_ics_qemu_hw_usb_hid_c(self):
test_file = self.get_test_loc('ics/qemu-hw/usb-hid.c')
expected = [
u'Copyright (c) 2005 Fabrice Bellard',
u'Copyright (c) 2007 OpenMoko, Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_hw_usb_hub_c(self):
test_file = self.get_test_loc('ics/qemu-hw/usb-hub.c')
expected = [
u'Copyright (c) 2005 Fabrice Bellard',
]
check_detection(expected, test_file)
def test_ics_qemu_hw_usb_ohci_c(self):
test_file = self.get_test_loc('ics/qemu-hw/usb-ohci.c')
expected = [
u'Copyright (c) 2004 Gianni Tedesco',
u'Copyright (c) 2006 CodeSourcery',
u'Copyright (c) 2006 Openedhand Ltd.',
]
check_detection(expected, test_file)
def test_ics_qemu_pc_bios_bochs_bochs_h(self):
test_file = self.get_test_loc('ics/qemu-pc-bios-bochs/bochs.h')
expected = [
u'Copyright (c) 2002 MandrakeSoft S.A.',
]
check_detection(expected, test_file)
def test_ics_qemu_pc_bios_bochs_config_h_in(self):
test_file = self.get_test_loc('ics/qemu-pc-bios-bochs/config.h.in')
expected = [
u'Copyright (c) 2001 MandrakeSoft S.A.',
]
check_detection(expected, test_file)
def test_ics_qemu_pc_bios_bochs_configure(self):
test_file = self.get_test_loc('ics/qemu-pc-bios-bochs/configure')
expected = [
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.',
u'Copyright (c) 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_pc_bios_bochs_makefile_in(self):
test_file = self.get_test_loc('ics/qemu-pc-bios-bochs/Makefile.in')
expected = [
u'Copyright (c) 2002 MandrakeSoft S.A.',
]
check_detection(expected, test_file)
def test_ics_qemu_pc_bios_bochs_bios_acpi_dsdt_dsl(self):
test_file = self.get_test_loc('ics/qemu-pc-bios-bochs-bios/acpi-dsdt.dsl')
expected = [
u'Copyright (c) 2006 Fabrice Bellard',
]
check_detection(expected, test_file)
def test_ics_qemu_pc_bios_bochs_bios_acpi_dsdt_hex_extra_support(self):
test_file = self.get_test_loc('ics/qemu-pc-bios-bochs-bios/acpi-dsdt.hex')
expected = [
u'Copyright (c) 2000 - 2006 Intel Corporation',
]
check_detection(expected, test_file)
def test_ics_qemu_pc_bios_bochs_bios_rombios_c(self):
test_file = self.get_test_loc('ics/qemu-pc-bios-bochs-bios/rombios.c')
expected = [
u'Copyright (c) 2002 MandrakeSoft S.A.',
u'(c) 2002 MandrakeSoft S.A.',
u'(c) by Joseph Gil',
]
check_detection(expected, test_file)
def test_ics_qemu_pc_bios_bochs_bios_rombios_h(self):
test_file = self.get_test_loc('ics/qemu-pc-bios-bochs-bios/rombios.h')
expected = [
u'Copyright (c) 2006 Volker Ruppert',
]
check_detection(expected, test_file)
def test_ics_qemu_pc_bios_vgabios_clext_c(self):
test_file = self.get_test_loc('ics/qemu-pc-bios-vgabios/clext.c')
expected = [
u'Copyright (c) 2004 Makoto Suzuki',
]
check_detection(expected, test_file)
def test_ics_qemu_pc_bios_vgabios_readme(self):
test_file = self.get_test_loc('ics/qemu-pc-bios-vgabios/README')
expected = [
u'(c) by Joseph Gil',
]
check_detection(expected, test_file)
def test_ics_qemu_pc_bios_vgabios_vbe_c_extra_byte(self):
test_file = self.get_test_loc('ics/qemu-pc-bios-vgabios/vbe.c')
expected = [
u'Copyright (c) 2002 Jeroen Janssen',
u'(c) 2003 http://savannah.nongnu.org/projects/vgabios/',
]
check_detection(expected, test_file)
def test_ics_qemu_pc_bios_vgabios_vgabios_c(self):
test_file = self.get_test_loc('ics/qemu-pc-bios-vgabios/vgabios.c')
expected = [
u'Copyright (c) 2001-2008 the LGPL VGABios developers Team',
u'(c) by Joseph Gil',
u'(c) 2008 the LGPL VGABios developers Team',
]
check_detection(expected, test_file)
def test_ics_qemu_pc_bios_vgabios_vgafonts_h(self):
test_file = self.get_test_loc('ics/qemu-pc-bios-vgabios/vgafonts.h')
expected = [
u'(c) by Joseph Gil',
]
check_detection(expected, test_file)
def test_ics_qemu_slirp_cksum_c(self):
test_file = self.get_test_loc('ics/qemu-slirp/cksum.c')
expected = [
u'Copyright (c) 1988, 1992, 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_qemu_slirp_copyright(self):
test_file = self.get_test_loc('ics/qemu-slirp/COPYRIGHT')
expected = [
u'Danny Gasparovski. Copyright (c), 1995,1996',
u'Copyright (c) 1995,1996 Danny Gasparovski.'
]
check_detection(expected, test_file)
def test_ics_qemu_slirp_debug_c(self):
test_file = self.get_test_loc('ics/qemu-slirp/debug.c')
expected = [
u'Copyright (c) 1995 Danny Gasparovski.',
u'Portions copyright (c) 2000 Kelly Price.',
]
check_detection(expected, test_file)
def test_ics_qemu_slirp_debug_h(self):
test_file = self.get_test_loc('ics/qemu-slirp/debug.h')
expected = [
u'Copyright (c) 1995 Danny Gasparovski.',
]
check_detection(expected, test_file)
def test_ics_qemu_slirp_ip_icmp_c(self):
test_file = self.get_test_loc('ics/qemu-slirp/ip_icmp.c')
expected = [
u'Copyright (c) 1982, 1986, 1988, 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_qemu_slirp_ip_input_c(self):
test_file = self.get_test_loc('ics/qemu-slirp/ip_input.c')
expected = [
u'Copyright (c) 1982, 1986, 1988, 1993 The Regents of the University of California.',
u'Copyright (c) 1995 Danny Gasparovski.',
]
check_detection(expected, test_file)
def test_ics_qemu_slirp_ip_output_c(self):
test_file = self.get_test_loc('ics/qemu-slirp/ip_output.c')
expected = [
u'Copyright (c) 1982, 1986, 1988, 1990, 1993 The Regents of the University of California.',
u'Copyright (c) 1995 Danny Gasparovski.',
]
check_detection(expected, test_file)
def test_ics_qemu_slirp_mbuf_c(self):
test_file = self.get_test_loc('ics/qemu-slirp/mbuf.c')
expected = [
u'Copyright (c) 1995 Danny Gasparovski',
]
check_detection(expected, test_file)
def test_ics_qemu_slirp_misc_c(self):
test_file = self.get_test_loc('ics/qemu-slirp/misc.c')
expected = [
u'Copyright (c) 1995 Danny Gasparovski.',
]
check_detection(expected, test_file)
def test_ics_qemu_slirp_tcp_input_c(self):
test_file = self.get_test_loc('ics/qemu-slirp/tcp_input.c')
expected = [
u'Copyright (c) 1982, 1986, 1988, 1990, 1993, 1994 The Regents of the University of California.',
u'Copyright (c) 1995 Danny Gasparovski.',
]
check_detection(expected, test_file)
def test_ics_qemu_slirp_tcp_timer_c(self):
test_file = self.get_test_loc('ics/qemu-slirp/tcp_timer.c')
expected = [
u'Copyright (c) 1982, 1986, 1988, 1990, 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_qemu_slirp_tcp_var_h(self):
test_file = self.get_test_loc('ics/qemu-slirp/tcp_var.h')
expected = [
u'Copyright (c) 1982, 1986, 1993, 1994 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_qemu_slirp_tftp_c(self):
test_file = self.get_test_loc('ics/qemu-slirp/tftp.c')
expected = [
u'Copyright (c) 2004 Magnus Damm <damm@opensource.se>',
]
check_detection(expected, test_file)
def test_ics_qemu_slirp_android_helper_h(self):
test_file = self.get_test_loc('ics/qemu-slirp-android/helper.h')
expected = [
u'Copyright (c) 2009 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_qemu_target_arm_iwmmxt_helper_c(self):
test_file = self.get_test_loc('ics/qemu-target-arm/iwmmxt_helper.c')
expected = [
u'Copyright (c) 2007 OpenedHand, Ltd.',
u'Copyright (c) 2008 CodeSourcery',
]
check_detection(expected, test_file)
def test_ics_qemu_target_arm_neon_helper_c(self):
test_file = self.get_test_loc('ics/qemu-target-arm/neon_helper.c')
expected = [
u'Copyright (c) 2007, 2008 CodeSourcery.',
]
check_detection(expected, test_file)
def test_ics_qemu_target_arm_op_helper_c(self):
test_file = self.get_test_loc('ics/qemu-target-arm/op_helper.c')
expected = [
u'Copyright (c) 2005-2007 CodeSourcery, LLC',
]
check_detection(expected, test_file)
def test_ics_qemu_target_arm_translate_c(self):
test_file = self.get_test_loc('ics/qemu-target-arm/translate.c')
expected = [
u'Copyright (c) 2003 Fabrice Bellard',
u'Copyright (c) 2005-2007 CodeSourcery',
u'Copyright (c) 2007 OpenedHand, Ltd.',
]
check_detection(expected, test_file)
def test_ics_qemu_target_i386_helper_template_h(self):
test_file = self.get_test_loc('ics/qemu-target-i386/helper_template.h')
expected = [
u'Copyright (c) 2008 Fabrice Bellard',
]
check_detection(expected, test_file)
def test_ics_qemu_target_i386_kvm_c(self):
test_file = self.get_test_loc('ics/qemu-target-i386/kvm.c')
expected = [
u'Copyright (c) 2006-2008 Qumranet Technologies',
u'Copyright IBM, Corp. 2008',
]
check_detection(expected, test_file)
def test_ics_qemu_target_i386_ops_sse_h(self):
test_file = self.get_test_loc('ics/qemu-target-i386/ops_sse.h')
expected = [
u'Copyright (c) 2005 Fabrice Bellard',
u'Copyright (c) 2008 Intel Corporation',
]
check_detection(expected, test_file)
def test_ics_qemu_target_i386_ops_sse_header_h(self):
test_file = self.get_test_loc('ics/qemu-target-i386/ops_sse_header.h')
expected = [
u'Copyright (c) 2005 Fabrice Bellard',
]
check_detection(expected, test_file)
def test_ics_qemu_tcg_tcg_c(self):
test_file = self.get_test_loc('ics/qemu-tcg/tcg.c')
expected = [
u'Copyright (c) 2008 Fabrice Bellard',
]
check_detection(expected, test_file)
def test_ics_qemu_tcg_arm_tcg_target_c(self):
test_file = self.get_test_loc('ics/qemu-tcg-arm/tcg-target.c')
expected = [
u'Copyright (c) 2008 Andrzej Zaborowski',
]
check_detection(expected, test_file)
def test_ics_qemu_tcg_arm_tcg_target_h(self):
test_file = self.get_test_loc('ics/qemu-tcg-arm/tcg-target.h')
expected = [
u'Copyright (c) 2008 Fabrice Bellard',
u'Copyright (c) 2008 Andrzej Zaborowski',
]
check_detection(expected, test_file)
def test_ics_quake_androidmanifest_xml(self):
test_file = self.get_test_loc('ics/quake/AndroidManifest.xml')
expected = [
u'Copyright 2007, The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_quake_notice(self):
test_file = self.get_test_loc('ics/quake/NOTICE')
expected = [
u'Copyright (c) 1996-2000 Id Software Inc.',
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_gnu_txt(self):
test_file = self.get_test_loc('ics/quake-quake-src/gnu.txt')
expected = [
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_qw_glqwcl_spec_sh(self):
test_file = self.get_test_loc('ics/quake-quake-src-QW/glqwcl.spec.sh')
expected = [
u'Copyright Restricted Icon',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_qw_client_adivtab_h(self):
test_file = self.get_test_loc('ics/quake-quake-src-QW-client/adivtab.h')
expected = [
u'Copyright (c) 1999, 2000 Id Software Inc.',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_qw_client_anorms_h(self):
test_file = self.get_test_loc('ics/quake-quake-src-QW-client/anorms.h')
expected = [
u'Copyright (c) 1996-1997 Id Software, Inc.',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_qw_client_cd_linux_c(self):
test_file = self.get_test_loc('ics/quake-quake-src-QW-client/cd_linux.c')
expected = [
u'Copyright (c) 1996-1997 Id Software, Inc.',
u'(c) 1996 Id Software, Inc.',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_qw_client_cl_demo_c(self):
test_file = self.get_test_loc('ics/quake-quake-src-QW-client/cl_demo.c')
expected = [
u'Copyright (c) 1996-1997 Id Software, Inc.',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_qw_client_exitscrn_txt(self):
test_file = self.get_test_loc('ics/quake-quake-src-QW-client/exitscrn.txt')
expected = [
u'(c) 1996, 1997 Id Software, inc.',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_qw_client_keys_h(self):
test_file = self.get_test_loc('ics/quake-quake-src-QW-client/keys.h')
expected = [
u'Copyright (c) 1996-1997 Id Software, Inc.',
u'(c) Mouse Wheel Support',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_qw_client_md4_c(self):
test_file = self.get_test_loc('ics/quake-quake-src-QW-client/md4.c')
expected = [
u'Copyright (c) 1996-1997 Id Software, Inc.',
u'Copyright (c) 1991-2, RSA Data Security, Inc.',
u'Copyright (c) 1990-2, RSA Data Security, Inc.',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_qw_client_menu_c(self):
test_file = self.get_test_loc('ics/quake-quake-src-QW-client/menu.c')
expected = [
u'Copyright (c) 1996-1997 Id Software, Inc.',
u'(c) 1996 Id',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_quake_quake_src_qw_client_menu_c_trail_name(self):
test_file = self.get_test_loc('ics/quake-quake-src-QW-client/menu.c')
expected = [
u'Copyright (c) 1996-1997 Id Software, Inc.',
u'(c) 1996 Id Software',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_qw_client_qwcl_plg(self):
test_file = self.get_test_loc('ics/quake-quake-src-QW-client/qwcl.plg')
expected = [
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_qw_dxsdk_sdk_inc_d3d_h(self):
test_file = self.get_test_loc('ics/quake-quake-src-QW-dxsdk-sdk-inc/d3d.h')
expected = [
u'Copyright (c) 1995-1996 Microsoft Corporation.',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_qw_dxsdk_sdk_inc_ddraw_h(self):
test_file = self.get_test_loc('ics/quake-quake-src-QW-dxsdk-sdk-inc/ddraw.h')
expected = [
u'Copyright (c) 1994-1996 Microsoft Corporation.',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_qw_dxsdk_sdk_inc_dinput_h(self):
test_file = self.get_test_loc('ics/quake-quake-src-QW-dxsdk-sdk-inc/dinput.h')
expected = [
u'Copyright (c) 1996 Microsoft Corporation.',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_qw_dxsdk_sdk_inc_dplay_h(self):
test_file = self.get_test_loc('ics/quake-quake-src-QW-dxsdk-sdk-inc/dplay.h')
expected = [
u'Copyright (c) 1994-1995 Microsoft Corporation.',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_qw_dxsdk_sdk_inc_dsound_h(self):
test_file = self.get_test_loc('ics/quake-quake-src-QW-dxsdk-sdk-inc/dsound.h')
expected = [
u'Copyright (c) 1995,1996 Microsoft Corporation.',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_qw_scitech_include_debug_h(self):
test_file = self.get_test_loc('ics/quake-quake-src-QW-scitech-include/debug.h')
expected = [
u'Copyright (c) 1996 SciTech Software',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_qw_scitech_include_mgldos_h(self):
test_file = self.get_test_loc('ics/quake-quake-src-QW-scitech-include/mgldos.h')
expected = [
u'Copyright (c) 1996 SciTech Software.',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_winquake_3dfx_txt_trail_name(self):
test_file = self.get_test_loc('ics/quake-quake-src-WinQuake/3dfx.txt')
expected = [
u'Copyright 1997 3Dfx Interactive, Inc.',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_winquake_cl_input_cpp(self):
test_file = self.get_test_loc('ics/quake-quake-src-WinQuake/cl_input.cpp')
expected = [
u'Copyright (c) 1996-1997 Id Software, Inc.',
u'(c) 1996 Id Software, Inc.',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_winquake_conproc_cpp(self):
test_file = self.get_test_loc('ics/quake-quake-src-WinQuake/conproc.cpp')
expected = [
u'Copyright (c) 1996-1997 Id Software, Inc.',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_winquake_menu_cpp(self):
test_file = self.get_test_loc('ics/quake-quake-src-WinQuake/menu.cpp')
expected = [
u'Copyright (c) 1996-1997 Id Software, Inc.',
u'(c) 1996 Id Software, inc.',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_winquake_mpdosock_h(self):
test_file = self.get_test_loc('ics/quake-quake-src-WinQuake/mpdosock.h')
expected = [
u'Copyright (c) 1993-1995, Microsoft Corp.',
u'Copyright (c) 1982-1986 Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_winquake_sys_linux_cpp(self):
test_file = self.get_test_loc('ics/quake-quake-src-WinQuake/sys_linux.cpp')
expected = [
u'(c) 1996 Id Software, inc.',
u'(c) 1996 Id Software, inc.',
]
check_detection(expected, test_file)
def test_ics_quake_quake_src_winquake_winquake_plg(self):
test_file = self.get_test_loc('ics/quake-quake-src-WinQuake/WinQuake.plg')
expected = [
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
u'Copyright (c) Microsoft Corp 1984-1998.',
u'Copyright (c) Microsoft Corp 1981-1993.',
]
check_detection(expected, test_file)
def test_ics_quake_src_com_android_quake_quakeactivity_java(self):
test_file = self.get_test_loc('ics/quake-src-com-android-quake/QuakeActivity.java')
expected = [
u'Copyright (c) 2007 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_quake_src_com_android_quake_quakelib_java(self):
test_file = self.get_test_loc('ics/quake-src-com-android-quake/QuakeLib.java')
expected = [
u'Copyright (c) 2007 The Android Open Source Project',
u'(c) Mouse Wheel Support',
]
check_detection(expected, test_file)
def test_ics_quake_src_com_android_quake_quakeview_java(self):
test_file = self.get_test_loc('ics/quake-src-com-android-quake/QuakeView.java')
expected = [
u'Copyright (c) 2007 The Android Open Source Project',
u'Copyright (c) 2008 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_safe_iop_notice(self):
test_file = self.get_test_loc('ics/safe-iop/NOTICE')
expected = [
u'Copyright (c) 2007,2008 Will Drewry <redpig@dataspill.org>',
]
check_detection(expected, test_file)
def test_ics_safe_iop_include_safe_iop_h_lead_portion(self):
test_file = self.get_test_loc('ics/safe-iop-include/safe_iop.h')
expected = [
u'Copyright 2007,2008 redpig@dataspill.org',
u'portions copyright The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_safe_iop_src_safe_iop_c_lead_portion(self):
test_file = self.get_test_loc('ics/safe-iop-src/safe_iop.c')
expected = [
u'Copyright 2007,2008 redpig@dataspill.org',
u'portions copyright The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_skia_android_sample_sampleapp_androidmanifest_xml(self):
test_file = self.get_test_loc('ics/skia-android_sample-SampleApp/AndroidManifest.xml')
expected = [
u'Copyright (c) 2011 Skia',
]
check_detection(expected, test_file)
def test_ics_skia_android_sample_sampleapp_jni_sample_jni_cpp(self):
test_file = self.get_test_loc('ics/skia-android_sample-SampleApp-jni/sample-jni.cpp')
expected = [
u'Copyright (c) 2011 Skia',
]
check_detection(expected, test_file)
def test_ics_skia_emoji_emojifont_cpp(self):
test_file = self.get_test_loc('ics/skia-emoji/EmojiFont.cpp')
expected = [
u'Copyright 2009, The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_skia_gm_strokerects_cpp(self):
test_file = self.get_test_loc('ics/skia-gm/strokerects.cpp')
expected = [
u'Copyright 2011 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_skia_gpu_src_grgpu_cpp(self):
test_file = self.get_test_loc('ics/skia-gpu-src/GrGpu.cpp')
expected = [
u'Copyright 2010 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_skia_include_core_skbitmap_h(self):
test_file = self.get_test_loc('ics/skia-include-core/SkBitmap.h')
expected = [
u'Copyright (c) 2006 The Android Open Source Project',
u'SkColorGetR (c), SkColorGetG',
]
check_detection(expected, test_file)
def test_ics_skia_include_core_skcolorpriv_h(self):
test_file = self.get_test_loc('ics/skia-include-core/SkColorPriv.h')
expected = [
u'Copyright (c) 2006 The Android Open Source Project',
u'SkGetPackedG32 (c), SkGetPackedB32',
u'SkGetPackedG32 (c), SkGetPackedB32',
]
check_detection(expected, test_file)
def test_ics_skia_include_core_skregion_h(self):
test_file = self.get_test_loc('ics/skia-include-core/SkRegion.h')
expected = [
u'Copyright (c) 2005 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_skia_include_core_skscalar_h(self):
test_file = self.get_test_loc('ics/skia-include-core/SkScalar.h')
expected = [
u'Copyright (c) 2006 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_skia_include_core_sktregistry_h(self):
test_file = self.get_test_loc('ics/skia-include-core/SkTRegistry.h')
expected = [
u'Copyright 2009, The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_skia_include_ports_skharfbuzzfont_h(self):
test_file = self.get_test_loc('ics/skia-include-ports/SkHarfBuzzFont.h')
expected = [
u'Copyright (c) 2009, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_skia_include_views_skoswindow_wxwidgets_h(self):
test_file = self.get_test_loc('ics/skia-include-views/SkOSWindow_wxwidgets.h')
expected = [
u'Copyright (c) 2006 The Android Open Source Project',
u'Copyright 2005 MyCompanyName',
]
check_detection(expected, test_file)
def test_ics_skia_src_animator_skoperanditerpolator_cpp(self):
test_file = self.get_test_loc('ics/skia-src-animator/SkOperandIterpolator.cpp')
expected = [
u'Copyright 2006, The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_skia_src_core_skbitmap_cpp(self):
test_file = self.get_test_loc('ics/skia-src-core/SkBitmap.cpp')
expected = [
u'Copyright (c) 2006-2008 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_skia_src_core_skbitmapprocstate_matrixprocs_cpp(self):
test_file = self.get_test_loc('ics/skia-src-core/SkBitmapProcState_matrixProcs.cpp')
expected = [
u'(c) COPYRIGHT 2009 Motorola',
]
check_detection(expected, test_file)
def test_ics_skia_src_core_skblitter_4444_cpp(self):
test_file = self.get_test_loc('ics/skia-src-core/SkBlitter_4444.cpp')
expected = [
u'Copyright 2006, The Android Open Source Project',
u'SkColorGetG (c), SkColorGetB',
]
check_detection(expected, test_file)
def test_ics_skia_src_core_skcolortable_cpp(self):
test_file = self.get_test_loc('ics/skia-src-core/SkColorTable.cpp')
expected = [
u'Copyright (c) 2006-2009 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_skia_src_core_skfilterproc_h(self):
test_file = self.get_test_loc('ics/skia-src-core/SkFilterProc.h')
expected = [
u'Copyright (c) 2006-2008 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_skia_src_images_skimagedecoder_libjpeg_cpp(self):
test_file = self.get_test_loc('ics/skia-src-images/SkImageDecoder_libjpeg.cpp')
expected = [
u'Copyright 2007, The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_skia_src_opts_opts_check_arm_cpp(self):
test_file = self.get_test_loc('ics/skia-src-opts/opts_check_arm.cpp')
expected = [
u'Copyright (c) 2010, Code Aurora Forum.',
u'Copyright 2006-2010, The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_skia_src_pdf_skpdffont_cpp(self):
test_file = self.get_test_loc('ics/skia-src-pdf/SkPDFFont.cpp')
expected = [
u'Copyright (c) 2011 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_skia_src_ports_skdebug_brew_cpp(self):
test_file = self.get_test_loc('ics/skia-src-ports/SkDebug_brew.cpp')
expected = [
u'Copyright 2009, The Android Open Source Project',
u'Copyright 2009, Company 100, Inc.',
]
check_detection(expected, test_file)
def test_ics_skia_src_ports_skfonthost_fontconfig_cpp(self):
test_file = self.get_test_loc('ics/skia-src-ports/SkFontHost_fontconfig.cpp')
expected = [
u'Copyright 2008, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_skia_src_ports_skfonthost_none_cpp(self):
test_file = self.get_test_loc('ics/skia-src-ports/SkFontHost_none.cpp')
expected = [
u'Copyright 2006-2008, The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_skia_src_ports_skosfile_brew_cpp(self):
test_file = self.get_test_loc('ics/skia-src-ports/SkOSFile_brew.cpp')
expected = [
u'Copyright 2006, The Android Open Source Project',
u'Copyright 2009, Company 100, Inc.',
]
check_detection(expected, test_file)
def test_ics_skia_src_ports_skxmlparser_empty_cpp(self):
test_file = self.get_test_loc('ics/skia-src-ports/SkXMLParser_empty.cpp')
expected = [
u'Copyright 2006, The Android Open Source Project',
u'Copyright Skia Inc. 2004 - 2005',
]
check_detection(expected, test_file)
def test_ics_skia_tests_fillpathtest_cpp(self):
test_file = self.get_test_loc('ics/skia-tests/FillPathTest.cpp')
expected = [
u'Copyright (c) 2010 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_sonivox_notice(self):
test_file = self.get_test_loc('ics/sonivox/NOTICE')
expected = [
u'Copyright (c) 2004-2006 Sonic Network Inc.',
]
check_detection(expected, test_file)
def test_ics_sonivox_arm_fm_22k_host_src_eas_h(self):
test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-host_src/eas.h')
expected = [
u'Copyright Sonic Network Inc. 2005, 2006',
]
check_detection(expected, test_file)
def test_ics_sonivox_arm_fm_22k_host_src_eas_build_h(self):
test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-host_src/eas_build.h')
expected = [
u'Copyright Sonic Network Inc. 2006',
]
check_detection(expected, test_file)
def test_ics_sonivox_arm_fm_22k_host_src_eas_config_c(self):
test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-host_src/eas_config.c')
expected = [
u'Copyright Sonic Network Inc. 2004-2006',
]
check_detection(expected, test_file)
def test_ics_sonivox_arm_fm_22k_host_src_eas_config_h(self):
test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-host_src/eas_config.h')
expected = [
u'Copyright 2005 Sonic Network Inc.',
]
check_detection(expected, test_file)
def test_ics_sonivox_arm_fm_22k_host_src_eas_main_c(self):
test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-host_src/eas_main.c')
expected = [
u'Copyright Sonic Network Inc. 2004',
]
check_detection(expected, test_file)
def test_ics_sonivox_arm_fm_22k_host_src_eas_types_h(self):
test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-host_src/eas_types.h')
expected = [
u'Copyright Sonic Network Inc. 2004',
]
check_detection(expected, test_file)
def test_ics_sonivox_arm_fm_22k_host_src_eas_wave_c(self):
test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-host_src/eas_wave.c')
expected = [
u'Copyright Sonic Network Inc. 2005',
]
check_detection(expected, test_file)
def test_ics_sonivox_arm_fm_22k_lib_src_eas_ctype_h(self):
test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-lib_src/eas_ctype.h')
expected = [
u'Copyright (c) 2005 Sonic Network Inc.',
]
check_detection(expected, test_file)
def test_ics_sonivox_arm_fm_22k_lib_src_eas_data_h(self):
test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-lib_src/eas_data.h')
expected = [
u'Copyright 2004 Sonic Network Inc.',
]
check_detection(expected, test_file)
def test_ics_sonivox_arm_fm_22k_lib_src_eas_fmengine_c(self):
test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-lib_src/eas_fmengine.c')
expected = [
u'Copyright Sonic Network Inc. 2004, 2005',
]
check_detection(expected, test_file)
def test_ics_sonivox_arm_fm_22k_lib_src_eas_fmsndlib_c(self):
test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-lib_src/eas_fmsndlib.c')
expected = [
u'(c) Copyright 2005 Sonic Network, Inc.',
]
check_detection(expected, test_file)
def test_ics_sonivox_arm_fm_22k_lib_src_eas_smfdata_h(self):
test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-lib_src/eas_smfdata.h')
expected = [
u'Copyright Sonic Network Inc. 2005',
]
check_detection(expected, test_file)
def test_ics_sonivox_arm_hybrid_22k_lib_src_eas_wtengine_c(self):
test_file = self.get_test_loc('ics/sonivox-arm-hybrid-22k-lib_src/eas_wtengine.c')
expected = [
u'Copyright Sonic Network Inc. 2004-2005',
]
check_detection(expected, test_file)
def test_ics_sonivox_arm_hybrid_22k_lib_src_hybrid_22khz_mcu_c(self):
test_file = self.get_test_loc('ics/sonivox-arm-hybrid-22k-lib_src/hybrid_22khz_mcu.c')
expected = [
u'Copyright (c) 2006 Sonic Network Inc.',
]
check_detection(expected, test_file)
def test_ics_sonivox_arm_wt_22k_lib_src_dls_h(self):
test_file = self.get_test_loc('ics/sonivox-arm-wt-22k-lib_src/dls.h')
expected = [
u'Copyright (c) 1996 Sonic Foundry',
]
check_detection(expected, test_file)
def test_ics_sonivox_arm_wt_22k_lib_src_jet_data_h(self):
test_file = self.get_test_loc('ics/sonivox-arm-wt-22k-lib_src/jet_data.h')
expected = [
u'Copyright (c) 2006 Sonic Network Inc.',
]
check_detection(expected, test_file)
def test_ics_sonivox_arm_wt_22k_lib_src_wt_22khz_c(self):
test_file = self.get_test_loc('ics/sonivox-arm-wt-22k-lib_src/wt_22khz.c')
expected = [
u'Copyright (c) 2009 Sonic Network Inc.',
]
check_detection(expected, test_file)
def test_ics_sonivox_docs_jet_authoring_guidelines_html(self):
test_file = self.get_test_loc('ics/sonivox-docs/JET_Authoring_Guidelines.html')
expected = [
u'Copyright 2009 techdoc.dot Jennifer Hruska',
u'Copyright (c) 2009 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_sonivox_docs_jet_creator_user_manual_html(self):
test_file = self.get_test_loc('ics/sonivox-docs/JET_Creator_User_Manual.html')
expected = [
u'Copyright 2009 Confidential Information',
u'Copyright (c) 2009 The Android Open Source Project',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_sonivox_docs_jet_creator_user_manual_html_markup_lead_name(self):
test_file = self.get_test_loc('ics/sonivox-docs/JET_Creator_User_Manual.html')
expected = [
u'Jennifer Hruska Copyright 2009 Confidential Information',
u'Copyright (c) 2009 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_sonivox_docs_jet_programming_manual_html(self):
test_file = self.get_test_loc('ics/sonivox-docs/JET_Programming_Manual.html')
expected = [
u'Copyright (c) 2009 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_sonivox_jet_tools_jetcreator_jetaudition_py(self):
test_file = self.get_test_loc('ics/sonivox-jet_tools-JetCreator/JetAudition.py')
expected = [
u'Copyright (c) 2008 Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_speex_notice(self):
test_file = self.get_test_loc('ics/speex/NOTICE')
expected = [
u'Copyright (c) 2002-2008 Jean-Marc Valin',
u'Copyright (c) 2002 Jean-Marc Valin & David Rowe',
u'Copyright (c) 2003 Epic Games',
u'Copyright (c) 2003 Epic Games',
u'Copyright (c) 2004-2006 Epic Games',
u'Copyright (c) 2005 Analog Devices',
u'Copyright (c) 2005 Jean-Marc Valin, CSIRO, Christopher Montgomery',
u'Copyright (c) 2006 David Rowe',
u'Copyright (c) 2006-2008 CSIRO, Jean-Marc Valin, Xiph.Org Foundation',
u'Copyright (c) 2008 Thorvald Natvig',
u'Copyright (c) 2003-2004, Mark Borgerding',
u'Copyright (c) 2005-2007, Jean-Marc Valin',
u'Copyright (c) 2011 Jyri Sarha, Texas Instruments',
u'Copyright 1992, 1993, 1994 by Jutta Degener and Carsten Bormann, Technische Universitaet Berlin',
]
check_detection(expected, test_file)
def test_ics_speex_include_speex_speex_h(self):
test_file = self.get_test_loc('ics/speex-include-speex/speex.h')
expected = [
u'Copyright (c) 2002-2006 Jean-Marc Valin',
]
check_detection(expected, test_file)
def test_ics_speex_include_speex_speex_bits_h(self):
test_file = self.get_test_loc('ics/speex-include-speex/speex_bits.h')
expected = [
u'Copyright (c) 2002 Jean-Marc Valin',
]
check_detection(expected, test_file)
def test_ics_speex_include_speex_speex_buffer_h(self):
test_file = self.get_test_loc('ics/speex-include-speex/speex_buffer.h')
expected = [
u'Copyright (c) 2007 Jean-Marc Valin',
]
check_detection(expected, test_file)
def test_ics_speex_include_speex_speex_echo_h(self):
test_file = self.get_test_loc('ics/speex-include-speex/speex_echo.h')
expected = [
u'Copyright (c) Jean-Marc Valin',
]
check_detection(expected, test_file)
def test_ics_speex_include_speex_speex_preprocess_h(self):
test_file = self.get_test_loc('ics/speex-include-speex/speex_preprocess.h')
expected = [
u'Copyright (c) 2003 Epic Games',
]
check_detection(expected, test_file)
def test_ics_speex_include_speex_speex_types_h(self):
test_file = self.get_test_loc('ics/speex-include-speex/speex_types.h')
expected = [
u'(c) COPYRIGHT 1994-2002 by the Xiph.Org Foundation http://www.xiph.org/',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_kiss_fft_guts_h(self):
test_file = self.get_test_loc('ics/speex-libspeex/_kiss_fft_guts.h')
expected = [
u'Copyright (c) 2003-2004, Mark Borgerding',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_arch_h(self):
test_file = self.get_test_loc('ics/speex-libspeex/arch.h')
expected = [
u'Copyright (c) 2003 Jean-Marc Valin',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_bits_c(self):
test_file = self.get_test_loc('ics/speex-libspeex/bits.c')
expected = [
u'Copyright (c) 2002 Jean-Marc Valin',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_cb_search_c(self):
test_file = self.get_test_loc('ics/speex-libspeex/cb_search.c')
expected = [
u'Copyright (c) 2002-2006 Jean-Marc Valin',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_cb_search_h(self):
test_file = self.get_test_loc('ics/speex-libspeex/cb_search.h')
expected = [
u'Copyright (c) 2002 Jean-Marc Valin & David Rowe',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_cb_search_arm4_h(self):
test_file = self.get_test_loc('ics/speex-libspeex/cb_search_arm4.h')
expected = [
u'Copyright (c) 2004 Jean-Marc Valin',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_cb_search_bfin_h(self):
test_file = self.get_test_loc('ics/speex-libspeex/cb_search_bfin.h')
expected = [
u'Copyright (c) 2005 Analog Devices',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_fftwrap_c(self):
test_file = self.get_test_loc('ics/speex-libspeex/fftwrap.c')
expected = [
u'Copyright (c) 2005-2006 Jean-Marc Valin',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_fftwrap_h(self):
test_file = self.get_test_loc('ics/speex-libspeex/fftwrap.h')
expected = [
u'Copyright (c) 2005 Jean-Marc Valin',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_filterbank_c(self):
test_file = self.get_test_loc('ics/speex-libspeex/filterbank.c')
expected = [
u'Copyright (c) 2006 Jean-Marc Valin',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_fixed_bfin_h(self):
test_file = self.get_test_loc('ics/speex-libspeex/fixed_bfin.h')
expected = [
u'Copyright (c) 2005 Analog Devices Author Jean-Marc Valin',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_speex_libspeex_fixed_bfin_h_extra_author(self):
test_file = self.get_test_loc('ics/speex-libspeex/fixed_bfin.h')
expected = [
u'Copyright (c) 2005 Analog Devices',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_kiss_fft_c(self):
test_file = self.get_test_loc('ics/speex-libspeex/kiss_fft.c')
expected = [
u'Copyright (c) 2003-2004, Mark Borgerding',
u'Copyright (c) 2005-2007, Jean-Marc Valin',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_kiss_fftr_c(self):
test_file = self.get_test_loc('ics/speex-libspeex/kiss_fftr.c')
expected = [
u'Copyright (c) 2003-2004, Mark Borgerding',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_lpc_c(self):
test_file = self.get_test_loc('ics/speex-libspeex/lpc.c')
expected = [
u'Copyright 1992, 1993, 1994 by Jutta Degener and Carsten Bormann, Technische Universitaet Berlin',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_lsp_c(self):
test_file = self.get_test_loc('ics/speex-libspeex/lsp.c')
expected = [
u'Jean-Marc Valin (c) 2002-2006',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_lsp_bfin_h(self):
test_file = self.get_test_loc('ics/speex-libspeex/lsp_bfin.h')
expected = [
u'Copyright (c) 2006 David Rowe',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_mdf_c(self):
test_file = self.get_test_loc('ics/speex-libspeex/mdf.c')
expected = [
u'Copyright (c) 2003-2008 Jean-Marc Valin',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_modes_wb_c(self):
test_file = self.get_test_loc('ics/speex-libspeex/modes_wb.c')
expected = [
u'Copyright (c) 2002-2007 Jean-Marc Valin',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_preprocess_c(self):
test_file = self.get_test_loc('ics/speex-libspeex/preprocess.c')
expected = [
u'Copyright (c) 2003 Epic Games',
u'Copyright (c) 2004-2006 Epic Games',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_pseudofloat_h(self):
test_file = self.get_test_loc('ics/speex-libspeex/pseudofloat.h')
expected = [
u'Copyright (c) 2005 Jean-Marc Valin',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_resample_c(self):
test_file = self.get_test_loc('ics/speex-libspeex/resample.c')
expected = [
u'Copyright (c) 2007-2008 Jean-Marc Valin',
u'Copyright (c) 2008 Thorvald Natvig',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_resample_neon_h(self):
test_file = self.get_test_loc('ics/speex-libspeex/resample_neon.h')
expected = [
u'Copyright (c) 2007-2008 Jean-Marc Valin',
u'Copyright (c) 2008 Thorvald Natvig',
u'Copyright (c) 2011 Jyri Sarha, Texas Instruments',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_resample_sse_h(self):
test_file = self.get_test_loc('ics/speex-libspeex/resample_sse.h')
expected = [
u'Copyright (c) 2007-2008 Jean-Marc Valin',
u'Copyright (c) 2008 Thorvald Natvig',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_scal_c(self):
test_file = self.get_test_loc('ics/speex-libspeex/scal.c')
expected = [
u'Copyright (c) 2006-2008 CSIRO, Jean-Marc Valin, Xiph.Org Foundation',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_smallft_c(self):
test_file = self.get_test_loc('ics/speex-libspeex/smallft.c')
expected = [
u'(c) COPYRIGHT 1994-2001 by the XIPHOPHORUS Company http://www.xiph.org/',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_vorbis_psy_h(self):
test_file = self.get_test_loc('ics/speex-libspeex/vorbis_psy.h')
expected = [
u'Copyright (c) 2005 Jean-Marc Valin, CSIRO, Christopher Montgomery',
]
check_detection(expected, test_file)
def test_ics_speex_libspeex_window_c(self):
test_file = self.get_test_loc('ics/speex-libspeex/window.c')
expected = [
u'Copyright (c) 2006 Jean-Marc Valin',
]
check_detection(expected, test_file)
def test_ics_srec_notice(self):
test_file = self.get_test_loc('ics/srec/NOTICE')
expected = [
u'Copyright 2007, 2008 Nuance Communications',
]
check_detection(expected, test_file)
def test_ics_srec_audio_audioin_unix_include_audioin_h(self):
test_file = self.get_test_loc('ics/srec-audio-AudioIn-UNIX-include/audioin.h')
expected = [
u'Copyright 2007, 2008 Nuance Communciations, Inc.',
]
check_detection(expected, test_file)
def test_ics_srec_audio_audioin_unix_src_audioinwrapper_cpp(self):
test_file = self.get_test_loc('ics/srec-audio-AudioIn-UNIX-src/audioinwrapper.cpp')
expected = [
u'Copyright 2007, 2008 Nuance Communciations, Inc.',
]
check_detection(expected, test_file)
def test_ics_srec_audio_audioin_unix_src_filter_c(self):
test_file = self.get_test_loc('ics/srec-audio-AudioIn-UNIX-src/filter.c')
expected = [
u'Copyright 2007, 2008 Nuance Communciations, Inc.',
]
check_detection(expected, test_file)
def test_ics_srec_doc_srec_doxygen(self):
test_file = self.get_test_loc('ics/srec-doc/srec.doxygen')
expected = [
u'(c) Copyright 2003-2007 Nuance',
]
check_detection(expected, test_file)
def test_ics_srec_srec_srec_doxygen(self):
test_file = self.get_test_loc('ics/srec-srec/srec.doxygen')
expected = [
u'(c) Copyright 2003 Speechworks International',
]
check_detection(expected, test_file)
def test_ics_srec_srec_jni_android_speech_srec_microphoneinputstream_cpp(self):
test_file = self.get_test_loc('ics/srec-srec_jni/android_speech_srec_MicrophoneInputStream.cpp')
expected = [
u'Copyright 2007 Nuance Communciations, Inc.',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_srec_tools_grxmlcompile_grxmlcompile_cpp(self):
test_file = self.get_test_loc('ics/srec-tools-grxmlcompile/grxmlcompile.cpp')
expected = [
u'Copyright 2007, 2008 Nuance Communciations, Inc.',
u'Copyright (c) 2007 Project Admins leethomason',
]
check_detection(expected, test_file)
def test_ics_srtp_config_guess(self):
test_file = self.get_test_loc('ics/srtp/config.guess')
expected = [
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.',
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_srtp_config_log(self):
test_file = self.get_test_loc('ics/srtp/config.log')
expected = [
u'Copyright (c) 2007 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_srtp_license(self):
test_file = self.get_test_loc('ics/srtp/LICENSE')
expected = [
u'Copyright (c) 2001-2006 Cisco Systems, Inc.',
]
check_detection(expected, test_file)
def test_ics_srtp_crypto_cipher_aes_c(self):
test_file = self.get_test_loc('ics/srtp-crypto-cipher/aes.c')
expected = [
u'Copyright (c) 2001-2006, Cisco Systems, Inc.',
]
check_detection(expected, test_file)
def test_ics_srtp_crypto_hash_hmac_c(self):
test_file = self.get_test_loc('ics/srtp-crypto-hash/hmac.c')
expected = [
u'Copyright (c) 2001-2006 Cisco Systems, Inc.',
]
check_detection(expected, test_file)
def test_ics_srtp_crypto_include_auth_h(self):
test_file = self.get_test_loc('ics/srtp-crypto-include/auth.h')
expected = [
u'Copyright (c) 2001-2006, Cisco Systems, Inc.',
]
check_detection(expected, test_file)
def test_ics_srtp_crypto_include_kernel_compat_h(self):
test_file = self.get_test_loc('ics/srtp-crypto-include/kernel_compat.h')
expected = [
u'Copyright (c) 2005 Ingate Systems AB',
]
check_detection(expected, test_file)
def test_ics_srtp_doc_header_template(self):
test_file = self.get_test_loc('ics/srtp-doc/header.template')
expected = [
u'copyright 2001-2005 by David A. McGrew, Cisco Systems, Inc.',
]
check_detection(expected, test_file)
def test_ics_srtp_doc_intro_txt(self):
test_file = self.get_test_loc('ics/srtp-doc/intro.txt')
expected = [
u'Copyright (c) 2001-2005 Cisco Systems, Inc.',
]
check_detection(expected, test_file)
def test_ics_srtp_doc_rfc3711_txt(self):
test_file = self.get_test_loc('ics/srtp-doc/rfc3711.txt')
expected = [
u'Copyright (c) The Internet Society (2004).',
u'Full Copyright Statement',
u'Full Copyright Statement',
u'Copyright (c) The Internet Society (2004).',
]
check_detection(expected, test_file)
def test_ics_srtp_include_ekt_h(self):
test_file = self.get_test_loc('ics/srtp-include/ekt.h')
expected = [
u'Copyright (c) 2001-2005 Cisco Systems, Inc.',
]
check_detection(expected, test_file)
def test_ics_stlport_configure_bat(self):
test_file = self.get_test_loc('ics/stlport/configure.bat')
expected = [
u'Copyright (c) 2004,2005 Michael Fink',
]
check_detection(expected, test_file)
def test_ics_stlport_license(self):
test_file = self.get_test_loc('ics/stlport/LICENSE')
expected = [
u'Copyright 1999,2000 Boris Fomitchev',
u'Copyright 1994 Hewlett-Packard Company',
u'Copyright 1996,97 Silicon Graphics Computer Systems, Inc.',
u'Copyright 1997 Moscow Center for SPARC Technology.',
]
check_detection(expected, test_file)
def test_ics_stlport_makefile(self):
test_file = self.get_test_loc('ics/stlport/Makefile')
expected = [
u'Copyright (c) 2004-2008 Petr Ovtchenkov',
]
check_detection(expected, test_file)
def test_ics_stlport_readme(self):
test_file = self.get_test_loc('ics/stlport/README')
expected = [
u'Copyright (c) 1994 Hewlett-Packard Company',
u'Copyright (c) 1996-1999 Silicon Graphics Computer Systems, Inc.',
u'Copyright (c) 1997 Moscow Center for SPARC Technology',
u'Copyright (c) 1999-2003 Boris Fomitchev',
]
check_detection(expected, test_file)
def test_ics_stlport_src_acquire_release_h(self):
test_file = self.get_test_loc('ics/stlport-src/acquire_release.h')
expected = [
u'Copyright (c) 1999 Silicon Graphics Computer Systems, Inc.',
u'Copyright (c) 1999 Boris Fomitchev',
]
check_detection(expected, test_file)
def test_ics_stlport_src_allocators_cpp(self):
test_file = self.get_test_loc('ics/stlport-src/allocators.cpp')
expected = [
u'Copyright (c) 1996,1997 Silicon Graphics Computer Systems, Inc.',
u'Copyright (c) 1997 Moscow Center for SPARC Technology',
u'Copyright (c) 1999 Boris Fomitchev',
]
check_detection(expected, test_file)
def test_ics_stlport_src_bitset_cpp(self):
test_file = self.get_test_loc('ics/stlport-src/bitset.cpp')
expected = [
u'Copyright (c) 1998 Silicon Graphics Computer Systems, Inc.',
u'Copyright (c) 1999 Boris Fomitchev',
]
check_detection(expected, test_file)
def test_ics_stlport_src_ctype_cpp(self):
test_file = self.get_test_loc('ics/stlport-src/ctype.cpp')
expected = [
u'Copyright (c) 1999 Silicon Graphics Computer Systems, Inc.',
u'Copyright (c) 1999 Boris Fomitchev',
]
check_detection(expected, test_file)
def test_ics_stlport_src_dll_main_cpp(self):
test_file = self.get_test_loc('ics/stlport-src/dll_main.cpp')
expected = [
u'Copyright (c) 1994 Hewlett-Packard Company',
u'Copyright (c) 1996,1997 Silicon Graphics Computer Systems, Inc.',
u'Copyright (c) 1997 Moscow Center for SPARC Technology',
u'Copyright (c) 1999 Boris Fomitchev',
]
check_detection(expected, test_file)
def test_ics_stlport_src_lock_free_slist_h(self):
test_file = self.get_test_loc('ics/stlport-src/lock_free_slist.h')
expected = [
u'Copyright (c) 1997-1999 Silicon Graphics Computer Systems, Inc.',
u'Copyright (c) 1999 Boris Fomitchev',
]
check_detection(expected, test_file)
def test_ics_stlport_src_stlport_rc(self):
test_file = self.get_test_loc('ics/stlport-src/stlport.rc')
expected = [
u'Copyright (c) Boris Fomitchev',
]
check_detection(expected, test_file)
def test_ics_stlport_src_c_locale_dummy_c_locale_dummy_c(self):
test_file = self.get_test_loc('ics/stlport-src-c_locale_dummy/c_locale_dummy.c')
expected = [
u'Copyright (c) 1999 Silicon Graphics Computer Systems, Inc.',
u'Copyright (c) 1999 Boris Fomitchev',
]
check_detection(expected, test_file)
def test_ics_stlport_src_c_locale_win32_c_wlocale_win32_c(self):
test_file = self.get_test_loc('ics/stlport-src-c_locale_win32/c_wlocale_win32.c')
expected = [
u'Copyright (c) 2007 2008 Francois Dumont',
]
check_detection(expected, test_file)
def test_ics_stlport_stlport_assert_h(self):
test_file = self.get_test_loc('ics/stlport-stlport/assert.h')
expected = [
u'Copyright (c) 1999 Boris Fomitchev',
]
check_detection(expected, test_file)
def test_ics_stlport_stlport_exception(self):
test_file = self.get_test_loc('ics/stlport-stlport/exception')
expected = [
u'Copyright (c) 1996,1997 Silicon Graphics Computer Systems, Inc.',
u'Copyright (c) 1999 Boris Fomitchev',
]
check_detection(expected, test_file)
def test_ics_stlport_stlport_limits(self):
test_file = self.get_test_loc('ics/stlport-stlport/limits')
expected = [
u'Copyright (c) 1997 Silicon Graphics Computer Systems, Inc.',
u'Copyright (c) 1999 Boris Fomitchev',
]
check_detection(expected, test_file)
def test_ics_stlport_stlport_locale(self):
test_file = self.get_test_loc('ics/stlport-stlport/locale')
expected = [
u'Copyright (c) 1999 Silicon Graphics Computer Systems, Inc.',
u'Copyright (c) 1999 Boris Fomitchev',
]
check_detection(expected, test_file)
def test_ics_stlport_stlport_numeric(self):
test_file = self.get_test_loc('ics/stlport-stlport/numeric')
expected = [
u'Copyright (c) 1994 Hewlett-Packard Company',
u'Copyright (c) 1996,1997 Silicon Graphics Computer Systems, Inc.',
u'Copyright (c) 1999 Boris Fomitchev',
]
check_detection(expected, test_file)
def test_ics_stlport_stlport_rope(self):
test_file = self.get_test_loc('ics/stlport-stlport/rope')
expected = [
u'Copyright (c) 1997 Silicon Graphics Computer Systems, Inc.',
]
check_detection(expected, test_file)
def test_ics_stlport_stlport_type_traits(self):
test_file = self.get_test_loc('ics/stlport-stlport/type_traits')
expected = [
u'Copyright (c) 2007, 2008 Petr Ovtchenkov',
]
check_detection(expected, test_file)
def test_ics_stlport_stlport_unordered_map(self):
test_file = self.get_test_loc('ics/stlport-stlport/unordered_map')
expected = [
u'Copyright (c) 2004,2005 Francois Dumont',
]
check_detection(expected, test_file)
def test_ics_stlport_stlport_stl_carray_h(self):
test_file = self.get_test_loc('ics/stlport-stlport-stl/_carray.h')
expected = [
u'Copyright (c) 2005 Francois Dumont',
]
check_detection(expected, test_file)
def test_ics_stlport_stlport_stl_function_h(self):
test_file = self.get_test_loc('ics/stlport-stlport-stl/_function.h')
expected = [
u'Copyright (c) 1994 Hewlett-Packard Company',
u'Copyright (c) 1996-1998 Silicon Graphics Computer Systems, Inc.',
u'Copyright (c) 1997 Moscow Center for SPARC Technology',
u'Copyright (c) 1999 Boris Fomitchev',
]
check_detection(expected, test_file)
def test_ics_stlport_stlport_stl_function_adaptors_h(self):
test_file = self.get_test_loc('ics/stlport-stlport-stl/_function_adaptors.h')
expected = [
u'Copyright (c) 1994 Hewlett-Packard Company',
u'Copyright (c) 1996-1998 Silicon Graphics Computer Systems, Inc.',
u'Copyright (c) 1997 Moscow Center for SPARC Technology',
u'Copyright (c) 1999 Boris Fomitchev',
u'Copyright (c) 2000 Pavel Kuznetsov',
u"Copyright (c) 2001 Meridian'93",
]
check_detection(expected, test_file)
def test_ics_stlport_stlport_stl_hash_fun_h(self):
test_file = self.get_test_loc('ics/stlport-stlport-stl/_hash_fun.h')
expected = [
u'Copyright (c) 1996-1998 Silicon Graphics Computer Systems, Inc.',
u'Copyright (c) 1994 Hewlett-Packard Company',
]
check_detection(expected, test_file)
def test_ics_stlport_stlport_stl_heap_h(self):
test_file = self.get_test_loc('ics/stlport-stlport-stl/_heap.h')
expected = [
u'Copyright (c) 1994 Hewlett-Packard Company',
u'Copyright (c) 1997 Silicon Graphics Computer Systems, Inc.',
]
check_detection(expected, test_file)
def test_ics_stlport_stlport_stl_limits_c(self):
test_file = self.get_test_loc('ics/stlport-stlport-stl/_limits.c')
expected = [
u'Copyright (c) 1998,1999 Silicon Graphics Computer Systems, Inc.',
u'Copyright (c) 1999 Boris Fomitchev',
]
check_detection(expected, test_file)
def test_ics_stlport_stlport_stl_string_base_h(self):
test_file = self.get_test_loc('ics/stlport-stlport-stl/_string_base.h')
expected = [
u'Copyright (c) 1997-1999 Silicon Graphics Computer Systems, Inc.',
u'Copyright (c) 1999 Boris Fomitchev',
u'Copyright (c) 2003 Francois Dumont',
]
check_detection(expected, test_file)
def test_ics_stlport_stlport_stl_boost_type_traits_h(self):
test_file = self.get_test_loc('ics/stlport-stlport-stl/boost_type_traits.h')
expected = [
u'Copyright (c) 2004 Francois Dumont',
]
check_detection(expected, test_file)
def test_ics_stlport_stlport_stl_concept_checks_h(self):
test_file = self.get_test_loc('ics/stlport-stlport-stl/concept_checks.h')
expected = [
u'Copyright (c) 1999 Silicon Graphics Computer Systems, Inc.',
]
check_detection(expected, test_file)
def test_ics_stlport_stlport_stl_msl_string_h_trail_inc(self):
test_file = self.get_test_loc('ics/stlport-stlport-stl/msl_string.h')
expected = [
u'Copyright (c) 1998 Mark of the Unicorn, Inc.',
]
check_detection(expected, test_file)
def test_ics_stlport_stlport_stl_type_manips_h(self):
test_file = self.get_test_loc('ics/stlport-stlport-stl/type_manips.h')
expected = [
u'Copyright (c) 2003 Francois Dumont',
]
check_detection(expected, test_file)
def test_ics_stlport_stlport_stl_type_traits_h(self):
test_file = self.get_test_loc('ics/stlport-stlport-stl/type_traits.h')
expected = [
u'Copyright (c) 1996,1997 Silicon Graphics Computer Systems, Inc.',
u'Copyright (c) 1997 Moscow Center for SPARC Technology',
u'Copyright (c) 1999 Boris Fomitchev',
u'Copyright 2000 Adobe Systems Incorporated and others.',
]
check_detection(expected, test_file)
def test_ics_stlport_stlport_stl_config_native_headers_h(self):
test_file = self.get_test_loc('ics/stlport-stlport-stl-config/_native_headers.h')
expected = [
u'Copyright (c) 2006 Francois Dumont',
]
check_detection(expected, test_file)
def test_ics_stlport_test_eh_main_cpp_trail_inc(self):
test_file = self.get_test_loc('ics/stlport-test-eh/main.cpp')
expected = [
u'Copyright (c) 1997 Mark of the Unicorn, Inc.',
u'Copyright (c) 1997 Moscow Center for SPARC Technology',
]
check_detection(expected, test_file)
def test_ics_stlport_test_eh_mwerks_console_os_x_c(self):
test_file = self.get_test_loc('ics/stlport-test-eh/mwerks_console_OS_X.c')
expected = [
u'Copyright (c) 1995-2002 Metrowerks Corporation.',
]
check_detection(expected, test_file)
def test_ics_stlport_test_eh_random_number_h_trail_inc(self):
test_file = self.get_test_loc('ics/stlport-test-eh/random_number.h')
expected = [
u'Copyright (c) 1997-1998 Mark of the Unicorn, Inc.',
]
check_detection(expected, test_file)
def test_ics_stlport_test_eh_test_insert_h_trail_inc(self):
test_file = self.get_test_loc('ics/stlport-test-eh/test_insert.h')
expected = [
u'Copyright (c) 1997 Mark of the Unicorn, Inc.',
]
check_detection(expected, test_file)
def test_ics_stlport_test_unit_limits_test_cpp(self):
test_file = self.get_test_loc('ics/stlport-test-unit/limits_test.cpp')
expected = [
u'Copyright Jens Maurer 2000',
]
check_detection(expected, test_file)
def test_ics_stlport_test_unit_cppunit_cppunit_mini_h(self):
test_file = self.get_test_loc('ics/stlport-test-unit-cppunit/cppunit_mini.h')
expected = [
u'Copyright (c) 2003, 2004 Zdenek Nemec',
]
check_detection(expected, test_file)
def test_ics_strace_aclocal_m4(self):
test_file = self.get_test_loc('ics/strace/aclocal.m4')
expected = [
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1997, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1998, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1997, 1999, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_strace_bjm_c(self):
test_file = self.get_test_loc('ics/strace/bjm.c')
expected = [
u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>',
u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>',
u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
u'Copyright (c) 1996-1999 Wichert Akkerman <wichert@cistron.nl>',
]
check_detection(expected, test_file)
def test_ics_strace_config_log(self):
test_file = self.get_test_loc('ics/strace/config.log')
expected = [
u'Copyright (c) 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_strace_copyright(self):
test_file = self.get_test_loc('ics/strace/COPYRIGHT')
expected = [
u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>',
u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>',
u'Copyright (c) 1993 Ulrich Pegelow <pegelow@moorea.uni-muenster.de>',
u'Copyright (c) 1995, 1996 Michael Elizabeth Chastain <mec@duracef.shout.net>',
u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
u'Copyright (c) 1998-2001 Wichert Akkerman <wakkerma@deephackmode.org>',
# this is redundant and rare junk u'COPYRIGHT,v 1.3 2002/03/31 18:43:00 wichert',
]
check_detection(expected, test_file)
def test_ics_strace_defs_h(self):
test_file = self.get_test_loc('ics/strace/defs.h')
expected = [
u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>',
u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>',
u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
]
check_detection(expected, test_file)
def test_ics_strace_depcomp(self):
test_file = self.get_test_loc('ics/strace/depcomp')
expected = [
u'Copyright (c) 1999, 2000, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_strace_errnoent_sh(self):
test_file = self.get_test_loc('ics/strace/errnoent.sh')
expected = [
u'Copyright (c) 1993, 1994, 1995 Rick Sladkey <jrs@world.std.com>',
]
check_detection(expected, test_file)
def test_ics_strace_ioctl_c(self):
test_file = self.get_test_loc('ics/strace/ioctl.c')
expected = [
u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>',
u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>',
u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
u'Copyright (c) 1996-2001 Wichert Akkerman <wichert@cistron.nl>',
]
check_detection(expected, test_file)
def test_ics_strace_ioctlsort_c(self):
test_file = self.get_test_loc('ics/strace/ioctlsort.c')
expected = [
u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>',
u'Copyright (c) 1993, 1994, 1995 Rick Sladkey <jrs@world.std.com>',
]
check_detection(expected, test_file)
def test_ics_strace_ipc_c(self):
test_file = self.get_test_loc('ics/strace/ipc.c')
expected = [
u'Copyright (c) 1993 Ulrich Pegelow <pegelow@moorea.uni-muenster.de>',
u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>',
u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
u'Copyright (c) 1996-1999 Wichert Akkerman <wichert@cistron.nl>',
]
check_detection(expected, test_file)
def test_ics_strace_makefile_in(self):
test_file = self.get_test_loc('ics/strace/Makefile.in')
expected = [
u'Copyright (c) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_strace_mem_c(self):
test_file = self.get_test_loc('ics/strace/mem.c')
expected = [
u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>',
u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>',
u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
u'Copyright (c) 1996-1999 Wichert Akkerman <wichert@cistron.nl>',
u'Copyright (c) 2000 PocketPenguins Inc.',
]
check_detection(expected, test_file)
def test_ics_strace_net_c(self):
test_file = self.get_test_loc('ics/strace/net.c')
expected = [
u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>',
u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>',
u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
u'Copyright (c) 1996-2000 Wichert Akkerman <wichert@cistron.nl>',
]
check_detection(expected, test_file)
def test_ics_strace_proc_c(self):
test_file = self.get_test_loc('ics/strace/proc.c')
expected = [
u'Copyright (c) 1993, 1994, 1995 Rick Sladkey <jrs@world.std.com>',
]
check_detection(expected, test_file)
def test_ics_strace_process_c(self):
test_file = self.get_test_loc('ics/strace/process.c')
expected = [
u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>',
u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>',
u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
u'Copyright (c) 1996-1999 Wichert Akkerman <wichert@cistron.nl>',
u'Copyright (c) 1999 IBM Deutschland Entwicklung GmbH, IBM Corporation',
u'Copyright (c) 2000 PocketPenguins Inc.',
]
check_detection(expected, test_file)
def test_ics_strace_signal_c(self):
test_file = self.get_test_loc('ics/strace/signal.c')
expected = [
u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>',
u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>',
u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
u'Copyright (c) 1996-1999 Wichert Akkerman <wichert@cistron.nl>',
u'Copyright (c) 1999 IBM Deutschland Entwicklung GmbH, IBM Corporation',
]
check_detection(expected, test_file)
def test_ics_strace_signalent_sh(self):
test_file = self.get_test_loc('ics/strace/signalent.sh')
expected = [
u'Copyright (c) 1996 Rick Sladkey <jrs@world.std.com>',
]
check_detection(expected, test_file)
def test_ics_strace_sock_c(self):
test_file = self.get_test_loc('ics/strace/sock.c')
expected = [
u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
]
check_detection(expected, test_file)
def test_ics_strace_strace_1(self):
test_file = self.get_test_loc('ics/strace/strace.1')
expected = [
u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>',
u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>',
u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
]
check_detection(expected, test_file)
def test_ics_strace_strace_graph(self):
test_file = self.get_test_loc('ics/strace/strace-graph')
expected = [
u'Copyright (c) 1998 by Richard Braakman <dark@xs4all.nl>.',
]
check_detection(expected, test_file)
def test_ics_strace_stream_c(self):
test_file = self.get_test_loc('ics/strace/stream.c')
expected = [
u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
u'Copyright (c) 1996-1999 Wichert Akkerman <wichert@cistron.nl>',
]
check_detection(expected, test_file)
def test_ics_strace_syscallent_sh(self):
test_file = self.get_test_loc('ics/strace/syscallent.sh')
expected = [
u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
]
check_detection(expected, test_file)
def test_ics_strace_linux_dummy_h(self):
test_file = self.get_test_loc('ics/strace-linux/dummy.h')
expected = [
u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>',
u'Copyright (c) 1993, 1994, 1995 Rick Sladkey <jrs@world.std.com>',
]
check_detection(expected, test_file)
def test_ics_strace_linux_ioctlent_sh(self):
test_file = self.get_test_loc('ics/strace-linux/ioctlent.sh')
expected = [
u'Copyright (c) 2001 Wichert Akkerman <wichert@cistron.nl>',
]
check_detection(expected, test_file)
def test_ics_strace_strace_linux_hppa_syscallent_h(self):
test_file = self.get_test_loc('ics/strace-strace-linux-hppa/syscallent.h')
expected = [
u'Copyright (c) 2001 Hewlett-Packard, Matthew Wilcox',
]
check_detection(expected, test_file)
def test_ics_strace_strace_linux_ia64_syscallent_h(self):
test_file = self.get_test_loc('ics/strace-strace-linux-ia64/syscallent.h')
expected = [
u'Copyright (c) 1999, 2001 Hewlett-Packard Co David Mosberger-Tang <davidm@hpl.hp.com>',
]
check_detection(expected, test_file)
def test_ics_strace_strace_linux_mips_ioctlent_sh(self):
test_file = self.get_test_loc('ics/strace-strace-linux-mips/ioctlent.sh')
expected = [
u'Copyright (c) 1993, 1994, 1995 Rick Sladkey <jrs@world.std.com>',
u'Copyright (c) 1995, 1996 Michael Elizabeth Chastain <mec@duracef.shout.net>',
]
check_detection(expected, test_file)
def test_ics_strace_strace_linux_s390_syscallent_h(self):
test_file = self.get_test_loc('ics/strace-strace-linux-s390/syscallent.h')
expected = [
u'Copyright (c) 2000 IBM Deutschland Entwicklung GmbH, IBM Coporation Authors',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_strace_strace_linux_s390_syscallent_h_extra_author(self):
test_file = self.get_test_loc('ics/strace-strace-linux-s390/syscallent.h')
expected = [
u'Copyright (c) 2000 IBM Deutschland Entwicklung GmbH, IBM Coporation',
]
check_detection(expected, test_file)
def test_ics_strace_strace_linux_sh_syscallent_h(self):
test_file = self.get_test_loc('ics/strace-strace-linux-sh/syscallent.h')
expected = [
u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>',
u'Copyright (c) 1993, 1994, 1995 Rick Sladkey <jrs@world.std.com>',
u'Copyright (c) 2000 PocketPenguins Inc.',
]
check_detection(expected, test_file)
def test_ics_strace_strace_linux_sparc_syscall_h(self):
test_file = self.get_test_loc('ics/strace-strace-linux-sparc/syscall.h')
expected = [
u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>',
u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
]
check_detection(expected, test_file)
def test_ics_svox_pico_androidmanifest_xml(self):
test_file = self.get_test_loc('ics/svox-pico/AndroidManifest.xml')
expected = [
u'Copyright 2009, The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_svox_pico_resources_tools_lingwarebuilding_readme_txt(self):
test_file = self.get_test_loc('ics/svox-pico_resources-tools-LingwareBuilding/Readme.txt')
expected = [
u'Copyright (c) 2008-2009 SVOX AG',
]
check_detection(expected, test_file)
def test_ics_svox_pico_resources_tools_lingwarebuilding_picolingware_source_files_textana_en_gb_en_gb_lexpos_utf(self):
test_file = self.get_test_loc('ics/svox-pico_resources-tools-LingwareBuilding-PicoLingware_source_files-textana-en-GB/en-GB_lexpos.utf')
expected = [
u'Copyright (c) 2008-2009 SVOX AG',
]
check_detection(expected, test_file)
def test_ics_svox_pico_resources_tools_lingwarebuilding_picolingware_tools_windows_tools_buildbin_sh(self):
test_file = self.get_test_loc('ics/svox-pico_resources-tools-LingwareBuilding-PicoLingware_tools_windows-tools/buildbin.sh')
expected = [
u'Copyright (c) 2009 SVOX AG.',
]
check_detection(expected, test_file)
def test_ics_svox_pico_compat_jni_com_android_tts_compat_synthproxy_cpp(self):
test_file = self.get_test_loc('ics/svox-pico-compat-jni/com_android_tts_compat_SynthProxy.cpp')
expected = [
u'Copyright (c) 2009-2010 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_svox_pico_lib_notice(self):
test_file = self.get_test_loc('ics/svox-pico-lib/NOTICE')
expected = [
u'Copyright (c) 2008-2009 SVOX AG',
]
check_detection(expected, test_file)
def test_ics_svox_pico_lib_picoacph_c(self):
test_file = self.get_test_loc('ics/svox-pico-lib/picoacph.c')
expected = [
u'Copyright (c) 2008-2009 SVOX AG',
u'Copyright (c) 2008-2009 SVOX AG',
]
check_detection(expected, test_file)
def test_ics_svox_pico_lib_picofftsg_c(self):
test_file = self.get_test_loc('ics/svox-pico-lib/picofftsg.c')
expected = [
u'Copyright (c) 2008-2009 SVOX AG',
u'Copyright (c) 2008-2009 SVOX AG',
u'(Copyright Takuya OOURA, 1996-2001)',
]
check_detection(expected, test_file)
def test_ics_svox_pico_lib_picoos_c(self):
test_file = self.get_test_loc('ics/svox-pico-lib/picoos.c')
expected = [
u'Copyright (c) 2008-2009 SVOX AG',
u'Copyright (c) 2008-2009 SVOX AG',
u'(c) SVOX AG',
]
check_detection(expected, test_file)
def test_ics_svox_pico_res_xml_tts_engine_xml(self):
test_file = self.get_test_loc('ics/svox-pico-res-xml/tts_engine.xml')
expected = [
u'Copyright (c) 2011 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_svox_pico_res_xml_voices_list_xml(self):
test_file = self.get_test_loc('ics/svox-pico-res-xml/voices_list.xml')
expected = [
u'Copyright (c) 2009 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_svox_pico_tts_com_svox_picottsengine_cpp(self):
test_file = self.get_test_loc('ics/svox-pico-tts/com_svox_picottsengine.cpp')
expected = [
u'Copyright (c) 2008-2009 SVOX AG',
]
check_detection(expected, test_file)
def test_ics_tagsoup_src_org_ccil_cowan_tagsoup_autodetector_java(self):
test_file = self.get_test_loc('ics/tagsoup-src-org-ccil-cowan-tagsoup/AutoDetector.java')
expected = [
u'Copyright 2002-2008 by John Cowan.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_aclocal_m4_trail_name_m4_dnl_comment(self):
test_file = self.get_test_loc('ics/tcpdump/aclocal.m4')
expected = [
u'Copyright (c) 1995, 1996, 1997, 1998 The Regents of the University of California.',
u'Copyright (c) 1999 WIDE Project.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_addrtoname_c(self):
test_file = self.get_test_loc('ics/tcpdump/addrtoname.c')
expected = [
u'Copyright (c) 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_addrtoname_h(self):
test_file = self.get_test_loc('ics/tcpdump/addrtoname.h')
expected = [
u'Copyright (c) 1990, 1992, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_af_c(self):
test_file = self.get_test_loc('ics/tcpdump/af.c')
expected = [
u'Copyright (c) 1998-2006 The TCPDUMP project',
]
check_detection(expected, test_file)
def test_ics_tcpdump_aodv_h(self):
test_file = self.get_test_loc('ics/tcpdump/aodv.h')
expected = [
u'Copyright (c) 2003 Bruce M. Simpson <bms@spc.org>',
]
check_detection(expected, test_file)
def test_ics_tcpdump_appletalk_h(self):
test_file = self.get_test_loc('ics/tcpdump/appletalk.h')
expected = [
u'Copyright (c) 1988, 1989, 1990, 1993, 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_atm_h(self):
test_file = self.get_test_loc('ics/tcpdump/atm.h')
expected = [
u'Copyright (c) 2002 Guy Harris.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_bootp_h(self):
test_file = self.get_test_loc('ics/tcpdump/bootp.h')
expected = [
u'Copyright 1988 by Carnegie Mellon.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_chdlc_h(self):
test_file = self.get_test_loc('ics/tcpdump/chdlc.h')
expected = [
u'Copyright (c) 1990, 1991, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_cpack_c(self):
test_file = self.get_test_loc('ics/tcpdump/cpack.c')
expected = [
u'Copyright (c) 2003, 2004 David Young.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_dccp_h(self):
test_file = self.get_test_loc('ics/tcpdump/dccp.h')
expected = [
u'Copyright (c) Arnaldo Carvalho de Melo 2004',
u'Copyright (c) Ian McDonald 2005 <iam4@cs.waikato.ac.nz>',
u'Copyright (c) Yoshifumi Nishida 2005',
]
check_detection(expected, test_file)
def test_ics_tcpdump_decnet_h(self):
test_file = self.get_test_loc('ics/tcpdump/decnet.h')
expected = [
u'Copyright (c) 1992, 1994, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_decode_prefix_h(self):
test_file = self.get_test_loc('ics/tcpdump/decode_prefix.h')
expected = [
u'Copyright (c) 1999 WIDE Project.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_enc_h(self):
test_file = self.get_test_loc('ics/tcpdump/enc.h')
expected = [
u'Copyright (c) 1995, 1996, 1997, 1998 by John Ioannidis, Angelos D. Keromytis and Niels Provos.',
u'Copyright (c) 2001, Angelos D. Keromytis.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_gmt2local_c(self):
test_file = self.get_test_loc('ics/tcpdump/gmt2local.c')
expected = [
u'Copyright (c) 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_icmp6_h(self):
test_file = self.get_test_loc('ics/tcpdump/icmp6.h')
expected = [
u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
u'Copyright (c) 1982, 1986, 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_ieee802_11_h(self):
test_file = self.get_test_loc('ics/tcpdump/ieee802_11.h')
expected = [
u'Copyright (c) 2001 Fortress Technologies Charlie Lenahan',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_tcpdump_ieee802_11_h_trail_email(self):
test_file = self.get_test_loc('ics/tcpdump/ieee802_11.h')
expected = [
u'Copyright (c) 2001 Fortress Technologies Charlie Lenahan ( clenahan@fortresstech.com )',
]
check_detection(expected, test_file)
def test_ics_tcpdump_interface_h(self):
test_file = self.get_test_loc('ics/tcpdump/interface.h')
expected = [
u'Copyright (c) 1988-2002 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_ipproto_h(self):
test_file = self.get_test_loc('ics/tcpdump/ipproto.h')
expected = [
u'Copyright (c) 1982, 1986, 1990, 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_l2tp_h(self):
test_file = self.get_test_loc('ics/tcpdump/l2tp.h')
expected = [
u'Copyright (c) 1991, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_machdep_c(self):
test_file = self.get_test_loc('ics/tcpdump/machdep.c')
expected = [
u'Copyright (c) 1996, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_makefile_in(self):
test_file = self.get_test_loc('ics/tcpdump/Makefile.in')
expected = [
u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_makemib(self):
test_file = self.get_test_loc('ics/tcpdump/makemib')
expected = [
u'Copyright (c) 1990, 1996 John Robert LoVerso.',
u'copyright (c) 1999 William C. Fenner.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_mpls_h(self):
test_file = self.get_test_loc('ics/tcpdump/mpls.h')
expected = [
u'Copyright (c) 2001 WIDE Project.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_nameser_h(self):
test_file = self.get_test_loc('ics/tcpdump/nameser.h')
expected = [
u'Copyright (c) 1983, 1989, 1993 The Regents of the University of California.',
u'Portions Copyright (c) 1993 by Digital Equipment Corporation.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_netdissect_h(self):
test_file = self.get_test_loc('ics/tcpdump/netdissect.h')
expected = [
u'Copyright (c) 1988-1997 The Regents of the University of California.',
u'Copyright (c) 1998-2004 Michael Richardson <mcr@tcpdump.org> The TCPDUMP project',
]
check_detection(expected, test_file)
def test_ics_tcpdump_nfs_h(self):
test_file = self.get_test_loc('ics/tcpdump/nfs.h')
expected = [
u'Copyright (c) 1989, 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_nfsfh_h_trail_name(self):
test_file = self.get_test_loc('ics/tcpdump/nfsfh.h')
expected = [
u'Copyright (c) 1993, 1994 Jeffrey C. Mogul, Digital Equipment Corporation, Western Research Laboratory.',
u'Copyright (c) 2001 Compaq Computer Corporation.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_parsenfsfh_c(self):
test_file = self.get_test_loc('ics/tcpdump/parsenfsfh.c')
expected = [
u'Copyright (c) 1993, 1994 Jeffrey C. Mogul, Digital Equipment Corporation, Western Research Laboratory.',
u'Copyright (c) 2001 Compaq Computer Corporation.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_pmap_prot_h(self):
test_file = self.get_test_loc('ics/tcpdump/pmap_prot.h')
expected = [
u'Copyright (c) 1984, Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_ah_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-ah.c')
expected = [
u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_ap1394_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-ap1394.c')
expected = [
u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 2000 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_ascii_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-ascii.c')
expected = [
u'Copyright (c) 1997, 1998 The NetBSD Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_atm_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-atm.c')
expected = [
u'Copyright (c) 1994, 1995, 1996, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_beep_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-beep.c')
expected = [
u'Copyright (c) 2000, Richard Sharpe',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_bootp_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-bootp.c')
expected = [
u'Copyright (c) 1990, 1991, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_cdp_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-cdp.c')
expected = [
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_cnfp_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-cnfp.c')
expected = [
u'Copyright (c) 1998 Michael Shalayeff',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_dccp_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-dccp.c')
expected = [
u'Copyright (c) Arnaldo Carvalho de Melo 2004',
u'Copyright (c) Ian McDonald 2005',
u'Copyright (c) Yoshifumi Nishida 2005',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_dhcp6_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-dhcp6.c')
expected = [
u'Copyright (c) 1998 and 1999 WIDE Project.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_dvmrp_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-dvmrp.c')
expected = [
u'Copyright (c) 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_eap_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-eap.c')
expected = [
u'Copyright (c) 2004 - Michael Richardson <mcr@xelerance.com>',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_egp_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-egp.c')
expected = [
u'Copyright (c) 1991, 1992, 1993, 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_eigrp_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-eigrp.c')
expected = [
u'Copyright (c) 1998-2004 Hannes Gredler <hannes@tcpdump.org> The TCPDUMP project',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_enc_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-enc.c')
expected = [
u'Copyright (c) 1990, 1991, 1993, 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_fddi_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-fddi.c')
expected = [
u'Copyright (c) 1991, 1992, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_frag6_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-frag6.c')
expected = [
u'Copyright (c) 1988, 1989, 1990, 1991, 1993, 1994 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_gre_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-gre.c')
expected = [
u'Copyright (c) 2002 Jason L. Wright (jason@thought.net)',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_hsrp_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-hsrp.c')
expected = [
u'Copyright (c) 2001 Julian Cowley',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_ip6opts_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-ip6opts.c')
expected = [
u'Copyright (c) 1998 WIDE Project.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_krb_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-krb.c')
expected = [
u'Copyright (c) 1995, 1996, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_lwres_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-lwres.c')
expected = [
u'Copyright (c) 2001 WIDE Project.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_mobile_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-mobile.c')
expected = [
u'(c) 1998 The NetBSD Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_mobility_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-mobility.c')
expected = [
u'Copyright (c) 2002 WIDE Project.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_msdp_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-msdp.c')
expected = [
u'Copyright (c) 2001 William C. Fenner.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_olsr_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-olsr.c')
expected = [
u'Copyright (c) 1998-2007 The TCPDUMP project',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_radius_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-radius.c')
expected = [
u'Copyright (c) 2000 Alfredo Andres Omella.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_rip_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-rip.c')
expected = [
u'Copyright (c) 1989, 1990, 1991, 1993, 1994, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_ripng_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-ripng.c')
expected = [
u'Copyright (c) 1989, 1990, 1991, 1993, 1994 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_rx_c(self):
    # Currently-passing expectation for print-rx.c; the fuller notice
    # with the trailing holder name is covered by the companion
    # expectedFailure test for the same file.
    loc = self.get_test_loc('ics/tcpdump/print-rx.c')
    check_detection([u'Copyright (c) 2000 United States Government'], loc)
@expectedFailure
def test_ics_tcpdump_print_rx_c_trail_name(self):
    # Marked expectedFailure: the fuller notice including the trailing
    # 'as represented by the Secretary of the Navy.' is not yet detected.
    loc = self.get_test_loc('ics/tcpdump/print-rx.c')
    check_detection(
        [u'Copyright: (c) 2000 United States Government as represented by the Secretary of the Navy.'],
        loc,
    )
def test_ics_tcpdump_print_sctp_c_trail_name(self):
test_file = self.get_test_loc('ics/tcpdump/print-sctp.c')
expected = [
u'Copyright (c) 2001 NETLAB, Temple University',
u'Copyright (c) 2001 Protocol Engineering Lab, University of Delaware',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_sl_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-sl.c')
expected = [
u'Copyright (c) 1989, 1990, 1991, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_slow_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-slow.c')
expected = [
u'Copyright (c) 1998-2005 The TCPDUMP project',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_smb_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-smb.c')
expected = [
u'Copyright (c) Andrew Tridgell 1995-1999',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_snmp_c(self):
    # Three distinct notices, in file order.
    expected = [
        u'Copyright (c) 1990, 1991, 1993, 1994, 1995, 1996, 1997 John Robert LoVerso.',
        u'J. Schoenwaelder, Copyright (c) 1999.',
        u'Copyright (c) 1990, 1991, 1993, 1994, 1995, 1996, 1997 this software was produced',
    ]
    check_detection(expected, self.get_test_loc('ics/tcpdump/print-snmp.c'))
@expectedFailure
def test_ics_tcpdump_print_snmp_c_trail_name_lead_name_trail_name_complex(self):
test_file = self.get_test_loc('ics/tcpdump/print-snmp.c')
expected = [
u'Copyright (c) 1990, 1991, 1993, 1994, 1995, 1996, 1997 John Robert LoVerso.',
u'J. Schoenwaelder, Copyright (c) 1999.',
u'Los Alamos National Laboratory Copyright (c) 1990, 1991, 1993, 1994, 1995, 1996, 1997',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_stp_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-stp.c')
expected = [
u'Copyright (c) 2000 Lennert Buytenhek',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_tcp_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-tcp.c')
expected = [
u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.',
u'Copyright (c) 1999-2004 The tcpdump.org project',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_telnet_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-telnet.c')
expected = [
u'Copyright (c) 1997, 1998 The NetBSD Foundation, Inc.',
u'Copyright (c) 1994, Simon J. Gerraty.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_timed_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-timed.c')
expected = [
u'Copyright (c) 2000 Ben Smithurst <ben@scientia.demon.co.uk>',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_token_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-token.c')
expected = [
u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_vrrp_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-vrrp.c')
expected = [
u'Copyright (c) 2000 William C. Fenner.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_wb_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-wb.c')
expected = [
u'Copyright (c) 1993, 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_print_zephyr_c(self):
test_file = self.get_test_loc('ics/tcpdump/print-zephyr.c')
expected = [
u'Copyright (c) 2001 Nickolai Zeldovich <kolya@MIT.EDU>',
]
check_detection(expected, test_file)
def test_ics_tcpdump_route6d_h(self):
test_file = self.get_test_loc('ics/tcpdump/route6d.h')
expected = [
u'Copyright (c) 1995, 1996, 1997 and 1998 WIDE Project.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_sctpconstants_h_trail_name(self):
test_file = self.get_test_loc('ics/tcpdump/sctpConstants.h')
expected = [
u'Implementation Copyright (c) 1999 Cisco And Motorola',
]
check_detection(expected, test_file)
def test_ics_tcpdump_slcompress_h(self):
test_file = self.get_test_loc('ics/tcpdump/slcompress.h')
expected = [
u'Copyright (c) 1989, 1990, 1992, 1993 Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_slip_h(self):
test_file = self.get_test_loc('ics/tcpdump/slip.h')
expected = [
u'Copyright (c) 1990 Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_strcasecmp_c(self):
test_file = self.get_test_loc('ics/tcpdump/strcasecmp.c')
expected = [
u'Copyright (c) 1987 Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_tcpdump_1_trail_name(self):
test_file = self.get_test_loc('ics/tcpdump/tcpdump.1')
expected = [
u'Copyright (c) 1987, 1988, 1989, 1990, 1991, 1992, 1994, 1995, 1996, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_tcpdump_c(self):
test_file = self.get_test_loc('ics/tcpdump/tcpdump.c')
expected = [
u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 2000 The Regents of the University of California.',
u'Copyright (c) 2001 Seth Webster <swebster@sst.ll.mit.edu>',
u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 2000 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_telnet_h(self):
test_file = self.get_test_loc('ics/tcpdump/telnet.h')
expected = [
u'Copyright (c) 1983, 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_token_h(self):
test_file = self.get_test_loc('ics/tcpdump/token.h')
expected = [
u'Copyright (c) 1998, Larry Lile',
]
check_detection(expected, test_file)
def test_ics_tcpdump_vfprintf_c(self):
test_file = self.get_test_loc('ics/tcpdump/vfprintf.c')
expected = [
u'Copyright (c) 1995 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_missing_inet_aton_c_trail_place(self):
test_file = self.get_test_loc('ics/tcpdump-missing/inet_aton.c')
expected = [
u'Copyright (c) 1995, 1996, 1997 Kungliga Tekniska Hogskolan (Royal Institute of Technology, Stockholm, Sweden).',
]
check_detection(expected, test_file)
def test_ics_tcpdump_missing_inet_ntop_c_trail_place(self):
test_file = self.get_test_loc('ics/tcpdump-missing/inet_ntop.c')
expected = [
u'Copyright (c) 1999 Kungliga Tekniska Hogskolan (Royal Institute of Technology, Stockholm, Sweden).',
]
check_detection(expected, test_file)
def test_ics_tinyxml_android_mk(self):
    # AOSP makefile header notice.
    loc = self.get_test_loc('ics/tinyxml/Android.mk')
    check_detection([u'Copyright 2005 The Android Open Source Project'], loc)
def test_ics_tinyxml_tinyxml_cpp(self):
test_file = self.get_test_loc('ics/tinyxml/tinyxml.cpp')
expected = [
u'copyright (c) 2000-2002 Lee Thomason (www.grinninglizard.com)',
]
check_detection(expected, test_file)
def test_ics_tinyxml_tinyxml_h(self):
test_file = self.get_test_loc('ics/tinyxml/tinyxml.h')
expected = [
u'copyright (c) 2000-2002 Lee Thomason (www.grinninglizard.com)',
]
check_detection(expected, test_file)
def test_ics_tremolo_notice(self):
test_file = self.get_test_loc('ics/tremolo/NOTICE')
expected = [
u'Copyright (c) 2002-2009, Xiph.org Foundation',
u'Copyright (c) 2010, Robin Watts for Pinknoise Productions Ltd',
]
check_detection(expected, test_file)
def test_ics_tremolo_tremolo_asm_arm_h(self):
test_file = self.get_test_loc('ics/tremolo-Tremolo/asm_arm.h')
expected = [
u'Copyright (c) 2002-2009, Xiph.org Foundation',
u'Copyright (c) 2010, Robin Watts for Pinknoise Productions Ltd',
]
check_detection(expected, test_file)
def test_ics_webp_examples_dwebp_c(self):
test_file = self.get_test_loc('ics/webp-examples/dwebp.c')
expected = [
u'Copyright 2010 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_webp_include_webp_encode_h(self):
test_file = self.get_test_loc('ics/webp-include-webp/encode.h')
expected = [
u'Copyright 2011 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_webp_src_dec_android_mk(self):
test_file = self.get_test_loc('ics/webp-src-dec/Android.mk')
expected = [
u'Copyright 2010 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_webp_src_enc_dsp_c(self):
test_file = self.get_test_loc('ics/webp-src-enc/dsp.c')
expected = [
u'Copyright 2011 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_webrtc_android_webrtc_mk(self):
test_file = self.get_test_loc('ics/webrtc/android-webrtc.mk')
expected = [
u'Copyright (c) 2011 The WebRTC project',
]
check_detection(expected, test_file)
def test_ics_webrtc_notice(self):
    # Aggregated NOTICE file: repeated notices must each be reported,
    # hence the duplicated Takuya OOURA entries.
    expected = [
        u'Copyright (c) 2011 The WebRTC project',
        u'Copyright (c) 2010 The Android Open Source Project',
        u'Copyright Takuya OOURA, 1996-2001',
        u'Copyright Takuya OOURA, 1996-2001',
        u'Copyright Steven J. Ross 2001 - 2009.',
    ]
    check_detection(expected, self.get_test_loc('ics/webrtc/NOTICE'))
def test_ics_webrtc_src_common_types_h(self):
test_file = self.get_test_loc('ics/webrtc-src/common_types.h')
expected = [
u'Copyright (c) 2011 The WebRTC project',
]
check_detection(expected, test_file)
def test_ics_webrtc_src_modules_audio_processing_aec_main_source_aec_rdft_c(self):
test_file = self.get_test_loc('ics/webrtc-src-modules-audio_processing-aec-main-source/aec_rdft.c')
expected = [
u'Copyright Takuya OOURA, 1996-2001',
]
check_detection(expected, test_file)
def test_ics_webrtc_src_system_wrappers_source_spreadsortlib_spreadsort_hpp(self):
test_file = self.get_test_loc('ics/webrtc-src-system_wrappers-source-spreadsortlib/spreadsort.hpp')
expected = [
u'Copyright Steven J. Ross 2001 - 2009.',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_aes_c(self):
    # Holder name followed by an email address in angle brackets.
    loc = self.get_test_loc('ics/wpa_supplicant/aes.c')
    check_detection([u'Copyright (c) 2003-2005, Jouni Malinen <j@w1.fi>'], loc)
def test_ics_wpa_supplicant_aes_h(self):
test_file = self.get_test_loc('ics/wpa_supplicant/aes.h')
expected = [
u'Copyright (c) 2003-2006, Jouni Malinen <j@w1.fi>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_aes_wrap_c(self):
test_file = self.get_test_loc('ics/wpa_supplicant/aes_wrap.c')
expected = [
u'Copyright (c) 2003-2007, Jouni Malinen <j@w1.fi>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_asn1_c(self):
test_file = self.get_test_loc('ics/wpa_supplicant/asn1.c')
expected = [
u'Copyright (c) 2006, Jouni Malinen <j@w1.fi>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_base64_c(self):
test_file = self.get_test_loc('ics/wpa_supplicant/base64.c')
expected = [
u'Copyright (c) 2005, Jouni Malinen <j@w1.fi>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_build_config_h(self):
test_file = self.get_test_loc('ics/wpa_supplicant/build_config.h')
expected = [
u'Copyright (c) 2005-2006, Jouni Malinen <j@w1.fi>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_common_c(self):
test_file = self.get_test_loc('ics/wpa_supplicant/common.c')
expected = [
u'Copyright (c) 2002-2006, Jouni Malinen <j@w1.fi>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_config_h(self):
test_file = self.get_test_loc('ics/wpa_supplicant/config.h')
expected = [
u'Copyright (c) 2003-2005, Jouni Malinen <j@w1.fi>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_crypto_c(self):
test_file = self.get_test_loc('ics/wpa_supplicant/crypto.c')
expected = [
u'Copyright (c) 2004-2005, Jouni Malinen <j@w1.fi>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_ctrl_iface_c(self):
test_file = self.get_test_loc('ics/wpa_supplicant/ctrl_iface.c')
expected = [
u'Copyright (c) 2004-2006, Jouni Malinen <j@w1.fi>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_ctrl_iface_dbus_c(self):
test_file = self.get_test_loc('ics/wpa_supplicant/ctrl_iface_dbus.c')
expected = [
u'Copyright (c) 2006, Dan Williams <dcbw@redhat.com> and Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_driver_atmel_c(self):
test_file = self.get_test_loc('ics/wpa_supplicant/driver_atmel.c')
expected = [
u'Copyright (c) 2000-2005, ATMEL Corporation',
u'Copyright (c) 2004-2007, Jouni Malinen <j@w1.fi>',
u'Copyright 2000-2001 ATMEL Corporation.',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_driver_broadcom_c(self):
    # Two distinct personal-name holders, in file order.
    expected = [
        u'Copyright (c) 2004, Nikki Chumkov <nikki@gattaca.ru>',
        u'Copyright (c) 2004, Jouni Malinen <j@w1.fi>',
    ]
    check_detection(expected, self.get_test_loc('ics/wpa_supplicant/driver_broadcom.c'))
def test_ics_wpa_supplicant_driver_bsd_c(self):
test_file = self.get_test_loc('ics/wpa_supplicant/driver_bsd.c')
expected = [
u'Copyright (c) 2004, Sam Leffler <sam@errno.com>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_driver_ipw_c(self):
test_file = self.get_test_loc('ics/wpa_supplicant/driver_ipw.c')
expected = [
u'Copyright (c) 2005 Zhu Yi <yi.zhu@intel.com>',
u'Copyright (c) 2004 Lubomir Gelo <lgelo@cnc.sk>',
u'Copyright (c) 2003-2004, Jouni Malinen <j@w1.fi>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_driver_madwifi_c(self):
test_file = self.get_test_loc('ics/wpa_supplicant/driver_madwifi.c')
expected = [
u'Copyright (c) 2004, Sam Leffler <sam@errno.com>',
u'Copyright (c) 2004-2005, Jouni Malinen <j@w1.fi>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_driver_ndiswrapper_c(self):
test_file = self.get_test_loc('ics/wpa_supplicant/driver_ndiswrapper.c')
expected = [
u'Copyright (c) 2004-2006, Giridhar Pemmasani <giri@lmc.cs.sunysb.edu>',
u'Copyright (c) 2004-2006, Jouni Malinen <j@w1.fi>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_driver_prism54_c(self):
test_file = self.get_test_loc('ics/wpa_supplicant/driver_prism54.c')
expected = [
u'Copyright (c) 2003-2005, Jouni Malinen <j@w1.fi>',
u'Copyright (c) 2004, Luis R. Rodriguez <mcgrof@ruslug.rutgers.edu>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_driver_wired_c(self):
test_file = self.get_test_loc('ics/wpa_supplicant/driver_wired.c')
expected = [
u'Copyright (c) 2005-2007, Jouni Malinen <j@w1.fi>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_eap_gpsk_c(self):
test_file = self.get_test_loc('ics/wpa_supplicant/eap_gpsk.c')
expected = [
u'Copyright (c) 2006-2007, Jouni Malinen <j@w1.fi>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_eap_psk_c(self):
test_file = self.get_test_loc('ics/wpa_supplicant/eap_psk.c')
expected = [
u'Copyright (c) 2004-2007, Jouni Malinen <j@w1.fi>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_eap_sim_common_c(self):
test_file = self.get_test_loc('ics/wpa_supplicant/eap_sim_common.c')
expected = [
u'Copyright (c) 2004-2008, Jouni Malinen <j@w1.fi>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_eapol_test_c(self):
test_file = self.get_test_loc('ics/wpa_supplicant/eapol_test.c')
expected = [
u'Copyright (c) 2003-2006, Jouni Malinen <j@w1.fi>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_eloop_c(self):
test_file = self.get_test_loc('ics/wpa_supplicant/eloop.c')
expected = [
u'Copyright (c) 2002-2005, Jouni Malinen <j@w1.fi>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_l2_packet_freebsd_c(self):
test_file = self.get_test_loc('ics/wpa_supplicant/l2_packet_freebsd.c')
expected = [
u'Copyright (c) 2003-2005, Jouni Malinen <j@w1.fi>',
u'Copyright (c) 2005, Sam Leffler <sam@errno.com>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_mlme_c(self):
test_file = self.get_test_loc('ics/wpa_supplicant/mlme.c')
expected = [
u'Copyright (c) 2003-2006, Jouni Malinen <j@w1.fi>',
u'Copyright (c) 2004, Instant802 Networks, Inc.',
u'Copyright (c) 2005-2006, Devicescape Software, Inc.',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_notice(self):
test_file = self.get_test_loc('ics/wpa_supplicant/NOTICE')
expected = [
u'Copyright (c) 2003-2008, Jouni Malinen <j@w1.fi>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_radius_c(self):
test_file = self.get_test_loc('ics/wpa_supplicant/radius.c')
expected = [
u'Copyright (c) 2002-2008, Jouni Malinen <j@w1.fi>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_tls_none_c(self):
test_file = self.get_test_loc('ics/wpa_supplicant/tls_none.c')
expected = [
u'Copyright (c) 2004, Jouni Malinen <j@w1.fi>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_wireless_copy_h(self):
test_file = self.get_test_loc('ics/wpa_supplicant/wireless_copy.h')
expected = [
u'Copyright (c) 1997-2007 Jean Tourrilhes',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_wpa_cli_c(self):
    # The same notice appears twice in the file and must be reported twice.
    notice = u'Copyright (c) 2004-2008, Jouni Malinen <j@w1.fi>'
    check_detection([notice, notice], self.get_test_loc('ics/wpa_supplicant/wpa_cli.c'))
def test_ics_wpa_supplicant_wpa_supplicant_c(self):
test_file = self.get_test_loc('ics/wpa_supplicant/wpa_supplicant.c')
expected = [
u'Copyright (c) 2003-2008, Jouni Malinen <j@w1.fi>',
u'Copyright (c) 2003-2008, Jouni Malinen <j@w1.fi>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_wpa_gui_wpagui_ui_h(self):
test_file = self.get_test_loc('ics/wpa_supplicant-wpa_gui/wpagui.ui.h')
expected = [
u'Copyright (c) 2003-2008, Jouni Malinen <j@w1.fi>',
]
check_detection(expected, test_file)
def test_ics_wpa_supplicant_wpa_gui_qt4_wpagui_cpp(self):
test_file = self.get_test_loc('ics/wpa_supplicant-wpa_gui-qt4/wpagui.cpp')
expected = [
u'Copyright (c) 2005-2008, Jouni Malinen <j@w1.fi>',
u'Copyright (c) 2003-2008, Jouni Malinen <j@w1.fi>',
]
check_detection(expected, test_file)
def test_ics_xmlwriter_src_org_jheer_xmlwriter_java(self):
test_file = self.get_test_loc('ics/xmlwriter-src-org-jheer/XMLWriter.java')
expected = [
u'Copyright (c) 2004-2006 Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_yaffs2_yaffs2_devextras_h(self):
test_file = self.get_test_loc('ics/yaffs2-yaffs2/devextras.h')
expected = [
u'Copyright (c) 2002 Aleph One Ltd. for Toby Churchill Ltd and Brightstar Engineering',
]
check_detection(expected, test_file)
def test_ics_yaffs2_yaffs2_patch_ker_sh(self):
test_file = self.get_test_loc('ics/yaffs2-yaffs2/patch-ker.sh')
expected = [
u'Copyright (c) 2002 Aleph One Ltd.',
]
check_detection(expected, test_file)
def test_ics_yaffs2_yaffs2_yaffs_qsort_h(self):
test_file = self.get_test_loc('ics/yaffs2-yaffs2/yaffs_qsort.h')
expected = [
u'Copyright (c) 2000-2002 Silicon Graphics, Inc.',
]
check_detection(expected, test_file)
def test_ics_yaffs2_yaffs2_direct_makefile(self):
test_file = self.get_test_loc('ics/yaffs2-yaffs2-direct/Makefile')
expected = [
u'Copyright (c) 2003 Aleph One Ltd.',
]
check_detection(expected, test_file)
def test_ics_zlib_adler32_c(self):
    # zlib adler32.c carries a single Mark Adler notice.
    loc = self.get_test_loc('ics/zlib/adler32.c')
    check_detection([u'Copyright (c) 1995-2007 Mark Adler'], loc)
def test_ics_zlib_crc32_c(self):
test_file = self.get_test_loc('ics/zlib/crc32.c')
expected = [
u'Copyright (c) 1995-2006, 2010 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_deflate_c(self):
test_file = self.get_test_loc('ics/zlib/deflate.c')
expected = [
u'Copyright (c) 1995-2010 Jean-loup Gailly and Mark Adler',
u'Copyright 1995-2010 Jean-loup Gailly and Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_deflate_h(self):
test_file = self.get_test_loc('ics/zlib/deflate.h')
expected = [
u'Copyright (c) 1995-2010 Jean-loup Gailly',
]
check_detection(expected, test_file)
def test_ics_zlib_example_c(self):
test_file = self.get_test_loc('ics/zlib/example.c')
expected = [
u'Copyright (c) 1995-2006 Jean-loup Gailly.',
]
check_detection(expected, test_file)
def test_ics_zlib_gzclose_c(self):
test_file = self.get_test_loc('ics/zlib/gzclose.c')
expected = [
u'Copyright (c) 2004, 2010 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_gzguts_h(self):
test_file = self.get_test_loc('ics/zlib/gzguts.h')
expected = [
u'Copyright (c) 2004, 2005, 2010 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_infback_c(self):
test_file = self.get_test_loc('ics/zlib/infback.c')
expected = [
u'Copyright (c) 1995-2009 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_inffast_c(self):
test_file = self.get_test_loc('ics/zlib/inffast.c')
expected = [
u'Copyright (c) 1995-2008, 2010 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_inffast_h(self):
test_file = self.get_test_loc('ics/zlib/inffast.h')
expected = [
u'Copyright (c) 1995-2003, 2010 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_inflate_c(self):
test_file = self.get_test_loc('ics/zlib/inflate.c')
expected = [
u'Copyright (c) 1995-2010 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_inftrees_c(self):
test_file = self.get_test_loc('ics/zlib/inftrees.c')
expected = [
u'Copyright (c) 1995-2010 Mark Adler',
u'Copyright 1995-2010 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_inftrees_h(self):
test_file = self.get_test_loc('ics/zlib/inftrees.h')
expected = [
u'Copyright (c) 1995-2005, 2010 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_makefile_in(self):
test_file = self.get_test_loc('ics/zlib/Makefile.in')
expected = [
u'Copyright (c) 1995-2010 Jean-loup Gailly.',
]
check_detection(expected, test_file)
def test_ics_zlib_minigzip_c(self):
test_file = self.get_test_loc('ics/zlib/minigzip.c')
expected = [
u'Copyright (c) 1995-2006, 2010 Jean-loup Gailly.',
]
check_detection(expected, test_file)
def test_ics_zlib_notice(self):
    # Bare '(c)' form with two holders joined by 'and'.
    loc = self.get_test_loc('ics/zlib/NOTICE')
    check_detection([u'(c) 1995-2004 Jean-loup Gailly and Mark Adler'], loc)
def test_ics_zlib_readme(self):
test_file = self.get_test_loc('ics/zlib/README')
expected = [
u'(c) 1995-2010 Jean-loup Gailly and Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_trees_c(self):
test_file = self.get_test_loc('ics/zlib/trees.c')
expected = [
u'Copyright (c) 1995-2010 Jean-loup Gailly',
]
check_detection(expected, test_file)
def test_ics_zlib_uncompr_c(self):
test_file = self.get_test_loc('ics/zlib/uncompr.c')
expected = [
u'Copyright (c) 1995-2003, 2010 Jean-loup Gailly.',
]
check_detection(expected, test_file)
def test_ics_zlib_zconf_h(self):
test_file = self.get_test_loc('ics/zlib/zconf.h')
expected = [
u'Copyright (c) 1995-2010 Jean-loup Gailly.',
]
check_detection(expected, test_file)
def test_ics_zlib_zlib_h(self):
test_file = self.get_test_loc('ics/zlib/zlib.h')
expected = [
u'Copyright (c) 1995-2010 Jean-loup Gailly and Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_zutil_c(self):
test_file = self.get_test_loc('ics/zlib/zutil.c')
expected = [
u'Copyright (c) 1995-2005, 2010 Jean-loup Gailly.',
]
check_detection(expected, test_file)
def test_ics_zlib_amiga_makefile_pup(self):
test_file = self.get_test_loc('ics/zlib-amiga/Makefile.pup')
expected = [
u'Copyright (c) 1998 by Andreas R. Kleinert',
]
check_detection(expected, test_file)
def test_ics_zlib_contrib_ada_buffer_demo_adb(self):
test_file = self.get_test_loc('ics/zlib-contrib-ada/buffer_demo.adb')
expected = [
u'Copyright (c) 2002-2004 Dmitriy Anisimkov',
]
check_detection(expected, test_file)
def test_ics_zlib_contrib_ada_mtest_adb(self):
test_file = self.get_test_loc('ics/zlib-contrib-ada/mtest.adb')
expected = [
u'Copyright (c) 2002-2003 Dmitriy Anisimkov',
]
check_detection(expected, test_file)
def test_ics_zlib_contrib_ada_zlib_ads(self):
test_file = self.get_test_loc('ics/zlib-contrib-ada/zlib.ads')
expected = [
u'Copyright (c) 2002-2004 Dmitriy Anisimkov',
]
check_detection(expected, test_file)
def test_ics_zlib_contrib_blast_blast_c(self):
test_file = self.get_test_loc('ics/zlib-contrib-blast/blast.c')
expected = [
u'Copyright (c) 2003 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_contrib_blast_blast_h(self):
test_file = self.get_test_loc('ics/zlib-contrib-blast/blast.h')
expected = [
u'Copyright (c) 2003 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_contrib_delphi_readme_txt(self):
test_file = self.get_test_loc('ics/zlib-contrib-delphi/readme.txt')
expected = [
u'Copyright (c) 1997,99 Borland Corp.',
]
check_detection(expected, test_file)
def test_ics_zlib_contrib_dotzlib_readme_txt(self):
test_file = self.get_test_loc('ics/zlib-contrib-dotzlib/readme.txt')
expected = [
u'Copyright (c) Henrik Ravn 2004',
]
check_detection(expected, test_file)
def test_ics_zlib_contrib_dotzlib_dotzlib_checksumimpl_cs(self):
test_file = self.get_test_loc('ics/zlib-contrib-dotzlib-DotZLib/ChecksumImpl.cs')
expected = [
u'(c) Copyright Henrik Ravn 2004',
]
check_detection(expected, test_file)
def test_ics_zlib_contrib_dotzlib_dotzlib_assemblyinfo_cs(self):
test_file = self.get_test_loc('ics/zlib-contrib-dotzlib-DotZLib/AssemblyInfo.cs')
expected = [
u'(c) 2004 by Henrik Ravn',
]
check_detection(expected, test_file)
def test_ics_zlib_contrib_infback9_infback9_c(self):
test_file = self.get_test_loc('ics/zlib-contrib-infback9/infback9.c')
expected = [
u'Copyright (c) 1995-2008 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_contrib_infback9_infback9_h(self):
test_file = self.get_test_loc('ics/zlib-contrib-infback9/infback9.h')
expected = [
u'Copyright (c) 2003 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_contrib_inflate86_inffas86_c(self):
test_file = self.get_test_loc('ics/zlib-contrib-inflate86/inffas86.c')
expected = [
u'Copyright (c) 1995-2003 Mark Adler',
u'Copyright (c) 2003 Chris Anderson <christop@charm.net>',
]
check_detection(expected, test_file)
def test_ics_zlib_contrib_masmx86_gvmat32c_c(self):
test_file = self.get_test_loc('ics/zlib-contrib-masmx86/gvmat32c.c')
expected = [
u'Copyright (c) 1995-1996 Jean-loup Gailly and Gilles Vollant.',
]
check_detection(expected, test_file)
def test_ics_zlib_contrib_minizip_crypt_h(self):
test_file = self.get_test_loc('ics/zlib-contrib-minizip/crypt.h')
expected = [
u'Copyright (c) 1998-2005 Gilles Vollant',
]
check_detection(expected, test_file)
def test_ics_zlib_contrib_minizip_ioapi_c(self):
test_file = self.get_test_loc('ics/zlib-contrib-minizip/ioapi.c')
expected = [
u'Copyright (c) 1998-2010 Gilles Vollant',
u'Copyright (c) 2009-2010 Mathias Svensson http://result42.com',
]
check_detection(expected, test_file)
def test_ics_zlib_contrib_minizip_miniunz_c(self):
test_file = self.get_test_loc('ics/zlib-contrib-minizip/miniunz.c')
expected = [
u'Copyright (c) 1998-2010 Gilles Vollant',
u'Copyright (c) 2007-2008 Even Rouault',
u'Copyright (c) 2009-2010 Mathias Svensson http://result42.com',
]
check_detection(expected, test_file)
def test_ics_zlib_contrib_minizip_minizip64_info_txt(self):
test_file = self.get_test_loc('ics/zlib-contrib-minizip/MiniZip64_info.txt')
expected = [
u'Copyright (c) 1998-2010 - by Gilles Vollant',
]
check_detection(expected, test_file)
def test_ics_zlib_contrib_minizip_unzip_c(self):
    # unzip.c aggregates many notices; duplicates (Even Rouault) and
    # differing year formats must all be reported in file order.
    expected = [
        u'Copyright (c) 1998-2010 Gilles Vollant',
        u'Copyright (c) 2007-2008 Even Rouault',
        u'Copyright (c) 2009-2010 Mathias Svensson http://result42.com',
        u'Copyright (c) 1990-2000 Info-ZIP.',
        u'Copyright (c) 2007-2008 Even Rouault',
        u'Copyright (c) 1998 - 2010 Gilles Vollant, Even Rouault, Mathias Svensson',
        u'Copyright 1998-2004 Gilles Vollant',
    ]
    check_detection(expected, self.get_test_loc('ics/zlib-contrib-minizip/unzip.c'))
def test_ics_zlib_contrib_minizip_zip_c(self):
test_file = self.get_test_loc('ics/zlib-contrib-minizip/zip.c')
expected = [
u'Copyright (c) 1998-2010 Gilles Vollant',
u'Copyright (c) 2009-2010 Mathias Svensson http://result42.com',
u'Copyright 1998-2004 Gilles Vollant',
]
check_detection(expected, test_file)
def test_ics_zlib_contrib_pascal_readme_txt(self):
test_file = self.get_test_loc('ics/zlib-contrib-pascal/readme.txt')
expected = [
u'Copyright (c) 1995-2003 Jean-loup Gailly and Mark Adler.',
u'Copyright (c) 1998 by Bob Dellaca.',
u'Copyright (c) 2003 by Cosmin Truta.',
u'Copyright (c) 1995-2003 by Jean-loup Gailly.',
u'Copyright (c) 1998,1999,2000 by Jacques Nomssi Nzali.',
u'Copyright (c) 2003 by Cosmin Truta.',
]
check_detection(expected, test_file)
def test_ics_zlib_contrib_puff_puff_c(self):
test_file = self.get_test_loc('ics/zlib-contrib-puff/puff.c')
expected = [
u'Copyright (c) 2002-2010 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_contrib_puff_puff_h(self):
test_file = self.get_test_loc('ics/zlib-contrib-puff/puff.h')
expected = [
u'Copyright (c) 2002-2010 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_contrib_vstudio_vc10_zlib_rc(self):
test_file = self.get_test_loc('ics/zlib-contrib-vstudio-vc10/zlib.rc')
expected = [
u'(c) 1995-2010 Jean-loup Gailly & Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_contrib_vstudio_vc7_zlib_rc(self):
test_file = self.get_test_loc('ics/zlib-contrib-vstudio-vc7/zlib.rc')
expected = [
u'(c) 1995-2003 Jean-loup Gailly & Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_doc_rfc1950_txt(self):
test_file = self.get_test_loc('ics/zlib-doc/rfc1950.txt')
expected = [
u'Copyright (c) 1996 L. Peter Deutsch and Jean-Loup Gailly',
]
check_detection(expected, test_file)
def test_ics_zlib_doc_rfc1951_txt(self):
test_file = self.get_test_loc('ics/zlib-doc/rfc1951.txt')
expected = [
u'Copyright (c) 1996 L. Peter Deutsch',
]
check_detection(expected, test_file)
def test_ics_zlib_examples_enough_c(self):
test_file = self.get_test_loc('ics/zlib-examples/enough.c')
expected = [
u'Copyright (c) 2007, 2008 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_examples_gun_c(self):
test_file = self.get_test_loc('ics/zlib-examples/gun.c')
expected = [
u'Copyright (c) 2003, 2005, 2008, 2010 Mark Adler',
u'Copyright (c) 2003-2010 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_examples_gzappend_c(self):
test_file = self.get_test_loc('ics/zlib-examples/gzappend.c')
expected = [
u'Copyright (c) 2003 Mark Adler',
u'Copyright (c) 2003 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_examples_gzjoin_c(self):
test_file = self.get_test_loc('ics/zlib-examples/gzjoin.c')
expected = [
u'Copyright (c) 2004 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_examples_gzlog_c(self):
test_file = self.get_test_loc('ics/zlib-examples/gzlog.c')
expected = [
u'Copyright (c) 2004, 2008 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_examples_gzlog_h(self):
test_file = self.get_test_loc('ics/zlib-examples/gzlog.h')
expected = [
u'Copyright (c) 2004, 2008 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_examples_zlib_how_html(self):
test_file = self.get_test_loc('ics/zlib-examples/zlib_how.html')
expected = [
u'Copyright (c) 2004, 2005 Mark Adler.',
u'Copyright (c) 2004, 2005 by Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_examples_zran_c(self):
test_file = self.get_test_loc('ics/zlib-examples/zran.c')
expected = [
u'Copyright (c) 2005 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_zlib_msdos_makefile_dj2(self):
    """Copyright detection in ics/zlib-msdos/Makefile.dj2."""
    check_detection(
        [u'Copyright (c) 1995-1998 Jean-loup Gailly.'],
        self.get_test_loc('ics/zlib-msdos/Makefile.dj2'))
def test_ics_zlib_old_zlib_html(self):
    """Copyright detection in ics/zlib-old/zlib.html."""
    check_detection(
        [u'Copyright (c) 1995-2002 Jean-loup Gailly and Mark Adler'],
        self.get_test_loc('ics/zlib-old/zlib.html'))
def test_ics_zlib_old_visualc6_readme_txt(self):
    """Copyright detection in ics/zlib-old-visualc6/README.txt (two holders)."""
    wanted = [
        u'Copyright (c) 2000-2004 Simon-Pierre Cadieux.',
        u'Copyright (c) 2004 Cosmin Truta.',
    ]
    check_detection(wanted, self.get_test_loc('ics/zlib-old-visualc6/README.txt'))
def test_ics_zlib_win32_makefile_gcc(self):
    """Copyright detection in ics/zlib-win32/Makefile.gcc."""
    check_detection(
        [u'Copyright (c) 1995-2003 Jean-loup Gailly.'],
        self.get_test_loc('ics/zlib-win32/Makefile.gcc'))
def test_ics_zlib_win32_makefile_msc(self):
    """Copyright detection in ics/zlib-win32/Makefile.msc.

    Note: the statement in this file starts with a lowercase 'copyright'.
    """
    check_detection(
        [u'copyright (c) 1995-2006 Jean-loup Gailly and Mark Adler'],
        self.get_test_loc('ics/zlib-win32/Makefile.msc'))
def test_ics_zlib_win32_zlib1_rc(self):
    """Copyright detection in ics/zlib-win32/zlib1.rc (no 'Copyright' word)."""
    check_detection(
        [u'(c) 1995-2006 Jean-loup Gailly & Mark Adler'],
        self.get_test_loc('ics/zlib-win32/zlib1.rc'))
| {
"content_hash": "9ac4b4ebb83359e7a0f4eb56f0868bec",
"timestamp": "",
"source": "github",
"line_count": 19458,
"max_line_length": 224,
"avg_line_length": 41.70372083461815,
"alnum_prop": 0.6139886699586307,
"repo_name": "yashdsaraf/scancode-toolkit",
"id": "c3565c2e0cd296a689836768a905b7ea342eb6c4",
"size": "812853",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "tests/cluecode/test_copyrights_ics.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "1C Enterprise",
"bytes": "492"
},
{
"name": "Ada",
"bytes": "1251"
},
{
"name": "AppleScript",
"bytes": "168"
},
{
"name": "Assembly",
"bytes": "35437"
},
{
"name": "Awk",
"bytes": "248"
},
{
"name": "Batchfile",
"bytes": "5336"
},
{
"name": "C",
"bytes": "1710609"
},
{
"name": "C#",
"bytes": "5901"
},
{
"name": "C++",
"bytes": "771494"
},
{
"name": "CMake",
"bytes": "142"
},
{
"name": "CSS",
"bytes": "4895"
},
{
"name": "GAP",
"bytes": "579"
},
{
"name": "HTML",
"bytes": "3003794"
},
{
"name": "Inno Setup",
"bytes": "235"
},
{
"name": "Java",
"bytes": "172545"
},
{
"name": "JavaScript",
"bytes": "32733"
},
{
"name": "M4",
"bytes": "45516"
},
{
"name": "Makefile",
"bytes": "19150"
},
{
"name": "Matlab",
"bytes": "148"
},
{
"name": "Objective-C",
"bytes": "199657"
},
{
"name": "Objective-C++",
"bytes": "950"
},
{
"name": "PHP",
"bytes": "621154"
},
{
"name": "Perl",
"bytes": "279180"
},
{
"name": "PostScript",
"bytes": "562"
},
{
"name": "Protocol Buffer",
"bytes": "374"
},
{
"name": "Python",
"bytes": "3582576"
},
{
"name": "Ragel",
"bytes": "27606"
},
{
"name": "Roff",
"bytes": "209319"
},
{
"name": "Ruby",
"bytes": "164946"
},
{
"name": "Scala",
"bytes": "4500"
},
{
"name": "Shell",
"bytes": "1595173"
},
{
"name": "Smalltalk",
"bytes": "603"
},
{
"name": "TeX",
"bytes": "3126"
},
{
"name": "Vim script",
"bytes": "1129"
},
{
"name": "Visual Basic",
"bytes": "23"
},
{
"name": "XSLT",
"bytes": "474"
},
{
"name": "Yacc",
"bytes": "1497"
}
],
"symlink_target": ""
} |
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
import horizon
class AdminFloatingIps(horizon.Panel):
    """Admin dashboard panel listing floating IP addresses.

    Registered only when the deployment's Neutron configuration has the
    router extension enabled.
    """
    name = _("Floating IPs")
    slug = 'floating_ips'
    permissions = ('openstack.services.network',)
    policy_rules = (("network", "context_is_admin"),)

    @staticmethod
    def can_register():
        # Hide the panel entirely when 'enable_router' is switched off in
        # OPENSTACK_NEUTRON_NETWORK; default to showing it.
        neutron_settings = getattr(settings, 'OPENSTACK_NEUTRON_NETWORK', {})
        return neutron_settings.get('enable_router', True)
| {
"content_hash": "060e960e28275b71d6f931049440a1ad",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 75,
"avg_line_length": 30.125,
"alnum_prop": 0.6887966804979253,
"repo_name": "noironetworks/horizon",
"id": "589457ca073f605428a49136f9bd34de7f68bc5e",
"size": "1118",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "openstack_dashboard/dashboards/admin/floating_ips/panel.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "129247"
},
{
"name": "HTML",
"bytes": "581169"
},
{
"name": "JavaScript",
"bytes": "2455930"
},
{
"name": "Python",
"bytes": "5190295"
},
{
"name": "Shell",
"bytes": "7108"
}
],
"symlink_target": ""
} |
"""
<Program>
transition_canonical_to_onepercentmanyevents.py
<Purpose>
The purpose of this program is to transition nodes from the
canonical state to the onepercentmanyevents state by bypassing
through the movingtoonepercentmanyevents state.
<Started>
August 13, 2009
<Author>
Monzur Muhammad
monzum@cs.washington.edu
<Usage>
Ensure that seattlegeni and seattle are in the PYTHONPATH.
Ensure that the database is setup properly and django settings
are set correctly.
python transition_canonical_to_onepercentmanyevents.py
"""
import os
import random
import traceback
from seattlegeni.common.api import backend
from seattlegeni.common.api import maindb
from seattlegeni.common.api import nodemanager
from seattlegeni.common.util.decorators import log_function_call
from seattlegeni.common.exceptions import *
from seattlegeni.node_state_transitions import node_transition_lib
# The full path to the onepercentmanyevents.resources file, including the filename.
RESOURCES_TEMPLATE_FILE_PATH = os.path.join(os.path.dirname(__file__), "resource_files", "onepercentmanyevents.resources")
@log_function_call
def onepercentmanyevents_divide (node_string, node_info, database_nodeobject, onepercent_resourcetemplate):
  """
  <Purpose>
    Take a node that is in the canonical state with one (extra) vessel and
    split it into the 1% vessels, so the vessels can be acquired by users.
    Each new vessel receives ten randomly chosen usable ports.

  <Arguments>
    node_string - the name of the node. ip:port or NAT:port

    node_info - a dictionary containing information about the node

    database_nodeobject - a database object for the node

    onepercent_resourcetemplate - the resource template contents (a string
      with twenty '%s' port placeholders) used to size each new vessel

  <Exceptions>
    NodeError - Error raised if node is not in the right state

    NodemanagerCommunicationError - raised if we cannot retrieve the usable ports for a node

    NodeProcessError - raised if unable to split vessels properly

    DatabaseError - raised if unable to modify the database

  <Side Effects>
    Database gets modified.

  <Return>
    None
  """

  node_transition_lib.log("Beginning onepercentmanyevents_divide on node: "+node_string)

  # Extract the ip/NAT and the port.
  # Note that the first portion of the node might be an ip or a NAT string.
  (ip_or_nat_string, port_num) = node_transition_lib.split_node_string(node_string)

  donated_vesselname = database_nodeobject.extra_vessel_name

  # Retrieve the usable ports list for the node and then shuffle
  # the ports so each vessel gets a random subset of the ports.
  usable_ports_list = nodemanager.get_vessel_resources(ip_or_nat_string, port_num, donated_vesselname)['usableports']

  node_transition_lib.log("List of usable ports in node: "+node_string+". "+str(usable_ports_list))

  random.shuffle(usable_ports_list)

  # The vessel that we start with: the donated (extra) vessel.
  current_vessel = donated_vesselname

  node_transition_lib.log("Name of starting vessel: "+current_vessel)

  # Keep splitting the vessel until we run out of resources (fewer than
  # ten usable ports left).
  # Note that when split_vessel is called the left vessel
  # has the leftover (extra vessel) and the right vessel has
  # the vessel with the exact resources.
  while len(usable_ports_list) >= 10:
    desired_resourcedata = get_resource_data(onepercent_resourcetemplate, usable_ports_list)

    # Use the first 10 ports, so remove them from usable_ports_list.
    used_ports_list = usable_ports_list[:10]
    usable_ports_list = usable_ports_list[10:]

    node_transition_lib.log("Ports we are going to use for the new vessel: "+str(used_ports_list))
    node_transition_lib.log("Starting to split vessel: "+current_vessel)

    # Split the current vessel. The exact vessel is the right vessel
    # and the extra vessel is the left vessel.
    try:
      leftover_vessel, new_vessel = backend.split_vessel(database_nodeobject, current_vessel, desired_resourcedata)
    except NodemanagerCommunicationError, e:
      # The object 'e' will already include traceback info that has the actual node error.
      # If the failure is due to inability to split further, that's ok.
      if 'Insufficient quantity:' in str(e):
        node_transition_lib.log("Could not split " + current_vessel +
                                " any further due to insufficient resource/quantity. " + str(e))
        # We must break out of the while loop here. If we let the exception
        # get raised, it will look like the transition failed.
        break
      raise

    node_transition_lib.log("Successfully split vessel: "+current_vessel+" into vessels: "+leftover_vessel+" and "+new_vessel)
    current_vessel = leftover_vessel

    # Make sure to update the database and record the new
    # name of the extra vessel as when backend.split_vessels()
    # is called, the old vessel does not exist anymore.
    # Instead two new vessels are created, where the first
    # vessel is the extra vessel with leftover resources
    # and the second vessel has the actual amount of resources.
    maindb.set_node_extra_vessel_name(database_nodeobject, current_vessel)

    # Set the user_list for the new vessel to be empty. Remember that user_list
    # is what determines the transition state, and only the extra vessel
    # should have this set.
    backend.set_vessel_user_keylist(database_nodeobject, new_vessel, [])
    node_transition_lib.log("Changed the userkeys for the vessel "+new_vessel+" to []")

    # Add the newly created vessel to the database and then add the ports
    # associated with the vessel to the database also.
    try:
      node_transition_lib.log("Creating a vessel record in the database for vessel "+new_vessel+" for node "+node_string)
      vessel_object = maindb.create_vessel(database_nodeobject, new_vessel)
      node_transition_lib.log("Setting the vessel ports in the database for vessel "+new_vessel+" with port list: "+str(used_ports_list))
      maindb.set_vessel_ports(vessel_object, used_ports_list)
    except:
      # NOTE(review): bare except converts *any* failure (including
      # KeyboardInterrupt/SystemExit under Python 2) into a DatabaseError;
      # consider narrowing to Exception.
      raise node_transition_lib.DatabaseError("Failed to create vessel entry or change vessel entry for vessel: "+
                                              new_vessel+". "+traceback.format_exc())

  # Note: there is one last thing we need to do: set the node as active. We
  # don't want to do this here just in case setting the state key on the node
  # fails. So, rather than add a post-state-key-setting-action for each state,
  # the logic has been put right in processnode() with an if statement to look
  # for the 'onepercentmanyevents_state' as the end state. We could alternately
  # just leave it for the node to be marked as active by the 1pct-to-1pct
  # transition script, but that could introduce a delay of many minutes before
  # the node/donation becomes active.
  node_transition_lib.log("Finished splitting vessels up for the node: "+node_string)
@log_function_call
def get_resource_data(onepercent_resourcetemplate, usable_ports_list):
  """
  <Purpose>
    Fill in the onepercentmanyevents resource template with the first ten
    usable ports. The template contains twenty '%s' placeholders: each of
    the ten ports appears twice in the resource file.

  <Arguments>
    onepercent_resourcetemplate - the resource template contents (a string
      with twenty '%s' placeholders)

    usable_ports_list - the list of ports that the node has; must contain
      at least ten entries

  <Exception>
    ValueError - raised if fewer than ten usable ports are provided.

  <Side Effects>
    None

  <Return>
    The filled-in resource file contents as a string.
  """
  # Guard explicitly instead of failing with an obscure IndexError below.
  if len(usable_ports_list) < 10:
    raise ValueError("get_resource_data requires at least 10 usable ports, "
                     "got " + str(len(usable_ports_list)))

  # The template references each of the ten ports twice, so repeat the
  # argument tuple instead of spelling out twenty str() calls.
  ports = tuple(str(port) for port in usable_ports_list[:10])
  return onepercent_resourcetemplate % (ports + ports)
def main():
  """
  <Purpose>
    The main function that calls the process_nodes_and_change_state() function
    in the node_transition_lib passing in the process and error functions.

  <Arguments>
    None

  <Exceptions>
    None

  <Side Effects>
    None
  """

  # Read the resource template needed for onepercentmanyevents. Use a
  # context manager so the file handle is closed even if read() raises
  # (replaces the deprecated py2 file()/close() pair).
  with open(RESOURCES_TEMPLATE_FILE_PATH) as template_fd:
    onepercentmanyevents_resourcetemplate = template_fd.read()

  # Build up the tuple list to call process_nodes_and_change_state().
  # The transition from canonical to onepercentmanyevents happens in 3 steps:
  #
  # Step 1: Move the canonical nodes to the movingto_onepercentmanyevents
  #   state (so that, in case some transition fails, we know they are in the
  #   movingto_onepercentmanyevents state).
  # Step 2: Run the process function and change the state from
  #   movingto_onepercentmanyevents to onepercentmanyevents.
  # Step 3: Find all the nodes that failed to transition from
  #   movingto_onepercentmanyevents to onepercentmanyevents and transition
  #   them back to the canonical state.
  state_function_arg_tuplelist = [
      ("canonical", "movingto_onepercentmanyevents", node_transition_lib.noop, node_transition_lib.noop),

      ("movingto_onepercentmanyevents", "onepercentmanyevents", onepercentmanyevents_divide,
       node_transition_lib.noop, onepercentmanyevents_resourcetemplate),

      ("movingto_onepercentmanyevents", "canonical", node_transition_lib.combine_vessels,
       node_transition_lib.noop)]

  sleeptime = 10
  process_name = "canonical_to_onepercentmanyevents"
  parallel_instances = 10

  # Call process_nodes_and_change_state() to start the node state transition.
  node_transition_lib.process_nodes_and_change_state(state_function_arg_tuplelist, process_name, sleeptime, parallel_instances)
# Script entry point: run the canonical -> onepercentmanyevents transition.
if __name__ == '__main__':
  main()
| {
"content_hash": "554ee76e44dafc3f6f69bb56d98dc849",
"timestamp": "",
"source": "github",
"line_count": 267,
"max_line_length": 137,
"avg_line_length": 38.89138576779026,
"alnum_prop": 0.6974191063174114,
"repo_name": "sburnett/seattle",
"id": "81c495bd50e69622bcde131a31817f3b86aabbf1",
"size": "10384",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "seattlegeni/node_state_transitions/transition_canonical_to_onepercentmanyevents.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "85039"
},
{
"name": "CSS",
"bytes": "44140"
},
{
"name": "Java",
"bytes": "178864"
},
{
"name": "JavaScript",
"bytes": "791008"
},
{
"name": "Perl",
"bytes": "36791"
},
{
"name": "Python",
"bytes": "4683648"
},
{
"name": "Scala",
"bytes": "2587"
},
{
"name": "Shell",
"bytes": "87609"
}
],
"symlink_target": ""
} |
"""
Python Interchangeable Virtual Instrument Library
Copyright (c) 2012-2014 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .agilent3000A import *
class agilentMSOX3054A(agilent3000A):
    "Agilent InfiniiVision MSOX3054A IVI oscilloscope driver"

    def __init__(self, *args, **kwargs):
        # Record the expected instrument model string before the base class
        # runs; setdefault keeps any value a subclass has already provided.
        self.__dict__.setdefault('_instrument_id', 'MSO-X 3054A')

        super(agilentMSOX3054A, self).__init__(*args, **kwargs)

        # MSOX3054A hardware: 4 analog + 16 digital channels, 500 MHz.
        self._analog_channel_count = 4
        self._digital_channel_count = 16
        self._bandwidth = 500e6
        self._channel_count = (self._analog_channel_count
                               + self._digital_channel_count)

        self._init_channels()
| {
"content_hash": "37ac1d10af521bd6a060ee6056c32adb",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 86,
"avg_line_length": 38.52272727272727,
"alnum_prop": 0.7368731563421829,
"repo_name": "lude-ma/python-ivi",
"id": "b816cebda81680c6865820995791ff5164d55515",
"size": "1695",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "ivi/agilent/agilentMSOX3054A.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1387731"
},
{
"name": "Shell",
"bytes": "5102"
}
],
"symlink_target": ""
} |
"""This API defines FeatureColumn abstraction.
To distinguish the concept of a feature family and a specific binary feature
within a family, we refer to a feature family like "country" as a feature
column. For example "country:US" is a feature which is in "country" feature
column and has a feature value ("US").
Supported feature types are:
* _SparseColumn: also known as categorical features.
* _RealValuedColumn: also known as continuous features.
Supported transformations on above features are:
* Bucketization: also known as binning.
* Crossing: also known as conjunction or combination.
* Embedding.
Typical usage example:
```python
# Define features and transformations
country = sparse_column_with_keys(column_name="native_country",
keys=["US", "BRA", ...])
country_emb = embedding_column(sparse_id_column=country, dimension=3,
combiner="sum")
occupation = sparse_column_with_hash_bucket(column_name="occupation",
hash_bucket_size=1000)
occupation_emb = embedding_column(sparse_id_column=occupation, dimension=16,
combiner="sum")
occupation_x_country = crossed_column(columns=[occupation, country],
hash_bucket_size=10000)
age = real_valued_column("age")
age_buckets = bucketized_column(
source_column=age,
boundaries=[18, 25, 30, 35, 40, 45, 50, 55, 60, 65])
my_features = [occupation_emb, age_buckets, country_emb]
# Building model via layers
columns_to_tensor = parse_feature_columns_from_examples(
serialized=my_data,
feature_columns=my_features)
first_layer = input_from_feature_columns(
columns_to_tensors=columns_to_tensor,
feature_columns=my_features)
second_layer = fully_connected(first_layer, ...)
# Building model via tf.learn.estimators
estimator = DNNLinearCombinedClassifier(
linear_feature_columns=my_wide_features,
dnn_feature_columns=my_deep_features,
dnn_hidden_units=[500, 250, 50])
estimator.train(...)
See feature_column_ops_test for more examples.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import collections
import math
from tensorflow.contrib.framework.python.framework import checkpoint_utils
from tensorflow.contrib.framework.python.framework import deprecation
from tensorflow.contrib.framework.python.ops import variables as contrib_variables
from tensorflow.contrib.layers.python.layers import embedding_ops
from tensorflow.contrib.layers.python.ops import bucketization_op
from tensorflow.contrib.layers.python.ops import sparse_feature_cross_op
from tensorflow.contrib.lookup import lookup_ops as contrib_lookup_ops
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import parsing_ops
from tensorflow.python.ops import sparse_ops
from tensorflow.python.ops import string_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import tf_logging as logging
class _EmbeddingLookupArguments(
collections.namedtuple("_EmbeddingLookupArguments",
["input_tensor",
"weight_tensor",
"vocab_size",
"initializer",
"combiner"])):
"""Represents the information needed from a column for embedding lookup.
Used to to compute DNN inputs and weighted sum.
"""
pass
class _FeatureColumn(object):
  """Represents a feature column abstraction.

  To distinguish the concept of a feature family and a specific binary feature
  within a family, we refer to a feature family like "country" as a feature
  column. For example "country:US" is a feature which is in "country" feature
  column and has a feature value ("US").

  This class is an abstract class. User should not create one instance of this.
  Following classes (_SparseColumn, _RealValuedColumn, ...) are concrete
  instances.
  """
  # Python 2 mechanism for marking this class abstract.
  __metaclass__ = abc.ABCMeta

  @abc.abstractproperty
  @deprecation.deprecated(
      "2016-09-25",
      "Should be private.")
  def name(self):
    """Returns the name of column or transformed column."""
    pass

  @abc.abstractproperty
  @deprecation.deprecated(
      "2016-09-25",
      "Should be private.")
  def config(self):
    """Returns configuration of the base feature for `tf.parse_example`."""
    pass

  @abc.abstractproperty
  @deprecation.deprecated(
      "2016-09-25",
      "Should be private.")
  def key(self):
    """Returns a string which will be used as a key when we do sorting."""
    pass

  @abc.abstractmethod
  @deprecation.deprecated(
      "2016-09-25",
      "Should be private.")
  def insert_transformed_feature(self, columns_to_tensors):
    """Apply transformation and inserts it into columns_to_tensors.

    Args:
      columns_to_tensors: A mapping from feature columns to tensors. 'string'
        key means a base feature (not-transformed). It can have _FeatureColumn
        as a key too. That means that _FeatureColumn is already transformed.
    """
    raise NotImplementedError("Transform is not implemented for {}.".format(
        self))

  @abc.abstractmethod
  @deprecation.deprecated(
      "2016-09-25",
      "Use layers.input_from_feature_columns instead.")
  def to_dnn_input_layer(self,
                         input_tensor,
                         weight_collection=None,
                         trainable=True):
    """Returns a Tensor as an input to the first layer of neural network."""
    raise ValueError("Calling an abstract method.")

  @abc.abstractmethod
  @deprecation.deprecated(
      "2016-09-25",
      "Use layers.weighted_sum_from_feature_columns instead.")
  def to_weighted_sum(self,
                      input_tensor,
                      num_outputs=1,
                      weight_collections=None,
                      trainable=True):
    """Returns a Tensor as linear predictions and a list of created Variable."""
    raise ValueError("Calling an abstract method.")

  # It is expected that classes implement either to_embedding_lookup_arguments
  # or to_dense_tensor to be used in linear models.
  # pylint: disable=unused-argument
  def _to_embedding_lookup_arguments(self, input_tensor):
    """Returns arguments to look up embeddings for this column."""
    raise NotImplementedError("Calling an abstract method.")

  # pylint: disable=unused-argument
  def _to_dense_tensor(self, input_tensor):
    """Returns a dense tensor representing this column's values."""
    raise NotImplementedError("Calling an abstract method.")

  def _checkpoint_path(self):
    """Returns None, or a (path,tensor_name) to load a checkpoint from."""
    return None

  def _key_without_properties(self, properties):
    """Helper method for self.key() that omits particular properties."""
    fields_values = []
    # pylint: disable=protected-access
    # Iterate over the namedtuple fields of the concrete subclass.
    for i, k in enumerate(self._fields):
      if k in properties:
        # Excludes a property from the key.
        # For instance, exclude `initializer` from the key of EmbeddingColumn
        # since we don't support users specifying different initializers for
        # the same embedding column. Ditto for `normalizer` and
        # RealValuedColumn.
        # Special treatment is needed since the default str form of a
        # function contains its address, which could introduce non-determinism
        # in sorting.
        continue
      fields_values.append("{}={}".format(k, self[i]))
    # pylint: enable=protected-access

    # This is effectively the same format as str(self), except with our special
    # treatment.
    return "{}({})".format(type(self).__name__, ", ".join(fields_values))
# TODO(b/30410315): Support warm starting in all feature columns.
class _SparseColumn(_FeatureColumn,
                    collections.namedtuple("_SparseColumn",
                                           ["column_name", "is_integerized",
                                            "bucket_size", "lookup_config",
                                            "combiner", "dtype"])):
  """Represents a sparse feature column also known as categorical features.

  Instances of this class are immutable. A sparse column means features are
  sparse and dictionary returned by InputBuilder contains a
  ("column_name", SparseTensor) pair.
  One and only one of bucket_size or lookup_config should be set. If
  is_integerized is True then bucket_size should be set.

  Attributes:
    column_name: A string defining sparse column name.
    is_integerized: A bool if True means type of feature is an integer.
      Integerized means we can use the feature itself as id.
    bucket_size: An int that is > 1. The number of buckets.
    lookup_config: A _SparseIdLookupConfig defining feature-to-id lookup
      configuration
    combiner: A string specifying how to reduce if the sparse column is
      multivalent. Currently "mean", "sqrtn" and "sum" are supported, with
      "sum" the default:
        * "sum": do not normalize features in the column
        * "mean": do l1 normalization on features in the column
        * "sqrtn": do l2 normalization on features in the column
      For more information: `tf.embedding_lookup_sparse`.
    dtype: Type of features, such as `tf.string` or `tf.int64`.

  Raises:
    TypeError: if lookup_config is not a _SparseIdLookupConfig.
    ValueError: if above expectations about input fails.
  """

  def __new__(cls,
              column_name,
              is_integerized=False,
              bucket_size=None,
              lookup_config=None,
              combiner="sqrtn",
              dtype=dtypes.string):
    # Validate the mutually-exclusive bucket_size/lookup_config contract
    # before constructing the immutable namedtuple.
    if is_integerized and bucket_size is None:
      raise ValueError("bucket_size must be set if is_integerized is True. "
                       "column_name: {}".format(column_name))

    if is_integerized and not dtype.is_integer:
      raise ValueError("dtype must be an integer if is_integerized is True. "
                       "dtype: {}, column_name: {}.".format(dtype, column_name))

    if bucket_size is None and lookup_config is None:
      raise ValueError("one of bucket_size or lookup_config must be set. "
                       "column_name: {}".format(column_name))

    if bucket_size is not None and lookup_config:
      raise ValueError("one and only one of bucket_size or lookup_config "
                       "must be set. column_name: {}".format(column_name))

    if bucket_size is not None and bucket_size < 2:
      raise ValueError("bucket_size must be at least 2. "
                       "bucket_size: {}, column_name: {}".format(bucket_size,
                                                                 column_name))

    if ((lookup_config) and
        (not isinstance(lookup_config, _SparseIdLookupConfig))):
      raise TypeError(
          "lookup_config must be an instance of _SparseIdLookupConfig. "
          "Given one is in type {} for column_name {}".format(
              type(lookup_config), column_name))

    if (lookup_config and lookup_config.vocabulary_file and
        lookup_config.vocab_size is None):
      raise ValueError("vocab_size must be defined. "
                       "column_name: {}".format(column_name))

    return super(_SparseColumn, cls).__new__(cls, column_name, is_integerized,
                                             bucket_size, lookup_config,
                                             combiner, dtype)

  @property
  def name(self):
    return self.column_name

  @property
  def length(self):
    """Returns vocabulary or hash_bucket size."""
    if self.bucket_size is not None:
      return self.bucket_size
    # With a lookup_config, out-of-vocabulary buckets extend the id space.
    return self.lookup_config.vocab_size + self.lookup_config.num_oov_buckets

  @property
  def config(self):
    # Parsing spec for `tf.parse_example`: values are variable-length.
    return {self.column_name: parsing_ops.VarLenFeature(self.dtype)}

  @property
  def key(self):
    """Returns a string which will be used as a key when we do sorting."""
    return "{}".format(self)

  def id_tensor(self, input_tensor):
    """Returns the id tensor from the given transformed input_tensor."""
    return input_tensor

  # pylint: disable=unused-argument
  def weight_tensor(self, input_tensor):
    """Returns the weight tensor from the given transformed input_tensor."""
    # Plain sparse columns carry no per-id weights; see weighted variants.
    return None

  # pylint: disable=unused-argument
  def to_dnn_input_layer(self,
                         input_tensor,
                         weight_collections=None,
                         trainable=True):
    raise ValueError(
        "SparseColumn is not supported in DNN. "
        "Please use embedding_column or one_hot_column. column: {}".format(
            self))

  def to_weighted_sum(self,
                      input_tensor,
                      num_outputs=1,
                      weight_collections=None,
                      trainable=True):
    # A linear model over a sparse column is an embedding lookup into a
    # zero-initialized [vocab_size, num_outputs] weight matrix.
    return _create_embedding_lookup(
        input_tensor=self.id_tensor(input_tensor),
        weight_tensor=self.weight_tensor(input_tensor),
        vocab_size=self.length,
        dimension=num_outputs,
        weight_collections=_add_variable_collection(weight_collections),
        initializer=init_ops.zeros_initializer,
        combiner=self.combiner,
        trainable=trainable)

  def _to_embedding_lookup_arguments(self, input_tensor):
    return _EmbeddingLookupArguments(
        input_tensor=self.id_tensor(input_tensor),
        weight_tensor=self.weight_tensor(input_tensor),
        vocab_size=self.length,
        initializer=init_ops.zeros_initializer,
        combiner=self.combiner)

  def is_compatible(self, other_column):
    """Check compatibility of two sparse columns."""
    if self.lookup_config and other_column.lookup_config:
      return self.lookup_config == other_column.lookup_config
    # Without lookup configs we can only compare sizes and dtypes; this is a
    # weaker check, hence the warning below.
    compatible = (self.length == other_column.length and
                  (self.dtype == other_column.dtype or
                   (self.dtype.is_integer and other_column.dtype.is_integer)))
    if compatible:
      logging.warn("Column {} and {} may not have the same vocabulary.".
                   format(self.name, other_column.name))
    return compatible
class _SparseColumnIntegerized(_SparseColumn):
  """See `sparse_column_with_integerized_feature`."""

  def __new__(cls, column_name, bucket_size, combiner="sqrtn",
              dtype=dtypes.int64):
    # Integerized features are used directly as ids, so they must be integers.
    if not dtype.is_integer:
      raise ValueError("dtype must be an integer. "
                       "dtype: {}, column_name: {}".format(dtype, column_name))

    return super(_SparseColumnIntegerized, cls).__new__(
        cls,
        column_name,
        is_integerized=True,
        bucket_size=bucket_size,
        combiner=combiner,
        dtype=dtype)

  def insert_transformed_feature(self, columns_to_tensors):
    """Buckets the raw integer values by taking them modulo `bucket_size`."""
    raw_tensor = columns_to_tensors[self.name]
    bucketed_values = math_ops.mod(raw_tensor.values,
                                   self.bucket_size,
                                   name="mod")
    columns_to_tensors[self] = ops.SparseTensor(
        raw_tensor.indices, bucketed_values, raw_tensor.shape)
def sparse_column_with_integerized_feature(column_name,
                                           bucket_size,
                                           combiner=None,
                                           dtype=dtypes.int64):
  """Creates an integerized _SparseColumn.

  Use this when your features are already pre-integerized into int64 IDs;
  the feature value is used directly as the id:

      output_id = input_feature

  Args:
    column_name: A string defining sparse column name.
    bucket_size: An int that is > 1. The number of buckets. It should be
      bigger than the maximum feature value, i.e. features in this column
      should be int64s in the range [0, bucket_size).
    combiner: A string specifying how to reduce if the sparse column is
      multivalent: "sum" (no normalization, the default), "mean" (l1
      normalization) or "sqrtn" (l2 normalization). For more information:
      `tf.embedding_lookup_sparse`.
    dtype: Type of features. It should be an integer type. Default value is
      dtypes.int64.

  Returns:
    An integerized _SparseColumn definition.

  Raises:
    ValueError: if bucket_size is less than 2.
    ValueError: if dtype is not an integer type.
  """
  if combiner is None:
    # Explicit None lets us warn callers who rely on the implicit default.
    logging.warn("The default value of combiner will change from \"sum\" "
                 "to \"sqrtn\" after 2016/11/01.")
    combiner = "sum"
  return _SparseColumnIntegerized(
      column_name, bucket_size, combiner=combiner, dtype=dtype)
class _SparseColumnHashed(_SparseColumn):
  """See `sparse_column_with_hash_bucket`."""

  def __new__(cls,
              column_name,
              hash_bucket_size,
              combiner="sum",
              dtype=dtypes.string):
    # Hashing operates on strings; integers are stringified first, so only
    # string and integer dtypes are meaningful here.
    if dtype != dtypes.string and not dtype.is_integer:
      raise ValueError("dtype must be string or integer. "
                       "dtype: {}, column_name: {}".format(dtype, column_name))

    return super(_SparseColumnHashed, cls).__new__(
        cls,
        column_name,
        bucket_size=hash_bucket_size,
        combiner=combiner,
        dtype=dtype)

  def insert_transformed_feature(self, columns_to_tensors):
    """Hashes the (stringified) sparse values into `bucket_size` id buckets."""
    raw_tensor = columns_to_tensors[self.name]
    if self.dtype.is_integer:
      string_values = string_ops.as_string(raw_tensor.values)
    else:
      string_values = raw_tensor.values
    hashed_ids = string_ops.string_to_hash_bucket_fast(
        string_values, self.bucket_size, name="lookup")
    columns_to_tensors[self] = ops.SparseTensor(
        raw_tensor.indices, hashed_ids, raw_tensor.shape)
def sparse_column_with_hash_bucket(column_name,
                                   hash_bucket_size,
                                   combiner=None,
                                   dtype=dtypes.string):
  """Creates a _SparseColumn with hashed bucket configuration.

  Use this when your sparse features are in string or integer format, but you
  don't have a vocab file that maps each value to an integer ID.
  output_id = Hash(input_feature_string) % bucket_size

  Args:
    column_name: A string defining sparse column name.
    hash_bucket_size: An int that is > 1. The number of buckets.
    combiner: A string specifying how to reduce if the sparse column is
      multivalent. Currently "mean", "sqrtn" and "sum" are supported, with
      "sum" the default:
        * "sum": do not normalize features in the column
        * "mean": do l1 normalization on features in the column
        * "sqrtn": do l2 normalization on features in the column
      For more information: `tf.embedding_lookup_sparse`.
    dtype: The type of features. Only string and integer types are supported.

  Returns:
    A _SparseColumn with hashed bucket configuration

  Raises:
    ValueError: hash_bucket_size is not greater than 1.
    ValueError: dtype is neither string nor integer.
  """
  # Docstring fix: the Raises section used to say "not greater than 2",
  # contradicting the Args section ("> 1"); the constructor requires > 1.
  if combiner is None:
    logging.warn("The default value of combiner will change from \"sum\" "
                 "to \"sqrtn\" after 2016/11/01.")
    combiner = "sum"
  return _SparseColumnHashed(column_name, hash_bucket_size, combiner, dtype)
class _SparseColumnKeys(_SparseColumn):
  """Sparse column backed by an explicit in-memory key vocabulary.

  See `sparse_column_with_keys`.
  """
  def __new__(cls, column_name, keys, default_value=-1, combiner="sum"):
    # Bundle the vocabulary into a lookup config; vocab_size is simply the
    # number of keys supplied.
    lookup = _SparseIdLookupConfig(
        keys=keys, vocab_size=len(keys), default_value=default_value)
    return super(_SparseColumnKeys, cls).__new__(
        cls,
        column_name,
        combiner=combiner,
        lookup_config=lookup,
        dtype=dtypes.string)
  def insert_transformed_feature(self, columns_to_tensors):
    """Handles sparse column to id conversion."""
    cfg = self.lookup_config
    columns_to_tensors[self] = contrib_lookup_ops.string_to_index(
        tensor=columns_to_tensors[self.name],
        mapping=list(cfg.keys),
        default_value=cfg.default_value,
        name="lookup")
def sparse_column_with_keys(column_name, keys, default_value=-1,
                            combiner=None):
  """Creates a _SparseColumn with an explicit key vocabulary.

  Look up logic is as follows:
  lookup_id = index_of_feature_in_keys if feature in keys else default_value

  Args:
    column_name: A string defining sparse column name.
    keys: a string list defining vocabulary.
    default_value: The value to use for out-of-vocabulary feature values.
      Default is -1.
    combiner: A string specifying how to reduce if the sparse column is
      multivalent. Currently "mean", "sqrtn" and "sum" are supported, with
      "sum" the default:
        * "sum": do not normalize features in the column
        * "mean": do l1 normalization on features in the column
        * "sqrtn": do l2 normalization on features in the column
      For more information: `tf.embedding_lookup_sparse`.

  Returns:
    A _SparseColumnKeys with keys configuration.
  """
  if combiner is None:
    combiner = "sum"
    logging.warn("The default value of combiner will change from \"sum\" "
                 "to \"sqrtn\" after 2016/11/01.")
  # Freeze the vocabulary as a tuple so the resulting column stays hashable.
  return _SparseColumnKeys(column_name, tuple(keys),
                           default_value=default_value, combiner=combiner)
class _WeightedSparseColumn(_FeatureColumn, collections.namedtuple(
    "_WeightedSparseColumn",
    ["sparse_id_column", "weight_column_name", "dtype"])):
  """See `weighted_sparse_column`.

  Transformed value is a (id_tensor, weight_tensor) tuple; use `id_tensor()`
  and `weight_tensor()` to pick the pieces apart.
  """
  def __new__(cls, sparse_id_column, weight_column_name, dtype):
    return super(_WeightedSparseColumn, cls).__new__(cls, sparse_id_column,
                                                     weight_column_name, dtype)
  @property
  def name(self):
    """Returns a name derived from the id column and the weight column."""
    return "{}_weighted_by_{}".format(self.sparse_id_column.name,
                                      self.weight_column_name)
  @property
  def length(self):
    """Returns id size."""
    return self.sparse_id_column.length
  @property
  def config(self):
    """Returns the parsing config of the id column plus the weight feature."""
    config = _get_feature_config(self.sparse_id_column)
    config.update(
        {self.weight_column_name: parsing_ops.VarLenFeature(self.dtype)})
    return config
  @property
  def key(self):
    """Returns a string which will be used as a key when we do sorting."""
    return "{}".format(self)
  def insert_transformed_feature(self, columns_to_tensors):
    """Inserts a tuple with the id and weight tensors."""
    # Make sure the underlying id column has been transformed first.
    if self.sparse_id_column not in columns_to_tensors:
      self.sparse_id_column.insert_transformed_feature(columns_to_tensors)
    columns_to_tensors[self] = tuple([
        columns_to_tensors[self.sparse_id_column],
        columns_to_tensors[self.weight_column_name]
    ])
  def id_tensor(self, input_tensor):
    """Returns the id tensor from the given transformed input_tensor."""
    return input_tensor[0]
  def weight_tensor(self, input_tensor):
    """Returns the weight tensor from the given transformed input_tensor."""
    return input_tensor[1]
  # pylint: disable=unused-argument
  def to_dnn_input_layer(self,
                         input_tensor,
                         weight_collections=None,
                         trainable=True):
    """Unsupported for DNN input; always raises."""
    raise ValueError(
        "WeightedSparseColumn is not supported in DNN. "
        "Please use embedding_column or one_hot_column. column: {}".format(
            self))
  def to_weighted_sum(self,
                      input_tensor,
                      num_outputs=1,
                      weight_collections=None,
                      trainable=True):
    """Returns a weighted sum of ids for linear models (zero-initialized)."""
    return _create_embedding_lookup(
        input_tensor=self.id_tensor(input_tensor),
        weight_tensor=self.weight_tensor(input_tensor),
        vocab_size=self.length,
        dimension=num_outputs,
        weight_collections=_add_variable_collection(weight_collections),
        initializer=init_ops.zeros_initializer,
        combiner=self.sparse_id_column.combiner,
        trainable=trainable)
  def _to_embedding_lookup_arguments(self, input_tensor):
    """Packs the transformed tensors into `_EmbeddingLookupArguments`."""
    return _EmbeddingLookupArguments(
        input_tensor=self.id_tensor(input_tensor),
        weight_tensor=self.weight_tensor(input_tensor),
        vocab_size=self.length,
        initializer=init_ops.zeros_initializer,
        combiner=self.sparse_id_column.combiner)
def weighted_sparse_column(sparse_id_column,
                           weight_column_name,
                           dtype=dtypes.float32):
  """Creates a _SparseColumn by combining sparse_id_column with a weight column.

  Args:
    sparse_id_column: A `_SparseColumn` which is created by
      `sparse_column_with_*` functions.
    weight_column_name: A string defining a sparse column name which represents
      weight or value of the corresponding sparse id feature.
    dtype: Type of weights, such as `tf.float32`

  Returns:
    A _WeightedSparseColumn composed of two sparse features: one represents id,
    the other represents weight (value) of the id feature in that example.

  Raises:
    ValueError: if dtype is not convertible to float.

  An example usage:
    ```python
    words = sparse_column_with_hash_bucket("words", 1000)
    tfidf_weighted_words = weighted_sparse_column(words, "tfidf_score")
    ```

  This configuration assumes that input dictionary of model contains the
  following two items:
    * (key="words", value=word_tensor) where word_tensor is a SparseTensor.
    * (key="tfidf_score", value=tfidf_score_tensor) where tfidf_score_tensor
      is a SparseTensor.
  Following are assumed to be true:
    * word_tensor.indices = tfidf_score_tensor.indices
    * word_tensor.shape = tfidf_score_tensor.shape
  """
  # Weights must be numeric (int or float) so they can be cast to float.
  if dtype.is_integer or dtype.is_floating:
    return _WeightedSparseColumn(sparse_id_column, weight_column_name, dtype)
  raise ValueError("dtype is not convertible to float. Given {}".format(
      dtype))
class _OneHotColumn(_FeatureColumn,
                    collections.namedtuple("_OneHotColumn",
                                           ["sparse_id_column"])):
  """Represents a one-hot column for use in deep networks.

  Args:
    sparse_id_column: A _SparseColumn which is created by `sparse_column_with_*`
      function.
  """
  @property
  def name(self):
    """Returns the name of this column, derived from the id column's name."""
    return "{}_one_hot".format(self.sparse_id_column.name)
  @property
  def length(self):
    """Returns vocabulary or hash_bucket size."""
    return self.sparse_id_column.length
  @property
  def config(self):
    """Returns the parsing config of the origin column."""
    return _get_feature_config(self.sparse_id_column)
  @property
  def key(self):
    """Returns a string which will be used as a key when we do sorting."""
    return "{}".format(self)
  def insert_transformed_feature(self, columns_to_tensors):
    """Used by the Transformer to prevent double transformations."""
    if self.sparse_id_column not in columns_to_tensors:
      self.sparse_id_column.insert_transformed_feature(columns_to_tensors)
    # Reuses the id column's transformed tensor; the one-hot expansion itself
    # happens lazily in to_dnn_input_layer.
    columns_to_tensors[self] = columns_to_tensors[self.sparse_id_column]
  def to_dnn_input_layer(self,
                         transformed_input_tensor,
                         unused_weight_collections=None,
                         unused_trainable=False):
    """Returns a Tensor as an input to the first layer of neural network.

    Args:
      transformed_input_tensor: A tensor that has undergone the transformations
        in `insert_transformed_feature`.
      unused_weight_collections: Unused. One hot encodings are not variable.
      unused_trainable: Unused. One hot encodings are not trainable.

    Returns:
      A multihot Tensor to be fed into the first layer of neural network.

    Raises:
      ValueError: When using one_hot_column with weighted_sparse_column.
        This is not yet supported.
    """
    if (self.sparse_id_column.weight_tensor(transformed_input_tensor) is
        not None):
      raise ValueError("one_hot_column does not yet support "
                       "weighted_sparse_column. Column: {}".format(self))
    # Missing entries densify to id -1; out-of-range ids get off_value in
    # one_hot below, i.e. an all-zero row contribution.
    dense_id_tensor = sparse_ops.sparse_tensor_to_dense(
        self.sparse_id_column.id_tensor(transformed_input_tensor),
        default_value=-1)
    # Runtime guard: expect [batch, max num multivalent values].
    check_shape_op = control_flow_ops.Assert(
        math_ops.equal(array_ops.rank(dense_id_tensor), 2),
        ["Tensor should be of shape: [batch, max num multivalent values]"])
    with ops.control_dependencies([check_shape_op]):
      # One hot must be float for tf.concat reasons since all other inputs to
      # input_layer are float32.
      one_hot_id_tensor = array_ops.one_hot(
          dense_id_tensor, depth=self.length, on_value=1.0, off_value=0.0)
      # Reduce to get a multi-hot per example.
      return math_ops.reduce_sum(one_hot_id_tensor, reduction_indices=[1])
  # pylint: disable=unused-argument
  def to_weighted_sum(self,
                      input_tensor,
                      num_outputs=1,
                      weight_collections=None,
                      trainable=True):
    """Unsupported for linear models; always raises."""
    raise ValueError("OneHotColumn is not supported in linear models. "
                     "Please use sparse_column. column: {}".format(self))
class _EmbeddingColumn(_FeatureColumn, collections.namedtuple(
    "_EmbeddingColumn",
    ["sparse_id_column", "dimension", "combiner", "initializer",
     "ckpt_to_load_from", "tensor_name_in_ckpt", "shared_embedding_name",
     "shared_vocab_size"])):
  """Represents an embedding column.

  Args:
    sparse_id_column: A `_SparseColumn` which is created by
      `sparse_column_with_*` or `weighted_sparse_column` functions.
    dimension: An integer specifying dimension of the embedding.
    combiner: A string specifying how to reduce if there are multiple entries
      in a single row. Currently "mean", "sqrtn" and "sum" are supported. Each
      of this can be thought as example level normalizations on the column:
        * "sum": do not normalize features in the column
        * "mean": do l1 normalization on features in the column
        * "sqrtn": do l2 normalization on features in the column
      For more information: `tf.embedding_lookup_sparse`.
    initializer: A variable initializer function to be used in embedding
      variable initialization. If not specified, defaults to
      `tf.truncated_normal_initializer` with mean 0.0 and standard deviation
      1/sqrt(sparse_id_column.length).
    ckpt_to_load_from: (Optional). String representing checkpoint name/pattern
      to restore the column weights. Required if `tensor_name_in_ckpt` is not
      None.
    tensor_name_in_ckpt: (Optional). Name of the `Tensor` in the provided
      checkpoint from which to restore the column weights. Required if
      `ckpt_to_load_from` is not None.
    shared_embedding_name: (Optional). The common name for shared embedding.
    shared_vocab_size: (Optional). The common vocab_size used for shared
      embedding space.

  Raises:
    ValueError: if `initializer` is specified and is not callable. Also,
      if only one of `ckpt_to_load_from` and `tensor_name_in_ckpt` is specified.
  """
  def __new__(cls,
              sparse_id_column,
              dimension,
              combiner="sqrtn",
              initializer=None,
              ckpt_to_load_from=None,
              tensor_name_in_ckpt=None,
              shared_embedding_name=None,
              shared_vocab_size=None):
    if initializer is not None and not callable(initializer):
      raise ValueError("initializer must be callable if specified. "
                       "Embedding of column_name: {}".format(
                           sparse_id_column.name))
    # Checkpoint path and tensor name only make sense together.
    if (ckpt_to_load_from is None) != (tensor_name_in_ckpt is None):
      raise ValueError("Must specify both `ckpt_to_load_from` and "
                       "`tensor_name_in_ckpt` or none of them.")
    if initializer is None:
      # Scale the initial values down as the vocabulary grows.
      stddev = 1 / math.sqrt(sparse_id_column.length)
      # TODO(b/25671353): Better initial value?
      initializer = init_ops.truncated_normal_initializer(
          mean=0.0, stddev=stddev)
    return super(_EmbeddingColumn, cls).__new__(cls, sparse_id_column,
                                                dimension, combiner,
                                                initializer, ckpt_to_load_from,
                                                tensor_name_in_ckpt,
                                                shared_embedding_name,
                                                shared_vocab_size)
  @property
  def name(self):
    """Returns the column name, tagged as shared when applicable."""
    if self.shared_embedding_name is None:
      return "{}_embedding".format(self.sparse_id_column.name)
    else:
      return "{}_shared_embedding".format(self.sparse_id_column.name)
  @property
  def length(self):
    """Returns id size."""
    if self.shared_vocab_size is None:
      return self.sparse_id_column.length
    else:
      return self.shared_vocab_size
  @property
  def config(self):
    """Returns the parsing config of the underlying sparse id column."""
    return _get_feature_config(self.sparse_id_column)
  @property
  def key(self):
    """Returns a string which will be used as a key when we do sorting."""
    # The initializer is excluded: function objects have no stable repr.
    return self._key_without_properties(["initializer"])
  def insert_transformed_feature(self, columns_to_tensors):
    """Reuses the sparse id column's transformation result."""
    if self.sparse_id_column not in columns_to_tensors:
      self.sparse_id_column.insert_transformed_feature(columns_to_tensors)
    columns_to_tensors[self] = columns_to_tensors[self.sparse_id_column]
  def to_dnn_input_layer(self,
                         input_tensor,
                         weight_collections=None,
                         trainable=True):
    """Performs the embedding lookup and optional checkpoint restore."""
    is_shared_embedding = self.shared_embedding_name is not None
    output, embedding_weights = _create_embedding_lookup(
        input_tensor=self.sparse_id_column.id_tensor(input_tensor),
        weight_tensor=self.sparse_id_column.weight_tensor(input_tensor),
        vocab_size=self.length,
        dimension=self.dimension,
        weight_collections=_add_variable_collection(weight_collections),
        initializer=self.initializer,
        combiner=self.combiner,
        trainable=trainable,
        name=self.shared_embedding_name,
        is_shared_embedding=is_shared_embedding)
    if self.ckpt_to_load_from is not None:
      # A single-shard embedding is restored as a lone variable rather than a
      # list, matching what init_from_checkpoint expects.
      weights_to_restore = embedding_weights
      if len(embedding_weights) == 1:
        weights_to_restore = embedding_weights[0]
      checkpoint_utils.init_from_checkpoint(
          self.ckpt_to_load_from,
          {self.tensor_name_in_ckpt: weights_to_restore})
    return output
  def _checkpoint_path(self):
    """Returns (checkpoint, tensor name) pair, or None when not configured."""
    if self.ckpt_to_load_from is not None:
      return self.ckpt_to_load_from, self.tensor_name_in_ckpt
    return None
  # pylint: disable=unused-argument
  def to_weighted_sum(self,
                      input_tensor,
                      num_outputs=1,
                      weight_collections=None,
                      trainable=True):
    """Unsupported for linear models; always raises."""
    raise ValueError("EmbeddingColumn is not supported in linear models. "
                     "Please use sparse_column. column: {}".format(self))
  # pylint: disable=unused-argument
  def _to_embedding_lookup_arguments(self, input_tensor):
    """Unsupported for linear models; always raises."""
    raise ValueError("Column {} is not supported in linear models. "
                     "Please use sparse_column.".format(self))
def one_hot_column(sparse_id_column):
  """Builds a `_OneHotColumn` wrapping the given sparse id column.

  Args:
    sparse_id_column: A _SparseColumn which is created by
      `sparse_column_with_*` or crossed_column functions. Note that `combiner`
      defined in `sparse_id_column` is ignored.

  Returns:
    An _OneHotColumn.
  """
  return _OneHotColumn(sparse_id_column)
def embedding_column(sparse_id_column,
                     dimension,
                     combiner=None,
                     initializer=None,
                     ckpt_to_load_from=None,
                     tensor_name_in_ckpt=None):
  """Creates an `_EmbeddingColumn` for the given sparse id column.

  Args:
    sparse_id_column: A `_SparseColumn` which is created by for example
      `sparse_column_with_*` or crossed_column functions. Note that `combiner`
      defined in `sparse_id_column` is ignored.
    dimension: An integer specifying dimension of the embedding.
    combiner: A string specifying how to reduce if there are multiple entries
      in a single row. Currently "mean", "sqrtn" and "sum" are supported. Each
      of this can be considered an example level normalization on the column:
        * "sum": do not normalize
        * "mean": do l1 normalization
        * "sqrtn": do l2 normalization
      For more information: `tf.embedding_lookup_sparse`.
    initializer: A variable initializer function to be used in embedding
      variable initialization. If not specified, defaults to
      `tf.truncated_normal_initializer` with mean 0.0 and standard deviation
      1/sqrt(sparse_id_column.length).
    ckpt_to_load_from: (Optional). String representing checkpoint name/pattern
      to restore the column weights. Required if `tensor_name_in_ckpt` is not
      None.
    tensor_name_in_ckpt: (Optional). Name of the `Tensor` in the provided
      checkpoint from which to restore the column weights. Required if
      `ckpt_to_load_from` is not None.

  Returns:
    An `_EmbeddingColumn`.
  """
  if combiner is None:
    combiner = "mean"
    logging.warn("The default value of combiner will change from \"mean\" "
                 "to \"sqrtn\" after 2016/11/01.")
  return _EmbeddingColumn(sparse_id_column, dimension,
                          combiner=combiner,
                          initializer=initializer,
                          ckpt_to_load_from=ckpt_to_load_from,
                          tensor_name_in_ckpt=tensor_name_in_ckpt)
def shared_embedding_columns(sparse_id_columns,
                             dimension,
                             combiner=None,
                             shared_embedding_name=None,
                             initializer=None,
                             ckpt_to_load_from=None,
                             tensor_name_in_ckpt=None):
  """Creates a list of `_EmbeddingColumn` sharing the same embedding.

  Args:
    sparse_id_columns: An iterable of `_SparseColumn`, such as those created by
      `sparse_column_with_*` or crossed_column functions. Note that `combiner`
      defined in each sparse_id_column is ignored.
    dimension: An integer specifying dimension of the embedding.
    combiner: A string specifying how to reduce if there are multiple entries
      in a single row. Currently "mean", "sqrtn" and "sum" are supported. Each
      of this can be considered an example level normalization on the column:
        * "sum": do not normalize
        * "mean": do l1 normalization
        * "sqrtn": do l2 normalization
      For more information: `tf.embedding_lookup_sparse`.
    shared_embedding_name: (Optional). A string specifying the name of shared
      embedding weights. This will be needed if you want to reference the shared
      embedding separately from the generated `_EmbeddingColumn`.
    initializer: A variable initializer function to be used in embedding
      variable initialization. If not specified, defaults to
      `tf.truncated_normal_initializer` with mean 0.0 and standard deviation
      1/sqrt(sparse_id_columns[0].length).
    ckpt_to_load_from: (Optional). String representing checkpoint name/pattern
      to restore the column weights. Required if `tensor_name_in_ckpt` is not
      None.
    tensor_name_in_ckpt: (Optional). Name of the `Tensor` in the provided
      checkpoint from which to restore the column weights. Required if
      `ckpt_to_load_from` is not None.

  Returns:
    A tuple of `_EmbeddingColumn` with shared embedding space.

  Raises:
    ValueError: if sparse_id_columns is empty, or its elements are not
      compatible with each other.
    TypeError: if at least one element of sparse_id_columns is not a
      `SparseTensor`.
  """
  if combiner is None:
    logging.warn("The default value of combiner will change from \"mean\" "
                 "to \"sqrtn\" after 2016/11/01.")
    combiner = "mean"
  # Materialize the iterable up front so generators and other one-shot
  # iterables work, and so the validation below doesn't consume it.
  if not isinstance(sparse_id_columns, list):
    sparse_id_columns = list(sparse_id_columns)
  if len(sparse_id_columns) < 1:
    raise ValueError("The input sparse_id_columns should have at least one "
                     "element.")
  for sparse_id_column in sparse_id_columns:
    if not isinstance(sparse_id_column, _SparseColumn):
      # Fixed: the two message fragments used to concatenate without a space
      # ("...but{} is not.").
      raise TypeError("Elements of sparse_id_columns must be _SparseColumn, "
                      "but {} is not.".format(sparse_id_column))
  if len(sparse_id_columns) == 1:
    # A single column needs no compatibility check; still tag it with the
    # shared embedding name so the variable can be referenced externally.
    # NOTE(review): this branch returns a list while the multi-column branch
    # returns a tuple; preserved for backward compatibility.
    return [
        _EmbeddingColumn(sparse_id_columns[0], dimension, combiner, initializer,
                         ckpt_to_load_from, tensor_name_in_ckpt,
                         shared_embedding_name)]
  else:
    # check compatibility of sparse_id_columns
    compatible = True
    for column in sparse_id_columns[1:]:
      compatible = compatible and column.is_compatible(sparse_id_columns[0])
    if not compatible:
      raise ValueError("The input sparse id columns are not compatible.")
    # Construct the shared name and size for shared embedding space.
    if not shared_embedding_name:
      # Use up to three column names in the shared variable name; summarize
      # the rest to keep the name bounded.
      if len(sparse_id_columns) <= 3:
        shared_embedding_name = "_".join([column.name
                                          for column in sparse_id_columns])
      else:
        shared_embedding_name = "_".join([column.name
                                          for column in sparse_id_columns[0:3]])
        shared_embedding_name += (
            "_plus_{}_others".format(len(sparse_id_columns)-3))
      shared_embedding_name += "_shared_embedding"
    shared_vocab_size = sparse_id_columns[0].length
    embedded_columns = []
    for column in sparse_id_columns:
      embedded_columns.append(
          _EmbeddingColumn(column, dimension, combiner, initializer,
                           ckpt_to_load_from, tensor_name_in_ckpt,
                           shared_embedding_name, shared_vocab_size))
    return tuple(embedded_columns)
class _HashedEmbeddingColumn(collections.namedtuple(
    "_HashedEmbeddingColumn", ["column_name", "size", "dimension", "combiner",
                               "initializer"]), _EmbeddingColumn):
  """See `hashed_embedding_column`.

  NOTE: the namedtuple base is listed *before* `_EmbeddingColumn`, so the
  tuple fields here take precedence while other behavior is inherited from
  `_EmbeddingColumn`.
  """
  def __new__(cls,
              column_name,
              size,
              dimension,
              combiner="sqrtn",
              initializer=None):
    if initializer is not None and not callable(initializer):
      raise ValueError("initializer must be callable if specified. "
                       "column_name: {}".format(column_name))
    if initializer is None:
      # Fixed default scale (unlike _EmbeddingColumn, which scales by vocab
      # size), since the parameter count is `size`, not a vocabulary.
      stddev = 0.1
      # TODO(b/25671353): Better initial value?
      initializer = init_ops.truncated_normal_initializer(
          mean=0.0, stddev=stddev)
    return super(_HashedEmbeddingColumn, cls).__new__(cls, column_name, size,
                                                      dimension, combiner,
                                                      initializer)
  @property
  def name(self):
    """Returns the name of this column."""
    return "{}_hashed_embedding".format(self.column_name)
  @property
  def config(self):
    """Returns the parsing config: a single variable-length string feature."""
    return {self.column_name: parsing_ops.VarLenFeature(dtypes.string)}
  def insert_transformed_feature(self, columns_to_tensors):
    """No transformation needed: the raw feature tensor is used directly."""
    columns_to_tensors[self] = columns_to_tensors[self.column_name]
  def to_dnn_input_layer(self,
                         input_tensor,
                         weight_collections=None,
                         trainable=True):
    """Creates the flat parameter vector and performs the hashed lookup."""
    embeddings = _create_embeddings(
        shape=[self.size],
        initializer=self.initializer,
        dtype=dtypes.float32,
        trainable=trainable,
        weight_collections=_add_variable_collection(weight_collections))
    return embedding_ops.hashed_embedding_lookup_sparse(
        embeddings, input_tensor, self.dimension,
        combiner=self.combiner, name="lookup")
def hashed_embedding_column(column_name,
                            size,
                            dimension,
                            combiner=None,
                            initializer=None):
  """Creates an embedding column of a sparse feature using parameter hashing.

  The i-th embedding component of a value v is found by retrieving an
  embedding weight whose index is a fingerprint of the pair (v,i).

  Args:
    column_name: A string defining sparse column name.
    size: An integer specifying the number of parameters in the embedding layer.
    dimension: An integer specifying dimension of the embedding.
    combiner: A string specifying how to reduce if there are multiple entries
      in a single row. Currently "mean", "sqrtn" and "sum" are supported. Each
      of this can be thought as example level normalizations on the column:
        * "sum": do not normalize features in the column
        * "mean": do l1 normalization on features in the column
        * "sqrtn": do l2 normalization on features in the column
      For more information: `tf.embedding_lookup_sparse`.
    initializer: A variable initializer function to be used in embedding
      variable initialization. If not specified, defaults to
      `tf.truncated_normal_initializer` with mean 0 and standard deviation 0.1.

  Returns:
    A _HashedEmbeddingColumn.

  Raises:
    ValueError: if dimension or size is not a positive integer; or if combiner
      is not supported.
  """
  if combiner is None:
    logging.warn("The default value of combiner will change from \"mean\" "
                 "to \"sqrtn\" after 2016/11/01.")
    combiner = "mean"
  if dimension < 1 or size < 1:
    raise ValueError("Dimension and size must be greater than 0. "
                     "dimension: {}, size: {}, column_name: {}".format(
                         dimension, size, column_name))
  if combiner not in {"mean", "sqrtn", "sum"}:
    raise ValueError("Combiner must be one of 'mean', 'sqrtn' or 'sum'. "
                     "combiner: {}, column_name: {}".format(combiner,
                                                            column_name))
  return _HashedEmbeddingColumn(column_name, size, dimension, combiner,
                                initializer)
class _RealValuedColumn(_FeatureColumn, collections.namedtuple(
    "_RealValuedColumn",
    ["column_name", "dimension", "default_value", "dtype", "normalizer"])):
  """Represents a real valued feature column also known as continuous features.

  Instances of this class are immutable. A real valued column means features are
  dense. It means dictionary returned by InputBuilder contains a
  ("column_name", Tensor) pair. Tensor shape should be (batch_size, 1).
  """
  def __new__(cls, column_name, dimension, default_value, dtype, normalizer):
    # Store default_value as a tuple so the namedtuple remains hashable.
    if default_value is not None:
      default_value = tuple(default_value)
    return super(_RealValuedColumn, cls).__new__(cls, column_name, dimension,
                                                 default_value, dtype,
                                                 normalizer)
  @property
  def name(self):
    """Returns the raw column name."""
    return self.column_name
  @property
  def config(self):
    """Returns the parsing config (a fixed-length dense feature)."""
    # Convert the stored tuple back to a list for FixedLenFeature.
    default_value = self.default_value
    if default_value is not None:
      default_value = list(default_value)
    return {self.column_name: parsing_ops.FixedLenFeature([self.dimension],
                                                          self.dtype,
                                                          default_value)}
  @property
  def key(self):
    """Returns a string which will be used as a key when we do sorting."""
    # The normalizer is excluded: function objects have no stable repr.
    return self._key_without_properties(["normalizer"])
  @property
  def normalizer_fn(self):
    """Returns the function used to normalize the column."""
    return self.normalizer
  def _normalized_input_tensor(self, input_tensor):
    """Returns the input tensor after custom normalization is applied."""
    return (self.normalizer(input_tensor) if self.normalizer is not None else
            input_tensor)
  def insert_transformed_feature(self, columns_to_tensors):
    """Apply transformation and inserts it into columns_to_tensors.

    Args:
      columns_to_tensors: A mapping from feature columns to tensors. 'string'
        key means a base feature (not-transformed). It can have _FeatureColumn
        as a key too. That means that _FeatureColumn is already transformed.
    """
    # Transform the input tensor according to the normalizer function + reshape.
    input_tensor = self._normalized_input_tensor(columns_to_tensors[self.name])
    # Use -1 (infer) for the batch dimension when it is not statically known.
    batch_size = input_tensor.get_shape().as_list()[0]
    batch_size = int(batch_size) if batch_size else -1
    flattened_shape = [batch_size, self.dimension]
    columns_to_tensors[self] = array_ops.reshape(
        math_ops.to_float(input_tensor), flattened_shape, name="reshape")
  # pylint: disable=unused-argument
  def to_dnn_input_layer(self,
                         transformed_input_tensor,
                         weight_collections=None,
                         trainable=True):
    """Returns a Tensor as an input to the first layer of neural network."""
    return transformed_input_tensor
  def to_weighted_sum(self,
                      transformed_input_tensor,
                      num_outputs=1,
                      weight_collections=None,
                      trainable=True):
    """Returns a Tensor as linear predictions and a list of created Variable."""
    def _weight(name):
      # Zero-initialized [dimension, num_outputs] weight matrix.
      return variable_scope.get_variable(
          name,
          shape=[self.dimension, num_outputs],
          initializer=init_ops.zeros_initializer,
          collections=_add_variable_collection(weight_collections))
    if self.name:
      weight = _weight("weight")
    else:
      # Old behavior to support a subset of old checkpoints.
      weight = _weight("_weight")
    # The _RealValuedColumn has the shape of [batch_size, column.dimension].
    log_odds_by_dim = math_ops.matmul(
        transformed_input_tensor, weight, name="matmul")
    return log_odds_by_dim, [weight]
  def _to_dense_tensor(self, input_tensor):
    """Already dense; returns the tensor unchanged."""
    return input_tensor
def real_valued_column(column_name,
                       dimension=1,
                       default_value=None,
                       dtype=dtypes.float32,
                       normalizer=None):
  """Creates a _RealValuedColumn.

  Args:
    column_name: A string defining real valued column name.
    dimension: An integer specifying dimension of the real valued column.
      The default is 1. The Tensor representing the _RealValuedColumn
      will have the shape of [batch_size, dimension].
    default_value: A single value compatible with dtype or a list of values
      compatible with dtype which the column takes on during tf.Example parsing
      if data is missing. If None, then tf.parse_example will fail if an example
      does not contain this column. If a single value is provided, the same
      value will be applied as the default value for every dimension. If a
      list of values is provided, the length of the list should be equal to the
      value of `dimension`.
    dtype: defines the type of values. Default value is tf.float32.
    normalizer: If not None, a function that can be used to normalize the value
      of the real valued column after default_value is applied for parsing.
      Normalizer function takes the input tensor as its argument, and returns
      the output tensor. (e.g. lambda x: (x - 3.0) / 4.2).

  Returns:
    A _RealValuedColumn.
  Raises:
    TypeError: if dimension is not an int
    ValueError: if dimension is not a positive integer
    TypeError: if default_value is a list but its length is not equal to the
      value of `dimension`.
    TypeError: if default_value is not compatible with dtype.
    ValueError: if dtype is not convertable to tf.float32.
  """
  if not isinstance(dimension, int):
    raise TypeError("dimension must be an integer. "
                    "dimension: {}, column_name: {}".format(dimension,
                                                            column_name))
  if dimension < 1:
    raise ValueError("dimension must be greater than 0. "
                     "dimension: {}, column_name: {}".format(dimension,
                                                             column_name))
  if not (dtype.is_integer or dtype.is_floating):
    raise ValueError("dtype must be convertible to float. "
                     "dtype: {}, column_name: {}".format(dtype, column_name))
  if default_value is None:
    return _RealValuedColumn(column_name, dimension, default_value, dtype,
                             normalizer)
  # A scalar int default is broadcast across all dimensions; cast to float
  # when the column dtype is floating. NOTE: bool passes isinstance(..., int)
  # in Python, so a bool default is treated as an int here.
  if isinstance(default_value, int):
    if dtype.is_integer:
      default_value = [default_value for _ in range(dimension)]
      return _RealValuedColumn(column_name, dimension, default_value, dtype,
                               normalizer)
    if dtype.is_floating:
      default_value = float(default_value)
      default_value = [default_value for _ in range(dimension)]
      return _RealValuedColumn(column_name, dimension, default_value, dtype,
                               normalizer)
  # A scalar float default is only valid for purely floating dtypes.
  if isinstance(default_value, float):
    if dtype.is_floating and (not dtype.is_integer):
      default_value = [default_value for _ in range(dimension)]
      return _RealValuedColumn(column_name, dimension, default_value, dtype,
                               normalizer)
  if isinstance(default_value, list):
    if len(default_value) != dimension:
      raise ValueError(
          "The length of default_value must be equal to dimension. "
          "default_value: {}, dimension: {}, column_name: {}".format(
              default_value, dimension, column_name))
    # Check if the values in the list are all integers or are convertible to
    # floats.
    is_list_all_int = True
    is_list_all_float = True
    for v in default_value:
      if not isinstance(v, int):
        is_list_all_int = False
      if not (isinstance(v, float) or isinstance(v, int)):
        is_list_all_float = False
    if is_list_all_int:
      if dtype.is_integer:
        return _RealValuedColumn(column_name, dimension, default_value, dtype,
                                 normalizer)
      elif dtype.is_floating:
        default_value = [float(v) for v in default_value]
        return _RealValuedColumn(column_name, dimension, default_value, dtype,
                                 normalizer)
    if is_list_all_float:
      if dtype.is_floating and (not dtype.is_integer):
        default_value = [float(v) for v in default_value]
        return _RealValuedColumn(column_name, dimension, default_value, dtype,
                                 normalizer)
  # Any default_value/dtype combination not handled above is incompatible.
  raise TypeError("default_value must be compatible with dtype. "
                  "default_value: {}, dtype: {}, column_name: {}".format(
                      default_value, dtype, column_name))
class _BucketizedColumn(_FeatureColumn, collections.namedtuple(
"_BucketizedColumn", ["source_column", "boundaries"])):
"""Represents a bucketization transformation also known as binning.
Instances of this class are immutable. Values in `source_column` will be
bucketized based on `boundaries`.
For example, if the inputs are:
boundaries = [0, 10, 100]
source_column = [[-5], [150], [10], [0], [4], [19]]
then the bucketized feature will be:
output = [[0], [3], [2], [1], [1], [2]]
Attributes:
source_column: A _RealValuedColumn defining dense column.
boundaries: A list of floats specifying the boundaries. It has to be sorted.
[a, b, c] defines following buckets: (-inf., a), [a, b), [b, c), [c, inf.)
Raises:
ValueError: if 'boundaries' is empty or not sorted.
"""
  def __new__(cls, source_column, boundaries):
    """Validates inputs and stores de-duplicated, sorted boundaries."""
    if not isinstance(source_column, _RealValuedColumn):
      raise TypeError("source_column must be an instance of _RealValuedColumn. "
                      "source_column: {}".format(source_column))
    if not isinstance(boundaries, list) or not boundaries:
      raise ValueError("boundaries must be a non-empty list. "
                       "boundaries: {}".format(boundaries))
    # We allow bucket boundaries to be monotonically increasing
    # (ie a[i+1] >= a[i]). When two bucket boundaries are the same, we
    # de-duplicate.
    sanitized_boundaries = []
    for i in range(len(boundaries) - 1):
      if boundaries[i] == boundaries[i + 1]:
        continue
      elif boundaries[i] < boundaries[i + 1]:
        sanitized_boundaries.append(boundaries[i])
      else:
        raise ValueError("boundaries must be a sorted list. "
                         "boundaries: {}".format(boundaries))
    # The last boundary is always kept (it has no successor to compare with).
    sanitized_boundaries.append(boundaries[len(boundaries) - 1])
    # Store as a tuple so the namedtuple remains hashable.
    return super(_BucketizedColumn, cls).__new__(cls, source_column,
                                                 tuple(sanitized_boundaries))
@property
def name(self):
return "{}_bucketized".format(self.source_column.name)
@property
def length(self):
"""Returns total number of buckets."""
return len(self.boundaries) + 1
@property
def config(self):
return self.source_column.config
@property
def key(self):
"""Returns a string which will be used as a key when we do sorting."""
return "{}".format(self)
def insert_transformed_feature(self, columns_to_tensors):
# Bucketize the source column.
if self.source_column not in columns_to_tensors:
self.source_column.insert_transformed_feature(columns_to_tensors)
columns_to_tensors[self] = bucketization_op.bucketize(
columns_to_tensors[self.source_column],
boundaries=list(self.boundaries),
name="bucketize")
# pylint: disable=unused-argument
def to_dnn_input_layer(self,
input_tensor,
weight_collections=None,
trainable=True):
return array_ops.reshape(
array_ops.one_hot(
math_ops.to_int64(input_tensor),
self.length,
1.,
0.,
name="one_hot"), [-1, self.length * self.source_column.dimension],
name="reshape")
def to_sparse_tensor(self, input_tensor):
"""Creates a SparseTensor from the bucketized Tensor."""
dimension = self.source_column.dimension
batch_size = array_ops.shape(input_tensor, name="shape")[0]
if dimension > 1:
i1 = array_ops.reshape(
array_ops.tile(
array_ops.expand_dims(
math_ops.range(0, batch_size), 1, name="expand_dims"),
[1, dimension],
name="tile"), [-1],
name="rehsape")
i2 = array_ops.tile(
math_ops.range(0, dimension), [batch_size], name="tile")
# Flatten the bucket indices and unique them across dimensions
# E.g. 2nd dimension indices will range from k to 2*k-1 with k buckets
bucket_indices = array_ops.reshape(
input_tensor, [-1], name="reshape") + self.length * i2
else:
# Simpler indices when dimension=1
i1 = math_ops.range(0, batch_size)
i2 = array_ops.zeros([batch_size], dtype=dtypes.int32, name="zeros")
bucket_indices = array_ops.reshape(input_tensor, [-1], name="reshape")
indices = math_ops.to_int64(array_ops.transpose(array_ops.pack((i1, i2))))
shape = math_ops.to_int64(array_ops.pack([batch_size, dimension]))
sparse_id_values = ops.SparseTensor(indices, bucket_indices, shape)
return sparse_id_values
def to_weighted_sum(self,
input_tensor,
num_outputs=1,
weight_collections=None,
trainable=True):
"""Returns a Tensor as linear predictions and a list of created Variable."""
return _create_embedding_lookup(
input_tensor=self.to_sparse_tensor(input_tensor),
weight_tensor=None,
vocab_size=self.length * self.source_column.dimension,
dimension=num_outputs,
weight_collections=_add_variable_collection(weight_collections),
initializer=init_ops.zeros_initializer,
combiner="sum",
trainable=trainable)
def _to_embedding_lookup_arguments(self, input_tensor):
return _EmbeddingLookupArguments(
input_tensor=self.to_sparse_tensor(input_tensor),
weight_tensor=None,
vocab_size=self.length * self.source_column.dimension,
initializer=init_ops.zeros_initializer,
combiner="sum")
def bucketized_column(source_column, boundaries):
  """Builds a `_BucketizedColumn` that bins `source_column` by `boundaries`.

  Args:
    source_column: A _RealValuedColumn defining dense column.
    boundaries: A list of floats specifying the boundaries. It has to be
      sorted.

  Returns:
    A _BucketizedColumn.

  Raises:
    ValueError: if 'boundaries' is empty or not sorted.
  """
  return _BucketizedColumn(source_column, boundaries)
class _CrossedColumn(_FeatureColumn,
                     collections.namedtuple("_CrossedColumn",
                                            ["columns", "hash_bucket_size",
                                             "combiner", "ckpt_to_load_from",
                                             "tensor_name_in_ckpt"])):
  """Represents a cross transformation also known as conjunction or combination.
  Instances of this class are immutable. It crosses given `columns`. Crossed
  column output will be hashed to hash_bucket_size.
  Conceptually, transformation can be thought as:
    Hash(cartesian product of features in columns) % `hash_bucket_size`
  For example, if the columns are
      SparseTensor referred by first column: shape = [2, 2]
      [0, 0]: "a"
      [1, 0]: "b"
      [1, 1]: "c"
      SparseTensor referred by second column: : shape = [2, 1]
      [0, 0]: "d"
      [1, 0]: "e"
  then crossed feature will look like:
      shape = [2, 2]
      [0, 0]: Hash64("d", Hash64("a")) % hash_bucket_size
      [1, 0]: Hash64("e", Hash64("b")) % hash_bucket_size
      [1, 1]: Hash64("e", Hash64("c")) % hash_bucket_size
  Attributes:
    columns: An iterable of _FeatureColumn. Items can be an instance of
      _SparseColumn, _CrossedColumn, or _BucketizedColumn.
    hash_bucket_size: An int that is > 1. The number of buckets.
    combiner: A string specifying how to reduce if there are multiple entries
      in a single row. Currently "mean", "sqrtn" and "sum" are supported. Each
      of this can be thought as example level normalizations on the column:
        * "sum": do not normalize
        * "mean": do l1 normalization
        * "sqrtn": do l2 normalization
      For more information: `tf.embedding_lookup_sparse`.
    ckpt_to_load_from: (Optional). String representing checkpoint name/pattern
      to restore the column weights. Required if `tensor_name_in_ckpt` is not
      None.
    tensor_name_in_ckpt: (Optional). Name of the `Tensor` in the provided
      checkpoint from which to restore the column weights. Required if
      `ckpt_to_load_from` is not None.
  Raises:
    TypeError: if all items in columns are not an instance of _SparseColumn,
      _CrossedColumn, or _BucketizedColumn.
    ValueError: if hash_bucket_size is not > 1 or len(columns) is not > 1. Also,
      if only one of `ckpt_to_load_from` and `tensor_name_in_ckpt` is specified.
  """
  @staticmethod
  def _is_crossable(column):
    """Returns True if `column` is a type that may participate in a cross."""
    return isinstance(column,
                      (_SparseColumn, _CrossedColumn, _BucketizedColumn))
  def __new__(cls,
              columns,
              hash_bucket_size,
              combiner="sqrtn",
              ckpt_to_load_from=None,
              tensor_name_in_ckpt=None):
    for column in columns:
      if not _CrossedColumn._is_crossable(column):
        raise TypeError("columns must be a set of _SparseColumn, "
                        "_CrossedColumn, or _BucketizedColumn instances. "
                        "column: {}".format(column))
    if len(columns) < 2:
      raise ValueError("columns must contain at least 2 elements. "
                       "columns: {}".format(columns))
    if hash_bucket_size < 2:
      raise ValueError("hash_bucket_size must be at least 2. "
                       "hash_bucket_size: {}".format(hash_bucket_size))
    if (ckpt_to_load_from is None) != (tensor_name_in_ckpt is None):
      raise ValueError("Must specify both `ckpt_to_load_from` and "
                       "`tensor_name_in_ckpt` or none of them.")
    # Sort columns by name so that equivalent crosses compare/hash equal
    # regardless of the order the caller listed them in.
    sorted_columns = sorted(
        [column for column in columns], key=lambda column: column.name)
    return super(_CrossedColumn, cls).__new__(cls, tuple(sorted_columns),
                                              hash_bucket_size, combiner,
                                              ckpt_to_load_from,
                                              tensor_name_in_ckpt)
  @property
  def name(self):
    """Name of the cross: the member names joined with "_X_"."""
    sorted_names = sorted([column.name for column in self.columns])
    return "_X_".join(sorted_names)
  @property
  def config(self):
    """Merged parsing configuration of all member columns."""
    config = {}
    for column in self.columns:
      config.update(_get_feature_config(column))
    return config
  @property
  def length(self):
    """Returns total number of buckets."""
    return self.hash_bucket_size
  @property
  def key(self):
    """Returns a string which will be used as a key when we do sorting."""
    return "{}".format(self)
  def id_tensor(self, input_tensor):
    """Returns the id tensor from the given transformed input_tensor."""
    return input_tensor
  # pylint: disable=unused-argument
  def weight_tensor(self, input_tensor):
    """Returns the weight tensor from the given transformed input_tensor."""
    return None
  def insert_transformed_feature(self, columns_to_tensors):
    """Handles cross transformation."""
    def _collect_leaf_level_columns(cross):
      """Collects base columns contained in the cross."""
      leaf_level_columns = []
      for c in cross.columns:
        if isinstance(c, _CrossedColumn):
          # Nested crosses are flattened so the cross op sees only leaves.
          leaf_level_columns.extend(_collect_leaf_level_columns(c))
        else:
          leaf_level_columns.append(c)
      return leaf_level_columns
    feature_tensors = []
    for c in _collect_leaf_level_columns(self):
      if isinstance(c, _SparseColumn):
        feature_tensors.append(columns_to_tensors[c.name])
      else:
        if c not in columns_to_tensors:
          c.insert_transformed_feature(columns_to_tensors)
        if isinstance(c, _BucketizedColumn):
          feature_tensors.append(c.to_sparse_tensor(columns_to_tensors[c]))
        else:
          feature_tensors.append(columns_to_tensors[c])
    columns_to_tensors[self] = sparse_feature_cross_op.sparse_feature_cross(
        feature_tensors,
        hashed_output=True,
        num_buckets=self.hash_bucket_size,
        name="cross")
  # pylint: disable=unused-argument
  def to_dnn_input_layer(self,
                         input_tensor,
                         weight_collections=None,
                         trainable=True):
    """Crossed columns have no dense representation; always raises."""
    raise ValueError("CrossedColumn is not supported in DNN. "
                     "Please use embedding_column. column: {}".format(self))
  def to_weighted_sum(self,
                      input_tensor,
                      num_outputs=1,
                      weight_collections=None,
                      trainable=True):
    """Returns linear predictions and the created weight Variables.

    If `ckpt_to_load_from` is set, the weights are additionally initialized
    from the named tensor in that checkpoint.
    """
    output, embedding_weights = _create_embedding_lookup(
        input_tensor=input_tensor,
        weight_tensor=None,
        vocab_size=self.length,
        dimension=num_outputs,
        weight_collections=_add_variable_collection(weight_collections),
        initializer=init_ops.zeros_initializer,
        combiner=self.combiner,
        trainable=trainable)
    if self.ckpt_to_load_from is not None:
      weights_to_restore = embedding_weights
      # A single-element list is unwrapped so the checkpoint mapping targets
      # the variable itself rather than a list.
      if len(embedding_weights) == 1:
        weights_to_restore = embedding_weights[0]
      checkpoint_utils.init_from_checkpoint(
          self.ckpt_to_load_from,
          {self.tensor_name_in_ckpt: weights_to_restore})
    return output, embedding_weights
  def _checkpoint_path(self):
    """Returns (checkpoint, tensor name) to restore from, or None."""
    if self.ckpt_to_load_from is not None:
      return self.ckpt_to_load_from, self.tensor_name_in_ckpt
    return None
  def _to_embedding_lookup_arguments(self, input_tensor):
    """Packs the lookup arguments for this cross."""
    return _EmbeddingLookupArguments(
        input_tensor=input_tensor,
        weight_tensor=None,
        vocab_size=self.length,
        initializer=init_ops.zeros_initializer,
        combiner=self.combiner)
def crossed_column(columns, hash_bucket_size, combiner=None,
                   ckpt_to_load_from=None,
                   tensor_name_in_ckpt=None):
  """Builds a `_CrossedColumn` that hash-crosses the given feature columns.

  Args:
    columns: An iterable of _FeatureColumn. Items can be an instance of
      _SparseColumn, _CrossedColumn, or _BucketizedColumn.
    hash_bucket_size: An int that is > 1. The number of buckets.
    combiner: A combiner string, supports sum, mean, sqrtn. If None, "sum" is
      used and a deprecation warning about the future default is logged.
    ckpt_to_load_from: (Optional). String representing checkpoint name/pattern
      to restore the column weights. Required if `tensor_name_in_ckpt` is not
      None.
    tensor_name_in_ckpt: (Optional). Name of the `Tensor` in the provided
      checkpoint from which to restore the column weights. Required if
      `ckpt_to_load_from` is not None.

  Returns:
    A _CrossedColumn.

  Raises:
    TypeError: if any item in columns is not an instance of _SparseColumn,
      _CrossedColumn, or _BucketizedColumn, or hash_bucket_size is not an int.
    ValueError: if hash_bucket_size is not > 1 or len(columns) is not > 1.
  """
  if combiner is None:
    logging.warn("The default value of combiner will change from \"sum\" "
                 "to \"sqrtn\" after 2016/11/01.")
    combiner = "sum"
  return _CrossedColumn(columns,
                        hash_bucket_size,
                        combiner=combiner,
                        ckpt_to_load_from=ckpt_to_load_from,
                        tensor_name_in_ckpt=tensor_name_in_ckpt)
class DataFrameColumn(_FeatureColumn,
                      collections.namedtuple("DataFrameColumn",
                                             ["column_name", "series"])):
  """Represents a feature column produced from a `DataFrame`.
  Instances of this class are immutable. A `DataFrame` column may be dense or
  sparse, and may have any shape, with the constraint that dimension 0 is
  batch_size.
  Args:
    column_name: a name for this column
    series: a `Series` to be wrapped, which has already had its base features
      substituted with `PredefinedSeries`.
  """
  def __new__(cls, column_name, series):
    return super(DataFrameColumn, cls).__new__(cls, column_name, series)
  @property
  def name(self):
    """Name of this column (the `column_name` it was constructed with)."""
    return self.column_name
  @property
  def config(self):
    """Parsing config: the base features the wrapped series requires."""
    return self.series.required_base_features()
  @property
  def key(self):
    """Returns a string which will be used as a key when we do sorting."""
    return self.name
  def insert_transformed_feature(self, columns_to_tensors):
    """Builds the series against the cache and stores the result."""
    # The cache must already contain mappings from the expected base feature
    # names to Tensors.
    # Passing columns_to_tensors as the cache here means that multiple outputs
    # of the transform will be cached, keyed by the repr of their associated
    # TransformedSeries.
    # The specific requested output ends up in columns_to_tensors twice: once
    # keyed by the TransformedSeries repr, and once keyed by this
    # DataFrameColumn instance.
    columns_to_tensors[self] = self.series.build(columns_to_tensors)
  # pylint: disable=unused-argument
  def to_dnn_input_layer(self,
                         input_tensor,
                         weight_collections=None,
                         trainable=True):
    """Reshapes `input_tensor` to rank 2 for consumption by an Estimator."""
    # DataFrame typically provides Tensors of shape [batch_size],
    # but Estimator requires shape [batch_size, 1]
    dims = input_tensor.get_shape().ndims
    if dims == 0:
      raise ValueError(
          "Can't build input layer from tensor of shape (): {}".format(
              self.column_name))
    elif dims == 1:
      return array_ops.expand_dims(input_tensor, 1, name="expand_dims")
    else:
      return input_tensor
  # TODO(soergel): This mirrors RealValuedColumn for now, but should become
  # better abstracted with less code duplication when we add other kinds.
  def to_weighted_sum(self,
                      input_tensor,
                      num_outputs=1,
                      weight_collections=None,
                      trainable=True):
    """Returns linear predictions and the created weight Variable."""
    def _weight(name):
      return variable_scope.get_variable(
          name,
          # NOTE(review): `DataFrameColumn` never defines a `dimension`
          # attribute (it is a ["column_name", "series"] namedtuple), so this
          # line appears to raise AttributeError — it looks copied from
          # _RealValuedColumn per the TODO above. Confirm before relying on
          # this method.
          shape=[self.dimension, num_outputs],
          initializer=init_ops.zeros_initializer,
          collections=_add_variable_collection(weight_collections))
    # `self.name` is just `column_name`, so this branch is taken whenever the
    # column was given a non-empty name.
    if self.name:
      weight = _weight("weight")
    else:
      # Old behavior to support a subset of old checkpoints.
      weight = _weight("_weight")
    # The _RealValuedColumn has the shape of [batch_size, column.dimension].
    log_odds_by_dim = math_ops.matmul(input_tensor, weight, name="matmul")
    return log_odds_by_dim, [weight]
  def _to_dense_tensor(self, input_tensor):
    return self.to_dnn_input_layer(input_tensor)
  def __eq__(self, other):
    # NOTE(review): equality is based on `__dict__`, but namedtuple subclasses
    # store their fields in the tuple, not in `__dict__` — verify this compares
    # instances as intended rather than comparing two (possibly empty) dicts.
    if isinstance(other, self.__class__):
      return self.__dict__ == other.__dict__
    else:
      return False
  def __ne__(self, other):
    return not self.__eq__(other)
def _get_feature_config(feature_column):
  """Returns the parsing configuration of the base feature in `feature_column`.

  Args:
    feature_column: a _FeatureColumn instance of a supported concrete type.

  Returns:
    The column's `config` dict mapping feature keys to parsing features.

  Raises:
    TypeError: if `feature_column` is not a _FeatureColumn, or is a
      _FeatureColumn subtype without parsing support.
  """
  if not isinstance(feature_column, _FeatureColumn):
    raise TypeError(
        "feature_columns should only contain instances of _FeatureColumn. "
        "Given column is {}".format(feature_column))
  supported_column_types = (_SparseColumn, _WeightedSparseColumn,
                            _EmbeddingColumn, _RealValuedColumn,
                            _BucketizedColumn, _CrossedColumn,
                            _OneHotColumn)
  if isinstance(feature_column, supported_column_types):
    return feature_column.config
  raise TypeError("Not supported _FeatureColumn type. "
                  "Given column is {}".format(feature_column))
def create_feature_spec_for_parsing(feature_columns):
  """Builds the `features` config expected by `tf.parse_example`.

  Merges the parsing configuration of every given column into one dict.
  Typical usage example:

  ```python
  # Define features and transformations
  country = sparse_column_with_vocabulary_file("country", VOCAB_FILE)
  age = real_valued_column("age")
  click_bucket = bucketized_column(real_valued_column("historical_click_ratio"),
                                   boundaries=[i/10. for i in range(10)])
  country_x_click = crossed_column([country, click_bucket], 10)
  feature_columns = set([age, click_bucket, country_x_click])
  batch_examples = tf.parse_example(
      serialized_examples,
      create_feature_spec_for_parsing(feature_columns))
  ```

  For the example above the returned dict is:
  {"age": parsing_ops.FixedLenFeature([1], dtype=tf.float32),
   "historical_click_ratio": parsing_ops.FixedLenFeature([1], dtype=tf.float32),
   "country": parsing_ops.VarLenFeature(tf.string)}

  Args:
    feature_columns: An iterable containing all the feature columns. All items
      should be instances of classes derived from _FeatureColumn.

  Returns:
    A dict mapping feature keys to FixedLenFeature or VarLenFeature values.
  """
  parse_spec = {}
  for column in feature_columns:
    parse_spec.update(_get_feature_config(column))
  return parse_spec
def make_place_holder_tensors_for_base_features(feature_columns):
  """Returns placeholder tensors for inference.

  Builds one placeholder per base feature required by the columns: a sparse
  placeholder for VarLenFeature entries, a dense one otherwise.

  Args:
    feature_columns: An iterable containing all the feature columns. All items
      should be instances of classes derived from _FeatureColumn.

  Returns:
    A dict mapping feature keys to SparseTensors (sparse columns) or
    placeholder Tensors (dense columns).
  """
  # Get dict mapping features to FixedLenFeature or VarLenFeature values.
  parse_spec = create_feature_spec_for_parsing(feature_columns)
  placeholders = {}
  for feature_name, feature in parse_spec.items():
    if isinstance(feature, parsing_ops.VarLenFeature):
      # Sparse placeholder for sparse tensors.
      placeholders[feature_name] = array_ops.sparse_placeholder(
          feature.dtype, name="Placeholder_{}".format(feature_name))
    else:
      # Simple placeholder for dense tensors.
      placeholders[feature_name] = array_ops.placeholder(
          feature.dtype,
          shape=(None, feature.shape[0]),
          name="Placeholder_{}".format(feature_name))
  return placeholders
class _SparseIdLookupConfig(
collections.namedtuple("_SparseIdLookupConfig",
["vocabulary_file", "keys", "num_oov_buckets",
"vocab_size", "default_value"])):
"""Defines lookup configuration for a sparse feature.
An immutable object defines lookup table configuration used by
tf.feature_to_id_v2.
Attributes:
vocabulary_file: The vocabulary filename. vocabulary_file cannot be combined
with keys.
keys: A 1-D string iterable that specifies the mapping of strings to
indices. It means a feature in keys will map to it's index in keys.
num_oov_buckets: The number of out-of-vocabulary buckets. If zero all out of
vocabulary features will be ignored.
vocab_size: Number of the elements in the vocabulary.
default_value: The value to use for out-of-vocabulary feature values.
Defaults to -1.
"""
def __new__(cls,
vocabulary_file=None,
keys=None,
num_oov_buckets=0,
vocab_size=None,
default_value=-1):
return super(_SparseIdLookupConfig, cls).__new__(cls, vocabulary_file, keys,
num_oov_buckets,
vocab_size, default_value)
def _add_variable_collection(weight_collections):
  """Returns `weight_collections` with `GraphKeys.VARIABLES` merged in.

  Falsy inputs (e.g. None or an empty list) are returned unchanged; otherwise
  the result is a de-duplicated list including `ops.GraphKeys.VARIABLES`.
  """
  if not weight_collections:
    return weight_collections
  merged = set(weight_collections)
  merged.add(ops.GraphKeys.VARIABLES)
  return list(merged)
def _create_embeddings(shape,
                       dtype,
                       initializer,
                       trainable,
                       weight_collections,
                       name=None):
  """Creates an embedding variable and returns it as a list of shards.

  If called within the scope of a partitioner the variable is partitioned and
  every shard is returned; otherwise the returned list contains the single
  variable.

  Args:
    shape: shape of the full embedding. Note this is not the shape of each
      partitioned variable.
    dtype: dtype of the embedding variable.
    initializer: A callable variable initializer to be used in embedding
      variable initialization.
    trainable: If `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable).
    weight_collections: List of graph collections to which embedding variables
      are added.
    name: A string. The name of the embedding variable; defaults to "weights".

  Returns:
    A list of `tf.Variable` containing the partitioned embeddings.

  Raises:
    ValueError: If initializer is None or not callable.
  """
  if not initializer:
    raise ValueError("initializer must be defined.")
  if not callable(initializer):
    raise ValueError("initializer must be callable.")
  embedding_var = contrib_variables.model_variable(
      name="weights" if name is None else name,
      shape=shape,
      dtype=dtype,
      initializer=initializer,
      trainable=trainable,
      collections=weight_collections)
  if isinstance(embedding_var, variables.Variable):
    return [embedding_var]
  # Otherwise it is a `_PartitionedVariable`; unwrap its shard list.
  return embedding_var._get_variable_list()  # pylint: disable=protected-access
def _create_shared_embeddings(name, shape, dtype, initializer, trainable,
                              weight_collections):
  """Creates or reuses a shared embedding variable.
  If called within the scope of a partitioner, will partition the variable and
  return a list of `tf.Variable`. If no partitioner is specified, returns a list
  with just one variable.
  Args:
    name: A string specifying the name of the embedding variable.
    shape: shape of the full embedding. Note this is not the shape of each
      partitioned variable.
    dtype: type of the embedding variable.
    initializer: A variable initializer function to be used in embedding
      variable initialization.
    trainable: If `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable).
    weight_collections: List of graph collections to which embedding variables
      are added.
  Returns:
    A list of `tf.Variable` containing the partitioned embeddings.
  Raises:
    ValueError: If initializer is None or not callable, or shape of existing
      embedding does not match required shape.
  """
  if not initializer:
    raise ValueError("initializer must be defined.")
  if not callable(initializer):
    raise ValueError("initializer must be callable.")
  # A dedicated per-name graph collection tracks the shared variable so a
  # second request for the same name reuses it instead of creating a new one.
  shared_embedding_collection_name = (
      "SHARED_EMBEDDING_COLLECTION_" + name.upper())
  graph = ops.get_default_graph()
  shared_embedding_collection = (
      graph.get_collection_ref(shared_embedding_collection_name))
  if shared_embedding_collection:
    if len(shared_embedding_collection) > 1:
      raise ValueError("Collection %s can only contain one "
                       "(partitioned) variable."
                       % shared_embedding_collection_name)
    else:
      # Reuse path: validate the existing variable is shape-compatible.
      embeddings = shared_embedding_collection[0]
      if embeddings.get_shape() != shape:
        raise ValueError("The embedding variable with name {} already exists, "
                         "but its shape does not match required embedding shape"
                         " here. Please make sure to use different "
                         "shared_embedding_name for different shared "
                         "embeddings.".format(name))
  else:
    # First use of this name: create the variable and register it.
    embeddings = contrib_variables.model_variable(
        name=name,
        shape=shape,
        dtype=dtype,
        initializer=initializer,
        trainable=trainable,
        collections=weight_collections)
    graph.add_to_collection(shared_embedding_collection_name, embeddings)
  if isinstance(embeddings, variables.Variable):
    return [embeddings]
  else:  # Else it should be of type `_PartitionedVariable`.
    return embeddings._get_variable_list()  # pylint: disable=protected-access
def _create_embedding_lookup(input_tensor, weight_tensor, vocab_size, dimension,
                             weight_collections, initializer, combiner,
                             trainable, name="weights",
                             is_shared_embedding=False):
  """Creates (or reuses) an embedding variable and looks up `input_tensor`.

  Args:
    input_tensor: A `SparseTensor` which should contain sparse id to look up.
    weight_tensor: A `SparseTensor` with the same shape and indices as
      `input_tensor`, which contains the float weights corresponding to each
      sparse id, or None if all weights are assumed to be 1.0.
    vocab_size: An integer specifying the vocabulary size.
    dimension: An integer specifying the embedding vector dimension.
    weight_collections: List of graph collections to which weights are added.
    initializer: A variable initializer function to be used in embedding
      variable initialization.
    combiner: A string specifying how to reduce if the sparse column is
      multivalent. Currently "mean", "sqrtn" and "sum" are supported:
        * "sum": do not normalize features in the column
        * "mean": do l1 normalization on features in the column
        * "sqrtn": do l2 normalization on features in the column
      For more information: `tf.embedding_lookup_sparse`.
    trainable: If `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable).
    name: A string specifying the name of the embedding variable.
    is_shared_embedding: A bool indicating whether the embedding variable
      should be shared (looked up/registered in a per-name collection).

  Returns:
    A Tensor with shape [batch_size, dimension] and embedding Variable.
  """
  # Shared embeddings are created through a registry so repeated requests for
  # the same name reuse the variable; plain embeddings are always created.
  make_embeddings = (_create_shared_embeddings if is_shared_embedding
                     else _create_embeddings)
  embeddings = make_embeddings(
      name=name,
      shape=[vocab_size, dimension],
      dtype=dtypes.float32,
      initializer=initializer,
      trainable=trainable,
      weight_collections=weight_collections)
  lookup = embedding_ops.safe_embedding_lookup_sparse(
      embeddings,
      input_tensor,
      sparse_weights=weight_tensor,
      default_id=0,
      combiner=combiner,
      name=name)
  return lookup, embeddings
| {
"content_hash": "c8205adbf7e82b6577ed5b5cab905473",
"timestamp": "",
"source": "github",
"line_count": 2138,
"max_line_length": 82,
"avg_line_length": 39.97848456501403,
"alnum_prop": 0.6412827292510003,
"repo_name": "naturali/tensorflow",
"id": "924d8c4e9e057485b1f138c403f292ca585c5a57",
"size": "86163",
"binary": false,
"copies": "4",
"ref": "refs/heads/r0.11",
"path": "tensorflow/contrib/layers/python/layers/feature_column.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "159351"
},
{
"name": "C++",
"bytes": "9498060"
},
{
"name": "CMake",
"bytes": "29372"
},
{
"name": "CSS",
"bytes": "1297"
},
{
"name": "GCC Machine Description",
"bytes": "1"
},
{
"name": "HTML",
"bytes": "787519"
},
{
"name": "Java",
"bytes": "39229"
},
{
"name": "JavaScript",
"bytes": "10875"
},
{
"name": "Jupyter Notebook",
"bytes": "1773504"
},
{
"name": "Makefile",
"bytes": "12318"
},
{
"name": "Objective-C",
"bytes": "5332"
},
{
"name": "Objective-C++",
"bytes": "45585"
},
{
"name": "Protocol Buffer",
"bytes": "114983"
},
{
"name": "Python",
"bytes": "7015287"
},
{
"name": "Shell",
"bytes": "201064"
},
{
"name": "TypeScript",
"bytes": "414414"
}
],
"symlink_target": ""
} |
from rest_framework import serializers
from news.models import News
from ads.models import Ads
from skiclubs.models import Skiclubs
from pages.models import Pages
from rankings.models import Races
from widgets.models import Widgets
from angulation.models import Covers
from blogs.models import (
Bloggers,
BlogPosts,
)
class NewsSerializer(serializers.ModelSerializer):
    """Serializes the listed fields of the News model for the REST API."""
    class Meta:
        model = News
        fields = ('id', 'title', 'author', 'content', 'mag', 'date', )
class AdsSerializer(serializers.ModelSerializer):
    """Serializes the listed fields of the Ads model."""
    class Meta:
        model = Ads
        fields = ('id', 'name', 'link', 'url', 'secureUrl',
                  'horizontal', 'vertical', 'square', 'date', )
class BloggersSerializer(serializers.ModelSerializer):
    """Serializes the listed fields of the Bloggers model."""
    class Meta:
        model = Bloggers
        fields = ('id', 'name', 'biography', 'linkResults',
                  'profilePic', 'sponsors', 'ad', 'header', 'date', )
class BlogPostsSerializer(serializers.ModelSerializer):
    """Serializes the listed fields of the BlogPosts model."""
    class Meta:
        model = BlogPosts
        fields = ('id', 'title', 'content', 'blogId', 'date', )
class SkiclubsSerializer(serializers.ModelSerializer):
    """Serializes the listed fields of the Skiclubs model."""
    class Meta:
        model = Skiclubs
        fields = (
            'id', 'title', 'latitude', 'longitude', 'contact', 'description', )
class PagesSerializer(serializers.ModelSerializer):
    """Serializes the listed fields of the Pages model."""
    class Meta:
        model = Pages
        fields = ('id', 'name', 'content', 'date', )
class RacesSerializer(serializers.ModelSerializer):
    """Serializes the listed fields of the Races model."""
    class Meta:
        model = Races
        fields = ('id', 'info', 'category', 'genre', 'link',
                  'location', 'discipline', 'raceId', 'table', 'date', )
class WidgetsSerializer(serializers.ModelSerializer):
    """Serializes the listed fields of the Widgets model."""
    class Meta:
        model = Widgets
        fields = ('id', 'name', 'content', )
class CoversSerializer(serializers.ModelSerializer):
    """Serializes the listed fields of the Covers model."""
    class Meta:
        model = Covers
        fields = ('id', 'url', )
| {
"content_hash": "a2d53c3212f8cf17df65b7ec6d96af37",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 79,
"avg_line_length": 24.518987341772153,
"alnum_prop": 0.6277749096541043,
"repo_name": "Tooskich/python_core",
"id": "c7b4c286858c8db5336799a38c3925fd7e1f4a6b",
"size": "1937",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apiv1/serializers.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "75827"
}
],
"symlink_target": ""
} |
import logging
import re
# http://developer.android.com/reference/android/test/InstrumentationTestRunner.html
STATUS_CODE_START = 1
STATUS_CODE_OK = 0
STATUS_CODE_ERROR = -1
STATUS_CODE_FAILURE = -2
# http://developer.android.com/reference/android/app/Activity.html
RESULT_CODE_OK = -1
RESULT_CODE_CANCELED = 0
_INSTR_LINE_RE = re.compile(r'^\s*INSTRUMENTATION_([A-Z_]+): (.*)$')
class InstrumentationParser(object):
def __init__(self, stream):
"""An incremental parser for the output of Android instrumentation tests.
Example:
stream = adb.IterShell('am instrument -r ...')
parser = InstrumentationParser(stream)
for code, bundle in parser.IterStatus():
# do something with each instrumentation status
print 'status:', code, bundle
# do something with the final instrumentation result
code, bundle = parser.GetResult()
print 'result:', code, bundle
Args:
stream: a sequence of lines as produced by the raw output of an
instrumentation test (e.g. by |am instrument -r| or |uiautomator|).
"""
self._stream = stream
self._code = None
self._bundle = None
def IterStatus(self):
"""Iterate over statuses as they are produced by the instrumentation test.
Yields:
A tuple (code, bundle) for each instrumentation status found in the
output.
"""
def join_bundle_values(bundle):
for key in bundle:
bundle[key] = '\n'.join(bundle[key])
return bundle
bundle = {'STATUS': {}, 'RESULT': {}}
header = None
key = None
for line in self._stream:
m = _INSTR_LINE_RE.match(line)
if m:
header, value = m.groups()
key = None
if header in ['STATUS', 'RESULT'] and '=' in value:
key, value = value.split('=', 1)
bundle[header][key] = [value]
elif header == 'STATUS_CODE':
yield int(value), join_bundle_values(bundle['STATUS'])
bundle['STATUS'] = {}
elif header == 'CODE':
self._code = int(value)
else:
logging.warning('Unknown INSTRUMENTATION_%s line: %s', header, value)
elif key is not None:
bundle[header][key].append(line)
self._bundle = join_bundle_values(bundle['RESULT'])
def GetResult(self):
"""Return the final instrumentation result.
Returns:
A pair (code, bundle) with the final instrumentation result. The |code|
may be None if no instrumentation result was found in the output.
Raises:
AssertionError if attempting to get the instrumentation result before
exhausting |IterStatus| first.
"""
assert self._bundle is not None, (
'The IterStatus generator must be exhausted before reading the final'
' instrumentation result.')
return self._code, self._bundle
| {
"content_hash": "58030d41afd7f3190df232603b074ec5",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 84,
"avg_line_length": 30.75,
"alnum_prop": 0.6376811594202898,
"repo_name": "Teamxrtc/webrtc-streaming-node",
"id": "c27a2ead2b913212ffe8c91a06df9078c3f9bdb2",
"size": "2992",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "third_party/webrtc/src/chromium/src/build/android/pylib/instrumentation/instrumentation_parser.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "44"
},
{
"name": "C++",
"bytes": "221840"
},
{
"name": "HTML",
"bytes": "2383"
},
{
"name": "JavaScript",
"bytes": "37396"
},
{
"name": "Python",
"bytes": "2860"
},
{
"name": "Shell",
"bytes": "104"
}
],
"symlink_target": ""
} |
""" Answer extraction for QA """
from operator import itemgetter
from collections import defaultdict
import importlib
import string
import logging as log
import nltk
from ner import SocketNER
from inquire import config
# Maps coarse question-type codes (TREC-style taxonomy) to the names of the
# extractor submodules in this package that handle them.
question_types = {
    'DESC': 'description',
    'ENTY': 'entity',
    'ABBR': 'abbreviation',
    'HUM': 'human',
    'LOC': 'location',
    'NUM': 'numeric',
}
def get_extractor(coarse, fine):
    """Loads the extractor for a (coarse, fine) question-type pair.

    Dynamically imports the submodule mapped to `coarse` in `question_types`
    and delegates to that module's own `get_extractor`.

    Raises:
        NoExtractorError: if the coarse type is unknown or its module cannot
            be imported.
    """
    log.debug("getting coarse extractor for '{}'".format(coarse))
    # http://stackoverflow.com/questions/301134/dynamic-module-import-in-python
    try:
        module_name = __package__ + '.' + question_types[coarse]
        coarse_module = importlib.import_module(module_name)
    except (ImportError, KeyError):
        log.warn("Extractor for coarse type '{}' not implemented".format(coarse))
        raise NoExtractorError(coarse)
    return coarse_module.get_extractor(coarse, fine)
class NoExtractorError(Exception):
    """Raised when no extractor module is available for a coarse type."""
    pass
class InvalidExtractorError(Exception):
    """Raised when an extractor is constructed with invalid arguments."""
    pass
class BaseExtractor(object):
    """Shared plumbing for answer extractors.

    Holds the question and the retrieved documents, and provides
    tokenization/normalization helpers plus candidate ranking used by
    subclasses. (Python 2 code: uses unicode() and dict.iteritems().)
    """
    def __init__(self, question, docs):
        # `docs` is iterated as a sequence of document strings; `question` is
        # coerced with unicode() in preprocess/clean, so any stringable
        # object works.
        self.docs = docs
        self.question = question
        self.lem = nltk.stem.wordnet.WordNetLemmatizer()
        # unicode.translate map that deletes all ASCII punctuation.
        self.delete_punctuation_map = dict((ord(char), None) for char in string.punctuation)
    def preprocess(self, pos=False, ner=False, tok_q=True):
        """Populates tokenized/tagged views of the question and documents.

        Args:
            pos: if True, POS-tag each document into self.pos_docs.
            ner: if True, NER-tag each document into self.ne_docs via a
                NER socket server (assumes one is running locally on
                config.NER_PORT).
            tok_q: if True, builds self.tok_question — lower-cased,
                punctuation-stripped, lemmatized question tokens.
        """
        log.debug("preprocessing documents")
        if tok_q:
            self.tok_question = unicode(self.question).translate(self.delete_punctuation_map)
            self.tok_question = nltk.word_tokenize(self.tok_question.lower())
            self.tok_question = [self.lem.lemmatize(word) for word in self.tok_question]
        if pos:
            # self.tok_docs = [nltk.word_tokenize(doc) for doc in self.docs]
            self.pos_docs = [nltk.pos_tag(nltk.word_tokenize(doc)) for doc in self.docs]
        if ner:
            self.ner = SocketNER(host='localhost', port=config.NER_PORT, collapse=False)
            self.ne_docs = [self.ner.get_entities(doc) for doc in self.docs]
    def clean(self, s):
        """Returns `s` lower-cased, punctuation-stripped, and lemmatized."""
        return self.lem.lemmatize(unicode(s).translate(self.delete_punctuation_map).lower())
    def sort_candidates(self, candidates):
        """
        Takes a dict with frequencies {'a':2, 'b':4, 'c':1} and sorts them.
        Returns the list of sorted candidates with percentages.
        """
        if len(candidates) == 0:
            return None
        # Group surface forms that normalize to the same cleaned string,
        # e.g. "Paris" and "paris," are counted together below.
        # automatically creates nested dict when they don't exist
        cleaned = defaultdict(dict)
        for item, count in candidates.iteritems():
            cleaned[self.clean(item)][item] = count
        results = {}
        for item, options in cleaned.iteritems():
            # Pick the most frequent surface form of each group, but credit
            # it with the group's total count.
            selected_option, max_count, total_count = None, 0, 0
            for option, count in options.iteritems():
                total_count += count
                if count > max_count:
                    selected_option, max_count = option, count
            results[selected_option] = total_count
        results = sorted(results.iteritems(), key=itemgetter(1), reverse=True)
        total = sum(count for item, count in results)
        # trim to first 10 items
        return [(item, count/float(total)) for item, count in results][:10]
    def answer(self):
        """
        Answer should return a sorted list of answer tuples with their confidence
        """
        return "I don't know how to answer that type of question yet"
class NETagExtractor(BaseExtractor):
    """ extractor that uses named entity tagging """

    def __init__(self, question, docs, tag=None):
        super(NETagExtractor, self).__init__(question, docs)
        if not tag:
            raise InvalidExtractorError("No tag provided for NETagExtractor")
        # NE label (e.g. a Stanford NER class) whose mentions count as answers
        self.tag = tag

    def answer(self):
        """Count entities of self.tag across the docs; return ranked candidates."""
        self.preprocess(ner=True)
        candidates = {}
        # count up occurrences of the same NE
        for doc in self.ne_docs:
            for entity in doc:
                # entities come first in this output
                # don't count things that are part of the question
                # TODO: fuzzy match this so we don't get spelling errors of the
                # question as answers
                # TODO: are we including punctuation in the comparison? (see: eval-1929)
                if entity[0] == self.tag and self.clean(entity[1]) not in self.tok_question:
                    candidates[entity[1]] = candidates.get(entity[1], 0) + 1
        # sort candidates by freqency
        return self.sort_candidates(candidates)
class POSTagExtractor(BaseExtractor):
    """ extractor that uses part-of-speech tagging """

    def __init__(self, question, docs, tags=None):
        super(POSTagExtractor, self).__init__(question, docs)
        if not tags:
            raise InvalidExtractorError("No tag provided for POSTagExtractor")
        # collection of POS tags whose words count as answer candidates
        self.tags = tags

    def answer(self):
        """Count words tagged with any of self.tags; return ranked candidates."""
        self.preprocess(pos=True)
        candidates = {}
        # count up occurrences of the same POS
        for doc in self.pos_docs:
            for word in doc:
                # don't count things that are part of the question
                # TODO: fuzzy match this so we don't get spelling errors of the
                # question as answers
                if word[1] in self.tags and self.clean(word[0]) not in self.tok_question:
                    candidates[word[0]] = candidates.get(word[0], 0) + 1
        # sort candidates by freqency
        return self.sort_candidates(candidates)
| {
"content_hash": "6c349734fcfada127911394381c8f316",
"timestamp": "",
"source": "github",
"line_count": 141,
"max_line_length": 93,
"avg_line_length": 38.780141843971634,
"alnum_prop": 0.6228968544257498,
"repo_name": "jcelliott/inquire",
"id": "cec98b34138e9cfa7dceddc99c2688cd734db5fb",
"size": "5468",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "inquire/extraction/extractors.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "906"
},
{
"name": "Python",
"bytes": "53717"
},
{
"name": "Shell",
"bytes": "125"
}
],
"symlink_target": ""
} |
import yaml
from yaml import *
import logging
import os.path
import functools
log = logging.getLogger(__name__)
class ExtLoaderMeta(type):
    """Metaclass that registers the ``!include`` constructor on every class
    it creates, so loaders built with it understand the !include tag."""

    def __new__(metacls, __name__, __bases__, __dict__):
        """Create the class, then wire up the include constructor."""
        new_cls = super().__new__(metacls, __name__, __bases__, __dict__)
        new_cls.add_constructor('!include', new_cls.construct_include)
        return new_cls
class ExtLoader(yaml.Loader, metaclass=ExtLoaderMeta):
    """YAML Loader with `!include` constructor."""

    def __init__(self, stream):
        """Initialise Loader."""
        try:
            # remember the directory of the stream so relative !include
            # paths resolve against the including file's location
            self._root = os.path.split(stream.name)[0]
        except AttributeError:
            # stream has no .name (e.g. an in-memory stream): use cwd
            self._root = os.path.curdir
        super().__init__(stream)

    def construct_include(self, node):
        """Include file referenced at node."""
        filename = os.path.abspath(os.path.join(
            self._root, self.construct_scalar(node)
        ))
        extension = os.path.splitext(filename)[1].lstrip('.')

        with open(filename, 'r') as f:
            if extension in ('yaml', 'yml'):
                # nested YAML is parsed with this same loader, so included
                # files may themselves use !include
                return yaml.load(f, ExtLoader)
            else:
                # any other file type is included verbatim as one string
                return ''.join(f.readlines())
# Set ExtLoader as the default loader for this module's ``load``.
load = functools.partial(yaml.load, Loader=ExtLoader)
if __name__ == '__main__':
    # Smoke test: parse a local file with the !include-aware loader.
    with open('foo.yaml', 'r') as f:
        data = load(f)
    print(data)
| {
"content_hash": "e8526e04fd157a34590fcd48abb6f2b9",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 69,
"avg_line_length": 25.473684210526315,
"alnum_prop": 0.5757575757575758,
"repo_name": "deisi/SFG2D",
"id": "a29775e81e65c7135c52dde46dce8d0c93066e5c",
"size": "1452",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sfg2d/myyaml.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "28658"
},
{
"name": "Makefile",
"bytes": "2287"
},
{
"name": "Python",
"bytes": "435530"
}
],
"symlink_target": ""
} |
from django import forms
from datetime import timedelta
from kitabu.search.reservations import SingleSubjectManagerReservationSearch
from lanes.models import LaneReservation
from spa.forms import SearchForm, PeriodForm
class PoolReservationsSearchForm(SearchForm, PeriodForm):
    """Search form for lane reservations under a single subject manager."""

    def search(self, subject_manager):
        # delegate to kitabu's reservation search over the cleaned period
        search = SingleSubjectManagerReservationSearch(reservation_model=LaneReservation,
                                                      subject_manager=subject_manager)
        return search.search(self.cleaned_data['start'], self.cleaned_data['end'])
class ClusterSearchForm(SearchForm, PeriodForm):
    """Period search form that additionally requires a minimum size."""
    # minimum number of places required (positive integer)
    required_size = forms.fields.IntegerField(min_value=1)
class PeriodSearchForm(SearchForm, PeriodForm):
    """Period search form converting a ``minutes`` field into a
    ``required_duration`` timedelta during cleaning."""
    # minimum number of places required (positive integer)
    required_size = forms.fields.IntegerField(min_value=1)
    # requested duration, entered in minutes (default one hour)
    minutes = forms.fields.IntegerField(min_value=1, initial=60)

    def clean(self):
        cleaned_data = super(PeriodSearchForm, self).clean()
        # Django drops a field from cleaned_data when its own validation
        # failed; guard against that instead of raising KeyError so the
        # field errors are reported normally.
        if 'minutes' in cleaned_data:
            cleaned_data['required_duration'] = timedelta(
                0, cleaned_data.pop('minutes') * 60)
        return cleaned_data
| {
"content_hash": "a3fc535a824a5a66d254bbfadd59c693",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 89,
"avg_line_length": 36.666666666666664,
"alnum_prop": 0.7236363636363636,
"repo_name": "mbad/kitabu",
"id": "77b70279279f2a6051397e70ae70e613b6547557",
"size": "1100",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "example_project/pools/forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "19960"
},
{
"name": "JavaScript",
"bytes": "91"
},
{
"name": "Python",
"bytes": "178099"
},
{
"name": "Shell",
"bytes": "885"
}
],
"symlink_target": ""
} |
from croplands_api import celery
from flask import current_app
import StringIO
import requests
from PIL import Image as Img
import boto
from boto.s3.key import Key
from bs4 import BeautifulSoup
from pyproj import Proj, transform as _transform
from croplands_api.models import Image, db, Location
import datetime
from croplands_api.utils.geo import (
distance,
decode_google_polyline,
calculate_plane_perpendicular_to_travel,
get_destination,
degree_to_tile_number
)
import uuid
from flask import json
import random
from multiprocessing.pool import ThreadPool
def _build_dg_url(x, y, zoom, connect_id, request="GetTile",
layer="DigitalGlobe:ImageryTileService",
profile="Consumer_Profile"):
"""
Function builds a url for use with Digital Globe Enhanced View
:param x: tile col
:param y: tile row
:param zoom: zoom level
:param connect_id: key
:param request: wmts request type
:param layer:
:param profile: https://www.digitalglobe.com/sites/default/files/dgcs/DGCS_DeveloperGuide_WMTS.pdf
:return: url
"""
url = "https://evwhs.digitalglobe.com/earthservice/wmtsaccess?connectid=%s" % connect_id
url += "&request=%s" % request
url += "&version=1.0.0&LAYER=%s&FORMAT=image/jpeg" % layer
url += "&TileRow=%d&TileCol=%d&TileMatrixSet=EPSG:3857&TileMatrix=EPSG:3857:%d" % (y, x, zoom)
url += "&featureProfile=%s" % profile
return url
def get_image_data(img):
    """Extract geo/acquisition metadata from a DigitalGlobe tile's EXIF.

    The tile embeds an XML blob in EXIF tag 37510 (the UserComment tag),
    parsed here with BeautifulSoup.  Returns a metadata dict, or None when
    the EXIF blob is missing or unparseable.
    """
    try:
        exif = img._getexif()
        # 37510 = EXIF UserComment, carrying DigitalGlobe's XML metadata
        soup = BeautifulSoup(exif[37510])
    except Exception as e:
        print(e)
        return
    else:
        # corner coordinates arrive in EPSG:3857 and are reprojected to
        # lon/lat via transform() (defaults: 3857 -> 4326)
        corner_ne = soup.find_all("gml:uppercorner")[0].string.split()
        corner_ne_lon, corner_ne_lat = transform(corner_ne[0], corner_ne[1])
        corner_sw = soup.find_all("gml:lowercorner")[0].string.split()
        corner_sw_lon, corner_sw_lat = transform(corner_sw[0], corner_sw[1])
        format_string = "%a %b %d %H:%M:%S %Y"
        return {
            'date_acquired': datetime.datetime.strptime(
                soup.find("digitalglobe:acquisitiondate").string.replace("UTC",""), format_string),
            'date_acquired_earliest': datetime.datetime.strptime(
                soup.find("digitalglobe:earliestacquisitiondate").string.replace("UTC",""), format_string),
            'date_acquired_latest': datetime.datetime.strptime(
                soup.find("digitalglobe:latestacquisitiondate").string.replace("UTC",""), format_string),
            'image_type': 'digitalglobe' + soup.find("digitalglobe:producttype").string,
            'copyright': soup.find("digitalglobe:copyright").string,
            # 'source': soup.find("digitalglobe:source").string,
            # 'source_unit': soup.find("digitalglobe:sourceunit").string,
            # 'data_layer': soup.find("digitalglobe:datalayer").string,
            'resolution': float(soup.find_all("digitalglobe:groundsampledistance")[0].string),
            # 'ce90accuracy': soup.find("digitalglobe:ce90accuracy").string,
            # 'rmseaccuracy': soup.find("digitalglobe:rmseaccuracy").string,
            'corner_ne_lat': corner_ne_lat,
            'corner_ne_lon': corner_ne_lon,
            'corner_sw_lat': corner_sw_lat,
            'corner_sw_lon': corner_sw_lon,
            # tile center as the midpoint of the two corners
            'lat': (corner_ne_lat + corner_sw_lat) / 2,
            'lon': (corner_ne_lon + corner_sw_lon) / 2
        }
def download_image(x, y, zoom, profile):
    # NOTE(review): looks like an unfinished stub — `profile` is passed as
    # _build_dg_url's positional `connect_id` argument, and the built url is
    # never used or returned.  Confirm whether this is dead code.
    url = _build_dg_url(x, y, zoom, profile)
@celery.task(rate_limit="20/m")
def get_image(lat, lon, zoom, location_id=None, layer="DigitalGlobe:ImageryTileService",
              profile="MyDG_Color_Consumer_Profile", training_only=False):
    """ Gets a tile and saves it to s3 while also saving the important acquisition date to the db.

    Downloads a 5x5 mosaic of DigitalGlobe tiles centered on (lat, lon),
    quality-checks it, uploads the JPEG to cloud storage and records an
    Image (and optionally a new Location) row in the database.

    :param lat: center latitude in degrees
    :param lon: center longitude in degrees
    :param zoom: WMTS zoom level
    :param location_id: existing Location id; a new Location is created when None
    :param layer: NOTE(review): accepted but never used in the body — confirm
    :param profile: DigitalGlobe feature profile, also used in the storage key
    :param training_only: when creating a Location, mark it validation-only
    :return: None (all results are persisted as side effects)
    """
    # convert lat lon to tile
    x, y = degree_to_tile_number(lat, lon, zoom)
    # build url
    # NOTE(review): `url` below is never used — tiles are fetched via
    # img_url inside download(); confirm this line can be removed.
    url = _build_dg_url(x, y, zoom, current_app.config['DG_EV_CONNECT_ID'],
                        profile=profile)
    # get tile
    auth = current_app.config['DG_EV_USERNAME'], current_app.config['DG_EV_PASSWORD']
    id = current_app.config['DG_EV_CONNECT_ID']
    # mosaic dimensions in tiles (m columns x n rows of 256px tiles)
    m, n = 5,5
    mosaic = Img.new('RGB', (256 * m, 256 * n))
    tile_matrix = [[None for i in range(m)] for j in range(n)]

    def download(args):
        # fetch one tile offset (i, j) from the mosaic center and paste it
        # NOTE(review): m/2 and n/2 rely on Python 2 integer division
        i, j = args
        img_url = _build_dg_url(x + i - m/2, y + j - n/2, zoom, id, profile=profile)
        r = requests.get(img_url, auth=auth)
        # tiny responses (< 1000 bytes) are treated as "no imagery"
        if r.status_code != 200 or int(r.headers['content-length']) < 1000:
            if int(r.headers['content-length']) > 1000:
                print(r.text)
            return False
        f = StringIO.StringIO(r.content)
        tile = Img.open(f)
        mosaic.paste(tile, (i * 256, j * 256))
        tile_matrix[i][j] = {'tile': tile, 'data': get_image_data(tile)}
        return True

    # download all tiles concurrently, capped by config
    pool = ThreadPool(min(current_app.config.get('VHRI_TILE_MAX_THREADS'), m * n))
    results = pool.map(download,
                       [(i, j) for i, row in enumerate(tile_matrix) for j, col in enumerate(row)])
    pool.close()
    pool.join()
    # abort unless every tile downloaded successfully
    if sum(results) < m * n:
        print('some tiles failed to download')
        return
    # start from the metadata of the center tile
    data = tile_matrix[int(len(tile_matrix) / 2)][int(len(tile_matrix[0]) / 2)]['data']
    # adjust image data for all other tiles in mosaic
    data['resolution'] = max(
        [max([col['data']['resolution'] for col in row]) for row in tile_matrix])
    data['date_acquired_earliest'] = min(
        [min([col['data']['date_acquired_earliest'] for col in row]) for row in tile_matrix])
    # NOTE(review): uses min() for the *latest* acquisition date — max()
    # would seem correct; confirm intent before changing.
    data['date_acquired_latest'] = min(
        [min([col['data']['date_acquired_latest'] for col in row]) for row in tile_matrix])
    # mosaic bounding box from the outermost tiles
    data['corner_ne_lat'] = tile_matrix[-1][0]['data']['corner_ne_lat']
    data['corner_ne_lon'] = tile_matrix[-1][0]['data']['corner_ne_lon']
    data['corner_sw_lat'] = tile_matrix[0][-1]['data']['corner_sw_lat']
    data['corner_sw_lon'] = tile_matrix[0][-1]['data']['corner_sw_lon']
    data['url'] = "images/digital_globe/%s/%s" % (profile, str(uuid.uuid4()) + '.JPG')
    data['source'] = "VHRI"
    # quality checks
    if (data['date_acquired_latest'] - data['date_acquired_earliest']).days > 200:
        print('inconsistent acquisition date: %d days' % (
            data['date_acquired_latest'] - data['date_acquired_earliest']).days)
        return
    if data['resolution'] > 1:
        print('poor resolution: %f' % data['resolution'])
        return
    # n = 100
    # size = mosaic.size
    # white_thresh = 200
    # num_white = 0
    # for i in range(n):
    #     pixel = mosaic.getpixel((random.randrange(0,size[0]),random.randrange(0,size[1])))
    #     if sum((int(color > white_thresh) for color in pixel[:3])) >= 2:
    #         num_white += 1
    #
    # print num_white/float(n)
    # resolution is not a column on Image; drop it before constructing
    data.pop('resolution', None)
    if location_id is None:
        if training_only:
            location = Location(lat=data['lat'], lon=data['lon'], source='random', use_validation=True)
        else:
            location = Location(lat=data['lat'], lon=data['lon'], source='random')
        db.session.add(location)
        db.session.flush()
        location_id = location.id
    data['location_id'] = location_id
    # mosaic.show()
    # encode the mosaic as a compact JPEG in memory
    out = StringIO.StringIO()
    mosaic.save(out, format='JPEG', optimize=True, quality=30)
    image = Image(**data)
    db.session.add(image)
    # save image to s3
    gs = boto.connect_gs(current_app.config['GS_ACCESS_KEY'],
                         current_app.config['GS_SECRET'])
    # Get bucket
    bucket = gs.get_bucket(current_app.config['BUCKET'])
    cache_control = 'max-age=2000000'
    content_type = 'image/jpeg'
    s3_file = Key(bucket)
    s3_file.key = data['url']
    s3_file.set_metadata('cache-control', cache_control)
    s3_file.set_metadata('content-type', content_type)
    s3_file.set_contents_from_string(out.getvalue())
    s3_file.make_public()
    # save information to database
    db.session.commit()
def transform(x, y, source_projection='epsg:3857', target_projection='epsg:4326'):
    """
    Helper function for projection transform.

    :type x: unicode
    :type y: unicode
    :type source_projection: str
    :type target_projection: str
    :return: (x, y) reprojected from the source into the target projection
    """
    source = Proj(init=source_projection)
    target = Proj(init=target_projection)
    return _transform(source, target, x, y)
@celery.task(rate_limit="1/s")
def get_google_street_view_image(lat, lon, location=None):
    # NOTE(review): appears unfinished — the response is never stored or
    # returned, the `location` parameter is unused, and `fov` is formatted
    # with %s while the other placeholders are numeric.  Confirm intent.
    url = 'https://maps.googleapis.com/maps/api/streetview'
    url += '?size=400x400&location=%f,%f&fov=%s&heading=%d&pitch=%d'

    # get street view image
    response = requests.get(url % (lat, lon, 90, 90, 1))
# @celery.task(rate_limit="1/s")
# def get_snapped_points(start_lat, start_lon, end_lat, end_lon):
# api_key = current_app.config['GOOGLE_STREET_VIEW_API_KEY']
# url = 'https://roads.googleapis.com/v1/snapToRoads'
# url += '?path=%f,%f|%f,%f&key=%s&interpolate=true' % (
# start_lat, start_lon, end_lat, end_lon, api_key)
#
# response = requests.get(url)
# snapped_points = json.loads(response.data)['snappedPoints']
#
# for pt in snapped_points:
# get_google_street_view_image.delay(lat=pt['location']['latitude'],
# lon=pt['location']['longitude'])
@celery.task(rate_limit="1/s")
def get_directions(origin_lat, origin_lon, destination_lat, destination_lon):
    """Query Google Directions, sample offset points perpendicular to the
    driving route roughly every 2km, probe Street View at each, and print
    the route plus sample points as GeoJSON.  (Python 2: print statement.)
    """
    api_key = current_app.config['GOOGLE_STREET_VIEW_API_KEY']
    url = "https://maps.googleapis.com/maps/api/directions/json"
    url += "?origin=%f,%f&destination=%f,%f&avoid=highways&key=%s" % (
        origin_lat, origin_lon, destination_lat, destination_lon, api_key)

    response = requests.get(url)
    # only the first suggested route is used
    route = json.loads(response.text)['routes'][0]

    # build polyline for driving segments
    polyline = []
    for leg in route['legs']:
        for step in leg['steps']:
            if step['travel_mode'] == 'DRIVING':
                polyline.extend(decode_google_polyline(step['polyline']['points']))

    # GeoJSON uses [lon, lat] ordering, hence the pt[1], pt[0] swap
    geo_json = {
        "type": "GeometryCollection",
        "geometries": [
            {"type": "LineString",
             "coordinates": [[pt[1], pt[0]] for pt in polyline]
             },
            {
                "type": "MultiPoint",
                "coordinates": []
            }]
    }
    previous = polyline[0]
    for i in range(1, len(polyline) - 1):
        # only sample a new point once we are > 2000m from the previous one
        if distance(previous[0], previous[1], polyline[i][0], polyline[i][1]) > 2000:
            bearing = calculate_plane_perpendicular_to_travel(polyline[i - 1], polyline[i],
                                                              polyline[i + 1])
            # randomly look to either side of the road
            if random.choice([True, False]):
                bearing += 180
            offset = get_destination(polyline[i][0], polyline[i][1], bearing, 0.05)  # km
            geo_json['geometries'][1]['coordinates'].append([offset[1], offset[0]])
            previous = polyline[i]
            has_street_view_image(polyline[i][0], polyline[i][1], bearing)

    print json.dumps(geo_json)
@celery.task(rate_limit="1/s")
def has_street_view_image(lat, lon, heading):
    """Fetch a Street View frame at (lat, lon, heading) and display it.

    NOTE(review): the < 8000 byte content-length check presumably filters
    Google's "no imagery" placeholder — confirm the threshold.
    """
    url = "https://maps.googleapis.com/maps/api/streetview"
    url += "?size=400x400&location=%f,%f&fov=90&heading=%f&pitch=10" % (lat, lon, heading)
    response = requests.get(url)
    if int(response.headers['content-length']) < 8000:
        return
    # get image
    f = StringIO.StringIO(response.content)
    img = Img.open(f)
    # opens the image in an external viewer (debugging aid)
    img.show()
@celery.task
def get_street_view_coverage(x, y, z=21):
    """Download one Google Street View coverage tile and upload it to
    cloud storage under temp/google_street_view_tiles/{z}/{x}/{y}.PNG."""
    url = "http://mt1.googleapis.com/vt?hl=en-US&lyrs=svv|cb_client:apiv3&style=40,18&gl=US&x=%d&y=%d&z=%d" % (
        x, y, z)
    response = requests.get(url)
    f = StringIO.StringIO(response.content)
    # NOTE(review): img is opened but never used — possibly just validates
    # the payload decodes as an image; confirm.
    img = Img.open(f)
    # save image to s3
    gs = boto.connect_gs(current_app.config['GS_ACCESS_KEY'],
                         current_app.config['GS_SECRET'])
    # Get bucket
    bucket = gs.get_bucket(current_app.config['BUCKET'])
    # short cache lifetime: coverage changes over time
    cache_control = 'max-age=200'
    content_type = 'image/png'
    s3_file = Key(bucket)
    s3_file.key = 'temp/google_street_view_tiles/%d/%d/%d.PNG' % (z, x, y)
    s3_file.set_metadata('cache-control', cache_control)
    s3_file.set_metadata('content-type', content_type)
    s3_file.set_contents_from_string(f.getvalue())
s3_file.make_public() | {
"content_hash": "b6a2e6862ce7e81766399da423172a31",
"timestamp": "",
"source": "github",
"line_count": 343,
"max_line_length": 111,
"avg_line_length": 36.137026239067055,
"alnum_prop": 0.6106494554255748,
"repo_name": "justinwp/croplands",
"id": "a2475b2e7eb6d4b23d18773b011514f30d0e5b91",
"size": "12395",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "croplands_api/tasks/high_res_imagery.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "21974"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "PLpgSQL",
"bytes": "553"
},
{
"name": "Python",
"bytes": "236431"
}
],
"symlink_target": ""
} |
'''
Splits a compiler outputted program into the asm module and the surrounding shell. This
can be useful if you want to process the shell in some manner (e.g. minifiy it) in ways
that would be harmful to asm.js code.
'''
# NOTE: Python 2 script (uses print statements).

import sys
import shared
from asm_module import AsmModule

try:
    me, infile, out_shell, out_asm = sys.argv[:4]
except:
    # fewer than three arguments supplied
    # NOTE(review): usage text says 'emlink.py' but this script is
    # split_asm.py — confirm which name is correct.
    print >> sys.stderr, 'usage: emlink.py [input file] [shell output file] [asm output file]'
    sys.exit(1)

print 'Input file:', infile
print 'Shell output:', out_shell
print 'Asm output:', out_asm

# remove stale outputs before writing fresh ones
shared.try_delete(out_shell)
shared.try_delete(out_asm)

module = AsmModule(infile)
# the shell keeps a marker comment where the asm module was cut out
open(out_shell, 'w').write(module.pre_js + '\n// ASM_CODE\n' + module.post_js)
open(out_asm, 'w').write(module.asm_js)
| {
"content_hash": "5921cafa3f3813103a5b3620f980c333",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 92,
"avg_line_length": 26.678571428571427,
"alnum_prop": 0.714859437751004,
"repo_name": "slightperturbation/Cobalt",
"id": "39eaca00840585eeafb60267fa79fb87ebda1dcb",
"size": "771",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "ext/emsdk_portable/emscripten/1.27.0/tools/split_asm.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "7942339"
},
{
"name": "Batchfile",
"bytes": "27769"
},
{
"name": "C",
"bytes": "64431592"
},
{
"name": "C++",
"bytes": "192377551"
},
{
"name": "CMake",
"bytes": "2563457"
},
{
"name": "CSS",
"bytes": "32911"
},
{
"name": "DTrace",
"bytes": "12324"
},
{
"name": "Emacs Lisp",
"bytes": "11557"
},
{
"name": "Go",
"bytes": "132306"
},
{
"name": "Groff",
"bytes": "141757"
},
{
"name": "HTML",
"bytes": "10597275"
},
{
"name": "JavaScript",
"bytes": "7134930"
},
{
"name": "LLVM",
"bytes": "37169002"
},
{
"name": "Lua",
"bytes": "30196"
},
{
"name": "Makefile",
"bytes": "4368336"
},
{
"name": "Nix",
"bytes": "17734"
},
{
"name": "OCaml",
"bytes": "401898"
},
{
"name": "Objective-C",
"bytes": "492807"
},
{
"name": "PHP",
"bytes": "324917"
},
{
"name": "Perl",
"bytes": "27878"
},
{
"name": "Prolog",
"bytes": "1200"
},
{
"name": "Python",
"bytes": "3678053"
},
{
"name": "Shell",
"bytes": "3047898"
},
{
"name": "SourcePawn",
"bytes": "2461"
},
{
"name": "Standard ML",
"bytes": "2841"
},
{
"name": "TeX",
"bytes": "120660"
},
{
"name": "VimL",
"bytes": "13743"
}
],
"symlink_target": ""
} |
import sys
from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._child_resources_operations import build_list_request
if sys.version_info >= (3, 8):
from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
else:
from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ChildResourcesOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

        Instead, you should access the following operations through
        :class:`~azure.mgmt.resourcehealth.v2015_01_01.aio.MicrosoftResourceHealth`'s
        :attr:`child_resources` attribute.
    """

    models = _models

    def __init__(self, *args, **kwargs) -> None:
        # Auto-generated plumbing: the client injects its pipeline, config
        # and (de)serializers either positionally or by keyword.
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace
    def list(
        self, resource_uri: str, filter: Optional[str] = None, expand: Optional[str] = None, **kwargs: Any
    ) -> AsyncIterable["_models.AvailabilityStatus"]:
        """Lists the all the children and its current health status for a parent resource. Use the
        nextLink property in the response to get the next page of children current health.

        :param resource_uri: The fully qualified ID of the resource, including the resource name and
         resource type. Currently the API only support not nested parent resource type:
         /subscriptions/{subscriptionId}/resourceGroups/{resource-group-name}/providers/{resource-provider-name}/{resource-type}/{resource-name}.
         Required.
        :type resource_uri: str
        :param filter: The filter to apply on the operation. For more information please see
         https://docs.microsoft.com/en-us/rest/api/apimanagement/apis?redirectedfrom=MSDN. Default value
         is None.
        :type filter: str
        :param expand: Setting $expand=recommendedactions in url query expands the recommendedactions
         in the response. Default value is None.
        :type expand: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either AvailabilityStatus or the result of cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.resourcehealth.v2015_01_01.models.AvailabilityStatus]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2015-01-01"))  # type: Literal["2015-01-01"]
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.AvailabilityStatusListResult]

        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page: build the request from the operation template.
            if not next_link:
                request = build_list_request(
                    resource_uri=resource_uri,
                    filter=filter,
                    expand=expand,
                    api_version=api_version,
                    template_url=self.list.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)  # type: ignore
            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)  # type: ignore
                request.method = "GET"
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and hand back (next_link, items).
            deserialized = self._deserialize("AvailabilityStatusListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch one page, mapping HTTP errors to typed exceptions.
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
                request, stream=False, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(get_next, extract_data)

    list.metadata = {"url": "/{resourceUri}/providers/Microsoft.ResourceHealth/childResources"}  # type: ignore
| {
"content_hash": "21c5fc2ed095e9a22e2ab0ad081a3527",
"timestamp": "",
"source": "github",
"line_count": 148,
"max_line_length": 145,
"avg_line_length": 46.03378378378378,
"alnum_prop": 0.6440628210773521,
"repo_name": "Azure/azure-sdk-for-python",
"id": "8129ea7b351317d0a9fdab4dc4b4e6afe9d07a87",
"size": "7313",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/resourcehealth/azure-mgmt-resourcehealth/azure/mgmt/resourcehealth/v2015_01_01/aio/operations/_child_resources_operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
"""Treadmill Cloud REST api.
"""
import functools

import flask
import flask_restplus as restplus
from flask_restplus import fields

# Disable E0611: No 'name' in module
from treadmill import webutils  # pylint: disable=E0611
def handle_api_error(func):
    """Decorator converting any exception raised by the wrapped REST
    handler into an HTTP 400 response with a JSON ``message`` body.
    """
    @functools.wraps(func)  # preserve name/doc for flask-restplus docs
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            # str(e) instead of e.message: the `.message` attribute does
            # not exist on Python 3 exceptions.
            return flask.abort(flask.make_response(
                flask.jsonify(message=str(e)), 400)
            )
    return wrapper
# Old style classes, no init method.
#
# pylint: disable=W0232
def init(api, cors, impl):
"""Configures REST handlers for cloud resource."""
namespace = webutils.namespace(
api, __name__, 'Cloud REST operations'
)
server_req_model = {
'role': fields.String(description='Role', required=True),
'key': fields.String(description='Key', required=True),
'image': fields.String(description='Image', required=True),
'ipa_admin_password': fields.String(description='IPA Admin Password',
required=True),
'subnet_name': fields.String(description='Cell(Subnet) Name',
required=True),
'region': fields.String(description='Region'),
'with_api': fields.String(description='With API Flag'),
'instance_type': fields.String(description='Instance Type'),
'tm_release': fields.String(
description='Treadmill Release URL/Version'
),
'app_root': fields.String(description='Server APP Root'),
}
server_model = api.model(
'server', server_req_model
)
ldap_req_model = {
'role': fields.String(description='Role', required=True),
'key': fields.String(description='Key', required=True),
'image': fields.String(description='Image', required=True),
'ipa_admin_password': fields.String(description='IPA Admin Password',
required=True),
'subnet_name': fields.String(description='LDAP Subnet Name',
required=True),
'region': fields.String(description='Region'),
'ldap_cidr_block': fields.String(description='LDAP CIDR Block'),
'instance_type': fields.String(description='Instance Type'),
'tm_release': fields.String(
description='Treadmill Release URL/Version'
),
'app_root': fields.String(description='Server APP Root'),
}
ldap_model = api.model(
'ldap', ldap_req_model
)
cell_req_model = {
'role': fields.String(description='Role', required=True),
'key': fields.String(description='Key', required=True),
'image': fields.String(description='Image', required=True),
'subnet_name': fields.String(description='Cell(Subnet) Name',
required=True),
'ipa_admin_password': fields.String(description='IPA Admin Password',
required=True),
'region': fields.String(description='Region'),
'instance_type': fields.String(description='Instance Type'),
'tm_release': fields.String(
description='Treadmill Release URL/Version'
),
'app_root': fields.String(description='Server APP Root'),
'cidr_block': fields.String(description='Cell CIDR Block'),
}
cell_model = api.model(
'cell', cell_req_model
)
@namespace.route(
'/vpc/<vpc_name>/domain/<domain>/server/<name>'
)
@api.doc(params={
'vpc_name': 'VPC Name',
'domain': 'Domain',
'name': 'Node Instance Name Tag'
})
class _Server(restplus.Resource):
"""Treadmill Node Server"""
@webutils.post_api(
api,
cors,
req_model=server_model
)
@handle_api_error
def post(self, vpc_name, domain, name):
"Configure Worker Node"""
return impl.configure(
vpc_name, domain, name, flask.request.json
)
@webutils.delete_api(
api,
cors,
)
@handle_api_error
def delete(self, vpc_name, domain, name):
"Delete Worker Node"""
return impl.delete_server(
vpc_name, domain, name
)
@namespace.route(
'/vpc/<vpc_name>/domain/<domain>/ldap/<name>'
)
@api.doc(params={
'vpc_name': 'VPC Name',
'domain': 'Domain',
'name': 'LDAP Instance Name Tag'
})
class _LDAP(restplus.Resource):
"""Treadmill LDAP Server"""
@webutils.post_api(
api,
cors,
req_model=ldap_model
)
@handle_api_error
def post(self, vpc_name, domain, name):
"""Configure LDAP Server"""
return impl.configure(
vpc_name, domain, name, flask.request.json
)
@webutils.delete_api(
api,
cors,
)
@handle_api_error
def delete(self, vpc_name, domain, name):
"""Delete LDAP Server"""
return impl.delete_ldap(
vpc_name, domain, name
)
cell_req_parser = api.parser()
cell_req_parser.add_argument('cell_name', help='CELL(Subnet) Name',
location='args', required=False)
@namespace.route(
'/vpc/<vpc_name>/domain/<domain>/cell'
)
@api.doc(params={
'vpc_name': 'VPC Name',
'domain': 'Domain'
})
class _CellConfigure(restplus.Resource):
"""Treadmill CELL Configure"""
@webutils.get_api(
api,
cors,
parser=cell_req_parser
)
def get(self, vpc_name, domain):
"""CELL Info"""
args = cell_req_parser.parse_args()
cell_name = args.get('cell_name')
return impl.cells(domain,
vpc_name,
cell_name)
@webutils.post_api(
api,
cors,
req_model=cell_model
)
@handle_api_error
def post(self, vpc_name, domain):
"""Configure Treadmill CELL"""
return impl.configure(
vpc_name,
domain,
None,
flask.request.json
)
@namespace.route(
    '/vpc/<vpc_name>/domain/<domain>/cell/<cell_name>'
)
@api.doc(params={
    'vpc_name': 'VPC Name',
    'domain': 'Domain',
    'cell_name': 'Cell(Subnet) Name'
})
class _CellCleaner(restplus.Resource):
    """Treadmill CELL Delete

    Instance-level resource: DELETE removes the named cell.
    """
    # NOTE: decorator order now matches every other resource in this
    # module -- the API wrapper is outermost and the error handler is
    # applied directly to the handler, so API errors raised by the
    # implementation are translated before the response is serialized.
    # (Previously @handle_api_error was outermost here only.)
    @webutils.delete_api(
        api,
        cors,
    )
    @handle_api_error
    def delete(self, vpc_name, domain, cell_name):
        """Delete Treadmill CELL"""
        return impl.delete_cell(
            vpc_name,
            domain,
            cell_name
        )
# Query-string arguments for the VPC listing endpoint: the domain is
# mandatory, the VPC name is an optional filter.
vpc_req_parser = api.parser()
vpc_req_parser.add_argument('vpc_name', help='VPC Name',
                            location='args', required=False)
vpc_req_parser.add_argument('domain', help='Domain',
                            location='args', required=True)
@namespace.route('/vpc')
class _Vpc(restplus.Resource):
    """VPC

    Read-only resource listing VPC information for a domain, optionally
    filtered to a single VPC via the ``vpc_name`` query argument.
    """
    @webutils.get_api(
        api,
        cors,
        parser=vpc_req_parser
    )
    def get(self):
        """VPC Info"""
        args = vpc_req_parser.parse_args()
        # Empty-string defaults keep the impl call safe when the
        # arguments were not supplied.
        return impl.vpcs(args.get('domain', ''),
                         args.get('vpc_name', ''))
| {
"content_hash": "fcc1169f0b4d88e2ca5e46580f72c5f1",
"timestamp": "",
"source": "github",
"line_count": 244,
"max_line_length": 77,
"avg_line_length": 31.815573770491802,
"alnum_prop": 0.5231225041865258,
"repo_name": "bretttegart/treadmill",
"id": "3b1988be8860d79c8cf6823a3ccb2c49607ccfd7",
"size": "7763",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/python/treadmill/rest/api/cloud.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PowerShell",
"bytes": "3656"
},
{
"name": "Python",
"bytes": "2975485"
},
{
"name": "Ruby",
"bytes": "3712"
},
{
"name": "Shell",
"bytes": "56911"
}
],
"symlink_target": ""
} |
"""Contains the logic for `aq add sandbox`."""
import os
from aquilon.exceptions_ import AuthorizationException, ArgumentError
from aquilon.aqdb.model import Sandbox, Branch
from aquilon.worker.broker import BrokerCommand # pylint: disable=W0611
from aquilon.worker.commands.get import CommandGet
from aquilon.worker.dbwrappers.branch import add_branch, force_my_sandbox
from aquilon.worker.processes import GitRepo
class CommandAddSandbox(CommandGet):
    """Broker command implementing `aq add sandbox`."""

    required_parameters = ["sandbox"]
    # Need to override CommandGet which has this as True...
    requires_readonly = False
    default_style = "csv"
    requires_format = True

    def render(self, session, logger, dbuser, sandbox, start, get, comments,
               **_):
        """Create the sandbox branch in git and in the database.

        Raises AuthorizationException when there is no authenticated
        user, and ArgumentError when the target sandbox directory
        already exists on disk.  Unless ``get`` is False, delegates to
        CommandGet.render to check the new sandbox out.
        """
        if not dbuser:
            raise AuthorizationException("Cannot create a sandbox without an "
                                         "authenticated connection.")
        sandbox, dbauthor = force_my_sandbox(session, dbuser, sandbox)
        # Check that the user has cleared up a directory of the same
        # name; if this is not the case the branch may be created (in git)
        # and added to the database - however CommandGet will fail, rolling
        # back the database and leaving the branch created in git
        templatesdir = self.config.get("broker", "templatesdir")
        sandboxdir = os.path.join(templatesdir, dbauthor.name, sandbox)
        if os.path.exists(sandboxdir):
            raise ArgumentError("Sandbox directory %s already exists; "
                                "cannot create branch." %
                                sandboxdir)
        if not start:
            start = self.config.get("broker", "default_domain_start")
        dbstart = Branch.get_unique(session, start, compel=True)
        kingrepo = GitRepo.template_king(logger)
        # New sandboxes branch off the tip of the start branch.
        base_commit = kingrepo.ref_commit("refs/heads/" + dbstart.name)
        dbsandbox = add_branch(session, self.config, Sandbox, sandbox,
                               owner=dbuser, base_commit=base_commit,
                               comments=comments)
        session.flush()
        # Currently this will fail if the branch already exists...
        # That seems like the right behavior. It's an internal
        # consistency issue that would need to be addressed explicitly.
        kingrepo.run(["branch", dbsandbox.name, dbstart.name])
        # If we arrive here the above "git branch" command has succeeded;
        # therefore we should commit the changes to the database. If this is
        # not done, and CommandGet fails (see dir check above), then the
        # git branch will be created but the database changes rolled back.
        session.commit()
        if get is False:
            # The client knows to interpret an empty response as no action.
            return []
        return CommandGet.render(self, session=session, logger=logger,
                                 dbuser=dbuser, sandbox=dbsandbox.name)
| {
"content_hash": "0165e30ae66a5985f1cd14c0c0d9183a",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 78,
"avg_line_length": 43.69117647058823,
"alnum_prop": 0.6428811847862672,
"repo_name": "quattor/aquilon",
"id": "953ee621857c12386f8fbd2b301795fda78dd67d",
"size": "3679",
"binary": false,
"copies": "2",
"ref": "refs/heads/upstream",
"path": "lib/aquilon/worker/commands/add_sandbox.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "DIGITAL Command Language",
"bytes": "1823"
},
{
"name": "Makefile",
"bytes": "5732"
},
{
"name": "Mako",
"bytes": "4178"
},
{
"name": "PLSQL",
"bytes": "102109"
},
{
"name": "PLpgSQL",
"bytes": "8091"
},
{
"name": "Pan",
"bytes": "1058"
},
{
"name": "Perl",
"bytes": "6057"
},
{
"name": "Python",
"bytes": "5884984"
},
{
"name": "SQLPL",
"bytes": "869"
},
{
"name": "Shell",
"bytes": "33547"
},
{
"name": "Smarty",
"bytes": "4603"
}
],
"symlink_target": ""
} |
'''Functions for working with lbl files
lbl files are represented as a numpy rec array with fields:
name (a unicode string), start, stop.
For events with no explicit stop, stop = start.
typical use:
abcd_intervals = find_seq(read(this/is/a_lbl_file.lbl), 'abcd')
'''
from __future__ import unicode_literals, print_function, absolute_import, division
import numpy as np
import re
__version__ = '0.1.1'
def read(fname):
    '''Read the lbl file named fname into a numpy record array.

    The returned array has fields ``name`` (unicode), ``start`` and
    ``stop``.  Interval events are stored in the file as paired
    ``label-0`` (onset) / ``label-1`` (offset) lines; point events and
    onsets with no matching offset get ``stop == start``.

    Raises ValueError if the file contains no label lines.
    '''
    # Fix: the original leaked the file handle (open(...).readlines()).
    # The first seven lines are a fixed header; labels start on line 8.
    with open(fname, 'r') as f:
        lines = f.readlines()[7:]
    if not lines:
        raise ValueError('This lbl file is empty')
    # Each line looks like " <time> <color> <label>"; [::2] keeps
    # the time and the label, dropping the color column.
    stringpairs = [x.split()[::2] for x in lines]
    lbl = [(float(x), y) for x, y in stringpairs]
    labels = []
    times = []
    while lbl:
        start, label = lbl.pop(0)
        if len(label) > 2 and '-0' in label:
            labels.append(label[:-2])
            # find and pop the matching offset line ("<label>-1")
            matches = (i for i, (stop, offlabel)
                       in enumerate(lbl)
                       if offlabel == label[:-2] + '-1')
            stopidx = next(matches, None)
            if stopidx is not None:
                stop = lbl.pop(stopidx)[0]
            else:
                # unmatched onset: treat it as a point event
                stop = start
            times.append([start, stop])
        else:  # no associated offset
            labels.append(label)
            times.append([start, start])
    # string field is sized to the longest label present
    dtype = [('name', 'U' + str(max(len(x) for x in labels))),
             ('start', float), ('stop', float)]
    return np.array([(l, sta, sto) for l, (sta, sto) in zip(labels, times)],
                    dtype=dtype)
def find_seq(lbl_rec, sequence):
    '''Return onset/offset times of each occurrence of `sequence`.

    The record names are concatenated and searched for `sequence`;
    returns an (n, 2) array of [start, stop] rows, or [] when there is
    no match.

    NOTE(review): mapping string positions back to record indices
    assumes every label is a single character -- confirm with callers.
    '''
    # Fix: the original used the builtin reduce(), which does not exist
    # under Python 3 (it lives in functools there); ''.join is
    # equivalent for strings and works everywhere.
    labels = ''.join(lbl_rec['name'])
    # lookahead so overlapping occurrences are all found
    matches = [m.start() for m in re.finditer('(?=%s)' % (sequence), labels)]
    if not matches:
        return []
    starts = lbl_rec['start'][matches]
    stops = lbl_rec['stop'][np.array(matches) + len(sequence) - 1]
    return np.column_stack((starts, stops))
def write(fname, lbl_rec):
    ''' Write lbl_rec to an lbl file named fname.

    Point events (start == stop) become a single line; interval events
    become paired onset (``label-0``) / offset (``label-1``) lines, the
    inverse of the pairing that read() reassembles.
    '''
    # fixed lbl header; label lines are appended below it
    header = '''signal feasd
type 0
color 121
font *-fixed-bold-*-*-*-15-*-*-*-*-*-*-*
separator ;
nfields 1
#
'''
    # Fix: the original never closed the file, so buffered output could
    # be lost; the context manager guarantees flush + close.
    with open(fname, 'w') as f:
        f.write(header)
        for label, start, stop in lbl_rec:
            if start == stop:
                # point event: one line, no -0/-1 suffix
                f.write(' %.18e 121 %s\n' % (start, label))
            else:
                # interval event: onset then offset line
                f.write(' %.18e 121 %s-0\n' % (start, label))
                f.write(' %.18e 121 %s-1\n' % (stop, label))
| {
"content_hash": "07f7eabfb7f3aec03727ea0ef54c174e",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 82,
"avg_line_length": 33.28947368421053,
"alnum_prop": 0.5513833992094862,
"repo_name": "kylerbrown/lbl",
"id": "72d03ccdde4c534b8c3805489fc6a8b058aa374c",
"size": "2530",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lbl.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5106"
}
],
"symlink_target": ""
} |
"""Copyright 2022 Google LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
import create_shortcut
class TestCreateShortcut(unittest.TestCase):
    """Unit test class for the create_shortcut snippet."""

    # Fix: the test was declared as a @classmethod and called
    # cls.assertIsNotNone(cls, file_id), which only worked by accident;
    # unittest test methods are instance methods.  The docstring also
    # wrongly said "Test create_folder".
    def test_create_shortcut(self):
        """Test create_shortcut"""
        file_id = create_shortcut.create_shortcut()
        self.assertIsNotNone(file_id)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "558e5c1dfd71d9959b199b8d6d8ac64b",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 72,
"avg_line_length": 29.580645161290324,
"alnum_prop": 0.732824427480916,
"repo_name": "gsuitedevs/python-samples",
"id": "2e2f308027584d9a278c4b146bd677c6f0070e2d",
"size": "917",
"binary": false,
"copies": "6",
"ref": "refs/heads/main",
"path": "drive/snippets/drive-v2/file snippet/test_create_shortcut.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "401984"
}
],
"symlink_target": ""
} |
from coherence.backend import BackendStore
# The data itself is stored in BackendItems. They are also the first things we
# are going to create.
from coherence.backend import BackendItem
# To make the data 'renderable' we need to define the DIDLite-Class of the Media
# we are providing. For that we have a bunch of helpers that we also want to
# import
from coherence.upnp.core import DIDLLite
# And we also import the reactor, that allows us to specify an action to happen
# later
from twisted.internet import reactor
from twisted.internet.threads import deferToThread
from coherence.upnp.core.utils import ReverseProxyUriResource, ReverseProxyResource, StaticFile, BufferFile
from coherence import log
from twisted.web import server
from twisted.web.resource import Resource
from urllib import urlretrieve
from os.path import dirname,expanduser, exists, join, getsize
from os import makedirs, chmod, system, popen
import os
from popen2 import Popen3
QUICKTIME_MIMETYPE = 'video/mov'
class TestVideoProxy(ReverseProxyUriResource, log.Loggable):
    """Twisted resource that downloads an iPlayer stream into a local
    cache file (via the get_iplayer script) and serves the cached bytes
    to the client while the download may still be in progress.

    Python 2 code (print statements, cmp-style sorted below).
    """
    logCategory = 'iplayer_store'

    def __init__(self, video, uri, pid,
                 cache_directory,
                 cache_maxsize=100000000,
                 buffer_size=2000000,
                 fct=None, **kwargs):
        # video: the owning IplayerVideo; pid: the iPlayer programme id,
        # also used as the cache file name.
        ReverseProxyUriResource.__init__(self, uri)
        self.pid = pid
        self.video = video
        self.cache_directory = cache_directory
        self.cache_maxsize = int(cache_maxsize)
        self.buffer_size = int(buffer_size)
        self.downloader = None
        self.mimetype = None
        # NOTE(review): filesize is never updated after this, so the
        # "file already fully cached" branch in proxyURL (which compares
        # getsize(filepath) == self.filesize and > 0) can never be taken
        # -- confirm whether that branch is intended to be reachable.
        self.filesize = 0
        self.file_in_cache = False

    def requestFinished(self, result):
        """ self.connection is set in utils.ReverseProxyResource.render """
        self.info("ProxyStream requestFinished",result)
        if hasattr(self,'connection'):
            self.connection.transport.loseConnection()

    def render(self, request):
        # Defer the real work slightly so render can return NOT_DONE_YET
        # immediately; proxyURL writes/finishes the request later.
        self.info("VideoProxy render", request)
        self.info("VideoProxy headers:", request.getAllHeaders())
        self.info("VideoProxy id:", self.pid)
        d = request.notifyFinish()
        d.addBoth(self.requestFinished)
        reactor.callLater(0.05,self.proxyURL,request)
        return server.NOT_DONE_YET

    def proxyURL(self, request):
        self.info("request %s" % request.method)
        # download stream to cache,
        # and send it to the client in // after X bytes
        filepath = join(self.cache_directory, self.pid)
        file_is_already_available = False  # NOTE(review): assigned but never used
        if (exists(filepath)
            and getsize(filepath) == self.filesize and getsize(filepath)>0):
            # fully cached: serve the whole file
            res = self.renderFile(request, filepath)
            if isinstance(res,int):
                return res
            request.write(res)
            request.finish()
        else:
            # start (or join) the download, then serve from the growing file
            self.downloadFile(request, filepath, None)
            res = self.renderBufferFile (request, filepath, self.buffer_size)
            if res == '' and request.method != 'HEAD':
                return server.NOT_DONE_YET
            if not isinstance(res,int):
                request.write(res)
            if request.method == 'HEAD':
                request.finish()

    def renderFile(self,request,filepath):
        # Serve a completely cached file.
        self.info('Cache file available %r %r ' %(request, filepath))
        downloadedFile = StaticFile(filepath, self.mimetype)
        downloadedFile.type = QUICKTIME_MIMETYPE
        downloadedFile.encoding = None
        return downloadedFile.render(request)

    def renderBufferFile (self, request, filepath, buffer_size):
        # Try to render file(if we have enough data); otherwise retry in
        # 0.5s until at least buffer_size bytes have been downloaded.
        self.info("renderBufferFile %s" % filepath)
        rendering = False
        if exists(filepath) is True:
            filesize = getsize(filepath)
            if filesize >= buffer_size:
                rendering = True
                self.info("Render file", filepath, filesize, buffer_size)
                bufferFile = BufferFile(filepath, filesize, QUICKTIME_MIMETYPE)
                bufferFile.type = QUICKTIME_MIMETYPE
                bufferFile.encoding = None
                try:
                    return bufferFile.render(request)
                except Exception,error:
                    self.info(error)
        if request.method != 'HEAD':
            self.info('Will retry later to render buffer file')
            reactor.callLater(0.5, self.renderBufferFile, request,filepath,buffer_size)
        return ''

    def downloadFinished(self, result):
        self.info('Download finished!')
        # clear so the next request can start a fresh download
        self.downloader = None

    def gotDownloadError(self, error, request):
        self.info("Unable to download stream to file")
        self.info(request)
        self.info(error)

    def get_pid(self, request, filepath):
        # Runs in a worker thread (see downloadFile): invokes the
        # get_iplayer script synchronously to fetch the programme.
        cmd = "%s --force --pid %s --symlink %s --output %s"%(self.video.store.script_path,self.pid, filepath, dirname(filepath))
        print "render",self.pid, cmd
        system(cmd)
        #run = Popen3(cmd, True)
        #f = open(filepath,"w+b")
        #run.childerr.read(128)
        #while run.poll()==-1:
        #    data = run.fromchild.read(128)
        #    #request.write(data)
        #    f.write(data)
        #    f.flush()
        #    print "file size",getsize(filepath),len(data)
        #f.close()
        #print run.childerr.read()
        #print "render return",run.wait()

    def downloadFile(self, request, filepath, callback, *args):
        # Start the threaded download once; subsequent calls just attach
        # the optional callback to the already-running deferred.
        if (self.downloader is None):
            self.info("Proxy: download data to cache file %s" % filepath)
            self.checkCacheSize()
            self.downloader = deferToThread(self.get_pid, request, filepath)
            self.downloader.addCallback(self.downloadFinished)
            self.downloader.addErrback(self.gotDownloadError, request)
        if(callback is not None):
            self.downloader.addCallback(callback, request, filepath, *args)
        return self.downloader

    def checkCacheSize(self):
        # Evict least-recently-accessed cache files until the cache is
        # back down to 2/3 of its configured maximum.
        cache_listdir = os.listdir(self.cache_directory)
        cache_size = 0
        for filename in cache_listdir:
            path = "%s%s%s" % (self.cache_directory, os.sep, filename)
            statinfo = os.stat(path)
            cache_size += statinfo.st_size
        self.info("Cache size: %d (max is %s)" % (cache_size, self.cache_maxsize))
        if (cache_size > self.cache_maxsize):
            cache_targetsize = self.cache_maxsize * 2/3
            self.info("Cache above max size: Reducing to %d" % cache_targetsize)
            # oldest access time first
            def compare_atime(filename1, filename2):
                path1 = "%s%s%s" % (self.cache_directory, os.sep, filename1)
                path2 = "%s%s%s" % (self.cache_directory, os.sep, filename2)
                cmp = int(os.stat(path1).st_atime - os.stat(path2).st_atime)
                return cmp
            # NOTE: cmp-style sorted() is Python-2-only.
            cache_listdir = sorted(cache_listdir,compare_atime)
            while (cache_size > cache_targetsize):
                filename = cache_listdir.pop(0)
                path = "%s%s%s" % (self.cache_directory, os.sep, filename)
                cache_size -= os.stat(path).st_size
                os.remove(path)
                self.info("removed %s" % filename)
        self.info("new cache size is %d" % cache_size)
########## The models
# After the download and parsing of the data is done, we want to save it. In
# this case, we want to fetch the videos and store their URL and the title of
# the image. That is the IplayerVideo class:
class IplayerVideo(BackendItem):
    """One playable iPlayer programme, served through TestVideoProxy."""
    logCategory = 'iplayer_store'
    # We inherit from BackendItem as it already contains a lot of helper methods
    # and implementations. For this simple example, we only have to fill the
    # item with data.

    def __init__(self, parent, id, title, pid):
        self.parentid = parent.id # used to be able to 'go back'
        self.parent = parent
        # walk up the container chain to find the owning BackendStore
        top_parent = parent
        while not isinstance(top_parent, BackendStore):
            top_parent = top_parent.parent
        self.store = top_parent
        self.update_id = 0
        self.id = id # each item has its own and unique id
        self.url = self.store.urlbase + pid
        # the proxy resource that downloads/caches and serves the stream
        self.location = TestVideoProxy(self, self.url, pid,
                                       self.store.cache_directory, self.store.cache_maxsize,self.store.buffer_size)
        self.name = unicode(title,"utf8","ignore") # the title of the picture. Inside
                                                   # coherence this is called 'name'
        # Item.item is a special thing. This is used to explain the client what
        # kind of data this is. For e.g. A VideoItem or a MusicTrack.
        self.item = DIDLLite.VideoItem(id, parent.id, self.name)
        # each Item.item has to have one or more Resource objects
        # these hold detailed information about the media data
        # and can represent variants of it (different sizes, transcoded formats)
        res = DIDLLite.Resource(self.url, 'http-get:*:video/quicktime:*')
        res.size = None #FIXME: we should have a size here
        # and a resolution entry would be nice too
        self.item.res.append(res)

    def get_path(self):
        return self.url
class IplayerContainer(BackendItem):
    """A browsable folder in the iPlayer content tree.

    Holds child BackendItems (channels, series or videos) and exposes
    them through the standard BackendItem accessor methods.
    """

    logCategory = 'iplayer_store'

    def __init__(self, parent, id, name):
        self.parent = parent
        # the root container has no parent, hence no parent id
        self.parent_id = parent.id if parent is not None else None
        self.id = id
        # decode the title defensively; broken bytes are dropped
        self.name = unicode(name, "utf8", "ignore")
        # this mimetype marks the item as one that contains children
        self.mimetype = 'directory'
        # bumped on every refresh so clients can detect content changes
        self.update_id = 0
        # the children themselves; filled in by the store's parser
        self.children = []
        # DIDL-Lite representation announced to UPnP clients
        self.item = DIDLLite.Container(id, self.parent_id, self.name)
        self.item.childCount = None  # set as soon as children exist

    def get_children(self, start=0, end=0):
        """Return the children in [start:end]; end == 0 means 'to the end'."""
        return self.children[start:end] if end != 0 else self.children[start:]

    def get_child_count(self):
        return len(self.children)

    def get_item(self):
        return self.item

    def get_name(self):
        return self.name

    def get_id(self):
        return self.id
########## The server
# As already said before the implementation of the server is done in an
# inheritance of a BackendStore. This is where the real code happens (usually).
# In our case this would be: downloading the page, parsing the content, saving
# it in the models and returning them on request.
class IplayerStore(BackendStore):
    """Coherence MediaServer backend exposing BBC iPlayer content,
    fetched via the external get_iplayer script."""
    logCategory = 'iplayer_store'
    # this *must* be set. Because the (most used) MediaServer Coherence also
    # allows other kind of Backends (like remote lights).
    implements = ['MediaServer']
    # where to fetch the helper script from, and where it is installed
    script_url = "http://linuxcentre.net/get_iplayer/get_iplayer"
    script_path = expanduser("~/.local/share/coherence/get_iplayer")
    # as we are going to build a (very small) tree with the items, we need to
    # define the first (the root) item:
    ROOT_ID = 0
    id = ROOT_ID

    def __init__(self, server, *args, **kwargs):
        # first we initialize our heritage
        BackendStore.__init__(self,server,**kwargs)
        self.cache_directory = kwargs.get('cache_directory', '/tmp/coherence-cache')
        if not exists(self.cache_directory):
            makedirs(self.cache_directory)
        self.cache_maxsize = kwargs.get('cache_maxsize', 100000000)
        self.buffer_size = kwargs.get('buffer_size', 750000)
        # When a Backend is initialized, the configuration is given as keyword
        # arguments to the initialization. We receive it here as a dictionary
        # and allow some values to be set:
        # the name of the MediaServer as it appears in the network
        self.name = kwargs.get('name', 'iPlayer')
        # timeout between updates in hours:
        self.refresh = int(kwargs.get('refresh', 1)) * (60 *60)
        # internally used to have a new id for each item
        self.next_id = 1000
        # the UPnP device that's hosting that backend, that's already done
        # in the BackendStore.__init__, just left here the sake of completeness
        self.server = server
        # initialize our Iplayer container (no parent, this is the root)
        self.container = IplayerContainer(None, self.ROOT_ID, "iPlayer")
        # but as we also have to return them on 'get_by_id', we have our local
        # store of videos per id:
        self.everything = {}
        # we tell that if an XBox sends a request for videos we'll
        # map the WMC id of that request to our local one
        self.wmc_mapping = {'15': 0}
        # and trigger an update of the data
        dfr = self.update_data()
        # So, even though the initialize is kind of done, Coherence does not yet
        # announce our Media Server.
        # Coherence does wait for signal send by us that we are ready now.
        # And we don't want that to happen as long as we don't have succeeded
        # in fetching some first data, so we delay this signaling after the update is done:
        dfr.addCallback(self.init_completed)
        dfr.addCallback(self.queue_update)

    def get_by_id(self, id):
        """Resolve an item by its id (int, numeric string or pid)."""
        print "asked for", id, type(id)
        # what ever we are asked for, we want to return the container only
        if isinstance(id, basestring):
            id = id.split('@',1)[0]
            try:
                id = int(id)
            except ValueError:
                pass
        if id == self.ROOT_ID:
            return self.container
        val = self.everything.get(id,None)
        # NOTE(review): val is None for unknown ids, so val.name below
        # raises AttributeError instead of returning None -- confirm
        # ids are always known or guard this debug print.
        print id,val,val.name
        return val

    def upnp_init(self):
        # after the signal was triggered, this method is called by coherence and
        # from now on self.server is existing and we can do
        # the necessary setup here
        # that allows us to specify our server options in more detail.
        # here we define what kind of media content we do provide
        # mostly needed to make some naughty DLNA devices behave
        # will probably move into Coherence internals one day
        self.server.connection_manager_server.set_variable( \
            0, 'SourceProtocolInfo', ['internal:*:video/quicktime:*',
                                      'http-get:*:video/quicktime:*'], default=True)
        # and as it was done after we fetched the data the first time
        # we want to take care about the server wide updates as well
        self._update_container()

    def _update_container(self, result=None):
        # we need to inform Coherence about these changes
        # again this is something that will probably move
        # into Coherence internals one day
        if self.server:
            self.server.content_directory_server.set_variable(0,
                    'SystemUpdateID', self.update_id)
            value = (self.ROOT_ID,self.container.update_id)
            self.server.content_directory_server.set_variable(0,
                    'ContainerUpdateIDs', value)
        return result

    def update_loop(self):
        # in the loop we want to call update_data
        dfr = self.update_data()
        # after it was done we want to take care about updating
        # the container
        dfr.addCallback(self._update_container)
        # in ANY case queue an update of the data
        dfr.addBoth(self.queue_update)

    def get_schedule(self):
        """Fetch (and install, on first use) get_iplayer and return the
        programme listing lines.  Runs in a worker thread."""
        if not exists(dirname(self.script_path)):
            makedirs(dirname(self.script_path))
        if not exists(self.script_path):
            urlretrieve(self.script_url, self.script_path)
            # NOTE: 0755 is a Python-2-only octal literal
            chmod(self.script_path, 0755)
        ret = system("%s --plugins-update"%self.script_path)
        assert ret == 0
        data = popen("%s --listformat '<name>;<pid>;<episode>;<channel>'"%self.script_path).readlines()
        return data

    def update_data(self):
        # trigger an update of the data
        # fetch the listing in a thread (it shells out to get_iplayer)
        dfr = deferToThread(self.get_schedule)
        # then parse the data into our models
        dfr.addCallback(self.parse_data)
        return dfr

    def parse_data(self, data):
        """Rebuild the channel/series/video tree from the listing lines."""
        # reset the childrens list of the container and the local storage
        self.container.children = []
        self.videos = {}
        self.channels = {}
        self.series = {}
        # NOTE(review): debug dump into the current working directory;
        # the file handle is never closed explicitly.
        open("dump","w").write(" ".join(data))
        post_match = False
        for line in data:
            # skip everything up to and including the "Matches:" marker
            if not post_match:
                if line == "Matches:\n":
                    post_match = True
                continue
            if len(line.strip()) == 0: # blank after list
                break
            (series,pid,episode,channel) = line.strip().split(";")
            if channel not in self.channels:
                self.channels[channel] = IplayerContainer(self, self.next_id, channel)
                self.everything[self.next_id] = self.channels[channel]
                self.container.children.append(self.channels[channel])
                self.next_id += 1
            # NOTE(review): series containers are keyed by name only, so
            # a series name appearing on two channels is filed under the
            # first channel seen -- confirm this is acceptable.
            if series not in self.series:
                self.series[series] = IplayerContainer(self.channels[channel], self.next_id, series)
                self.everything[self.next_id] = self.series[series]
                self.channels[channel].children.append(self.series[series])
                self.channels[channel].update_id +=1
                self.next_id +=1
            video = IplayerVideo(self.series[series], self.next_id, "%s - %s"%(series,episode), pid)
            self.series[series].children.append(video)
            self.series[series].update_id +=1
            # items are reachable both by numeric id and by pid
            self.everything[self.next_id] = video
            self.everything[pid] = video
            self.next_id += 1
        # and increase the container update id and the system update id
        # so that the clients can refresh with the new data
        self.container.update_id += 1
        self.update_id += 1

    def queue_update(self, error_or_failure):
        # We use the reactor to queue another updating of our data
        print "error or failure",error_or_failure
        reactor.callLater(self.refresh, self.update_loop)
# Manual smoke test: build the store (which fetches the schedule) and
# run the reactor without a hosting Coherence server.
if __name__ == '__main__':
    from twisted.internet import reactor
    f = IplayerStore(None)
    reactor.run()
| {
"content_hash": "664a7a4d2f8fa7dd4b83216ba0a4011c",
"timestamp": "",
"source": "github",
"line_count": 499,
"max_line_length": 123,
"avg_line_length": 33.38276553106213,
"alnum_prop": 0.7118501620842839,
"repo_name": "palfrey/coherence",
"id": "de738ad7902a8299c7fa691736511f6fe8f30fd7",
"size": "17086",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "coherence/backends/iplayer_storage.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1240247"
},
{
"name": "Shell",
"bytes": "1566"
}
],
"symlink_target": ""
} |
import copy
from oslo_log import log as logging
import six
import webob.exc
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova import exception
from nova.i18n import _LE
ALIAS = 'extensions'
LOG = logging.getLogger(__name__)
authorize = extensions.os_compute_authorizer(ALIAS)
# NOTE(cyeoh): The following mappings are currently incomplete
# Having a v2.1 extension loaded can imply that several v2 extensions
# should also appear to be loaded (although they no longer do in v2.1)
v21_to_v2_extension_list_mapping = {
'os-quota-sets': [{'name': 'UserQuotas', 'alias': 'os-user-quotas'},
{'name': 'ExtendedQuotas',
'alias': 'os-extended-quotas'}],
'os-cells': [{'name': 'CellCapacities', 'alias': 'os-cell-capacities'}],
'os-baremetal-nodes': [{'name': 'BareMetalExtStatus',
'alias': 'os-baremetal-ext-status'}],
'os-block-device-mapping': [{'name': 'BlockDeviceMappingV2Boot',
'alias': 'os-block-device-mapping-v2-boot'}],
'os-cloudpipe': [{'name': 'CloudpipeUpdate',
'alias': 'os-cloudpipe-update'}],
'servers': [{'name': 'Createserverext', 'alias': 'os-create-server-ext'},
{'name': 'ExtendedIpsMac', 'alias': 'OS-EXT-IPS-MAC'},
{'name': 'ExtendedIps', 'alias': 'OS-EXT-IPS'},
{'name': 'ServerListMultiStatus',
'alias': 'os-server-list-multi-status'},
{'name': 'ServerSortKeys', 'alias': 'os-server-sort-keys'},
{'name': 'ServerStartStop', 'alias': 'os-server-start-stop'}],
'flavors': [{'name': 'FlavorDisabled', 'alias': 'OS-FLV-DISABLED'},
{'name': 'FlavorExtraData', 'alias': 'OS-FLV-EXT-DATA'},
{'name': 'FlavorSwap', 'alias': 'os-flavor-swap'}],
'os-services': [{'name': 'ExtendedServicesDelete',
'alias': 'os-extended-services-delete'},
{'name': 'ExtendedServices', 'alias':
'os-extended-services'}],
'os-evacuate': [{'name': 'ExtendedEvacuateFindHost',
'alias': 'os-extended-evacuate-find-host'}],
'os-floating-ips': [{'name': 'ExtendedFloatingIps',
'alias': 'os-extended-floating-ips'}],
'os-hypervisors': [{'name': 'ExtendedHypervisors',
'alias': 'os-extended-hypervisors'},
{'name': 'HypervisorStatus',
'alias': 'os-hypervisor-status'}],
'os-networks': [{'name': 'ExtendedNetworks',
'alias': 'os-extended-networks'}],
'os-rescue': [{'name': 'ExtendedRescueWithImage',
'alias': 'os-extended-rescue-with-image'}],
'os-extended-status': [{'name': 'ExtendedStatus',
'alias': 'OS-EXT-STS'}],
'os-virtual-interfaces': [{'name': 'ExtendedVIFNet',
'alias': 'OS-EXT-VIF-NET'}],
'os-used-limits': [{'name': 'UsedLimitsForAdmin',
'alias': 'os-used-limits-for-admin'}],
'os-volumes': [{'name': 'VolumeAttachmentUpdate',
'alias': 'os-volume-attachment-update'}],
'os-server-groups': [{'name': 'ServerGroupQuotas',
'alias': 'os-server-group-quotas'}],
}
# v2.1 plugins which should never appear in the v2 extension list
# This should be the v2.1 alias, not the V2.0 alias
v2_extension_suppress_list = ['servers', 'images', 'versions', 'flavors',
'os-block-device-mapping-v1', 'os-consoles',
'extensions', 'image-metadata', 'ips', 'limits',
'server-metadata'
]
# v2.1 plugins which should appear under a different name in v2
v21_to_v2_alias_mapping = {
'image-size': 'OS-EXT-IMG-SIZE',
'os-remote-consoles': 'os-consoles',
'os-disk-config': 'OS-DCF',
'os-extended-availability-zone': 'OS-EXT-AZ',
'os-extended-server-attributes': 'OS-EXT-SRV-ATTR',
'os-multinic': 'NMN',
'os-scheduler-hints': 'OS-SCH-HNT',
'os-server-usage': 'OS-SRV-USG',
'os-instance-usage-audit-log': 'os-instance_usage_audit_log',
}
# V2.1 does not support XML but we need to keep an entry in the
# /extensions information returned to the user for backwards
# compatibility
FAKE_XML_URL = "http://docs.openstack.org/compute/ext/fake_xml"
FAKE_UPDATED_DATE = "2014-12-03T00:00:00Z"
class FakeExtension(object):
    """Minimal stand-in for a loaded API extension.

    Used to surface v2-only extension names in the extension list;
    only the attributes read by ExtensionInfoController are populated.
    """

    def __init__(self, name, alias):
        # mirror the attribute surface of a real extension object
        self.name, self.alias = name, alias
        self.__doc__ = ""
        self.version = -1
class ExtensionInfoController(wsgi.Controller):
    """Expose the loaded API extensions, adjusted so the v2.1 list is
    presented with v2-compatible names and aliases."""

    def __init__(self, extension_info):
        self.extension_info = extension_info

    def _translate(self, ext):
        """Build the serializable dict for one extension."""
        ext_data = {}
        ext_data["name"] = ext.name
        ext_data["alias"] = ext.alias
        ext_data["description"] = ext.__doc__
        # v2.1 has no XML support; fixed values keep the v2 schema intact
        ext_data["namespace"] = FAKE_XML_URL
        ext_data["updated"] = FAKE_UPDATED_DATE
        ext_data["links"] = []
        return ext_data

    def _create_fake_ext(self, name, alias):
        # Fix: parameters were previously declared as (alias, name) while
        # every caller passed (name, alias); behaviour was correct only
        # because the body swapped them back.  Names now match
        # FakeExtension.__init__; behaviour is unchanged.
        return FakeExtension(name, alias)

    def _get_extensions(self, context):
        """Filter extensions list based on policy."""
        discoverable_extensions = dict()
        for alias, ext in six.iteritems(self.extension_info.get_extensions()):
            # soft authorizer: a missing 'discoverable' rule does not
            # raise.  Renamed from 'authorize' to stop shadowing the
            # module-level authorizer.
            soft_authorize = extensions.os_compute_soft_authorizer(alias)
            if soft_authorize(context, action='discoverable'):
                discoverable_extensions[alias] = ext
            else:
                LOG.debug("Filter out extension %s from discover list",
                          alias)
        # Add fake v2 extensions to list
        extra_exts = {}
        for alias in discoverable_extensions:
            if alias in v21_to_v2_extension_list_mapping:
                for extra_ext in v21_to_v2_extension_list_mapping[alias]:
                    extra_exts[extra_ext["alias"]] = self._create_fake_ext(
                        extra_ext["name"], extra_ext["alias"])
        discoverable_extensions.update(extra_exts)
        # Suppress extensions which we don't want to see in v2
        for suppress_ext in v2_extension_suppress_list:
            try:
                del discoverable_extensions[suppress_ext]
            except KeyError:
                pass
        # v2.1 to v2 extension name mapping
        for rename_ext in v21_to_v2_alias_mapping:
            if rename_ext in discoverable_extensions:
                new_name = v21_to_v2_alias_mapping[rename_ext]
                # deep copy so the mutated alias does not leak back into
                # the registry's extension object
                mod_ext = copy.deepcopy(
                    discoverable_extensions.pop(rename_ext))
                mod_ext.alias = new_name
                discoverable_extensions[new_name] = mod_ext
        return discoverable_extensions

    @extensions.expected_errors(())
    def index(self, req):
        """Return the discoverable extensions, sorted by alias."""
        context = req.environ['nova.context']
        authorize(context)
        sorted_ext_list = sorted(
            six.iteritems(self._get_extensions(context)))
        # local renamed from 'extensions' to stop shadowing the imported
        # nova.api.openstack.extensions module
        ext_list = []
        for _alias, ext in sorted_ext_list:
            ext_list.append(self._translate(ext))
        return dict(extensions=ext_list)

    @extensions.expected_errors(404)
    def show(self, req, id):
        """Return one extension looked up by alias, or 404."""
        context = req.environ['nova.context']
        authorize(context)
        try:
            # NOTE(dprince): the extensions alias is used as the 'id' for show
            ext = self._get_extensions(context)[id]
        except KeyError:
            raise webob.exc.HTTPNotFound()
        return dict(extension=self._translate(ext))
class ExtensionInfo(extensions.V3APIExtensionBase):
    """Extension information."""

    name = "Extensions"
    alias = ALIAS
    version = 1

    def get_resources(self):
        # Mount the controller under /extensions; individual entries are
        # addressed as 'extension' members.
        resources = [
            extensions.ResourceExtension(
                ALIAS, ExtensionInfoController(self.extension_info),
                member_name='extension')]
        return resources

    def get_controller_extensions(self):
        # This plugin extends no other controllers.
        return []
class LoadedExtensionInfo(object):
    """Keep track of all loaded API extensions, keyed by alias."""

    def __init__(self):
        self.extensions = {}

    def register_extension(self, ext):
        """Validate *ext* and add it to the registry.

        Returns False when validation fails; raises NovaException when
        an extension with the same alias was already registered.
        """
        if not self._check_extension(ext):
            return False
        if ext.alias in self.extensions:
            raise exception.NovaException("Found duplicate extension: %s"
                                          % ext.alias)
        self.extensions[ext.alias] = ext
        return True

    def _check_extension(self, extension):
        """Return True when the extension exposes the required methods."""
        try:
            extension.is_valid()
        except AttributeError:
            LOG.exception(_LE("Exception loading extension"))
            return False
        return True

    def get_extensions(self):
        """Return the alias -> extension mapping."""
        return self.extensions
| {
"content_hash": "d16171369879193218d1a604a4652e09",
"timestamp": "",
"source": "github",
"line_count": 235,
"max_line_length": 78,
"avg_line_length": 38.11489361702127,
"alnum_prop": 0.5770905437088311,
"repo_name": "watonyweng/nova",
"id": "069aaf52d68b856eaabcff375bf2a2c198d5703c",
"size": "9559",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "nova/api/openstack/compute/extension_info.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "16467436"
},
{
"name": "Shell",
"bytes": "20716"
},
{
"name": "Smarty",
"bytes": "285755"
}
],
"symlink_target": ""
} |
"""Test bitcoind with different proxy configuration.
Test plan:
- Start bitcoind's with different proxy configurations
- Use addnode to initiate connections
- Verify that proxies are connected to, and the right connection command is given
- Proxy configurations to test on bitcoind side:
- `-proxy` (proxy everything)
- `-onion` (proxy just onions)
- `-proxyrandomize` Circuit randomization
- Proxy configurations to test on proxy side,
- support no authentication (other proxy)
- support no authentication + user/pass authentication (Tor)
- proxy on IPv6
- Create various proxies (as threads)
- Create nodes that connect to them
- Manipulate the peer connections using addnode (onetry) and observe effects
- Test the getpeerinfo `network` field for the peer
addnode connect to IPv4
addnode connect to IPv6
addnode connect to onion
addnode connect to generic DNS name
- Test getnetworkinfo for each node
"""
import socket
import os
from test_framework.socks5 import Socks5Configuration, Socks5Command, Socks5Server, AddressType
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
PORT_MIN,
PORT_RANGE,
assert_equal,
)
from test_framework.netutil import test_ipv6_local
# Base port for the SOCKS5 servers this test creates (offsets from it are
# derived per-proxy in ProxyTest.setup_nodes).
RANGE_BEGIN = PORT_MIN + 2 * PORT_RANGE  # Start after p2p and rpc ports

# Networks returned by RPC getpeerinfo, defined in src/netbase.cpp::GetNetworkName()
NET_UNROUTABLE = "unroutable"
NET_IPV4 = "ipv4"
NET_IPV6 = "ipv6"
NET_ONION = "onion"

# Networks returned by RPC getnetworkinfo, defined in src/rpc/net.cpp::GetNetworksInfo()
NETWORKS = frozenset({NET_IPV4, NET_IPV6, NET_ONION})
class ProxyTest(BitcoinTestFramework):
    """Run four bitcoind nodes against local SOCKS5 proxies and verify that
    the right proxy is used (and the right SOCKS5 command sent) for each
    destination network."""

    def set_test_params(self):
        self.num_nodes = 4
        self.setup_clean_chain = True

    def setup_nodes(self):
        """Start the SOCKS5 servers, then start nodes pointing at them."""
        self.have_ipv6 = test_ipv6_local()
        # Create two proxies on different ports
        # ... one unauthenticated
        self.conf1 = Socks5Configuration()
        # Port offset by pid so parallel test runs don't collide.
        self.conf1.addr = ('127.0.0.1', RANGE_BEGIN + (os.getpid() % 1000))
        self.conf1.unauth = True
        self.conf1.auth = False
        # ... one supporting authenticated and unauthenticated (Tor)
        self.conf2 = Socks5Configuration()
        self.conf2.addr = ('127.0.0.1', RANGE_BEGIN + 1000 + (os.getpid() % 1000))
        self.conf2.unauth = True
        self.conf2.auth = True
        if self.have_ipv6:
            # ... one on IPv6 with similar configuration
            self.conf3 = Socks5Configuration()
            self.conf3.af = socket.AF_INET6
            self.conf3.addr = ('::1', RANGE_BEGIN + 2000 + (os.getpid() % 1000))
            self.conf3.unauth = True
            self.conf3.auth = True
        else:
            self.log.warning("Testing without local IPv6 support")

        self.serv1 = Socks5Server(self.conf1)
        self.serv1.start()
        self.serv2 = Socks5Server(self.conf2)
        self.serv2.start()
        if self.have_ipv6:
            self.serv3 = Socks5Server(self.conf3)
            self.serv3.start()

        # Note: proxies are not used to connect to local nodes. This is because the proxy to
        # use is based on CService.GetNetwork(), which returns NET_UNROUTABLE for localhost.
        args = [
            ['-listen', '-proxy=%s:%i' % (self.conf1.addr), '-proxyrandomize=1'],
            ['-listen', '-proxy=%s:%i' % (self.conf1.addr), '-onion=%s:%i' % (self.conf2.addr), '-proxyrandomize=0'],
            ['-listen', '-proxy=%s:%i' % (self.conf2.addr), '-proxyrandomize=1'],
            []
        ]
        if self.have_ipv6:
            # Node 3 uses the IPv6 proxy and disables onion support.
            args[3] = ['-listen', '-proxy=[%s]:%i' % (self.conf3.addr), '-proxyrandomize=0', '-noonion']
        self.add_nodes(self.num_nodes, extra_args=args)
        self.start_nodes()

    def network_test(self, node, addr, network):
        """Assert that the peer at `addr` is reported on `network` by
        getpeerinfo."""
        for peer in node.getpeerinfo():
            if peer["addr"] == addr:
                assert_equal(peer["network"], network)

    def node_test(self, node, proxies, auth, test_onion=True):
        """Make the node dial IPv4, IPv6, onion and DNS-name addresses and
        check each SOCKS5 command received by the corresponding proxy in
        `proxies` (one per destination type).  When `auth` is False, also
        check no credentials were sent.  Returns the Socks5Command list.
        """
        rv = []
        addr = "15.61.23.23:1234"
        self.log.debug("Test: outgoing IPv4 connection through node for address {}".format(addr))
        node.addnode(addr, "onetry")
        cmd = proxies[0].queue.get()
        assert isinstance(cmd, Socks5Command)
        # Note: bitcoind's SOCKS5 implementation only sends atyp DOMAINNAME, even if connecting directly to IPv4/IPv6
        assert_equal(cmd.atyp, AddressType.DOMAINNAME)
        assert_equal(cmd.addr, b"15.61.23.23")
        assert_equal(cmd.port, 1234)
        if not auth:
            assert_equal(cmd.username, None)
            assert_equal(cmd.password, None)
        rv.append(cmd)
        self.network_test(node, addr, network=NET_IPV4)

        if self.have_ipv6:
            addr = "[1233:3432:2434:2343:3234:2345:6546:4534]:5443"
            self.log.debug("Test: outgoing IPv6 connection through node for address {}".format(addr))
            node.addnode(addr, "onetry")
            cmd = proxies[1].queue.get()
            assert isinstance(cmd, Socks5Command)
            # Note: bitcoind's SOCKS5 implementation only sends atyp DOMAINNAME, even if connecting directly to IPv4/IPv6
            assert_equal(cmd.atyp, AddressType.DOMAINNAME)
            assert_equal(cmd.addr, b"1233:3432:2434:2343:3234:2345:6546:4534")
            assert_equal(cmd.port, 5443)
            if not auth:
                assert_equal(cmd.username, None)
                assert_equal(cmd.password, None)
            rv.append(cmd)
            self.network_test(node, addr, network=NET_IPV6)

        if test_onion:
            addr = "bitcoinostk4e4re.onion:8333"
            self.log.debug("Test: outgoing onion connection through node for address {}".format(addr))
            node.addnode(addr, "onetry")
            cmd = proxies[2].queue.get()
            assert isinstance(cmd, Socks5Command)
            assert_equal(cmd.atyp, AddressType.DOMAINNAME)
            assert_equal(cmd.addr, b"bitcoinostk4e4re.onion")
            assert_equal(cmd.port, 8333)
            if not auth:
                assert_equal(cmd.username, None)
                assert_equal(cmd.password, None)
            rv.append(cmd)
            self.network_test(node, addr, network=NET_ONION)

        addr = "node.noumenon:8333"
        self.log.debug("Test: outgoing DNS name connection through node for address {}".format(addr))
        node.addnode(addr, "onetry")
        cmd = proxies[3].queue.get()
        assert isinstance(cmd, Socks5Command)
        assert_equal(cmd.atyp, AddressType.DOMAINNAME)
        assert_equal(cmd.addr, b"node.noumenon")
        assert_equal(cmd.port, 8333)
        if not auth:
            assert_equal(cmd.username, None)
            assert_equal(cmd.password, None)
        rv.append(cmd)
        self.network_test(node, addr, network=NET_UNROUTABLE)

        return rv

    def run_test(self):
        # basic -proxy
        self.node_test(self.nodes[0], [self.serv1, self.serv1, self.serv1, self.serv1], False)

        # -proxy plus -onion
        self.node_test(self.nodes[1], [self.serv1, self.serv1, self.serv2, self.serv1], False)

        # -proxy plus -onion, -proxyrandomize
        rv = self.node_test(self.nodes[2], [self.serv2, self.serv2, self.serv2, self.serv2], True)
        # Check that credentials as used for -proxyrandomize connections are unique
        credentials = set((x.username, x.password) for x in rv)
        assert_equal(len(credentials), len(rv))

        if self.have_ipv6:
            # proxy on IPv6 localhost
            self.node_test(self.nodes[3], [self.serv3, self.serv3, self.serv3, self.serv3], False, False)

        def networks_dict(d):
            # Re-key the getnetworkinfo 'networks' list by network name.
            r = {}
            for x in d['networks']:
                r[x['name']] = x
            return r

        self.log.info("Test RPC getnetworkinfo")
        n0 = networks_dict(self.nodes[0].getnetworkinfo())
        assert_equal(NETWORKS, n0.keys())
        for net in NETWORKS:
            assert_equal(n0[net]['proxy'], '%s:%i' % (self.conf1.addr))
            assert_equal(n0[net]['proxy_randomize_credentials'], True)
        assert_equal(n0['onion']['reachable'], True)

        n1 = networks_dict(self.nodes[1].getnetworkinfo())
        assert_equal(NETWORKS, n1.keys())
        for net in ['ipv4', 'ipv6']:
            assert_equal(n1[net]['proxy'], '%s:%i' % (self.conf1.addr))
            assert_equal(n1[net]['proxy_randomize_credentials'], False)
        assert_equal(n1['onion']['proxy'], '%s:%i' % (self.conf2.addr))
        assert_equal(n1['onion']['proxy_randomize_credentials'], False)
        assert_equal(n1['onion']['reachable'], True)

        n2 = networks_dict(self.nodes[2].getnetworkinfo())
        assert_equal(NETWORKS, n2.keys())
        for net in NETWORKS:
            assert_equal(n2[net]['proxy'], '%s:%i' % (self.conf2.addr))
            assert_equal(n2[net]['proxy_randomize_credentials'], True)
        assert_equal(n2['onion']['reachable'], True)

        if self.have_ipv6:
            n3 = networks_dict(self.nodes[3].getnetworkinfo())
            assert_equal(NETWORKS, n3.keys())
            for net in NETWORKS:
                assert_equal(n3[net]['proxy'], '[%s]:%i' % (self.conf3.addr))
                assert_equal(n3[net]['proxy_randomize_credentials'], False)
            assert_equal(n3['onion']['reachable'], False)
if __name__ == '__main__':
    # Script entry point: run the proxy feature test.
    ProxyTest().main()
| {
"content_hash": "7852ab1bcb060c07c021656410a947a5",
"timestamp": "",
"source": "github",
"line_count": 228,
"max_line_length": 121,
"avg_line_length": 41.228070175438596,
"alnum_prop": 0.6164893617021276,
"repo_name": "rnicoll/dogecoin",
"id": "05b658ed8720fcf1e4ee5dbdfc1f81557e27afb3",
"size": "9614",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "test/functional/feature_proxy.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28173"
},
{
"name": "C",
"bytes": "1064604"
},
{
"name": "C++",
"bytes": "8101614"
},
{
"name": "CMake",
"bytes": "28560"
},
{
"name": "HTML",
"bytes": "21833"
},
{
"name": "M4",
"bytes": "215256"
},
{
"name": "Makefile",
"bytes": "117017"
},
{
"name": "Objective-C++",
"bytes": "5497"
},
{
"name": "Python",
"bytes": "2237402"
},
{
"name": "QMake",
"bytes": "798"
},
{
"name": "Sage",
"bytes": "35184"
},
{
"name": "Scheme",
"bytes": "7554"
},
{
"name": "Shell",
"bytes": "153769"
}
],
"symlink_target": ""
} |
"""
Created on 2017
Author : Edouard Cuvelier
Affiliation : Université catholique de Louvain - ICTEAM - UCL Crypto Group
Address : Place du Levant 3, 1348 Louvain-la-Neuve, BELGIUM
email : firstname.lastname@uclouvain.be
"""
import tools.fingexp as fingexp
from Crypto.Random.random import randint
from random import sample
import time
import pickle
from cryptoTools.polyCommitment import PolynomialCommitment
def randomPermutation(L):
    '''
    return a random permutation of the list L (L itself is not modified)
    '''
    remaining = L + []
    shuffled = []
    # Repeatedly draw a random element from what is left; randint comes
    # from Crypto.Random.random so the permutation is crypto-grade.
    while remaining:
        pick = randint(0, len(remaining) - 1)
        shuffled.append(remaining.pop(pick))
    return shuffled
def generatePermTuple(n):
    '''
    generate all permutations of (0,...,n-1) and store them in a Tuple
    ((perm1),(perm2),...), in lexicographic order.

    Fix: the old recursion returned a bare element ([L[0]]) for the
    one-element base case, which made tuple(Li) crash with a TypeError
    when n == 1; the base case now returns a list of lists.
    '''
    def perm(L):
        # Return every permutation of list L as a list of lists.
        if len(L) == 1:
            return [[L[0]]]
        Lperm = []
        for i in range(len(L)):
            rest = L[:i] + L[i + 1:]
            for tail in perm(rest):
                Lperm.append([L[i]] + tail)
        return Lperm

    if n == 0:
        # No elements -> empty tuple (same as the old behavior).
        return ()
    return tuple(tuple(p) for p in perm(list(range(n))))
def pathToIndexList(path, nbChildren, SZ):
    '''
    Given a path of the form a string of integers ranging from 0 to nbChildren,
    convert it to a list of indexes which are the position indexes in a list
    storing the tree containing the path.
    For example, the path '021' where nbChildren = 3 and SZ = 2 returns the
    list of indexes [0,1,6,7,22,23]

    Fix: the old code did `pow_n = range(k)` and then assigned into it,
    which only works on Python 2 (range objects are immutable on Python 3);
    the powers are now built as a real list.
    '''
    assert path[0] == '0'
    n = nbChildren
    k = len(path)
    # Powers of n from n**(k-1) down to n**0.
    pow_n = [n ** i for i in range(k)]
    pow_n.reverse()
    # Path digits shifted by one (the root level occupies the first bucket).
    pathList = [int(path[i]) + 1 for i in range(1, k)]
    # The root bucket always contributes indexes 0..SZ-1.
    indexesList = list(range(SZ))
    for i in range(1, k):
        # Offset (in buckets) of the level-i node along the path, then the
        # SZ consecutive block indexes of that bucket.
        pr = sum(p * d for p, d in zip(pow_n[-i:], pathList[:i]))
        a = SZ * pr
        indexesList += list(range(a, a + SZ))
    return indexesList
def positionToSubPath(position, nbChildren, SZ, depth, subPathDic):
    '''
    Given a position in a list representing the tree,
    return the subpath (string of child digits, starting with '0' for the
    root) leading to the node of the tree.

    - subPathDic is a memoization cache {position: subpath}, updated in place.

    Fix: the old out-of-range branch used Python-2-only print statements and
    an `assert i != depth-1` that passes when i == depth, silently falling
    through and producing a garbage subpath; it now always raises with the
    same diagnostic information.
    '''
    # First, look if the entry exists in the dictionary
    try:
        return subPathDic[position]
    except KeyError:
        pass
    n = nbChildren
    # Find the level `index` whose block range contains `position`;
    # `a` accumulates the number of nodes in all shallower levels.
    index = None
    a = 0
    for i in range(depth + 1):
        b = a + n ** i
        if a * SZ <= position < b * SZ:
            index = i
            break
        a = b
    if index is None:
        # position lies outside the tree: fail loudly.
        raise AssertionError(
            'position %s outside tree (depth=%s, SZ=%s, nbChildren=%s)'
            % (position, depth, SZ, n))
    # Walk down from the root, peeling off one path digit per level.
    pivot = a * SZ
    subPath = ''
    for k in range(0, index + 1):
        Z_n_i_minus_k = SZ * n ** (index - k)
        j_k = (position - pivot) // Z_n_i_minus_k
        pivot = pivot + j_k * Z_n_i_minus_k
        subPath += str(j_k)
    subPathDic[position] = subPath
    return subPath
def randomPath(subpath, nbChildren, depth):
    '''
    Given a subpath, return a random path of length `depth` containing
    that subpath as its prefix.
    '''
    path = subpath
    assert len(path) <= depth
    # Append random child digits until the path reaches full depth.
    while len(path) < depth:
        path += str(randint(0, nbChildren - 1))
    return path
class PathORAMTree:
    '''
    Server-side storage for a Path ORAM tree.

    - blocksList is the list of all the blocks of the tree ordered in a
      canonic way
    - treeID is a string used to identify the tree

    Fix: the constructor previously used a mutable default argument
    (blocksList=[]), so every instance created without an explicit list
    shared the same one; a None sentinel is used instead (backward
    compatible: the default is still an empty, but now per-instance, list).
    '''

    def __init__(self, blocksList=None, treeID=''):
        self.treeID = treeID
        self.blocksList = [] if blocksList is None else blocksList

    def __str__(self):
        return 'Path ORAM Tree ' + str(self.treeID)

    def __repr__(self):
        return self.__str__()

    def getBlocks(self, indexesList):
        '''Return the blocks stored at each position of indexesList.'''
        L = []
        for position in indexesList:
            L.append(self.blocksList[position])
        return L

    def writeBlocks(self, L):
        '''Write each (position, block) pair of L into the tree.'''
        for position, block in L:
            self.blocksList[position] = block
class PathORAMTree_for_Polynomial_Commitment(PathORAMTree):
    '''
    Path ORAM tree whose blocks are compressed elliptic-curve points of
    polynomial commitments.

    - pC_PK is the polynomial-commitment public key; its pairing group is
      used to (de)compress points
    - blocksList is the list of all the blocks of the tree ordered in a
      canonic way
    - treeID is a string used to identify the tree

    Fix: the constructor previously used a mutable default argument
    (blocksList=[]) shared between all instances; a None sentinel is used
    instead (backward compatible).
    '''

    def __init__(self, pC_PK, blocksList=None, treeID=''):
        self.treeID = treeID
        self.blocksList = [] if blocksList is None else blocksList
        self.pC_PK = pC_PK

    def getBlocks(self, indexesList):
        '''Decompress each stored point and wrap it in a
        PolynomialCommitment before returning it.'''
        ECG = self.pC_PK.pairing.EFp
        L = []
        for position in indexesList:
            # Each stored block is a compressed point (b, X).
            b, X = self.blocksList[position]
            c = ECG.uncompress(b, X)
            com = PolynomialCommitment(c, self.pC_PK)
            L.append(com)
        return L

    def writeBlocks(self, L):
        '''Store each commitment of L in compressed-point form.'''
        for position, block in L:
            self.blocksList[position] = block.c.compress()
class RingORAM :
def __init__(self,POTree, Z= 4, S = 4, A = 4, nbChildren = 2 ,depth = 10, treeHash = '', createDummyBlock = None, rerandomizeBlock = None):
'''
- POTree is the Path ORAM tree in which the data will be stored
- Z is the number of real blocks per node (or bucket)
- S is the number of dummy blocks per node (or bucket)
- A is the frequency at which eviction of paths are performed
- nbChildren is the exact number of children a node must have
- depth is the number of levels of the tree
- treeHash is the Merkle-Damgard hash of the tree
- createDummyBlock, a method to call when creating dummyBlocks
- rerandomizeBlock, a method to re-randomize a block
The class initialize the folowing variables:
- positionMap is a dictionnary used to store the position in which a block
is currently stored, an item of the dictionnary is of the form
{blockID : (position,path)} ; position is set to 'stash', when the
block is stored in the client Stash
- positionList is a list of entries (blockID,path,not_visited) where :
* the index in positionList corresponds to the index of the list
the tree
* blockID and path are set to None when the respective block is a
dummy block
* not_visited is a boolean set to True when the block has not yet
been touched since its last re-randomization
- clientStash is a dictionary of the form { blockID : block }, it is used
to store blocks after a query or after a call to self.evictPath()
- SZ = S+Z the exact number of blocks in each bucket (S+Z)
- tLoad is the number of blocks in the tree
- nbNodes is the number of buckets (or nodes) in the tree
- query_counter keeps track of the number of queries performed, it helps
decide when to call self.evictPath(), relatievely to self.A
- path_counter keeps track of the index of the path to evict when
self.evictPath() is called. The index is the one of the list :
- orderedPathList which stores the paths in the reverse lexicographic order
- sPD is the subpath dictionary that is saved externally to speedup computations
It indicates, given a position, the subpath leading to it, and thus
speeding up any call to positionToSubPath(...).
'''
self.POTree = POTree
self.Z = Z
self.S = S
self.A = A
self.query_counter = 1
self.path_counter = 0
self.SZ = S+Z # exact number of blocks in each bucket (S+Z)
self.nbChildren = nbChildren # exact number of children a bucket has
self.depth = depth # of the tree
self.treeHash = treeHash #MD hash of the tree
tLoad = self.SZ
st = 1
for i in range(depth):
st = st*nbChildren
tLoad += self.SZ*st
self.tLoad = tLoad
if self.POTree.blocksList == [] :
self.POTree.blocksList = [None]*self.tLoad
self.nbNodes = tLoad/self.SZ
self.POTree = POTree
self.positionList = [(None,None,True)]*self.tLoad # at each position stores entries of the form (blockID,path,not_visited)
self.positionMap = {} # stores entries of the form {blockID : (position,path)}
#self.bucketDic = {} # stores entries of the form {bucketID : [realList,validList]} where realList = [True,False,...] of size S+Z where True accounts for a real block and False for a dummy one. validList = [True,False,...] of size S+Z where True means the blocks has not been visited already
self.clientStash = {} # stores entries of the form {blockID : block}
self.dummyStash = [] # List containing dummy blocks
self.pathList = self.buildPathList()
self.orderedPathList = self.orderListInReverseLexicographic(self.pathList)
self.hashDic = {}
self.dummyCounter = 0
# Load the dictionary to speedup computations
try :
s = str(self.SZ)+str(self.depth)+str(self.nbChildren)
f = open('./posDictionaries/positionDic'+s,'r')
subPathDic = pickle.load(f)
f.close()
except IOError :
s = str(self.SZ)+'_'+str(self.depth)+'_'+str(self.nbChildren)
f = open('./posDictionaries/positionDic'+s,'w')
pickle.dump({},f)
f.close()
subPathDic = {}
self.sPD = subPathDic
'''
try :
s = str(self.SZ)
f = open('./permutations/perm'+s,'r')
permTup = pickle.load(f)
f.close()
except IOError :
s = str(self.SZ)
f = open('./permutations/perm'+s,'w')
permTup = generatePermTuple(self.SZ)
pickle.dump(permTup,f)
f.close()
self.permTup = permTup
'''
# Below are default methods to use when rerandomizeBlock, createDummyBlock
# and isADummyBlock methods are not specified
if rerandomizeBlock == None :
def fa(block,blockID):
#print 'rerandomizing block',block
return 'r-'+block
self.rerandomizeBlock = fa
else :
self.rerandomizeBlock = rerandomizeBlock
if createDummyBlock == None :
def fb():
#return 'DB'+(str(randint(0,2**20))), 'dummy block'
return 'DB', 'dummy block'
self.createDummyBlock = fb
else :
self.createDummyBlock = createDummyBlock
def hashPath(self,path):
path_copy = path[:-1]
while path_copy != '' :
self.hashNode(path_copy)
path_copy = path_copy[:-1]
self.treeHash = self.hashDic['0']
def hashNode(self, subpath):
hashList = []
for i in range(self.nbChildren):
hashList.append(self.hashDic[subpath+str(i)])
self.hashDic[subpath] = fingexp.fingerprint([self.hashDic[subpath]]+hashList)
def buildPathList(self):
'''
this method returns an iterable of the path of self.POTree
A path is a string of the form '025103...40' where a letter x at index i
indicates that the child x of the previous node of level i-1 is in the
path. The first letter is 0, for the root.
'''
def genWords(alphabet,length):
'''
alphabet is a list of string
'''
if length == 1 :
return alphabet
else :
new_words = []
words = genWords(alphabet,length-1)
for word in words :
for letter in alphabet :
new_words.append(letter+word)
return new_words
alphabet = []
for i in range(self.nbChildren):
alphabet.append(str(i))
paths = genWords(alphabet,self.depth)
pathList = []
for path in paths :
pathList.append('0'+path)
return pathList
def orderListInReverseLexicographic(self,L):
'''
See the paper [] for the meaning of this order
'''
L_copy = L+[]
for i in range(len(L)):
L_copy[i] = L_copy[i][::-1]
L_copy.sort()
for i in range(len(L)):
L_copy[i] = L_copy[i][::-1]
return L_copy
def getDummyBlock(self):
'''
Returns a dummy block either by taking one from the dummy stash or by
creating a new one.
'''
if self.dummyStash !=[]:
#print 'here'
return self.rerandomizeBlock(self.dummyStash.pop(),None)
else :
self.dummyCounter +=1
return self.createDummyBlock()
#return 'dummy block'
def fillupTree(self,blockList):
'''
This method assigns blocks to the tree nodes and pad with dummy
blocks
We assume here that the tree is empty
'''
#print 'blockList',blockList
t = self.tLoad
t1 = time.time()
#permuted_blockList, permutation = randomPermutation(blockList)
permuted_blockList = randomPermutation(blockList)
t2 = time.time()
#print 'permuted_blockList',permuted_blockList
new_blockList = []
while permuted_blockList != [] :
bucket = []
i = 0
while permuted_blockList != [] and i < self.Z :
bucket.append((permuted_blockList.pop(),True))
i +=1
while len(bucket)<(self.SZ) :
bucket.append((self.createDummyBlock(),False))
#permuted_bucket, perm = randomPermutation(bucket) #TODO: time-consuming!
permuted_bucket= randomPermutation(bucket) #TODO: time-consuming!
new_blockList = permuted_bucket +new_blockList
k = max(t-len(new_blockList),0)
#print 'new_blockList',new_blockList
t3 = time.time()
for i in range(k):
new_blockList = [(self.createDummyBlock(),False)]+new_blockList
assert len(new_blockList) == t
#print 'new_blockList',new_blockList
t4 = time.time()
for i in range(0,t,self.SZ):
bucket = []
subpath = positionToSubPath(i,self.nbChildren,self.SZ,self.depth,self.sPD)
for j in range(self.SZ):
bucket.append(new_blockList[i+j][0])
self.hashDic[subpath] = fingexp.fingerprint(bucket)
self.treeHash = self.hashDic['0']
t4b = time.time()
for i in range(t):
if new_blockList[i][1] == True :
blockID, block = new_blockList[i][0]
subpath = positionToSubPath(i,self.nbChildren,self.SZ,self.depth,self.sPD)
path = randomPath(subpath, self.nbChildren, self.depth+1)
self.positionList[i] = (blockID,path,True)
self.positionMap[blockID] = (i,path)
new_blockList[i] = new_blockList[i][0][1]
else :
# the block is a dummy one
new_blockList[i] = new_blockList[i][0]
t5 = time.time()
L = enumerate(new_blockList)
#print 'new_blockList',new_blockList
self.POTree.writeBlocks(L)
t6 = time.time()
print 'permutation of blockList:',t2-t1,'\n buckets creation:',t3-t2,'\n dummy block creation:',t4-t3,'\n hashing nodes of the tree:',t4b-t4,'\n filling up of the tree:',t5-t4b, '\n block rerwriting in tree:',t6-t5
def getCandidates(self,indexesList,path):
'''
This method returns a list (postion,blockID) of blocks to refill the path.
The candidate blocks are sought in the client stash and the dummy stash,
new dummy blocks are created when needed.
'''
L = indexesList + []
Z = self.Z
M = {}
L.reverse()
#K = self.clientStash.keys()
#print 'here length of stash is ', len(self.clientStash), len(K)
for blockID_i in self.clientStash.keys() :
# This loop fills M that will be used to find good block candidates for filling the buckets
pos_i, path_i = self.positionMap[blockID_i]
assert pos_i == 'stash'
assert path_i != None
if path_i not in M :
M[path_i] = []
M[path_i].append(blockID_i)
#print 'M', len(M)
path_copy = path+''
index = 0
#counter = 0
new_blockList = []
#tbL = []
while path_copy != '':
new_bucket = []
for i in range(Z):
position = L[index]
candidate = None
pathList = M.keys()
for pathb in pathList :
if pathb[:len(path_copy)] == path_copy and M[pathb] != [] :
candidate = M[pathb].pop()
break
if not candidate == None :
new_bucket.append((True,position,candidate)) # here candidate is a blockID
#tbL.append(candidate)
#counter +=1
index +=1
#if len(new_bucket) < self.Z:
# print 'M, new_bucket',M, new_bucket, path_copy
k = self.SZ-len(new_bucket)
for i in range(k) :
position = L[index]
dummyBlock = self.getDummyBlock()
new_bucket.append((False,position,dummyBlock))
index +=1
new_blockList.append((new_bucket))
path_copy = path_copy[:-1]
#tbL.sort()
#print 'extracting ', counter, 'blocks from stash', tbL
return new_blockList
def getBucketCandidates(self,bucket_path):
'''
this method returns the real blockID of blocks currently in the client stash
who might be stored in the bucket
'''
L = []
for blockID_i in self.clientStash.keys() :
# This loop fills L that will be used to find good block candidates for filling the buckets
pos_i, path_i = self.positionMap[blockID_i]
if bucket_path == path_i[:len(bucket_path)] :
L.append(blockID_i)
return L
def evictPath(self,buckets_to_reshuffle_list):
'''
This method takes the next path to evict in self.orderedPathList and evict it.
This means that all the blocks along the path are read, stored into the stash
(for real blocks) and then the path is refilled with blocks from the stash
and dummy blocks.
- buckets_to_reshuffle_list is a list containing the ID of the buckets meant
to be reshuffle by the earlyReshuffle method. As they will be reshuffled in
the current method, there is no need to reshuffle them later. The list of
buckets not to reshuffle later is returned by the method.
'''
path_to_evict = self.orderedPathList[self.path_counter % len(self.orderedPathList)]
#print '\t eviction of path', path_to_evict
indexesList = pathToIndexList(path_to_evict,self.nbChildren,self.SZ)
niL = []
for i in range(0,len(indexesList),self.SZ) :
realblocklist = []
dummyblocklist = []
for j in range(self.SZ):
index = indexesList[i+j]
if self.positionList[index][2] == False:
pass
elif self.positionList[index][0] == None :
dummyblocklist.append(index)
else :
realblocklist.append(index)
k = len(realblocklist)
if k < self.Z :
rbucket = realblocklist+dummyblocklist[:(self.Z-k)]
assert len(rbucket) == self.Z
elif k == self.Z:
rbucket = realblocklist
else :
assert False
niL += rbucket
#print 'retrieving indexes', niL
#blockList = self.POTree.getBlocks(indexesList) # Reading the tree
blockList = self.POTree.getBlocks(niL) # Reading the tree
#print 'indexesList is', indexesList
#print 'blockList is', blockList
bL_copy = blockList+[]
#self.checkSync()
#print 'checking 1'
buckets_not_to_reshuffle_list = []
for index in buckets_to_reshuffle_list :
if index in indexesList :
buckets_not_to_reshuffle_list.append(index)
#added_to_stash = 0
#tbI = []
#print 'length of stash', len(self.clientStash)
for index in niL :
# This loop retrieves the real blocks from blockList and save them in the stash
if self.positionList[index][0] == None :
pass
elif self.positionList[index][0] == 'real' :
assert False
else:
# the block is not a dummy block
blockID = self.positionList[index][0]
path_i = self.positionList[index][1]
block_i = blockList[niL.index(index)]
self.clientStash[blockID] = self.rerandomizeBlock(bL_copy.pop(bL_copy.index(block_i)),blockID)
self.positionMap[blockID] = 'stash', path_i
#print '(3)updtating position map of ',blockID, 'to ','stash', path_i
#tbI.append(blockID)
#added_to_stash += 1
#tbI.sort()
#print added_to_stash,'blocks added to stash', tbI,'new length of stash', len(self.clientStash)
#print 'dummy stash before increm.', len(self.dummyStash)
self.dummyStash += bL_copy # Add remaining dummy blocks to the dummy stash
#print 'dummy stash after increm.', len(self.dummyStash)
new_blockList = self.getCandidates(indexesList,path_to_evict)
#print 'dummy stash after after increm.', len(self.dummyStash)
#print 'candidates got are ', new_blockList
#self.checkSync()
#print 'checking 2'
nBL = []
#print 'sit1', self.positionList, self.POTree.blocksList
for bucket in new_blockList :
b_copy = bucket+[]
b_copy.reverse()
#print 'bucket to re-shuffle', b_copy
new_bucket = self.reshuffleBucket(b_copy)
nBL += new_bucket
#print 'nBL', nBL
self.POTree.writeBlocks(nBL)
#print 'sit2', self.positionList, self.POTree.blocksList
#self.checkSync()
#print 'checking 3'
self.path_counter +=1
return buckets_not_to_reshuffle_list
def earlyReshuffle(self,buckets_to_reshuffle_list):
'''
This method will reshuffle all buckets of buckets_to_reshuffle_list.
- By doing so, the method might add fitting blocks stored in the client
stash into the bucket, up to Z
- The real blocks already in the bucket, remain there after the shuffle
- buckets_to_reshuffle_list contains the positions of the first block for
each bucket
'''
#print '\t early reshuffling of buckets', buckets_to_reshuffle_list
btrs = buckets_to_reshuffle_list+[]
btrs.reverse() # begin with the deeper buckets
new_blockList = []
for first_pos in btrs :
bucket = range(first_pos,first_pos+self.SZ)
blocksList = self.POTree.getBlocks(bucket)
#print 'block list got from tree',blocksList
new_bucket = []
b_ID_List = []
for i in range(self.SZ):
blockID,path,not_visited = self.positionList[bucket[i]]
block_i = blocksList[i]
if not blockID == None and not blockID == 'real':
b_ID_List.append(blockID)
self.clientStash[blockID] = self.rerandomizeBlock(block_i,blockID)
self.positionMap[blockID] = 'stash',path
nblock = (True,first_pos+len(new_bucket),blockID)
#print 'inserting nblock (1)', nblock
new_bucket.append(nblock)
elif blockID == 'real' :
pass
else :
"""
if not block_i == 'dummy block':
block_i = 'dummy block'
"""
self.dummyStash.append(block_i)
if len(b_ID_List) < self.Z :
bucket_path = positionToSubPath(first_pos,self.nbChildren,self.SZ,self.depth,self.sPD)
new_candidates = self.getBucketCandidates(bucket_path)
for b_ID in new_candidates :
if not b_ID in b_ID_List and not len(b_ID_List) >= self.Z :
nblock = (True,first_pos+len(new_bucket),b_ID)
#print 'inserting nblock (2)', nblock
new_bucket.append(nblock)
#print 'reinserting',b_ID, 'from stash'
b_ID_List.append(b_ID)
while len(new_bucket)< self.SZ :
dummyBlock = self.getDummyBlock()
nblock = (False,first_pos+len(new_bucket),dummyBlock)
#print 'inserting nblock (3)', nblock
new_bucket.append(nblock)
assert len(new_bucket) == self.SZ
perm_bucket = self.reshuffleBucket(new_bucket)
new_blockList += perm_bucket
self.POTree.writeBlocks(new_blockList)
def check_insertion(self,position,blockID):
for b_ID in self.positionMap :
pos,path = self.positionMap[b_ID]
if position == pos and not pos == 'stash' :
print position,blockID,self.positionMap
assert b_ID == blockID
def reshuffleBucket(self,bucket):
'''
This method randomly shuffles the bucket and update the positionList,
the positionMap and the clientStash accordingly.
bucket is a list of tuples (is_real_block, position, blockID_or_dummyBlock)
We assume all real blocks are stored in the client Stash
'''
#positionL = []
first_pos = bucket[0][1]
#print '\t --> reshuffling bucket', first_pos,'to',first_pos+self.SZ-1
#for i in range(len(bucket)):
# positionL.append(bucket[i][1])
#print 'bucket',bucket
perm_bucket = randomPermutation(bucket)
#print 'perm_bucket',perm_bucket
for i in range(len(bucket)):
#position = positionL[i]
position = first_pos+i
if perm_bucket[i][0] == True :
# a real block
blockID = perm_bucket[i][2]
block = self.clientStash.pop(blockID)
assert block != None
#print 're-inserting block',blockID,'from stash'
old_pos, path = self.positionMap[blockID]
assert not path == None
self.positionList[position] = blockID,path,True
#print '(3) modifying positionList at',position
#self.check_insertion(position,blockID)
self.positionMap[blockID] = position, path
#print '(1) updtating position map of ',blockID, 'to ',position, path
perm_bucket[i] = (position,blockID,block)
else :
# a dummy block
self.positionList[position] = None,None,True
#print '(4) modifying positionList at',position
perm_bucket[i] = (position,'dummy',perm_bucket[i][2])
#print 'permuted bucket', perm_bucket
for i in range(len(perm_bucket)):
perm_bucket[i] = (perm_bucket[i][0], perm_bucket[i][2])
subpath = positionToSubPath(perm_bucket[0][0],self.nbChildren,self.SZ,self.depth,self.sPD)
B = []
for i in range(self.SZ):
B.append(perm_bucket[i][1])
self.hashDic[subpath] = fingexp.fingerprint(B)
return perm_bucket
    def selectIndexes(self,indexesList,position):
        '''
        This method returns the list of indexes of blocks that need to be read
        by selecting them randomly in each bucket among the dummy blocks except
        for the bucket (if any) containing the real block.
        Returns (select_indexesList, reshuffle_bucket_list): one index per
        bucket along the path, plus the first index of every bucket that has
        now been visited S-1 times and must be reshuffled.
        '''
        #print position,indexesList
        # indexesList covers one bucket of SZ slots per tree level (depth+1 buckets).
        assert len(indexesList)/self.SZ == self.depth+1
        select_indexesList = []
        reshuffle_bucket_list = [] # keeps track of the bucket that will be reshuffled after the execution of the method
        for i in range(self.depth+1):
            bucket = indexesList[i*self.SZ:(i+1)*self.SZ]
            #print 'visiting bucket', bucket
            if position in bucket :
                # This bucket holds the real block: read it directly and mark
                # its slot as visited (third tuple element False).
                #print self.positionList[position]
                assert self.positionList[position][2] == True
                select_indexesList.append(position)
                self.positionList[position] = 'real',None,False
                #print '(1) modifying positionList at',position
            else :
                randomBlocks = [] # collects the dummy blocks not visited yet
                for index in bucket:
                    if self.positionList[index][0] == None and self.positionList[index][2] == True :
                        # the block is a dummy and not visited yet
                        #print 'appending dummy block', self.positionList[index]
                        randomBlocks.append(index)
                # choose randomly a dummy block to read
                r = randint(0,len(randomBlocks)-1)
                randBlock_index = randomBlocks[r]
                select_indexesList.append(randBlock_index)
                self.positionList[randBlock_index] = None,None,False
                #print '(2) modifying positionList at',randBlock_index
            count = 0 # keeps track of the number of blocks visited yet in the bucket
            for index in bucket :
                if self.positionList[index][2] == False :
                    count += 1
            if count > self.S-1 :
                print '!!! Error : counter too big!!!', count
                assert False
            elif count == self.S-1 :
                # this bucket needs to be re-shuffled
                reshuffle_bucket_list.append(indexesList[i*self.SZ])
            #print 'counter for bucket', bucket,'equals',count
        # Exactly one slot was selected per bucket/level.
        assert len(select_indexesList) == self.depth+1
        return select_indexesList, reshuffle_bucket_list
    def queryBlock(self,blockID):
        '''
        This method returns the block stored in the self.POTree which corresponds
        to the blockID
        Doing so, the method might modify all the blocks along one path.
        The blocks are either :
        - rerandomized
        - moved in the stash
        - reassigned in the path
        - replaced by dummy blocks
        The method might re-shuffle buckets that have been visited more than
        self.S times
        The method recomputes the hash of the tree if some modification occurs
        '''
        assert blockID in self.positionMap
        position, path = self.positionMap[blockID]
        assert path in self.pathList
        indexesList = pathToIndexList(path,self.nbChildren,self.SZ)
        #print 'blockID',blockID,' at position', position
        #print 'seeking path',path, 'returned indexesList',indexesList
        # The block is either in the client stash or somewhere on its assigned path.
        assert ((position != 'stash') and (position in indexesList)) or (position == 'stash')
        # Pick a fresh random path for the block (standard ORAM remapping).
        l = len(self.pathList)
        r = randint(0,l-1)
        new_path = self.pathList[r]
        # the list of indexes to visit (according to ring ORAM)
        select_indexesList, buckets_to_reshuffle_list = self.selectIndexes(indexesList,position)
        #print 'select_indexesList are',select_indexesList
        blockList = self.POTree.getBlocks(select_indexesList)
        #print 'retrieved blocks from the tree are', blockList
        #querriedBlock_index = None
        if position == 'stash':
            querriedBlock = self.clientStash[blockID]
        else :
            querriedBlock_index = select_indexesList.index(position)
            querriedBlock = blockList[querriedBlock_index]
        #print 'querriedBlock',querriedBlock
        # The queried block is rerandomized and kept in the stash until the
        # next path eviction; its map entry now points at the new path.
        self.clientStash[blockID] = self.rerandomizeBlock(querriedBlock,blockID)
        self.positionMap[blockID] = 'stash', new_path
        #print '(2)updtating position map of ',blockID, 'to ','stash', path
        #for index in select_indexesList:
        #    self.positionList[index] = None,None,False # False means the dummy blocks have been visited
        #if not querriedBlock_index == None :
        #    self.positionList[querriedBlock_index] = 'real',None,False
        buckets_not_to_reshuffle_list = []
        rehashTree = False
        if self.query_counter == 0 :
            # Time to evict a path according to self.A and the previous number of queries
            # buckets_not_to_reshuffle_list is a list of buckets that have been
            # reshuffled in the evictPath method and so are not to be re-re-shuffled
            # in the earlyReshuffle method
            #print 'stash size before evict path', len(self.clientStash)
            buckets_not_to_reshuffle_list = self.evictPath(buckets_to_reshuffle_list)
            #print 'stash size after evict path', len(self.clientStash)
            rehashTree = True
        for bucket in buckets_not_to_reshuffle_list :
            buckets_to_reshuffle_list.remove(bucket)
        # Evictions happen every self.A queries (ring ORAM eviction rate).
        self.query_counter = (self.query_counter+1) % self.A
        if buckets_to_reshuffle_list != [] :
            self.earlyReshuffle(buckets_to_reshuffle_list)
            rehashTree = True
        if rehashTree :
            self.hashPath(path)
        return querriedBlock
    def checkSync(self):
        '''
        Debug helper: assert that the client-side bookkeeping (positionList,
        positionMap, clientStash) agrees with the server tree
        (self.POTree.blocksList). Every real block must either live in the
        client stash or occupy a non-dummy slot of the tree; raises
        AssertionError otherwise.
        '''
        # Pass 1: every occupied positionList slot is backed by the stash or
        # by a non-dummy entry in the tree at the same index.
        for i in range(len(self.positionList)) :
            block_id, path_i, b_i = self.positionList[i]
            if not block_id == None :
                if block_id in self.clientStash :
                    pass
                # NOTE(review): membership test 'dummy block' in blocksList[i]
                # presumes tree entries are containers tagged with the string
                # 'dummy block' -- confirm against POTree's block encoding.
                elif not 'dummy block' in self.POTree.blocksList[i] :
                    pass
                else :
                    print '!!! problem !!!', block_id, 'wrongly situated (1)'
                    print i, self.POTree.blocksList[i], self.positionList[i]
                    assert False
        # Pass 2: every positionMap entry points either at the stash or at a
        # non-dummy tree slot.
        for block_id in self.positionMap.keys() :
            pos, path = self.positionMap[block_id]
            if pos == 'stash' :
                if block_id in self.clientStash :
                    pass
                else :
                    print '!!! problem !!!', block_id, 'wrongly situated (2)'
                    print block_id,'\n',self.POTree.blocksList,'\n', self.positionList,'\n',self.positionMap
                    assert False
            else :
                if not 'dummy block' in self.POTree.blocksList[pos]:
                    pass
                else :
                    print '!!! problem !!!', block_id, 'wrongly situated (3)'
                    print block_id,'\n',self.POTree.blocksList,'\n', self.positionList,'\n',self.positionMap
                    assert False
##################### Test Example #############################################
def test_example(Z = 3, S = 4, A = 4,nbChildren = 3, depth = 3,nbWords = None):
    '''
    Build a RingORAM over a fresh PathORAMTree, fill it with nbWords synthetic
    (block-ID, word) pairs, pickle the resulting position dictionary (sPD)
    under ./posDictionaries/, and return
    (RingORAM instance, blockList, setup time, fill time).
    When nbWords is None, it defaults to tLoad/6 (a sixth of the theoretic load).
    '''
    # create PO Tree
    po_tree = PathORAMTree( treeID = 'test_PO_tree')
    RO = RingORAM(po_tree,Z = Z, S=S, A=A , nbChildren = nbChildren, depth = depth)
    if nbWords == None :
        nbWords = int(RO.tLoad/6)
    print 'parameters are\n Z:',Z,'\n depth:', depth,'\n number of children:', nbChildren,'\n number of blocks:', nbWords,'\n theoretic load of the tree:', RO.tLoad
    t1 = time.time()
    print 'Ring ORAM tree created'
    '''
    L = ['ba','be','bi','bo','bu','ca','ce','ci','co','cu','da','de','di','do','du','fa','fe','fi','fo','fu','ga','ge','gi','go','gu','ha','he','hi','ho','hu','ja','je','ji','jo','ju','ka','ke','ki','ko','ku','la','le','li','lo','lu','ma','me','mi','mo','mu','na','ne','ni','no','nu','pa','pe','pi','po','pu','ra','re','ri','ro','ru','sa','se','si','so','su','ta','te','ti','to','tu','va','ve','vi','vo','vu','wa','we','wi','wo','wu','xa','xe','xi','xo','xu','za','ze','zi','zo','zu']
    blockList = []
    for i in range(nbWords):
        word = ''
        for j in range(5) :
            syllab = sample(L,1)[0]
            word += syllab
        blockList.append(('Block '+str(i),word))
    '''
    blockList = []
    for i in range(nbWords):
        blockList.append(('Block '+str(i),'word '+str(i)))
    print 'List of blocks generated\n Filling up the tree'
    t2 = time.time()
    RO.fillupTree(blockList)
    t3 = time.time()
    print 'Tree filled', t3-t2
    # File name encodes (Z, depth, nbChildren).
    s = str(Z)+'_'+str(depth)+'_'+str(nbChildren)
    # NOTE(review): pickling to a text-mode ('w') file only works on
    # Python 2; Python 3 requires 'wb' for pickle.dump.
    f = open('./posDictionaries/positionDic'+s,'w')
    pickle.dump(RO.sPD,f)
    f.close()
    return RO,blockList,t2-t1,t3-t2
def generatePO():
for i in range(2,6):
for j in range(2,5):
for k in range(2,9):
PO,t1,t2 = test_example(i,k,j)
s = str(i)+'_'+str(k)+'_'+str(j)
f = open('./posDictionaries/positionDic'+s,'w')
pickle.dump(PO.sPD,f)
f.close()
print 'done Z,d,n',i,k,j
def testLengthStash(PO,blockList,n):
    '''
    Query n randomly chosen blocks from blockList on the ORAM PO and record
    the dummy-stash and client-stash sizes after each query.
    Returns (dummy_stash_sizes, client_stash_sizes, mean_query_time).
    '''
    dummy_sizes = []
    client_sizes = []
    durations = []
    for _ in range(n):
        block_id = sample(blockList, 1)[0][0]
        start = time.time()
        PO.queryBlock(block_id)
        durations.append(time.time() - start)
        dummy_sizes.append(len(PO.dummyStash))
        client_sizes.append(len(PO.clientStash))
    return dummy_sizes, client_sizes, sum(durations) / n
| {
"content_hash": "85bd43aeee1c9277008281fe3a857a42",
"timestamp": "",
"source": "github",
"line_count": 1115,
"max_line_length": 484,
"avg_line_length": 36.718385650224214,
"alnum_prop": 0.5296646393590777,
"repo_name": "ecuvelier/P3MVEOS",
"id": "9d68161a4120fc2ccbcb623c954b105ee481a378",
"size": "40966",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pathORAM/ringORAM.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "243984"
}
],
"symlink_target": ""
} |
"""PYPOWER solves power flow and Optimal Power Flow (OPF) problems.
"""
| {
"content_hash": "b914e7803705a250baf78a014c56971a",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 67,
"avg_line_length": 36,
"alnum_prop": 0.7222222222222222,
"repo_name": "praba230890/PYPOWER",
"id": "8c3c4c2beb381bc4a6b033d4cb9e84364367e226",
"size": "229",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pypower/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "1129744"
}
],
"symlink_target": ""
} |
""" This module contains all the data structures for music service plugins
"""
from __future__ import unicode_literals
from .xml import XML, ns_tag, NAMESPACES
from .exceptions import DIDLMetadataError
from .utils import camel_to_underscore
def get_ms_item(xml, service, parent_id):
    """Return the music service item that corresponds to xml. The class is
    identified by getting the type from the 'itemType' tag
    """
    item_type = xml.findtext(ns_tag('ms', 'itemType'))
    item_cls = MS_TYPE_TO_CLASS.get(item_type)
    return item_cls.from_xml(xml, service, parent_id)
def tags_with_text(xml, tags=None):
    """Return a list of tags that contain text retrieved recursively from an
    XML tree
    """
    if tags is None:
        tags = []
    for child in xml:
        if child.text is not None:
            tags.append(child)
        elif len(child) > 0:
            tags_with_text(child, tags)
        else:
            message = 'Unknown XML structure: {0}'.format(child)
            raise ValueError(message)
    return tags
class MusicServiceItem(object):
    """Class that represents a music service item"""

    # These fields must be overwritten in the sub classes
    item_class = None
    valid_fields = None
    required_fields = None

    def __init__(self, **kwargs):
        super(MusicServiceItem, self).__init__()
        self.content = kwargs

    @classmethod
    def from_xml(cls, xml, service, parent_id):
        """Return a Music Service item generated from xml

        :param xml: Object XML. All items containing text are added to the
            content of the item. The class variable ``valid_fields`` of each of
            the classes list the valid fields (after translating the camel
            case to underscore notation). Required fields are listed in the
            class variable by that name (where 'id' has been renamed to
            'item_id').
        :type xml: :py:class:`xml.etree.ElementTree.Element`
        :param service: The music service (plugin) instance that retrieved the
            element. This service must contain ``id_to_extended_id`` and
            ``form_uri`` methods and ``description`` and ``service_id``
            attributes.
        :type service: Instance of sub-class of
            :class:`soco.plugins.SoCoPlugin`
        :param parent_id: The parent ID of the item, will either be the
            extended ID of another MusicServiceItem or of a search
        :type parent_id: str

        :raises ValueError: if the XML contains a tag not in
            ``valid_fields`` or is missing one of ``required_fields``.

        For a track the XML can e.g. be on the following form:

        .. code :: xml

         <mediaMetadata xmlns="http://www.sonos.com/Services/1.1">
           <id>trackid_141359</id>
           <itemType>track</itemType>
           <mimeType>audio/aac</mimeType>
           <title>Teacher</title>
           <trackMetadata>
             <artistId>artistid_10597</artistId>
             <artist>Jethro Tull</artist>
             <composerId>artistid_10597</composerId>
             <composer>Jethro Tull</composer>
             <albumId>albumid_141358</albumId>
             <album>MU - The Best Of Jethro Tull</album>
             <albumArtistId>artistid_10597</albumArtistId>
             <albumArtist>Jethro Tull</albumArtist>
             <duration>229</duration>
             <albumArtURI>http://varnish01.music.aspiro.com/sca/
              imscale?h=90&w=90&img=/content/music10/prod/wmg/
              1383757201/094639008452_20131105025504431/resources/094639008452.
              jpg</albumArtURI>
             <canPlay>true</canPlay>
             <canSkip>true</canSkip>
             <canAddToFavorites>true</canAddToFavorites>
           </trackMetadata>
         </mediaMetadata>
        """
        # Add a few extra pieces of information
        content = {'description': service.description,
                   'service_id': service.service_id,
                   'parent_id': parent_id}
        # Extract values from the XML
        all_text_elements = tags_with_text(xml)
        for item in all_text_elements:
            tag = item.tag[len(NAMESPACES['ms']) + 2:]  # Strip namespace
            tag = camel_to_underscore(tag)  # Convert to nice names
            if tag not in cls.valid_fields:
                message = 'The info tag \'{0}\' is not allowed for this item'.\
                    format(tag)
                raise ValueError(message)
            content[tag] = item.text

        # Convert values for known types
        for key, value in content.items():
            if key == 'duration':
                content[key] = int(value)
            if key in ['can_play', 'can_skip', 'can_add_to_favorites',
                       'can_enumerate']:
                content[key] = True if value == 'true' else False
        # Rename a single item
        content['item_id'] = content.pop('id')
        # And get the extended id
        content['extended_id'] = service.id_to_extended_id(content['item_id'],
                                                           cls)
        # Add URI if there is one for the relevant class
        uri = service.form_uri(content, cls)
        if uri:
            content['uri'] = uri
        # Check for all required values
        for key in cls.required_fields:
            if key not in content:
                message = 'An XML field that correspond to the key \'{0}\' '\
                    'is required. See the docstring for help.'.format(key)
                # BUG FIX: this message used to be built but never raised,
                # so items with missing required fields were silently
                # created anyway.
                raise ValueError(message)
        return cls.from_dict(content)

    @classmethod
    def from_dict(cls, dict_in):
        """Initialize the class from a dict

        :param dict_in: The dictionary that contains the item content. Required
            fields are listed class variable by that name
        :type dict_in: dict
        """
        kwargs = dict_in.copy()
        args = [kwargs.pop(key) for key in cls.required_fields]
        return cls(*args, **kwargs)

    def __eq__(self, playable_item):
        """Return the equals comparison result to another ``playable_item``."""
        if not isinstance(playable_item, MusicServiceItem):
            return False
        return self.content == playable_item.content

    def __ne__(self, playable_item):
        """Return the not equals comparison result to another ``playable_item``
        """
        if not isinstance(playable_item, MusicServiceItem):
            return True
        return self.content != playable_item.content

    def __repr__(self):
        """Return the repr value for the item.

        The repr is on the form::

          <class_name 'middle_part[0:40]' at id_in_hex>

        where middle_part is either the title item in content, if it is set,
        or ``str(content)``. The output is also cleared of non-ascii
        characters.
        """
        # 40 originates from terminal width (78) - (15) for address part and
        # (19) for the longest class name and a little left for buffer
        if self.content.get('title') is not None:
            middle = self.content['title'].encode('ascii', 'replace')[0:40]
        else:
            middle = str(self.content).encode('ascii', 'replace')[0:40]
        return '<{0} \'{1}\' at {2}>'.format(self.__class__.__name__,
                                             middle,
                                             hex(id(self)))

    def __str__(self):
        """Return the str value for the item::

         <class_name 'middle_part[0:40]' at id_in_hex>

        where middle_part is either the title item in content, if it is set, or
        ``str(content)``. The output is also cleared of non-ascii characters.
        """
        return self.__repr__()

    @property
    def to_dict(self):
        """Return a copy of the content dict"""
        return self.content.copy()

    @property
    def didl_metadata(self):
        """Return the DIDL metadata for a Music Service Track

        The metadata is on the form:

        .. code :: xml

         <DIDL-Lite xmlns:dc="http://purl.org/dc/elements/1.1/"
              xmlns:upnp="urn:schemas-upnp-org:metadata-1-0/upnp/"
              xmlns:r="urn:schemas-rinconnetworks-com:metadata-1-0/"
              xmlns="urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/">
           <item id="...self.extended_id..."
              parentID="...self.parent_id..."
              restricted="true">
             <dc:title>...self.title...</dc:title>
             <upnp:class>...self.item_class...</upnp:class>
             <desc id="cdudn"
                nameSpace="urn:schemas-rinconnetworks-com:metadata-1-0/">
               self.content['description']
             </desc>
           </item>
         </DIDL-Lite>

        :raises DIDLMetadataError: if the item is not playable or lacks the
            attributes needed to build the metadata.
        """
        # Check if this item is meant to be played
        if not self.can_play:
            message = 'This item is not meant to be played and therefore '\
                'also not to create its own didl_metadata'
            raise DIDLMetadataError(message)
        # Check if we have the attributes to create the didl metadata:
        for key in ['extended_id', 'title', 'item_class']:
            if not hasattr(self, key):
                message = 'The property \'{0}\' is not present on this item. '\
                    'This indicates that this item was not meant to create '\
                    'didl_metadata'.format(key)
                raise DIDLMetadataError(message)
        if 'description' not in self.content:
            message = 'The item for \'description\' is not present in '\
                'self.content. This indicates that this item was not meant '\
                'to create didl_metadata'
            raise DIDLMetadataError(message)

        # Main element, ugly? yes! but I have given up on using namespaces
        # with xml.etree.ElementTree
        item_attrib = {
            'xmlns:dc': 'http://purl.org/dc/elements/1.1/',
            'xmlns:upnp': 'urn:schemas-upnp-org:metadata-1-0/upnp/',
            'xmlns:r': 'urn:schemas-rinconnetworks-com:metadata-1-0/',
            'xmlns': 'urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/'
        }
        xml = XML.Element('DIDL-Lite', item_attrib)
        # Item sub element
        item_attrib = {
            'parentID': '',
            'restricted': 'true',
            'id': self.extended_id
        }
        # Only add the parent_id if we have it
        if self.parent_id:
            item_attrib['parentID'] = self.parent_id
        item = XML.SubElement(xml, 'item', item_attrib)

        # Add title and class
        XML.SubElement(item, 'dc:title').text = self.title
        XML.SubElement(item, 'upnp:class').text = self.item_class
        # Add the desc element
        desc_attrib = {
            'id': 'cdudn',
            'nameSpace': 'urn:schemas-rinconnetworks-com:metadata-1-0/'
        }
        desc = XML.SubElement(item, 'desc', desc_attrib)
        desc.text = self.content['description']
        return xml

    @property
    def item_id(self):
        """Return the item id"""
        return self.content['item_id']

    @property
    def extended_id(self):
        """Return the extended id"""
        return self.content['extended_id']

    @property
    def title(self):
        """Return the title"""
        return self.content['title']

    @property
    def service_id(self):
        """Return the service ID"""
        return self.content['service_id']

    @property
    def can_play(self):
        """Return a boolean for whether the item can be played"""
        return bool(self.content.get('can_play'))

    @property
    def parent_id(self):
        """Return the extended parent_id, if set, otherwise return None"""
        return self.content.get('parent_id')

    @property
    def album_art_uri(self):
        """Return the album art URI if set, otherwise return None"""
        return self.content.get('album_art_uri')
class MSTrack(MusicServiceItem):
    """A single track from a music service."""

    item_class = 'object.item.audioItem.musicTrack'
    valid_fields = [
        'album', 'can_add_to_favorites', 'artist', 'album_artist_id', 'title',
        'album_id', 'album_art_uri', 'album_artist', 'composer_id',
        'item_type', 'composer', 'duration', 'can_skip', 'artist_id',
        'can_play', 'id', 'mime_type', 'description'
    ]
    # IMPORTANT. Keep this list, __init__ args and content in __init__ in sync
    required_fields = ['title', 'item_id', 'extended_id', 'uri', 'description',
                       'service_id']

    def __init__(self, title, item_id, extended_id, uri, description,
                 service_id, **kwargs):
        """Initialize MSTrack item"""
        kwargs.update(title=title, item_id=item_id, extended_id=extended_id,
                      uri=uri, description=description, service_id=service_id)
        super(MSTrack, self).__init__(**kwargs)

    @property
    def album(self):
        """The album title, or None when not present."""
        return self.content.get('album')

    @property
    def artist(self):
        """The artist name, or None when not present."""
        return self.content.get('artist')

    @property
    def duration(self):
        """The track duration, or None when not present."""
        return self.content.get('duration')

    @property
    def uri(self):
        """The URI of the track."""
        # x-sonos-http:trackid_19356232.mp4?sid=20&flags=32
        return self.content['uri']
class MSAlbum(MusicServiceItem):
    """An album from a music service."""

    item_class = 'object.container.album.musicAlbum'
    valid_fields = [
        'username', 'can_add_to_favorites', 'artist', 'title', 'album_art_uri',
        'can_play', 'item_type', 'service_id', 'id', 'description',
        'can_cache', 'artist_id', 'can_skip'
    ]
    # IMPORTANT. Keep this list, __init__ args and content in __init__ in sync
    required_fields = ['title', 'item_id', 'extended_id', 'uri', 'description',
                       'service_id']

    def __init__(self, title, item_id, extended_id, uri, description,
                 service_id, **kwargs):
        kwargs.update(title=title, item_id=item_id, extended_id=extended_id,
                      uri=uri, description=description, service_id=service_id)
        super(MSAlbum, self).__init__(**kwargs)

    @property
    def artist(self):
        """The artist name, or None when not present."""
        return self.content.get('artist')

    @property
    def uri(self):
        """The URI of the album."""
        # x-rincon-cpcontainer:0004002calbumid_22757081
        return self.content['uri']
class MSAlbumList(MusicServiceItem):
    """A list of albums from a music service."""

    item_class = 'object.container.albumlist'
    valid_fields = [
        'id', 'title', 'item_type', 'artist', 'artist_id', 'can_play',
        'can_enumerate', 'can_add_to_favorites', 'album_art_uri', 'can_cache'
    ]
    # IMPORTANT. Keep this list, __init__ args and content in __init__ in sync
    required_fields = ['title', 'item_id', 'extended_id', 'uri', 'description',
                       'service_id']

    def __init__(self, title, item_id, extended_id, uri, description,
                 service_id, **kwargs):
        kwargs.update(title=title, item_id=item_id, extended_id=extended_id,
                      uri=uri, description=description, service_id=service_id)
        super(MSAlbumList, self).__init__(**kwargs)

    @property
    def uri(self):
        """The URI of the album list."""
        # x-rincon-cpcontainer:000d006cplaylistid_26b18dbb-fd35-40bd-8d4f-
        # 8669bfc9f712
        return self.content['uri']
class MSPlaylist(MusicServiceItem):
    """A playlist from a music service."""

    item_class = 'object.container.albumlist'
    valid_fields = ['id', 'item_type', 'title', 'can_play', 'can_cache',
                    'album_art_uri', 'artist', 'can_enumerate',
                    'can_add_to_favorites', 'artist_id']
    # IMPORTANT. Keep this list, __init__ args and content in __init__ in sync
    required_fields = ['title', 'item_id', 'extended_id', 'uri', 'description',
                       'service_id']

    def __init__(self, title, item_id, extended_id, uri, description,
                 service_id, **kwargs):
        kwargs.update(title=title, item_id=item_id, extended_id=extended_id,
                      uri=uri, description=description, service_id=service_id)
        super(MSPlaylist, self).__init__(**kwargs)

    @property
    def uri(self):
        """The URI of the playlist."""
        # x-rincon-cpcontainer:000d006cplaylistid_c86ddf26-8ec5-483e-b292-
        # abe18848e89e
        return self.content['uri']
class MSArtistTracklist(MusicServiceItem):
    """A track list for a single artist from a music service."""

    item_class = 'object.container.playlistContainer.sameArtist'
    valid_fields = ['id', 'title', 'item_type', 'can_play', 'album_art_uri']
    # IMPORTANT. Keep this list, __init__ args and content in __init__ in sync
    required_fields = ['title', 'item_id', 'extended_id', 'uri', 'description',
                       'service_id']

    def __init__(self, title, item_id, extended_id, uri, description,
                 service_id, **kwargs):
        kwargs.update(title=title, item_id=item_id, extended_id=extended_id,
                      uri=uri, description=description, service_id=service_id)
        super(MSArtistTracklist, self).__init__(**kwargs)

    @property
    def uri(self):
        """The URI of the artist track list, derived from the item id."""
        # x-rincon-cpcontainer:100f006cartistpopsongsid_1566
        return 'x-rincon-cpcontainer:100f006c{0}'.format(self.item_id)
class MSArtist(MusicServiceItem):
    """An artist from a music service."""

    valid_fields = [
        'username', 'can_add_to_favorites', 'artist', 'title', 'album_art_uri',
        'item_type', 'id', 'service_id', 'description', 'can_cache'
    ]
    # Since MSArtist cannot produce didl_metadata, they are not strictly
    # required, but it makes sense to require them anyway, since they are the
    # fields that that describe the item
    # IMPORTANT. Keep this list, __init__ args and content in __init__ in sync
    required_fields = ['title', 'item_id', 'extended_id', 'service_id']

    def __init__(self, title, item_id, extended_id, service_id, **kwargs):
        kwargs.update(title=title, item_id=item_id,
                      extended_id=extended_id, service_id=service_id)
        super(MSArtist, self).__init__(**kwargs)
class MSFavorites(MusicServiceItem):
    """A music service favorites container."""

    valid_fields = ['id', 'item_type', 'title', 'can_play', 'can_cache',
                    'album_art_uri']
    # Since MSFavorites cannot produce didl_metadata, they are not strictly
    # required, but it makes sense to require them anyway, since they are the
    # fields that that describe the item
    # IMPORTANT. Keep this list, __init__ args and content in __init__ in sync
    required_fields = ['title', 'item_id', 'extended_id', 'service_id']

    def __init__(self, title, item_id, extended_id, service_id, **kwargs):
        kwargs.update(title=title, item_id=item_id,
                      extended_id=extended_id, service_id=service_id)
        super(MSFavorites, self).__init__(**kwargs)
class MSCollection(MusicServiceItem):
    """A music service collection container."""

    valid_fields = ['id', 'item_type', 'title', 'can_play', 'can_cache',
                    'album_art_uri']
    # Since MSCollection cannot produce didl_metadata, they are not strictly
    # required, but it makes sense to require them anyway, since they are the
    # fields that that describe the item
    # IMPORTANT. Keep this list, __init__ args and content in __init__ in sync
    required_fields = ['title', 'item_id', 'extended_id', 'service_id']

    def __init__(self, title, item_id, extended_id, service_id, **kwargs):
        kwargs.update(title=title, item_id=item_id,
                      extended_id=extended_id, service_id=service_id)
        super(MSCollection, self).__init__(**kwargs)
# Maps the text of an <itemType> tag to the MusicServiceItem subclass that
# get_ms_item instantiates for it.
MS_TYPE_TO_CLASS = {'artist': MSArtist, 'album': MSAlbum, 'track': MSTrack,
                    'albumList': MSAlbumList, 'favorites': MSFavorites,
                    'collection': MSCollection, 'playlist': MSPlaylist,
                    'artistTrackList': MSArtistTracklist}
| {
"content_hash": "56ab88138f5a3ba33be53afe7aa861b3",
"timestamp": "",
"source": "github",
"line_count": 543,
"max_line_length": 79,
"avg_line_length": 38.17495395948435,
"alnum_prop": 0.5832891118722563,
"repo_name": "intfrr/SoCo",
"id": "ef8e9839c76c7ad64610ce5c13c05ccf8f82d2ef",
"size": "20867",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "soco/ms_data_structures.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "168"
},
{
"name": "Makefile",
"bytes": "66"
},
{
"name": "Python",
"bytes": "462657"
},
{
"name": "Shell",
"bytes": "342"
}
],
"symlink_target": ""
} |
"""FR-specific Form helpers"""
from __future__ import unicode_literals
import re
from datetime import date
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import CharField, RegexField, Select
from django.utils.encoding import force_text
from django.utils.translation import ugettext_lazy as _
from localflavor.compat import EmptyValueCompatMixin
from localflavor.deprecation import DeprecatedPhoneNumberFormFieldMixin
from localflavor.generic.checksums import luhn
from .fr_department import DEPARTMENT_CHOICES_PER_REGION
from .fr_region import REGION_2016_CHOICES, REGION_CHOICES
# French National Identification Number (INSEE / social security number):
# 1 digit for gender/century, 2-digit birth year, 2-digit birth month (or
# special codes), 2-char department (2A/2B for Corsica), 3-digit commune,
# 3-digit per-person serial and a 2-digit control key.
nin_re = re.compile(
    r'^(?P<gender>[1278])(?P<year_of_birth>\d{2})(?P<month_of_birth>0[1-9]|1[0-2]|20|3[0-9]|4[0-2]|[5-9][0-9])'
    r'(?P<department_of_origin>\d{2}|2[AB])(?P<commune_of_origin>\d{3})(?P<person_unique_number>\d{3})'
    r'(?P<control_key>\d{2})$')
class FRZipCodeField(RegexField):
    """A form field validating a local French zip code.

    Accepts exactly five digits ('XXXXX').
    """
    default_error_messages = {
        'invalid': _('Enter a zip code in the format XXXXX.'),
    }

    def __init__(self, *args, **kwargs):
        kwargs.setdefault('label', _('Zip code'))
        kwargs.update(max_length=5, min_length=5)
        super(FRZipCodeField, self).__init__(r'^\d{5}$', *args, **kwargs)
class FRPhoneNumberField(EmptyValueCompatMixin, CharField, DeprecatedPhoneNumberFormFieldMixin):
    """
    Validate local French phone number (not international ones).

    The correct format is '0X XX XX XX XX'.
    '0X.XX.XX.XX.XX' and '0XXXXXXXXX' validate but are corrected to
    '0X XX XX XX XX'.
    """
    phone_digits_re = re.compile(r'^0\d(\s|\.)?(\d{2}(\s|\.)?){3}\d{2}$')
    default_error_messages = {
        'invalid': _('Phone numbers must be in 0X XX XX XX XX format.'),
    }

    def __init__(self, *args, **kwargs):
        kwargs.setdefault('label', _('Phone number'))
        kwargs['max_length'] = 14
        kwargs['min_length'] = 10
        super(FRPhoneNumberField, self).__init__(*args, **kwargs)

    def clean(self, value):
        """Strip dots/whitespace, validate, and reformat to '0X XX XX XX XX'.

        Returns the field's empty value for empty input; raises
        ValidationError on any other non-matching input.
        """
        value = super(FRPhoneNumberField, self).clean(value)
        if value in self.empty_values:
            return self.empty_value
        # BUG FIX: the pattern was the non-raw string '(\.|\s)'; '\s' is an
        # invalid escape sequence in a str literal (DeprecationWarning since
        # Python 3.6, a SyntaxError in future versions). Raw string keeps the
        # regex identical.
        value = re.sub(r'(\.|\s)', '', force_text(value))
        m = self.phone_digits_re.search(value)
        if m:
            return '%s %s %s %s %s' % (
                value[0:2],
                value[2:4],
                value[4:6],
                value[6:8],
                value[8:10]
            )
        raise ValidationError(self.error_messages['invalid'])
class FRDepartmentSelect(Select):
    """A Select widget that uses a list of FR departments as its choices."""

    def __init__(self, attrs=None):
        dep_choices = [
            (dep[0], '{0} - {1}'.format(dep[0], dep[1]))
            for dep in DEPARTMENT_CHOICES_PER_REGION
        ]
        super(FRDepartmentSelect, self).__init__(attrs, choices=dep_choices)
class FRRegionSelect(Select):
    """A Select widget that uses a list of FR Regions as its choices."""

    def __init__(self, attrs=None):
        reg_choices = [
            (reg[0], '{0} - {1}'.format(reg[0], reg[1]))
            for reg in REGION_CHOICES
        ]
        super(FRRegionSelect, self).__init__(attrs, choices=reg_choices)
class FRRegion2016Select(Select):
    """
    A Select widget that uses a list of France's New Regions as its choices.
    """

    def __init__(self, attrs=None):
        reg_choices = [
            (reg[0], '{0} - {1}'.format(reg[0], reg[1]))
            for reg in REGION_2016_CHOICES
        ]
        super(FRRegion2016Select, self).__init__(attrs, choices=reg_choices)
class FRDepartmentField(CharField):
    """A CharField rendered with a FRDepartmentSelect widget."""

    widget = FRDepartmentSelect

    def __init__(self, *args, **kwargs):
        if 'label' not in kwargs:
            kwargs['label'] = _('Select Department')
        super(FRDepartmentField, self).__init__(*args, **kwargs)
class FRRegionField(CharField):
    """A CharField rendered with a FRRegionSelect widget."""

    widget = FRRegionSelect

    def __init__(self, *args, **kwargs):
        if 'label' not in kwargs:
            kwargs['label'] = _('Select Region')
        super(FRRegionField, self).__init__(*args, **kwargs)
class FRNationalIdentificationNumber(EmptyValueCompatMixin, CharField):
    """
    Validates input as a French National Identification number.
    Validation of the Number, and checksum calculation is detailed at http://en.wikipedia.org/wiki/INSEE_code
    .. versionadded:: 1.1
    """
    default_error_messages = {
        'invalid': _('Enter a valid French National Identification number.'),
    }
    def clean(self, value):
        # Returns the compacted value on success; raises ValidationError on
        # any structural or checksum failure.
        super(FRNationalIdentificationNumber, self).clean(value)
        if value in self.empty_values:
            return self.empty_value
        # Accept numbers entered with spaces or dashes.
        value = value.replace(' ', '').replace('-', '')
        match = nin_re.match(value)
        if not match:
            raise ValidationError(self.error_messages['invalid'])
        # Extract all parts of social number
        gender = match.group('gender')
        year_of_birth = match.group('year_of_birth')
        month_of_birth = match.group('month_of_birth')
        department_of_origin = match.group('department_of_origin')
        commune_of_origin = match.group('commune_of_origin')
        person_unique_number = match.group('person_unique_number')
        control_key = int(match.group('control_key'))
        # Get current year
        # Only the last two digits of the current year are kept.
        current_year = int(str(date.today().year)[2:])
        commune_of_origin, department_of_origin = self._clean_department_and_commune(commune_of_origin, current_year,
                                                                                     department_of_origin,
                                                                                     year_of_birth)
        if person_unique_number == '000':
            raise ValidationError(self.error_messages['invalid'])
        if control_key > 97:
            raise ValidationError(self.error_messages['invalid'])
        # Corsican letters A/B count as 0 when computing the control number.
        control_number = int(gender + year_of_birth + month_of_birth +
                             department_of_origin.replace('A', '0').replace('B', '0') +
                             commune_of_origin + person_unique_number)
        # The control key is 97 minus the control number modulo 97.
        if (97 - control_number % 97) == control_key:
            return value
        else:
            raise ValidationError(self.error_messages['invalid'])
    def _clean_department_and_commune(self, commune_of_origin, current_year, department_of_origin, year_of_birth):
        # Validates the department/commune pair and returns them, possibly
        # rewritten for overseas departments; raises ValidationError otherwise.
        # Department number 98 is for Monaco
        if department_of_origin == '98':
            raise ValidationError(self.error_messages['invalid'])
        # Departments number 20, 2A and 2B represent Corsica
        if department_of_origin in ['20', '2A', '2B']:
            # For people born before 1976, Corsica number was 20
            # NOTE(review): current_year < year_of_birth appears to
            # disambiguate the two-digit birth century -- confirm intent.
            if current_year < int(year_of_birth) < 76 and department_of_origin != '20':
                raise ValidationError(self.error_messages['invalid'])
            # For people born from 1976, Corsica dep number is either 2A or 2B
            if (int(year_of_birth) > 75 and department_of_origin not in ['2A', '2B']):
                raise ValidationError(self.error_messages['invalid'])
        # Overseas department numbers starts with 97 and are 3 digits long
        if department_of_origin == '97':
            # Third department digit is borrowed from the commune field.
            department_of_origin += commune_of_origin[:1]
            if int(department_of_origin) not in range(971, 976):
                raise ValidationError(self.error_messages['invalid'])
            commune_of_origin = commune_of_origin[1:]
            if int(commune_of_origin) < 1 or int(commune_of_origin) > 90:
                raise ValidationError(self.error_messages['invalid'])
        elif int(commune_of_origin) < 1 or int(commune_of_origin) > 990:
            raise ValidationError(self.error_messages['invalid'])
        return commune_of_origin, department_of_origin
class FRSIRENENumberMixin(object):
    """Shared ``clean`` logic for SIREN and SIRET numbers (SIRENE register).

    Concrete subclasses supply ``r_valid`` (the format regex) and the
    ``invalid`` error message; this mixin normalizes the input and runs
    the Luhn checksum.
    """

    def clean(self, value):
        super(FRSIRENENumberMixin, self).clean(value)
        if value in self.empty_values:
            return self.empty_value
        # Accept numbers typed with the customary space/dash grouping.
        normalized = value.replace(' ', '').replace('-', '')
        matches_format = self.r_valid.match(normalized) is not None
        # `and` short-circuits, so the Luhn check only runs on
        # well-formed input (same evaluation order as an `or` of negations).
        if not (matches_format and luhn(normalized)):
            raise ValidationError(self.error_messages['invalid'])
        return normalized
class FRSIRENField(EmptyValueCompatMixin, FRSIRENENumberMixin, CharField):
    """
    Validates French SIREN numbers.

    SIREN stands for "Système d'identification du répertoire des entreprises";
    it is a nine-digit business identifier under the authority of the INSEE.
    See http://fr.wikipedia.org/wiki/Système_d'identification_du_répertoire_des_entreprises for more information.

    .. versionadded:: 1.1
    """
    r_valid = re.compile(r'^\d{9}$')
    default_error_messages = {
        'invalid': _('Enter a valid French SIREN number.'),
    }

    def prepare_value(self, value):
        # Display the stored digits re-grouped as "XXX XXX XXX".
        if value is None:
            return value
        digits = value.replace(' ', '').replace('-', '')
        return ' '.join((digits[:3], digits[3:6], digits[6:]))
class FRSIRETField(EmptyValueCompatMixin, FRSIRENENumberMixin, CharField):
    """
    Validates French SIRET numbers.

    SIRET stands for "Système d'identification du répertoire des établissements";
    it is a fourteen-digit identifier under the authority of the INSEE.
    See http://fr.wikipedia.org/wiki/Système_d'identification_du_répertoire_des_établissements for more information.

    .. versionadded:: 1.1
    """
    r_valid = re.compile(r'^\d{14}$')
    default_error_messages = {
        'invalid': _('Enter a valid French SIRET number.'),
    }

    def clean(self, value):
        # Strip grouping characters up front so the embedded-SIREN slice
        # below operates on bare digits.
        if value not in EMPTY_VALUES:
            value = value.replace(' ', '').replace('-', '')
        cleaned = super(FRSIRETField, self).clean(value)
        # The mixin Luhn-checked the full 14 digits; the leading nine
        # digits (the SIREN part) must pass Luhn on their own as well.
        if not luhn(cleaned[:9]):
            raise ValidationError(self.error_messages['invalid'])
        return cleaned

    def prepare_value(self, value):
        # Display the stored digits re-grouped as "XXX XXX XXX XXXXX".
        if value is None:
            return value
        digits = value.replace(' ', '').replace('-', '')
        return ' '.join((digits[:3], digits[3:6], digits[6:9], digits[9:]))
| {
"content_hash": "d2a91fd02d2ea4bd9f78068c7eaabb45",
"timestamp": "",
"source": "github",
"line_count": 290,
"max_line_length": 117,
"avg_line_length": 35.44827586206897,
"alnum_prop": 0.6035992217898832,
"repo_name": "jieter/django-localflavor",
"id": "82168f2a67365589a14e32709586e844dc80a1dc",
"size": "10314",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "localflavor/fr/forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "896597"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an optional ``region`` text column to ``customer.Company``."""

    # Must be applied after the migration that introduced Company.po_box.
    dependencies = [
        ('customer', '0005_company_po_box'),
    ]

    operations = [
        migrations.AddField(
            model_name='company',
            name='region',
            # NOTE(review): null=True on a CharField allows two "empty"
            # states (None and ''), which Django style discourages — but
            # changing an applied migration is unsafe; if desired, fix the
            # model and generate a follow-up migration instead.
            field=models.CharField(max_length=200, null=True),
        ),
    ]
| {
"content_hash": "440a4f77905ac9fd7bf9d2477acbf29a",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 62,
"avg_line_length": 21.444444444444443,
"alnum_prop": 0.5906735751295337,
"repo_name": "dgriff67/django-tech-test",
"id": "cea2344bdbb1b7162238911c7713f3399cd72d35",
"size": "459",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "customer/migrations/0006_company_region.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "2356"
},
{
"name": "Python",
"bytes": "26615"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.