| repo_name (string, 5–100 chars) | ref (string, 12–67 chars) | path (string, 4–244 chars) | copies (string, 1–8 chars) | content (string, 0–1.05M chars, nullable) |
|---|---|---|---|---|
aknutas/servers | refs/heads/master | nginx/lancache/build/docopt/examples/git/git_commit.py | 17 | """usage: git commit [options] [--] [<filepattern>...]
-h, --help
-q, --quiet suppress summary after successful commit
-v, --verbose show diff in commit message template
Commit message options
-F, --file <file> read message from file
--author <author> override author for commit
--date <date> override date for commit
-m, --message <message>
commit message
-c, --reedit-message <commit>
reuse and edit message from specified commit
-C, --reuse-message <commit>
reuse message from specified commit
--fixup <commit> use autosquash formatted message to fixup specified commit
--squash <commit> use autosquash formatted message to squash specified commit
--reset-author the commit is authored by me now
(used with -C-c/--amend)
-s, --signoff add Signed-off-by:
-t, --template <file>
use specified template file
-e, --edit force edit of commit
--cleanup <default> how to strip spaces and #comments from message
--status include status in commit message template
Commit contents options
-a, --all commit all changed files
-i, --include add specified files to index for commit
--interactive interactively add files
-o, --only commit only specified files
-n, --no-verify bypass pre-commit hook
--dry-run show what would be committed
--short show status concisely
--branch show branch information
--porcelain machine-readable output
-z, --null terminate entries with NUL
--amend amend previous commit
--no-post-rewrite bypass post-rewrite hook
-u, --untracked-files=<mode>
show untracked files, optional modes: all, normal, no.
[default: all]
"""
from docopt import docopt
if __name__ == '__main__':
print(docopt(__doc__))
|
paplorinc/intellij-community | refs/heads/master | python/testData/inspections/PyUnresolvedReferencesInspection/PrefixExpressionOnClassHavingSkeletons/numpy/core/numeric.py | 93 | from . import multiarray
__all__ = ['ndarray', 'array']
ndarray = multiarray.ndarray
array = multiarray.array |
PabloCastellano/libreborme | refs/heads/master | libreborme/conf/__init__.py | 12133432 | |
mitya57/django | refs/heads/master | tests/introspection/__init__.py | 12133432 | |
54lihaoxin/leetcode_python | refs/heads/master | src/PlusOne/solution.py | 1 | # Plus One
#
# Given a non-negative number represented as an array of digits, plus one to the number.
#
# The digits are stored such that the most significant digit is at the head of the list.
debug = True
debug = False
# from CommonClasses import * # hxl: comment out this line for submission
class Solution:
# @param digits, a list of integer digits
# @return a list of integer digits
def plusOne(self, digits):
overflow = 0
digits[-1] += 1
if digits[-1] == 10:
if len(digits) == 1:
return [1, 0]
else:
digits[-1] = 0
overflow = 1
else:
return digits
i = len(digits) - 2
while overflow > 0:
digits[i] += 1
if digits[i] == 10:
digits[i] = 0
overflow = 1
else:
overflow = 0
i -= 1
if i == -1 and overflow == 1: # hxl: add one more digit to the beginning
return [1] + digits
if overflow == 0:
return digits
|
NaturalGIS/QGIS | refs/heads/master | tests/src/python/test_qgsserver_wfs.py | 11 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsServer WFS.
From build dir, run: ctest -R PyQgsServerWFS -V
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'René-Luc Dhont'
__date__ = '19/09/2017'
__copyright__ = 'Copyright 2017, The QGIS Project'
import os
# Needed on Qt 5 so that the serialization of XML is consistent among all executions
os.environ['QT_HASH_SEED'] = '1'
import re
import urllib.request
import urllib.parse
import urllib.error
from qgis.server import QgsServerRequest
from qgis.testing import unittest
from qgis.PyQt.QtCore import QSize
from qgis.core import QgsVectorLayer
import osgeo.gdal # NOQA
from test_qgsserver import QgsServerTestBase
# Strip path and content length because path may vary
RE_STRIP_UNCHECKABLE = b'MAP=[^"]+|Content-Length: \d+|timeStamp="[^"]+"'
RE_ATTRIBUTES = b'[^>\s]+=[^>\s]+'
class TestQgsServerWFS(QgsServerTestBase):
"""QGIS Server WFS Tests"""
# Set to True in child classes to re-generate reference files for this class
regenerate_reference = False
def wfs_request_compare(self,
request, version='',
extra_query_string='',
reference_base_name=None,
project_file="test_project_wfs.qgs",
requestMethod=QgsServerRequest.GetMethod,
data=None):
project = self.testdata_path + project_file
assert os.path.exists(project), "Project file not found: " + project
query_string = '?MAP=%s&SERVICE=WFS&REQUEST=%s' % (
urllib.parse.quote(project), request)
if version:
query_string += '&VERSION=%s' % version
if extra_query_string:
query_string += '&%s' % extra_query_string
header, body = self._execute_request(
query_string, requestMethod=requestMethod, data=data)
self.assert_headers(header, body)
response = header + body
if reference_base_name is not None:
reference_name = reference_base_name
else:
reference_name = 'wfs_' + request.lower()
if version == '1.0.0':
reference_name += '_1_0_0'
reference_name += '.txt'
reference_path = self.testdata_path + reference_name
self.store_reference(reference_path, response)
f = open(reference_path, 'rb')
expected = f.read()
f.close()
response = re.sub(RE_STRIP_UNCHECKABLE, b'', response)
expected = re.sub(RE_STRIP_UNCHECKABLE, b'', expected)
self.assertXMLEqual(response, expected, msg="request %s failed.\n Query: %s" % (
query_string, request))
return header, body
def test_operation_not_supported(self):
qs = '?MAP=%s&SERVICE=WFS&VERSION=1.1.0&REQUEST=NotAValidRequest' % urllib.parse.quote(self.projectPath)
self._assert_status_code(501, qs)
def test_project_wfs(self):
"""Test some WFS request"""
for request in ('GetCapabilities', 'DescribeFeatureType'):
self.wfs_request_compare(request)
self.wfs_request_compare(request, '1.0.0')
def wfs_getfeature_compare(self, requestid, request):
project = self.testdata_path + "test_project_wfs.qgs"
assert os.path.exists(project), "Project file not found: " + project
query_string = '?MAP=%s&SERVICE=WFS&VERSION=1.0.0&REQUEST=%s' % (
urllib.parse.quote(project), request)
header, body = self._execute_request(query_string)
if requestid == 'hits':
body = re.sub(b'timeStamp="\d+-\d+-\d+T\d+:\d+:\d+"',
b'timeStamp="****-**-**T**:**:**"', body)
self.result_compare(
'wfs_getfeature_' + requestid + '.txt',
"request %s failed.\n Query: %s" % (
query_string,
request,
),
header, body
)
def test_getfeature(self):
tests = []
tests.append(('nobbox', 'GetFeature&TYPENAME=testlayer'))
tests.append(
('startindex2', 'GetFeature&TYPENAME=testlayer&STARTINDEX=2'))
tests.append(('limit2', 'GetFeature&TYPENAME=testlayer&MAXFEATURES=2'))
tests.append(
('start1_limit1', 'GetFeature&TYPENAME=testlayer&MAXFEATURES=1&STARTINDEX=1'))
tests.append(
('srsname', 'GetFeature&TYPENAME=testlayer&SRSNAME=EPSG:3857'))
tests.append(('sortby', 'GetFeature&TYPENAME=testlayer&SORTBY=id D'))
tests.append(('hits', 'GetFeature&TYPENAME=testlayer&RESULTTYPE=hits'))
for id, req in tests:
self.wfs_getfeature_compare(id, req)
def test_wfs_getcapabilities_100_url(self):
"""Check that URL in GetCapabilities response is complete"""
# empty url in project
project = os.path.join(
self.testdata_path, "test_project_without_urls.qgs")
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(project),
"SERVICE": "WFS",
"VERSION": "1.0.0",
"REQUEST": "GetCapabilities"
}.items())])
r, h = self._result(self._execute_request(qs))
for item in str(r).split("\\n"):
if "onlineResource" in item:
self.assertEqual("onlineResource=\"?" in item, True)
# url well defined in query string
project = os.path.join(
self.testdata_path, "test_project_without_urls.qgs")
qs = "https://www.qgis-server.org?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(project),
"SERVICE": "WFS",
"VERSION": "1.0.0",
"REQUEST": "GetCapabilities"
}.items())])
r, h = self._result(self._execute_request(qs))
for item in str(r).split("\\n"):
if "onlineResource" in item:
self.assertTrue(
"onlineResource=\"https://www.qgis-server.org?" in item, True)
# url well defined in project
project = os.path.join(
self.testdata_path, "test_project_with_urls.qgs")
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(project),
"SERVICE": "WFS",
"VERSION": "1.0.0",
"REQUEST": "GetCapabilities"
}.items())])
r, h = self._result(self._execute_request(qs))
for item in str(r).split("\\n"):
if "onlineResource" in item:
self.assertEqual(
"onlineResource=\"my_wfs_advertised_url\"" in item, True)
def result_compare(self, file_name, error_msg_header, header, body):
self.assert_headers(header, body)
response = header + body
reference_path = self.testdata_path + file_name
self.store_reference(reference_path, response)
f = open(reference_path, 'rb')
expected = f.read()
f.close()
response = re.sub(RE_STRIP_UNCHECKABLE, b'', response)
expected = re.sub(RE_STRIP_UNCHECKABLE, b'', expected)
self.assertXMLEqual(response, expected, msg="%s\n" %
(error_msg_header))
def wfs_getfeature_post_compare(self, requestid, request):
project = self.testdata_path + "test_project_wfs.qgs"
assert os.path.exists(project), "Project file not found: " + project
query_string = '?MAP={}'.format(urllib.parse.quote(project))
header, body = self._execute_request(
query_string, requestMethod=QgsServerRequest.PostMethod, data=request.encode('utf-8'))
self.result_compare(
'wfs_getfeature_{}.txt'.format(requestid),
"GetFeature in POST for '{}' failed.".format(requestid),
header, body,
)
def test_getfeature_post(self):
tests = []
template = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature service="WFS" version="1.0.0" {} xmlns:wfs="http://www.opengis.net/wfs" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.opengis.net/wfs http://schemas.opengis.net/wfs/1.1.0/wfs.xsd">
<wfs:Query typeName="testlayer" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc">
<ogc:BBOX>
<ogc:PropertyName>geometry</ogc:PropertyName>
<gml:Envelope xmlns:gml="http://www.opengis.net/gml">
<gml:lowerCorner>8 44</gml:lowerCorner>
<gml:upperCorner>9 45</gml:upperCorner>
</gml:Envelope>
</ogc:BBOX>
</ogc:Filter>
</wfs:Query>
</wfs:GetFeature>
"""
tests.append(('nobbox_post', template.format("")))
tests.append(('startindex2_post', template.format('startIndex="2"')))
tests.append(('limit2_post', template.format('maxFeatures="2"')))
tests.append(('start1_limit1_post', template.format(
'startIndex="1" maxFeatures="1"')))
srsTemplate = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature service="WFS" version="1.0.0" {} xmlns:wfs="http://www.opengis.net/wfs" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.opengis.net/wfs http://schemas.opengis.net/wfs/1.1.0/wfs.xsd">
<wfs:Query typeName="testlayer" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc">
<ogc:BBOX>
<ogc:PropertyName>geometry</ogc:PropertyName>
<gml:Envelope xmlns:gml="http://www.opengis.net/gml">
<gml:lowerCorner>890555.92634619 5465442.18332275</gml:lowerCorner>
<gml:upperCorner>1001875.41713946 5621521.48619207</gml:upperCorner>
</gml:Envelope>
</ogc:BBOX>
</ogc:Filter>
</wfs:Query>
</wfs:GetFeature>
"""
tests.append(('srsname_post', srsTemplate.format("")))
srsTwoLayersTemplate = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature service="WFS" version="1.0.0" {} xmlns:wfs="http://www.opengis.net/wfs" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.opengis.net/wfs http://schemas.opengis.net/wfs/1.1.0/wfs.xsd">
<wfs:Query typeName="testlayer" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc">
<ogc:BBOX>
<ogc:PropertyName>geometry</ogc:PropertyName>
<gml:Envelope xmlns:gml="http://www.opengis.net/gml">
<gml:lowerCorner>890555.92634619 5465442.18332275</gml:lowerCorner>
<gml:upperCorner>1001875.41713946 5621521.48619207</gml:upperCorner>
</gml:Envelope>
</ogc:BBOX>
</ogc:Filter>
</wfs:Query>
<wfs:Query typeName="testlayer" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc">
<ogc:BBOX>
<ogc:PropertyName>geometry</ogc:PropertyName>
<gml:Envelope xmlns:gml="http://www.opengis.net/gml">
<gml:lowerCorner>890555.92634619 5465442.18332275</gml:lowerCorner>
<gml:upperCorner>1001875.41713946 5621521.48619207</gml:upperCorner>
</gml:Envelope>
</ogc:BBOX>
</ogc:Filter>
</wfs:Query>
</wfs:GetFeature>
"""
tests.append(('srs_two_layers_post', srsTwoLayersTemplate.format("")))
sortTemplate = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature service="WFS" version="1.0.0" {} xmlns:wfs="http://www.opengis.net/wfs" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.opengis.net/wfs http://schemas.opengis.net/wfs/1.1.0/wfs.xsd">
<wfs:Query typeName="testlayer" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc">
<ogc:BBOX>
<ogc:PropertyName>geometry</ogc:PropertyName>
<gml:Envelope xmlns:gml="http://www.opengis.net/gml">
<gml:lowerCorner>8 44</gml:lowerCorner>
<gml:upperCorner>9 45</gml:upperCorner>
</gml:Envelope>
</ogc:BBOX>
</ogc:Filter>
<ogc:SortBy>
<ogc:SortProperty>
<ogc:PropertyName>id</ogc:PropertyName>
<ogc:SortOrder>DESC</ogc:SortOrder>
</ogc:SortProperty>
</ogc:SortBy>
</wfs:Query>
</wfs:GetFeature>
"""
tests.append(('sortby_post', sortTemplate.format("")))
andTemplate = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature service="WFS" version="1.0.0" {} xmlns:wfs="http://www.opengis.net/wfs" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.opengis.net/wfs http://schemas.opengis.net/wfs/1.1.0/wfs.xsd">
<wfs:Query typeName="testlayer" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc">
<ogc:And>
<ogc:PropertyIsGreaterThan>
<ogc:PropertyName>id</ogc:PropertyName>
<ogc:Literal>1</ogc:Literal>
</ogc:PropertyIsGreaterThan>
<ogc:PropertyIsLessThan>
<ogc:PropertyName>id</ogc:PropertyName>
<ogc:Literal>3</ogc:Literal>
</ogc:PropertyIsLessThan>
</ogc:And>
</ogc:Filter>
</wfs:Query>
</wfs:GetFeature>
"""
tests.append(('and_post', andTemplate.format("")))
andBboxTemplate = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature service="WFS" version="1.0.0" {} xmlns:wfs="http://www.opengis.net/wfs" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.opengis.net/wfs http://schemas.opengis.net/wfs/1.1.0/wfs.xsd">
<wfs:Query typeName="testlayer" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc">
<ogc:And>
<ogc:BBOX>
<ogc:PropertyName>geometry</ogc:PropertyName>
<gml:Envelope xmlns:gml="http://www.opengis.net/gml">
<gml:lowerCorner>890555.92634619 5465442.18332275</gml:lowerCorner>
<gml:upperCorner>1001875.41713946 5621521.48619207</gml:upperCorner>
</gml:Envelope>
</ogc:BBOX>
<ogc:PropertyIsGreaterThan>
<ogc:PropertyName>id</ogc:PropertyName>
<ogc:Literal>1</ogc:Literal>
</ogc:PropertyIsGreaterThan>
<ogc:PropertyIsLessThan>
<ogc:PropertyName>id</ogc:PropertyName>
<ogc:Literal>3</ogc:Literal>
</ogc:PropertyIsLessThan>
</ogc:And>
</ogc:Filter>
</wfs:Query>
</wfs:GetFeature>
"""
tests.append(('bbox_inside_and_post', andBboxTemplate.format("")))
# With namespace
template = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature service="WFS" version="1.0.0" {} xmlns:wfs="http://www.opengis.net/wfs" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.opengis.net/wfs http://schemas.opengis.net/wfs/1.1.0/wfs.xsd">
<wfs:Query typeName="feature:testlayer" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc">
<ogc:BBOX>
<ogc:PropertyName>geometry</ogc:PropertyName>
<gml:Envelope xmlns:gml="http://www.opengis.net/gml">
<gml:lowerCorner>8 44</gml:lowerCorner>
<gml:upperCorner>9 45</gml:upperCorner>
</gml:Envelope>
</ogc:BBOX>
</ogc:Filter>
</wfs:Query>
</wfs:GetFeature>
"""
tests.append(('nobbox_post', template.format("")))
template = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature service="WFS" version="1.0.0" {} xmlns:wfs="http://www.opengis.net/wfs" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.opengis.net/wfs http://schemas.opengis.net/wfs/1.1.0/wfs.xsd">
<wfs:Query typeName="testlayer" xmlns="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc">
<ogc:BBOX>
<ogc:PropertyName>geometry</ogc:PropertyName>
<gml:Envelope xmlns:gml="http://www.opengis.net/gml">
<gml:lowerCorner>8 44</gml:lowerCorner>
<gml:upperCorner>9 45</gml:upperCorner>
</gml:Envelope>
</ogc:BBOX>
</ogc:Filter>
</wfs:Query>
</wfs:GetFeature>
"""
tests.append(('nobbox_post', template.format("")))
for id, req in tests:
self.wfs_getfeature_post_compare(id, req)
def test_getFeatureBBOX(self):
"""Test with (1.1.0) and without (1.0.0) CRS"""
# Tests without CRS
self.wfs_request_compare(
"GetFeature", '1.0.0', "TYPENAME=testlayer&RESULTTYPE=hits&BBOX=8.20347,44.901471,8.2035354,44.901493", 'wfs_getFeature_1_0_0_bbox_1_feature')
self.wfs_request_compare(
"GetFeature", '1.0.0', "TYPENAME=testlayer&RESULTTYPE=hits&BBOX=8.203127,44.9012765,8.204138,44.901632", 'wfs_getFeature_1_0_0_bbox_3_feature')
# Tests with CRS
self.wfs_request_compare(
"GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&RESULTTYPE=hits&BBOX=8.20347,44.901471,8.2035354,44.901493,EPSG:4326", 'wfs_getFeature_1_0_0_epsgbbox_1_feature')
self.wfs_request_compare(
"GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&RESULTTYPE=hits&BBOX=8.203127,44.9012765,8.204138,44.901632,EPSG:4326", 'wfs_getFeature_1_0_0_epsgbbox_3_feature')
self.wfs_request_compare(
"GetFeature", '1.1.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&RESULTTYPE=hits&BBOX=8.20347,44.901471,8.2035354,44.901493,EPSG:4326", 'wfs_getFeature_1_1_0_epsgbbox_1_feature')
self.wfs_request_compare(
"GetFeature", '1.1.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&RESULTTYPE=hits&BBOX=8.203127,44.9012765,8.204138,44.901632,EPSG:4326", 'wfs_getFeature_1_1_0_epsgbbox_3_feature')
self.wfs_request_compare("GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&RESULTTYPE=hits&BBOX=913144,5605992,913303,5606048,EPSG:3857",
'wfs_getFeature_1_0_0_epsgbbox_3_feature_3857')
self.wfs_request_compare("GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&RESULTTYPE=hits&BBOX=913206,5606024,913213,5606026,EPSG:3857",
'wfs_getFeature_1_0_0_epsgbbox_1_feature_3857')
self.wfs_request_compare("GetFeature", '1.1.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&RESULTTYPE=hits&BBOX=913144,5605992,913303,5606048,EPSG:3857",
'wfs_getFeature_1_1_0_epsgbbox_3_feature_3857')
self.wfs_request_compare("GetFeature", '1.1.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&RESULTTYPE=hits&BBOX=913206,5606024,913213,5606026,EPSG:3857",
'wfs_getFeature_1_1_0_epsgbbox_1_feature_3857')
self.wfs_request_compare("GetFeature", '1.0.0', "SRSNAME=EPSG:3857&TYPENAME=testlayer&RESULTTYPE=hits&BBOX=913144,5605992,913303,5606048,EPSG:3857",
'wfs_getFeature_1_0_0_epsgbbox_3_feature_3857')
self.wfs_request_compare("GetFeature", '1.0.0', "SRSNAME=EPSG:3857&TYPENAME=testlayer&RESULTTYPE=hits&BBOX=913206,5606024,913213,5606026,EPSG:3857",
'wfs_getFeature_1_0_0_epsgbbox_1_feature_3857')
self.wfs_request_compare("GetFeature", '1.1.0', "SRSNAME=EPSG:3857&TYPENAME=testlayer&RESULTTYPE=hits&BBOX=913144,5605992,913303,5606048,EPSG:3857",
'wfs_getFeature_1_1_0_epsgbbox_3_feature_3857')
self.wfs_request_compare("GetFeature", '1.1.0', "SRSNAME=EPSG:3857&TYPENAME=testlayer&RESULTTYPE=hits&BBOX=913206,5606024,913213,5606026,EPSG:3857",
'wfs_getFeature_1_1_0_epsgbbox_1_feature_3857')
def test_getFeatureFeatureId(self):
"""Test GetFeature with featureid"""
self.wfs_request_compare(
"GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&FEATUREID=testlayer.0", 'wfs_getFeature_1_0_0_featureid_0')
def test_getFeature_EXP_FILTER_regression_20927(self):
"""Test expressions with EXP_FILTER"""
self.wfs_request_compare(
"GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&FEATUREID=testlayer.0&EXP_FILTER=\"name\"='one'", 'wfs_getFeature_1_0_0_EXP_FILTER_FID_one')
# Note that FEATUREID takes precedence over EXP_FILTER and the filter is completely ignored when FEATUREID is set
self.wfs_request_compare(
"GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&FEATUREID=testlayer.0&EXP_FILTER=\"name\"='two'", 'wfs_getFeature_1_0_0_EXP_FILTER_FID_one')
self.wfs_request_compare(
"GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&EXP_FILTER=\"name\"='two'", 'wfs_getFeature_1_0_0_EXP_FILTER_two')
self.wfs_request_compare(
"GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&EXP_FILTER=\"name\"=concat('tw', 'o')", 'wfs_getFeature_1_0_0_EXP_FILTER_two')
# Syntax ok but function does not exist
self.wfs_request_compare("GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&EXP_FILTER=\"name\"=invalid_expression('tw', 'o')",
'wfs_getFeature_1_0_0_EXP_FILTER_invalid_expression')
# Syntax error in exp
self.wfs_request_compare("GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&EXP_FILTER=\"name\"=concat('tw, 'o')",
'wfs_getFeature_1_0_0_EXP_FILTER_syntax_error')
# BBOX gml expressions
self.wfs_request_compare("GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&EXP_FILTER=intersects($geometry, geom_from_gml('<gml:Box> <gml:coordinates cs=\",\" ts=\" \">8.20344750430995617,44.9013881888184514 8.20347909100379269,44.90140004005827024</gml:coordinates></gml:Box>'))", 'wfs_getFeature_1_0_0_EXP_FILTER_gml_bbox_three')
self.wfs_request_compare("GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&EXP_FILTER=intersects($geometry, geom_from_gml('<gml:Box> <gml:coordinates cs=\",\" ts=\" \">8.20348458304175665,44.90147459621791626 8.20351616973559317,44.9014864474577351</gml:coordinates></gml:Box>'))", 'wfs_getFeature_1_0_0_EXP_FILTER_gml_bbox_one')
def test_describeFeatureType(self):
"""Test DescribeFeatureType with TYPENAME filters"""
project_file = "test_project_wms_grouped_layers.qgs"
self.wfs_request_compare("DescribeFeatureType", '1.0.0', "TYPENAME=as_areas&",
'wfs_describeFeatureType_1_0_0_typename_as_areas', project_file=project_file)
self.wfs_request_compare("DescribeFeatureType", '1.1.0', "TYPENAME=as_areas&",
'wfs_describeFeatureType_1_1_0_typename_as_areas', project_file=project_file)
self.wfs_request_compare("DescribeFeatureType", '1.0.0', "",
'wfs_describeFeatureType_1_0_0_typename_empty', project_file=project_file)
self.wfs_request_compare("DescribeFeatureType", '1.1.0', "",
'wfs_describeFeatureType_1_1_0_typename_empty', project_file=project_file)
self.wfs_request_compare("DescribeFeatureType", '1.0.0', "TYPENAME=does_not_exist&",
'wfs_describeFeatureType_1_0_0_typename_wrong', project_file=project_file)
self.wfs_request_compare("DescribeFeatureType", '1.1.0', "TYPENAME=does_not_exist&",
'wfs_describeFeatureType_1_1_0_typename_wrong', project_file=project_file)
def test_describeFeatureTypeVirtualFields(self):
"""Test DescribeFeatureType with virtual fields: bug GH-29767"""
project_file = "bug_gh29767_double_vfield.qgs"
self.wfs_request_compare("DescribeFeatureType", '1.1.0', "",
'wfs_describeFeatureType_1_1_0_virtual_fields', project_file=project_file)
def test_getFeatureFeature_0_nulls(self):
"""Test that 0 and null in integer columns are reported correctly"""
# Test transactions with 0 and nulls
post_data = """<?xml version="1.0" ?>
<wfs:Transaction service="WFS" version="{version}"
xmlns:ogc="http://www.opengis.net/ogc"
xmlns:wfs="http://www.opengis.net/wfs"
xmlns:gml="http://www.opengis.net/gml">
<wfs:Update typeName="cdb_lines">
<wfs:Property>
<wfs:Name>{field}</wfs:Name>
<wfs:Value>{value}</wfs:Value>
</wfs:Property>
<fes:Filter>
<fes:FeatureId fid="cdb_lines.22"/>
</fes:Filter>
</wfs:Update>
</wfs:Transaction>
"""
def _round_trip(value, field, version='1.1.0'):
"""Set a value on fid 22 and field and check it back"""
encoded_data = post_data.format(field=field, value=value, version=version).encode('utf8')
# Strip the field if NULL
if value is None:
encoded_data = encoded_data.replace(b'<wfs:Value>None</wfs:Value>', b'')
header, body = self._execute_request("?MAP=%s&SERVICE=WFS&VERSION=%s" % (
self.testdata_path + 'test_project_wms_grouped_layers.qgs', version), QgsServerRequest.PostMethod, encoded_data)
if version == '1.0.0':
self.assertTrue(b'<SUCCESS/>' in body, body)
else:
self.assertTrue(b'<TotalUpdated>1</TotalUpdated>' in body, body)
header, body = self._execute_request("?MAP=%s&SERVICE=WFS&REQUEST=GetFeature&TYPENAME=cdb_lines&FEATUREID=cdb_lines.22" % (
self.testdata_path + 'test_project_wms_grouped_layers.qgs'))
if value is not None:
xml_value = '<qgs:{0}>{1}</qgs:{0}>'.format(field, value).encode('utf8')
self.assertTrue(xml_value in body, "%s not found in body" % xml_value)
else:
xml_value = '<qgs:{0}>'.format(field).encode('utf8')
self.assertFalse(xml_value in body)
# Check the backend
vl = QgsVectorLayer(
self.testdata_path + 'test_project_wms_grouped_layers.gpkg|layername=cdb_lines', 'vl', 'ogr')
self.assertTrue(vl.isValid())
self.assertEqual(
str(vl.getFeature(22)[field]), value if value is not None else 'NULL')
for version in ('1.0.0', '1.1.0'):
_round_trip('0', 'id_long', version)
_round_trip('12345', 'id_long', version)
_round_trip('0', 'id', version)
_round_trip('12345', 'id', version)
_round_trip(None, 'id', version)
_round_trip(None, 'id_long', version)
# "name" is NOT NULL: try to set it to empty string
_round_trip('', 'name', version)
# Then NULL
data = post_data.format(field='name', value='', version=version).encode('utf8')
encoded_data = data.replace(b'<wfs:Value></wfs:Value>', b'')
header, body = self._execute_request("?MAP=%s&SERVICE=WFS" % (
self.testdata_path + 'test_project_wms_grouped_layers.qgs'), QgsServerRequest.PostMethod, encoded_data)
if version == '1.0.0':
self.assertTrue(b'<ERROR/>' in body, body)
else:
self.assertTrue(b'<TotalUpdated>0</TotalUpdated>' in body)
self.assertTrue(b'<Message>NOT NULL constraint error on layer \'cdb_lines\', field \'name\'</Message>' in body, body)
def test_describeFeatureTypeGeometryless(self):
"""Test DescribeFeatureType with geometryless tables - bug GH-30381"""
project_file = "test_project_geometryless_gh30381.qgs"
self.wfs_request_compare("DescribeFeatureType", '1.1.0',
reference_base_name='wfs_describeFeatureType_1_1_0_geometryless',
project_file=project_file)
def test_getFeatureFeatureIdJson(self):
"""Test GetFeature with featureid JSON format and various content types"""
for ct in ('GeoJSON', 'application/vnd.geo+json', 'application/json', 'application/geo+json'):
self.wfs_request_compare(
"GetFeature",
'1.0.0',
("OUTPUTFORMAT=%s" % ct)
+ "&SRSNAME=EPSG:4326&TYPENAME=testlayer&FEATUREID=testlayer.0",
'wfs_getFeature_1_0_0_featureid_0_json')
if __name__ == '__main__':
unittest.main()
|
Varbin/EEH | refs/heads/master | testing/test_functional_config.py | 1 | from configparser import ConfigParser
from EEHlib.config import config
def test_config_configparser():
assert isinstance(config, ConfigParser)
if __name__ == "__main__":
import __main__
for i in dir(__main__):
if i.startswith("test_"):
print("---", i)
eval(i).__call__()
|
gammasoft71/Switch | refs/heads/master | 3rdparty/googletest/googletest/test/gtest_color_test.py | 3259 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that Google Test correctly determines whether to use colors."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
IS_WINDOWS = os.name = 'nt'
COLOR_ENV_VAR = 'GTEST_COLOR'
COLOR_FLAG = 'gtest_color'
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_color_test_')
def SetEnvVar(env_var, value):
"""Sets the env variable to 'value'; unsets it when 'value' is None."""
if value is not None:
os.environ[env_var] = value
elif env_var in os.environ:
del os.environ[env_var]
def UsesColor(term, color_env_var, color_flag):
"""Runs gtest_color_test_ and returns its exit code."""
SetEnvVar('TERM', term)
SetEnvVar(COLOR_ENV_VAR, color_env_var)
if color_flag is None:
args = []
else:
args = ['--%s=%s' % (COLOR_FLAG, color_flag)]
p = gtest_test_utils.Subprocess([COMMAND] + args)
return not p.exited or p.exit_code
class GTestColorTest(gtest_test_utils.TestCase):
  """Exercises Google Test's decision logic for colorizing test output."""

  def testNoEnvVarNoFlag(self):
    """Tests the case when there's neither GTEST_COLOR nor --gtest_color."""
    if not IS_WINDOWS:
      # Terminals with no (known) color support must stay monochrome.
      for term in ('dumb', 'emacs', 'xterm-mono', 'unknown', None):
        self.assert_(not UsesColor(term, None, None))
    # Color-capable terminals get colorized output by default.
    for term in ('linux', 'cygwin', 'xterm', 'xterm-color', 'xterm-256color'):
      self.assert_(UsesColor(term, None, None))

  def testFlagOnly(self):
    """Tests the case when there's --gtest_color but not GTEST_COLOR."""
    # 'no' disables color unconditionally.
    self.assert_(not UsesColor('dumb', None, 'no'))
    self.assert_(not UsesColor('xterm-color', None, 'no'))
    if not IS_WINDOWS:
      # 'auto' defers to the terminal's capabilities.
      self.assert_(not UsesColor('emacs', None, 'auto'))
      self.assert_(UsesColor('xterm', None, 'auto'))
    # 'yes' forces color even on a dumb terminal.
    self.assert_(UsesColor('dumb', None, 'yes'))
    self.assert_(UsesColor('xterm', None, 'yes'))

  def testEnvVarOnly(self):
    """Tests the case when there's GTEST_COLOR but not --gtest_color."""
    self.assert_(not UsesColor('dumb', 'no', None))
    self.assert_(not UsesColor('xterm-color', 'no', None))
    if not IS_WINDOWS:
      self.assert_(not UsesColor('dumb', 'auto', None))
      self.assert_(UsesColor('xterm-color', 'auto', None))
    self.assert_(UsesColor('dumb', 'yes', None))
    self.assert_(UsesColor('xterm-color', 'yes', None))

  def testEnvVarAndFlag(self):
    """Tests the case when there are both GTEST_COLOR and --gtest_color."""
    # The command-line flag always wins over the environment variable.
    self.assert_(not UsesColor('xterm-color', 'no', 'no'))
    self.assert_(UsesColor('dumb', 'no', 'yes'))
    self.assert_(UsesColor('xterm-color', 'no', 'auto'))

  def testAliasesOfYesAndNo(self):
    """Tests using aliases in specifying --gtest_color."""
    for yes_alias in ('true', 'YES', 'T', '1'):
      self.assert_(UsesColor('dumb', None, yes_alias))
    # Anything that is not a recognized 'yes' alias counts as 'no'.
    for no_alias in ('f', 'false', '0', 'unknown'):
      self.assert_(not UsesColor('xterm', None, no_alias))
# Run the tests via Google Test's Python test-runner helper when this file
# is executed directly.
if __name__ == '__main__':
  gtest_test_utils.Main()
|
jeffrey4l/nova | refs/heads/master | nova/scheduler/filter_scheduler.py | 18 | # Copyright (c) 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
The FilterScheduler is for creating instances locally.
You can customize this scheduler by specifying your own Host Filters and
Weighing Functions.
"""
import random
from oslo_config import cfg
from oslo_log import log as logging
from six.moves import range
from nova import exception
from nova.i18n import _
from nova import rpc
from nova.scheduler import driver
from nova.scheduler import scheduler_options
CONF = cfg.CONF
LOG = logging.getLogger(__name__)

# Tunable controlling how much randomization is applied when picking among
# the best-weighed hosts (consumed in FilterScheduler._schedule).
filter_scheduler_opts = [
    cfg.IntOpt('scheduler_host_subset_size',
               default=1,
               help='New instances will be scheduled on a host chosen '
                    'randomly from a subset of the N best hosts. This '
                    'property defines the subset size that a host is '
                    'chosen from. A value of 1 chooses the '
                    'first host returned by the weighing functions. '
                    'This value must be at least 1. Any value less than 1 '
                    'will be ignored, and 1 will be used instead')
]

CONF.register_opts(filter_scheduler_opts)
class FilterScheduler(driver.Scheduler):
    """Scheduler that can be used for filtering and weighing."""

    def __init__(self, *args, **kwargs):
        super(FilterScheduler, self).__init__(*args, **kwargs)
        self.options = scheduler_options.SchedulerOptions()
        self.notifier = rpc.get_notifier('scheduler')

    def select_destinations(self, context, request_spec, filter_properties):
        """Selects a filtered set of hosts and nodes.

        :param context: the request context
        :param request_spec: spec describing the instances to build; must
            contain 'num_instances'
        :param filter_properties: dict of extra properties consumed by the
            host filters and weighers
        :returns: a list of dicts with 'host', 'nodename' and 'limits' keys,
            one per requested instance
        :raises exception.NoValidHost: if fewer hosts than requested
            instances could be scheduled
        """
        self.notifier.info(context, 'scheduler.select_destinations.start',
                           dict(request_spec=request_spec))

        num_instances = request_spec['num_instances']
        selected_hosts = self._schedule(context, request_spec,
                                        filter_properties)

        # Couldn't fulfill the request_spec
        if len(selected_hosts) < num_instances:
            # NOTE(Rui Chen): If multiple creates failed, set the updated time
            # of selected HostState to None so that these HostStates are
            # refreshed according to database in next schedule, and release
            # the resource consumed by instance in the process of selecting
            # host.
            for host in selected_hosts:
                host.obj.updated = None

            # Log the details but don't put those into the reason since
            # we don't want to give away too much information about our
            # actual environment.
            LOG.debug('There are %(hosts)d hosts available but '
                      '%(num_instances)d instances requested to build.',
                      {'hosts': len(selected_hosts),
                       'num_instances': num_instances})

            reason = _('There are not enough hosts available.')
            raise exception.NoValidHost(reason=reason)

        dests = [dict(host=host.obj.host, nodename=host.obj.nodename,
                      limits=host.obj.limits) for host in selected_hosts]

        self.notifier.info(context, 'scheduler.select_destinations.end',
                           dict(request_spec=request_spec))
        return dests

    def _get_configuration_options(self):
        """Fetch options dictionary. Broken out for testing."""
        return self.options.get_configuration()

    def populate_filter_properties(self, request_spec, filter_properties):
        """Stuff things into filter_properties. Can be overridden in a
        subclass to add more data.
        """
        # Save useful information from the request spec for filter processing:
        project_id = request_spec['instance_properties']['project_id']
        os_type = request_spec['instance_properties']['os_type']
        filter_properties['project_id'] = project_id
        filter_properties['os_type'] = os_type

    def _schedule(self, context, request_spec, filter_properties):
        """Returns a list of hosts that meet the required specs,
        ordered by their fitness.

        :returns: a (possibly short) list of weighed host objects; the
            caller is responsible for checking it is long enough.
        """
        elevated = context.elevated()
        instance_properties = request_spec['instance_properties']
        # dict.get() already defaults to None; the explicit ', None' was
        # redundant.
        instance_type = request_spec.get("instance_type")

        update_group_hosts = filter_properties.get('group_updated', False)

        config_options = self._get_configuration_options()

        filter_properties.update({'context': context,
                                  'request_spec': request_spec,
                                  'config_options': config_options,
                                  'instance_type': instance_type})

        self.populate_filter_properties(request_spec,
                                        filter_properties)

        # Find our local list of acceptable hosts by repeatedly
        # filtering and weighing our options. Each time we choose a
        # host, we virtually consume resources on it so subsequent
        # selections can adjust accordingly.

        # Note: remember, we are using an iterator here. So only
        # traverse this list once. This can bite you if the hosts
        # are being scanned in a filter or weighing function.
        hosts = self._get_all_host_states(elevated)

        selected_hosts = []
        num_instances = request_spec.get('num_instances', 1)
        for num in range(num_instances):
            # Filter local hosts based on requirements ...
            hosts = self.host_manager.get_filtered_hosts(hosts,
                    filter_properties, index=num)
            if not hosts:
                # Can't get any more locally.
                break

            LOG.debug("Filtered %(hosts)s", {'hosts': hosts})

            weighed_hosts = self.host_manager.get_weighed_hosts(hosts,
                    filter_properties)

            LOG.debug("Weighed %(hosts)s", {'hosts': weighed_hosts})

            # Clamp the configured subset size into [1, len(weighed_hosts)]
            # before drawing a random host from the best candidates.
            scheduler_host_subset_size = CONF.scheduler_host_subset_size
            if scheduler_host_subset_size > len(weighed_hosts):
                scheduler_host_subset_size = len(weighed_hosts)
            if scheduler_host_subset_size < 1:
                scheduler_host_subset_size = 1

            chosen_host = random.choice(
                weighed_hosts[0:scheduler_host_subset_size])
            LOG.debug("Selected host: %(host)s", {'host': chosen_host})
            selected_hosts.append(chosen_host)

            # Now consume the resources so the filter/weights
            # will change for the next instance.
            chosen_host.obj.consume_from_instance(instance_properties)
            # Truthiness test instead of the 'is True' anti-idiom (PEP 8);
            # the conductor stores a plain boolean here.
            if update_group_hosts:
                # NOTE(sbauza): Group details are serialized into a list now
                # that they are populated by the conductor, we need to
                # deserialize them
                if isinstance(filter_properties['group_hosts'], list):
                    filter_properties['group_hosts'] = set(
                        filter_properties['group_hosts'])
                filter_properties['group_hosts'].add(chosen_host.obj.host)
        return selected_hosts

    def _get_all_host_states(self, context):
        """Template method, so a subclass can implement caching."""
        return self.host_manager.get_all_host_states(context)
|
mano3m/CouchPotatoServer | refs/heads/develop_mano3m | libs/tornado/__init__.py | 8 | #!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The Tornado web server and tools."""
from __future__ import absolute_import, division, print_function, with_statement
# version is a human-readable version number.
# version_info is a four-tuple for programmatic comparison. The first
# three numbers are the components of the version number. The fourth
# is zero for an official release, positive for a development branch,
# or negative for a release candidate or beta (after the base version
# number has been incremented)
version = "3.2.dev2"
version_info = (3, 2, 0, -99)
|
jennyzhang0215/incubator-mxnet | refs/heads/master | tools/rec2idx.py | 43 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import os
import time
import ctypes
from mxnet.base import _LIB
from mxnet.base import check_call
import mxnet as mx
import argparse
class IndexCreator(mx.recordio.MXRecordIO):
    """Reads `RecordIO` data format, and creates index file
    that enables random access.

    Example usage:
    ----------
    >>> creator = IndexCreator('data/test.rec','data/test.idx')
    >>> creator.create_index()
    >>> creator.close()
    >>> !ls data/
    test.rec test.idx

    Parameters
    ----------
    uri : str
        Path to the record file.
    idx_path : str
        Path to the index file, that will be created/overwritten.
    key_type : type
        Data type for keys (optional, default = int).
    """
    def __init__(self, uri, idx_path, key_type=int):
        self.key_type = key_type
        # File object for the index output; opened lazily in open().
        self.fidx = None
        self.idx_path = idx_path
        # Open the record file in read mode (also calls self.open()).
        super(IndexCreator, self).__init__(uri, 'r')

    def open(self):
        """Opens the record file for reading and the index file for writing."""
        super(IndexCreator, self).open()
        self.fidx = open(self.idx_path, 'w')

    def close(self):
        """Closes the record and index files."""
        if not self.is_open:
            return
        super(IndexCreator, self).close()
        self.fidx.close()

    def tell(self):
        """Returns the current position of read head.
        """
        pos = ctypes.c_size_t()
        check_call(_LIB.MXRecordIOReaderTell(self.handle, ctypes.byref(pos)))
        return pos.value

    def create_index(self):
        """Creates the index file from open record file

        Writes one tab-separated ``key\\toffset`` line per record, logging
        progress every 1000 records.
        """
        self.reset()
        counter = 0
        pre_time = time.time()
        while True:
            if counter % 1000 == 0:
                cur_time = time.time()
                print('time:', cur_time - pre_time, ' count:', counter)
            # Record the byte offset *before* reading, so the index entry
            # points at the start of this record.
            pos = self.tell()
            cont = self.read()
            if cont is None:
                # End of the record file.
                break
            key = self.key_type(counter)
            self.fidx.write('%s\t%d\n'%(str(key), pos))
            counter = counter + 1
def parse_args(argv=None):
    """Parse command-line arguments for the index-creation tool.

    Parameters
    ----------
    argv : list of str, optional
        Argument list to parse. Defaults to ``sys.argv[1:]`` (the
        argparse default), so existing command-line callers are
        unaffected; passing a list makes the function usable (and
        testable) programmatically.

    Returns
    -------
    argparse.Namespace
        With ``record`` and ``index`` attributes, both absolute paths.
    """
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description='Create an index file from .rec file')
    parser.add_argument('record', help='path to .rec file.')
    parser.add_argument('index', help='path to index file.')
    args = parser.parse_args(argv)
    # Normalize to absolute paths so later chdir calls cannot break them.
    args.record = os.path.abspath(args.record)
    args.index = os.path.abspath(args.index)
    return args
if __name__ == '__main__':
    # Command-line entry point: build an index file for the given .rec file.
    args = parse_args()
    creator = IndexCreator(args.record, args.index)
    creator.create_index()
    creator.close()
|
ThomasYeoLab/CBIG | refs/heads/master | stable_projects/predict_phenotypes/He2019_KRDNN/replication/CBIG_KRDNN_proc_data.py | 1 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Written by Tong He and CBIG under MIT license:
https://github.com/ThomasYeoLab/CBIG/blob/master/LICENSE.md
"""
import os
import numpy as np
import scipy.io as sio
from cbig.He2019.config import config
from cbig.He2019.CBIG_prepare_data import data_ukbb_fnn, data_ukbb_fnn_sex
from cbig.He2019.CBIG_prepare_data import data_hcp_fnn, data_ukbb_brainnetcnn
from cbig.He2019.CBIG_prepare_data import data_ukbb_gcnn, data_ukbb_gcnn_sex
from cbig.He2019.CBIG_prepare_data import data_hcp_brainnetcnn, data_hcp_gcnn
from cbig.He2019.CBIG_prepare_data import get_gcnn_graph
from cbig.He2019.CBIG_prepare_data import data_ukbb_brainnetcnn_sex
def main():
    """Load and preprocess the datasets used by He2019.

    Writes the intermediate files consumed by the FNN / BrainNetCNN / GCNN
    models for both the HCP and UK Biobank datasets, then builds the
    adjacency graphs needed by the GCNN.

    Returns:
        None
    """
    # HCP dataset: one intermediate file set per network architecture.
    os.makedirs(config.HCP_INTER_DIR, exist_ok=True)
    data_hcp_fnn(config.HCP_INTER_DIR, config.HCP_NUM_FOLD)
    data_hcp_brainnetcnn(config.HCP_INTER_DIR, config.HCP_NUM_FOLD)
    data_hcp_gcnn(config.HCP_INTER_DIR, config.HCP_NUM_FOLD)

    # UK Biobank dataset: phenotype and sex-prediction variants.
    os.makedirs(config.UKBB_INTER_DIR, exist_ok=True)
    data_ukbb_fnn(config.UKBB_INTER_DIR)
    data_ukbb_fnn_sex(config.UKBB_INTER_DIR)
    data_ukbb_brainnetcnn(config.UKBB_INTER_DIR)
    data_ukbb_brainnetcnn_sex(config.UKBB_INTER_DIR)
    data_ukbb_gcnn(config.UKBB_INTER_DIR)
    data_ukbb_gcnn_sex(config.UKBB_INTER_DIR)

    # Adjacency matrices for the GCNN, derived from the correlation matrices
    # (transposed so the subject axis comes first).
    graph_dir = config.GRAPH_FOLDER
    ukbb_mat = sio.loadmat(
        os.path.join(config.UKBB_ORIG_DIR, config.UKBB_CORR_MAT))
    corr = np.transpose(ukbb_mat['corr_mat'], (2, 0, 1))
    get_gcnn_graph(graph_dir, corr)
    get_gcnn_graph(graph_dir, corr, k=1)

    hcp_mat = sio.loadmat(
        os.path.join(config.HCP_ORIG_DIR, config.HCP_CORR_MAT))
    corr = np.transpose(hcp_mat['corr_mat'], (2, 0, 1))
    get_gcnn_graph(graph_dir, corr)
    return
if __name__ == '__main__':
    # Script entry point: run the full data-preparation pipeline.
    main()
|
poojavade/Genomics_Docker | refs/heads/master | Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/Flask-0.10.1-py2.7.egg/flask/testsuite/test_apps/flaskext/__init__.py | 12133432 | |
johnsonlau/multivimdriver-vmware-vio | refs/heads/master | vio/vio/swagger/views/hypervisor/__init__.py | 12133432 | |
ccnmtl/lettuce | refs/heads/master | tests/integration/lib/Django-1.3/tests/regressiontests/bug8245/__init__.py | 12133432 | |
jadhavhninad/-CSE_515_MWD_Analytics- | refs/heads/master | Phase 2/DEMO/Phase 2 submissions/Phase 2 Submission/Code/MWDBProject/mwd_proj/mwd_proj/phase1/migrations/__init__.py | 12133432 | |
jarias/heroku-buildpack-go | refs/heads/master | vendor/virtualenv-1.9/docs/conf.py | 3 | # -*- coding: utf-8 -*-
#
# Paste documentation build configuration file, created by
# sphinx-quickstart on Tue Apr 22 22:08:49 2008.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# All configuration values have a default value; values that are commented out
# serve to show the default value.
import os
import sys

# If your extensions are in another directory, add it here.
sys.path.insert(0, os.path.abspath(os.pardir))

# General configuration
# ---------------------

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc']

# Add any paths that contain templates here, relative to this directory.
## FIXME: disabled for now because I haven't figured out how to use this:
#templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.txt'

# The master toctree document.
master_doc = 'index'

# General substitutions.
project = 'virtualenv'
copyright = '2007-2013, Ian Bicking, The Open Planning Project, The virtualenv developers'

# The default replacements for |version| and |release|, also used in various
# other places throughout the built documents.
try:
    from virtualenv import __version__
    # The short X.Y version.
    version = '.'.join(__version__.split('.')[:2])
    # The full version, including alpha/beta/rc tags.
    release = __version__
except ImportError:
    # Building the docs without virtualenv importable (e.g. from a bare
    # checkout): fall back to a placeholder version string.
    version = release = 'dev'

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'

# List of documents that shouldn't be included in the build.
unused_docs = []

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'


# Options for HTML output
# -----------------------

# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
#html_style = 'default.css'

html_theme = 'nature'
html_theme_path = ['_theme']

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Content template for the index page.
#html_index = ''

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_use_modindex = True

# If true, the reST sources are included in the HTML build as _sources/<name>.
#html_copy_source = True

# Output file base name for HTML help builder.
htmlhelp_basename = 'Pastedoc'


# Options for LaTeX output
# ------------------------

# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'

# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
#latex_documents = []

# Additional stuff for the LaTeX preamble.
#latex_preamble = ''

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_use_modindex = True
|
JFonS/tmp | refs/heads/master | .config/sublime-text-3/Packages/Arduino-like IDE/app/serial.py | 4 | #-*- coding: utf-8 -*-
# stino/serial.py
import os
import threading
import time
from . import constant
from . import pyserial
class SerialListener:
    """Background poller that watches the system's serial ports and asks
    the menu to refresh whenever the set of available ports changes."""

    def __init__(self, menu):
        self.menu = menu
        self.serial_list = []
        self.is_alive = False

    def start(self):
        """Launch the polling thread; no-op if it is already running."""
        if self.is_alive:
            return
        self.is_alive = True
        worker = threading.Thread(target=self.update)
        worker.start()

    def update(self):
        """Polling loop: rescan once per second and refresh on any change."""
        while self.is_alive:
            previous_list = self.serial_list
            self.serial_list = getSerialPortList()
            if previous_list != self.serial_list:
                self.menu.refresh()
            time.sleep(1)

    def stop(self):
        """Ask the polling loop to exit after its current iteration."""
        self.is_alive = False
def getSerialPortList():
    """Return the serial ports currently available on this system.

    On Windows, enumerates the ``HARDWARE\\DEVICEMAP\\SERIALCOMM`` registry
    key; on OSX/Linux, scans ``/dev`` for the usual serial device name
    patterns.

    Returns:
        list of str: port names (e.g. 'COM3') or device paths
        (e.g. '/dev/ttyUSB0').
    """
    serial_port_list = []
    has_ports = False
    if constant.sys_platform == "windows":
        if constant.sys_version < 3:
            import _winreg as winreg
        else:
            import winreg
        path = 'HARDWARE\\DEVICEMAP\\SERIALCOMM'
        try:
            reg = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, path)
            has_ports = True
        except WindowsError:
            # Key absent: no serial ports registered on this machine.
            pass
        if has_ports:
            # The registry API gives no value count up front, so probe a
            # generous fixed range; missing indexes raise WindowsError and
            # are simply skipped.
            for i in range(128):
                try:
                    # Only the value (the port name) is needed; use throwaway
                    # names so the builtin 'type' is not shadowed.
                    _name, value, _type = winreg.EnumValue(reg, i)
                except WindowsError:
                    pass
                else:
                    serial_port_list.append(value)
    else:
        # OSX exposes serial devices as /dev/tty.* and /dev/cu.*;
        # Linux as /dev/ttyACM* and /dev/ttyUSB*.
        if constant.sys_platform == 'osx':
            dev_names = ['tty.', 'cu.']
        else:
            dev_names = ['ttyACM', 'ttyUSB']
        dev_path = '/dev'
        dev_file_list = os.listdir(dev_path)
        for dev_file in dev_file_list:
            for dev_name in dev_names:
                if dev_name in dev_file:
                    dev_file_path = os.path.join(dev_path, dev_file)
                    serial_port_list.append(dev_file_path)
    return serial_port_list
def isSerialAvailable(serial_port):
    """Check whether *serial_port* can currently be opened.

    Returns True only if the port opens successfully (i.e. it exists and
    is not held by another process)."""
    available = False
    probe = pyserial.Serial()
    probe.port = serial_port
    try:
        probe.open()
    except (pyserial.serialutil.SerialException, UnicodeDecodeError):
        # Busy, missing, or the port name could not be decoded.
        pass
    else:
        if probe.isOpen():
            available = True
            probe.close()
    return available
def getSelectedSerialPort():
    """Return the serial port selected in the sketch settings.

    Falls back to the first detected port when the stored index is out of
    range, and to the sentinel string 'no_serial_port' when no ports are
    detected at all."""
    ports = getSerialPortList()
    if not ports:
        return 'no_serial_port'
    chosen_index = constant.sketch_settings.get('serial_port', -1)
    try:
        return ports[chosen_index]
    except IndexError:
        return ports[0]
fpsluozi/youtube-dl | refs/heads/master | youtube_dl/extractor/jukebox.py | 140 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
ExtractorError,
RegexNotFoundError,
unescapeHTML,
)
class JukeboxIE(InfoExtractor):
    """Extractor for jukebox.* video-clip pages.

    The page embeds an iframe whose player config either carries a direct
    video URL or points at a YouTube video; both cases are handled.
    """
    _VALID_URL = r'^http://www\.jukebox?\..+?\/.+[,](?P<id>[a-z0-9\-]+)\.html'
    _TEST = {
        'url': 'http://www.jukebox.es/kosheen/videoclip,pride,r303r.html',
        'info_dict': {
            'id': 'r303r',
            'ext': 'flv',
            'title': 'Kosheen-En Vivo Pride',
            'uploader': 'Kosheen',
        },
    }

    def _real_extract(self, url):
        video_id = self._match_id(url)
        html = self._download_webpage(url, video_id)

        iframe_url = unescapeHTML(self._search_regex(r'<iframe .*src="([^"]*)"', html, 'iframe url'))

        iframe_html = self._download_webpage(iframe_url, video_id, 'Downloading iframe')
        if re.search(r'class="jkb_waiting"', iframe_html) is not None:
            raise ExtractorError('Video is not available(in your country?)!')

        self.report_extraction(video_id)

        try:
            video_url = self._search_regex(r'"config":{"file":"(?P<video_url>http:[^"]+\?mdtk=[0-9]+)"',
                                           iframe_html, 'video url')
            # The config URL is JSON-escaped ("http:\/\/...").  '\\/' is the
            # same two-character string as the old '\/', but spelled with a
            # valid escape sequence ('\/' is deprecated/invalid in Python 3).
            video_url = unescapeHTML(video_url).replace('\\/', '/')
        except RegexNotFoundError:
            youtube_url = self._search_regex(
                r'config":{"file":"(http:\\/\\/www\.youtube\.com\\/watch\?v=[^"]+)"',
                iframe_html, 'youtube url')
            youtube_url = unescapeHTML(youtube_url).replace('\\/', '/')
            self.to_screen('Youtube video detected')
            return self.url_result(youtube_url, ie='Youtube')

        title = self._html_search_regex(r'<h1 class="inline">([^<]+)</h1>',
                                        html, 'title')
        artist = self._html_search_regex(r'<span id="infos_article_artist">([^<]+)</span>',
                                         html, 'artist')

        return {
            'id': video_id,
            'url': video_url,
            'title': artist + '-' + title,
            'uploader': artist,
        }
|
2014c2g19/2014c2g19 | refs/heads/master | exts/sphinx.search.py | 38 | # -*- coding: utf-8 -*-
"""
sphinx.search
~~~~~~~~~~~~~
Create a search index for offline search.
:copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
採用結巴套件 for Python3 進行中文繁體內容的分詞
https://github.com/fxsjy/jieba
"""
import re
import cPickle as pickle
from docutils.nodes import comment, Text, NodeVisitor, SkipNode
from sphinx.util import jsdump, rpartition
try:
# http://bitbucket.org/methane/porterstemmer/
from porterstemmer import Stemmer as CStemmer
CSTEMMER = True
except ImportError:
from sphinx.util.stemmer import PorterStemmer
CSTEMMER = False
# 採用結巴分詞套件
import jieba
# Matches runs of word characters; (?u) makes \w Unicode-aware.
word_re = re.compile(r'\w+(?u)')

# Common English words excluded from the search index.
stopwords = set("""
a and are as at
be but by
for
if in into is it
near no not
of on or
such
that the their then there these they this to
was will with
""".split())
class _JavaScriptIndex(object):
    """
    The search index as javascript file that calls a function
    on the documentation search object to register the index.
    """

    PREFIX = 'Search.setIndex('
    SUFFIX = ')'

    def dumps(self, data):
        """Serialize *data* wrapped in a ``Search.setIndex(...)`` call."""
        return self.PREFIX + jsdump.dumps(data) + self.SUFFIX

    def loads(self, s):
        """Inverse of :meth:`dumps`; raises ValueError on malformed input."""
        payload = s[len(self.PREFIX):-len(self.SUFFIX)]
        well_formed = (payload and s.startswith(self.PREFIX)
                       and s.endswith(self.SUFFIX))
        if not well_formed:
            raise ValueError('invalid data')
        return jsdump.loads(payload)

    def dump(self, data, f):
        """Write the serialized index to the file object *f*."""
        f.write(self.dumps(data))

    def load(self, f):
        """Read and deserialize an index from the file object *f*."""
        return self.loads(f.read())
# Shared serializer instance used when writing/reading the search index.
js_index = _JavaScriptIndex()


# Prefer the C implementation of the Porter stemmer when it is installed;
# fall back to the pure-Python one otherwise. Both expose .stem(word).
if CSTEMMER:
    class Stemmer(CStemmer):
        """Adapter giving the C stemmer the same ``stem`` interface."""

        def stem(self, word):
            return self(word.lower())

else:
    class Stemmer(PorterStemmer):
        """
        All those porter stemmer implementations look hideous.
        make at least the stem method nicer.
        """

        def stem(self, word):
            word = word.lower()
            return PorterStemmer.stem(self, word, 0, len(word) - 1)
class WordCollector(NodeVisitor):
    """
    A special visitor that collects words for the `IndexBuilder`.
    """

    def __init__(self, document):
        NodeVisitor.__init__(self, document)
        # Accumulates every word segmented out of the document's text nodes.
        self.found_words = []

    def dispatch_visit(self, node):
        # Skip comment nodes entirely (including their children).
        if node.__class__ is comment:
            raise SkipNode
        if node.__class__ is Text:
            # Use the jieba package (full mode) to segment Chinese text
            # into words.
            # NOTE(review): modern jieba.cut returns a generator, which has
            # no .reverse(); this code assumes an older list-returning
            # jieba API -- confirm the pinned jieba version.
            words = jieba.cut(node.astext().encode("utf8"),cut_all=True)
            words.reverse()
            self.found_words.extend(words)
            #testfile.write(node.astext().encode("utf8")+"\n")
            #testfile.write(u",".join(words).encode("utf8") + "\n")
class IndexBuilder(object):
    """
    Helper class that creates a searchindex based on the doctrees
    passed to the `feed` method.

    NOTE: this code targets Python 2 (iteritems/itervalues/basestring/
    unicode are used throughout).
    """
    # Supported on-disk serialization backends for the frozen index.
    formats = {
        'jsdump': jsdump,
        'pickle': pickle
    }

    def __init__(self, env):
        self.env = env
        self._stemmer = Stemmer()
        # filename -> title
        self._titles = {}
        # stemmed word -> set(filenames)
        self._mapping = {}
        # objtype -> index
        self._objtypes = {}
        # objtype index -> objname (localized)
        self._objnames = {}

    def load(self, stream, format):
        """Reconstruct from frozen data."""
        if isinstance(format, basestring):
            format = self.formats[format]
        frozen = format.load(stream)
        # if an old index is present, we treat it as not existing.
        if not isinstance(frozen, dict):
            raise ValueError('old format')
        index2fn = frozen['filenames']
        self._titles = dict(zip(index2fn, frozen['titles']))
        self._mapping = {}
        # Terms are stored compactly: an int for a single document,
        # a list of ints for several (see get_terms below).
        for k, v in frozen['terms'].iteritems():
            if isinstance(v, int):
                self._mapping[k] = set([index2fn[v]])
            else:
                self._mapping[k] = set(index2fn[i] for i in v)
        # no need to load keywords/objtypes

    def dump(self, stream, format):
        """Dump the frozen index to a stream."""
        if isinstance(format, basestring):
            format = self.formats[format]
        format.dump(self.freeze(), stream)

    def get_objects(self, fn2index):
        # Build {name_prefix: {name: (doc_index, objtype_index, priority)}}
        # for object search; fills _objtypes/_objnames as a side effect.
        rv = {}
        otypes = self._objtypes
        onames = self._objnames
        for domainname, domain in self.env.domains.iteritems():
            for fullname, dispname, type, docname, anchor, prio in \
                    domain.get_objects():
                # XXX use dispname?
                if docname not in fn2index:
                    continue
                if prio < 0:
                    continue
                # XXX splitting at dot is kind of Python specific
                prefix, name = rpartition(fullname, '.')
                pdict = rv.setdefault(prefix, {})
                try:
                    i = otypes[domainname, type]
                except KeyError:
                    # First time this (domain, type) pair is seen: assign it
                    # the next index and remember its display name.
                    i = len(otypes)
                    otypes[domainname, type] = i
                    otype = domain.object_types.get(type)
                    if otype:
                        # use unicode() to fire translation proxies
                        onames[i] = unicode(domain.get_type_name(otype))
                    else:
                        onames[i] = type
                pdict[name] = (fn2index[docname], i, prio)
        return rv

    def get_terms(self, fn2index):
        # Compact encoding: a bare int when a term occurs in one document,
        # a list of ints otherwise (mirrored by load() above).
        rv = {}
        for k, v in self._mapping.iteritems():
            if len(v) == 1:
                fn, = v
                if fn in fn2index:
                    rv[k] = fn2index[fn]
            else:
                rv[k] = [fn2index[fn] for fn in v if fn in fn2index]
        return rv

    def freeze(self):
        """Create a usable data structure for serializing."""
        filenames = self._titles.keys()
        titles = self._titles.values()
        fn2index = dict((f, i) for (i, f) in enumerate(filenames))
        terms = self.get_terms(fn2index)
        objects = self.get_objects(fn2index)  # populates _objtypes
        objtypes = dict((v, k[0] + ':' + k[1])
                        for (k, v) in self._objtypes.iteritems())
        objnames = self._objnames
        return dict(filenames=filenames, titles=titles, terms=terms,
                    objects=objects, objtypes=objtypes, objnames=objnames)

    def prune(self, filenames):
        """Remove data for all filenames not in the list."""
        new_titles = {}
        for filename in filenames:
            if filename in self._titles:
                new_titles[filename] = self._titles[filename]
        self._titles = new_titles
        for wordnames in self._mapping.itervalues():
            wordnames.intersection_update(filenames)

    def feed(self, filename, title, doctree):
        """Feed a doctree to the index."""
        self._titles[filename] = title

        visitor = WordCollector(doctree)
        doctree.walk(visitor)

        def add_term(word, stem=self._stemmer.stem):
            # Drop very short words, stopwords and pure numbers.
            word = stem(word)
            if len(word) < 2 or word in stopwords or word.isdigit():
                return
            self._mapping.setdefault(word, set()).add(filename)

        # Use the jieba package to segment the title as well (full mode).
        words = jieba.cut(title.encode("utf8"),cut_all=True)
        for word in words:
            add_term(word)

        for word in visitor.found_words:
            add_term(word)
def load_indexer(self):
    # Replacement for the HTML builder's ``load_indexer`` method; it is
    # bound onto the builder instance in builder_inited() below, so
    # ``self`` here is the *builder*, not a module-level object.
    def func(docnames):
        import os.path as path
        print "############### CHINESE INDEXER ###############"
        self.indexer = IndexBuilder(self.env)
        # Documents that are NOT being rebuilt keep their index entries.
        keep = set(self.env.all_docs) - set(docnames)
        try:
            f = open(path.join(self.outdir, self.searchindex_filename), 'rb')
            try:
                self.indexer.load(f, self.indexer_format)
            finally:
                f.close()
        except (IOError, OSError, ValueError):
            if keep:
                self.warn('search index couldn\'t be loaded, but not all '
                          'documents will be built: the index will be '
                          'incomplete.')
        # delete all entries for files that will be rebuilt
        self.indexer.prune(keep)
    return func
def builder_inited(app):
    # Only the HTML builder has a search index to patch; bind our
    # jieba-aware load_indexer onto it.
    if app.builder.name == 'html':
        print "****************************"
        app.builder.load_indexer = load_indexer(app.builder)
def setup(app):
    """Sphinx extension entry point: run builder_inited once the builder exists."""
    app.connect('builder-inited', builder_inited)
Kuwagata/md5crack | refs/heads/master | Condor/socklib.py | 1 | import sys, os, socket, pickle, math
def send(sock, data = ""):
    """Pickle *data* and push it over *sock* using the simple
    chunk-count / ack protocol understood by recv() below.

    NOTE: this module is Python 2 style (str payloads on sockets)."""
    conn = sock
    # Serialize the payload.
    payload = pickle.dumps(data)
    # Number of 1024-byte chunks needed, rounded up to a whole chunk.
    packets = int(math.ceil(len(payload) / 1024.0))
    # Announce how many chunks will follow.
    conn.send(str(packets))
    # Block until the receiver acknowledges the chunk count.
    conn.recv(8)
    # Stream the whole serialized payload.
    conn.sendall(payload)
def recv(sock):
    """Receive one pickled object sent by send().

    Reads the chunk count, acknowledges it, collects the chunks, and
    unpickles the reassembled payload.

    WARNING: pickle.loads on data received from a network peer can execute
    arbitrary code -- only use this with trusted endpoints.

    BUGFIX: the original ``def recv(sock)`` was missing its colon, a
    SyntaxError that prevented this module from importing at all.
    """
    s = sock
    temp = s.recv(8)
    packets = int(temp)
    # Acknowledge so the sender starts streaming the chunks.
    s.send("ack")
    data = ""
    for i in xrange(packets):
        data = data + s.recv(1024)
    return pickle.loads(data)
|
tylerjereddy/pycon-2017 | refs/heads/master | produce_npy_files.py | 1 | import glob
import pickle
import numpy as np
# Convert every pickled array in the working directory into a .npy file.
for source_name in glob.glob('*.p'):
    print('pickled_file:', source_name)
    with open(source_name, 'rb') as handle:
        # latin1 lets Python 3 unpickle arrays that were pickled by Python 2.
        array = pickle.load(handle, encoding='latin1')
    print('numpy_data.shape:', array.shape)
    # Use the filename stem for the .npy output; allow_pickle=False keeps
    # the result a plain, safely-loadable ndarray file.
    output_stem = source_name.split('.')[0]
    np.save(output_stem, arr=array, allow_pickle=False)
|
hyperized/ansible | refs/heads/devel | lib/ansible/modules/net_tools/basics/uri.py | 1 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2013, Romeo Theriault <romeot () hawaii.edu>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Module metadata consumed by ansible-doc and CI tooling, not at runtime.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['stableinterface'],
                    'supported_by': 'core'}
DOCUMENTATION = r'''
---
module: uri
short_description: Interacts with webservices
description:
- Interacts with HTTP and HTTPS web services and supports Digest, Basic and WSSE
HTTP authentication mechanisms.
- For Windows targets, use the M(win_uri) module instead.
version_added: "1.1"
options:
url:
description:
- HTTP or HTTPS URL in the form (http|https)://host.domain[:port]/path
type: str
required: true
dest:
description:
- A path of where to download the file to (if desired). If I(dest) is a
directory, the basename of the file on the remote server will be used.
type: path
url_username:
description:
- A username for the module to use for Digest, Basic or WSSE authentication.
type: str
aliases: [ user ]
url_password:
description:
- A password for the module to use for Digest, Basic or WSSE authentication.
type: str
aliases: [ password ]
body:
description:
- The body of the http request/response to the web service. If C(body_format) is set
to 'json' it will take an already formatted JSON string or convert a data structure
into JSON. If C(body_format) is set to 'form-urlencoded' it will convert a dictionary
or list of tuples into an 'application/x-www-form-urlencoded' string. (Added in v2.7)
type: raw
body_format:
description:
- The serialization format of the body. When set to C(json) or C(form-urlencoded), encodes the
body argument, if needed, and automatically sets the Content-Type header accordingly.
As of C(2.3) it is possible to override the `Content-Type` header, when
set to C(json) or C(form-urlencoded) via the I(headers) option.
type: str
choices: [ form-urlencoded, json, raw ]
default: raw
version_added: "2.0"
method:
description:
- The HTTP method of the request or response.
- In more recent versions we do not restrict the method at the module level anymore
but it still must be a valid method accepted by the service handling the request.
type: str
default: GET
return_content:
description:
- Whether or not to return the body of the response as a "content" key in
the dictionary result.
- Independently of this option, if the reported Content-type is "application/json", then the JSON is
always loaded into a key called C(json) in the dictionary results.
type: bool
default: no
force_basic_auth:
description:
- Force the sending of the Basic authentication header upon initial request.
- The library used by the uri module only sends authentication information when a webservice
responds to an initial request with a 401 status. Since some basic auth services do not properly
send a 401, logins will fail.
type: bool
default: no
follow_redirects:
description:
- Whether or not the URI module should follow redirects. C(all) will follow all redirects.
C(safe) will follow only "safe" redirects, where "safe" means that the client is only
doing a GET or HEAD on the URI to which it is being redirected. C(none) will not follow
any redirects. Note that C(yes) and C(no) choices are accepted for backwards compatibility,
where C(yes) is the equivalent of C(all) and C(no) is the equivalent of C(safe). C(yes) and C(no)
are deprecated and will be removed in some future version of Ansible.
type: str
choices: ['all', 'no', 'none', 'safe', 'urllib2', 'yes']
default: safe
creates:
description:
- A filename, when it already exists, this step will not be run.
type: path
removes:
description:
- A filename, when it does not exist, this step will not be run.
type: path
status_code:
description:
- A list of valid, numeric, HTTP status codes that signifies success of the request.
type: list
default: [ 200 ]
timeout:
description:
- The socket level timeout in seconds
type: int
default: 30
headers:
description:
- Add custom HTTP headers to a request in the format of a YAML hash. As
of C(2.3) supplying C(Content-Type) here will override the header
generated by supplying C(json) or C(form-urlencoded) for I(body_format).
type: dict
version_added: '2.1'
validate_certs:
description:
- If C(no), SSL certificates will not be validated.
    - This should only be set to C(no) when used on personally controlled sites using self-signed certificates.
- Prior to 1.9.2 the code defaulted to C(no).
type: bool
default: yes
version_added: '1.9.2'
client_cert:
description:
- PEM formatted certificate chain file to be used for SSL client authentication.
- This file can also include the key as well, and if the key is included, I(client_key) is not required
type: path
version_added: '2.4'
client_key:
description:
- PEM formatted file that contains your private key to be used for SSL client authentication.
- If I(client_cert) contains both the certificate and key, this option is not required.
type: path
version_added: '2.4'
src:
description:
- Path to file to be submitted to the remote server.
- Cannot be used with I(body).
type: path
version_added: '2.7'
remote_src:
description:
- If C(no), the module will search for src on originating/master machine.
- If C(yes) the module will use the C(src) path on the remote/target machine.
type: bool
default: no
version_added: '2.7'
force:
description:
- If C(yes) do not get a cached copy.
- Alias C(thirsty) has been deprecated and will be removed in 2.13.
type: bool
default: no
aliases: [ thirsty ]
use_proxy:
description:
- If C(no), it will not use a proxy, even if one is defined in an environment variable on the target hosts.
type: bool
default: yes
unix_socket:
description:
- Path to Unix domain socket to use for connection
version_added: '2.8'
http_agent:
description:
- Header to identify as, generally appears in web server logs.
type: str
default: ansible-httpget
notes:
- The dependency on httplib2 was removed in Ansible 2.1.
- The module returns all the HTTP headers in lower-case.
- For Windows targets, use the M(win_uri) module instead.
seealso:
- module: get_url
- module: win_uri
author:
- Romeo Theriault (@romeotheriault)
extends_documentation_fragment: files
'''
EXAMPLES = r'''
- name: Check that you can connect (GET) to a page and it returns a status 200
uri:
url: http://www.example.com
- name: Check that a page returns a status 200 and fail if the word AWESOME is not in the page contents
uri:
url: http://www.example.com
return_content: yes
register: this
failed_when: "'AWESOME' not in this.content"
- name: Create a JIRA issue
uri:
url: https://your.jira.example.com/rest/api/2/issue/
user: your_username
password: your_pass
method: POST
body: "{{ lookup('file','issue.json') }}"
force_basic_auth: yes
status_code: 201
body_format: json
- name: Login to a form based webpage, then use the returned cookie to access the app in later tasks
uri:
url: https://your.form.based.auth.example.com/index.php
method: POST
body_format: form-urlencoded
body:
name: your_username
password: your_password
enter: Sign in
status_code: 302
register: login
- name: Login to a form based webpage using a list of tuples
uri:
url: https://your.form.based.auth.example.com/index.php
method: POST
body_format: form-urlencoded
body:
- [ name, your_username ]
- [ password, your_password ]
- [ enter, Sign in ]
status_code: 302
register: login
- name: Connect to website using a previously stored cookie
uri:
url: https://your.form.based.auth.example.com/dashboard.php
method: GET
return_content: yes
headers:
Cookie: "{{ login.set_cookie }}"
- name: Queue build of a project in Jenkins
uri:
url: http://{{ jenkins.host }}/job/{{ jenkins.job }}/build?token={{ jenkins.token }}
user: "{{ jenkins.user }}"
password: "{{ jenkins.password }}"
method: GET
force_basic_auth: yes
status_code: 201
- name: POST from contents of local file
uri:
url: https://httpbin.org/post
method: POST
src: file.json
- name: POST from contents of remote file
uri:
url: https://httpbin.org/post
method: POST
src: /path/to/my/file.json
remote_src: yes
- name: Pause play until a URL is reachable from this host
uri:
url: "http://192.0.2.1/some/test"
follow_redirects: none
method: GET
register: _result
until: _result.status == 200
retries: 720 # 720 * 5 seconds = 1hour (60*60/5)
delay: 5 # Every 5 seconds
# There are issues in a supporting Python library that is discussed in
# https://github.com/ansible/ansible/issues/52705 where a proxy is defined
# but you want to bypass proxy use on CIDR masks by using no_proxy
- name: Work around a python issue that doesn't support no_proxy envvar
uri:
follow_redirects: none
validate_certs: false
timeout: 5
url: "http://{{ ip_address }}:{{ port | default(80) }}"
register: uri_data
failed_when: false
changed_when: false
vars:
ip_address: 192.0.2.1
environment: |
{
{% for no_proxy in (lookup('env', 'no_proxy') | regex_replace('\s*,\s*', ' ') ).split() %}
{% if no_proxy | regex_search('\/') and
no_proxy | ipaddr('net') != '' and
no_proxy | ipaddr('net') != false and
ip_address | ipaddr(no_proxy) is not none and
ip_address | ipaddr(no_proxy) != false %}
'no_proxy': '{{ ip_address }}'
{% elif no_proxy | regex_search(':') != '' and
no_proxy | regex_search(':') != false and
no_proxy == ip_address + ':' + (port | default(80)) %}
'no_proxy': '{{ ip_address }}:{{ port | default(80) }}'
{% elif no_proxy | ipaddr('host') != '' and
no_proxy | ipaddr('host') != false and
no_proxy == ip_address %}
'no_proxy': '{{ ip_address }}'
{% elif no_proxy | regex_search('^(\*|)\.') != '' and
no_proxy | regex_search('^(\*|)\.') != false and
no_proxy | regex_replace('\*', '') in ip_address %}
'no_proxy': '{{ ip_address }}'
{% endif %}
{% endfor %}
}
'''
RETURN = r'''
# The return information includes all the HTTP headers in lower-case.
elapsed:
description: The number of seconds that elapsed while performing the download
returned: on success
type: int
sample: 23
msg:
description: The HTTP message from the request
returned: always
type: str
sample: OK (unknown bytes)
redirected:
description: Whether the request was redirected
returned: on success
type: bool
sample: false
status:
description: The HTTP status code from the request
returned: always
type: int
sample: 200
url:
description: The actual URL used for the request
returned: always
type: str
sample: https://www.ansible.com/
'''
import cgi
import datetime
import json
import os
import re
import shutil
import sys
import tempfile
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import PY2, iteritems, string_types
from ansible.module_utils.six.moves.urllib.parse import urlencode, urlsplit
from ansible.module_utils._text import to_native, to_text
from ansible.module_utils.common._collections_compat import Mapping, Sequence
from ansible.module_utils.urls import fetch_url, url_argument_spec
JSON_CANDIDATES = ('text', 'json', 'javascript')
def format_message(err, resp):
    """Append the response's own 'msg' (popped from *resp*) to *err*.

    The pop means the caller's resp dict no longer carries 'msg', so it
    is not duplicated when resp is splatted into fail_json/exit_json.
    """
    detail = resp.pop('msg')
    if not detail:
        return err
    return err + (' %s' % detail)
def write_file(module, url, dest, content, resp):
    """Persist *content* to *dest* via a temp file in module.tmpdir.

    Any I/O or permission failure aborts the module via fail_json; the
    copy to *dest* only happens when the SHA1 checksums differ.
    """
    # create a tempfile with some test content
    fd, tmpsrc = tempfile.mkstemp(dir=module.tmpdir)
    # NOTE(review): the fd returned by mkstemp is never closed and a
    # second handle is opened on the same path -- fd leaks each call;
    # on the exception path below, f is also never closed.
    f = open(tmpsrc, 'wb')
    try:
        f.write(content)
    except Exception as e:
        os.remove(tmpsrc)
        msg = format_message("Failed to create temporary content file: %s" % to_native(e), resp)
        module.fail_json(msg=msg, **resp)
    f.close()
    checksum_src = None
    checksum_dest = None
    # raise an error if there is no tmpsrc file
    if not os.path.exists(tmpsrc):
        os.remove(tmpsrc)
        msg = format_message("Source '%s' does not exist" % tmpsrc, resp)
        module.fail_json(msg=msg, **resp)
    if not os.access(tmpsrc, os.R_OK):
        os.remove(tmpsrc)
        msg = format_message("Source '%s' not readable" % tmpsrc, resp)
        module.fail_json(msg=msg, **resp)
    checksum_src = module.sha1(tmpsrc)
    # check if there is no dest file
    if os.path.exists(dest):
        # raise an error if copy has no permission on dest
        if not os.access(dest, os.W_OK):
            os.remove(tmpsrc)
            msg = format_message("Destination '%s' not writable" % dest, resp)
            module.fail_json(msg=msg, **resp)
        if not os.access(dest, os.R_OK):
            os.remove(tmpsrc)
            msg = format_message("Destination '%s' not readable" % dest, resp)
            module.fail_json(msg=msg, **resp)
        checksum_dest = module.sha1(dest)
    else:
        # dest does not exist yet: its parent directory must be writable
        if not os.access(os.path.dirname(dest), os.W_OK):
            os.remove(tmpsrc)
            msg = format_message("Destination dir '%s' not writable" % os.path.dirname(dest), resp)
            module.fail_json(msg=msg, **resp)
    if checksum_src != checksum_dest:
        try:
            shutil.copyfile(tmpsrc, dest)
        except Exception as e:
            os.remove(tmpsrc)
            msg = format_message("failed to copy %s to %s: %s" % (tmpsrc, dest, to_native(e)), resp)
            module.fail_json(msg=msg, **resp)
    os.remove(tmpsrc)
def url_filename(url):
    """Return the last path component of *url*; 'index.html' when the
    path carries no filename (trailing slash or bare host)."""
    name = os.path.basename(urlsplit(url)[2])
    return name if name else 'index.html'
def absolute_location(url, location):
    """Build an absolute URL from *url* and a (possibly relative)
    ``Location`` header value.

    - an already-absolute *location* is returned untouched;
    - a host-relative path ('/x') is joined to *url*'s scheme+netloc;
    - anything else is resolved against *url*'s directory.
    """
    if '://' in location:
        return location
    if location.startswith('/'):
        # Strip the original path (first occurrence) to keep
        # scheme://netloc, then append the new absolute path.
        parts = urlsplit(url)
        base = url.replace(parts[2], '')
        return '%s%s' % (base, location)
    # Relative resolution is the only remaining case.  (The original
    # 'elif not location.startswith("/") ... else' left the final
    # branch unreachable dead code.)
    base = os.path.dirname(url)
    return '%s/%s' % (base, location)
def kv_list(data):
    ''' Convert data into a list of key-value tuples '''
    if data is None:
        return None
    if isinstance(data, Sequence):
        # Assumed to already be pair-shaped; just make a concrete list.
        return list(data)
    if isinstance(data, Mapping):
        # dict-like: expand into (key, value) pairs.
        return list(data.items())
    raise TypeError('cannot form-urlencode body, expect list or dict')
def form_urlencoded(body):
    ''' Convert data into a form-urlencoded string '''
    if isinstance(body, string_types):
        # Caller already serialized it; pass through untouched.
        return body
    if not isinstance(body, (Mapping, Sequence)):
        # Unknown shape: return as-is and let the caller cope.
        return body
    pairs = []
    # Flatten {key: [v1, v2]} / [[key, value], ...] into the flat
    # (key, value) tuples that urlencode() accepts; None values drop out.
    for key, values in kv_list(body):
        if isinstance(values, string_types) or not isinstance(values, (Mapping, Sequence)):
            values = [values]
        for value in values:
            if value is not None:
                pairs.append((to_text(key), to_text(value)))
    return urlencode(pairs, doseq=True)
def uri(module, url, dest, body, body_format, method, headers, socket_timeout):
    """Perform the HTTP request and return (info dict, raw body, dest).

    When *dest* is a directory, a preliminary non-following request is
    made to discover a redirect target so the filename can be derived
    from the final URL.
    """
    # if dest is set and is a directory, check whether we get redirected
    # and set the filename from that url
    redirected = False
    redir_info = {}
    r = {}
    src = module.params['src']
    if src:
        # Upload a file from disk instead of an inline body.
        try:
            headers.update({
                'Content-Length': os.stat(src).st_size
            })
            data = open(src, 'rb')
        except OSError:
            module.fail_json(msg='Unable to open source file %s' % src, elapsed=0)
    else:
        data = body
    kwargs = {}
    if dest is not None:
        # Stash follow_redirects, in this block we don't want to follow
        # we'll reset back to the supplied value soon
        follow_redirects = module.params['follow_redirects']
        module.params['follow_redirects'] = False
        if os.path.isdir(dest):
            # first check if we are redirected to a file download
            _, redir_info = fetch_url(module, url, data=body,
                                      headers=headers,
                                      method=method,
                                      timeout=socket_timeout, unix_socket=module.params['unix_socket'])
            # if we are redirected, update the url with the location header,
            # and update dest with the new url filename
            if redir_info['status'] in (301, 302, 303, 307):
                url = redir_info['location']
                redirected = True
            dest = os.path.join(dest, url_filename(url))
        # if destination file already exist, only download if file newer
        if os.path.exists(dest):
            kwargs['last_mod_time'] = datetime.datetime.utcfromtimestamp(os.path.getmtime(dest))
        # Reset follow_redirects back to the stashed value
        module.params['follow_redirects'] = follow_redirects
    resp, info = fetch_url(module, url, data=data, headers=headers,
                           method=method, timeout=socket_timeout, unix_socket=module.params['unix_socket'],
                           **kwargs)
    try:
        content = resp.read()
    except AttributeError:
        # there was no content, but the error read()
        # may have been stored in the info as 'body'
        content = info.pop('body', '')
    if src:
        # Try to close the open file handle
        try:
            data.close()
        except Exception:
            pass
    r['redirected'] = redirected or info['url'] != url
    r.update(redir_info)
    r.update(info)
    return r, content, dest
def main():
    """Ansible entry point: build the argument spec, issue the request
    via uri(), optionally persist the body to dest, normalize headers,
    and exit with the collected facts."""
    argument_spec = url_argument_spec()
    argument_spec.update(
        dest=dict(type='path'),
        url_username=dict(type='str', aliases=['user']),
        url_password=dict(type='str', aliases=['password'], no_log=True),
        body=dict(type='raw'),
        body_format=dict(type='str', default='raw', choices=['form-urlencoded', 'json', 'raw']),
        src=dict(type='path'),
        method=dict(type='str', default='GET'),
        return_content=dict(type='bool', default=False),
        follow_redirects=dict(type='str', default='safe', choices=['all', 'no', 'none', 'safe', 'urllib2', 'yes']),
        creates=dict(type='path'),
        removes=dict(type='path'),
        status_code=dict(type='list', default=[200]),
        timeout=dict(type='int', default=30),
        headers=dict(type='dict', default={}),
        unix_socket=dict(type='path'),
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        add_file_common_args=True,
        mutually_exclusive=[['body', 'src']],
    )
    if module.params.get('thirsty'):
        module.deprecate('The alias "thirsty" has been deprecated and will be removed, use "force" instead', version='2.13')
    url = module.params['url']
    body = module.params['body']
    body_format = module.params['body_format'].lower()
    method = module.params['method'].upper()
    dest = module.params['dest']
    return_content = module.params['return_content']
    creates = module.params['creates']
    removes = module.params['removes']
    status_code = [int(x) for x in list(module.params['status_code'])]
    socket_timeout = module.params['timeout']
    dict_headers = module.params['headers']
    if not re.match('^[A-Z]+$', method):
        module.fail_json(msg="Parameter 'method' needs to be a single word in uppercase, like GET or POST.")
    if body_format == 'json':
        # Encode the body unless it's a string, then assume it is pre-formatted JSON
        if not isinstance(body, string_types):
            body = json.dumps(body)
        if 'content-type' not in [header.lower() for header in dict_headers]:
            dict_headers['Content-Type'] = 'application/json'
    elif body_format == 'form-urlencoded':
        if not isinstance(body, string_types):
            try:
                body = form_urlencoded(body)
            except ValueError as e:
                module.fail_json(msg='failed to parse body as form_urlencoded: %s' % to_native(e), elapsed=0)
        if 'content-type' not in [header.lower() for header in dict_headers]:
            dict_headers['Content-Type'] = 'application/x-www-form-urlencoded'
    if creates is not None:
        # do not run the command if the line contains creates=filename
        # and the filename already exists. This allows idempotence
        # of uri executions.
        if os.path.exists(creates):
            module.exit_json(stdout="skipped, since '%s' exists" % creates, changed=False)
    if removes is not None:
        # do not run the command if the line contains removes=filename
        # and the filename does not exist. This allows idempotence
        # of uri executions.
        if not os.path.exists(removes):
            module.exit_json(stdout="skipped, since '%s' does not exist" % removes, changed=False)
    # Make the request
    start = datetime.datetime.utcnow()
    resp, content, dest = uri(module, url, dest, body, body_format, method,
                              dict_headers, socket_timeout)
    resp['elapsed'] = (datetime.datetime.utcnow() - start).seconds
    resp['status'] = int(resp['status'])
    resp['changed'] = False
    # Write the file out if requested
    if dest is not None:
        if resp['status'] in status_code and resp['status'] != 304:
            write_file(module, url, dest, content, resp)
            # allow file attribute changes
            resp['changed'] = True
            module.params['path'] = dest
            file_args = module.load_file_common_arguments(module.params)
            file_args['path'] = dest
            resp['changed'] = module.set_fs_attributes_if_different(file_args, resp['changed'])
        resp['path'] = dest
    # Transmogrify the headers, replacing '-' with '_', since variables don't
    # work with dashes.
    # In python3, the headers are title cased. Lowercase them to be
    # compatible with the python2 behaviour.
    uresp = {}
    for key, value in iteritems(resp):
        ukey = key.replace("-", "_").lower()
        uresp[ukey] = value
    if 'location' in uresp:
        uresp['location'] = absolute_location(url, uresp['location'])
    # Default content_encoding to try
    content_encoding = 'utf-8'
    if 'content_type' in uresp:
        # Handle multiple Content-Type headers
        charsets = []
        content_types = []
        for value in uresp['content_type'].split(','):
            ct, params = cgi.parse_header(value)
            if ct not in content_types:
                content_types.append(ct)
            if 'charset' in params:
                if params['charset'] not in charsets:
                    charsets.append(params['charset'])
        if content_types:
            content_type = content_types[0]
            if len(content_types) > 1:
                module.warn(
                    'Received multiple conflicting Content-Type values (%s), using %s' % (', '.join(content_types), content_type)
                )
        if charsets:
            content_encoding = charsets[0]
            if len(charsets) > 1:
                module.warn(
                    'Received multiple conflicting charset values (%s), using %s' % (', '.join(charsets), content_encoding)
                )
        u_content = to_text(content, encoding=content_encoding)
        # NOTE(review): content_type is only bound when content_types is
        # non-empty above; a blank Content-Type header would raise
        # NameError on the next line -- confirm upstream always sets one.
        if any(candidate in content_type for candidate in JSON_CANDIDATES):
            try:
                js = json.loads(u_content)
                uresp['json'] = js
            except Exception:
                if PY2:
                    sys.exc_clear()  # Avoid false positive traceback in fail_json() on Python 2
    else:
        u_content = to_text(content, encoding=content_encoding)
    if resp['status'] not in status_code:
        uresp['msg'] = 'Status code was %s and not %s: %s' % (resp['status'], status_code, uresp.get('msg', ''))
        module.fail_json(content=u_content, **uresp)
    elif return_content:
        module.exit_json(content=u_content, **uresp)
    else:
        module.exit_json(**uresp)
# Standard module entry point: run only when executed directly.
if __name__ == '__main__':
    main()
|
Alwnikrotikz/mpmath | refs/heads/master | mpmath/tests/test_fp.py | 15 | """
Easy-to-use test-generating code:
cases = '''
exp 2.25
log 2.25
'''
from mpmath import *
mp.dps = 20
for test in cases.splitlines():
if not test:
continue
words = test.split()
fname = words[0]
args = words[1:]
argstr = ", ".join(args)
testline = "%s(%s)" % (fname, argstr)
ans = str(eval(testline))
print " assert ae(fp.%s, %s)" % (testline, ans)
"""
from mpmath import fp
def ae(x, y, tol=1e-12):
    """Approximately-equal: True when x == y exactly, or when the
    absolute difference is within relative tolerance *tol* of |y|."""
    return x == y or abs(x - y) <= tol * abs(y)
def test_conj():
    """Complex conjugation and the conjugating dot product (fp context)."""
    assert fp.conj(4) == 4
    assert fp.conj(3+4j) == 3-4j
    assert fp.fdot([1,2],[3,2+1j], conjugate=True) == 7-2j
def test_fp_number_parts():
    """arg/re/im/floor/ceil against precomputed reference values."""
    assert ae(fp.arg(3), 0.0)
    assert ae(fp.arg(-3), 3.1415926535897932385)
    assert ae(fp.arg(3j), 1.5707963267948966192)
    assert ae(fp.arg(-3j), -1.5707963267948966192)
    assert ae(fp.arg(2+3j), 0.98279372324732906799)
    assert ae(fp.arg(-1-1j), -2.3561944901923449288)
    assert ae(fp.re(2.5), 2.5)
    assert ae(fp.re(2.5+3j), 2.5)
    assert ae(fp.im(2.5), 0.0)
    assert ae(fp.im(2.5+3j), 3.0)
    assert ae(fp.floor(2.5), 2.0)
    assert ae(fp.floor(2), 2.0)
    assert ae(fp.floor(2.0+0j), (2.0 + 0.0j))
    assert ae(fp.floor(-1.5-0.5j), (-2.0 - 1.0j))
    assert ae(fp.ceil(2.5), 3.0)
    assert ae(fp.ceil(2), 2.0)
    assert ae(fp.ceil(2.0+0j), (2.0 + 0.0j))
    assert ae(fp.ceil(-1.5-0.5j), (-1.0 + 0.0j))
def test_fp_cospi_sinpi():
    """sinpi/cospi at quarter-period points, complex offsets, and tiny
    arguments, against precomputed reference values."""
    assert ae(fp.sinpi(0), 0.0)
    assert ae(fp.sinpi(0.25), 0.7071067811865475244)
    assert ae(fp.sinpi(0.5), 1.0)
    assert ae(fp.sinpi(0.75), 0.7071067811865475244)
    assert ae(fp.sinpi(1), 0.0)
    assert ae(fp.sinpi(1.25), -0.7071067811865475244)
    assert ae(fp.sinpi(1.5), -1.0)
    assert ae(fp.sinpi(1.75), -0.7071067811865475244)
    assert ae(fp.sinpi(2), 0.0)
    assert ae(fp.sinpi(2.25), 0.7071067811865475244)
    assert ae(fp.sinpi(0+3j), (0.0 + 6195.8238636085899556j))
    assert ae(fp.sinpi(0.25+3j), (4381.1091260582448033 + 4381.1090689950686908j))
    assert ae(fp.sinpi(0.5+3j), (6195.8239443081075259 + 0.0j))
    assert ae(fp.sinpi(0.75+3j), (4381.1091260582448033 - 4381.1090689950686908j))
    assert ae(fp.sinpi(1+3j), (0.0 - 6195.8238636085899556j))
    assert ae(fp.sinpi(1.25+3j), (-4381.1091260582448033 - 4381.1090689950686908j))
    assert ae(fp.sinpi(1.5+3j), (-6195.8239443081075259 + 0.0j))
    assert ae(fp.sinpi(1.75+3j), (-4381.1091260582448033 + 4381.1090689950686908j))
    assert ae(fp.sinpi(2+3j), (0.0 + 6195.8238636085899556j))
    assert ae(fp.sinpi(2.25+3j), (4381.1091260582448033 + 4381.1090689950686908j))
    assert ae(fp.sinpi(-0.75), -0.7071067811865475244)
    assert ae(fp.sinpi(-1e-10), -3.1415926535897933529e-10)
    assert ae(fp.sinpi(1e-10), 3.1415926535897933529e-10)
    assert ae(fp.sinpi(1e-10+1e-10j), (3.141592653589793353e-10 + 3.1415926535897933528e-10j))
    assert ae(fp.sinpi(1e-10-1e-10j), (3.141592653589793353e-10 - 3.1415926535897933528e-10j))
    assert ae(fp.sinpi(-1e-10+1e-10j), (-3.141592653589793353e-10 + 3.1415926535897933528e-10j))
    assert ae(fp.sinpi(-1e-10-1e-10j), (-3.141592653589793353e-10 - 3.1415926535897933528e-10j))
    assert ae(fp.cospi(0), 1.0)
    assert ae(fp.cospi(0.25), 0.7071067811865475244)
    assert ae(fp.cospi(0.5), 0.0)
    assert ae(fp.cospi(0.75), -0.7071067811865475244)
    assert ae(fp.cospi(1), -1.0)
    assert ae(fp.cospi(1.25), -0.7071067811865475244)
    assert ae(fp.cospi(1.5), 0.0)
    assert ae(fp.cospi(1.75), 0.7071067811865475244)
    assert ae(fp.cospi(2), 1.0)
    assert ae(fp.cospi(2.25), 0.7071067811865475244)
    assert ae(fp.cospi(0+3j), (6195.8239443081075259 + 0.0j))
    assert ae(fp.cospi(0.25+3j), (4381.1091260582448033 - 4381.1090689950686908j))
    assert ae(fp.cospi(0.5+3j), (0.0 - 6195.8238636085899556j))
    assert ae(fp.cospi(0.75+3j), (-4381.1091260582448033 - 4381.1090689950686908j))
    assert ae(fp.cospi(1+3j), (-6195.8239443081075259 + 0.0j))
    assert ae(fp.cospi(1.25+3j), (-4381.1091260582448033 + 4381.1090689950686908j))
    assert ae(fp.cospi(1.5+3j), (0.0 + 6195.8238636085899556j))
    assert ae(fp.cospi(1.75+3j), (4381.1091260582448033 + 4381.1090689950686908j))
    assert ae(fp.cospi(2+3j), (6195.8239443081075259 + 0.0j))
    assert ae(fp.cospi(2.25+3j), (4381.1091260582448033 - 4381.1090689950686908j))
    assert ae(fp.cospi(-0.75), -0.7071067811865475244)
    assert ae(fp.sinpi(-0.7), -0.80901699437494750611)
    assert ae(fp.cospi(-0.7), -0.5877852522924730163)
    assert ae(fp.cospi(-3+2j), (-267.74676148374822225 + 0.0j))
    assert ae(fp.sinpi(-3+2j), (0.0 - 267.74489404101651426j))
    assert ae(fp.sinpi(-0.7+2j), (-216.6116802292079471 - 157.37650009392034693j))
    assert ae(fp.cospi(-0.7+2j), (-157.37759774921754565 + 216.61016943630197336j))
def test_fp_expj():
    """expj (exp(i*x)) and expjpi (exp(i*pi*x)) reference values."""
    assert ae(fp.expj(0), (1.0 + 0.0j))
    assert ae(fp.expj(1), (0.5403023058681397174 + 0.84147098480789650665j))
    assert ae(fp.expj(2), (-0.416146836547142387 + 0.9092974268256816954j))
    assert ae(fp.expj(0.75), (0.73168886887382088631 + 0.68163876002333416673j))
    assert ae(fp.expj(2+3j), (-0.020718731002242879378 + 0.045271253156092975488j))
    assert ae(fp.expjpi(0), (1.0 + 0.0j))
    assert ae(fp.expjpi(1), (-1.0 + 0.0j))
    assert ae(fp.expjpi(2), (1.0 + 0.0j))
    assert ae(fp.expjpi(0.75), (-0.7071067811865475244 + 0.7071067811865475244j))
    assert ae(fp.expjpi(2+3j), (0.000080699517570304599239 + 0.0j))
def test_fp_bernoulli():
    """Bernoulli numbers, including a vanishing odd index (B_11 = 0)."""
    assert ae(fp.bernoulli(0), 1.0)
    assert ae(fp.bernoulli(1), -0.5)
    assert ae(fp.bernoulli(2), 0.16666666666666666667)
    assert ae(fp.bernoulli(10), 0.075757575757575757576)
    assert ae(fp.bernoulli(11), 0.0)
def test_fp_gamma():
    """gamma, fac and loggamma over real, negative-real and complex
    arguments, including loggamma branch behaviour on the negative axis."""
    assert ae(fp.gamma(1), 1.0)
    assert ae(fp.gamma(1.5), 0.88622692545275801365)
    assert ae(fp.gamma(10), 362880.0)
    assert ae(fp.gamma(-0.5), -3.5449077018110320546)
    assert ae(fp.gamma(-7.1), 0.0016478244570263333622)
    assert ae(fp.gamma(12.3), 83385367.899970000963)
    assert ae(fp.gamma(2+0j), (1.0 + 0.0j))
    assert ae(fp.gamma(-2.5+0j), (-0.94530872048294188123 + 0.0j))
    assert ae(fp.gamma(3+4j), (0.0052255384713692141947 - 0.17254707929430018772j))
    assert ae(fp.gamma(-3-4j), (0.00001460997305874775607 - 0.000020760733311509070396j))
    assert ae(fp.fac(0), 1.0)
    assert ae(fp.fac(1), 1.0)
    assert ae(fp.fac(20), 2432902008176640000.0)
    assert ae(fp.fac(-3.5), -0.94530872048294188123)
    assert ae(fp.fac(2+3j), (-0.44011340763700171113 - 0.06363724312631702183j))
    assert ae(fp.loggamma(1.0), 0.0)
    assert ae(fp.loggamma(2.0), 0.0)
    assert ae(fp.loggamma(3.0), 0.69314718055994530942)
    assert ae(fp.loggamma(7.25), 7.0521854507385394449)
    assert ae(fp.loggamma(1000.0), 5905.2204232091812118)
    assert ae(fp.loggamma(1e50), 1.1412925464970229298e+52)
    assert ae(fp.loggamma(1e25+1e25j), (5.6125802751733671621e+26 + 5.7696599078528568383e+26j))
    assert ae(fp.loggamma(3+4j), (-1.7566267846037841105 + 4.7426644380346579282j))
    assert ae(fp.loggamma(-0.5), (1.2655121234846453965 - 3.1415926535897932385j))
    assert ae(fp.loggamma(-1.25), (1.3664317612369762346 - 6.2831853071795864769j))
    assert ae(fp.loggamma(-2.75), (0.0044878975359557733115 - 9.4247779607693797154j))
    assert ae(fp.loggamma(-3.5), (-1.3090066849930420464 - 12.566370614359172954j))
    assert ae(fp.loggamma(-4.5), (-2.8130840817693161197 - 15.707963267948966192j))
    assert ae(fp.loggamma(-2+3j), (-6.776523813485657093 - 4.568791367260286402j))
    assert ae(fp.loggamma(-1000.3), (-5912.8440347785205041 - 3144.7342462433830317j))
    assert ae(fp.loggamma(-100-100j), (-632.35117666833135562 - 158.37641469650352462j))
    assert ae(fp.loggamma(1e-10), 23.025850929882735237)
    assert ae(fp.loggamma(-1e-10), (23.02585092999817837 - 3.1415926535897932385j))
    assert ae(fp.loggamma(1e-10j), (23.025850929940456804 - 1.5707963268526181857j))
    assert ae(fp.loggamma(1e-10j-1e-10), (22.679277339718205716 - 2.3561944902500664954j))
def test_fp_psi():
    """psi (polygamma) of various orders and harmonic numbers."""
    assert ae(fp.psi(0, 3.7), 1.1671535393615114409)
    assert ae(fp.psi(0, 0.5), -1.9635100260214234794)
    assert ae(fp.psi(0, 1), -0.57721566490153286061)
    assert ae(fp.psi(0, -2.5), 1.1031566406452431872)
    assert ae(fp.psi(0, 12.9), 2.5179671503279156347)
    assert ae(fp.psi(0, 100), 4.6001618527380874002)
    assert ae(fp.psi(0, 2500.3), 7.8239660143238547877)
    assert ae(fp.psi(0, 1e40), 92.103403719761827391)
    assert ae(fp.psi(0, 1e200), 460.51701859880913677)
    assert ae(fp.psi(0, 3.7+0j), (1.1671535393615114409 + 0.0j))
    assert ae(fp.psi(1, 3), 0.39493406684822643647)
    assert ae(fp.psi(3, 2+3j), (-0.05383196209159972116 + 0.0076890935247364805218j))
    assert ae(fp.psi(4, -0.5+1j), (1.2719531355492328195 - 18.211833410936276774j))
    assert ae(fp.harmonic(0), 0.0)
    assert ae(fp.harmonic(1), 1.0)
    assert ae(fp.harmonic(2), 1.5)
    assert ae(fp.harmonic(100), 5.1873775176396202608)
    assert ae(fp.harmonic(-2.5), 1.2803723055467760478)
    assert ae(fp.harmonic(2+3j), (1.9390425294578375875 + 0.87336044981834544043j))
    assert ae(fp.harmonic(-5-4j), (2.3725754822349437733 - 2.4160904444801621j))
def test_fp_zeta():
    """zeta / altzeta / primezeta / riemannr, including trivial zeros,
    values near the pole at s=1, and Hurwitz-style two-argument calls."""
    assert ae(fp.zeta(1e100), 1.0)
    assert ae(fp.zeta(3), 1.2020569031595942854)
    assert ae(fp.zeta(2+0j), (1.6449340668482264365 + 0.0j))
    assert ae(fp.zeta(0.93), -13.713619351638164784)
    assert ae(fp.zeta(1.74), 1.9796863545771774095)
    assert ae(fp.zeta(0.0), -0.5)
    assert ae(fp.zeta(-1.0), -0.083333333333333333333)
    assert ae(fp.zeta(-2.0), 0.0)
    assert ae(fp.zeta(-3.0), 0.0083333333333333333333)
    assert ae(fp.zeta(-500.0), 0.0)
    assert ae(fp.zeta(-7.4), 0.0036537321227995882447)
    assert ae(fp.zeta(2.1), 1.5602165335033620158)
    assert ae(fp.zeta(26.9), 1.0000000079854809935)
    assert ae(fp.zeta(26), 1.0000000149015548284)
    assert ae(fp.zeta(27), 1.0000000074507117898)
    assert ae(fp.zeta(28), 1.0000000037253340248)
    assert ae(fp.zeta(27.1), 1.000000006951755045)
    assert ae(fp.zeta(32.7), 1.0000000001433243232)
    assert ae(fp.zeta(100), 1.0)
    assert ae(fp.altzeta(3.5), 0.92755357777394803511)
    assert ae(fp.altzeta(1), 0.69314718055994530942)
    assert ae(fp.altzeta(2), 0.82246703342411321824)
    assert ae(fp.altzeta(0), 0.5)
    assert ae(fp.zeta(-2+3j, 1), (0.13297115587929864827 + 0.12305330040458776494j))
    assert ae(fp.zeta(-2+3j, 5), (18.384866151867576927 - 11.377015110597711009j))
    assert ae(fp.zeta(1.0000000001), 9999999173.1735741337)
    assert ae(fp.zeta(0.9999999999), -9999999172.0191428039)
    assert ae(fp.zeta(1+0.000000001j), (0.57721566490153286061 - 999999999.99999993765j))
    assert ae(fp.primezeta(2.5+4j), (-0.16922458243438033385 - 0.010847965298387727811j))
    assert ae(fp.primezeta(4), 0.076993139764246844943)
    assert ae(fp.riemannr(3.7), 2.3034079839110855717)
    assert ae(fp.riemannr(8), 3.9011860449341499474)
    assert ae(fp.riemannr(3+4j), (2.2369653314259991796 + 1.6339943856990281694j))
def test_fp_hyp2f1():
    """hyp2f1 with rational (tuple) parameters and complex arguments."""
    assert ae(fp.hyp2f1(1, (3,2), 3.25, 5.0), (-0.46600275923108143059 - 0.74393667908854842325j))
    assert ae(fp.hyp2f1(1+1j, (3,2), 3.25, 5.0), (-5.9208875603806515987 - 2.3813557707889590686j))
    assert ae(fp.hyp2f1(1+1j, (3,2), 3.25, 2+3j), (0.17174552030925080445 + 0.19589781970539389999j))
def test_fp_erf():
    """Spot-check fp.erf and fp.erfc (double precision) against
    high-precision reference values for real, imaginary and complex
    arguments, including limits at +/-inf and saturation for large |x|."""
    # int, float and complex inputs must agree exactly on the real axis.
    assert fp.erf(2) == fp.erf(2.0) == fp.erf(2.0+0.0j)
    # Limits: erf(+inf) = 1, erf(-inf) = -1.
    assert fp.erf(fp.inf) == 1.0
    assert fp.erf(fp.ninf) == -1.0
    # erf is odd: each value is paired with its negated argument.
    assert ae(fp.erf(0), 0.0)
    assert ae(fp.erf(-0), -0.0)
    assert ae(fp.erf(0.3), 0.32862675945912741619)
    assert ae(fp.erf(-0.3), -0.32862675945912741619)
    assert ae(fp.erf(0.9), 0.79690821242283213966)
    assert ae(fp.erf(-0.9), -0.79690821242283213966)
    assert ae(fp.erf(1.0), 0.84270079294971486934)
    assert ae(fp.erf(-1.0), -0.84270079294971486934)
    assert ae(fp.erf(1.1), 0.88020506957408172966)
    assert ae(fp.erf(-1.1), -0.88020506957408172966)
    # Large real |x|: erf saturates to +/-1 in double precision.
    assert ae(fp.erf(8.5), 1.0)
    assert ae(fp.erf(-8.5), -1.0)
    assert ae(fp.erf(9.1), 1.0)
    assert ae(fp.erf(-9.1), -1.0)
    assert ae(fp.erf(20.0), 1.0)
    assert ae(fp.erf(-20.0), -1.0)
    assert ae(fp.erf(10000.0), 1.0)
    assert ae(fp.erf(-10000.0), -1.0)
    assert ae(fp.erf(1e+50), 1.0)
    assert ae(fp.erf(-1e+50), -1.0)
    # Purely imaginary argument gives a purely imaginary result.
    assert ae(fp.erf(1j), 1.650425758797542876j)
    assert ae(fp.erf(-1j), -1.650425758797542876j)
    # General complex arguments (oddness checked via the negated pair).
    assert ae(fp.erf((2+3j)), (-20.829461427614568389 + 8.6873182714701631444j))
    assert ae(fp.erf(-(2+3j)), -(-20.829461427614568389 + 8.6873182714701631444j))
    assert ae(fp.erf((8+9j)), (-1072004.2525062051158 + 364149.91954310255423j))
    assert ae(fp.erf(-(8+9j)), -(-1072004.2525062051158 + 364149.91954310255423j))
    # Complementary error function: erfc(+inf) = 0, erfc(-inf) = 2,
    # and erfc(0) = 1 for int, float and complex zero alike.
    assert fp.erfc(fp.inf) == 0.0
    assert fp.erfc(fp.ninf) == 2.0
    assert fp.erfc(0) == 1
    assert fp.erfc(-0.0) == 1
    assert fp.erfc(0+0j) == 1
    # erfc satisfies erfc(-x) = 2 - erfc(x); pairs below reflect that.
    assert ae(fp.erfc(0.3), 0.67137324054087258381)
    assert ae(fp.erfc(-0.3), 1.3286267594591274162)
    assert ae(fp.erfc(0.9), 0.20309178757716786034)
    assert ae(fp.erfc(-0.9), 1.7969082124228321397)
    assert ae(fp.erfc(1.0), 0.15729920705028513066)
    assert ae(fp.erfc(-1.0), 1.8427007929497148693)
    assert ae(fp.erfc(1.1), 0.11979493042591827034)
    assert ae(fp.erfc(-1.1), 1.8802050695740817297)
    # Large positive x: erfc underflows gradually toward 0 (tiny but
    # nonzero values here, unlike erf which saturates at 1).
    assert ae(fp.erfc(8.5), 2.7623240713337714461e-33)
    assert ae(fp.erfc(-8.5), 2.0)
    assert ae(fp.erfc(9.1), 6.6969004279886077452e-38)
    assert ae(fp.erfc(-9.1), 2.0)
    assert ae(fp.erfc(20.0), 5.3958656116079009289e-176)
    assert ae(fp.erfc(-20.0), 2.0)
    assert ae(fp.erfc(10000.0), 0.0)
    assert ae(fp.erfc(-10000.0), 2.0)
    assert ae(fp.erfc(1e+50), 0.0)
    assert ae(fp.erfc(-1e+50), 2.0)
    # Complex erfc; the (2+3j) cases use a relaxed tolerance (1e-13)
    # because of cancellation in the dominant real part.
    assert ae(fp.erfc(1j), (1.0 - 1.650425758797542876j))
    assert ae(fp.erfc(-1j), (1.0 + 1.650425758797542876j))
    assert ae(fp.erfc((2+3j)), (21.829461427614568389 - 8.6873182714701631444j), 1e-13)
    assert ae(fp.erfc(-(2+3j)), (-19.829461427614568389 + 8.6873182714701631444j), 1e-13)
    assert ae(fp.erfc((8+9j)), (1072005.2525062051158 - 364149.91954310255423j))
    assert ae(fp.erfc(-(8+9j)), (-1072003.2525062051158 + 364149.91954310255423j))
    # Complex input on the real axis must match the real-path value.
    assert ae(fp.erfc(20+0j), (5.3958656116079009289e-176 + 0.0j))
def test_fp_lambertw():
    """Spot-check fp.lambertw (double precision) on the principal branch
    (k=0, default) and on branches k=-1 and k=2, for the same set of
    real and complex arguments, against high-precision references."""
    # ---- Principal branch k = 0 ----
    assert ae(fp.lambertw(0.0), 0.0)
    # lambertw(1) is the omega constant.
    assert ae(fp.lambertw(1.0), 0.567143290409783873)
    assert ae(fp.lambertw(7.5), 1.5662309537823875394)
    # -0.25 > -1/e, so the principal-branch value is still real.
    assert ae(fp.lambertw(-0.25), -0.35740295618138890307)
    # Below the branch point -1/e the result becomes complex.
    assert ae(fp.lambertw(-10.0), (1.3699809685212708156 + 2.140194527074713196j))
    assert ae(fp.lambertw(0+0j), (0.0 + 0.0j))
    assert ae(fp.lambertw(4+0j), (1.2021678731970429392 + 0.0j))
    assert ae(fp.lambertw(1000.5), 5.2500227450408980127)
    assert ae(fp.lambertw(1e100), 224.84310644511850156)
    assert ae(fp.lambertw(-1000.0), (5.1501630246362515223 + 2.6641981432905204596j))
    # Tiny arguments: lambertw(z) ~ z near 0.
    assert ae(fp.lambertw(1e-10), 9.9999999990000003645e-11)
    assert ae(fp.lambertw(1e-10j), (1.0000000000000000728e-20 + 1.0000000000000000364e-10j))
    assert ae(fp.lambertw(3+4j), (1.2815618061237758782 + 0.53309522202097107131j))
    assert ae(fp.lambertw(-3-4j), (1.0750730665692549276 - 1.3251023817343588823j))
    assert ae(fp.lambertw(10000+1000j), (7.2361526563371602186 + 0.087567810943839352034j))
    # ---- Branch k = -1 (second real branch on (-1/e, 0)) ----
    # lambertw(0, -1) diverges to -inf.
    assert ae(fp.lambertw(0.0, -1), -fp.inf)
    assert ae(fp.lambertw(1.0, -1), (-1.5339133197935745079 - 4.3751851530618983855j))
    assert ae(fp.lambertw(7.5, -1), (0.44125668415098614999 - 4.8039842008452390179j))
    # On (-1/e, 0) the k=-1 branch is real-valued.
    assert ae(fp.lambertw(-0.25, -1), -2.1532923641103496492)
    assert ae(fp.lambertw(-10.0, -1), (1.3699809685212708156 - 2.140194527074713196j))
    assert ae(fp.lambertw(0+0j, -1), -fp.inf)
    assert ae(fp.lambertw(4+0j, -1), (-0.15730793189620765317 - 4.6787800704666656212j))
    assert ae(fp.lambertw(1000.5, -1), (4.9153765415404024736 - 5.4465682700815159569j))
    assert ae(fp.lambertw(1e100, -1), (224.84272130101601052 - 6.2553713838167244141j))
    assert ae(fp.lambertw(-1000.0, -1), (5.1501630246362515223 - 2.6641981432905204596j))
    assert ae(fp.lambertw(1e-10, -1), (-26.303186778379041521 - 3.2650939117038283975j))
    assert ae(fp.lambertw(1e-10j, -1), (-26.297238779529035028 - 1.6328071613455765135j))
    assert ae(fp.lambertw(3+4j, -1), (0.25856740686699741676 - 3.8521166861614355895j))
    assert ae(fp.lambertw(-3-4j, -1), (-0.32028750204310768396 - 6.8801677192091972343j))
    assert ae(fp.lambertw(10000+1000j, -1), (7.0255308742285435567 - 5.5177506835734067601j))
    # ---- Branch k = 2 (always complex away from 0) ----
    assert ae(fp.lambertw(0.0, 2), -fp.inf)
    assert ae(fp.lambertw(1.0, 2), (-2.4015851048680028842 + 10.776299516115070898j))
    assert ae(fp.lambertw(7.5, 2), (-0.38003357962843791529 + 10.960916473368746184j))
    assert ae(fp.lambertw(-0.25, 2), (-4.0558735269061511898 + 13.852334658567271386j))
    assert ae(fp.lambertw(-10.0, 2), (-0.34479123764318858696 + 14.112740596763592363j))
    assert ae(fp.lambertw(0+0j, 2), -fp.inf)
    assert ae(fp.lambertw(4+0j, 2), (-1.0070343323804262788 + 10.903476551861683082j))
    assert ae(fp.lambertw(1000.5, 2), (4.4076185165459395295 + 11.365524591091402177j))
    assert ae(fp.lambertw(1e100, 2), (224.84156762724875878 + 12.510785262632255672j))
    assert ae(fp.lambertw(-1000.0, 2), (4.1984245610246530756 + 14.420478573754313845j))
    assert ae(fp.lambertw(1e-10, 2), (-26.362258095445866488 + 9.7800247407031482519j))
    assert ae(fp.lambertw(1e-10j, 2), (-26.384250801683084252 + 11.403535950607739763j))
    assert ae(fp.lambertw(3+4j, 2), (-0.86554679943333993562 + 11.849956798331992027j))
    assert ae(fp.lambertw(-3-4j, 2), (-0.55792273874679112639 + 8.7173627024159324811j))
    assert ae(fp.lambertw(10000+1000j, 2), (6.6223802254585662734 + 11.61348646825020766j))
def test_fp_stress_ei_e1():
# Can be tightened on recent Pythons with more accurate math/cmath
ATOL = 1e-13
PTOL = 1e-12
v = fp.e1(1.1641532182693481445e-10)
assert ae(v, 22.296641293693077672, tol=ATOL)
assert type(v) is float
v = fp.e1(0.25)
assert ae(v, 1.0442826344437381945, tol=ATOL)
assert type(v) is float
v = fp.e1(1.0)
assert ae(v, 0.21938393439552027368, tol=ATOL)
assert type(v) is float
v = fp.e1(2.0)
assert ae(v, 0.048900510708061119567, tol=ATOL)
assert type(v) is float
v = fp.e1(5.0)
assert ae(v, 0.0011482955912753257973, tol=ATOL)
assert type(v) is float
v = fp.e1(20.0)
assert ae(v, 9.8355252906498816904e-11, tol=ATOL)
assert type(v) is float
v = fp.e1(30.0)
assert ae(v, 3.0215520106888125448e-15, tol=ATOL)
assert type(v) is float
v = fp.e1(40.0)
assert ae(v, 1.0367732614516569722e-19, tol=ATOL)
assert type(v) is float
v = fp.e1(50.0)
assert ae(v, 3.7832640295504590187e-24, tol=ATOL)
assert type(v) is float
v = fp.e1(80.0)
assert ae(v, 2.2285432586884729112e-37, tol=ATOL)
assert type(v) is float
v = fp.e1((1.1641532182693481445e-10 + 0.0j))
assert ae(v, (22.296641293693077672 + 0.0j), tol=ATOL)
assert ae(v.real, 22.296641293693077672, tol=PTOL)
assert v.imag == 0
v = fp.e1((0.25 + 0.0j))
assert ae(v, (1.0442826344437381945 + 0.0j), tol=ATOL)
assert ae(v.real, 1.0442826344437381945, tol=PTOL)
assert v.imag == 0
v = fp.e1((1.0 + 0.0j))
assert ae(v, (0.21938393439552027368 + 0.0j), tol=ATOL)
assert ae(v.real, 0.21938393439552027368, tol=PTOL)
assert v.imag == 0
v = fp.e1((2.0 + 0.0j))
assert ae(v, (0.048900510708061119567 + 0.0j), tol=ATOL)
assert ae(v.real, 0.048900510708061119567, tol=PTOL)
assert v.imag == 0
v = fp.e1((5.0 + 0.0j))
assert ae(v, (0.0011482955912753257973 + 0.0j), tol=ATOL)
assert ae(v.real, 0.0011482955912753257973, tol=PTOL)
assert v.imag == 0
v = fp.e1((20.0 + 0.0j))
assert ae(v, (9.8355252906498816904e-11 + 0.0j), tol=ATOL)
assert ae(v.real, 9.8355252906498816904e-11, tol=PTOL)
assert v.imag == 0
v = fp.e1((30.0 + 0.0j))
assert ae(v, (3.0215520106888125448e-15 + 0.0j), tol=ATOL)
assert ae(v.real, 3.0215520106888125448e-15, tol=PTOL)
assert v.imag == 0
v = fp.e1((40.0 + 0.0j))
assert ae(v, (1.0367732614516569722e-19 + 0.0j), tol=ATOL)
assert ae(v.real, 1.0367732614516569722e-19, tol=PTOL)
assert v.imag == 0
v = fp.e1((50.0 + 0.0j))
assert ae(v, (3.7832640295504590187e-24 + 0.0j), tol=ATOL)
assert ae(v.real, 3.7832640295504590187e-24, tol=PTOL)
assert v.imag == 0
v = fp.e1((80.0 + 0.0j))
assert ae(v, (2.2285432586884729112e-37 + 0.0j), tol=ATOL)
assert ae(v.real, 2.2285432586884729112e-37, tol=PTOL)
assert v.imag == 0
v = fp.e1((4.6566128730773925781e-10 + 1.1641532182693481445e-10j))
assert ae(v, (20.880034622014215597 - 0.24497866301044883237j), tol=ATOL)
assert ae(v.real, 20.880034622014215597, tol=PTOL)
assert ae(v.imag, -0.24497866301044883237, tol=PTOL)
v = fp.e1((1.0 + 0.25j))
assert ae(v, (0.19731063945004229095 - 0.087366045774299963672j), tol=ATOL)
assert ae(v.real, 0.19731063945004229095, tol=PTOL)
assert ae(v.imag, -0.087366045774299963672, tol=PTOL)
v = fp.e1((4.0 + 1.0j))
assert ae(v, (0.0013106173980145506944 - 0.0034542480199350626699j), tol=ATOL)
assert ae(v.real, 0.0013106173980145506944, tol=PTOL)
assert ae(v.imag, -0.0034542480199350626699, tol=PTOL)
v = fp.e1((8.0 + 2.0j))
assert ae(v, (-0.000022278049065270225945 - 0.000029191940456521555288j), tol=ATOL)
assert ae(v.real, -0.000022278049065270225945, tol=PTOL)
assert ae(v.imag, -0.000029191940456521555288, tol=PTOL)
v = fp.e1((20.0 + 5.0j))
assert ae(v, (4.7711374515765346894e-11 + 8.2902652405126947359e-11j), tol=ATOL)
assert ae(v.real, 4.7711374515765346894e-11, tol=PTOL)
assert ae(v.imag, 8.2902652405126947359e-11, tol=PTOL)
v = fp.e1((80.0 + 20.0j))
assert ae(v, (3.8353473865788235787e-38 - 2.129247592349605139e-37j), tol=ATOL)
assert ae(v.real, 3.8353473865788235787e-38, tol=PTOL)
assert ae(v.imag, -2.129247592349605139e-37, tol=PTOL)
v = fp.e1((120.0 + 30.0j))
assert ae(v, (2.3836002337480334716e-55 + 5.6704043587126198306e-55j), tol=ATOL)
assert ae(v.real, 2.3836002337480334716e-55, tol=PTOL)
assert ae(v.imag, 5.6704043587126198306e-55, tol=PTOL)
v = fp.e1((160.0 + 40.0j))
assert ae(v, (-1.6238022898654510661e-72 - 1.104172355572287367e-72j), tol=ATOL)
assert ae(v.real, -1.6238022898654510661e-72, tol=PTOL)
assert ae(v.imag, -1.104172355572287367e-72, tol=PTOL)
v = fp.e1((200.0 + 50.0j))
assert ae(v, (6.6800061461666228487e-90 + 1.4473816083541016115e-91j), tol=ATOL)
assert ae(v.real, 6.6800061461666228487e-90, tol=PTOL)
assert ae(v.imag, 1.4473816083541016115e-91, tol=PTOL)
v = fp.e1((320.0 + 80.0j))
assert ae(v, (4.2737871527778786157e-143 + 3.1789935525785660314e-142j), tol=ATOL)
assert ae(v.real, 4.2737871527778786157e-143, tol=PTOL)
assert ae(v.imag, 3.1789935525785660314e-142, tol=PTOL)
v = fp.e1((1.1641532182693481445e-10 + 1.1641532182693481445e-10j))
assert ae(v, (21.950067703413105017 - 0.7853981632810329878j), tol=ATOL)
assert ae(v.real, 21.950067703413105017, tol=PTOL)
assert ae(v.imag, -0.7853981632810329878, tol=PTOL)
v = fp.e1((0.25 + 0.25j))
assert ae(v, (0.71092525792923287894 - 0.56491812441304194711j), tol=ATOL)
assert ae(v.real, 0.71092525792923287894, tol=PTOL)
assert ae(v.imag, -0.56491812441304194711, tol=PTOL)
v = fp.e1((1.0 + 1.0j))
assert ae(v, (0.00028162445198141832551 - 0.17932453503935894015j), tol=ATOL)
assert ae(v.real, 0.00028162445198141832551, tol=PTOL)
assert ae(v.imag, -0.17932453503935894015, tol=PTOL)
v = fp.e1((2.0 + 2.0j))
assert ae(v, (-0.033767089606562004246 - 0.018599414169750541925j), tol=ATOL)
assert ae(v.real, -0.033767089606562004246, tol=PTOL)
assert ae(v.imag, -0.018599414169750541925, tol=PTOL)
v = fp.e1((5.0 + 5.0j))
assert ae(v, (0.0007266506660356393891 + 0.00047102780163522245054j), tol=ATOL)
assert ae(v.real, 0.0007266506660356393891, tol=PTOL)
assert ae(v.imag, 0.00047102780163522245054, tol=PTOL)
v = fp.e1((20.0 + 20.0j))
assert ae(v, (-2.3824537449367396579e-11 - 6.6969873156525615158e-11j), tol=ATOL)
assert ae(v.real, -2.3824537449367396579e-11, tol=PTOL)
assert ae(v.imag, -6.6969873156525615158e-11, tol=PTOL)
v = fp.e1((30.0 + 30.0j))
assert ae(v, (1.7316045841744061617e-15 + 1.3065678019487308689e-15j), tol=ATOL)
assert ae(v.real, 1.7316045841744061617e-15, tol=PTOL)
assert ae(v.imag, 1.3065678019487308689e-15, tol=PTOL)
v = fp.e1((40.0 + 40.0j))
assert ae(v, (-7.4001043002899232182e-20 - 4.991847855336816304e-21j), tol=ATOL)
assert ae(v.real, -7.4001043002899232182e-20, tol=PTOL)
assert ae(v.imag, -4.991847855336816304e-21, tol=PTOL)
v = fp.e1((50.0 + 50.0j))
assert ae(v, (2.3566128324644641219e-24 - 1.3188326726201614778e-24j), tol=ATOL)
assert ae(v.real, 2.3566128324644641219e-24, tol=PTOL)
assert ae(v.imag, -1.3188326726201614778e-24, tol=PTOL)
v = fp.e1((80.0 + 80.0j))
assert ae(v, (9.8279750572186526673e-38 + 1.243952841288868831e-37j), tol=ATOL)
assert ae(v.real, 9.8279750572186526673e-38, tol=PTOL)
assert ae(v.imag, 1.243952841288868831e-37, tol=PTOL)
v = fp.e1((1.1641532182693481445e-10 + 4.6566128730773925781e-10j))
assert ae(v, (20.880034621664969632 - 1.3258176632023711778j), tol=ATOL)
assert ae(v.real, 20.880034621664969632, tol=PTOL)
assert ae(v.imag, -1.3258176632023711778, tol=PTOL)
v = fp.e1((0.25 + 1.0j))
assert ae(v, (-0.16868306393667788761 - 0.4858011885947426971j), tol=ATOL)
assert ae(v.real, -0.16868306393667788761, tol=PTOL)
assert ae(v.imag, -0.4858011885947426971, tol=PTOL)
v = fp.e1((1.0 + 4.0j))
assert ae(v, (0.03373591813926547318 + 0.073523452241083821877j), tol=ATOL)
assert ae(v.real, 0.03373591813926547318, tol=PTOL)
assert ae(v.imag, 0.073523452241083821877, tol=PTOL)
v = fp.e1((2.0 + 8.0j))
assert ae(v, (-0.015392833434733785143 - 0.0031747121557605415914j), tol=ATOL)
assert ae(v.real, -0.015392833434733785143, tol=PTOL)
assert ae(v.imag, -0.0031747121557605415914, tol=PTOL)
v = fp.e1((5.0 + 20.0j))
assert ae(v, (-0.00024419662286542966525 - 0.00021008322966152755674j), tol=ATOL)
assert ae(v.real, -0.00024419662286542966525, tol=PTOL)
assert ae(v.imag, -0.00021008322966152755674, tol=PTOL)
v = fp.e1((20.0 + 80.0j))
assert ae(v, (2.3255552781051330088e-11 + 8.9463918891349438007e-12j), tol=ATOL)
assert ae(v.real, 2.3255552781051330088e-11, tol=PTOL)
assert ae(v.imag, 8.9463918891349438007e-12, tol=PTOL)
v = fp.e1((30.0 + 120.0j))
assert ae(v, (-2.7068919097124652332e-16 - 7.0477762411705130239e-16j), tol=ATOL)
assert ae(v.real, -2.7068919097124652332e-16, tol=PTOL)
assert ae(v.imag, -7.0477762411705130239e-16, tol=PTOL)
v = fp.e1((40.0 + 160.0j))
assert ae(v, (-1.1695597827678024687e-20 + 2.2907401455645736661e-20j), tol=ATOL)
assert ae(v.real, -1.1695597827678024687e-20, tol=PTOL)
assert ae(v.imag, 2.2907401455645736661e-20, tol=PTOL)
v = fp.e1((50.0 + 200.0j))
assert ae(v, (9.0323746914410162531e-25 - 2.3950601790033530935e-25j), tol=ATOL)
assert ae(v.real, 9.0323746914410162531e-25, tol=PTOL)
assert ae(v.imag, -2.3950601790033530935e-25, tol=PTOL)
v = fp.e1((80.0 + 320.0j))
assert ae(v, (3.4819106748728063576e-38 - 4.215653005615772724e-38j), tol=ATOL)
assert ae(v.real, 3.4819106748728063576e-38, tol=PTOL)
assert ae(v.imag, -4.215653005615772724e-38, tol=PTOL)
v = fp.e1((0.0 + 1.1641532182693481445e-10j))
assert ae(v, (22.29664129357666235 - 1.5707963266784812974j), tol=ATOL)
assert ae(v.real, 22.29664129357666235, tol=PTOL)
assert ae(v.imag, -1.5707963266784812974, tol=PTOL)
v = fp.e1((0.0 + 0.25j))
assert ae(v, (0.82466306258094565309 - 1.3216627564751394551j), tol=ATOL)
assert ae(v.real, 0.82466306258094565309, tol=PTOL)
assert ae(v.imag, -1.3216627564751394551, tol=PTOL)
v = fp.e1((0.0 + 1.0j))
assert ae(v, (-0.33740392290096813466 - 0.62471325642771360429j), tol=ATOL)
assert ae(v.real, -0.33740392290096813466, tol=PTOL)
assert ae(v.imag, -0.62471325642771360429, tol=PTOL)
v = fp.e1((0.0 + 2.0j))
assert ae(v, (-0.4229808287748649957 + 0.034616650007798229345j), tol=ATOL)
assert ae(v.real, -0.4229808287748649957, tol=PTOL)
assert ae(v.imag, 0.034616650007798229345, tol=PTOL)
v = fp.e1((0.0 + 5.0j))
assert ae(v, (0.19002974965664387862 - 0.020865081850222481957j), tol=ATOL)
assert ae(v.real, 0.19002974965664387862, tol=PTOL)
assert ae(v.imag, -0.020865081850222481957, tol=PTOL)
v = fp.e1((0.0 + 20.0j))
assert ae(v, (-0.04441982084535331654 - 0.022554625751456779068j), tol=ATOL)
assert ae(v.real, -0.04441982084535331654, tol=PTOL)
assert ae(v.imag, -0.022554625751456779068, tol=PTOL)
v = fp.e1((0.0 + 30.0j))
assert ae(v, (0.033032417282071143779 - 0.0040397867645455082476j), tol=ATOL)
assert ae(v.real, 0.033032417282071143779, tol=PTOL)
assert ae(v.imag, -0.0040397867645455082476, tol=PTOL)
v = fp.e1((0.0 + 40.0j))
assert ae(v, (-0.019020007896208766962 + 0.016188792559887887544j), tol=ATOL)
assert ae(v.real, -0.019020007896208766962, tol=PTOL)
assert ae(v.imag, 0.016188792559887887544, tol=PTOL)
v = fp.e1((0.0 + 50.0j))
assert ae(v, (0.0056283863241163054402 - 0.019179254308960724503j), tol=ATOL)
assert ae(v.real, 0.0056283863241163054402, tol=PTOL)
assert ae(v.imag, -0.019179254308960724503, tol=PTOL)
v = fp.e1((0.0 + 80.0j))
assert ae(v, (0.012402501155070958192 + 0.0015345601175906961199j), tol=ATOL)
assert ae(v.real, 0.012402501155070958192, tol=PTOL)
assert ae(v.imag, 0.0015345601175906961199, tol=PTOL)
v = fp.e1((-1.1641532182693481445e-10 + 4.6566128730773925781e-10j))
assert ae(v, (20.880034621432138988 - 1.8157749894560994861j), tol=ATOL)
assert ae(v.real, 20.880034621432138988, tol=PTOL)
assert ae(v.imag, -1.8157749894560994861, tol=PTOL)
v = fp.e1((-0.25 + 1.0j))
assert ae(v, (-0.59066621214766308594 - 0.74474454765205036972j), tol=ATOL)
assert ae(v.real, -0.59066621214766308594, tol=PTOL)
assert ae(v.imag, -0.74474454765205036972, tol=PTOL)
v = fp.e1((-1.0 + 4.0j))
assert ae(v, (0.49739047283060471093 + 0.41543605404038863174j), tol=ATOL)
assert ae(v.real, 0.49739047283060471093, tol=PTOL)
assert ae(v.imag, 0.41543605404038863174, tol=PTOL)
v = fp.e1((-2.0 + 8.0j))
assert ae(v, (-0.8705211147733730969 + 0.24099328498605539667j), tol=ATOL)
assert ae(v.real, -0.8705211147733730969, tol=PTOL)
assert ae(v.imag, 0.24099328498605539667, tol=PTOL)
v = fp.e1((-5.0 + 20.0j))
assert ae(v, (-7.0789514293925893007 - 1.6102177171960790536j), tol=ATOL)
assert ae(v.real, -7.0789514293925893007, tol=PTOL)
assert ae(v.imag, -1.6102177171960790536, tol=PTOL)
v = fp.e1((-20.0 + 80.0j))
assert ae(v, (5855431.4907298084434 - 720920.93315409165707j), tol=ATOL)
assert ae(v.real, 5855431.4907298084434, tol=PTOL)
assert ae(v.imag, -720920.93315409165707, tol=PTOL)
v = fp.e1((-30.0 + 120.0j))
assert ae(v, (-65402491644.703470747 - 56697658399.657460294j), tol=ATOL)
assert ae(v.real, -65402491644.703470747, tol=PTOL)
assert ae(v.imag, -56697658399.657460294, tol=PTOL)
v = fp.e1((-40.0 + 160.0j))
assert ae(v, (25504929379604.776769 + 1429035198630573.2463j), tol=ATOL)
assert ae(v.real, 25504929379604.776769, tol=PTOL)
assert ae(v.imag, 1429035198630573.2463, tol=PTOL)
v = fp.e1((-50.0 + 200.0j))
assert ae(v, (18437746526988116954.0 - 17146362239046152345.0j), tol=ATOL)
assert ae(v.real, 18437746526988116954.0, tol=PTOL)
assert ae(v.imag, -17146362239046152345.0, tol=PTOL)
v = fp.e1((-80.0 + 320.0j))
assert ae(v, (3.3464697299634526706e+31 - 1.6473152633843023919e+32j), tol=ATOL)
assert ae(v.real, 3.3464697299634526706e+31, tol=PTOL)
assert ae(v.imag, -1.6473152633843023919e+32, tol=PTOL)
v = fp.e1((-4.6566128730773925781e-10 + 1.1641532182693481445e-10j))
assert ae(v, (20.880034621082893023 - 2.8966139903465137624j), tol=ATOL)
assert ae(v.real, 20.880034621082893023, tol=PTOL)
assert ae(v.imag, -2.8966139903465137624, tol=PTOL)
v = fp.e1((-1.0 + 0.25j))
assert ae(v, (-1.8942716983721074932 - 2.4689102827070540799j), tol=ATOL)
assert ae(v.real, -1.8942716983721074932, tol=PTOL)
assert ae(v.imag, -2.4689102827070540799, tol=PTOL)
v = fp.e1((-4.0 + 1.0j))
assert ae(v, (-14.806699492675420438 + 9.1384225230837893776j), tol=ATOL)
assert ae(v.real, -14.806699492675420438, tol=PTOL)
assert ae(v.imag, 9.1384225230837893776, tol=PTOL)
v = fp.e1((-8.0 + 2.0j))
assert ae(v, (54.633252667426386294 + 413.20318163814670688j), tol=ATOL)
assert ae(v.real, 54.633252667426386294, tol=PTOL)
assert ae(v.imag, 413.20318163814670688, tol=PTOL)
v = fp.e1((-20.0 + 5.0j))
assert ae(v, (-711836.97165402624643 - 24745250.939695900956j), tol=ATOL)
assert ae(v.real, -711836.97165402624643, tol=PTOL)
assert ae(v.imag, -24745250.939695900956, tol=PTOL)
v = fp.e1((-80.0 + 20.0j))
assert ae(v, (-4.2139911108612653091e+32 + 5.3367124741918251637e+32j), tol=ATOL)
assert ae(v.real, -4.2139911108612653091e+32, tol=PTOL)
assert ae(v.imag, 5.3367124741918251637e+32, tol=PTOL)
v = fp.e1((-120.0 + 30.0j))
assert ae(v, (9.7760616203707508892e+48 - 1.058257682317195792e+50j), tol=ATOL)
assert ae(v.real, 9.7760616203707508892e+48, tol=PTOL)
assert ae(v.imag, -1.058257682317195792e+50, tol=PTOL)
v = fp.e1((-160.0 + 40.0j))
assert ae(v, (8.7065541466623638861e+66 + 1.6577106725141739889e+67j), tol=ATOL)
assert ae(v.real, 8.7065541466623638861e+66, tol=PTOL)
assert ae(v.imag, 1.6577106725141739889e+67, tol=PTOL)
v = fp.e1((-200.0 + 50.0j))
assert ae(v, (-3.070744996327018106e+84 - 1.7243244846769415903e+84j), tol=ATOL)
assert ae(v.real, -3.070744996327018106e+84, tol=PTOL)
assert ae(v.imag, -1.7243244846769415903e+84, tol=PTOL)
v = fp.e1((-320.0 + 80.0j))
assert ae(v, (9.9960598637998647276e+135 - 2.6855081527595608863e+136j), tol=ATOL)
assert ae(v.real, 9.9960598637998647276e+135, tol=PTOL)
assert ae(v.imag, -2.6855081527595608863e+136, tol=PTOL)
v = fp.e1(-1.1641532182693481445e-10)
assert ae(v, (22.296641293460247028 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, 22.296641293460247028, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.e1(-0.25)
assert ae(v, (0.54254326466191372953 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, 0.54254326466191372953, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.e1(-1.0)
assert ae(v, (-1.8951178163559367555 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -1.8951178163559367555, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.e1(-2.0)
assert ae(v, (-4.9542343560018901634 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -4.9542343560018901634, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.e1(-5.0)
assert ae(v, (-40.185275355803177455 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -40.185275355803177455, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.e1(-20.0)
assert ae(v, (-25615652.66405658882 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -25615652.66405658882, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.e1(-30.0)
assert ae(v, (-368973209407.27419706 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -368973209407.27419706, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.e1(-40.0)
assert ae(v, (-6039718263611241.5784 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -6039718263611241.5784, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.e1(-50.0)
assert ae(v, (-1.0585636897131690963e+20 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -1.0585636897131690963e+20, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.e1(-80.0)
assert ae(v, (-7.0146000049047999696e+32 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -7.0146000049047999696e+32, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.e1((-1.1641532182693481445e-10 + 0.0j))
assert ae(v, (22.296641293460247028 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, 22.296641293460247028, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.e1((-0.25 + 0.0j))
assert ae(v, (0.54254326466191372953 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, 0.54254326466191372953, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.e1((-1.0 + 0.0j))
assert ae(v, (-1.8951178163559367555 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -1.8951178163559367555, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.e1((-2.0 + 0.0j))
assert ae(v, (-4.9542343560018901634 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -4.9542343560018901634, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.e1((-5.0 + 0.0j))
assert ae(v, (-40.185275355803177455 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -40.185275355803177455, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.e1((-20.0 + 0.0j))
assert ae(v, (-25615652.66405658882 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -25615652.66405658882, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.e1((-30.0 + 0.0j))
assert ae(v, (-368973209407.27419706 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -368973209407.27419706, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.e1((-40.0 + 0.0j))
assert ae(v, (-6039718263611241.5784 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -6039718263611241.5784, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.e1((-50.0 + 0.0j))
assert ae(v, (-1.0585636897131690963e+20 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -1.0585636897131690963e+20, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.e1((-80.0 + 0.0j))
assert ae(v, (-7.0146000049047999696e+32 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -7.0146000049047999696e+32, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.e1((-4.6566128730773925781e-10 - 1.1641532182693481445e-10j))
assert ae(v, (20.880034621082893023 + 2.8966139903465137624j), tol=ATOL)
assert ae(v.real, 20.880034621082893023, tol=PTOL)
assert ae(v.imag, 2.8966139903465137624, tol=PTOL)
v = fp.e1((-1.0 - 0.25j))
assert ae(v, (-1.8942716983721074932 + 2.4689102827070540799j), tol=ATOL)
assert ae(v.real, -1.8942716983721074932, tol=PTOL)
assert ae(v.imag, 2.4689102827070540799, tol=PTOL)
v = fp.e1((-4.0 - 1.0j))
assert ae(v, (-14.806699492675420438 - 9.1384225230837893776j), tol=ATOL)
assert ae(v.real, -14.806699492675420438, tol=PTOL)
assert ae(v.imag, -9.1384225230837893776, tol=PTOL)
v = fp.e1((-8.0 - 2.0j))
assert ae(v, (54.633252667426386294 - 413.20318163814670688j), tol=ATOL)
assert ae(v.real, 54.633252667426386294, tol=PTOL)
assert ae(v.imag, -413.20318163814670688, tol=PTOL)
v = fp.e1((-20.0 - 5.0j))
assert ae(v, (-711836.97165402624643 + 24745250.939695900956j), tol=ATOL)
assert ae(v.real, -711836.97165402624643, tol=PTOL)
assert ae(v.imag, 24745250.939695900956, tol=PTOL)
v = fp.e1((-80.0 - 20.0j))
assert ae(v, (-4.2139911108612653091e+32 - 5.3367124741918251637e+32j), tol=ATOL)
assert ae(v.real, -4.2139911108612653091e+32, tol=PTOL)
assert ae(v.imag, -5.3367124741918251637e+32, tol=PTOL)
v = fp.e1((-120.0 - 30.0j))
assert ae(v, (9.7760616203707508892e+48 + 1.058257682317195792e+50j), tol=ATOL)
assert ae(v.real, 9.7760616203707508892e+48, tol=PTOL)
assert ae(v.imag, 1.058257682317195792e+50, tol=PTOL)
v = fp.e1((-160.0 - 40.0j))
assert ae(v, (8.7065541466623638861e+66 - 1.6577106725141739889e+67j), tol=ATOL)
assert ae(v.real, 8.7065541466623638861e+66, tol=PTOL)
assert ae(v.imag, -1.6577106725141739889e+67, tol=PTOL)
v = fp.e1((-200.0 - 50.0j))
assert ae(v, (-3.070744996327018106e+84 + 1.7243244846769415903e+84j), tol=ATOL)
assert ae(v.real, -3.070744996327018106e+84, tol=PTOL)
assert ae(v.imag, 1.7243244846769415903e+84, tol=PTOL)
v = fp.e1((-320.0 - 80.0j))
assert ae(v, (9.9960598637998647276e+135 + 2.6855081527595608863e+136j), tol=ATOL)
assert ae(v.real, 9.9960598637998647276e+135, tol=PTOL)
assert ae(v.imag, 2.6855081527595608863e+136, tol=PTOL)
v = fp.e1((-1.1641532182693481445e-10 - 1.1641532182693481445e-10j))
assert ae(v, (21.950067703180274374 + 2.356194490075929607j), tol=ATOL)
assert ae(v.real, 21.950067703180274374, tol=PTOL)
assert ae(v.imag, 2.356194490075929607, tol=PTOL)
v = fp.e1((-0.25 - 0.25j))
assert ae(v, (0.21441047326710323254 + 2.0732153554307936389j), tol=ATOL)
assert ae(v.real, 0.21441047326710323254, tol=PTOL)
assert ae(v.imag, 2.0732153554307936389, tol=PTOL)
v = fp.e1((-1.0 - 1.0j))
assert ae(v, (-1.7646259855638540684 + 0.7538228020792708192j), tol=ATOL)
assert ae(v.real, -1.7646259855638540684, tol=PTOL)
assert ae(v.imag, 0.7538228020792708192, tol=PTOL)
v = fp.e1((-2.0 - 2.0j))
assert ae(v, (-1.8920781621855474089 - 2.1753697842428647236j), tol=ATOL)
assert ae(v.real, -1.8920781621855474089, tol=PTOL)
assert ae(v.imag, -2.1753697842428647236, tol=PTOL)
v = fp.e1((-5.0 - 5.0j))
assert ae(v, (13.470936071475245856 + 18.464085049321024206j), tol=ATOL)
assert ae(v.real, 13.470936071475245856, tol=PTOL)
assert ae(v.imag, 18.464085049321024206, tol=PTOL)
v = fp.e1((-20.0 - 20.0j))
assert ae(v, (-16589317.398788971896 - 5831702.3296441771206j), tol=ATOL)
assert ae(v.real, -16589317.398788971896, tol=PTOL)
assert ae(v.imag, -5831702.3296441771206, tol=PTOL)
v = fp.e1((-30.0 - 30.0j))
assert ae(v, (154596484273.69322527 + 204179357837.41389696j), tol=ATOL)
assert ae(v.real, 154596484273.69322527, tol=PTOL)
assert ae(v.imag, 204179357837.41389696, tol=PTOL)
v = fp.e1((-40.0 - 40.0j))
assert ae(v, (-287512180321448.45408 - 4203502407932314.974j), tol=ATOL)
assert ae(v.real, -287512180321448.45408, tol=PTOL)
assert ae(v.imag, -4203502407932314.974, tol=PTOL)
v = fp.e1((-50.0 - 50.0j))
assert ae(v, (-36128528616649268826.0 + 64648801861338741963.0j), tol=ATOL)
assert ae(v.real, -36128528616649268826.0, tol=PTOL)
assert ae(v.imag, 64648801861338741963.0, tol=PTOL)
v = fp.e1((-80.0 - 80.0j))
assert ae(v, (3.8674816337930010217e+32 + 3.0540709639658071041e+32j), tol=ATOL)
assert ae(v.real, 3.8674816337930010217e+32, tol=PTOL)
assert ae(v.imag, 3.0540709639658071041e+32, tol=PTOL)
v = fp.e1((-1.1641532182693481445e-10 - 4.6566128730773925781e-10j))
assert ae(v, (20.880034621432138988 + 1.8157749894560994861j), tol=ATOL)
assert ae(v.real, 20.880034621432138988, tol=PTOL)
assert ae(v.imag, 1.8157749894560994861, tol=PTOL)
v = fp.e1((-0.25 - 1.0j))
assert ae(v, (-0.59066621214766308594 + 0.74474454765205036972j), tol=ATOL)
assert ae(v.real, -0.59066621214766308594, tol=PTOL)
assert ae(v.imag, 0.74474454765205036972, tol=PTOL)
v = fp.e1((-1.0 - 4.0j))
assert ae(v, (0.49739047283060471093 - 0.41543605404038863174j), tol=ATOL)
assert ae(v.real, 0.49739047283060471093, tol=PTOL)
assert ae(v.imag, -0.41543605404038863174, tol=PTOL)
v = fp.e1((-2.0 - 8.0j))
assert ae(v, (-0.8705211147733730969 - 0.24099328498605539667j), tol=ATOL)
assert ae(v.real, -0.8705211147733730969, tol=PTOL)
assert ae(v.imag, -0.24099328498605539667, tol=PTOL)
v = fp.e1((-5.0 - 20.0j))
assert ae(v, (-7.0789514293925893007 + 1.6102177171960790536j), tol=ATOL)
assert ae(v.real, -7.0789514293925893007, tol=PTOL)
assert ae(v.imag, 1.6102177171960790536, tol=PTOL)
v = fp.e1((-20.0 - 80.0j))
assert ae(v, (5855431.4907298084434 + 720920.93315409165707j), tol=ATOL)
assert ae(v.real, 5855431.4907298084434, tol=PTOL)
assert ae(v.imag, 720920.93315409165707, tol=PTOL)
v = fp.e1((-30.0 - 120.0j))
assert ae(v, (-65402491644.703470747 + 56697658399.657460294j), tol=ATOL)
assert ae(v.real, -65402491644.703470747, tol=PTOL)
assert ae(v.imag, 56697658399.657460294, tol=PTOL)
v = fp.e1((-40.0 - 160.0j))
assert ae(v, (25504929379604.776769 - 1429035198630573.2463j), tol=ATOL)
assert ae(v.real, 25504929379604.776769, tol=PTOL)
assert ae(v.imag, -1429035198630573.2463, tol=PTOL)
v = fp.e1((-50.0 - 200.0j))
assert ae(v, (18437746526988116954.0 + 17146362239046152345.0j), tol=ATOL)
assert ae(v.real, 18437746526988116954.0, tol=PTOL)
assert ae(v.imag, 17146362239046152345.0, tol=PTOL)
v = fp.e1((-80.0 - 320.0j))
assert ae(v, (3.3464697299634526706e+31 + 1.6473152633843023919e+32j), tol=ATOL)
assert ae(v.real, 3.3464697299634526706e+31, tol=PTOL)
assert ae(v.imag, 1.6473152633843023919e+32, tol=PTOL)
v = fp.e1((0.0 - 1.1641532182693481445e-10j))
assert ae(v, (22.29664129357666235 + 1.5707963266784812974j), tol=ATOL)
assert ae(v.real, 22.29664129357666235, tol=PTOL)
assert ae(v.imag, 1.5707963266784812974, tol=PTOL)
v = fp.e1((0.0 - 0.25j))
assert ae(v, (0.82466306258094565309 + 1.3216627564751394551j), tol=ATOL)
assert ae(v.real, 0.82466306258094565309, tol=PTOL)
assert ae(v.imag, 1.3216627564751394551, tol=PTOL)
v = fp.e1((0.0 - 1.0j))
assert ae(v, (-0.33740392290096813466 + 0.62471325642771360429j), tol=ATOL)
assert ae(v.real, -0.33740392290096813466, tol=PTOL)
assert ae(v.imag, 0.62471325642771360429, tol=PTOL)
v = fp.e1((0.0 - 2.0j))
assert ae(v, (-0.4229808287748649957 - 0.034616650007798229345j), tol=ATOL)
assert ae(v.real, -0.4229808287748649957, tol=PTOL)
assert ae(v.imag, -0.034616650007798229345, tol=PTOL)
v = fp.e1((0.0 - 5.0j))
assert ae(v, (0.19002974965664387862 + 0.020865081850222481957j), tol=ATOL)
assert ae(v.real, 0.19002974965664387862, tol=PTOL)
assert ae(v.imag, 0.020865081850222481957, tol=PTOL)
v = fp.e1((0.0 - 20.0j))
assert ae(v, (-0.04441982084535331654 + 0.022554625751456779068j), tol=ATOL)
assert ae(v.real, -0.04441982084535331654, tol=PTOL)
assert ae(v.imag, 0.022554625751456779068, tol=PTOL)
v = fp.e1((0.0 - 30.0j))
assert ae(v, (0.033032417282071143779 + 0.0040397867645455082476j), tol=ATOL)
assert ae(v.real, 0.033032417282071143779, tol=PTOL)
assert ae(v.imag, 0.0040397867645455082476, tol=PTOL)
v = fp.e1((0.0 - 40.0j))
assert ae(v, (-0.019020007896208766962 - 0.016188792559887887544j), tol=ATOL)
assert ae(v.real, -0.019020007896208766962, tol=PTOL)
assert ae(v.imag, -0.016188792559887887544, tol=PTOL)
v = fp.e1((0.0 - 50.0j))
assert ae(v, (0.0056283863241163054402 + 0.019179254308960724503j), tol=ATOL)
assert ae(v.real, 0.0056283863241163054402, tol=PTOL)
assert ae(v.imag, 0.019179254308960724503, tol=PTOL)
v = fp.e1((0.0 - 80.0j))
assert ae(v, (0.012402501155070958192 - 0.0015345601175906961199j), tol=ATOL)
assert ae(v.real, 0.012402501155070958192, tol=PTOL)
assert ae(v.imag, -0.0015345601175906961199, tol=PTOL)
v = fp.e1((1.1641532182693481445e-10 - 4.6566128730773925781e-10j))
assert ae(v, (20.880034621664969632 + 1.3258176632023711778j), tol=ATOL)
assert ae(v.real, 20.880034621664969632, tol=PTOL)
assert ae(v.imag, 1.3258176632023711778, tol=PTOL)
v = fp.e1((0.25 - 1.0j))
assert ae(v, (-0.16868306393667788761 + 0.4858011885947426971j), tol=ATOL)
assert ae(v.real, -0.16868306393667788761, tol=PTOL)
assert ae(v.imag, 0.4858011885947426971, tol=PTOL)
v = fp.e1((1.0 - 4.0j))
assert ae(v, (0.03373591813926547318 - 0.073523452241083821877j), tol=ATOL)
assert ae(v.real, 0.03373591813926547318, tol=PTOL)
assert ae(v.imag, -0.073523452241083821877, tol=PTOL)
v = fp.e1((2.0 - 8.0j))
assert ae(v, (-0.015392833434733785143 + 0.0031747121557605415914j), tol=ATOL)
assert ae(v.real, -0.015392833434733785143, tol=PTOL)
assert ae(v.imag, 0.0031747121557605415914, tol=PTOL)
v = fp.e1((5.0 - 20.0j))
assert ae(v, (-0.00024419662286542966525 + 0.00021008322966152755674j), tol=ATOL)
assert ae(v.real, -0.00024419662286542966525, tol=PTOL)
assert ae(v.imag, 0.00021008322966152755674, tol=PTOL)
v = fp.e1((20.0 - 80.0j))
assert ae(v, (2.3255552781051330088e-11 - 8.9463918891349438007e-12j), tol=ATOL)
assert ae(v.real, 2.3255552781051330088e-11, tol=PTOL)
assert ae(v.imag, -8.9463918891349438007e-12, tol=PTOL)
v = fp.e1((30.0 - 120.0j))
assert ae(v, (-2.7068919097124652332e-16 + 7.0477762411705130239e-16j), tol=ATOL)
assert ae(v.real, -2.7068919097124652332e-16, tol=PTOL)
assert ae(v.imag, 7.0477762411705130239e-16, tol=PTOL)
v = fp.e1((40.0 - 160.0j))
assert ae(v, (-1.1695597827678024687e-20 - 2.2907401455645736661e-20j), tol=ATOL)
assert ae(v.real, -1.1695597827678024687e-20, tol=PTOL)
assert ae(v.imag, -2.2907401455645736661e-20, tol=PTOL)
v = fp.e1((50.0 - 200.0j))
assert ae(v, (9.0323746914410162531e-25 + 2.3950601790033530935e-25j), tol=ATOL)
assert ae(v.real, 9.0323746914410162531e-25, tol=PTOL)
assert ae(v.imag, 2.3950601790033530935e-25, tol=PTOL)
v = fp.e1((80.0 - 320.0j))
assert ae(v, (3.4819106748728063576e-38 + 4.215653005615772724e-38j), tol=ATOL)
assert ae(v.real, 3.4819106748728063576e-38, tol=PTOL)
assert ae(v.imag, 4.215653005615772724e-38, tol=PTOL)
v = fp.e1((1.1641532182693481445e-10 - 1.1641532182693481445e-10j))
assert ae(v, (21.950067703413105017 + 0.7853981632810329878j), tol=ATOL)
assert ae(v.real, 21.950067703413105017, tol=PTOL)
assert ae(v.imag, 0.7853981632810329878, tol=PTOL)
v = fp.e1((0.25 - 0.25j))
assert ae(v, (0.71092525792923287894 + 0.56491812441304194711j), tol=ATOL)
assert ae(v.real, 0.71092525792923287894, tol=PTOL)
assert ae(v.imag, 0.56491812441304194711, tol=PTOL)
v = fp.e1((1.0 - 1.0j))
assert ae(v, (0.00028162445198141832551 + 0.17932453503935894015j), tol=ATOL)
assert ae(v.real, 0.00028162445198141832551, tol=PTOL)
assert ae(v.imag, 0.17932453503935894015, tol=PTOL)
v = fp.e1((2.0 - 2.0j))
assert ae(v, (-0.033767089606562004246 + 0.018599414169750541925j), tol=ATOL)
assert ae(v.real, -0.033767089606562004246, tol=PTOL)
assert ae(v.imag, 0.018599414169750541925, tol=PTOL)
v = fp.e1((5.0 - 5.0j))
assert ae(v, (0.0007266506660356393891 - 0.00047102780163522245054j), tol=ATOL)
assert ae(v.real, 0.0007266506660356393891, tol=PTOL)
assert ae(v.imag, -0.00047102780163522245054, tol=PTOL)
v = fp.e1((20.0 - 20.0j))
assert ae(v, (-2.3824537449367396579e-11 + 6.6969873156525615158e-11j), tol=ATOL)
assert ae(v.real, -2.3824537449367396579e-11, tol=PTOL)
assert ae(v.imag, 6.6969873156525615158e-11, tol=PTOL)
v = fp.e1((30.0 - 30.0j))
assert ae(v, (1.7316045841744061617e-15 - 1.3065678019487308689e-15j), tol=ATOL)
assert ae(v.real, 1.7316045841744061617e-15, tol=PTOL)
assert ae(v.imag, -1.3065678019487308689e-15, tol=PTOL)
v = fp.e1((40.0 - 40.0j))
assert ae(v, (-7.4001043002899232182e-20 + 4.991847855336816304e-21j), tol=ATOL)
assert ae(v.real, -7.4001043002899232182e-20, tol=PTOL)
assert ae(v.imag, 4.991847855336816304e-21, tol=PTOL)
v = fp.e1((50.0 - 50.0j))
assert ae(v, (2.3566128324644641219e-24 + 1.3188326726201614778e-24j), tol=ATOL)
assert ae(v.real, 2.3566128324644641219e-24, tol=PTOL)
assert ae(v.imag, 1.3188326726201614778e-24, tol=PTOL)
v = fp.e1((80.0 - 80.0j))
assert ae(v, (9.8279750572186526673e-38 - 1.243952841288868831e-37j), tol=ATOL)
assert ae(v.real, 9.8279750572186526673e-38, tol=PTOL)
assert ae(v.imag, -1.243952841288868831e-37, tol=PTOL)
v = fp.e1((4.6566128730773925781e-10 - 1.1641532182693481445e-10j))
assert ae(v, (20.880034622014215597 + 0.24497866301044883237j), tol=ATOL)
assert ae(v.real, 20.880034622014215597, tol=PTOL)
assert ae(v.imag, 0.24497866301044883237, tol=PTOL)
v = fp.e1((1.0 - 0.25j))
assert ae(v, (0.19731063945004229095 + 0.087366045774299963672j), tol=ATOL)
assert ae(v.real, 0.19731063945004229095, tol=PTOL)
assert ae(v.imag, 0.087366045774299963672, tol=PTOL)
v = fp.e1((4.0 - 1.0j))
assert ae(v, (0.0013106173980145506944 + 0.0034542480199350626699j), tol=ATOL)
assert ae(v.real, 0.0013106173980145506944, tol=PTOL)
assert ae(v.imag, 0.0034542480199350626699, tol=PTOL)
v = fp.e1((8.0 - 2.0j))
assert ae(v, (-0.000022278049065270225945 + 0.000029191940456521555288j), tol=ATOL)
assert ae(v.real, -0.000022278049065270225945, tol=PTOL)
assert ae(v.imag, 0.000029191940456521555288, tol=PTOL)
v = fp.e1((20.0 - 5.0j))
assert ae(v, (4.7711374515765346894e-11 - 8.2902652405126947359e-11j), tol=ATOL)
assert ae(v.real, 4.7711374515765346894e-11, tol=PTOL)
assert ae(v.imag, -8.2902652405126947359e-11, tol=PTOL)
v = fp.e1((80.0 - 20.0j))
assert ae(v, (3.8353473865788235787e-38 + 2.129247592349605139e-37j), tol=ATOL)
assert ae(v.real, 3.8353473865788235787e-38, tol=PTOL)
assert ae(v.imag, 2.129247592349605139e-37, tol=PTOL)
v = fp.e1((120.0 - 30.0j))
assert ae(v, (2.3836002337480334716e-55 - 5.6704043587126198306e-55j), tol=ATOL)
assert ae(v.real, 2.3836002337480334716e-55, tol=PTOL)
assert ae(v.imag, -5.6704043587126198306e-55, tol=PTOL)
v = fp.e1((160.0 - 40.0j))
assert ae(v, (-1.6238022898654510661e-72 + 1.104172355572287367e-72j), tol=ATOL)
assert ae(v.real, -1.6238022898654510661e-72, tol=PTOL)
assert ae(v.imag, 1.104172355572287367e-72, tol=PTOL)
v = fp.e1((200.0 - 50.0j))
assert ae(v, (6.6800061461666228487e-90 - 1.4473816083541016115e-91j), tol=ATOL)
assert ae(v.real, 6.6800061461666228487e-90, tol=PTOL)
assert ae(v.imag, -1.4473816083541016115e-91, tol=PTOL)
v = fp.e1((320.0 - 80.0j))
assert ae(v, (4.2737871527778786157e-143 - 3.1789935525785660314e-142j), tol=ATOL)
assert ae(v.real, 4.2737871527778786157e-143, tol=PTOL)
assert ae(v.imag, -3.1789935525785660314e-142, tol=PTOL)
# fp.ei on positive real floats: each call is compared against a hard-coded
# reference constant (presumably the exponential integral Ei(x) -- verify
# against the library docs), and the result must be a plain Python float.
v = fp.ei(1.1641532182693481445e-10)
assert ae(v, -22.296641293460247028, tol=ATOL)
assert type(v) is float
v = fp.ei(0.25)
assert ae(v, -0.54254326466191372953, tol=ATOL)
assert type(v) is float
v = fp.ei(1.0)
assert ae(v, 1.8951178163559367555, tol=ATOL)
assert type(v) is float
v = fp.ei(2.0)
assert ae(v, 4.9542343560018901634, tol=ATOL)
assert type(v) is float
v = fp.ei(5.0)
assert ae(v, 40.185275355803177455, tol=ATOL)
assert type(v) is float
v = fp.ei(20.0)
assert ae(v, 25615652.66405658882, tol=ATOL)
assert type(v) is float
v = fp.ei(30.0)
assert ae(v, 368973209407.27419706, tol=ATOL)
assert type(v) is float
v = fp.ei(40.0)
assert ae(v, 6039718263611241.5784, tol=ATOL)
assert type(v) is float
v = fp.ei(50.0)
assert ae(v, 1.0585636897131690963e+20, tol=ATOL)
assert type(v) is float
v = fp.ei(80.0)
assert ae(v, 7.0146000049047999696e+32, tol=ATOL)
assert type(v) is float
# The same positive real points supplied as complex (x + 0.0j): the real part
# must agree with the float branch above and the imaginary part must be
# exactly zero (no spurious imaginary component on the positive real axis).
v = fp.ei((1.1641532182693481445e-10 + 0.0j))
assert ae(v, (-22.296641293460247028 + 0.0j), tol=ATOL)
assert ae(v.real, -22.296641293460247028, tol=PTOL)
assert v.imag == 0
v = fp.ei((0.25 + 0.0j))
assert ae(v, (-0.54254326466191372953 + 0.0j), tol=ATOL)
assert ae(v.real, -0.54254326466191372953, tol=PTOL)
assert v.imag == 0
v = fp.ei((1.0 + 0.0j))
assert ae(v, (1.8951178163559367555 + 0.0j), tol=ATOL)
assert ae(v.real, 1.8951178163559367555, tol=PTOL)
assert v.imag == 0
v = fp.ei((2.0 + 0.0j))
assert ae(v, (4.9542343560018901634 + 0.0j), tol=ATOL)
assert ae(v.real, 4.9542343560018901634, tol=PTOL)
assert v.imag == 0
v = fp.ei((5.0 + 0.0j))
assert ae(v, (40.185275355803177455 + 0.0j), tol=ATOL)
assert ae(v.real, 40.185275355803177455, tol=PTOL)
assert v.imag == 0
v = fp.ei((20.0 + 0.0j))
assert ae(v, (25615652.66405658882 + 0.0j), tol=ATOL)
assert ae(v.real, 25615652.66405658882, tol=PTOL)
assert v.imag == 0
v = fp.ei((30.0 + 0.0j))
assert ae(v, (368973209407.27419706 + 0.0j), tol=ATOL)
assert ae(v.real, 368973209407.27419706, tol=PTOL)
assert v.imag == 0
v = fp.ei((40.0 + 0.0j))
assert ae(v, (6039718263611241.5784 + 0.0j), tol=ATOL)
assert ae(v.real, 6039718263611241.5784, tol=PTOL)
assert v.imag == 0
v = fp.ei((50.0 + 0.0j))
assert ae(v, (1.0585636897131690963e+20 + 0.0j), tol=ATOL)
assert ae(v.real, 1.0585636897131690963e+20, tol=PTOL)
assert v.imag == 0
v = fp.ei((80.0 + 0.0j))
assert ae(v, (7.0146000049047999696e+32 + 0.0j), tol=ATOL)
assert ae(v.real, 7.0146000049047999696e+32, tol=PTOL)
assert v.imag == 0
v = fp.ei((4.6566128730773925781e-10 + 1.1641532182693481445e-10j))
assert ae(v, (-20.880034621082893023 + 0.24497866324327947603j), tol=ATOL)
assert ae(v.real, -20.880034621082893023, tol=PTOL)
assert ae(v.imag, 0.24497866324327947603, tol=PTOL)
v = fp.ei((1.0 + 0.25j))
assert ae(v, (1.8942716983721074932 + 0.67268237088273915854j), tol=ATOL)
assert ae(v.real, 1.8942716983721074932, tol=PTOL)
assert ae(v.imag, 0.67268237088273915854, tol=PTOL)
v = fp.ei((4.0 + 1.0j))
assert ae(v, (14.806699492675420438 + 12.280015176673582616j), tol=ATOL)
assert ae(v.real, 14.806699492675420438, tol=PTOL)
assert ae(v.imag, 12.280015176673582616, tol=PTOL)
v = fp.ei((8.0 + 2.0j))
assert ae(v, (-54.633252667426386294 + 416.34477429173650012j), tol=ATOL)
assert ae(v.real, -54.633252667426386294, tol=PTOL)
assert ae(v.imag, 416.34477429173650012, tol=PTOL)
v = fp.ei((20.0 + 5.0j))
assert ae(v, (711836.97165402624643 - 24745247.798103247366j), tol=ATOL)
assert ae(v.real, 711836.97165402624643, tol=PTOL)
assert ae(v.imag, -24745247.798103247366, tol=PTOL)
v = fp.ei((80.0 + 20.0j))
assert ae(v, (4.2139911108612653091e+32 + 5.3367124741918251637e+32j), tol=ATOL)
assert ae(v.real, 4.2139911108612653091e+32, tol=PTOL)
assert ae(v.imag, 5.3367124741918251637e+32, tol=PTOL)
v = fp.ei((120.0 + 30.0j))
assert ae(v, (-9.7760616203707508892e+48 - 1.058257682317195792e+50j), tol=ATOL)
assert ae(v.real, -9.7760616203707508892e+48, tol=PTOL)
assert ae(v.imag, -1.058257682317195792e+50, tol=PTOL)
v = fp.ei((160.0 + 40.0j))
assert ae(v, (-8.7065541466623638861e+66 + 1.6577106725141739889e+67j), tol=ATOL)
assert ae(v.real, -8.7065541466623638861e+66, tol=PTOL)
assert ae(v.imag, 1.6577106725141739889e+67, tol=PTOL)
v = fp.ei((200.0 + 50.0j))
assert ae(v, (3.070744996327018106e+84 - 1.7243244846769415903e+84j), tol=ATOL)
assert ae(v.real, 3.070744996327018106e+84, tol=PTOL)
assert ae(v.imag, -1.7243244846769415903e+84, tol=PTOL)
v = fp.ei((320.0 + 80.0j))
assert ae(v, (-9.9960598637998647276e+135 - 2.6855081527595608863e+136j), tol=ATOL)
assert ae(v.real, -9.9960598637998647276e+135, tol=PTOL)
assert ae(v.imag, -2.6855081527595608863e+136, tol=PTOL)
v = fp.ei((1.1641532182693481445e-10 + 1.1641532182693481445e-10j))
assert ae(v, (-21.950067703180274374 + 0.78539816351386363145j), tol=ATOL)
assert ae(v.real, -21.950067703180274374, tol=PTOL)
assert ae(v.imag, 0.78539816351386363145, tol=PTOL)
v = fp.ei((0.25 + 0.25j))
assert ae(v, (-0.21441047326710323254 + 1.0683772981589995996j), tol=ATOL)
assert ae(v.real, -0.21441047326710323254, tol=PTOL)
assert ae(v.imag, 1.0683772981589995996, tol=PTOL)
v = fp.ei((1.0 + 1.0j))
assert ae(v, (1.7646259855638540684 + 2.3877698515105224193j), tol=ATOL)
assert ae(v.real, 1.7646259855638540684, tol=PTOL)
assert ae(v.imag, 2.3877698515105224193, tol=PTOL)
v = fp.ei((2.0 + 2.0j))
assert ae(v, (1.8920781621855474089 + 5.3169624378326579621j), tol=ATOL)
assert ae(v.real, 1.8920781621855474089, tol=PTOL)
assert ae(v.imag, 5.3169624378326579621, tol=PTOL)
v = fp.ei((5.0 + 5.0j))
assert ae(v, (-13.470936071475245856 - 15.322492395731230968j), tol=ATOL)
assert ae(v.real, -13.470936071475245856, tol=PTOL)
assert ae(v.imag, -15.322492395731230968, tol=PTOL)
v = fp.ei((20.0 + 20.0j))
assert ae(v, (16589317.398788971896 + 5831705.4712368307104j), tol=ATOL)
assert ae(v.real, 16589317.398788971896, tol=PTOL)
assert ae(v.imag, 5831705.4712368307104, tol=PTOL)
v = fp.ei((30.0 + 30.0j))
assert ae(v, (-154596484273.69322527 - 204179357834.2723043j), tol=ATOL)
assert ae(v.real, -154596484273.69322527, tol=PTOL)
assert ae(v.imag, -204179357834.2723043, tol=PTOL)
v = fp.ei((40.0 + 40.0j))
assert ae(v, (287512180321448.45408 + 4203502407932318.1156j), tol=ATOL)
assert ae(v.real, 287512180321448.45408, tol=PTOL)
assert ae(v.imag, 4203502407932318.1156, tol=PTOL)
v = fp.ei((50.0 + 50.0j))
assert ae(v, (36128528616649268826.0 - 64648801861338741960.0j), tol=ATOL)
assert ae(v.real, 36128528616649268826.0, tol=PTOL)
assert ae(v.imag, -64648801861338741960.0, tol=PTOL)
v = fp.ei((80.0 + 80.0j))
assert ae(v, (-3.8674816337930010217e+32 - 3.0540709639658071041e+32j), tol=ATOL)
assert ae(v.real, -3.8674816337930010217e+32, tol=PTOL)
assert ae(v.imag, -3.0540709639658071041e+32, tol=PTOL)
v = fp.ei((1.1641532182693481445e-10 + 4.6566128730773925781e-10j))
assert ae(v, (-20.880034621432138988 + 1.3258176641336937524j), tol=ATOL)
assert ae(v.real, -20.880034621432138988, tol=PTOL)
assert ae(v.imag, 1.3258176641336937524, tol=PTOL)
v = fp.ei((0.25 + 1.0j))
assert ae(v, (0.59066621214766308594 + 2.3968481059377428687j), tol=ATOL)
assert ae(v.real, 0.59066621214766308594, tol=PTOL)
assert ae(v.imag, 2.3968481059377428687, tol=PTOL)
v = fp.ei((1.0 + 4.0j))
assert ae(v, (-0.49739047283060471093 + 3.5570287076301818702j), tol=ATOL)
assert ae(v.real, -0.49739047283060471093, tol=PTOL)
assert ae(v.imag, 3.5570287076301818702, tol=PTOL)
v = fp.ei((2.0 + 8.0j))
assert ae(v, (0.8705211147733730969 + 3.3825859385758486351j), tol=ATOL)
assert ae(v.real, 0.8705211147733730969, tol=PTOL)
assert ae(v.imag, 3.3825859385758486351, tol=PTOL)
v = fp.ei((5.0 + 20.0j))
assert ae(v, (7.0789514293925893007 + 1.5313749363937141849j), tol=ATOL)
assert ae(v.real, 7.0789514293925893007, tol=PTOL)
assert ae(v.imag, 1.5313749363937141849, tol=PTOL)
v = fp.ei((20.0 + 80.0j))
assert ae(v, (-5855431.4907298084434 - 720917.79156143806727j), tol=ATOL)
assert ae(v.real, -5855431.4907298084434, tol=PTOL)
assert ae(v.imag, -720917.79156143806727, tol=PTOL)
v = fp.ei((30.0 + 120.0j))
assert ae(v, (65402491644.703470747 - 56697658396.51586764j), tol=ATOL)
assert ae(v.real, 65402491644.703470747, tol=PTOL)
assert ae(v.imag, -56697658396.51586764, tol=PTOL)
v = fp.ei((40.0 + 160.0j))
assert ae(v, (-25504929379604.776769 + 1429035198630576.3879j), tol=ATOL)
assert ae(v.real, -25504929379604.776769, tol=PTOL)
assert ae(v.imag, 1429035198630576.3879, tol=PTOL)
v = fp.ei((50.0 + 200.0j))
assert ae(v, (-18437746526988116954.0 - 17146362239046152342.0j), tol=ATOL)
assert ae(v.real, -18437746526988116954.0, tol=PTOL)
assert ae(v.imag, -17146362239046152342.0, tol=PTOL)
v = fp.ei((80.0 + 320.0j))
assert ae(v, (-3.3464697299634526706e+31 - 1.6473152633843023919e+32j), tol=ATOL)
assert ae(v.real, -3.3464697299634526706e+31, tol=PTOL)
assert ae(v.imag, -1.6473152633843023919e+32, tol=PTOL)
# fp.ei along the positive imaginary axis (0 + y*1j); both the real and
# imaginary components are pinned to fixed reference values.
v = fp.ei((0.0 + 1.1641532182693481445e-10j))
assert ae(v, (-22.29664129357666235 + 1.5707963269113119411j), tol=ATOL)
assert ae(v.real, -22.29664129357666235, tol=PTOL)
assert ae(v.imag, 1.5707963269113119411, tol=PTOL)
v = fp.ei((0.0 + 0.25j))
assert ae(v, (-0.82466306258094565309 + 1.8199298971146537833j), tol=ATOL)
assert ae(v.real, -0.82466306258094565309, tol=PTOL)
assert ae(v.imag, 1.8199298971146537833, tol=PTOL)
v = fp.ei((0.0 + 1.0j))
assert ae(v, (0.33740392290096813466 + 2.5168793971620796342j), tol=ATOL)
assert ae(v.real, 0.33740392290096813466, tol=PTOL)
assert ae(v.imag, 2.5168793971620796342, tol=PTOL)
v = fp.ei((0.0 + 2.0j))
assert ae(v, (0.4229808287748649957 + 3.1762093035975914678j), tol=ATOL)
assert ae(v.real, 0.4229808287748649957, tol=PTOL)
assert ae(v.imag, 3.1762093035975914678, tol=PTOL)
v = fp.ei((0.0 + 5.0j))
assert ae(v, (-0.19002974965664387862 + 3.1207275717395707565j), tol=ATOL)
assert ae(v.real, -0.19002974965664387862, tol=PTOL)
assert ae(v.imag, 3.1207275717395707565, tol=PTOL)
v = fp.ei((0.0 + 20.0j))
assert ae(v, (0.04441982084535331654 + 3.1190380278383364594j), tol=ATOL)
assert ae(v.real, 0.04441982084535331654, tol=PTOL)
assert ae(v.imag, 3.1190380278383364594, tol=PTOL)
v = fp.ei((0.0 + 30.0j))
assert ae(v, (-0.033032417282071143779 + 3.1375528668252477302j), tol=ATOL)
assert ae(v.real, -0.033032417282071143779, tol=PTOL)
assert ae(v.imag, 3.1375528668252477302, tol=PTOL)
v = fp.ei((0.0 + 40.0j))
assert ae(v, (0.019020007896208766962 + 3.157781446149681126j), tol=ATOL)
assert ae(v.real, 0.019020007896208766962, tol=PTOL)
assert ae(v.imag, 3.157781446149681126, tol=PTOL)
v = fp.ei((0.0 + 50.0j))
assert ae(v, (-0.0056283863241163054402 + 3.122413399280832514j), tol=ATOL)
assert ae(v.real, -0.0056283863241163054402, tol=PTOL)
assert ae(v.imag, 3.122413399280832514, tol=PTOL)
v = fp.ei((0.0 + 80.0j))
assert ae(v, (-0.012402501155070958192 + 3.1431272137073839346j), tol=ATOL)
assert ae(v.real, -0.012402501155070958192, tol=PTOL)
assert ae(v.imag, 3.1431272137073839346, tol=PTOL)
v = fp.ei((-1.1641532182693481445e-10 + 4.6566128730773925781e-10j))
assert ae(v, (-20.880034621664969632 + 1.8157749903874220607j), tol=ATOL)
assert ae(v.real, -20.880034621664969632, tol=PTOL)
assert ae(v.imag, 1.8157749903874220607, tol=PTOL)
v = fp.ei((-0.25 + 1.0j))
assert ae(v, (0.16868306393667788761 + 2.6557914649950505414j), tol=ATOL)
assert ae(v.real, 0.16868306393667788761, tol=PTOL)
assert ae(v.imag, 2.6557914649950505414, tol=PTOL)
v = fp.ei((-1.0 + 4.0j))
assert ae(v, (-0.03373591813926547318 + 3.2151161058308770603j), tol=ATOL)
assert ae(v.real, -0.03373591813926547318, tol=PTOL)
assert ae(v.imag, 3.2151161058308770603, tol=PTOL)
v = fp.ei((-2.0 + 8.0j))
assert ae(v, (0.015392833434733785143 + 3.1384179414340326969j), tol=ATOL)
assert ae(v.real, 0.015392833434733785143, tol=PTOL)
assert ae(v.imag, 3.1384179414340326969, tol=PTOL)
v = fp.ei((-5.0 + 20.0j))
assert ae(v, (0.00024419662286542966525 + 3.1413825703601317109j), tol=ATOL)
assert ae(v.real, 0.00024419662286542966525, tol=PTOL)
assert ae(v.imag, 3.1413825703601317109, tol=PTOL)
v = fp.ei((-20.0 + 80.0j))
assert ae(v, (-2.3255552781051330088e-11 + 3.1415926535987396304j), tol=ATOL)
assert ae(v.real, -2.3255552781051330088e-11, tol=PTOL)
assert ae(v.imag, 3.1415926535987396304, tol=PTOL)
v = fp.ei((-30.0 + 120.0j))
assert ae(v, (2.7068919097124652332e-16 + 3.1415926535897925337j), tol=ATOL)
assert ae(v.real, 2.7068919097124652332e-16, tol=PTOL)
assert ae(v.imag, 3.1415926535897925337, tol=PTOL)
v = fp.ei((-40.0 + 160.0j))
assert ae(v, (1.1695597827678024687e-20 + 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, 1.1695597827678024687e-20, tol=PTOL)
assert ae(v.imag, 3.1415926535897932385, tol=PTOL)
v = fp.ei((-50.0 + 200.0j))
assert ae(v, (-9.0323746914410162531e-25 + 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -9.0323746914410162531e-25, tol=PTOL)
assert ae(v.imag, 3.1415926535897932385, tol=PTOL)
v = fp.ei((-80.0 + 320.0j))
assert ae(v, (-3.4819106748728063576e-38 + 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -3.4819106748728063576e-38, tol=PTOL)
assert ae(v.imag, 3.1415926535897932385, tol=PTOL)
v = fp.ei((-4.6566128730773925781e-10 + 1.1641532182693481445e-10j))
assert ae(v, (-20.880034622014215597 + 2.8966139905793444061j), tol=ATOL)
assert ae(v.real, -20.880034622014215597, tol=PTOL)
assert ae(v.imag, 2.8966139905793444061, tol=PTOL)
v = fp.ei((-1.0 + 0.25j))
assert ae(v, (-0.19731063945004229095 + 3.0542266078154932748j), tol=ATOL)
assert ae(v.real, -0.19731063945004229095, tol=PTOL)
assert ae(v.imag, 3.0542266078154932748, tol=PTOL)
v = fp.ei((-4.0 + 1.0j))
assert ae(v, (-0.0013106173980145506944 + 3.1381384055698581758j), tol=ATOL)
assert ae(v.real, -0.0013106173980145506944, tol=PTOL)
assert ae(v.imag, 3.1381384055698581758, tol=PTOL)
v = fp.ei((-8.0 + 2.0j))
assert ae(v, (0.000022278049065270225945 + 3.1415634616493367169j), tol=ATOL)
assert ae(v.real, 0.000022278049065270225945, tol=PTOL)
assert ae(v.imag, 3.1415634616493367169, tol=PTOL)
v = fp.ei((-20.0 + 5.0j))
assert ae(v, (-4.7711374515765346894e-11 + 3.1415926536726958909j), tol=ATOL)
assert ae(v.real, -4.7711374515765346894e-11, tol=PTOL)
assert ae(v.imag, 3.1415926536726958909, tol=PTOL)
v = fp.ei((-80.0 + 20.0j))
assert ae(v, (-3.8353473865788235787e-38 + 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -3.8353473865788235787e-38, tol=PTOL)
assert ae(v.imag, 3.1415926535897932385, tol=PTOL)
v = fp.ei((-120.0 + 30.0j))
assert ae(v, (-2.3836002337480334716e-55 + 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -2.3836002337480334716e-55, tol=PTOL)
assert ae(v.imag, 3.1415926535897932385, tol=PTOL)
v = fp.ei((-160.0 + 40.0j))
assert ae(v, (1.6238022898654510661e-72 + 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, 1.6238022898654510661e-72, tol=PTOL)
assert ae(v.imag, 3.1415926535897932385, tol=PTOL)
v = fp.ei((-200.0 + 50.0j))
assert ae(v, (-6.6800061461666228487e-90 + 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -6.6800061461666228487e-90, tol=PTOL)
assert ae(v.imag, 3.1415926535897932385, tol=PTOL)
v = fp.ei((-320.0 + 80.0j))
assert ae(v, (-4.2737871527778786157e-143 + 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -4.2737871527778786157e-143, tol=PTOL)
assert ae(v.imag, 3.1415926535897932385, tol=PTOL)
# fp.ei on negative real floats: result is again a plain Python float
# checked against a fixed reference constant.
v = fp.ei(-1.1641532182693481445e-10)
assert ae(v, -22.296641293693077672, tol=ATOL)
assert type(v) is float
v = fp.ei(-0.25)
assert ae(v, -1.0442826344437381945, tol=ATOL)
assert type(v) is float
v = fp.ei(-1.0)
assert ae(v, -0.21938393439552027368, tol=ATOL)
assert type(v) is float
v = fp.ei(-2.0)
assert ae(v, -0.048900510708061119567, tol=ATOL)
assert type(v) is float
v = fp.ei(-5.0)
assert ae(v, -0.0011482955912753257973, tol=ATOL)
assert type(v) is float
v = fp.ei(-20.0)
assert ae(v, -9.8355252906498816904e-11, tol=ATOL)
assert type(v) is float
v = fp.ei(-30.0)
assert ae(v, -3.0215520106888125448e-15, tol=ATOL)
assert type(v) is float
v = fp.ei(-40.0)
assert ae(v, -1.0367732614516569722e-19, tol=ATOL)
assert type(v) is float
v = fp.ei(-50.0)
assert ae(v, -3.7832640295504590187e-24, tol=ATOL)
assert type(v) is float
v = fp.ei(-80.0)
assert ae(v, -2.2285432586884729112e-37, tol=ATOL)
assert type(v) is float
# Negative real points supplied as complex (-x + 0.0j): the real part must
# match the float branch above and the imaginary part must be exactly zero.
v = fp.ei((-1.1641532182693481445e-10 + 0.0j))
assert ae(v, (-22.296641293693077672 + 0.0j), tol=ATOL)
assert ae(v.real, -22.296641293693077672, tol=PTOL)
assert v.imag == 0
v = fp.ei((-0.25 + 0.0j))
assert ae(v, (-1.0442826344437381945 + 0.0j), tol=ATOL)
assert ae(v.real, -1.0442826344437381945, tol=PTOL)
assert v.imag == 0
v = fp.ei((-1.0 + 0.0j))
assert ae(v, (-0.21938393439552027368 + 0.0j), tol=ATOL)
assert ae(v.real, -0.21938393439552027368, tol=PTOL)
assert v.imag == 0
v = fp.ei((-2.0 + 0.0j))
assert ae(v, (-0.048900510708061119567 + 0.0j), tol=ATOL)
assert ae(v.real, -0.048900510708061119567, tol=PTOL)
assert v.imag == 0
v = fp.ei((-5.0 + 0.0j))
assert ae(v, (-0.0011482955912753257973 + 0.0j), tol=ATOL)
assert ae(v.real, -0.0011482955912753257973, tol=PTOL)
assert v.imag == 0
v = fp.ei((-20.0 + 0.0j))
assert ae(v, (-9.8355252906498816904e-11 + 0.0j), tol=ATOL)
assert ae(v.real, -9.8355252906498816904e-11, tol=PTOL)
assert v.imag == 0
v = fp.ei((-30.0 + 0.0j))
assert ae(v, (-3.0215520106888125448e-15 + 0.0j), tol=ATOL)
assert ae(v.real, -3.0215520106888125448e-15, tol=PTOL)
assert v.imag == 0
v = fp.ei((-40.0 + 0.0j))
assert ae(v, (-1.0367732614516569722e-19 + 0.0j), tol=ATOL)
assert ae(v.real, -1.0367732614516569722e-19, tol=PTOL)
assert v.imag == 0
v = fp.ei((-50.0 + 0.0j))
assert ae(v, (-3.7832640295504590187e-24 + 0.0j), tol=ATOL)
assert ae(v.real, -3.7832640295504590187e-24, tol=PTOL)
assert v.imag == 0
v = fp.ei((-80.0 + 0.0j))
assert ae(v, (-2.2285432586884729112e-37 + 0.0j), tol=ATOL)
assert ae(v.real, -2.2285432586884729112e-37, tol=PTOL)
assert v.imag == 0
v = fp.ei((-4.6566128730773925781e-10 - 1.1641532182693481445e-10j))
assert ae(v, (-20.880034622014215597 - 2.8966139905793444061j), tol=ATOL)
assert ae(v.real, -20.880034622014215597, tol=PTOL)
assert ae(v.imag, -2.8966139905793444061, tol=PTOL)
v = fp.ei((-1.0 - 0.25j))
assert ae(v, (-0.19731063945004229095 - 3.0542266078154932748j), tol=ATOL)
assert ae(v.real, -0.19731063945004229095, tol=PTOL)
assert ae(v.imag, -3.0542266078154932748, tol=PTOL)
v = fp.ei((-4.0 - 1.0j))
assert ae(v, (-0.0013106173980145506944 - 3.1381384055698581758j), tol=ATOL)
assert ae(v.real, -0.0013106173980145506944, tol=PTOL)
assert ae(v.imag, -3.1381384055698581758, tol=PTOL)
v = fp.ei((-8.0 - 2.0j))
assert ae(v, (0.000022278049065270225945 - 3.1415634616493367169j), tol=ATOL)
assert ae(v.real, 0.000022278049065270225945, tol=PTOL)
assert ae(v.imag, -3.1415634616493367169, tol=PTOL)
v = fp.ei((-20.0 - 5.0j))
assert ae(v, (-4.7711374515765346894e-11 - 3.1415926536726958909j), tol=ATOL)
assert ae(v.real, -4.7711374515765346894e-11, tol=PTOL)
assert ae(v.imag, -3.1415926536726958909, tol=PTOL)
v = fp.ei((-80.0 - 20.0j))
assert ae(v, (-3.8353473865788235787e-38 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -3.8353473865788235787e-38, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.ei((-120.0 - 30.0j))
assert ae(v, (-2.3836002337480334716e-55 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -2.3836002337480334716e-55, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.ei((-160.0 - 40.0j))
assert ae(v, (1.6238022898654510661e-72 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, 1.6238022898654510661e-72, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.ei((-200.0 - 50.0j))
assert ae(v, (-6.6800061461666228487e-90 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -6.6800061461666228487e-90, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.ei((-320.0 - 80.0j))
assert ae(v, (-4.2737871527778786157e-143 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -4.2737871527778786157e-143, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.ei((-1.1641532182693481445e-10 - 1.1641532182693481445e-10j))
assert ae(v, (-21.950067703413105017 - 2.3561944903087602507j), tol=ATOL)
assert ae(v.real, -21.950067703413105017, tol=PTOL)
assert ae(v.imag, -2.3561944903087602507, tol=PTOL)
v = fp.ei((-0.25 - 0.25j))
assert ae(v, (-0.71092525792923287894 - 2.5766745291767512913j), tol=ATOL)
assert ae(v.real, -0.71092525792923287894, tol=PTOL)
assert ae(v.imag, -2.5766745291767512913, tol=PTOL)
v = fp.ei((-1.0 - 1.0j))
assert ae(v, (-0.00028162445198141832551 - 2.9622681185504342983j), tol=ATOL)
assert ae(v.real, -0.00028162445198141832551, tol=PTOL)
assert ae(v.imag, -2.9622681185504342983, tol=PTOL)
v = fp.ei((-2.0 - 2.0j))
assert ae(v, (0.033767089606562004246 - 3.1229932394200426965j), tol=ATOL)
assert ae(v.real, 0.033767089606562004246, tol=PTOL)
assert ae(v.imag, -3.1229932394200426965, tol=PTOL)
v = fp.ei((-5.0 - 5.0j))
assert ae(v, (-0.0007266506660356393891 - 3.1420636813914284609j), tol=ATOL)
assert ae(v.real, -0.0007266506660356393891, tol=PTOL)
assert ae(v.imag, -3.1420636813914284609, tol=PTOL)
v = fp.ei((-20.0 - 20.0j))
assert ae(v, (2.3824537449367396579e-11 - 3.1415926535228233653j), tol=ATOL)
assert ae(v.real, 2.3824537449367396579e-11, tol=PTOL)
assert ae(v.imag, -3.1415926535228233653, tol=PTOL)
v = fp.ei((-30.0 - 30.0j))
assert ae(v, (-1.7316045841744061617e-15 - 3.141592653589794545j), tol=ATOL)
assert ae(v.real, -1.7316045841744061617e-15, tol=PTOL)
assert ae(v.imag, -3.141592653589794545, tol=PTOL)
v = fp.ei((-40.0 - 40.0j))
assert ae(v, (7.4001043002899232182e-20 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, 7.4001043002899232182e-20, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.ei((-50.0 - 50.0j))
assert ae(v, (-2.3566128324644641219e-24 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -2.3566128324644641219e-24, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.ei((-80.0 - 80.0j))
assert ae(v, (-9.8279750572186526673e-38 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -9.8279750572186526673e-38, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.ei((-1.1641532182693481445e-10 - 4.6566128730773925781e-10j))
assert ae(v, (-20.880034621664969632 - 1.8157749903874220607j), tol=ATOL)
assert ae(v.real, -20.880034621664969632, tol=PTOL)
assert ae(v.imag, -1.8157749903874220607, tol=PTOL)
v = fp.ei((-0.25 - 1.0j))
assert ae(v, (0.16868306393667788761 - 2.6557914649950505414j), tol=ATOL)
assert ae(v.real, 0.16868306393667788761, tol=PTOL)
assert ae(v.imag, -2.6557914649950505414, tol=PTOL)
v = fp.ei((-1.0 - 4.0j))
assert ae(v, (-0.03373591813926547318 - 3.2151161058308770603j), tol=ATOL)
assert ae(v.real, -0.03373591813926547318, tol=PTOL)
assert ae(v.imag, -3.2151161058308770603, tol=PTOL)
v = fp.ei((-2.0 - 8.0j))
assert ae(v, (0.015392833434733785143 - 3.1384179414340326969j), tol=ATOL)
assert ae(v.real, 0.015392833434733785143, tol=PTOL)
assert ae(v.imag, -3.1384179414340326969, tol=PTOL)
v = fp.ei((-5.0 - 20.0j))
assert ae(v, (0.00024419662286542966525 - 3.1413825703601317109j), tol=ATOL)
assert ae(v.real, 0.00024419662286542966525, tol=PTOL)
assert ae(v.imag, -3.1413825703601317109, tol=PTOL)
v = fp.ei((-20.0 - 80.0j))
assert ae(v, (-2.3255552781051330088e-11 - 3.1415926535987396304j), tol=ATOL)
assert ae(v.real, -2.3255552781051330088e-11, tol=PTOL)
assert ae(v.imag, -3.1415926535987396304, tol=PTOL)
v = fp.ei((-30.0 - 120.0j))
assert ae(v, (2.7068919097124652332e-16 - 3.1415926535897925337j), tol=ATOL)
assert ae(v.real, 2.7068919097124652332e-16, tol=PTOL)
assert ae(v.imag, -3.1415926535897925337, tol=PTOL)
v = fp.ei((-40.0 - 160.0j))
assert ae(v, (1.1695597827678024687e-20 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, 1.1695597827678024687e-20, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.ei((-50.0 - 200.0j))
assert ae(v, (-9.0323746914410162531e-25 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -9.0323746914410162531e-25, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.ei((-80.0 - 320.0j))
assert ae(v, (-3.4819106748728063576e-38 - 3.1415926535897932385j), tol=ATOL)
assert ae(v.real, -3.4819106748728063576e-38, tol=PTOL)
assert ae(v.imag, -3.1415926535897932385, tol=PTOL)
v = fp.ei((0.0 - 1.1641532182693481445e-10j))
assert ae(v, (-22.29664129357666235 - 1.5707963269113119411j), tol=ATOL)
assert ae(v.real, -22.29664129357666235, tol=PTOL)
assert ae(v.imag, -1.5707963269113119411, tol=PTOL)
v = fp.ei((0.0 - 0.25j))
assert ae(v, (-0.82466306258094565309 - 1.8199298971146537833j), tol=ATOL)
assert ae(v.real, -0.82466306258094565309, tol=PTOL)
assert ae(v.imag, -1.8199298971146537833, tol=PTOL)
v = fp.ei((0.0 - 1.0j))
assert ae(v, (0.33740392290096813466 - 2.5168793971620796342j), tol=ATOL)
assert ae(v.real, 0.33740392290096813466, tol=PTOL)
assert ae(v.imag, -2.5168793971620796342, tol=PTOL)
v = fp.ei((0.0 - 2.0j))
assert ae(v, (0.4229808287748649957 - 3.1762093035975914678j), tol=ATOL)
assert ae(v.real, 0.4229808287748649957, tol=PTOL)
assert ae(v.imag, -3.1762093035975914678, tol=PTOL)
v = fp.ei((0.0 - 5.0j))
assert ae(v, (-0.19002974965664387862 - 3.1207275717395707565j), tol=ATOL)
assert ae(v.real, -0.19002974965664387862, tol=PTOL)
assert ae(v.imag, -3.1207275717395707565, tol=PTOL)
v = fp.ei((0.0 - 20.0j))
assert ae(v, (0.04441982084535331654 - 3.1190380278383364594j), tol=ATOL)
assert ae(v.real, 0.04441982084535331654, tol=PTOL)
assert ae(v.imag, -3.1190380278383364594, tol=PTOL)
v = fp.ei((0.0 - 30.0j))
assert ae(v, (-0.033032417282071143779 - 3.1375528668252477302j), tol=ATOL)
assert ae(v.real, -0.033032417282071143779, tol=PTOL)
assert ae(v.imag, -3.1375528668252477302, tol=PTOL)
v = fp.ei((0.0 - 40.0j))
assert ae(v, (0.019020007896208766962 - 3.157781446149681126j), tol=ATOL)
assert ae(v.real, 0.019020007896208766962, tol=PTOL)
assert ae(v.imag, -3.157781446149681126, tol=PTOL)
v = fp.ei((0.0 - 50.0j))
assert ae(v, (-0.0056283863241163054402 - 3.122413399280832514j), tol=ATOL)
assert ae(v.real, -0.0056283863241163054402, tol=PTOL)
assert ae(v.imag, -3.122413399280832514, tol=PTOL)
v = fp.ei((0.0 - 80.0j))
assert ae(v, (-0.012402501155070958192 - 3.1431272137073839346j), tol=ATOL)
assert ae(v.real, -0.012402501155070958192, tol=PTOL)
assert ae(v.imag, -3.1431272137073839346, tol=PTOL)
v = fp.ei((1.1641532182693481445e-10 - 4.6566128730773925781e-10j))
assert ae(v, (-20.880034621432138988 - 1.3258176641336937524j), tol=ATOL)
assert ae(v.real, -20.880034621432138988, tol=PTOL)
assert ae(v.imag, -1.3258176641336937524, tol=PTOL)
v = fp.ei((0.25 - 1.0j))
assert ae(v, (0.59066621214766308594 - 2.3968481059377428687j), tol=ATOL)
assert ae(v.real, 0.59066621214766308594, tol=PTOL)
assert ae(v.imag, -2.3968481059377428687, tol=PTOL)
v = fp.ei((1.0 - 4.0j))
assert ae(v, (-0.49739047283060471093 - 3.5570287076301818702j), tol=ATOL)
assert ae(v.real, -0.49739047283060471093, tol=PTOL)
assert ae(v.imag, -3.5570287076301818702, tol=PTOL)
v = fp.ei((2.0 - 8.0j))
assert ae(v, (0.8705211147733730969 - 3.3825859385758486351j), tol=ATOL)
assert ae(v.real, 0.8705211147733730969, tol=PTOL)
assert ae(v.imag, -3.3825859385758486351, tol=PTOL)
v = fp.ei((5.0 - 20.0j))
assert ae(v, (7.0789514293925893007 - 1.5313749363937141849j), tol=ATOL)
assert ae(v.real, 7.0789514293925893007, tol=PTOL)
assert ae(v.imag, -1.5313749363937141849, tol=PTOL)
v = fp.ei((20.0 - 80.0j))
assert ae(v, (-5855431.4907298084434 + 720917.79156143806727j), tol=ATOL)
assert ae(v.real, -5855431.4907298084434, tol=PTOL)
assert ae(v.imag, 720917.79156143806727, tol=PTOL)
v = fp.ei((30.0 - 120.0j))
assert ae(v, (65402491644.703470747 + 56697658396.51586764j), tol=ATOL)
assert ae(v.real, 65402491644.703470747, tol=PTOL)
assert ae(v.imag, 56697658396.51586764, tol=PTOL)
v = fp.ei((40.0 - 160.0j))
assert ae(v, (-25504929379604.776769 - 1429035198630576.3879j), tol=ATOL)
assert ae(v.real, -25504929379604.776769, tol=PTOL)
assert ae(v.imag, -1429035198630576.3879, tol=PTOL)
v = fp.ei((50.0 - 200.0j))
assert ae(v, (-18437746526988116954.0 + 17146362239046152342.0j), tol=ATOL)
assert ae(v.real, -18437746526988116954.0, tol=PTOL)
assert ae(v.imag, 17146362239046152342.0, tol=PTOL)
v = fp.ei((80.0 - 320.0j))
assert ae(v, (-3.3464697299634526706e+31 + 1.6473152633843023919e+32j), tol=ATOL)
assert ae(v.real, -3.3464697299634526706e+31, tol=PTOL)
assert ae(v.imag, 1.6473152633843023919e+32, tol=PTOL)
v = fp.ei((1.1641532182693481445e-10 - 1.1641532182693481445e-10j))
assert ae(v, (-21.950067703180274374 - 0.78539816351386363145j), tol=ATOL)
assert ae(v.real, -21.950067703180274374, tol=PTOL)
assert ae(v.imag, -0.78539816351386363145, tol=PTOL)
v = fp.ei((0.25 - 0.25j))
assert ae(v, (-0.21441047326710323254 - 1.0683772981589995996j), tol=ATOL)
assert ae(v.real, -0.21441047326710323254, tol=PTOL)
assert ae(v.imag, -1.0683772981589995996, tol=PTOL)
v = fp.ei((1.0 - 1.0j))
assert ae(v, (1.7646259855638540684 - 2.3877698515105224193j), tol=ATOL)
assert ae(v.real, 1.7646259855638540684, tol=PTOL)
assert ae(v.imag, -2.3877698515105224193, tol=PTOL)
v = fp.ei((2.0 - 2.0j))
assert ae(v, (1.8920781621855474089 - 5.3169624378326579621j), tol=ATOL)
assert ae(v.real, 1.8920781621855474089, tol=PTOL)
assert ae(v.imag, -5.3169624378326579621, tol=PTOL)
v = fp.ei((5.0 - 5.0j))
assert ae(v, (-13.470936071475245856 + 15.322492395731230968j), tol=ATOL)
assert ae(v.real, -13.470936071475245856, tol=PTOL)
assert ae(v.imag, 15.322492395731230968, tol=PTOL)
v = fp.ei((20.0 - 20.0j))
assert ae(v, (16589317.398788971896 - 5831705.4712368307104j), tol=ATOL)
assert ae(v.real, 16589317.398788971896, tol=PTOL)
assert ae(v.imag, -5831705.4712368307104, tol=PTOL)
v = fp.ei((30.0 - 30.0j))
assert ae(v, (-154596484273.69322527 + 204179357834.2723043j), tol=ATOL)
assert ae(v.real, -154596484273.69322527, tol=PTOL)
assert ae(v.imag, 204179357834.2723043, tol=PTOL)
v = fp.ei((40.0 - 40.0j))
assert ae(v, (287512180321448.45408 - 4203502407932318.1156j), tol=ATOL)
assert ae(v.real, 287512180321448.45408, tol=PTOL)
assert ae(v.imag, -4203502407932318.1156, tol=PTOL)
v = fp.ei((50.0 - 50.0j))
assert ae(v, (36128528616649268826.0 + 64648801861338741960.0j), tol=ATOL)
assert ae(v.real, 36128528616649268826.0, tol=PTOL)
assert ae(v.imag, 64648801861338741960.0, tol=PTOL)
v = fp.ei((80.0 - 80.0j))
assert ae(v, (-3.8674816337930010217e+32 + 3.0540709639658071041e+32j), tol=ATOL)
assert ae(v.real, -3.8674816337930010217e+32, tol=PTOL)
assert ae(v.imag, 3.0540709639658071041e+32, tol=PTOL)
v = fp.ei((4.6566128730773925781e-10 - 1.1641532182693481445e-10j))
assert ae(v, (-20.880034621082893023 - 0.24497866324327947603j), tol=ATOL)
assert ae(v.real, -20.880034621082893023, tol=PTOL)
assert ae(v.imag, -0.24497866324327947603, tol=PTOL)
v = fp.ei((1.0 - 0.25j))
assert ae(v, (1.8942716983721074932 - 0.67268237088273915854j), tol=ATOL)
assert ae(v.real, 1.8942716983721074932, tol=PTOL)
assert ae(v.imag, -0.67268237088273915854, tol=PTOL)
v = fp.ei((4.0 - 1.0j))
assert ae(v, (14.806699492675420438 - 12.280015176673582616j), tol=ATOL)
assert ae(v.real, 14.806699492675420438, tol=PTOL)
assert ae(v.imag, -12.280015176673582616, tol=PTOL)
v = fp.ei((8.0 - 2.0j))
assert ae(v, (-54.633252667426386294 - 416.34477429173650012j), tol=ATOL)
assert ae(v.real, -54.633252667426386294, tol=PTOL)
assert ae(v.imag, -416.34477429173650012, tol=PTOL)
v = fp.ei((20.0 - 5.0j))
assert ae(v, (711836.97165402624643 + 24745247.798103247366j), tol=ATOL)
assert ae(v.real, 711836.97165402624643, tol=PTOL)
assert ae(v.imag, 24745247.798103247366, tol=PTOL)
v = fp.ei((80.0 - 20.0j))
assert ae(v, (4.2139911108612653091e+32 - 5.3367124741918251637e+32j), tol=ATOL)
assert ae(v.real, 4.2139911108612653091e+32, tol=PTOL)
assert ae(v.imag, -5.3367124741918251637e+32, tol=PTOL)
v = fp.ei((120.0 - 30.0j))
assert ae(v, (-9.7760616203707508892e+48 + 1.058257682317195792e+50j), tol=ATOL)
assert ae(v.real, -9.7760616203707508892e+48, tol=PTOL)
assert ae(v.imag, 1.058257682317195792e+50, tol=PTOL)
v = fp.ei((160.0 - 40.0j))
assert ae(v, (-8.7065541466623638861e+66 - 1.6577106725141739889e+67j), tol=ATOL)
assert ae(v.real, -8.7065541466623638861e+66, tol=PTOL)
assert ae(v.imag, -1.6577106725141739889e+67, tol=PTOL)
v = fp.ei((200.0 - 50.0j))
assert ae(v, (3.070744996327018106e+84 + 1.7243244846769415903e+84j), tol=ATOL)
assert ae(v.real, 3.070744996327018106e+84, tol=PTOL)
assert ae(v.imag, 1.7243244846769415903e+84, tol=PTOL)
v = fp.ei((320.0 - 80.0j))
assert ae(v, (-9.9960598637998647276e+135 + 2.6855081527595608863e+136j), tol=ATOL)
assert ae(v.real, -9.9960598637998647276e+135, tol=PTOL)
assert ae(v.imag, 2.6855081527595608863e+136, tol=PTOL)
|
bverburg/CouchPotatoServer | refs/heads/master | libs/pyutil/fileutil.py | 122 | # Copyright (c) 2002-2010 Zooko Wilcox-O'Hearn
# This file is part of pyutil; see README.rst for licensing terms.
"""
Futz with files like a pro.
"""
import errno, exceptions, os, stat, tempfile
# bsddb is optional.  Its DB objects have a non-idempotent close() that raises
# DBNoSuchFileError on a second call (worked around in _Dir.shutdown below);
# when bsddb is not installed we keep a None sentinel so callers can test for
# its availability.
try:
    import bsddb
except ImportError:
    DBNoSuchFileError = None
else:
    DBNoSuchFileError = bsddb.db.DBNoSuchFileError
# read_file() and write_file() copied from Mark Seaborn's blog post. Please
# read it for complete rationale:
# http://lackingrhoticity.blogspot.com/2009/12/readfile-and-writefile-in-python.html
def read_file(filename, mode='rb'):
    """
    Return the entire contents of the file named filename as a string.

    The file handle is closed deterministically before returning, rather
    than relying on the Python implementation's garbage collector.
    """
    with open(filename, mode) as fh:
        return fh.read()
def write_file(filename, data, mode='wb'):
    """
    Write the string data into a file named filename.

    The file handle is closed deterministically before returning, which
    guarantees that the written data has been flushed from the perspective
    of the Python implementation.
    """
    with open(filename, mode) as fh:
        fh.write(data)
# For backwards-compatibility in case someone is using these names. We used to
# have a superkludge in fileutil.py under these names.
def rename(src, dst, tries=4, basedelay=0.1):
    # Thin wrapper over os.rename(); tries/basedelay are accepted only for
    # backwards compatibility with the old retrying implementation and are
    # ignored.
    return os.rename(src, dst)
def remove(f, tries=4, basedelay=0.1):
    # Thin wrapper over os.remove(); tries/basedelay are accepted only for
    # backwards compatibility and are ignored.
    return os.remove(f)
def rmdir(f, tries=4, basedelay=0.1):
    # Thin wrapper over os.rmdir(); tries/basedelay are accepted only for
    # backwards compatibility and are ignored.
    return os.rmdir(f)
class _Dir(object):
    """
    Hold a set of files and subdirs and clean them all up when asked to.
    """
    def __init__(self, name, cleanup=True):
        # name: path of the directory; cleanup: whether shutdown() removes it.
        self.name = name
        self.cleanup = cleanup
        self.files = []
        self.subdirs = set()
    def file(self, fname, mode=None):
        """
        Create a file in the tempdir and remember it so as to close() it
        before attempting to cleanup the temp dir.
        @rtype: file
        """
        ffn = os.path.join(self.name, fname)
        if mode is not None:
            fo = open(ffn, mode)
        else:
            fo = open(ffn)
        self.register_file(fo)
        return fo
    def subdir(self, dirname):
        """
        Create a subdirectory in the tempdir and remember it so as to call
        shutdown() on it before attempting to clean up.
        @rtype: _Dir instance
        """
        ffn = os.path.join(self.name, dirname)
        # The child inherits this dir's cleanup policy.
        sd = _Dir(ffn, self.cleanup)
        self.register_subdir(sd)
        make_dirs(sd.name)
        return sd
    def register_file(self, fileobj):
        """
        Remember the file object and call close() on it before attempting to
        clean up.
        """
        self.files.append(fileobj)
    def register_subdir(self, dirobj):
        """
        Remember the _Dir object and call shutdown() on it before attempting
        to clean up.
        """
        self.subdirs.add(dirobj)
    def shutdown(self):
        # Recursively shut down subdirs and close tracked files, then remove
        # the directory itself -- but only when cleanup was requested.
        if self.cleanup:
            # The hasattr() guards protect against shutdown() being invoked
            # via __del__ on a partially-constructed instance.
            for subdir in hasattr(self, 'subdirs') and self.subdirs or []:
                subdir.shutdown()
            for fileobj in hasattr(self, 'files') and self.files or []:
                if DBNoSuchFileError is None:
                    fileobj.close() # "close()" is idempotent so we don't need to catch exceptions here
                else:
                    try:
                        fileobj.close()
                    except DBNoSuchFileError:
                        # Ah, except that the bsddb module's file-like object (a DB object) has a non-idempotent close...
                        pass
            if hasattr(self, 'name'):
                rm_dir(self.name)
    def __repr__(self):
        return "<%s instance at %x %s>" % (self.__class__.__name__, id(self), self.name)
    def __str__(self):
        return self.__repr__()
    def __del__(self):
        # Best-effort cleanup at garbage-collection time; never let an
        # exception escape __del__, just report it.
        try:
            self.shutdown()
        except:
            import traceback
            traceback.print_exc()
class NamedTemporaryDirectory(_Dir):
    """
    Create a temporary directory with tempfile.mkdtemp(), expose its path
    as self.name, and rm_dir() it when the object is garbage collected or
    its shutdown() method is called.

    File objects opened inside the directory through this object are
    tracked and close()d before removal.  That is convenient generally,
    and essential on Windows, where a directory containing a currently
    open file cannot be deleted.
    """
    def __init__(self, cleanup=True, *args, **kwargs):
        """If cleanup, then the directory will be rmrf'ed when the object is shutdown."""
        dirpath = tempfile.mkdtemp(*args, **kwargs)
        _Dir.__init__(self, dirpath, cleanup)
class ReopenableNamedTemporaryFile:
    """
    Securely create a temp file with tempfile.mkstemp(), then immediately
    close it, leaving a zero-length placeholder whose path is available as
    self.name.  The file is deleted when shutdown() is called or when the
    instance is garbage collected.
    """
    def __init__(self, *args, **kwargs):
        (handle, self.name) = tempfile.mkstemp(*args, **kwargs)
        os.close(handle)
    def __repr__(self):
        return "<%s instance at %x %s>" % (self.__class__.__name__, id(self), self.name)
    def __str__(self):
        return self.__repr__()
    def __del__(self):
        self.shutdown()
    def shutdown(self):
        remove(self.name)
def make_dirs(dirname, mode=0o777):
    """
    An idempotent version of os.makedirs(). If the dir already exists, do
    nothing and return without raising an exception. If this call creates the
    dir, return without raising an exception. If there is an error that
    prevents creation or if the directory gets deleted after make_dirs() creates
    it and before make_dirs() checks that it exists, raise an exception.
    """
    # Modernized from Python-2-only syntax (0777 literal, `except OSError, x`,
    # `raise exceptions.IOError, msg`) to forms valid on Python 2.6+ and 3.x;
    # values and behavior are unchanged (builtin IOError is exceptions.IOError
    # on Python 2).
    tx = None
    try:
        os.makedirs(dirname, mode)
    except OSError as x:
        # Remember the failure; it is only fatal if the dir still doesn't
        # exist (e.g. it was created concurrently by another thread).
        tx = x
    if not os.path.isdir(dirname):
        if tx:
            raise tx
        # careful not to construct an IOError with a 2-tuple, as that has a
        # special meaning...
        raise IOError("unknown error prevented creation of directory, or deleted the directory immediately after creation: %s" % dirname)
def rmtree(dirname):
    """
    A threadsafe and idempotent version of shutil.rmtree(). If the dir is
    already gone, do nothing and return without raising an exception. If this
    call removes the dir, return without raising an exception. If there is an
    error that prevents deletion or if the directory gets created again after
    rm_dir() deletes it and before rm_dir() checks that it is gone, raise an
    exception.
    """
    excs = []
    try:
        # Make sure we are allowed to traverse/delete, then clear the tree.
        os.chmod(dirname, stat.S_IWRITE | stat.S_IEXEC | stat.S_IREAD)
        for f in os.listdir(dirname):
            fullname = os.path.join(dirname, f)
            if os.path.isdir(fullname):
                rm_dir(fullname)
            else:
                remove(fullname)
        os.rmdir(dirname)
    except EnvironmentError as le:
        # Ignore "No such file or directory" (errno 2, plus 3 which Windows
        # can report for a missing path); collect any other exception.
        # BUGFIX: the old test `(a != 2 and a != 3) or (a != errno.ENOENT)`
        # was true for every errno except 2, so errno 3 -- which the code
        # explicitly meant to tolerate -- was collected as a failure.
        if le.args[0] not in (2, 3, errno.ENOENT):
            excs.append(le)
    except Exception as le:
        excs.append(le)
    # Okay, now we've recursively removed everything, ignoring any "No
    # such file or directory" errors, and collecting any other errors.
    if os.path.exists(dirname):
        if len(excs) == 1:
            raise excs[0]
        if len(excs) == 0:
            raise OSError("Failed to remove dir for unknown reason.")
        raise OSError(excs)
def rm_dir(dirname):
    # Renamed to be like shutil.rmtree and unlike rmdir.
    # Alias for rmtree(): recursively and idempotently remove dirname.
    return rmtree(dirname)
def remove_if_possible(f):
    # Best-effort delete: swallow any EnvironmentError (missing file,
    # permission problem, ...) rather than propagating it.
    try:
        remove(f)
    except EnvironmentError:
        pass
def remove_if_present(f):
    """
    Remove the file f, tolerating (only) its absence.

    "No such file or directory" errors are swallowed; any other
    EnvironmentError is re-raised.
    """
    try:
        remove(f)
    except EnvironmentError as le:
        # Ignore "No such file or directory", re-raise any other exception.
        # BUGFIX: the old test `(a != 2 and a != 3) or (a != errno.ENOENT)`
        # was true for every errno except 2, so errno 3 -- explicitly listed
        # as tolerable -- was re-raised.  (Also modernized from the
        # Python-2-only `except E, le` syntax.)
        if le.args[0] not in (2, 3, errno.ENOENT):
            raise
def rmdir_if_possible(f):
    # Best-effort directory removal: swallow any EnvironmentError (missing
    # dir, dir not empty, ...) rather than propagating it.
    try:
        rmdir(f)
    except EnvironmentError:
        pass
def open_or_create(fname, binarymode=True):
    """
    Open fname for reading and updating, creating it (empty) if it does
    not already exist.  Returns the open file object; binarymode selects
    binary vs. text mode.
    """
    mode_suffix = "b" if binarymode else ""
    try:
        return open(fname, "r+" + mode_suffix)
    except EnvironmentError:
        # Missing (or unopenable-for-update) file: create it instead.
        return open(fname, "w+" + mode_suffix)
def du(basedir):
    """
    Recursively total the sizes, in bytes, of all files under basedir
    (disk usage, like the 'du' utility).
    """
    total = 0
    for dirpath, _dirnames, filenames in os.walk(basedir):
        total += sum(os.path.getsize(os.path.join(dirpath, name))
                     for name in filenames)
    return total
|
anggorodewanto/oppia | refs/heads/develop | core/storage/base_model/__init__.py | 12133432 | |
SoftwareMaven/django | refs/heads/master | tests/migrations/migrations_test_apps/alter_fk/author_app/__init__.py | 12133432 | |
sagar30051991/ozsmart-erp | refs/heads/master | erpnext/projects/report/project_wise_stock_tracking/project_wise_stock_tracking.py | 62 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
def execute(filters=None):
	"""Return (columns, data) for the Project-wise Stock Tracking report.

	One row per Project, combining the project's master fields with its
	total purchased / issued / delivered item costs (0 when a project has
	no entries of that kind)."""
	columns = get_columns()
	projects = get_project_details()
	purchased_cost = get_purchased_items_cost()
	issued_cost = get_issued_items_cost()
	delivered_cost = get_delivered_items_cost()
	data = [
		[project.name, purchased_cost.get(project.name, 0),
			issued_cost.get(project.name, 0), delivered_cost.get(project.name, 0),
			project.project_name, project.status, project.company,
			project.customer, project.estimated_costing, project.expected_start_date,
			project.expected_end_date]
		for project in projects]
	return columns, data
def get_columns():
	"""Report column definitions, in "Label:Fieldtype/Options:Width" format."""
	return [_("Project Id") + ":Link/Project:140", _("Cost of Purchased Items") + ":Currency:160",
		_("Cost of Issued Items") + ":Currency:160", _("Cost of Delivered Items") + ":Currency:160",
		_("Project Name") + "::120", _("Project Status") + "::120", _("Company") + ":Link/Company:100",
		_("Customer") + ":Link/Customer:140", _("Project Value") + ":Currency:120",
		_("Project Start Date") + ":Date:120", _("Completion Date") + ":Date:120"]
def get_project_details():
	"""Fetch all non-cancelled (docstatus < 2) Projects with their master fields, as dicts."""
	return frappe.db.sql(""" select name, project_name, status, company, customer, estimated_costing,
		expected_start_date, expected_end_date from tabProject where docstatus < 2""", as_dict=1)
def get_purchased_items_cost():
	"""Map project name -> total base_net_amount of submitted Purchase Receipt Items."""
	rows = frappe.db.sql("""select project, sum(base_net_amount) as amount
		from `tabPurchase Receipt Item` where ifnull(project, '') != ''
		and docstatus = 1 group by project""", as_dict=1)
	cost_by_project = {}
	for row in rows:
		cost_by_project.setdefault(row.project, row.amount)
	return cost_by_project
def get_issued_items_cost():
	"""Map project name -> total amount of stock issued (no target warehouse) via submitted Stock Entries."""
	rows = frappe.db.sql("""select se.project, sum(se_item.amount) as amount
		from `tabStock Entry` se, `tabStock Entry Detail` se_item
		where se.name = se_item.parent and se.docstatus = 1 and ifnull(se_item.t_warehouse, '') = ''
		and ifnull(se.project, '') != '' group by se.project""", as_dict=1)
	cost_by_project = {}
	for row in rows:
		cost_by_project.setdefault(row.project, row.amount)
	return cost_by_project
def get_delivered_items_cost():
	"""Map project name -> total delivered cost, from submitted Delivery Notes
	plus POS Sales Invoices that update stock."""
	dn_items = frappe.db.sql("""select dn.project, sum(dn_item.base_net_amount) as amount
		from `tabDelivery Note` dn, `tabDelivery Note Item` dn_item
		where dn.name = dn_item.parent and dn.docstatus = 1 and ifnull(dn.project, '') != ''
		group by dn.project""", as_dict=1)
	si_items = frappe.db.sql("""select si.project, sum(si_item.base_net_amount) as amount
		from `tabSales Invoice` si, `tabSales Invoice Item` si_item
		where si.name = si_item.parent and si.docstatus = 1 and si.update_stock = 1
		and si.is_pos = 1 and ifnull(si.project, '') != ''
		group by si.project""", as_dict=1)
	dn_item_map = {}
	for item in dn_items:
		dn_item_map.setdefault(item.project, item.amount)
	# setdefault keeps the Delivery Note figure when a project appears in
	# both result sets.
	for item in si_items:
		dn_item_map.setdefault(item.project, item.amount)
	return dn_item_map
|
jounex/hue | refs/heads/master | desktop/core/ext-py/boto-2.38.0/boto/ec2/securitygroup.py | 150 | # Copyright (c) 2006-2011 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2011, Eucalyptus Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Represents an EC2 Security Group
"""
from boto.ec2.ec2object import TaggedEC2Object
from boto.exception import BotoClientError
class SecurityGroup(TaggedEC2Object):
    """
    Represents an EC2 Security Group and its rule sets.

    Instances are normally built by boto's response parser via the
    startElement/endElement callbacks; ``rules`` holds ingress rules and
    ``rules_egress`` holds egress rules (populated for VPC groups).
    """
    def __init__(self, connection=None, owner_id=None,
                 name=None, description=None, id=None):
        super(SecurityGroup, self).__init__(connection)
        self.id = id
        self.owner_id = owner_id
        self.name = name
        self.description = description
        self.vpc_id = None
        # Ingress and (for VPC groups) egress rules, filled in by the parser.
        self.rules = IPPermissionsList()
        self.rules_egress = IPPermissionsList()
    def __repr__(self):
        return 'SecurityGroup:%s' % self.name
    def startElement(self, name, attrs, connection):
        # Give TaggedEC2Object first crack (it handles tag-related elements);
        # otherwise route the two rule containers to their list handlers.
        retval = super(SecurityGroup, self).startElement(name, attrs, connection)
        if retval is not None:
            return retval
        if name == 'ipPermissions':
            return self.rules
        elif name == 'ipPermissionsEgress':
            return self.rules_egress
        else:
            return None
    def endElement(self, name, value, connection):
        if name == 'ownerId':
            self.owner_id = value
        elif name == 'groupId':
            self.id = value
        elif name == 'groupName':
            self.name = value
        elif name == 'vpcId':
            self.vpc_id = value
        elif name == 'groupDescription':
            self.description = value
        elif name == 'ipRanges':
            pass
        elif name == 'return':
            # Boolean API responses carry a <return> element; record it as
            # self.status (an attribute created here, not in __init__).
            if value == 'false':
                self.status = False
            elif value == 'true':
                self.status = True
            else:
                raise Exception(
                    'Unexpected value of status %s for group %s' % (
                        value,
                        self.name
                    )
                )
        else:
            # Unknown tags become raw attributes under their XML names.
            setattr(self, name, value)
    def delete(self, dry_run=False):
        # VPC groups must be deleted by id; EC2-Classic groups by name.
        if self.vpc_id:
            return self.connection.delete_security_group(
                group_id=self.id,
                dry_run=dry_run
            )
        else:
            return self.connection.delete_security_group(
                self.name,
                dry_run=dry_run
            )
    def add_rule(self, ip_protocol, from_port, to_port,
                 src_group_name, src_group_owner_id, cidr_ip,
                 src_group_group_id, dry_run=False):
        """
        Add a rule to the SecurityGroup object. Note that this method
        only changes the local version of the object. No information
        is sent to EC2.
        """
        rule = IPPermissions(self)
        rule.ip_protocol = ip_protocol
        rule.from_port = from_port
        rule.to_port = to_port
        self.rules.append(rule)
        rule.add_grant(
            src_group_name,
            src_group_owner_id,
            cidr_ip,
            src_group_group_id,
            dry_run=dry_run
        )
    def remove_rule(self, ip_protocol, from_port, to_port,
                    src_group_name, src_group_owner_id, cidr_ip,
                    src_group_group_id, dry_run=False):
        """
        Remove a rule to the SecurityGroup object. Note that this method
        only changes the local version of the object. No information
        is sent to EC2.
        """
        if not self.rules:
            raise ValueError("The security group has no rules")
        target_rule = None
        for rule in self.rules:
            if rule.ip_protocol == ip_protocol:
                if rule.from_port == from_port:
                    if rule.to_port == to_port:
                        target_rule = rule
                        target_grant = None
                        for grant in rule.grants:
                            if grant.name == src_group_name or grant.group_id == src_group_group_id:
                                if grant.owner_id == src_group_owner_id:
                                    if grant.cidr_ip == cidr_ip:
                                        target_grant = grant
                        if target_grant:
                            rule.grants.remove(target_grant)
        # NOTE(review): this inspects `rule` -- the *last* rule iterated --
        # rather than target_rule, and if no rule matched, target_rule is
        # None and remove(None) would raise ValueError.  Looks fragile;
        # confirm intent before relying on the no-match path.
        if len(rule.grants) == 0:
            self.rules.remove(target_rule)
    def authorize(self, ip_protocol=None, from_port=None, to_port=None,
                  cidr_ip=None, src_group=None, dry_run=False):
        """
        Add a new rule to this security group.
        You need to pass in either src_group_name
        OR ip_protocol, from_port, to_port,
        and cidr_ip. In other words, either you are authorizing another
        group or you are authorizing some ip-based rule.
        :type ip_protocol: string
        :param ip_protocol: Either tcp | udp | icmp
        :type from_port: int
        :param from_port: The beginning port number you are enabling
        :type to_port: int
        :param to_port: The ending port number you are enabling
        :type cidr_ip: string or list of strings
        :param cidr_ip: The CIDR block you are providing access to.
            See http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing
        :type src_group: :class:`boto.ec2.securitygroup.SecurityGroup` or
                         :class:`boto.ec2.securitygroup.GroupOrCIDR`
        :param src_group: The Security Group you are granting access to.
        :rtype: bool
        :return: True if successful.
        """
        group_name = None
        if not self.vpc_id:
            group_name = self.name
        group_id = None
        if self.vpc_id:
            group_id = self.id
        src_group_name = None
        src_group_owner_id = None
        src_group_group_id = None
        if src_group:
            # Group-based rule: a CIDR makes no sense, and VPC groups are
            # identified by id while EC2-Classic groups go by name.
            cidr_ip = None
            src_group_owner_id = src_group.owner_id
            if not self.vpc_id:
                src_group_name = src_group.name
            else:
                if hasattr(src_group, 'group_id'):
                    src_group_group_id = src_group.group_id
                else:
                    src_group_group_id = src_group.id
        status = self.connection.authorize_security_group(group_name,
                                                          src_group_name,
                                                          src_group_owner_id,
                                                          ip_protocol,
                                                          from_port,
                                                          to_port,
                                                          cidr_ip,
                                                          group_id,
                                                          src_group_group_id,
                                                          dry_run=dry_run)
        if status:
            # Mirror the change into the local rules list.
            if not isinstance(cidr_ip, list):
                cidr_ip = [cidr_ip]
            for single_cidr_ip in cidr_ip:
                self.add_rule(ip_protocol, from_port, to_port, src_group_name,
                              src_group_owner_id, single_cidr_ip,
                              src_group_group_id, dry_run=dry_run)
        return status
    def revoke(self, ip_protocol=None, from_port=None, to_port=None,
               cidr_ip=None, src_group=None, dry_run=False):
        """
        Remove an existing rule from this security group; the mirror image
        of authorize(), with the same parameter semantics.  On success the
        local rules list is updated as well.
        """
        group_name = None
        if not self.vpc_id:
            group_name = self.name
        group_id = None
        if self.vpc_id:
            group_id = self.id
        src_group_name = None
        src_group_owner_id = None
        src_group_group_id = None
        if src_group:
            cidr_ip = None
            src_group_owner_id = src_group.owner_id
            if not self.vpc_id:
                src_group_name = src_group.name
            else:
                if hasattr(src_group, 'group_id'):
                    src_group_group_id = src_group.group_id
                else:
                    src_group_group_id = src_group.id
        status = self.connection.revoke_security_group(group_name,
                                                       src_group_name,
                                                       src_group_owner_id,
                                                       ip_protocol,
                                                       from_port,
                                                       to_port,
                                                       cidr_ip,
                                                       group_id,
                                                       src_group_group_id,
                                                       dry_run=dry_run)
        if status:
            self.remove_rule(ip_protocol, from_port, to_port, src_group_name,
                             src_group_owner_id, cidr_ip, src_group_group_id,
                             dry_run=dry_run)
        return status
    def copy_to_region(self, region, name=None, dry_run=False):
        """
        Create a copy of this security group in another region.
        Note that the new security group will be a separate entity
        and will not stay in sync automatically after the copy
        operation.
        :type region: :class:`boto.ec2.regioninfo.RegionInfo`
        :param region: The region to which this security group will be copied.
        :type name: string
        :param name: The name of the copy.  If not supplied, the copy
                     will have the same name as this security group.
        :rtype: :class:`boto.ec2.securitygroup.SecurityGroup`
        :return: The new security group.
        """
        # NOTE(review): self.region is not set in __init__; it is presumably
        # populated by the base EC2Object during response parsing -- confirm
        # before calling this on a hand-constructed instance.
        if region.name == self.region:
            raise BotoClientError('Unable to copy to the same Region')
        conn_params = self.connection.get_params()
        rconn = region.connect(**conn_params)
        sg = rconn.create_security_group(
            name or self.name,
            self.description,
            dry_run=dry_run
        )
        source_groups = []
        for rule in self.rules:
            for grant in rule.grants:
                grant_nom = grant.name or grant.group_id
                if grant_nom:
                    # Group-based grant: authorize each source group once.
                    if grant_nom not in source_groups:
                        source_groups.append(grant_nom)
                        sg.authorize(None, None, None, None, grant,
                                     dry_run=dry_run)
                else:
                    # CIDR-based grant.
                    sg.authorize(rule.ip_protocol, rule.from_port, rule.to_port,
                                 grant.cidr_ip, dry_run=dry_run)
        return sg
    def instances(self, dry_run=False):
        """
        Find all of the current instances that are running within this
        security group.
        :rtype: list of :class:`boto.ec2.instance.Instance`
        :return: A list of Instance objects
        """
        rs = []
        # VPC and EC2-Classic use different reservation filters.
        if self.vpc_id:
            rs.extend(self.connection.get_all_reservations(
                filters={'instance.group-id': self.id},
                dry_run=dry_run
            ))
        else:
            rs.extend(self.connection.get_all_reservations(
                filters={'group-id': self.id},
                dry_run=dry_run
            ))
        instances = [i for r in rs for i in r.instances]
        return instances
class IPPermissionsList(list):
    """
    SAX-style handler for an <ipPermissions>/<ipPermissionsEgress> element:
    a list that grows one IPPermissions per <item> child.
    """
    def startElement(self, name, attrs, connection):
        # Each <item> opens a new rule; return it so the parser routes
        # nested elements to the IPPermissions instance.
        if name == 'item':
            self.append(IPPermissions(self))
            return self[-1]
        return None
    def endElement(self, name, value, connection):
        # No scalar fields of our own to record.
        pass
class IPPermissions(object):
    """
    A single security group rule: an IP protocol plus port range, together
    with the list of grants (source groups or CIDR ranges) it applies to.
    Populated by the SAX-style parser callbacks.
    """
    def __init__(self, parent=None):
        self.parent = parent
        self.ip_protocol = None
        # Port bounds are stored verbatim as received from the parser.
        self.from_port = None
        self.to_port = None
        self.grants = []
    def __repr__(self):
        return 'IPPermissions:%s(%s-%s)' % (self.ip_protocol,
                                            self.from_port, self.to_port)
    def startElement(self, name, attrs, connection):
        # Each <item> under this rule opens a new grant.
        if name == 'item':
            self.grants.append(GroupOrCIDR(self))
            return self.grants[-1]
        return None
    def endElement(self, name, value, connection):
        if name == 'ipProtocol':
            self.ip_protocol = value
        elif name == 'fromPort':
            self.from_port = value
        elif name == 'toPort':
            self.to_port = value
        else:
            # Unknown tags become raw attributes under their XML names.
            setattr(self, name, value)
    def add_grant(self, name=None, owner_id=None, cidr_ip=None, group_id=None,
                  dry_run=False):
        """
        Append a locally-constructed grant to this rule and return it.
        No information is sent to EC2; dry_run is accepted for signature
        parity with the calling code but is unused here.
        """
        grant = GroupOrCIDR(self)
        grant.owner_id = owner_id
        grant.group_id = group_id
        grant.name = name
        grant.cidr_ip = cidr_ip
        self.grants.append(grant)
        return grant
class GroupOrCIDR(object):
    """
    The source/destination of a security group rule: either another security
    group (name and/or group_id plus owner_id) or a CIDR IP range (cidr_ip).
    Populated by the SAX-style parser callbacks.
    """
    def __init__(self, parent=None):
        self.owner_id = None
        self.group_id = None
        self.name = None
        self.cidr_ip = None
    def __repr__(self):
        if self.cidr_ip:
            return '%s' % self.cidr_ip
        else:
            return '%s-%s' % (self.name or self.group_id, self.owner_id)
    def startElement(self, name, attrs, connection):
        # No nested elements of interest.
        return None
    def endElement(self, name, value, connection):
        # Map each XML element onto the matching attribute.
        # BUGFIX: the cidrIp case used to be a fresh `if` after the elif
        # chain, which broke the chain -- every userId/groupId/groupName
        # element also fell into the `else` branch and set a stray raw
        # attribute (e.g. self.userId).  A single elif chain fixes that.
        if name == 'userId':
            self.owner_id = value
        elif name == 'groupId':
            self.group_id = value
        elif name == 'groupName':
            self.name = value
        elif name == 'cidrIp':
            self.cidr_ip = value
        else:
            # Unknown tags become raw attributes under their XML names.
            setattr(self, name, value)
|
Jspsun/LEETCodePractice | refs/heads/master | Python/StudentAttendanceRecord.py | 1 | class Solution(object):
def checkRecord(self, s):
"""
:type s: str
:rtype: bool
"""
noAbs = 0
contL = 0
for l in s:
if l == 'A':
noAbs += 1
contL = 0
elif l == 'L':
contL += 1
else:
contL = 0
if noAbs > 1 or contL > 2:
return False
return True
|
roth1002/react-native-intro-qnap | refs/heads/gh-pages | node_modules/node-gyp/gyp/pylib/gyp/__init__.py | 1524 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
import gyp.input
import optparse
import os.path
import re
import shlex
import sys
import traceback
from gyp.common import GypError
# Default debug modes for GYP
# Keys of this dict are debug-mode names; DebugOutput() prints a message only
# when its mode (or the catch-all 'all') is present as a key here.
debug = {}
# List of "official" debug modes, but you can use anything you like.
DEBUG_GENERAL = 'general'
DEBUG_VARIABLES = 'variables'
DEBUG_INCLUDES = 'includes'
def DebugOutput(mode, message, *args):
  """Print message (%-formatted with args, if any) prefixed with
  MODE:file:line:function, but only when `mode` (or 'all') has been enabled
  in gyp.debug."""
  if 'all' in gyp.debug or mode in gyp.debug:
    ctx = ('unknown', 0, 'unknown')
    try:
      # Identify the caller: (filename, lineno, function).
      f = traceback.extract_stack(limit=2)
      if f:
        ctx = f[0][:3]
    except:
      # NOTE(review): bare except deliberately ignores any failure while
      # resolving the caller's location, leaving the 'unknown' placeholders.
      pass
    if args:
      message %= args
    print '%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]),
        ctx[1], ctx[2], message)
def FindBuildFiles():
  """Return the names of all '.gyp' files in the current working directory."""
  extension = '.gyp'
  return [entry for entry in os.listdir(os.getcwd())
          if entry.endswith(extension)]
def Load(build_files, format, default_variables={},
         includes=[], depth='.', params=None, check=False,
         circular_check=True, duplicate_basename_check=True):
  """
  Loads one or more specified build files.
  default_variables and includes will be copied before use.
  Returns the generator for the specified format and the
  data returned by loading the specified build files.
  format may be '<generator>' or '<generator>-<flavor>'; a format ending in
  '.py' names a custom generator module loaded from disk instead of from
  the gyp.generator package.
  """
  if params is None:
    params = {}
  if '-' in format:
    format, params['flavor'] = format.split('-', 1)
  # Copy so callers' dict is never mutated (the {} default is never mutated
  # either, for the same reason).
  default_variables = copy.copy(default_variables)
  # Default variables provided by this program and its modules should be
  # named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
  # avoiding collisions with user and automatic variables.
  default_variables['GENERATOR'] = format
  default_variables['GENERATOR_FLAVOR'] = params.get('flavor', '')
  # Format can be a custom python file, or by default the name of a module
  # within gyp.generator.
  if format.endswith('.py'):
    generator_name = os.path.splitext(format)[0]
    path, generator_name = os.path.split(generator_name)
    # Make sure the path to the custom generator is in sys.path
    # Don't worry about removing it once we are done. Keeping the path
    # to each generator that is used in sys.path is likely harmless and
    # arguably a good idea.
    path = os.path.abspath(path)
    if path not in sys.path:
      sys.path.insert(0, path)
  else:
    generator_name = 'gyp.generator.' + format
  # These parameters are passed in order (as opposed to by key)
  # because ActivePython cannot handle key parameters to __import__.
  generator = __import__(generator_name, globals(), locals(), generator_name)
  for (key, val) in generator.generator_default_variables.items():
    default_variables.setdefault(key, val)
  # Give the generator the opportunity to set additional variables based on
  # the params it will receive in the output phase.
  if getattr(generator, 'CalculateVariables', None):
    generator.CalculateVariables(default_variables, params)
  # Give the generator the opportunity to set generator_input_info based on
  # the params it will receive in the output phase.
  if getattr(generator, 'CalculateGeneratorInputInfo', None):
    generator.CalculateGeneratorInputInfo(params)
  # Fetch the generator specific info that gets fed to input, we use getattr
  # so we can default things and the generators only have to provide what
  # they need.
  generator_input_info = {
    'non_configuration_keys':
        getattr(generator, 'generator_additional_non_configuration_keys', []),
    'path_sections':
        getattr(generator, 'generator_additional_path_sections', []),
    'extra_sources_for_rules':
        getattr(generator, 'generator_extra_sources_for_rules', []),
    'generator_supports_multiple_toolsets':
        getattr(generator, 'generator_supports_multiple_toolsets', False),
    'generator_wants_static_library_dependencies_adjusted':
        getattr(generator,
                'generator_wants_static_library_dependencies_adjusted', True),
    'generator_wants_sorted_dependencies':
        getattr(generator, 'generator_wants_sorted_dependencies', False),
    'generator_filelist_paths':
        getattr(generator, 'generator_filelist_paths', None),
  }
  # Process the input specific to this generator.
  result = gyp.input.Load(build_files, default_variables, includes[:],
                          depth, generator_input_info, check, circular_check,
                          duplicate_basename_check,
                          params['parallel'], params['root_targets'])
  return [generator] + result
def NameValueListToDict(name_value_list):
  """
  Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
  of the pairs. If a string is simply NAME, then the value in the dictionary
  is set to True. If VALUE can be converted to an integer, it is.
  """
  result = {}
  for item in name_value_list:
    name, sep, value = item.partition('=')
    if not sep:
      # Bare NAME with no '=': treat it as a boolean flag.
      result[name] = True
      continue
    try:
      # Prefer an integer when the text parses as one.
      result[name] = int(value)
    except ValueError:
      result[name] = value
  return result
def ShlexEnv(env_name):
  """Return os.environ[env_name] split into shell-style tokens.

  Preserves the historical quirk of the original: an unset variable yields
  [] and a variable set to the empty string yields '' (both falsy)."""
  value = os.environ.get(env_name, [])
  return shlex.split(value) if value else value
def FormatOpt(opt, value):
  """Render a flag/value pair: '--opt=value' for long options,
  '-Ovalue' (no separator) for short ones."""
  joiner = '=' if opt.startswith('--') else ''
  return opt + joiner + value
def RegenerateAppendFlag(flag, values, predicate, env_name, options):
  """Regenerate a list of command line flags, for an option of action='append'.

  The |env_name|, if given, is checked in the environment and used to generate
  an initial list of options, then the options that were specified on the
  command line (given in |values|) are appended. This matches the handling of
  environment variables and command line flags where command line flags
  override the environment, while not requiring the environment to be set when
  the flags are used again.
  """
  flags = []
  if options.use_environment and env_name:
    for env_value in ShlexEnv(env_name):
      formatted = FormatOpt(flag, predicate(env_value))
      # Keep only the last occurrence of a repeated value, preserving order.
      if formatted in flags:
        flags.remove(formatted)
      flags.append(formatted)
  flags.extend(FormatOpt(flag, predicate(v)) for v in values or [])
  return flags
def RegenerateFlags(options):
  """Given a parsed options object, and taking the environment variables into
  account, returns a list of flags that should regenerate an equivalent options
  object (even in the absence of the environment variables.)
  Any path options will be normalized relative to depth.
  The format flag is not included, as it is assumed the calling generator will
  set that as appropriate.
  """
  # Normalize a path option relative to --depth; empty result means '.'.
  def FixPath(path):
    path = gyp.common.FixIfRelativePath(path, options.depth)
    if not path:
      return os.path.curdir
    return path
  def Noop(value):
    return value
  # We always want to ignore the environment when regenerating, to avoid
  # duplicate or changed flags in the environment at the time of regeneration.
  flags = ['--ignore-environment']
  for name, metadata in options._regeneration_metadata.iteritems():
    opt = metadata['opt']
    value = getattr(options, name)
    # py2 idiom: 'A and X or Y' == 'X if A else Y' (safe: both are truthy).
    value_predicate = metadata['type'] == 'path' and FixPath or Noop
    action = metadata['action']
    env_name = metadata['env_name']
    if action == 'append':
      flags.extend(RegenerateAppendFlag(opt, value, value_predicate,
                                        env_name, options))
    elif action in ('store', None): # None is a synonym for 'store'.
      if value:
        flags.append(FormatOpt(opt, value_predicate(value)))
      elif options.use_environment and env_name and os.environ.get(env_name):
        flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name))))
    elif action in ('store_true', 'store_false'):
      if ((action == 'store_true' and value) or
          (action == 'store_false' and not value)):
        flags.append(opt)
      elif options.use_environment and env_name:
        print >>sys.stderr, ('Warning: environment regeneration unimplemented '
                             'for %s flag %r env_name %r' % (action, opt,
                                                             env_name))
    else:
      print >>sys.stderr, ('Warning: regeneration unimplemented for action %r '
                           'flag %r' % (action, opt))
  return flags
class RegeneratableOptionParser(optparse.OptionParser):
  """OptionParser that records per-option metadata so RegenerateFlags() can
  later rebuild an equivalent command line."""
  def __init__(self):
    # dest -> {action, type, env_name, opt} for every regeneratable option.
    self.__regeneratable_options = {}
    optparse.OptionParser.__init__(self)
  def add_option(self, *args, **kw):
    """Add an option to the parser.
    This accepts the same arguments as OptionParser.add_option, plus the
    following:
      regenerate: can be set to False to prevent this option from being included
                  in regeneration.
      env_name: name of environment variable that additional values for this
                option come from.
      type: adds type='path', to tell the regenerator that the values of
            this option need to be made relative to options.depth
    """
    env_name = kw.pop('env_name', None)
    # Note: 'regenerate' is only popped when 'dest' is present (short-circuit),
    # matching the historical behavior.
    if 'dest' in kw and kw.pop('regenerate', True):
      dest = kw['dest']
      # The path type is needed for regenerating; optparse itself can just
      # treat it as a plain string.
      opt_type = kw.get('type')
      if opt_type == 'path':
        kw['type'] = 'string'
      self.__regeneratable_options[dest] = dict(
          action=kw.get('action'),
          type=opt_type,
          env_name=env_name,
          opt=args[0])
    optparse.OptionParser.add_option(self, *args, **kw)
  def parse_args(self, *args):
    values, leftover = optparse.OptionParser.parse_args(self, *args)
    # Expose the recorded metadata on the values object for RegenerateFlags().
    values._regeneration_metadata = self.__regeneratable_options
    return values, leftover
def gyp_main(args):
  """Parse |args|, locate build files and configuration, load the build graph
  and run every requested generator over it.

  Returns 0 on success. Raises GypError for user-visible failures (no build
  file found, bad --build config, src/ depth not locatable).
  """
  my_name = os.path.basename(sys.argv[0])
  parser = RegeneratableOptionParser()
  usage = 'usage: %s [options ...] [build_file ...]'
  parser.set_usage(usage.replace('%s', '%prog'))
  parser.add_option('--build', dest='configs', action='append',
                    help='configuration for build after project generation')
  parser.add_option('--check', dest='check', action='store_true',
                    help='check format of gyp files')
  parser.add_option('--config-dir', dest='config_dir', action='store',
                    env_name='GYP_CONFIG_DIR', default=None,
                    help='The location for configuration files like '
                    'include.gypi.')
  parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE',
                    action='append', default=[], help='turn on a debugging '
                    'mode for debugging GYP. Supported modes are "variables", '
                    '"includes" and "general" or "all" for all of them.')
  parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL',
                    env_name='GYP_DEFINES',
                    help='sets variable VAR to value VAL')
  parser.add_option('--depth', dest='depth', metavar='PATH', type='path',
                    help='set DEPTH gyp variable to a relative path to PATH')
  parser.add_option('-f', '--format', dest='formats', action='append',
                    env_name='GYP_GENERATORS', regenerate=False,
                    help='output formats to generate')
  parser.add_option('-G', dest='generator_flags', action='append', default=[],
                    metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS',
                    help='sets generator flag FLAG to VAL')
  parser.add_option('--generator-output', dest='generator_output',
                    action='store', default=None, metavar='DIR', type='path',
                    env_name='GYP_GENERATOR_OUTPUT',
                    help='puts generated build files under DIR')
  parser.add_option('--ignore-environment', dest='use_environment',
                    action='store_false', default=True, regenerate=False,
                    help='do not read options from environment variables')
  parser.add_option('-I', '--include', dest='includes', action='append',
                    metavar='INCLUDE', type='path',
                    help='files to include in all loaded .gyp files')
  # --no-circular-check disables the check for circular relationships between
  # .gyp files. These relationships should not exist, but they've only been
  # observed to be harmful with the Xcode generator. Chromium's .gyp files
  # currently have some circular relationships on non-Mac platforms, so this
  # option allows the strict behavior to be used on Macs and the lenient
  # behavior to be used elsewhere.
  # TODO(mark): Remove this option when http://crbug.com/35878 is fixed.
  parser.add_option('--no-circular-check', dest='circular_check',
                    action='store_false', default=True, regenerate=False,
                    help="don't check for circular relationships between files")
  # --no-duplicate-basename-check disables the check for duplicate basenames
  # in a static_library/shared_library project. Visual C++ 2008 generator
  # doesn't support this configuration. Libtool on Mac also generates warnings
  # when duplicate basenames are passed into Make generator on Mac.
  # TODO(yukawa): Remove this option when these legacy generators are
  # deprecated.
  parser.add_option('--no-duplicate-basename-check',
                    dest='duplicate_basename_check', action='store_false',
                    default=True, regenerate=False,
                    help="don't check for duplicate basenames")
  parser.add_option('--no-parallel', action='store_true', default=False,
                    help='Disable multiprocessing')
  parser.add_option('-S', '--suffix', dest='suffix', default='',
                    help='suffix to add to generated files')
  parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store',
                    default=None, metavar='DIR', type='path',
                    help='directory to use as the root of the source tree')
  parser.add_option('-R', '--root-target', dest='root_targets',
                    action='append', metavar='TARGET',
                    help='include only TARGET and its deep dependencies')
  options, build_files_arg = parser.parse_args(args)
  build_files = build_files_arg
  # Set up the configuration directory (defaults to ~/.gyp)
  if not options.config_dir:
    home = None
    home_dot_gyp = None
    if options.use_environment:
      home_dot_gyp = os.environ.get('GYP_CONFIG_DIR', None)
      if home_dot_gyp:
        home_dot_gyp = os.path.expanduser(home_dot_gyp)
    if not home_dot_gyp:
      home_vars = ['HOME']
      if sys.platform in ('cygwin', 'win32'):
        home_vars.append('USERPROFILE')
      for home_var in home_vars:
        home = os.getenv(home_var)
        if home != None:
          home_dot_gyp = os.path.join(home, '.gyp')
          if not os.path.exists(home_dot_gyp):
            home_dot_gyp = None
          else:
            break
  else:
    home_dot_gyp = os.path.expanduser(options.config_dir)
  # A nonexistent config dir is treated the same as no config dir at all.
  if home_dot_gyp and not os.path.exists(home_dot_gyp):
    home_dot_gyp = None
  if not options.formats:
    # If no format was given on the command line, then check the env variable.
    generate_formats = []
    if options.use_environment:
      generate_formats = os.environ.get('GYP_GENERATORS', [])
    if generate_formats:
      generate_formats = re.split(r'[\s,]', generate_formats)
    if generate_formats:
      options.formats = generate_formats
    else:
      # Nothing in the variable, default based on platform.
      if sys.platform == 'darwin':
        options.formats = ['xcode']
      elif sys.platform in ('win32', 'cygwin'):
        options.formats = ['msvs']
      else:
        options.formats = ['make']
  if not options.generator_output and options.use_environment:
    g_o = os.environ.get('GYP_GENERATOR_OUTPUT')
    if g_o:
      options.generator_output = g_o
  options.parallel = not options.no_parallel
  for mode in options.debug:
    gyp.debug[mode] = 1
  # Do an extra check to avoid work when we're not debugging.
  if DEBUG_GENERAL in gyp.debug:
    DebugOutput(DEBUG_GENERAL, 'running with these options:')
    for option, value in sorted(options.__dict__.items()):
      if option[0] == '_':
        continue
      if isinstance(value, basestring):
        DebugOutput(DEBUG_GENERAL, " %s: '%s'", option, value)
      else:
        DebugOutput(DEBUG_GENERAL, " %s: %s", option, value)
  if not build_files:
    build_files = FindBuildFiles()
  if not build_files:
    raise GypError((usage + '\n\n%s: error: no build_file') %
                   (my_name, my_name))
  # TODO(mark): Chromium-specific hack!
  # For Chromium, the gyp "depth" variable should always be a relative path
  # to Chromium's top-level "src" directory. If no depth variable was set
  # on the command line, try to find a "src" directory by looking at the
  # absolute path to each build file's directory. The first "src" component
  # found will be treated as though it were the path used for --depth.
  if not options.depth:
    for build_file in build_files:
      build_file_dir = os.path.abspath(os.path.dirname(build_file))
      build_file_dir_components = build_file_dir.split(os.path.sep)
      components_len = len(build_file_dir_components)
      # Scan path components right-to-left for the deepest 'src' directory.
      for index in xrange(components_len - 1, -1, -1):
        if build_file_dir_components[index] == 'src':
          options.depth = os.path.sep.join(build_file_dir_components)
          break
        del build_file_dir_components[index]
      # If the inner loop found something, break without advancing to another
      # build file.
      if options.depth:
        break
    if not options.depth:
      raise GypError('Could not automatically locate src directory. This is'
                     'a temporary Chromium feature that will be removed. Use'
                     '--depth as a workaround.')
  # If toplevel-dir is not set, we assume that depth is the root of our source
  # tree.
  if not options.toplevel_dir:
    options.toplevel_dir = options.depth
  # -D on the command line sets variable defaults - D isn't just for define,
  # it's for default. Perhaps there should be a way to force (-F?) a
  # variable's value so that it can't be overridden by anything else.
  cmdline_default_variables = {}
  defines = []
  if options.use_environment:
    defines += ShlexEnv('GYP_DEFINES')
  if options.defines:
    defines += options.defines
  cmdline_default_variables = NameValueListToDict(defines)
  if DEBUG_GENERAL in gyp.debug:
    DebugOutput(DEBUG_GENERAL,
                "cmdline_default_variables: %s", cmdline_default_variables)
  # Set up includes.
  includes = []
  # If ~/.gyp/include.gypi exists, it'll be forcibly included into every
  # .gyp file that's loaded, before anything else is included.
  if home_dot_gyp != None:
    default_include = os.path.join(home_dot_gyp, 'include.gypi')
    if os.path.exists(default_include):
      print 'Using overrides found in ' + default_include
      includes.append(default_include)
  # Command-line --include files come after the default include.
  if options.includes:
    includes.extend(options.includes)
  # Generator flags should be prefixed with the target generator since they
  # are global across all generator runs.
  gen_flags = []
  if options.use_environment:
    gen_flags += ShlexEnv('GYP_GENERATOR_FLAGS')
  if options.generator_flags:
    gen_flags += options.generator_flags
  generator_flags = NameValueListToDict(gen_flags)
  if DEBUG_GENERAL in gyp.debug.keys():
    DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags)
  # Generate all requested formats (use a set in case we got one format request
  # twice)
  for format in set(options.formats):
    params = {'options': options,
              'build_files': build_files,
              'generator_flags': generator_flags,
              'cwd': os.getcwd(),
              'build_files_arg': build_files_arg,
              'gyp_binary': sys.argv[0],
              'home_dot_gyp': home_dot_gyp,
              'parallel': options.parallel,
              'root_targets': options.root_targets,
              'target_arch': cmdline_default_variables.get('target_arch', '')}
    # Start with the default variables from the command line.
    [generator, flat_list, targets, data] = Load(
        build_files, format, cmdline_default_variables, includes, options.depth,
        params, options.check, options.circular_check,
        options.duplicate_basename_check)
    # TODO(mark): Pass |data| for now because the generator needs a list of
    # build files that came in. In the future, maybe it should just accept
    # a list, and not the whole data dict.
    # NOTE: flat_list is the flattened dependency graph specifying the order
    # that targets may be built. Build systems that operate serially or that
    # need to have dependencies defined before dependents reference them should
    # generate targets in the order specified in flat_list.
    generator.GenerateOutput(flat_list, targets, data, params)
    if options.configs:
      valid_configs = targets[flat_list[0]]['configurations'].keys()
      for conf in options.configs:
        if conf not in valid_configs:
          raise GypError('Invalid config specified via --build: %s' % conf)
      generator.PerformBuild(data, options.configs, params)
  # Done
  return 0
def main(args):
  """Run gyp_main, mapping GypError to a stderr message and exit code 1."""
  try:
    return gyp_main(args)
  except GypError, e:
    sys.stderr.write("gyp: %s\n" % e)
    return 1
def script_main():
  """Entry point for setuptools-generated console_scripts, which call this
  function with no arguments; forwards sys.argv (minus argv[0]) to main()."""
  argv = sys.argv[1:]
  return main(argv)
# Running the module directly behaves like the console-script entry point.
if __name__ == '__main__':
  sys.exit(script_main())
|
neonicus/Paralax | refs/heads/master | tools/perf/scripts/python/sched-migration.py | 11215 | #!/usr/bin/python
#
# Cpu task migration overview toy
#
# Copyright (C) 2010 Frederic Weisbecker <fweisbec@gmail.com>
#
# perf script event handlers have been generated by perf script -g python
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import os
import sys
from collections import defaultdict
from UserList import UserList
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
sys.path.append('scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from SchedGui import *
# pid -> last-seen task comm (command name); pid 0 is the idle task.
threads = { 0 : "idle"}
def thread_name(pid):
	"""Format a pid as 'comm:pid' using the global thread-name table."""
	comm = threads[pid]
	return "%s:%d" % (comm, pid)
class RunqueueEventUnknown:
	"""Placeholder event used when the cause of a runqueue change is unknown."""
	def __repr__(self):
		return "unknown"
	@staticmethod
	def color():
		# No dedicated color for unknown events in the GUI timeline.
		return None
class RunqueueEventSleep:
	"""Runqueue event: a task left the runqueue by going to sleep."""
	def __init__(self, sleeper):
		# pid of the task that blocked.
		self.sleeper = sleeper
	def __repr__(self):
		return "%s gone to sleep" % thread_name(self.sleeper)
	@staticmethod
	def color():
		# Blue marks sleep events in the GUI timeline.
		return (0, 0, 255)
class RunqueueEventWakeup:
	"""Runqueue event: a sleeping task was woken up."""
	def __init__(self, wakee):
		# pid of the task that woke up.
		self.wakee = wakee
	def __repr__(self):
		return "%s woke up" % thread_name(self.wakee)
	@staticmethod
	def color():
		# Yellow marks wakeup events in the GUI timeline.
		return (255, 255, 0)
class RunqueueEventFork:
	"""Runqueue event: a newly forked task entered the runqueue."""
	def __init__(self, child):
		# pid of the freshly forked task.
		self.child = child
	def __repr__(self):
		return "new forked task %s" % thread_name(self.child)
	@staticmethod
	def color():
		# Green marks fork events in the GUI timeline.
		return (0, 255, 0)
class RunqueueMigrateIn:
	"""Runqueue event: a task migrated onto this CPU's runqueue."""
	def __init__(self, new):
		# pid of the task that arrived on this runqueue.
		self.new = new
	def __repr__(self):
		return "task migrated in %s" % thread_name(self.new)
	@staticmethod
	def color():
		# Cyan-ish marks migrate-in events in the GUI timeline.
		return (0, 240, 255)
class RunqueueMigrateOut:
	"""Runqueue event: a task migrated away from this CPU's runqueue."""
	def __init__(self, old):
		# pid of the task that left this runqueue.
		self.old = old
	def __repr__(self):
		return "task migrated out %s" % thread_name(self.old)
	@staticmethod
	def color():
		# Magenta marks migrate-out events in the GUI timeline.
		return (255, 0, 255)
class RunqueueSnapshot:
	"""Immutable view of one CPU's runqueue: a tuple of runnable pids (always
	including 0, the idle task) plus the event that produced this state.
	State-changing operations return a new snapshot, or self when nothing
	changed, so callers can detect no-ops with an identity check."""
	def __init__(self, tasks = [0], event = RunqueueEventUnknown()):
		# NOTE(review): mutable/shared defaults are evaluated once, but both
		# are safe here: tasks is immediately copied into a tuple and the
		# default event object holds no state.
		self.tasks = tuple(tasks)
		self.event = event
	def sched_switch(self, prev, prev_state, next):
		event = RunqueueEventUnknown()
		# prev stayed runnable and both tasks are already tracked: no change.
		if taskState(prev_state) == "R" and next in self.tasks \
			and prev in self.tasks:
			return self
		if taskState(prev_state) != "R":
			event = RunqueueEventSleep(prev)
		next_tasks = list(self.tasks[:])
		if prev in self.tasks:
			if taskState(prev_state) != "R":
				next_tasks.remove(prev)
		elif taskState(prev_state) == "R":
			next_tasks.append(prev)
		if next not in next_tasks:
			next_tasks.append(next)
		return RunqueueSnapshot(next_tasks, event)
	def migrate_out(self, old):
		if old not in self.tasks:
			return self
		next_tasks = [task for task in self.tasks if task != old]
		return RunqueueSnapshot(next_tasks, RunqueueMigrateOut(old))
	def __migrate_in(self, new, event):
		# Task already present: only record the triggering event.
		if new in self.tasks:
			self.event = event
			return self
		next_tasks = self.tasks[:] + tuple([new])
		return RunqueueSnapshot(next_tasks, event)
	def migrate_in(self, new):
		return self.__migrate_in(new, RunqueueMigrateIn(new))
	def wake_up(self, new):
		return self.__migrate_in(new, RunqueueEventWakeup(new))
	def wake_up_new(self, new):
		return self.__migrate_in(new, RunqueueEventFork(new))
	def load(self):
		""" Provide the number of tasks on the runqueue.
		    Don't count idle"""
		return len(self.tasks) - 1
	def __repr__(self):
		ret = self.tasks.__repr__()
		# NOTE(review): origin_tostring() is not defined on this class or
		# anywhere in this file; calling repr() on a snapshot raises
		# AttributeError -- confirm upstream intent before relying on it.
		ret += self.origin_tostring()
		return ret
class TimeSlice:
	"""State of all runqueues within one [start, end) time interval.
	Each slice starts as a copy of the previous one; event handlers mutate
	it and append it to the global slice list when something changed."""
	def __init__(self, start, prev):
		self.start = start
		self.prev = prev
		self.end = start
		# cpus that triggered the event
		self.event_cpus = []
		if prev is not None:
			self.total_load = prev.total_load
			self.rqs = prev.rqs.copy()
		else:
			self.rqs = defaultdict(RunqueueSnapshot)
			self.total_load = 0
	def __update_total_load(self, old_rq, new_rq):
		diff = new_rq.load() - old_rq.load()
		self.total_load += diff
	def sched_switch(self, ts_list, prev, prev_state, next, cpu):
		old_rq = self.prev.rqs[cpu]
		new_rq = old_rq.sched_switch(prev, prev_state, next)
		# Identity means the snapshot was a no-op: record nothing.
		if old_rq is new_rq:
			return
		self.rqs[cpu] = new_rq
		self.__update_total_load(old_rq, new_rq)
		ts_list.append(self)
		self.event_cpus = [cpu]
	def migrate(self, ts_list, new, old_cpu, new_cpu):
		if old_cpu == new_cpu:
			return
		old_rq = self.prev.rqs[old_cpu]
		out_rq = old_rq.migrate_out(new)
		self.rqs[old_cpu] = out_rq
		self.__update_total_load(old_rq, out_rq)
		new_rq = self.prev.rqs[new_cpu]
		in_rq = new_rq.migrate_in(new)
		self.rqs[new_cpu] = in_rq
		self.__update_total_load(new_rq, in_rq)
		ts_list.append(self)
		# Source cpu is only an event cpu if the task really left it.
		if old_rq is not out_rq:
			self.event_cpus.append(old_cpu)
		self.event_cpus.append(new_cpu)
	def wake_up(self, ts_list, pid, cpu, fork):
		old_rq = self.prev.rqs[cpu]
		if fork:
			new_rq = old_rq.wake_up_new(pid)
		else:
			new_rq = old_rq.wake_up(pid)
		if new_rq is old_rq:
			return
		self.rqs[cpu] = new_rq
		self.__update_total_load(old_rq, new_rq)
		ts_list.append(self)
		self.event_cpus = [cpu]
	def next(self, t):
		# Close this slice at time t and open its successor.
		self.end = t
		return TimeSlice(t, self)
class TimeSliceList(UserList):
	"""Ordered list of TimeSlice objects plus search helpers and GUI
	painting callbacks (the GUI window is attached via set_root_win)."""
	def __init__(self, arg = []):
		# NOTE(review): mutable default list; safe only as long as callers
		# never mutate a default-constructed instance's shared backing list.
		self.data = arg
	def get_time_slice(self, ts):
		# Start a fresh slice chain on first use, otherwise extend the tail.
		if len(self.data) == 0:
			slice = TimeSlice(ts, TimeSlice(-1, None))
		else:
			slice = self.data[-1].next(ts)
		return slice
	def find_time_slice(self, ts):
		"""Binary-search for the slice containing timestamp ts; -1 if none."""
		start = 0
		end = len(self.data)
		found = -1
		searching = True
		while searching:
			if start == end or start == end - 1:
				searching = False
			# Python 2 integer division keeps i usable as an index.
			i = (end + start) / 2
			if self.data[i].start <= ts and self.data[i].end >= ts:
				found = i
				end = i
				continue
			if self.data[i].end < ts:
				start = i
			elif self.data[i].start > ts:
				end = i
		return found
	def set_root_win(self, win):
		self.root_win = win
	def mouse_down(self, cpu, t):
		"""GUI callback: show a summary of cpu's runqueue at time t."""
		idx = self.find_time_slice(t)
		if idx == -1:
			return
		ts = self[idx]
		rq = ts.rqs[cpu]
		raw = "CPU: %d\n" % cpu
		raw += "Last event : %s\n" % rq.event.__repr__()
		raw += "Timestamp : %d.%06d\n" % (ts.start / (10 ** 9), (ts.start % (10 ** 9)) / 1000)
		raw += "Duration : %6d us\n" % ((ts.end - ts.start) / (10 ** 6))
		raw += "Load = %d\n" % rq.load()
		for t in rq.tasks:
			raw += "%s \n" % thread_name(t)
		self.root_win.update_summary(raw)
	def update_rectangle_cpu(self, slice, cpu):
		"""Paint one cpu/slice cell; redder means a larger share of load."""
		rq = slice.rqs[cpu]
		if slice.total_load != 0:
			load_rate = rq.load() / float(slice.total_load)
		else:
			load_rate = 0
		red_power = int(0xff - (0xff * load_rate))
		color = (0xff, red_power, red_power)
		top_color = None
		if cpu in slice.event_cpus:
			top_color = rq.event.color()
		self.root_win.paint_rectangle_zone(cpu, color, top_color, slice.start, slice.end)
	def fill_zone(self, start, end):
		"""Repaint every slice overlapping the [start, end] window."""
		i = self.find_time_slice(start)
		if i == -1:
			return
		for i in xrange(i, len(self.data)):
			timeslice = self.data[i]
			if timeslice.start > end:
				return
			for cpu in timeslice.rqs:
				self.update_rectangle_cpu(timeslice, cpu)
	def interval(self):
		"""Return the (first start, last end) time span covered."""
		if len(self.data) == 0:
			return (0, 0)
		return (self.data[0].start, self.data[-1].end)
	def nr_rectangles(self):
		"""Return the highest cpu number seen in the final slice."""
		last_ts = self.data[-1]
		max_cpu = 0
		for cpu in last_ts.rqs:
			if cpu > max_cpu:
				max_cpu = cpu
		return max_cpu
class SchedEventProxy:
	"""Receives raw perf sched events and folds them into a TimeSliceList,
	tracking the task believed to be running on each cpu so that missed
	traces can be detected and reported."""
	def __init__(self):
		# Per-cpu pid of the current task; -1 means not yet known.
		self.current_tsk = defaultdict(lambda : -1)
		self.timeslices = TimeSliceList()
	def sched_switch(self, headers, prev_comm, prev_pid, prev_prio, prev_state,
			 next_comm, next_pid, next_prio):
		""" Ensure the task we sched out this cpu is really the one
		we logged. Otherwise we may have missed traces """
		on_cpu_task = self.current_tsk[headers.cpu]
		if on_cpu_task != -1 and on_cpu_task != prev_pid:
			print "Sched switch event rejected ts: %s cpu: %d prev: %s(%d) next: %s(%d)" % \
				(headers.ts_format(), headers.cpu, prev_comm, prev_pid, next_comm, next_pid)
		# Refresh the pid -> comm table and the per-cpu current task.
		threads[prev_pid] = prev_comm
		threads[next_pid] = next_comm
		self.current_tsk[headers.cpu] = next_pid
		ts = self.timeslices.get_time_slice(headers.ts())
		ts.sched_switch(self.timeslices, prev_pid, prev_state, next_pid, headers.cpu)
	def migrate(self, headers, pid, prio, orig_cpu, dest_cpu):
		ts = self.timeslices.get_time_slice(headers.ts())
		ts.migrate(self.timeslices, pid, orig_cpu, dest_cpu)
	def wake_up(self, headers, comm, pid, success, target_cpu, fork):
		# Failed wakeups do not change any runqueue.
		if success == 0:
			return
		ts = self.timeslices.get_time_slice(headers.ts())
		ts.wake_up(self.timeslices, pid, target_cpu, fork)
def trace_begin():
	"""perf hook: called once before any event; create the global parser."""
	global parser
	parser = SchedEventProxy()
def trace_end():
	"""perf hook: called after all events; launch the wx migration GUI."""
	app = wx.App(False)
	timeslices = parser.timeslices
	frame = RootFrame(timeslices, "Migration")
	app.MainLoop()
# perf hooks for sched tracepoints this tool does not visualize; they must
# exist so perf's Python trace engine has a handler for every event type.
def sched__sched_stat_runtime(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	comm, pid, runtime, vruntime):
	pass
def sched__sched_stat_iowait(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	comm, pid, delay):
	pass
def sched__sched_stat_sleep(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	comm, pid, delay):
	pass
def sched__sched_stat_wait(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	comm, pid, delay):
	pass
def sched__sched_process_fork(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	parent_comm, parent_pid, child_comm, child_pid):
	pass
def sched__sched_process_wait(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	comm, pid, prio):
	pass
def sched__sched_process_exit(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	comm, pid, prio):
	pass
def sched__sched_process_free(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	comm, pid, prio):
	pass
# Handlers that actually drive the visualization: rebuild an EventHeaders
# object and forward migrate / switch / wakeup events to the global parser.
def sched__sched_migrate_task(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	comm, pid, prio, orig_cpu,
	dest_cpu):
	headers = EventHeaders(common_cpu, common_secs, common_nsecs,
		common_pid, common_comm)
	parser.migrate(headers, pid, prio, orig_cpu, dest_cpu)
def sched__sched_switch(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	prev_comm, prev_pid, prev_prio, prev_state,
	next_comm, next_pid, next_prio):
	headers = EventHeaders(common_cpu, common_secs, common_nsecs,
		common_pid, common_comm)
	parser.sched_switch(headers, prev_comm, prev_pid, prev_prio, prev_state,
		next_comm, next_pid, next_prio)
def sched__sched_wakeup_new(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	comm, pid, prio, success,
	target_cpu):
	headers = EventHeaders(common_cpu, common_secs, common_nsecs,
		common_pid, common_comm)
	# Final argument fork=1: this wakeup comes from a newly forked task.
	parser.wake_up(headers, comm, pid, success, target_cpu, 1)
def sched__sched_wakeup(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	comm, pid, prio, success,
	target_cpu):
	headers = EventHeaders(common_cpu, common_secs, common_nsecs,
		common_pid, common_comm)
	# Final argument fork=0: ordinary wakeup of an existing task.
	parser.wake_up(headers, comm, pid, success, target_cpu, 0)
# Remaining tracepoints this tool ignores, plus the catch-all handler perf
# calls for any event without a dedicated function above.
def sched__sched_wait_task(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	comm, pid, prio):
	pass
def sched__sched_kthread_stop_ret(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	ret):
	pass
def sched__sched_kthread_stop(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	comm, pid):
	pass
def trace_unhandled(event_name, context, common_cpu, common_secs, common_nsecs,
		common_pid, common_comm):
	pass
|
sternoru/goscalecms | refs/heads/master | goscale/plugins/feeds/migrations/0007_auto__add_field_tumblr_disqus__add_field_blogger_disqus__add_field_fee.py | 1 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply: add a nullable 'disqus' CharField(50) to the Tumblr,
        Blogger and Feed plugin tables (the Disqus shortname per plugin)."""
        # Adding field 'Tumblr.disqus'
        db.add_column('cmsplugin_tumblr', 'disqus',
                      self.gf('django.db.models.fields.CharField')(max_length=50, null=True, blank=True),
                      keep_default=False)
        # Adding field 'Blogger.disqus'
        db.add_column('cmsplugin_blogger', 'disqus',
                      self.gf('django.db.models.fields.CharField')(max_length=50, null=True, blank=True),
                      keep_default=False)
        # Adding field 'Feed.disqus'
        db.add_column('cmsplugin_feed', 'disqus',
                      self.gf('django.db.models.fields.CharField')(max_length=50, null=True, blank=True),
                      keep_default=False)
def backwards(self, orm):
# Deleting field 'Tumblr.disqus'
db.delete_column('cmsplugin_tumblr', 'disqus')
# Deleting field 'Blogger.disqus'
db.delete_column('cmsplugin_blogger', 'disqus')
# Deleting field 'Feed.disqus'
db.delete_column('cmsplugin_feed', 'disqus')
models = {
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 1, 15, 0, 0)'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
'feeds.blogger': {
'Meta': {'object_name': 'Blogger', 'db_table': "'cmsplugin_blogger'"},
'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'disqus': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'external_links': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'page_size': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '10'}),
'posts': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['goscale.Post']", 'symmetrical': 'False'}),
'show_date': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'template': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '250'})
},
'feeds.feed': {
'Meta': {'object_name': 'Feed', 'db_table': "'cmsplugin_feed'"},
'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'disqus': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'external_links': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'page_size': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '10'}),
'posts': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['goscale.Post']", 'symmetrical': 'False'}),
'show_date': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'template': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '250'})
},
'feeds.tumblr': {
'Meta': {'object_name': 'Tumblr', 'db_table': "'cmsplugin_tumblr'"},
'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'disqus': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'external_links': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'page_size': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '10'}),
'posts': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['goscale.Post']", 'symmetrical': 'False'}),
'show_date': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'template': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '250'})
},
'goscale.post': {
'Meta': {'object_name': 'Post'},
'attributes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'categories': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '250', 'null': 'True', 'blank': 'True'}),
'content_type': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '250', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'db_index': 'True', 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'permalink': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '250', 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '250', 'null': 'True', 'blank': 'True'}),
'summary': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['feeds'] |
slohse/ansible | refs/heads/devel | lib/ansible/module_utils/net_tools/__init__.py | 12133432 | |
ebigelow/LOTlib | refs/heads/master | LOTlib/Performance/__init__.py | 12133432 | |
ESOedX/edx-platform | refs/heads/master | cms/lib/__init__.py | 12133432 | |
izrik/wodehouse | refs/heads/master | tests/macros/try_/__init__.py | 12133432 | |
adtidiane/Nouabook | refs/heads/master | elections/management/commands/__init__.py | 12133432 | |
tux-00/ansible | refs/heads/devel | test/units/parsing/yaml/test_dumper.py | 28 | # coding: utf-8
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import io
import yaml
try:
from _yaml import ParserError
except ImportError:
from yaml.parser import ParserError
from ansible.compat.tests import unittest
from ansible.parsing import vault
from ansible.parsing.yaml import dumper, objects
from ansible.parsing.yaml.loader import AnsibleLoader
from units.mock.yaml_helper import YamlTestUtils
class TestAnsibleDumper(unittest.TestCase, YamlTestUtils):
    """Round-trip test for AnsibleDumper.

    An AnsibleVaultEncryptedUnicode value dumped with AnsibleDumper must
    decrypt back to the original plaintext when re-loaded with
    AnsibleLoader using the same vault password.
    """

    def setUp(self):
        # Fixed password shared by the dumper-side vault and the loader.
        self.vault_password = "hunter42"
        self.good_vault = vault.VaultLib(self.vault_password)
        self.vault = self.good_vault
        self.stream = self._build_stream()
        self.dumper = dumper.AnsibleDumper

    def _build_stream(self, yaml_text=None):
        """Return an in-memory text stream seeded with *yaml_text* (or empty)."""
        text = yaml_text or u''
        stream = io.StringIO(text)
        return stream

    def _loader(self, stream):
        """Build an AnsibleLoader bound to the shared vault password."""
        return AnsibleLoader(stream, vault_password=self.vault_password)

    def test(self):
        plaintext = 'This is a string we are going to encrypt.'
        avu = objects.AnsibleVaultEncryptedUnicode.from_plaintext(plaintext, vault=self.vault)

        yaml_out = self._dump_string(avu, dumper=self.dumper)
        stream = self._build_stream(yaml_out)
        loader = self._loader(stream)

        data_from_yaml = loader.get_single_data()

        # assertEqual, not the deprecated assertEquals alias.
        self.assertEqual(plaintext, data_from_yaml.data)
|
SephVelut/codecombat | refs/heads/master | scripts/devSetup/which.py | 79 | __author__ = 'root'
#copied from python3
import os
import sys
def which(cmd, mode=os.F_OK | os.X_OK, path=None):
    """Given a command, mode, and a PATH string, return the path which
    conforms to the given mode on the PATH, or None if there is no such
    file.

    `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
    of os.environ.get("PATH"), or can be overridden with a custom search
    path.
    """
    # Check that a given file can be accessed with the correct mode.
    # Additionally check that `fn` is not a directory, as on Windows
    # directories pass the os.access check.
    def _access_check(fn, mode):
        return (os.path.exists(fn) and os.access(fn, mode)
                and not os.path.isdir(fn))

    # If we're given a path with a directory part, look it up directly rather
    # than referring to PATH directories. This includes checking relative to the
    # current directory, e.g. ./script
    if os.path.dirname(cmd):
        if _access_check(cmd, mode):
            return cmd
        return None

    if path is None:
        path = os.environ.get("PATH", os.defpath)
    if not path:
        return None
    path = path.split(os.pathsep)

    if sys.platform == "win32":
        # The current directory takes precedence on Windows.
        if os.curdir not in path:  # PEP 8: use `not in`, not `not x in y`
            path.insert(0, os.curdir)

        # PATHEXT is necessary to check on Windows.
        pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
        # See if the given file matches any of the expected path extensions.
        # This will allow us to short circuit when given "python.exe".
        # If it does match, only test that one, otherwise we have to try
        # others.
        if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
            files = [cmd]
        else:
            files = [cmd + ext for ext in pathext]
    else:
        # On other platforms you don't have things like PATHEXT to tell you
        # what file suffixes are executable, so just pass on cmd as-is.
        files = [cmd]

    seen = set()
    # `directory` (not `dir`) so the builtin dir() is not shadowed.
    for directory in path:
        normdir = os.path.normcase(directory)
        if normdir not in seen:  # skip duplicated PATH entries
            seen.add(normdir)
            for thefile in files:
                name = os.path.join(directory, thefile)
                if _access_check(name, mode):
                    return name
    return None
QijunPan/ansible | refs/heads/devel | lib/ansible/modules/network/iosxr/__init__.py | 12133432 | |
xwolf12/django | refs/heads/master | tests/datetimes/__init__.py | 12133432 | |
mapsme/omim | refs/heads/master | tools/python/opentable_restaurants.py | 18 | #! /usr/bin/env python2.7
# coding: utf-8
from __future__ import print_function
import argparse
import base64
import copy
import json
import logging
import os
import re
import sys
import urllib2
from datetime import datetime
# Configure root logging once at import time: DEBUG level, timestamped lines.
logging.basicConfig(level=logging.DEBUG, format='[%(asctime)s] %(levelname)s: %(message)s')
class OpentableDownloaderError(Exception):
    """Raised when the OpenTable API returns an unexpected response."""
class OpentableDownloader(object):
    """Fetches restaurant listings from the OpenTable sync API.

    Python 2 only (urllib2). Authenticates via the OAuth2
    client-credentials grant and pages through /sync/listings, writing
    one JSON object per line to *opentable_filename*.
    """

    def __init__(self, login, password, opentable_filename, tsv_filename=None):
        # OAuth2 client id and secret.
        self.login = login
        self.password = password
        # Token dict from the OAuth endpoint; None until _get_token() succeeds.
        self.token = None
        self.opentable_filename = opentable_filename
        # NOTE(review): tsv_filename is stored but never used by this class;
        # TSV output goes through the module-level make_tsv() instead.
        self.tsv_filename = tsv_filename
        # TODO(mgsergio): Check if token is actual in functions.
        self._get_token()

    def download(self):
        """Download all listings, appending each item as one JSON line."""
        headers = self._add_auth_header({'Content-Type': 'application/json'})
        url = 'https://platform.opentable.com/sync/listings'

        with open(self.opentable_filename, 'w') as f:
            offset = 0
            while True:
                request = urllib2.Request(url + '?offset={}'.format(offset), headers=headers)
                logging.debug('Fetching data with headers %s from %s',
                              str(headers), request.get_full_url())
                resp = urllib2.urlopen(request)
                # TODO(mgsergio): Handle exceptions
                data = json.loads(resp.read())
                for rest in data['items']:
                    print(json.dumps(rest), file=f)
                # Pagination bookkeeping: trust the server-reported offset,
                # then advance by the number of items actually received.
                total_items = int(data['total_items'])
                offset = int(data['offset'])
                items_count = len(data['items'])
                if total_items <= offset + items_count:
                    break
                offset += items_count

    def _get_token(self):
        """Obtain an OAuth2 access token via the client-credentials grant.

        Raises OpentableDownloaderError on a non-200 response.
        """
        url = 'https://oauth.opentable.com/api/v2/oauth/token?grant_type=client_credentials'
        # self.token is still None here, so _add_auth_header sends Basic auth.
        headers = self._add_auth_header({})
        request = urllib2.Request(url, headers=headers)
        logging.debug('Fetching token with headers %s', str(headers))
        resp = urllib2.urlopen(request)
        # TODO(mgsergio): Handle exceptions
        if resp.getcode() != 200:
            raise OpentableDownloaderError("Cant't get token. Response: {}".format(resp.read()))
        self.token = json.loads(resp.read())
        logging.debug('Token is %s', self.token)

    def _add_auth_header(self, headers):
        """Add an Authorization entry to *headers* (mutated in place and returned).

        Before a token exists: HTTP Basic with base64(login:password).
        Afterwards: the bearer token from the OAuth response.
        """
        if self.token is None:
            key = base64.b64encode('{}:{}'.format(self.login, self.password))
            headers['Authorization'] = 'Basic {}'.format(key)
        else:
            headers['Authorization'] = '{} {}'.format(self.token['token_type'],
                                                      self.token['access_token'])
        return headers
def make_tsv(data_file, output_file):
    """Convert one-JSON-object-per-line restaurant records into TSV rows.

    Columns: rid, latitude, longitude, name, address, reservation_url,
    phone_number. Python 2 only: relies on the `unicode` builtin.
    """
    for rest in data_file:
        rest = json.loads(rest)
        try:
            address = ' '.join([rest['address'], rest['city'], rest['country']])
            # Some addresses contain \t and maybe other spaces.
            address = re.sub(r'\s', ' ', address)
        except TypeError:
            # A None address component makes ' '.join raise TypeError;
            # fall back to an empty address rather than dropping the row.
            address = ''
        row = '\t'.join(map(unicode, [rest['rid'], rest['latitude'], rest['longitude'],
                                      rest['name'], address, rest['reservation_url'],
                                      rest['phone_number']]))
        print(row.encode('utf-8'), file=output_file)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Downloads opentable data.')
    parser.add_argument('-d', '--download', action='store_true', help='Download data')
    # nargs='?' with const='' means a bare `--tsv` selects stdout output.
    parser.add_argument('--tsv', type=str, nargs='?', const='',
                        help='A file to put data into, stdout if value is empty '
                        'If ommited, no tsv data is generated')
    parser.add_argument('--opentable_data', type=str, help='Path to opentable data file')
    # TODO(mgsergio): Allow config instead.
    parser.add_argument('--client', required=True, help='Opentable client id')
    parser.add_argument('--secret', required=True, help="Opentable client's secret")

    args = parser.parse_args(sys.argv[1:])

    if args.download:
        logging.info('Downloading')
        loader = OpentableDownloader(args.client, args.secret, args.opentable_data)
        loader.download()
    if args.tsv is not None:
        # Empty string (bare --tsv) means stdout; anything else is a file path.
        data = open(args.opentable_data)
        tsv = open(args.tsv, 'w') if args.tsv else sys.stdout
        make_tsv(data, tsv)
|
DDEFISHER/servo | refs/heads/master | tests/wpt/web-platform-tests/tools/py/testing/path/common.py | 162 | import py
import sys
class CommonFSTests(object):
    """Shared path-API tests, run against each py.path backend through the
    ``path1`` fixture (which points at a tree built by ``setuptestfs``)."""

    def test_constructor_equality(self, path1):
        p = path1.__class__(path1)
        assert p == path1

    def test_eq_nonstring(self, path1):
        p1 = path1.join('sampledir')
        p2 = path1.join('sampledir')
        assert p1 == p2

    def test_new_identical(self, path1):
        assert path1 == path1.new()

    def test_join(self, path1):
        p = path1.join('sampledir')
        strp = str(p)
        assert strp.endswith('sampledir')
        assert strp.startswith(str(path1))

    def test_join_normalized(self, path1):
        newpath = path1.join(path1.sep+'sampledir')
        strp = str(newpath)
        assert strp.endswith('sampledir')
        assert strp.startswith(str(path1))
        newpath = path1.join((path1.sep*2) + 'sampledir')
        strp = str(newpath)
        assert strp.endswith('sampledir')
        assert strp.startswith(str(path1))

    def test_join_noargs(self, path1):
        newpath = path1.join()
        assert path1 == newpath

    def test_add_something(self, path1):
        p = path1.join('sample')
        p = p + 'dir'
        assert p.check()
        assert p.exists()
        assert p.isdir()
        assert not p.isfile()

    def test_parts(self, path1):
        newpath = path1.join('sampledir', 'otherfile')
        par = newpath.parts()[-3:]
        assert par == [path1, path1.join('sampledir'), newpath]
        revpar = newpath.parts(reverse=True)[:3]
        assert revpar == [newpath, path1.join('sampledir'), path1]

    def test_common(self, path1):
        other = path1.join('sampledir')
        x = other.common(path1)
        assert x == path1

    #def test_parents_nonexisting_file(self, path1):
    #    newpath = path1 / 'dirnoexist' / 'nonexisting file'
    #    par = list(newpath.parents())
    #    assert par[:2] == [path1 / 'dirnoexist', path1]

    def test_basename_checks(self, path1):
        newpath = path1.join('sampledir')
        assert newpath.check(basename='sampledir')
        assert newpath.check(notbasename='xyz')
        assert newpath.basename == 'sampledir'

    def test_basename(self, path1):
        newpath = path1.join('sampledir')
        assert newpath.check(basename='sampledir')
        # was `assert newpath.basename, 'sampledir'` -- an always-true
        # assert-with-message; compare explicitly instead.
        assert newpath.basename == 'sampledir'

    def test_dirname(self, path1):
        newpath = path1.join('sampledir')
        assert newpath.dirname == str(path1)

    def test_dirpath(self, path1):
        newpath = path1.join('sampledir')
        assert newpath.dirpath() == path1

    def test_dirpath_with_args(self, path1):
        newpath = path1.join('sampledir')
        assert newpath.dirpath('x') == path1.join('x')

    def test_newbasename(self, path1):
        newpath = path1.join('samplefile')
        newbase = newpath.new(basename="samplefile2")
        assert newbase.basename == "samplefile2"
        assert newbase.dirpath() == newpath.dirpath()

    def test_not_exists(self, path1):
        assert not path1.join('does_not_exist').check()
        assert path1.join('does_not_exist').check(exists=0)

    def test_exists(self, path1):
        assert path1.join("samplefile").check()
        assert path1.join("samplefile").check(exists=1)
        assert path1.join("samplefile").exists()
        assert path1.join("samplefile").isfile()
        assert not path1.join("samplefile").isdir()

    def test_dir(self, path1):
        #print repr(path1.join("sampledir"))
        assert path1.join("sampledir").check(dir=1)
        assert path1.join('samplefile').check(notdir=1)
        assert not path1.join("samplefile").check(dir=1)
        assert path1.join("samplefile").exists()
        assert not path1.join("samplefile").isdir()
        assert path1.join("samplefile").isfile()

    def test_fnmatch_file(self, path1):
        assert path1.join("samplefile").check(fnmatch='s*e')
        assert path1.join("samplefile").fnmatch('s*e')
        assert not path1.join("samplefile").fnmatch('s*x')
        assert not path1.join("samplefile").check(fnmatch='s*x')

    #def test_fnmatch_dir(self, path1):
    #    pattern = path1.sep.join(['s*file'])
    #    sfile = path1.join("samplefile")
    #    assert sfile.check(fnmatch=pattern)

    def test_relto(self, path1):
        l = path1.join("sampledir", "otherfile")
        assert l.relto(path1) == l.sep.join(["sampledir", "otherfile"])
        assert l.check(relto=path1)
        assert path1.check(notrelto=l)
        assert not path1.check(relto=l)

    def test_bestrelpath(self, path1):
        curdir = path1
        sep = curdir.sep
        s = curdir.bestrelpath(curdir)
        assert s == "."
        s = curdir.bestrelpath(curdir.join("hello", "world"))
        assert s == "hello" + sep + "world"
        s = curdir.bestrelpath(curdir.dirpath().join("sister"))
        assert s == ".." + sep + "sister"
        assert curdir.bestrelpath(curdir.dirpath()) == ".."
        assert curdir.bestrelpath("hello") == "hello"

    def test_relto_not_relative(self, path1):
        l1 = path1.join("bcde")
        l2 = path1.join("b")
        assert not l1.relto(l2)
        assert not l2.relto(l1)

    @py.test.mark.xfail("sys.platform.startswith('java')")
    def test_listdir(self, path1):
        l = path1.listdir()
        assert path1.join('sampledir') in l
        assert path1.join('samplefile') in l
        py.test.raises(py.error.ENOTDIR,
                       "path1.join('samplefile').listdir()")

    def test_listdir_fnmatchstring(self, path1):
        l = path1.listdir('s*dir')
        assert len(l)
        # was `assert l[0], path1.join('sampledir')` -- an always-true
        # assert-with-message; compare explicitly instead.
        assert l[0] == path1.join('sampledir')

    def test_listdir_filter(self, path1):
        l = path1.listdir(lambda x: x.check(dir=1))
        assert path1.join('sampledir') in l
        assert not path1.join('samplefile') in l

    def test_listdir_sorted(self, path1):
        l = path1.listdir(lambda x: x.check(basestarts="sample"), sort=True)
        assert path1.join('sampledir') == l[0]
        assert path1.join('samplefile') == l[1]
        assert path1.join('samplepickle') == l[2]

    def test_visit_nofilter(self, path1):
        l = []
        for i in path1.visit():
            l.append(i.relto(path1))
        assert "sampledir" in l
        assert path1.sep.join(["sampledir", "otherfile"]) in l

    def test_visit_norecurse(self, path1):
        l = []
        for i in path1.visit(None, lambda x: x.basename != "sampledir"):
            l.append(i.relto(path1))
        assert "sampledir" in l
        assert not path1.sep.join(["sampledir", "otherfile"]) in l

    def test_visit_filterfunc_is_string(self, path1):
        l = []
        for i in path1.visit('*dir'):
            l.append(i.relto(path1))
        # was `assert len(l), 2` -- an always-true assert-with-message;
        # exactly sampledir and otherdir should match '*dir'.
        assert len(l) == 2
        assert "sampledir" in l
        assert "otherdir" in l

    @py.test.mark.xfail("sys.platform.startswith('java')")
    def test_visit_ignore(self, path1):
        p = path1.join('nonexisting')
        assert list(p.visit(ignore=py.error.ENOENT)) == []

    def test_visit_endswith(self, path1):
        l = []
        for i in path1.visit(lambda x: x.check(endswith="file")):
            l.append(i.relto(path1))
        assert path1.sep.join(["sampledir", "otherfile"]) in l
        assert "samplefile" in l

    # renamed from test_endswith: a second test_endswith below silently
    # shadowed this one, so it never ran.
    def test_endswith_checks(self, path1):
        assert path1.check(notendswith='.py')
        x = path1.join('samplefile')
        assert x.check(endswith='file')

    def test_cmp(self, path1):
        # was rebinding `path1` before building the second path, which made
        # it a child of samplefile rather than its sibling; use fresh names.
        p1 = path1.join('samplefile')
        p2 = path1.join('samplefile2')
        assert (p1 < p2) == ('samplefile' < 'samplefile2')
        assert not (p1 < p1)

    def test_simple_read(self, path1):
        x = path1.join('samplefile').read('r')
        assert x == 'samplefile\n'

    def test_join_div_operator(self, path1):
        newpath = path1 / '/sampledir' / '/test//'
        newpath2 = path1.join('sampledir', 'test')
        assert newpath == newpath2

    def test_ext(self, path1):
        newpath = path1.join('sampledir.ext')
        assert newpath.ext == '.ext'
        newpath = path1.join('sampledir')
        assert not newpath.ext

    def test_purebasename(self, path1):
        newpath = path1.join('samplefile.py')
        assert newpath.purebasename == 'samplefile'

    def test_multiple_parts(self, path1):
        newpath = path1.join('samplefile.py')
        dirname, purebasename, basename, ext = newpath._getbyspec(
            'dirname,purebasename,basename,ext')
        assert str(path1).endswith(dirname)  # be careful with win32 'drive'
        assert purebasename == 'samplefile'
        assert basename == 'samplefile.py'
        assert ext == '.py'

    def test_dotted_name_ext(self, path1):
        newpath = path1.join('a.b.c')
        ext = newpath.ext
        assert ext == '.c'
        assert newpath.ext == '.c'

    def test_newext(self, path1):
        newpath = path1.join('samplefile.py')
        newext = newpath.new(ext='.txt')
        assert newext.basename == "samplefile.txt"
        assert newext.purebasename == "samplefile"

    def test_readlines(self, path1):
        fn = path1.join('samplefile')
        contents = fn.readlines()
        assert contents == ['samplefile\n']

    def test_readlines_nocr(self, path1):
        fn = path1.join('samplefile')
        contents = fn.readlines(cr=0)
        assert contents == ['samplefile', '']

    def test_file(self, path1):
        assert path1.join('samplefile').check(file=1)

    def test_not_file(self, path1):
        assert not path1.join("sampledir").check(file=1)
        assert path1.join("sampledir").check(file=0)

    def test_non_existent(self, path1):
        assert path1.join("sampledir.nothere").check(dir=0)
        assert path1.join("sampledir.nothere").check(file=0)
        assert path1.join("sampledir.nothere").check(notfile=1)
        assert path1.join("sampledir.nothere").check(notdir=1)
        assert path1.join("sampledir.nothere").check(notexists=1)
        assert not path1.join("sampledir.nothere").check(notfile=0)

    #    pattern = path1.sep.join(['s*file'])
    #    sfile = path1.join("samplefile")
    #    assert sfile.check(fnmatch=pattern)

    def test_size(self, path1):
        url = path1.join("samplefile")
        assert url.size() > len("samplefile")

    def test_mtime(self, path1):
        url = path1.join("samplefile")
        assert url.mtime() > 0

    def test_relto_wrong_type(self, path1):
        py.test.raises(TypeError, "path1.relto(42)")

    def test_load(self, path1):
        p = path1.join('samplepickle')
        obj = p.load()
        assert type(obj) is dict
        assert obj.get('answer', None) == 42

    def test_visit_filesonly(self, path1):
        l = []
        for i in path1.visit(lambda x: x.check(file=1)):
            l.append(i.relto(path1))
        assert not "sampledir" in l
        assert path1.sep.join(["sampledir", "otherfile"]) in l

    def test_visit_nodotfiles(self, path1):
        l = []
        for i in path1.visit(lambda x: x.check(dotfile=0)):
            l.append(i.relto(path1))
        assert "sampledir" in l
        assert path1.sep.join(["sampledir", "otherfile"]) in l
        assert not ".dotfile" in l

    def test_visit_breadthfirst(self, path1):
        l = []
        for i in path1.visit(bf=True):
            l.append(i.relto(path1))
        for i, p in enumerate(l):
            if path1.sep in p:
                for j in range(i, len(l)):
                    assert path1.sep in l[j]
                break
        else:
            py.test.fail("huh")

    def test_visit_sort(self, path1):
        l = []
        for i in path1.visit(bf=True, sort=True):
            l.append(i.relto(path1))
        for i, p in enumerate(l):
            if path1.sep in p:
                break
        assert l[:i] == sorted(l[:i])
        assert l[i:] == sorted(l[i:])

    def test_endswith(self, path1):
        def chk(p):
            return p.check(endswith="pickle")
        assert not chk(path1)
        assert not chk(path1.join('samplefile'))
        assert chk(path1.join('somepickle'))

    def test_copy_file(self, path1):
        otherdir = path1.join('otherdir')
        initpy = otherdir.join('__init__.py')
        copied = otherdir.join('copied')
        initpy.copy(copied)
        try:
            assert copied.check()
            s1 = initpy.read()
            s2 = copied.read()
            assert s1 == s2
        finally:
            if copied.check():
                copied.remove()

    def test_copy_dir(self, path1):
        otherdir = path1.join('otherdir')
        copied = path1.join('newdir')
        try:
            otherdir.copy(copied)
            assert copied.check(dir=1)
            assert copied.join('__init__.py').check(file=1)
            s1 = otherdir.join('__init__.py').read()
            s2 = copied.join('__init__.py').read()
            assert s1 == s2
        finally:
            if copied.check(dir=1):
                copied.remove(rec=1)

    def test_remove_file(self, path1):
        d = path1.ensure('todeleted')
        assert d.check()
        d.remove()
        assert not d.check()

    def test_remove_dir_recursive_by_default(self, path1):
        d = path1.ensure('to', 'be', 'deleted')
        assert d.check()
        p = path1.join('to')
        p.remove()
        assert not p.check()

    def test_ensure_dir(self, path1):
        b = path1.ensure_dir("001", "002")
        assert b.basename == "002"
        assert b.isdir()

    def test_mkdir_and_remove(self, path1):
        tmpdir = path1
        py.test.raises(py.error.EEXIST, tmpdir.mkdir, 'sampledir')
        new = tmpdir.join('mktest1')
        new.mkdir()
        assert new.check(dir=1)
        new.remove()

        new = tmpdir.mkdir('mktest')
        assert new.check(dir=1)
        new.remove()
        assert tmpdir.join('mktest') == new

    def test_move_file(self, path1):
        p = path1.join('samplefile')
        newp = p.dirpath('moved_samplefile')
        p.move(newp)
        try:
            assert newp.check(file=1)
            assert not p.check()
        finally:
            dp = newp.dirpath()
            if hasattr(dp, 'revert'):
                dp.revert()
            else:
                newp.move(p)
                assert p.check()

    def test_move_dir(self, path1):
        source = path1.join('sampledir')
        dest = path1.join('moveddir')
        source.move(dest)
        assert dest.check(dir=1)
        assert dest.join('otherfile').check(file=1)
        assert not source.join('sampledir').check()
def setuptestfs(path):
    """Populate *path* (a py.path-like object) with the sample files and
    directories that CommonFSTests expects.

    Idempotent: returns immediately if 'samplefile' already exists.
    """
    if path.join('samplefile').check():
        return
    #print "setting up test fs for", repr(path)
    samplefile = path.ensure('samplefile')
    samplefile.write('samplefile\n')

    execfile = path.ensure('execfile')
    execfile.write('x=42')

    execfilepy = path.ensure('execfile.py')
    execfilepy.write('x=42')

    d = {1: 2, 'hello': 'world', 'answer': 42}
    path.ensure('samplepickle').dump(d)

    sampledir = path.ensure('sampledir', dir=1)
    sampledir.ensure('otherfile')

    # Importable package used by the relative-import tests.
    otherdir = path.ensure('otherdir', dir=1)
    otherdir.ensure('__init__.py')

    module_a = otherdir.ensure('a.py')
    # Explicit relative imports only exist from Python 2.6 onwards.
    if sys.version_info >= (2, 6):
        module_a.write('from .b import stuff as result\n')
    else:
        module_a.write('from b import stuff as result\n')
    module_b = otherdir.ensure('b.py')
    module_b.write('stuff="got it"\n')
    module_c = otherdir.ensure('c.py')
    module_c.write('''import py;
import otherdir.a
value = otherdir.a.result
''')
    module_d = otherdir.ensure('d.py')
    module_d.write('''import py;
from otherdir import a
value2 = a.result
''')
|
arcz/ansible-modules-core | refs/heads/devel | cloud/openstack/os_auth.py | 131 | #!/usr/bin/python
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
try:
import shade
from shade import meta
HAS_SHADE = True
except ImportError:
HAS_SHADE = False
DOCUMENTATION = '''
---
module: os_auth
short_description: Retrieve an auth token
version_added: "2.0"
author: "Monty Taylor (@emonty)"
description:
- Retrieve an auth token from an OpenStack Cloud
requirements:
- "python >= 2.6"
- "shade"
extends_documentation_fragment: openstack
'''
EXAMPLES = '''
# Authenticate to the cloud and retrieve the service catalog
- os_auth:
cloud: rax-dfw
- debug: var=service_catalog
'''
def main():
    """Ansible entry point: authenticate against the configured OpenStack
    cloud and expose the auth token and service catalog as facts."""
    module = AnsibleModule(openstack_full_argument_spec(),
                           **openstack_module_kwargs())

    if not HAS_SHADE:
        module.fail_json(msg='shade is required for this module')

    try:
        # Accessing auth_token / service_catalog triggers authentication,
        # so these reads stay inside the try block.
        cloud = shade.openstack_cloud(**module.params)
        facts = dict(auth_token=cloud.auth_token,
                     service_catalog=cloud.service_catalog)
        module.exit_json(changed=False, ansible_facts=facts)
    except shade.OpenStackCloudException as e:
        module.fail_json(msg=e.message)
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *
if __name__ == '__main__':
main()
|
antze-k/gw2-addon-updater | refs/heads/master | src/gw2aupd_tray.py | 1 | # (C) unresolved-external@singu-lair.com released under the MIT license (see LICENSE)
import win32api
import win32con
import win32event
import win32gui
import winreg
import winerror
import os
import sys
from multiprocessing import Queue # hack
import common
import gui
import tray_log
import updater_thread
class tray_app:
UUID = "{61053809-1E96-4C84-8D4B-DD1766180FF2}"
WM_USER_SHELLICON = win32con.WM_USER + 1
app_icon = None
icon_class = None
icon_window = None
icon_icon = None
log_class = None
log_window = None
log = None
updater = None
env = {}
    def __init__(self):
        # Create the log sink eagerly so early messages are captured;
        # run() later calls self.log.start(). The updater thread object is
        # constructed here but presumably started later — confirm in run().
        self.log = tray_log.log()
        self.updater = updater_thread.updater_thread()
def run(self):
self.log.start()
self.env = common.load_env(self.log)
if self.env['game_dir'] is None:
win32gui.MessageBox(0, 'Cannot locate Guild Wars 2\r\nSet path to Guild Wars 2 in your \'gw2-addon-updater.ini\' file', 'gw2-addon-updater', win32con.MB_OK | win32con.MB_ICONERROR)
self.log.stop()
return
win32gui.InitCommonControls()
# force single instance
self.mutex = win32event.CreateMutex(None, False, "single_mutex_" + self.UUID)
lasterror = win32api.GetLastError()
if (lasterror == winerror.ERROR_ALREADY_EXISTS) or (lasterror == winerror.ERROR_ACCESS_DENIED):
os._exit(1)
self.app_icon = gui.icon('gw2-addon-updater.ico')
self.icon_class = gui.window_class("Guild Wars 2 Addon Updater", icon = self.app_icon.hIcon, message_map = {
win32con.WM_DESTROY: self.icon_on_destroy,
win32con.WM_CLOSE: self.icon_on_close,
win32con.WM_COMMAND: lambda hWnd, message, wp, lp, self=self: self.icon_process_command(win32api.LOWORD(wp)),
self.WM_USER_SHELLICON: lambda hWnd, message, wp, lp, self=self: self.icon_process_shell_icon(win32api.LOWORD(lp)),
})
self.icon_window = gui.window("Guild Wars 2 Addon Updater", self.icon_class.name)
self.icon_icon = gui.shell_icon(self.icon_window.hWnd, self.WM_USER_SHELLICON)
self.log.bind_hwnd(self.icon_window.hWnd)
desktop_rect = win32gui.GetClientRect(win32gui.GetDesktopWindow())
self.log_class = gui.window_class("Guild Wars 2 Addon Updater Log", icon = 0, message_map = {
win32con.WM_DESTROY: self.log_on_destroy,
win32con.WM_CLOSE: self.log_on_close,
win32con.WM_SIZE: self.log_on_size,
win32con.WM_ACTIVATE: self.log_on_activate,
})
log_style = win32con.WS_OVERLAPPED | win32con.WS_CAPTION | win32con.WS_THICKFRAME
log_ex_style = win32con.WS_EX_TOOLWINDOW
log_width = 600
log_height = 800
log_margin = 60
log_x = desktop_rect[2] - log_width - log_margin
log_y = desktop_rect[3] - log_height - log_margin
self.log_window = gui.window("Log", self.log_class.name, style = log_style, ex_style = log_ex_style, x = log_x, y = log_y, width = log_width, height = log_height)
edit_style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_VSCROLL | win32con.ES_LEFT | win32con.ES_MULTILINE | win32con.ES_AUTOVSCROLL
self.log_window.edit = gui.window("", "EDIT", style = edit_style, x = 0, y = 0, parent = self.log_window)
self.icon_icon.load(self.app_icon)
self.icon_icon.show()
self.updater.start(self.env, self.icon_window.hWnd, self.log)
win32gui.PumpMessages()
self.updater.stop()
self.log.stop()
def icon_on_destroy(self, hWnd, message, wp, lp):
self.icon_icon.hide()
win32gui.PostQuitMessage(0)
return True
def icon_on_close(self, hWnd, message, wp, lp):
win32gui.DestroyWindow(hWnd)
return True
def icon_process_command(self, value):
if value == common.commands.EXIT:
win32gui.DestroyWindow(self.icon_window.hWnd)
elif value == common.commands.SHOW_LOG:
self.log_window.show()
win32gui.SetFocus(self.log_window.hWnd)
log = self.log.extract()
win32gui.SetWindowText(self.log_window.edit.hWnd, '\r\n'.join(log))
elif value == common.commands.UPDATE_LAUNCH:
if self.env['game_dir'] is None:
self.icon_icon.notify('gw2-addon-updater', 'Cannot locate Guild Wars 2')
else:
self.updater.update_launch()
elif value == common.commands.UPDATE:
if self.env['game_dir'] is None:
self.icon_icon.notify('gw2-addon-updater', 'Cannot locate Guild Wars 2')
else:
self.updater.update()
elif value == common.commands.FORCE_UPDATE:
if self.env['game_dir'] is None:
self.icon_icon.notify('gw2-addon-updater', 'Cannot locate Guild Wars 2')
else:
self.updater.force_update()
elif value == common.commands.UPDATE_CA:
self.updater.force_update_ca_bundle()
elif value == common.commands.LAUNCH:
common.launch(self.env['game_dir'], self.env['game_args'])
elif value == common.commands.UPDATE_LOG:
log = self.log.extract()
win32gui.SetWindowText(self.log_window.edit.hWnd, '\r\n'.join(log))
return True
def icon_process_shell_icon(self, value):
if value == win32con.WM_RBUTTONDOWN:
menu = win32gui.CreatePopupMenu()
win32gui.AppendMenu(menu, win32con.MF_STRING, common.commands.UPDATE_LAUNCH, 'Update and launch')
win32gui.AppendMenu(menu, win32con.MF_STRING, common.commands.UPDATE, 'Update only')
win32gui.AppendMenu(menu, win32con.MF_STRING, common.commands.FORCE_UPDATE, 'Force update')
win32gui.AppendMenu(menu, win32con.MF_SEPARATOR, 0, '')
win32gui.AppendMenu(menu, win32con.MF_STRING, common.commands.UPDATE_CA, 'Update CA Bundle')
win32gui.AppendMenu(menu, win32con.MF_STRING, common.commands.SHOW_LOG, 'Show Log')
win32gui.AppendMenu(menu, win32con.MF_STRING, common.commands.EXIT, 'Exit')
win32gui.SetMenuDefaultItem(menu, common.commands.UPDATE_LAUNCH, False)
win32gui.SetForegroundWindow(self.icon_window.hWnd)
pos = win32gui.GetCursorPos()
win32gui.TrackPopupMenu(menu, win32gui.TPM_LEFTALIGN | win32gui.TPM_LEFTBUTTON | win32gui.TPM_BOTTOMALIGN, pos[0], pos[1], 0, self.icon_window.hWnd, None)
win32api.SendMessage(self.icon_window.hWnd, win32con.WM_NULL, 0, 0)
return True
def log_on_destroy(self, hWnd, message, wp, lp):
return True
def log_on_close(self, hWnd, message, wp, lp):
self.log_window.hide()
return True
def log_on_size(self, hWnd, message, wp, lp):
width = win32api.LOWORD(lp)
height = win32api.HIWORD(lp)
win32gui.SetWindowPos(self.log_window.edit.hWnd, 0, 0, 0, width, height, 0)
return True;
def log_on_activate(self, hWnd, message, wp, lp):
if win32api.LOWORD(wp) == win32con.WA_INACTIVE:
self.log_window.hide()
return True
if __name__ == '__main__':
    # Start the tray application only when run as a script, so importing
    # this module (e.g. for reuse or testing) does not launch the GUI loop.
    a = tray_app()
    a.run()
|
dendisuhubdy/tensorflow | refs/heads/master | tensorflow/contrib/receptive_field/receptive_field_api.py | 48 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Module that declares the functions in tf.contrib.receptive_field's API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import
from tensorflow.contrib.receptive_field.python.util.graph_compute_order import get_compute_order
from tensorflow.contrib.receptive_field.python.util.receptive_field import compute_receptive_field_from_graph_def
# pylint: enable=unused-import
del absolute_import
del division
del print_function
|
jpshort/odoo | refs/heads/8.0 | addons/calendar/contacts.py | 389 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2011 OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class calendar_contacts(osv.osv):
    """Per-user list of partners being tracked (presumably the partners
    whose calendars the user follows -- confirm against the module views).
    """
    _name = 'calendar.contacts'
    _columns = {
        # Owner of this tracking record ("Me").
        'user_id': fields.many2one('res.users','Me'),
        # Partner being tracked; mandatory.
        'partner_id': fields.many2one('res.partner','Employee',required=True, domain=[]),
        # Standard OpenERP active flag (record hidden when False).
        'active':fields.boolean('active'),
    }
    _defaults = {
        # Default the owner to the user creating the record.
        'user_id': lambda self, cr, uid, ctx: uid,
        'active' : True,
    }
canaltinova/servo | refs/heads/master | tests/wpt/web-platform-tests/content-security-policy/embedded-enforcement/support/echo-required-csp.py | 9 | import json
def main(request, response):
    """Echo the Sec-Required-CSP request header back to the embedder.

    Serves an HTML document that posts ``{"required_csp": <header or null>}``
    to its parent window via postMessage, and relays any message it receives
    from children upward. When ``include_second_level_iframe`` is present in
    the query string, the page also embeds a nested iframe loading this same
    resource, optionally carrying a ``csp`` attribute taken from the
    ``second_level_iframe_csp`` query parameter.

    Returns (headers, body) in the wptserve handler convention.
    """
    header = request.headers.get("Sec-Required-CSP")
    message = {}
    message['required_csp'] = header if header else None
    second_level_iframe_code = ""
    if "include_second_level_iframe" in request.GET:
        # '!=' replaces the obsolete '<>' operator (removed in Python 3).
        if "second_level_iframe_csp" in request.GET and request.GET["second_level_iframe_csp"] != "":
            second_level_iframe_code = '''<script>
        var i2 = document.createElement('iframe');
        i2.src = 'echo-required-csp.py';
        i2.csp = "{0}";
        document.body.appendChild(i2);
        </script>'''.format(request.GET["second_level_iframe_csp"])
        else:
            second_level_iframe_code = '''<script>
        var i2 = document.createElement('iframe');
        i2.src = 'echo-required-csp.py';
        document.body.appendChild(i2);
        </script>'''
    return [("Content-Type", "text/html"), ("Allow-CSP-From", "*")], '''
<!DOCTYPE html>
<html>
<head>
    <!--{2}-->
    <script>
      window.addEventListener('message', function(e) {{
        window.parent.postMessage(e.data, '*');
      }});
      window.parent.postMessage({0}, '*');
    </script>
</head>
<body>
{1}
</body>
</html>
'''.format(json.dumps(message), second_level_iframe_code, str(request.headers))
|
GetSomeBlocks/ServerStatus | refs/heads/master | resources/lib/twisted/twisted/trial/util.py | 7 | # -*- test-case-name: twisted.trial.test.test_util -*-
# Copyright (c) 2001-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
#
"""
A collection of utility functions and classes, used internally by Trial.
This code is for Trial's internal use. Do NOT use this code if you are writing
tests. It is subject to change at the Trial maintainer's whim. There is
nothing here in this module for you to use unless you are maintaining Trial.
Any non-Trial Twisted code that uses this module will be shot.
Maintainer: Jonathan Lange
"""
import traceback, sys
from twisted.internet import defer, utils, interfaces
from twisted.python.failure import Failure
from twisted.python import deprecate, versions
DEFAULT_TIMEOUT = object()
DEFAULT_TIMEOUT_DURATION = 120.0
class DirtyReactorAggregateError(Exception):
    """
    Passed to L{twisted.trial.itrial.IReporter.addError} when the reactor is
    left in an unclean state after a test.

    @ivar delayedCalls: The L{DelayedCall} objects which weren't cleaned up.
    @ivar selectables: The selectables which weren't cleaned up.
    """
    def __init__(self, delayedCalls, selectables=None):
        self.delayedCalls = delayedCalls
        self.selectables = selectables

    def __str__(self):
        """
        Build a multi-line message describing every piece of unclean state.
        """
        parts = ["Reactor was unclean."]
        if self.delayedCalls:
            parts.append("\nDelayedCalls: (set "
                         "twisted.internet.base.DelayedCall.debug = True to "
                         "debug)\n")
            parts.append("\n".join(str(call) for call in self.delayedCalls))
        if self.selectables:
            parts.append("\nSelectables:\n")
            parts.append("\n".join(str(sel) for sel in self.selectables))
        return "".join(parts)
class _Janitor(object):
"""
The guy that cleans up after you.
@ivar test: The L{TestCase} to report errors about.
@ivar result: The L{IReporter} to report errors to.
@ivar reactor: The reactor to use. If None, the global reactor
will be used.
"""
def __init__(self, test, result, reactor=None):
"""
@param test: See L{_Janitor.test}.
@param result: See L{_Janitor.result}.
@param reactor: See L{_Janitor.reactor}.
"""
self.test = test
self.result = result
self.reactor = reactor
def postCaseCleanup(self):
"""
Called by L{unittest.TestCase} after a test to catch any logged errors
or pending L{DelayedCall}s.
"""
calls = self._cleanPending()
if calls:
aggregate = DirtyReactorAggregateError(calls)
self.result.addError(self.test, Failure(aggregate))
return False
return True
def postClassCleanup(self):
"""
Called by L{unittest.TestCase} after the last test in a C{TestCase}
subclass. Ensures the reactor is clean by murdering the threadpool,
catching any pending L{DelayedCall}s, open sockets etc.
"""
selectables = self._cleanReactor()
calls = self._cleanPending()
if selectables or calls:
aggregate = DirtyReactorAggregateError(calls, selectables)
self.result.addError(self.test, Failure(aggregate))
self._cleanThreads()
def _getReactor(self):
"""
Get either the passed-in reactor or the global reactor.
"""
if self.reactor is not None:
reactor = self.reactor
else:
from twisted.internet import reactor
return reactor
def _cleanPending(self):
"""
Cancel all pending calls and return their string representations.
"""
reactor = self._getReactor()
# flush short-range timers
reactor.iterate(0)
reactor.iterate(0)
delayedCallStrings = []
for p in reactor.getDelayedCalls():
if p.active():
delayedString = str(p)
p.cancel()
else:
print "WEIRDNESS! pending timed call not active!"
delayedCallStrings.append(delayedString)
return delayedCallStrings
_cleanPending = utils.suppressWarnings(
_cleanPending, (('ignore',), {'category': DeprecationWarning,
'message':
r'reactor\.iterate cannot be used.*'}))
def _cleanThreads(self):
reactor = self._getReactor()
if interfaces.IReactorThreads.providedBy(reactor):
if reactor.threadpool is not None:
# Stop the threadpool now so that a new one is created.
# This improves test isolation somewhat (although this is a
# post class cleanup hook, so it's only isolating classes
# from each other, not methods from each other).
reactor._stopThreadPool()
def _cleanReactor(self):
"""
Remove all selectables from the reactor, kill any of them that were
processes, and return their string representation.
"""
reactor = self._getReactor()
selectableStrings = []
for sel in reactor.removeAll():
if interfaces.IProcessTransport.providedBy(sel):
sel.signalProcess('KILL')
selectableStrings.append(repr(sel))
return selectableStrings
def excInfoOrFailureToExcInfo(err):
    """
    Coerce a Failure to an _exc_info, if err is a Failure.

    @param err: Either a tuple such as returned by L{sys.exc_info} or a
        L{Failure} object.
    @return: A tuple like the one returned by L{sys.exc_info}. e.g.
        C{exception_type, exception_object, traceback_object}.
    """
    if not isinstance(err, Failure):
        return err
    # Unwrap the Failure into an exc_info-style triple.
    return (err.type, err.value, err.getTracebackObject())
def suppress(action='ignore', **kwarg):
    """
    Build a warning-suppression rule for a test's ``.suppress`` attribute.

    Pass the same options you would give the stdlib
    ``warnings.filterwarnings()``. Example::

        >>> from twisted.trial import unittest, util
        >>> import warnings
        >>>
        >>> class TestFoo(unittest.TestCase):
        ...     def testFooBar(self):
        ...         warnings.warn("i am deprecated", DeprecationWarning)
        ...     testFooBar.suppress = [util.suppress(message='i am deprecated')]

    As with the todo and timeout attributes, the module-level attribute is a
    default for the class attribute, which is a default for the method
    attribute; ``.suppress = []`` overrides at any level.

    @return: a C{((action,), keyword_filters)} tuple consumed by Trial.
    """
    filter_args = kwarg
    return ((action,), filter_args)
def profiled(f, outputFile):
    """
    Wrap ``f`` so each call runs under a profiler, dumping statistics to
    ``outputFile``. Uses ``profile`` normally and ``hotshot`` on Python 2.4.
    """
    def _(*args, **kwargs):
        if sys.version_info[0:2] != (2, 4):
            import profile
            prof = profile.Profile()
            try:
                result = prof.runcall(f, *args, **kwargs)
                prof.dump_stats(outputFile)
            except SystemExit:
                pass
            prof.print_stats()
            # NOTE(review): if SystemExit was raised above, 'result' is
            # unbound here and this line raises UnboundLocalError --
            # confirm whether that path can actually occur.
            return result
        else: # use hotshot, profile is broken in 2.4
            import hotshot.stats
            prof = hotshot.Profile(outputFile)
            try:
                return prof.runcall(f, *args, **kwargs)
            finally:
                # Load, normalize and print the stats even if f raised.
                stats = hotshot.stats.load(outputFile)
                stats.strip_dirs()
                stats.sort_stats('cum')   # 'time'
                stats.print_stats(100)
    return _
def getPythonContainers(meth):
    """Walk up the Python tree from method 'meth', returning its class, its
    module and all containing packages (in that order)."""
    found = [meth.im_class]
    modName = meth.im_class.__module__
    while modName is not None:
        mod = sys.modules.get(modName, None)
        if mod is None:
            mod = __import__(modName)
        found.append(mod)
        # Modules have no __module__ attribute, which terminates the walk.
        modName = getattr(mod, '__module__', None)
    return found
# Private sentinel distinguishing "no default supplied" from any real value.
_DEFAULT = object()
def acquireAttribute(objects, attr, default=_DEFAULT):
    """Search 'objects' in order and return the value of 'attr' from the
    first object that has it. If none does, return 'default' when given,
    otherwise raise AttributeError."""
    for candidate in objects:
        if not hasattr(candidate, attr):
            continue
        return getattr(candidate, attr)
    if default is not _DEFAULT:
        return default
    raise AttributeError('attribute %r not found in %r' % (attr, objects))
# Mark findObject as deprecated in favour of twisted.python.reflect.namedAny.
deprecate.deprecatedModuleAttribute(
    versions.Version("Twisted", 10, 1, 0),
    "Please use twisted.python.reflect.namedAny instead.",
    __name__, "findObject")
def findObject(name):
    """Get a fully-named package, module, module-global object or attribute.
    Forked from twisted.python.reflect.namedAny.
    Returns a tuple of (bool, obj).  If bool is True, the named object exists
    and is returned as obj.  If bool is False, the named object does not exist
    and the value of obj is unspecified.
    """
    names = name.split('.')
    topLevelPackage = None
    moduleNames = names[:]
    # Import the longest importable dotted prefix of 'name', trimming one
    # trailing component after each failed import attempt.
    while not topLevelPackage:
        trialname = '.'.join(moduleNames)
        if len(trialname) == 0:
            return (False, None)
        try:
            topLevelPackage = __import__(trialname)
        except ImportError:
            # if the ImportError happened in the module being imported,
            # this is a failure that should be handed to our caller.
            # count stack frames to tell the difference.
            exc_info = sys.exc_info()
            if len(traceback.extract_tb(exc_info[2])) > 1:
                try:
                    # Clean up garbage left in sys.modules.
                    del sys.modules[trialname]
                except KeyError:
                    # Python 2.4 has fixed this.  Yay!
                    pass
                # Python 2 three-argument raise: re-raise preserving the
                # original traceback.
                raise exc_info[0], exc_info[1], exc_info[2]
            moduleNames.pop()
    obj = topLevelPackage
    # Walk attribute-by-attribute from the top-level package to the target.
    for n in names[1:]:
        try:
            obj = getattr(obj, n)
        except AttributeError:
            return (False, obj)
    return (True, obj)
def _runSequentially(callables, stopOnFirstError=False):
    """
    Run the given callables one after the other. If a callable returns a
    Deferred, wait until it has finished before running the next callable.
    @param callables: An iterable of callables that take no parameters.
    @param stopOnFirstError: If True, then stop running callables as soon as
        one raises an exception or fires an errback. False by default.
    @return: A L{Deferred} that fires a list of C{(flag, value)} tuples. Each
        tuple will be either C{(SUCCESS, <return value>)} or C{(FAILURE,
        <Failure>)}.
    """
    results = []
    for f in callables:
        d = defer.maybeDeferred(f)
        # deferredGenerator protocol: yield the waiting wrapper, then call
        # getResult() to obtain the value (or re-raise the failure).
        thing = defer.waitForDeferred(d)
        yield thing
        try:
            results.append((defer.SUCCESS, thing.getResult()))
        except:
            # Bare except is intentional here: any failure is captured as a
            # (FAILURE, Failure()) result rather than propagated.
            results.append((defer.FAILURE, Failure()))
            if stopOnFirstError:
                break
    # The final yield delivers the aggregated results to the caller.
    yield results
_runSequentially = defer.deferredGenerator(_runSequentially)
__all__ = ['FailureError', 'DirtyReactorWarning', 'DirtyReactorError',
'PendingTimedCallsError', 'excInfoOrFailureToExcInfo']
|
danalec/dotfiles | refs/heads/master | sublime/.config/sublime-text-3/Packages/SublimeCodeIntel/libs/SilverCity/DispatchHandler.py | 8 | from . import ScintillaConstants
from . import Utils
def generate_handler_name(state):
    """Map a state constant name (e.g. 'SCE_P_WORD') to its handler method
    name ('handle_p_word') by dropping the 4-char prefix and lowercasing."""
    suffix = state[4:]
    return 'handle_%s' % suffix.lower()
class DispatchHandler:
    """Dispatches lexer events by Scintilla style number.

    For every state constant starting with 'state_prefix', a mapping from
    its numeric value to a 'handle_<state>' method name is built;
    event_handler() routes through that map and falls back to
    handle_other() (expected on subclasses) for unknown styles.
    """
    def __init__(self, state_prefix):
        self.handlers = {}
        if state_prefix is None:
            return
        for constant_name in Utils.list_states(state_prefix):
            style_number = getattr(ScintillaConstants, constant_name)
            self.handlers[style_number] = generate_handler_name(constant_name)

    def event_handler(self, style, **kwargs):
        kwargs['style'] = style
        method_name = self.handlers.get(style)
        if method_name is None:
            self.handle_other(**kwargs)
            return
        getattr(self, method_name, self.handle_other)(**kwargs)
|
mikeolteanu/livepythonconsole-app-engine | refs/heads/master | boilerplate/external/requests/packages/charade/eucjpprober.py | 2918 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import sys
from . import constants
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCJPDistributionAnalysis
from .jpcntx import EUCJPContextAnalysis
from .mbcssm import EUCJPSMModel
class EUCJPProber(MultiByteCharSetProber):
    """Charset prober for EUC-JP: drives a coding state machine over the
    input and scores it with distribution and context analysers."""
    def __init__(self):
        MultiByteCharSetProber.__init__(self)
        self._mCodingSM = CodingStateMachine(EUCJPSMModel)
        self._mDistributionAnalyzer = EUCJPDistributionAnalysis()
        self._mContextAnalyzer = EUCJPContextAnalysis()
        self.reset()
    def reset(self):
        # Reset base prober state and the context analyser for new input.
        MultiByteCharSetProber.reset(self)
        self._mContextAnalyzer.reset()
    def get_charset_name(self):
        # Canonical name of the charset this prober detects.
        return "EUC-JP"
    def feed(self, aBuf):
        """Feed a chunk of bytes through the state machine and analysers;
        returns the current detection state constant."""
        aLen = len(aBuf)
        for i in range(0, aLen):
            # PY3K: aBuf is a byte array, so aBuf[i] is an int, not a byte
            codingState = self._mCodingSM.next_state(aBuf[i])
            if codingState == constants.eError:
                if constants._debug:
                    sys.stderr.write(self.get_charset_name()
                                     + ' prober hit error at byte ' + str(i)
                                     + '\n')
                self._mState = constants.eNotMe
                break
            elif codingState == constants.eItsMe:
                self._mState = constants.eFoundIt
                break
            elif codingState == constants.eStart:
                charLen = self._mCodingSM.get_current_charlen()
                if i == 0:
                    # First byte of this chunk completes the character
                    # carried over from the previous feed() call.
                    self._mLastChar[1] = aBuf[0]
                    self._mContextAnalyzer.feed(self._mLastChar, charLen)
                    self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
                else:
                    self._mContextAnalyzer.feed(aBuf[i - 1:i + 1], charLen)
                    self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
                                                     charLen)
        # Remember the last byte for cross-chunk character assembly.
        self._mLastChar[0] = aBuf[aLen - 1]
        if self.get_state() == constants.eDetecting:
            # Shortcut: stop probing once confidence is high enough.
            if (self._mContextAnalyzer.got_enough_data() and
                    (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
                self._mState = constants.eFoundIt
        return self.get_state()
    def get_confidence(self):
        # Overall confidence is the better of the two analysers' scores.
        contxtCf = self._mContextAnalyzer.get_confidence()
        distribCf = self._mDistributionAnalyzer.get_confidence()
        return max(contxtCf, distribCf)
|
xfournet/intellij-community | refs/heads/master | python/helpers/py2only/docutils/transforms/universal.py | 106 | # $Id: universal.py 7668 2013-06-04 12:46:30Z milde $
# -*- coding: utf-8 -*-
# Authors: David Goodger <goodger@python.org>; Ueli Schlaepfer; Günter Milde
# Maintainer: docutils-develop@lists.sourceforge.net
# Copyright: This module has been placed in the public domain.
"""
Transforms needed by most or all documents:
- `Decorations`: Generate a document's header & footer.
- `Messages`: Placement of system messages stored in
`nodes.document.transform_messages`.
- `TestMessages`: Like `Messages`, used on test runs.
- `FinalReferences`: Resolve remaining references.
"""
__docformat__ = 'reStructuredText'
import re
import sys
import time
from docutils import nodes, utils
from docutils.transforms import TransformError, Transform
from docutils.utils import smartquotes
class Decorations(Transform):
    """
    Populate a document's decoration element (header, footer).
    """
    default_priority = 820
    def apply(self):
        # Extend (never replace) the existing header/footer decorations.
        header_nodes = self.generate_header()
        if header_nodes:
            decoration = self.document.get_decoration()
            header = decoration.get_header()
            header.extend(header_nodes)
        footer_nodes = self.generate_footer()
        if footer_nodes:
            decoration = self.document.get_decoration()
            footer = decoration.get_footer()
            footer.extend(footer_nodes)
    def generate_header(self):
        # No header content by default; subclasses may override.
        return None
    def generate_footer(self):
        """Build the footer node list (source link, datestamp, generator
        credit), or return None when no relevant setting is active."""
        # @@@ Text is hard-coded for now.
        # Should be made dynamic (language-dependent).
        settings = self.document.settings
        if settings.generator or settings.datestamp or settings.source_link \
               or settings.source_url:
            text = []
            if settings.source_link and settings._source \
                   or settings.source_url:
                # Prefer an explicit URL; otherwise link to the source file
                # relative to the output destination.
                if settings.source_url:
                    source = settings.source_url
                else:
                    source = utils.relative_path(settings._destination,
                                                 settings._source)
                text.extend([
                    nodes.reference('', 'View document source',
                                    refuri=source),
                    nodes.Text('.\n')])
            if settings.datestamp:
                datestamp = time.strftime(settings.datestamp, time.gmtime())
                text.append(nodes.Text('Generated on: ' + datestamp + '.\n'))
            if settings.generator:
                text.extend([
                    nodes.Text('Generated by '),
                    nodes.reference('', 'Docutils', refuri=
                                    'http://docutils.sourceforge.net/'),
                    nodes.Text(' from '),
                    nodes.reference('', 'reStructuredText', refuri='http://'
                                    'docutils.sourceforge.net/rst.html'),
                    nodes.Text(' source.\n')])
            return [nodes.paragraph('', '', *text)]
        else:
            return None
class ExposeInternals(Transform):
    """
    Copy selected internal attributes onto non-Text nodes as
    ``internal:<name>`` attributes when the ``expose_internals`` setting
    is enabled.
    """
    default_priority = 840
    def not_Text(self, node):
        # Traverse filter: every node except Text leaves.
        return not isinstance(node, nodes.Text)
    def apply(self):
        exposed = self.document.settings.expose_internals
        if not exposed:
            return
        for node in self.document.traverse(self.not_Text):
            for name in exposed:
                value = getattr(node, name, None)
                if value is not None:
                    node['internal:' + name] = value
class Messages(Transform):
    """
    Move loose system messages generated after parsing into a dedicated
    "system messages" section appended to the document.
    """
    default_priority = 860
    def apply(self):
        threshold = self.document.reporter.report_level
        # Only unattached messages at or above the report level qualify.
        loose = [msg for msg in self.document.transform_messages
                 if msg['level'] >= threshold and not msg.parent]
        if not loose:
            return
        section = nodes.section(classes=['system-messages'])
        # @@@ get this from the language module?
        section += nodes.title('', 'Docutils System Messages')
        section += loose
        self.document.transform_messages[:] = []
        self.document += section
class FilterMessages(Transform):
    """
    Remove system messages whose level falls below the reporter's
    verbosity threshold.
    """
    default_priority = 870
    def apply(self):
        threshold = self.document.reporter.report_level
        for msg in self.document.traverse(nodes.system_message):
            if msg['level'] < threshold:
                msg.parent.remove(msg)
class TestMessages(Transform):
    """
    Append every unattached post-parse system message to the end of the
    document. Used for testing purposes.
    """
    default_priority = 880
    def apply(self):
        for message in self.document.transform_messages:
            if message.parent:
                continue
            self.document += message
class StripComments(Transform):
    """
    Remove all comment elements from the document tree (only if the
    ``strip_comments`` setting is enabled).
    """
    default_priority = 740
    def apply(self):
        if not self.document.settings.strip_comments:
            return
        for comment in self.document.traverse(nodes.comment):
            comment.parent.remove(comment)
class StripClassesAndElements(Transform):
    """
    Remove from the document tree all elements with classes in
    `self.document.settings.strip_elements_with_classes` and all "classes"
    attribute values in `self.document.settings.strip_classes`.
    """
    default_priority = 420
    def apply(self):
        if not (self.document.settings.strip_elements_with_classes
                or self.document.settings.strip_classes):
            return
        # prepare dicts for lookup (not sets, for Python 2.2 compatibility):
        self.strip_elements = dict(
            [(key, None)
             for key in (self.document.settings.strip_elements_with_classes
                         or [])])
        self.strip_classes = dict(
            [(key, None) for key in (self.document.settings.strip_classes
                                     or [])])
        # check_classes doubles as traverse filter and in-place mutator.
        for node in self.document.traverse(self.check_classes):
            node.parent.remove(node)
    def check_classes(self, node):
        """Prune stripped class values in place; return a true value (1)
        when the node itself must be removed from the tree."""
        if isinstance(node, nodes.Element):
            # Iterate over a copy since values may be removed while looping.
            for class_value in node['classes'][:]:
                if class_value in self.strip_classes:
                    node['classes'].remove(class_value)
                if class_value in self.strip_elements:
                    return 1
class SmartQuotes(Transform):
    """
    Replace ASCII quotation marks with typographic form.
    Also replace multiple dashes with em-dash/en-dash characters.
    """
    default_priority = 850
    def __init__(self, document, startnode):
        Transform.__init__(self, document, startnode=startnode)
        # Languages already warned about, to avoid duplicate warnings.
        self.unsupported_languages = set()
    def get_tokens(self, txtnodes):
        # A generator that yields ``(texttype, nodetext)`` tuples for a list
        # of "Text" nodes (interface to ``smartquotes.educate_tokens()``).
        texttype = {True: 'literal', # "literal" text is not changed:
                    False: 'plain'}
        for txtnode in txtnodes:
            nodetype = texttype[isinstance(txtnode.parent,
                                           (nodes.literal,
                                            nodes.math,
                                            nodes.image,
                                            nodes.raw,
                                            nodes.problematic))]
            yield (nodetype, txtnode.astext())
    def apply(self):
        """Educate quotes in every eligible text block of the document."""
        smart_quotes = self.document.settings.smart_quotes
        if not smart_quotes:
            return
        try:
            alternative = smart_quotes.startswith('alt')
        except AttributeError:
            # Setting is a boolean, not a string: no alternative quote set.
            alternative = False
        # print repr(alternative)
        document_language = self.document.settings.language_code
        # "Educate" quotes in normal text. Handle each block of text
        # (TextElement node) as a unit to keep context around inline nodes:
        for node in self.document.traverse(nodes.TextElement):
            # skip preformatted text blocks and special elements:
            if isinstance(node, (nodes.FixedTextElement, nodes.Special)):
                continue
            # nested TextElements are not "block-level" elements:
            if isinstance(node.parent, nodes.TextElement):
                continue
            # list of text nodes in the "text block":
            txtnodes = [txtnode for txtnode in node.traverse(nodes.Text)
                        if not isinstance(txtnode.parent,
                                          nodes.option_string)]
            # language: use typographical quotes for language "lang"
            lang = node.get_language_code(document_language)
            # use alternative form if `smart-quotes` setting starts with "alt":
            if alternative:
                if '-x-altquot' in lang:
                    lang = lang.replace('-x-altquot', '')
                else:
                    lang += '-x-altquot'
            # drop subtags missing in quotes:
            for tag in utils.normalize_language_tag(lang):
                if tag in smartquotes.smartchars.quotes:
                    lang = tag
                    break
            else: # language not supported: (keep ASCII quotes)
                if lang not in self.unsupported_languages:
                    self.document.reporter.warning('No smart quotes '
                        'defined for language "%s".'%lang, base_node=node)
                self.unsupported_languages.add(lang)
                lang = ''
            # Iterator educating quotes in plain text:
            # '2': set all, using old school en- and em- dash shortcuts
            teacher = smartquotes.educate_tokens(self.get_tokens(txtnodes),
                                                 attr='2', language=lang)
            for txtnode, newtext in zip(txtnodes, teacher):
                txtnode.parent.replace(txtnode, nodes.Text(newtext))
        self.unsupported_languages = set() # reset
|
UrusTeam/android_ndk_toolchain_cross | refs/heads/master | lib/python2.7/bsddb/test/test_sequence.py | 69 | import unittest
import os
from test_all import db, test_support, get_new_environment_path, get_new_database_path
class DBSequenceTest(unittest.TestCase):
    """Tests for bsddb's DBSequence wrapper (persistent sequence
    generators stored inside a Berkeley DB database)."""
    def setUp(self):
        # Fresh environment and BTree database for every test.
        self.int_32_max = 0x100000000
        self.homeDir = get_new_environment_path()
        self.filename = "test"
        self.dbenv = db.DBEnv()
        self.dbenv.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL, 0666)
        self.d = db.DB(self.dbenv)
        self.d.open(self.filename, db.DB_BTREE, db.DB_CREATE, 0666)
    def tearDown(self):
        # Close sequence, database and environment in dependency order,
        # then delete the on-disk environment.
        if hasattr(self, 'seq'):
            self.seq.close()
            del self.seq
        if hasattr(self, 'd'):
            self.d.close()
            del self.d
        if hasattr(self, 'dbenv'):
            self.dbenv.close()
            del self.dbenv
        test_support.rmtree(self.homeDir)
    def test_get(self):
        # get(n) returns the current value and advances the sequence by n;
        # the start value deliberately exceeds 32 bits.
        self.seq = db.DBSequence(self.d, flags=0)
        start_value = 10 * self.int_32_max
        self.assertEqual(0xA00000000, start_value)
        self.assertEqual(None, self.seq.initial_value(start_value))
        self.assertEqual(None, self.seq.open(key='id', txn=None, flags=db.DB_CREATE))
        self.assertEqual(start_value, self.seq.get(5))
        self.assertEqual(start_value + 5, self.seq.get())
    def test_remove(self):
        self.seq = db.DBSequence(self.d, flags=0)
        self.assertEqual(None, self.seq.open(key='foo', txn=None, flags=db.DB_CREATE))
        self.assertEqual(None, self.seq.remove(txn=None, flags=0))
        # Already removed: stop tearDown from closing it a second time.
        del self.seq
    def test_get_key(self):
        self.seq = db.DBSequence(self.d, flags=0)
        key = 'foo'
        self.assertEqual(None, self.seq.open(key=key, txn=None, flags=db.DB_CREATE))
        self.assertEqual(key, self.seq.get_key())
    def test_get_dbp(self):
        # get_dbp() must hand back the exact DB object the sequence wraps.
        self.seq = db.DBSequence(self.d, flags=0)
        self.assertEqual(None, self.seq.open(key='foo', txn=None, flags=db.DB_CREATE))
        self.assertEqual(self.d, self.seq.get_dbp())
    def test_cachesize(self):
        self.seq = db.DBSequence(self.d, flags=0)
        cashe_size = 10
        self.assertEqual(None, self.seq.set_cachesize(cashe_size))
        self.assertEqual(None, self.seq.open(key='foo', txn=None, flags=db.DB_CREATE))
        self.assertEqual(cashe_size, self.seq.get_cachesize())
    def test_flags(self):
        self.seq = db.DBSequence(self.d, flags=0)
        flag = db.DB_SEQ_WRAP;
        self.assertEqual(None, self.seq.set_flags(flag))
        self.assertEqual(None, self.seq.open(key='foo', txn=None, flags=db.DB_CREATE))
        # Other flag bits may be set internally; mask to the one we set.
        self.assertEqual(flag, self.seq.get_flags() & flag)
    def test_range(self):
        self.seq = db.DBSequence(self.d, flags=0)
        seq_range = (10 * self.int_32_max, 11 * self.int_32_max - 1)
        self.assertEqual(None, self.seq.set_range(seq_range))
        self.seq.initial_value(seq_range[0])
        self.assertEqual(None, self.seq.open(key='foo', txn=None, flags=db.DB_CREATE))
        self.assertEqual(seq_range, self.seq.get_range())
    def test_stat(self):
        self.seq = db.DBSequence(self.d, flags=0)
        self.assertEqual(None, self.seq.open(key='foo', txn=None, flags=db.DB_CREATE))
        stat = self.seq.stat()
        for param in ('nowait', 'min', 'max', 'value', 'current',
                      'flags', 'cache_size', 'last_value', 'wait'):
            self.assertTrue(param in stat, "parameter %s isn't in stat info" % param)
    if db.version() >= (4,7) :
        # This code checks a crash solved in Berkeley DB 4.7
        def test_stat_crash(self) :
            d=db.DB()
            d.open(None,dbtype=db.DB_HASH,flags=db.DB_CREATE) # In RAM
            seq = db.DBSequence(d, flags=0)
            # stat() on a never-opened sequence used to crash pre-4.7.
            self.assertRaises(db.DBNotFoundError, seq.open,
                              key='id', txn=None, flags=0)
            self.assertRaises(db.DBInvalidArgError, seq.stat)
            d.close()
def test_64bits(self) :
# We don't use both extremes because they are problematic
value_plus=(1L<<63)-2
self.assertEqual(9223372036854775806L,value_plus)
value_minus=(-1L<<63)+1 # Two complement
self.assertEqual(-9223372036854775807L,value_minus)
self.seq = db.DBSequence(self.d, flags=0)
self.assertEqual(None, self.seq.initial_value(value_plus-1))
self.assertEqual(None, self.seq.open(key='id', txn=None,
flags=db.DB_CREATE))
self.assertEqual(value_plus-1, self.seq.get(1))
self.assertEqual(value_plus, self.seq.get(1))
self.seq.remove(txn=None, flags=0)
self.seq = db.DBSequence(self.d, flags=0)
self.assertEqual(None, self.seq.initial_value(value_minus))
self.assertEqual(None, self.seq.open(key='id', txn=None,
flags=db.DB_CREATE))
self.assertEqual(value_minus, self.seq.get(1))
self.assertEqual(value_minus+1, self.seq.get(1))
def test_multiple_close(self):
self.seq = db.DBSequence(self.d)
self.seq.close() # You can close a Sequence multiple times
self.seq.close()
self.seq.close()
def test_suite():
    """Return a TestSuite containing every DBSequence test."""
    return unittest.TestSuite((unittest.makeSuite(DBSequenceTest),))
if __name__ == '__main__':
    # Running the module directly executes the full DBSequence suite.
    unittest.main(defaultTest='test_suite')
|
joaduo/mepinta | refs/heads/master | plugins/c_and_cpp/k3dv1/plugins_tests/c_and_cpp/processors/k3dv1/mesh/modifiers/sds/MakeSDS/__init__.py | 12133432 | |
disqus/django-old | refs/heads/master | tests/modeltests/reserved_names/__init__.py | 12133432 | |
opennode/waldur-mastermind | refs/heads/develop | src/waldur_mastermind/marketplace/management/__init__.py | 12133432 | |
dyyi/moneybook | refs/heads/master | venv/Lib/site-packages/django/contrib/gis/geos/io.py | 588 | """
Module that holds classes for performing I/O operations on GEOS geometry
objects. Specifically, this has Python implementations of WKB/WKT
reader and writer classes.
"""
from django.contrib.gis.geos.geometry import GEOSGeometry
from django.contrib.gis.geos.prototypes.io import (
WKBWriter, WKTWriter, _WKBReader, _WKTReader,
)
# Public API: the writer classes re-exported from prototypes.io plus the
# GEOSGeometry-returning reader subclasses defined in this module.
__all__ = ['WKBWriter', 'WKTWriter', 'WKBReader', 'WKTReader']
# Public classes for (WKB|WKT)Reader, which return GEOSGeometry
class WKBReader(_WKBReader):
    def read(self, wkb):
        "Return a GEOSGeometry parsed from the given WKB buffer."
        raw_geom = super(WKBReader, self).read(wkb)
        return GEOSGeometry(raw_geom)
class WKTReader(_WKTReader):
    def read(self, wkt):
        "Return a GEOSGeometry parsed from the given WKT string."
        raw_geom = super(WKTReader, self).read(wkt)
        return GEOSGeometry(raw_geom)
|
yetu/repotools | refs/heads/master | third_party/gsutil/gslib/commands/getwebcfg.py | 51 | # Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from gslib.command import Command
from gslib.command import COMMAND_NAME
from gslib.command import COMMAND_NAME_ALIASES
from gslib.command import CONFIG_REQUIRED
from gslib.command import FILE_URIS_OK
from gslib.command import MAX_ARGS
from gslib.command import MIN_ARGS
from gslib.command import PROVIDER_URIS_OK
from gslib.command import SUPPORTED_SUB_ARGS
from gslib.command import URIS_START_ARG
from gslib.exception import CommandException
from gslib.help_provider import HELP_NAME
from gslib.help_provider import HELP_NAME_ALIASES
from gslib.help_provider import HELP_ONE_LINE_SUMMARY
from gslib.help_provider import HELP_TEXT
from gslib.help_provider import HelpType
from gslib.help_provider import HELP_TYPE
from xml.dom.minidom import parseString as XmlParseString
_detailed_help_text = ("""
<B>SYNOPSIS</B>
gsutil getwebcfg bucket_uri
<B>DESCRIPTION</B>
The Website Configuration feature enables you to configure a Google Cloud
Storage bucket to simulate the behavior of a static website. You can define
main pages or directory indices (for example, index.html) for buckets and
"directories". Also, you can define a custom error page in case a requested
resource does not exist.
The gsutil getwebcfg command gets the web semantics configuration for a
bucket, and displays an XML representation of the configuration.
In Google Cloud Storage, this would look like:
<?xml version="1.0" ?>
<WebsiteConfiguration>
<MainPageSuffix>
index.html
</MainPageSuffix>
<NotFoundPage>
404.html
</NotFoundPage>
</WebsiteConfiguration>
""")
class GetWebcfgCommand(Command):
  """Implementation of gsutil getwebcfg command."""
  # Command specification (processed by parent class).
  command_spec = {
    # Name of command.
    COMMAND_NAME : 'getwebcfg',
    # List of command name aliases.
    COMMAND_NAME_ALIASES : [],
    # Min number of args required by this command.
    MIN_ARGS : 1,
    # Max number of args required by this command, or NO_MAX.
    MAX_ARGS : 1,
    # Getopt-style string specifying acceptable sub args.
    SUPPORTED_SUB_ARGS : '',
    # True if file URIs acceptable for this command.
    FILE_URIS_OK : False,
    # True if provider-only URIs acceptable for this command.
    PROVIDER_URIS_OK : False,
    # Index in args of first URI arg.
    URIS_START_ARG : 1,
    # True if must configure gsutil before running command.
    CONFIG_REQUIRED : True,
  }
  help_spec = {
    # Name of command or auxiliary help info for which this help applies.
    HELP_NAME : 'getwebcfg',
    # List of help name aliases.
    HELP_NAME_ALIASES : [],
    # Type of help.
    HELP_TYPE : HelpType.COMMAND_HELP,
    # One line summary of this help.
    HELP_ONE_LINE_SUMMARY : ('Get the website configuration '
                             'for one or more buckets'),
    # The full help text.
    HELP_TEXT : _detailed_help_text,
  }
  # Command entry point.
  def RunCommand(self):
    """Expand each URI argument via wildcards and print the website
    configuration of every matched bucket as pretty-printed XML.

    Returns:
      0 on success.

    Raises:
      CommandException: if a matched URI does not name a bucket, or if
        no URI matched at all.
    """
    uri_args = self.args
    # Iterate over URIs, expanding wildcards, and getting the website
    # configuration on each.
    some_matched = False
    for uri_str in uri_args:
      for blr in self.WildcardIterator(uri_str):
        uri = blr.GetUri()
        if not uri.names_bucket():
          raise CommandException('URI %s must name a bucket for the %s command'
                                 % (str(uri), self.command_name))
        some_matched = True
        sys.stderr.write('Getting website config on %s...\n' % uri)
        # get_website_config() returns a 2-tuple; only the second element
        # (presumably the raw XML document body) is used here.
        _, xml_body = uri.get_website_config()
        sys.stdout.write(XmlParseString(xml_body).toprettyxml())
    if not some_matched:
      raise CommandException('No URIs matched')
    return 0
|
dxd214/TeamTalk | refs/heads/master | win-client/3rdParty/src/json/test/generate_expected.py | 257 | import glob
import os.path
for path in glob.glob( '*.json' ):
text = file(path,'rt').read()
target = os.path.splitext(path)[0] + '.expected'
if os.path.exists( target ):
print 'skipping:', target
else:
print 'creating:', target
file(target,'wt').write(text)
|
AkA84/edx-platform | refs/heads/master | lms/djangoapps/discussion_api/tests/test_pagination.py | 113 | """
Tests for Discussion API pagination support
"""
from unittest import TestCase
from django.test import RequestFactory
from discussion_api.pagination import get_paginated_data
class PaginationSerializerTest(TestCase):
    """Tests for the Discussion API's paginated response format."""
    # Template for the next/previous links the API is expected to emit.
    BASE_URL = "http://testserver/test?page=%d"
    def do_case(self, objects, page_num, num_pages, expected):
        """
        Issue a dummy GET request and assert that get_paginated_data,
        called with the given arguments, produces `expected`.
        """
        dummy_request = RequestFactory().get("/test")
        result = get_paginated_data(dummy_request, objects, page_num, num_pages)
        self.assertEqual(result, expected)
    def _page(self, results, next_page=None, previous_page=None):
        """Build the payload expected for a single page of results."""
        return {
            "next": self.BASE_URL % next_page if next_page else None,
            "previous": self.BASE_URL % previous_page if previous_page else None,
            "results": results,
        }
    def test_empty(self):
        self.do_case([], 1, 0, self._page([]))
    def test_only_page(self):
        self.do_case(["foo"], 1, 1, self._page(["foo"]))
    def test_first_of_many(self):
        self.do_case(["foo"], 1, 3, self._page(["foo"], next_page=2))
    def test_last_of_many(self):
        self.do_case(["foo"], 3, 3, self._page(["foo"], previous_page=2))
    def test_middle_of_many(self):
        self.do_case(["foo"], 2, 3,
                     self._page(["foo"], next_page=3, previous_page=1))
|
the-duck/that-startpage-rocks | refs/heads/master | lib/jinja2/environment.py | 332 | # -*- coding: utf-8 -*-
"""
jinja2.environment
~~~~~~~~~~~~~~~~~~
Provides a class that holds runtime and parsing time options.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
from jinja2 import nodes
from jinja2.defaults import BLOCK_START_STRING, \
BLOCK_END_STRING, VARIABLE_START_STRING, VARIABLE_END_STRING, \
COMMENT_START_STRING, COMMENT_END_STRING, LINE_STATEMENT_PREFIX, \
LINE_COMMENT_PREFIX, TRIM_BLOCKS, NEWLINE_SEQUENCE, \
DEFAULT_FILTERS, DEFAULT_TESTS, DEFAULT_NAMESPACE, \
KEEP_TRAILING_NEWLINE, LSTRIP_BLOCKS
from jinja2.lexer import get_lexer, TokenStream
from jinja2.parser import Parser
from jinja2.nodes import EvalContext
from jinja2.optimizer import optimize
from jinja2.compiler import generate, CodeGenerator
from jinja2.runtime import Undefined, new_context, Context
from jinja2.exceptions import TemplateSyntaxError, TemplateNotFound, \
TemplatesNotFound, TemplateRuntimeError
from jinja2.utils import import_string, LRUCache, Markup, missing, \
concat, consume, internalcode
from jinja2._compat import imap, ifilter, string_types, iteritems, \
text_type, reraise, implements_iterator, implements_to_string, \
get_next, encode_filename, PY2, PYPY
from functools import reduce
# for direct template usage we keep up to ten living environments; this
# LRU cache keys "spontaneous" environments (see below) on the tuple of
# constructor arguments they were created with.
_spontaneous_environments = LRUCache(10)
# the function used to create jinja traceback objects.  This is
# dynamically imported on the first exception in the exception handler.
_make_traceback = None
def get_spontaneous_environment(*args):
    """Return a spontaneous environment for the given constructor args.

    A spontaneous environment is an unnamed and (in theory) unaccessible
    environment used for templates generated from a string rather than
    the file system.  Identical argument tuples share one cached,
    `shared` environment; unhashable arguments get a fresh one each call.
    """
    try:
        cached = _spontaneous_environments.get(args)
    except TypeError:
        # Unhashable arguments cannot serve as a cache key.
        return Environment(*args)
    if cached is None:
        cached = Environment(*args)
        cached.shared = True
        _spontaneous_environments[args] = cached
    return cached
def create_cache(size):
    """Return the template cache matching `size`.

    A positive size yields a bounded LRUCache, zero disables caching
    entirely (None), and a negative size means an unbounded cache
    (a plain dict).
    """
    if size < 0:
        return {}
    return LRUCache(size) if size else None
def copy_cache(cache):
    """Return an empty cache of the same kind and capacity as `cache`."""
    if cache is None:
        return None
    # A plain dict means "unbounded cache"; an empty dict preserves that.
    if type(cache) is dict:
        return {}
    return LRUCache(cache.capacity)
def load_extensions(environment, extensions):
    """Instantiate the given extensions and bind them to `environment`.

    Entries may be extension classes or dotted import paths.  Returns a
    dict mapping each extension's identifier to its instance.
    """
    loaded = {}
    for ext in extensions:
        if isinstance(ext, string_types):
            ext = import_string(ext)
        loaded[ext.identifier] = ext(environment)
    return loaded
def _environment_sanity_check(environment):
    """Perform a sanity check on the environment and return it unchanged."""
    assert issubclass(environment.undefined, Undefined), 'undefined must ' \
        'be a subclass of undefined because filters depend on it.'
    # NOTE(review): this chained comparison only checks block != variable
    # and variable != comment; it does NOT compare block with comment
    # directly.  Possibly intentional/legacy — confirm before "fixing",
    # since tightening it would reject previously accepted configurations.
    assert environment.block_start_string != \
        environment.variable_start_string != \
        environment.comment_start_string, 'block, variable and comment ' \
        'start strings must be different'
    assert environment.newline_sequence in ('\r', '\r\n', '\n'), \
        'newline_sequence set to unknown line ending string.'
    return environment
class Environment(object):
r"""The core component of Jinja is the `Environment`. It contains
important shared variables like configuration, filters, tests,
globals and others. Instances of this class may be modified if
they are not shared and if no template was loaded so far.
Modifications on environments after the first template was loaded
will lead to surprising effects and undefined behavior.
Here are the possible initialization parameters:
`block_start_string`
The string marking the beginning of a block. Defaults to ``'{%'``.
`block_end_string`
The string marking the end of a block. Defaults to ``'%}'``.
`variable_start_string`
The string marking the beginning of a print statement.
Defaults to ``'{{'``.
`variable_end_string`
The string marking the end of a print statement. Defaults to
``'}}'``.
`comment_start_string`
The string marking the beginning of a comment. Defaults to ``'{#'``.
`comment_end_string`
The string marking the end of a comment. Defaults to ``'#}'``.
`line_statement_prefix`
If given and a string, this will be used as prefix for line based
statements. See also :ref:`line-statements`.
`line_comment_prefix`
If given and a string, this will be used as prefix for line based
comments. See also :ref:`line-statements`.
.. versionadded:: 2.2
`trim_blocks`
If this is set to ``True`` the first newline after a block is
removed (block, not variable tag!). Defaults to `False`.
`lstrip_blocks`
If this is set to ``True`` leading spaces and tabs are stripped
from the start of a line to a block. Defaults to `False`.
`newline_sequence`
The sequence that starts a newline. Must be one of ``'\r'``,
``'\n'`` or ``'\r\n'``. The default is ``'\n'`` which is a
useful default for Linux and OS X systems as well as web
applications.
`keep_trailing_newline`
Preserve the trailing newline when rendering templates.
The default is ``False``, which causes a single newline,
if present, to be stripped from the end of the template.
.. versionadded:: 2.7
`extensions`
List of Jinja extensions to use. This can either be import paths
as strings or extension classes. For more information have a
look at :ref:`the extensions documentation <jinja-extensions>`.
`optimized`
should the optimizer be enabled? Default is `True`.
`undefined`
:class:`Undefined` or a subclass of it that is used to represent
undefined values in the template.
`finalize`
A callable that can be used to process the result of a variable
expression before it is output. For example one can convert
`None` implicitly into an empty string here.
`autoescape`
If set to true the XML/HTML autoescaping feature is enabled by
default. For more details about autoescaping see
:class:`~jinja2.utils.Markup`. As of Jinja 2.4 this can also
be a callable that is passed the template name and has to
return `True` or `False` depending on autoescape should be
enabled by default.
.. versionchanged:: 2.4
`autoescape` can now be a function
`loader`
The template loader for this environment.
`cache_size`
The size of the cache. Per default this is ``400`` which means
that if more than 400 templates are loaded the loader will clean
out the least recently used template. If the cache size is set to
``0`` templates are recompiled all the time, if the cache size is
``-1`` the cache will not be cleaned.
.. versionchanged:: 2.8
The cache size was increased to 400 from a low 50.
`auto_reload`
Some loaders load templates from locations where the template
sources may change (ie: file system or database). If
`auto_reload` is set to `True` (default) every time a template is
requested the loader checks if the source changed and if yes, it
will reload the template. For higher performance it's possible to
disable that.
`bytecode_cache`
If set to a bytecode cache object, this object will provide a
cache for the internal Jinja bytecode so that templates don't
have to be parsed if they were not changed.
See :ref:`bytecode-cache` for more information.
"""
#: if this environment is sandboxed. Modifying this variable won't make
#: the environment sandboxed though. For a real sandboxed environment
#: have a look at jinja2.sandbox. This flag alone controls the code
#: generation by the compiler.
sandboxed = False
#: True if the environment is just an overlay
overlayed = False
#: the environment this environment is linked to if it is an overlay
linked_to = None
#: shared environments have this set to `True`. A shared environment
#: must not be modified
shared = False
#: these are currently EXPERIMENTAL undocumented features.
exception_handler = None
exception_formatter = None
#: the class that is used for code generation. See
#: :class:`~jinja2.compiler.CodeGenerator` for more information.
code_generator_class = CodeGenerator
#: the context class thatis used for templates. See
#: :class:`~jinja2.runtime.Context` for more information.
context_class = Context
def __init__(self,
block_start_string=BLOCK_START_STRING,
block_end_string=BLOCK_END_STRING,
variable_start_string=VARIABLE_START_STRING,
variable_end_string=VARIABLE_END_STRING,
comment_start_string=COMMENT_START_STRING,
comment_end_string=COMMENT_END_STRING,
line_statement_prefix=LINE_STATEMENT_PREFIX,
line_comment_prefix=LINE_COMMENT_PREFIX,
trim_blocks=TRIM_BLOCKS,
lstrip_blocks=LSTRIP_BLOCKS,
newline_sequence=NEWLINE_SEQUENCE,
keep_trailing_newline=KEEP_TRAILING_NEWLINE,
extensions=(),
optimized=True,
undefined=Undefined,
finalize=None,
autoescape=False,
loader=None,
cache_size=400,
auto_reload=True,
bytecode_cache=None):
# !!Important notice!!
# The constructor accepts quite a few arguments that should be
# passed by keyword rather than position. However it's important to
# not change the order of arguments because it's used at least
# internally in those cases:
# - spontaneous environments (i18n extension and Template)
# - unittests
# If parameter changes are required only add parameters at the end
# and don't change the arguments (or the defaults!) of the arguments
# existing already.
# lexer / parser information
self.block_start_string = block_start_string
self.block_end_string = block_end_string
self.variable_start_string = variable_start_string
self.variable_end_string = variable_end_string
self.comment_start_string = comment_start_string
self.comment_end_string = comment_end_string
self.line_statement_prefix = line_statement_prefix
self.line_comment_prefix = line_comment_prefix
self.trim_blocks = trim_blocks
self.lstrip_blocks = lstrip_blocks
self.newline_sequence = newline_sequence
self.keep_trailing_newline = keep_trailing_newline
# runtime information
self.undefined = undefined
self.optimized = optimized
self.finalize = finalize
self.autoescape = autoescape
# defaults
self.filters = DEFAULT_FILTERS.copy()
self.tests = DEFAULT_TESTS.copy()
self.globals = DEFAULT_NAMESPACE.copy()
# set the loader provided
self.loader = loader
self.cache = create_cache(cache_size)
self.bytecode_cache = bytecode_cache
self.auto_reload = auto_reload
# load extensions
self.extensions = load_extensions(self, extensions)
_environment_sanity_check(self)
def add_extension(self, extension):
"""Adds an extension after the environment was created.
.. versionadded:: 2.5
"""
self.extensions.update(load_extensions(self, [extension]))
def extend(self, **attributes):
"""Add the items to the instance of the environment if they do not exist
yet. This is used by :ref:`extensions <writing-extensions>` to register
callbacks and configuration values without breaking inheritance.
"""
for key, value in iteritems(attributes):
if not hasattr(self, key):
setattr(self, key, value)
def overlay(self, block_start_string=missing, block_end_string=missing,
variable_start_string=missing, variable_end_string=missing,
comment_start_string=missing, comment_end_string=missing,
line_statement_prefix=missing, line_comment_prefix=missing,
trim_blocks=missing, lstrip_blocks=missing,
extensions=missing, optimized=missing,
undefined=missing, finalize=missing, autoescape=missing,
loader=missing, cache_size=missing, auto_reload=missing,
bytecode_cache=missing):
"""Create a new overlay environment that shares all the data with the
current environment except for cache and the overridden attributes.
Extensions cannot be removed for an overlayed environment. An overlayed
environment automatically gets all the extensions of the environment it
is linked to plus optional extra extensions.
Creating overlays should happen after the initial environment was set
up completely. Not all attributes are truly linked, some are just
copied over so modifications on the original environment may not shine
through.
"""
args = dict(locals())
del args['self'], args['cache_size'], args['extensions']
rv = object.__new__(self.__class__)
rv.__dict__.update(self.__dict__)
rv.overlayed = True
rv.linked_to = self
for key, value in iteritems(args):
if value is not missing:
setattr(rv, key, value)
if cache_size is not missing:
rv.cache = create_cache(cache_size)
else:
rv.cache = copy_cache(self.cache)
rv.extensions = {}
for key, value in iteritems(self.extensions):
rv.extensions[key] = value.bind(rv)
if extensions is not missing:
rv.extensions.update(load_extensions(rv, extensions))
return _environment_sanity_check(rv)
lexer = property(get_lexer, doc="The lexer for this environment.")
def iter_extensions(self):
"""Iterates over the extensions by priority."""
return iter(sorted(self.extensions.values(),
key=lambda x: x.priority))
def getitem(self, obj, argument):
"""Get an item or attribute of an object but prefer the item."""
try:
return obj[argument]
except (TypeError, LookupError):
if isinstance(argument, string_types):
try:
attr = str(argument)
except Exception:
pass
else:
try:
return getattr(obj, attr)
except AttributeError:
pass
return self.undefined(obj=obj, name=argument)
def getattr(self, obj, attribute):
"""Get an item or attribute of an object but prefer the attribute.
Unlike :meth:`getitem` the attribute *must* be a bytestring.
"""
try:
return getattr(obj, attribute)
except AttributeError:
pass
try:
return obj[attribute]
except (TypeError, LookupError, AttributeError):
return self.undefined(obj=obj, name=attribute)
def call_filter(self, name, value, args=None, kwargs=None,
context=None, eval_ctx=None):
"""Invokes a filter on a value the same way the compiler does it.
.. versionadded:: 2.7
"""
func = self.filters.get(name)
if func is None:
raise TemplateRuntimeError('no filter named %r' % name)
args = [value] + list(args or ())
if getattr(func, 'contextfilter', False):
if context is None:
raise TemplateRuntimeError('Attempted to invoke context '
'filter without context')
args.insert(0, context)
elif getattr(func, 'evalcontextfilter', False):
if eval_ctx is None:
if context is not None:
eval_ctx = context.eval_ctx
else:
eval_ctx = EvalContext(self)
args.insert(0, eval_ctx)
elif getattr(func, 'environmentfilter', False):
args.insert(0, self)
return func(*args, **(kwargs or {}))
def call_test(self, name, value, args=None, kwargs=None):
"""Invokes a test on a value the same way the compiler does it.
.. versionadded:: 2.7
"""
func = self.tests.get(name)
if func is None:
raise TemplateRuntimeError('no test named %r' % name)
return func(value, *(args or ()), **(kwargs or {}))
@internalcode
def parse(self, source, name=None, filename=None):
"""Parse the sourcecode and return the abstract syntax tree. This
tree of nodes is used by the compiler to convert the template into
executable source- or bytecode. This is useful for debugging or to
extract information from templates.
If you are :ref:`developing Jinja2 extensions <writing-extensions>`
this gives you a good overview of the node tree generated.
"""
try:
return self._parse(source, name, filename)
except TemplateSyntaxError:
exc_info = sys.exc_info()
self.handle_exception(exc_info, source_hint=source)
def _parse(self, source, name, filename):
"""Internal parsing function used by `parse` and `compile`."""
return Parser(self, source, name, encode_filename(filename)).parse()
def lex(self, source, name=None, filename=None):
"""Lex the given sourcecode and return a generator that yields
tokens as tuples in the form ``(lineno, token_type, value)``.
This can be useful for :ref:`extension development <writing-extensions>`
and debugging templates.
This does not perform preprocessing. If you want the preprocessing
of the extensions to be applied you have to filter source through
the :meth:`preprocess` method.
"""
source = text_type(source)
try:
return self.lexer.tokeniter(source, name, filename)
except TemplateSyntaxError:
exc_info = sys.exc_info()
self.handle_exception(exc_info, source_hint=source)
def preprocess(self, source, name=None, filename=None):
"""Preprocesses the source with all extensions. This is automatically
called for all parsing and compiling methods but *not* for :meth:`lex`
because there you usually only want the actual source tokenized.
"""
return reduce(lambda s, e: e.preprocess(s, name, filename),
self.iter_extensions(), text_type(source))
def _tokenize(self, source, name, filename=None, state=None):
"""Called by the parser to do the preprocessing and filtering
for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`.
"""
source = self.preprocess(source, name, filename)
stream = self.lexer.tokenize(source, name, filename, state)
for ext in self.iter_extensions():
stream = ext.filter_stream(stream)
if not isinstance(stream, TokenStream):
stream = TokenStream(stream, name, filename)
return stream
def _generate(self, source, name, filename, defer_init=False):
"""Internal hook that can be overridden to hook a different generate
method in.
.. versionadded:: 2.5
"""
return generate(source, self, name, filename, defer_init=defer_init)
def _compile(self, source, filename):
"""Internal hook that can be overridden to hook a different compile
method in.
.. versionadded:: 2.5
"""
return compile(source, filename, 'exec')
@internalcode
def compile(self, source, name=None, filename=None, raw=False,
defer_init=False):
"""Compile a node or template source code. The `name` parameter is
the load name of the template after it was joined using
:meth:`join_path` if necessary, not the filename on the file system.
the `filename` parameter is the estimated filename of the template on
the file system. If the template came from a database or memory this
can be omitted.
The return value of this method is a python code object. If the `raw`
parameter is `True` the return value will be a string with python
code equivalent to the bytecode returned otherwise. This method is
mainly used internally.
`defer_init` is use internally to aid the module code generator. This
causes the generated code to be able to import without the global
environment variable to be set.
.. versionadded:: 2.4
`defer_init` parameter added.
"""
source_hint = None
try:
if isinstance(source, string_types):
source_hint = source
source = self._parse(source, name, filename)
if self.optimized:
source = optimize(source, self)
source = self._generate(source, name, filename,
defer_init=defer_init)
if raw:
return source
if filename is None:
filename = '<template>'
else:
filename = encode_filename(filename)
return self._compile(source, filename)
except TemplateSyntaxError:
exc_info = sys.exc_info()
self.handle_exception(exc_info, source_hint=source_hint)
def compile_expression(self, source, undefined_to_none=True):
"""A handy helper method that returns a callable that accepts keyword
arguments that appear as variables in the expression. If called it
returns the result of the expression.
This is useful if applications want to use the same rules as Jinja
in template "configuration files" or similar situations.
Example usage:
>>> env = Environment()
>>> expr = env.compile_expression('foo == 42')
>>> expr(foo=23)
False
>>> expr(foo=42)
True
Per default the return value is converted to `None` if the
expression returns an undefined value. This can be changed
by setting `undefined_to_none` to `False`.
>>> env.compile_expression('var')() is None
True
>>> env.compile_expression('var', undefined_to_none=False)()
Undefined
.. versionadded:: 2.1
"""
parser = Parser(self, source, state='variable')
exc_info = None
try:
expr = parser.parse_expression()
if not parser.stream.eos:
raise TemplateSyntaxError('chunk after expression',
parser.stream.current.lineno,
None, None)
expr.set_environment(self)
except TemplateSyntaxError:
exc_info = sys.exc_info()
if exc_info is not None:
self.handle_exception(exc_info, source_hint=source)
body = [nodes.Assign(nodes.Name('result', 'store'), expr, lineno=1)]
template = self.from_string(nodes.Template(body, lineno=1))
return TemplateExpression(template, undefined_to_none)
def compile_templates(self, target, extensions=None, filter_func=None,
                      zip='deflated', log_function=None,
                      ignore_errors=True, py_compile=False):
    """Finds all the templates the loader can find, compiles them
    and stores them in `target`.  If `zip` is `None`, instead of in a
    zipfile, the templates will be stored in a directory.
    By default a deflate zip algorithm is used. To switch to
    the stored algorithm, `zip` can be set to ``'stored'``.

    `extensions` and `filter_func` are passed to :meth:`list_templates`.
    Each template returned will be compiled to the target folder or
    zipfile.

    By default template compilation errors are ignored. In case a
    log function is provided, errors are logged. If you want template
    syntax errors to abort the compilation you can set `ignore_errors`
    to `False` and you will get an exception on syntax errors.

    If `py_compile` is set to `True` .pyc files will be written to the
    target instead of standard .py files. This flag does not do anything
    on pypy and Python 3 where pyc files are not picked up by itself and
    don't give much benefit.

    .. versionadded:: 2.4
    """
    # NOTE: the `zip` parameter shadows the builtin within this method.
    # Imported lazily to avoid a circular import with the loaders module.
    from jinja2.loaders import ModuleLoader
    if log_function is None:
        # Default to a no-op so callers may omit `log_function`.
        log_function = lambda x: None
    if py_compile:
        if not PY2 or PYPY:
            # Byte-compiling is only meaningful on CPython 2.
            from warnings import warn
            warn(Warning('py_compile has no effect on pypy or Python 3'))
            py_compile = False
        else:
            import imp
            import marshal
            # .pyc header: import magic number plus a dummy timestamp.
            py_header = imp.get_magic() + \
                u'\xff\xff\xff\xff'.encode('iso-8859-15')
            # Python 3.3 added a source filesize to the header
            if sys.version_info >= (3, 3):
                py_header += u'\x00\x00\x00\x00'.encode('iso-8859-15')

    def write_file(filename, data, mode):
        # Write either into the open zip archive or into the target folder.
        if zip:
            info = ZipInfo(filename)
            info.external_attr = 0o755 << 16  # mark the zip entry rwxr-xr-x
            zip_file.writestr(info, data)
        else:
            f = open(os.path.join(target, filename), mode)
            try:
                f.write(data)
            finally:
                f.close()

    if zip is not None:
        from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED, ZIP_STORED
        zip_file = ZipFile(target, 'w', dict(deflated=ZIP_DEFLATED,
                                             stored=ZIP_STORED)[zip])
        log_function('Compiling into Zip archive "%s"' % target)
    else:
        if not os.path.isdir(target):
            os.makedirs(target)
        log_function('Compiling into folder "%s"' % target)

    try:
        for name in self.list_templates(extensions, filter_func):
            source, filename, _ = self.loader.get_source(self, name)
            try:
                code = self.compile(source, name, filename, True, True)
            except TemplateSyntaxError as e:
                if not ignore_errors:
                    raise
                log_function('Could not compile "%s": %s' % (name, e))
                continue
            # Use the module-style filename so ModuleLoader can find it.
            filename = ModuleLoader.get_module_filename(name)
            if py_compile:
                c = self._compile(code, encode_filename(filename))
                write_file(filename + 'c', py_header +
                           marshal.dumps(c), 'wb')
                log_function('Byte-compiled "%s" as %s' %
                             (name, filename + 'c'))
            else:
                write_file(filename, code, 'w')
                log_function('Compiled "%s" as %s' % (name, filename))
    finally:
        if zip:
            zip_file.close()

    log_function('Finished compiling templates')
def list_templates(self, extensions=None, filter_func=None):
    """Return the names of all templates the configured loader can see.

    Requires a loader implementing :meth:`~BaseLoader.list_templates`;
    otherwise a :exc:`TypeError` bubbles up.  The listing can be narrowed
    either with `extensions` (a collection of file extensions to keep) or
    with `filter_func` (a callable receiving a template name, returning
    `True` to keep it) -- but not with both at once.

    .. versionadded:: 2.4
    """
    names = self.loader.list_templates()

    if extensions is not None:
        if filter_func is not None:
            raise TypeError('either extensions or filter_func '
                            'can be passed, but not both')

        def filter_func(name):
            return '.' in name and name.rsplit('.', 1)[1] in extensions

    if filter_func is not None:
        names = [name for name in names if filter_func(name)]
    return names
def handle_exception(self, exc_info=None, rendered=False, source_hint=None):
    """Exception handling helper.  This is used internally to either raise
    rewritten exceptions or return a rendered traceback for the template.
    """
    global _make_traceback
    if exc_info is None:
        # Default to the exception currently being handled.
        exc_info = sys.exc_info()

    # the debugging module is imported when it's used for the first time.
    # we're doing a lot of stuff there and for applications that do not
    # get any exceptions in template rendering there is no need to load
    # all of that.
    if _make_traceback is None:
        from jinja2.debug import make_traceback as _make_traceback
    traceback = _make_traceback(exc_info, source_hint)
    if rendered and self.exception_formatter is not None:
        # Return a formatted traceback string instead of raising.
        return self.exception_formatter(traceback)
    if self.exception_handler is not None:
        self.exception_handler(traceback)
    exc_type, exc_value, tb = traceback.standard_exc_info
    # Re-raise with the rewritten, template-aware traceback.
    reraise(exc_type, exc_value, tb)
def join_path(self, template, parent):
    """Join a template name with the name of its parent template.

    All lookups are relative to the loader root by default, so the
    `template` argument is handed back untouched.  Subclasses that want
    includes/extends to resolve relative to the parent template can
    override this hook and compute the real template name here.
    """
    return template
@internalcode
def _load_template(self, name, globals):
    # Internal template loading with caching.  The cache key is the
    # template's filename when the loader can provide one, otherwise the
    # template name; `auto_reload` controls up-to-date re-checks.
    if self.loader is None:
        raise TypeError('no loader for this environment specified')
    try:
        # use abs path for cache key
        cache_key = self.loader.get_source(self, name)[1]
    except RuntimeError:
        # if loader does not implement get_source()
        cache_key = None
    # if template is not file, use name for cache key
    if cache_key is None:
        cache_key = name
    if self.cache is not None:
        template = self.cache.get(cache_key)
        if template is not None and (not self.auto_reload or
                                     template.is_up_to_date):
            # Fresh cache hit (or reload checking disabled): reuse it.
            return template
    template = self.loader.load(self, name, globals)
    if self.cache is not None:
        self.cache[cache_key] = template
    return template
@internalcode
def get_template(self, name, parent=None, globals=None):
    """Load a template from the loader and return a :class:`Template`.

    When `parent` is given, :meth:`join_path` is consulted first to
    resolve the real template name.  `globals` adds template-wide
    variables available in the render context.  Raises
    :exc:`TemplateNotFound` when the template does not exist.

    .. versionchanged:: 2.4
       A :class:`Template` object passed as `name` is returned unchanged.
    """
    if isinstance(name, Template):
        # Already compiled -- nothing to do.
        return name
    template_name = name if parent is None else self.join_path(name, parent)
    return self._load_template(template_name, self.make_globals(globals))
@internalcode
def select_template(self, names, parent=None, globals=None):
    """Try each template name in `names` in order and return the first
    one that loads successfully.  Raises :exc:`TemplatesNotFound` when
    the list is empty or none of the candidates exist.

    .. versionadded:: 2.3

    .. versionchanged:: 2.4
       `names` may contain :class:`Template` objects, which are returned
       unchanged.
    """
    if not names:
        raise TemplatesNotFound(message=u'Tried to select from an empty list '
                                        u'of templates.')
    template_globals = self.make_globals(globals)
    for candidate in names:
        if isinstance(candidate, Template):
            return candidate
        if parent is not None:
            candidate = self.join_path(candidate, parent)
        try:
            return self._load_template(candidate, template_globals)
        except TemplateNotFound:
            # Try the next candidate.
            continue
    raise TemplatesNotFound(names)
@internalcode
def get_or_select_template(self, template_name_or_list,
                           parent=None, globals=None):
    """Typecheck the argument and dispatch accordingly: a string goes to
    :meth:`get_template`, a :class:`Template` is returned unchanged, and
    any other iterable of names goes to :meth:`select_template`.

    .. versionadded:: 2.3
    """
    if isinstance(template_name_or_list, Template):
        return template_name_or_list
    if isinstance(template_name_or_list, string_types):
        return self.get_template(template_name_or_list, parent, globals)
    return self.select_template(template_name_or_list, parent, globals)
def from_string(self, source, globals=None, template_class=None):
    """Compile `source` and return it as a :class:`Template` object.

    `globals` is merged into the environment globals for this template;
    `template_class` may override the environment's template class.
    """
    template_cls = template_class or self.template_class
    return template_cls.from_code(self, self.compile(source),
                                  self.make_globals(globals), None)
def make_globals(self, d):
    """Return the globals dict for a template: the environment globals,
    optionally overlaid with the entries of `d`.  An empty or missing
    `d` yields the shared environment dict itself (no copy is made).
    """
    if d:
        return dict(self.globals, **d)
    return self.globals
class Template(object):
    """The central template object.  This class represents a compiled template
    and is used to evaluate it.

    Normally the template object is generated from an :class:`Environment` but
    it also has a constructor that makes it possible to create a template
    instance directly using the constructor.  It takes the same arguments as
    the environment constructor but it's not possible to specify a loader.

    Every template object has a few methods and members that are guaranteed
    to exist.  However it's important that a template object should be
    considered immutable.  Modifications on the object are not supported.

    Template objects created from the constructor rather than an environment
    do have an `environment` attribute that points to a temporary environment
    that is probably shared with other templates created with the constructor
    and compatible settings.

    >>> template = Template('Hello {{ name }}!')
    >>> template.render(name='John Doe') == u'Hello John Doe!'
    True
    >>> stream = template.stream(name='John Doe')
    >>> next(stream) == u'Hello John Doe!'
    True
    >>> next(stream)
    Traceback (most recent call last):
    ...
    StopIteration
    """

    def __new__(cls, source,
                block_start_string=BLOCK_START_STRING,
                block_end_string=BLOCK_END_STRING,
                variable_start_string=VARIABLE_START_STRING,
                variable_end_string=VARIABLE_END_STRING,
                comment_start_string=COMMENT_START_STRING,
                comment_end_string=COMMENT_END_STRING,
                line_statement_prefix=LINE_STATEMENT_PREFIX,
                line_comment_prefix=LINE_COMMENT_PREFIX,
                trim_blocks=TRIM_BLOCKS,
                lstrip_blocks=LSTRIP_BLOCKS,
                newline_sequence=NEWLINE_SEQUENCE,
                keep_trailing_newline=KEEP_TRAILING_NEWLINE,
                extensions=(),
                optimized=True,
                undefined=Undefined,
                finalize=None,
                autoescape=False):
        # Obtain a (shared) environment matching these settings, then
        # compile the source through it; the instance returned is actually
        # produced by Environment.from_string, not by this __new__.
        env = get_spontaneous_environment(
            block_start_string, block_end_string, variable_start_string,
            variable_end_string, comment_start_string, comment_end_string,
            line_statement_prefix, line_comment_prefix, trim_blocks,
            lstrip_blocks, newline_sequence, keep_trailing_newline,
            frozenset(extensions), optimized, undefined, finalize, autoescape,
            None, 0, False, None)
        return env.from_string(source, template_class=cls)

    @classmethod
    def from_code(cls, environment, code, globals, uptodate=None):
        """Creates a template object from compiled code and the globals.  This
        is used by the loaders and environment to create a template object.
        """
        namespace = {
            'environment': environment,
            '__file__': code.co_filename
        }
        # Running the compiled module code populates `namespace` with the
        # root render function, blocks and debug metadata.
        exec(code, namespace)
        rv = cls._from_namespace(environment, namespace, globals)
        rv._uptodate = uptodate
        return rv

    @classmethod
    def from_module_dict(cls, environment, module_dict, globals):
        """Creates a template object from a module.  This is used by the
        module loader to create a template object.

        .. versionadded:: 2.4
        """
        return cls._from_namespace(environment, module_dict, globals)

    @classmethod
    def _from_namespace(cls, environment, namespace, globals):
        # Shared constructor used by from_code/from_module_dict: wires a
        # generated namespace into a bare Template instance.
        t = object.__new__(cls)
        t.environment = environment
        t.globals = globals
        t.name = namespace['name']
        t.filename = namespace['__file__']
        t.blocks = namespace['blocks']

        # render function and module
        t.root_render_func = namespace['root']
        t._module = None

        # debug and loader helpers
        t._debug_info = namespace['debug_info']
        t._uptodate = None

        # store the reference
        namespace['environment'] = environment
        namespace['__jinja_template__'] = t

        return t

    def render(self, *args, **kwargs):
        """This method accepts the same arguments as the `dict` constructor:
        A dict, a dict subclass or some keyword arguments.  If no arguments
        are given the context will be empty.  These two calls do the same::

            template.render(knights='that say nih')
            template.render({'knights': 'that say nih'})

        This will return the rendered template as unicode string.
        """
        vars = dict(*args, **kwargs)
        try:
            return concat(self.root_render_func(self.new_context(vars)))
        except Exception:
            # Let the environment rewrite the traceback (and possibly
            # render it) before re-raising.
            exc_info = sys.exc_info()
        return self.environment.handle_exception(exc_info, True)

    def stream(self, *args, **kwargs):
        """Works exactly like :meth:`generate` but returns a
        :class:`TemplateStream`.
        """
        return TemplateStream(self.generate(*args, **kwargs))

    def generate(self, *args, **kwargs):
        """For very large templates it can be useful to not render the whole
        template at once but evaluate each statement after another and yield
        piece for piece.  This method basically does exactly that and returns
        a generator that yields one item after another as unicode strings.

        It accepts the same arguments as :meth:`render`.
        """
        vars = dict(*args, **kwargs)
        try:
            for event in self.root_render_func(self.new_context(vars)):
                yield event
        except Exception:
            exc_info = sys.exc_info()
        else:
            return
        # Only reached when rendering raised: yield whatever the exception
        # handler produced (or let it re-raise).
        yield self.environment.handle_exception(exc_info, True)

    def new_context(self, vars=None, shared=False, locals=None):
        """Create a new :class:`Context` for this template.  The vars
        provided will be passed to the template.  Per default the globals
        are added to the context.  If shared is set to `True` the data
        is passed as it to the context without adding the globals.

        `locals` can be a dict of local variables for internal usage.
        """
        return new_context(self.environment, self.name, self.blocks,
                           vars, shared, self.globals, locals)

    def make_module(self, vars=None, shared=False, locals=None):
        """This method works like the :attr:`module` attribute when called
        without arguments but it will evaluate the template on every call
        rather than caching it.  It's also possible to provide
        a dict which is then used as context.  The arguments are the same
        as for the :meth:`new_context` method.
        """
        return TemplateModule(self, self.new_context(vars, shared, locals))

    @property
    def module(self):
        """The template as module.  This is used for imports in the
        template runtime but is also useful if one wants to access
        exported template variables from the Python layer:

        >>> t = Template('{% macro foo() %}42{% endmacro %}23')
        >>> str(t.module)
        '23'
        >>> t.module.foo() == u'42'
        True
        """
        # Lazily rendered and cached on first access.
        if self._module is not None:
            return self._module
        self._module = rv = self.make_module()
        return rv

    def get_corresponding_lineno(self, lineno):
        """Return the source line number of a line number in the
        generated bytecode as they are not in sync.
        """
        # debug_info holds (template_line, code_line) pairs; walk backwards
        # to find the closest code line at or before `lineno`.
        for template_line, code_line in reversed(self.debug_info):
            if code_line <= lineno:
                return template_line
        return 1

    @property
    def is_up_to_date(self):
        """If this variable is `False` there is a newer version available."""
        if self._uptodate is None:
            # No up-to-date callback registered: always considered fresh.
            return True
        return self._uptodate()

    @property
    def debug_info(self):
        """The debug info mapping."""
        # Stored as '&'-joined 'template_line=code_line' integer pairs.
        return [tuple(imap(int, x.split('='))) for x in
                self._debug_info.split('&')]

    def __repr__(self):
        if self.name is None:
            # Unnamed (in-memory) template: identify by object id.
            name = 'memory:%x' % id(self)
        else:
            name = repr(self.name)
        return '<%s %s>' % (self.__class__.__name__, name)
@implements_to_string
class TemplateModule(object):
    """Represents an imported template.  All the exported names of the
    template are available as attributes on this object.  Additionally
    converting it into an unicode- or bytestrings renders the contents.
    """

    def __init__(self, template, context):
        # Render the template once and keep the output events so that
        # str()/__html__ can reproduce the rendered body later.
        self._body_stream = list(template.root_render_func(context))
        # Exported names (macros, variables) become attributes.
        self.__dict__.update(context.get_exported())
        self.__name__ = template.name

    def __html__(self):
        # Markup-aware rendering hook used by autoescaping frameworks.
        return Markup(concat(self._body_stream))

    def __str__(self):
        return concat(self._body_stream)

    def __repr__(self):
        if self.__name__ is None:
            # Unnamed (in-memory) template: identify by object id.
            name = 'memory:%x' % id(self)
        else:
            name = repr(self.__name__)
        return '<%s %s>' % (self.__class__.__name__, name)
class TemplateExpression(object):
    """Callable wrapper returned by
    :meth:`jinja2.Environment.compile_expression`.  Calling it renders the
    wrapped expression template and hands back the resulting value.
    """

    def __init__(self, template, undefined_to_none):
        self._template = template
        self._undefined_to_none = undefined_to_none

    def __call__(self, *args, **kwargs):
        context = self._template.new_context(dict(*args, **kwargs))
        # Drive the render function; the expression stores its value in
        # the context under 'result'.
        consume(self._template.root_render_func(context))
        result = context.vars['result']
        if self._undefined_to_none and isinstance(result, Undefined):
            return None
        return result
@implements_iterator
class TemplateStream(object):
    """A template stream works pretty much like an ordinary python generator
    but it can buffer multiple items to reduce the number of total iterations.
    Per default the output is unbuffered which means that for every unbuffered
    instruction in the template one unicode string is yielded.

    If buffering is enabled with a buffer size of 5, five items are combined
    into a new unicode string.  This is mainly useful if you are streaming
    big templates to a client via WSGI which flushes after each iteration.
    """

    def __init__(self, gen):
        self._gen = gen
        self.disable_buffering()

    def dump(self, fp, encoding=None, errors='strict'):
        """Dump the complete stream into a file or file-like object.
        Per default unicode strings are written, if you want to encode
        before writing specify an `encoding`.

        Example usage::

            Template('Hello {{ name }}!').stream(name='foo').dump('hello.html')
        """
        close = False
        if isinstance(fp, string_types):
            # A filename was given: open it ourselves and close afterwards.
            if encoding is None:
                encoding = 'utf-8'
            fp = open(fp, 'wb')
            close = True
        try:
            if encoding is not None:
                iterable = (x.encode(encoding, errors) for x in self)
            else:
                iterable = self
            if hasattr(fp, 'writelines'):
                fp.writelines(iterable)
            else:
                for item in iterable:
                    fp.write(item)
        finally:
            if close:
                fp.close()

    def disable_buffering(self):
        """Disable the output buffering."""
        self._next = get_next(self._gen)
        self.buffered = False

    def enable_buffering(self, size=5):
        """Enable buffering.  Buffer `size` items before yielding them."""
        if size <= 1:
            raise ValueError('buffer size too small')

        def generator(next):
            # NOTE: `next` here is a bound fetch callable, shadowing the
            # builtin of the same name inside this generator.
            buf = []
            c_size = 0
            push = buf.append

            while 1:
                try:
                    # Fill the buffer; only truthy (non-empty) chunks count
                    # towards the configured size.
                    while c_size < size:
                        c = next()
                        push(c)
                        if c:
                            c_size += 1
                except StopIteration:
                    if not c_size:
                        # Nothing buffered and source exhausted: stop.
                        return
                # Flush the buffered chunks as one concatenated string.
                yield concat(buf)
                del buf[:]
                c_size = 0

        self.buffered = True
        self._next = get_next(generator(get_next(self._gen)))

    def __iter__(self):
        return self

    def __next__(self):
        return self._next()
# Hook the default template class into the environment; custom template
# classes can still be supplied via subclassing or `from_string`.
Environment.template_class = Template
|
andaag/scikit-learn | refs/heads/master | sklearn/datasets/mlcomp.py | 289 | # Copyright (c) 2010 Olivier Grisel <olivier.grisel@ensta.org>
# License: BSD 3 clause
"""Glue code to load http://mlcomp.org data as a scikit.learn dataset"""
import os
import numbers
from sklearn.datasets.base import load_files
def _load_document_classification(dataset_path, metadata, set_=None, **kwargs):
    # Loader for the MLComp "DocumentClassification" format: delegates to
    # load_files, optionally descending into the selected subfolder
    # ('train', 'test' or 'raw').
    folder = dataset_path if set_ is None else os.path.join(dataset_path, set_)
    return load_files(folder, metadata.get('description'), **kwargs)
# Registry mapping an MLComp dataset "format" string to its loader callable.
LOADERS = {
    'DocumentClassification': _load_document_classification,
    # TODO: implement the remaining domain formats
}
def load_mlcomp(name_or_id, set_="raw", mlcomp_root=None, **kwargs):
    """Load a dataset as downloaded from http://mlcomp.org

    Parameters
    ----------
    name_or_id : int or str
        The integer id or the string name metadata of the MLComp
        dataset to load.

    set_ : str
        Select the portion to load: 'train', 'test' or 'raw'.

    mlcomp_root : str or None
        The filesystem path to the root folder where MLComp datasets
        are stored.  If None, the MLCOMP_DATASETS_HOME environment
        variable is looked up instead.

    **kwargs : domain specific kwargs to be passed to the dataset loader.

    Read more in the :ref:`User Guide <datasets>`.

    Returns
    -------
    data : Bunch
        Dictionary-like object, the interesting attributes are:
        'filenames', the files holding the raw to learn, 'target', the
        classification labels (integer index), 'target_names',
        the meaning of the labels, and 'DESCR', the full description of the
        dataset.

    Raises
    ------
    ValueError
        If the root folder cannot be resolved, the dataset cannot be
        found, or its format has no registered loader.

    Note on the lookup process: depending on the type of name_or_id,
    will choose between integer id lookup or metadata name lookup by
    looking at the unzipped archives and metadata file.

    TODO: implement zip dataset loading too
    """
    if mlcomp_root is None:
        try:
            mlcomp_root = os.environ['MLCOMP_DATASETS_HOME']
        except KeyError:
            raise ValueError("MLCOMP_DATASETS_HOME env variable is undefined")

    # Normalize the root path once up front.
    mlcomp_root = os.path.normpath(
        os.path.abspath(os.path.expanduser(mlcomp_root)))

    if not os.path.exists(mlcomp_root):
        raise ValueError("Could not find folder: " + mlcomp_root)

    # dataset lookup
    if isinstance(name_or_id, numbers.Integral):
        # id lookup: datasets are stored in folders named by their id
        dataset_path = os.path.join(mlcomp_root, str(name_or_id))
    else:
        # name based lookup: scan each dataset folder's metadata file for
        # a matching "name: <...>" line
        dataset_path = None
        expected_name_line = "name: " + name_or_id
        for dataset in os.listdir(mlcomp_root):
            metadata_file = os.path.join(mlcomp_root, dataset, 'metadata')
            if not os.path.exists(metadata_file):
                continue
            with open(metadata_file) as f:
                for line in f:
                    if line.strip() == expected_name_line:
                        dataset_path = os.path.join(mlcomp_root, dataset)
                        break
        if dataset_path is None:
            raise ValueError("Could not find dataset with metadata line: " +
                             expected_name_line)

    # loading the dataset metadata ("key: value" lines)
    metadata = dict()
    metadata_file = os.path.join(dataset_path, 'metadata')
    if not os.path.exists(metadata_file):
        raise ValueError(dataset_path + ' is not a valid MLComp dataset')
    with open(metadata_file) as f:
        for line in f:
            if ":" in line:
                key, value = line.split(":", 1)
                metadata[key.strip()] = value.strip()

    # Renamed from `format` to avoid shadowing the builtin; the default
    # previously carried an 'unknow' typo that leaked into the error message.
    dataset_format = metadata.get('format', 'unknown')
    loader = LOADERS.get(dataset_format)
    if loader is None:
        raise ValueError("No loader implemented for format: " + dataset_format)
    return loader(dataset_path, metadata, set_=set_, **kwargs)
|
abenzbiria/clients_odoo | refs/heads/master | addons/sale_service/__openerp__.py | 30 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Odoo/OpenERP addon manifest for the sale_service module: metadata,
# dependencies and data files picked up by the addon loader.
{
    'name': 'Create Tasks on SO',
    'version': '1.0',
    'category': 'Project Management',
    'description': """
Automatically creates project tasks from procurement lines.
===========================================================
This module will automatically create a new task for each procurement order line
(e.g. for sale order lines), if the corresponding product meets the following
characteristics:
* Product Type = Service
* Procurement Method (Order fulfillment) = MTO (Make to Order)
* Supply/Procurement Method = Manufacture
If on top of that a projet is specified on the product form (in the Procurement
tab), then the new task will be created in that specific project. Otherwise, the
new task will not belong to any project, and may be added to a project manually
later.
When the project task is completed or cancelled, the corresponding procurement
is updated accordingly. For example, if this procurement corresponds to a sale
order line, the sale order line will be considered delivered when the task is
completed.
""",
    'author': 'OpenERP SA',
    'website': 'http://www.openerp.com',
    'images': ['images/product.jpeg', 'images/task_from_SO.jpeg'],
    # procurement_jit ensures tasks are created as soon as the SO confirms.
    'depends': ['project', 'procurement', 'sale', 'procurement_jit'],
    'data': ['views/sale_service_view.xml'],
    'demo': ['demo/sale_service_demo.xml'],
    'test': ['test/project_task_procurement.yml'],
    'installable': True,
    'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
dahlstrom-g/intellij-community | refs/heads/master | python/testData/inspections/ChainedComparison6.py | 35 | class A:
def foo(self):
if index >= self.current and<caret> index - self.history_length < self.current:
pass |
enthought/pyside | refs/heads/master | tests/QtOpenGL/qglwidget_test.py | 6 | import unittest
from PySide.QtGui import *
from PySide.QtOpenGL import *
class TestQGLWidget (unittest.TestCase):
    # Smoke test for the PySide QGLWidget.bindTexture overloads.

    def testIt(self):
        """Just test if the bindTexture(*, GLenum, GLint) methods overloads exists"""
        # A QApplication must exist before any widget can be created.
        app = QApplication([])
        img = QImage()
        w = QGLWidget()
        a = w.bindTexture(img, 0, 0) # ok if it throws nothing.. :-)

if __name__ == "__main__":
    unittest.main()
|
akash1808/python-novaclient | refs/heads/master | novaclient/v1_1/security_group_default_rules.py | 4 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Security group default rules interface.
"""
from novaclient import base
from novaclient import exceptions
from novaclient.openstack.common.gettextutils import _
class SecurityGroupDefaultRule(base.Resource):
    """A single security group default rule returned by the Nova API."""

    def __str__(self):
        # Rules are identified by their numeric id.
        return str(self.id)

    def delete(self):
        # Delegate deletion to the manager that produced this resource.
        self.manager.delete(self)
class SecurityGroupDefaultRuleManager(base.Manager):
    """Manager for the os-security-group-default-rules API extension."""

    resource_class = SecurityGroupDefaultRule

    def create(self, ip_protocol=None, from_port=None, to_port=None,
               cidr=None):
        """
        Create a security group default rule

        :param ip_protocol: IP protocol, one of 'tcp', 'udp' or 'icmp'
        :param from_port: Source port
        :param to_port: Destination port
        :param cidr: Destination IP address(es) in CIDR notation
        :raises CommandError: if a port is not an integer or the protocol
            is missing or unsupported
        """
        try:
            from_port = int(from_port)
        except (TypeError, ValueError):
            raise exceptions.CommandError(_("From port must be an integer."))
        try:
            to_port = int(to_port)
        except (TypeError, ValueError):
            raise exceptions.CommandError(_("To port must be an integer."))
        # Guard against a missing protocol: previously `None.upper()`
        # crashed with AttributeError instead of reporting a usage error.
        if ip_protocol is None or \
                ip_protocol.upper() not in ['TCP', 'UDP', 'ICMP']:
            raise exceptions.CommandError(_("Ip protocol must be 'tcp', 'udp'"
                                            ", or 'icmp'."))

        body = {"security_group_default_rule": {
            "ip_protocol": ip_protocol,
            "from_port": from_port,
            "to_port": to_port,
            "cidr": cidr}}

        return self._create('/os-security-group-default-rules', body,
                            'security_group_default_rule')

    def delete(self, rule):
        """
        Delete a security group default rule

        :param rule: The security group default rule to delete (ID or Class)
        """
        self._delete('/os-security-group-default-rules/%s' % base.getid(rule))

    def list(self):
        """
        Get a list of all security group default rules

        :rtype: list of :class:`SecurityGroupDefaultRule`
        """
        return self._list('/os-security-group-default-rules',
                          'security_group_default_rules')
|
gciteam6/xgboost | refs/heads/master | src/data/base.py | 1 | # Built-in modules
from copy import deepcopy
import csv
from os import pardir, path, makedirs
import datetime
# Third-party modules
import numpy as np
import pandas as pd
import bloscpack as bp
# Project directory layout (data/raw -> data/interim -> data/processed).
PROJECT_ROOT_PATH = path.join(path.dirname(__file__), pardir, pardir)
RAW_DATA_BASEPATH = path.join(PROJECT_ROOT_PATH, "data/raw")
INTERIM_DATA_BASEPATH = path.join(PROJECT_ROOT_PATH, "data/interim")
PROCESSED_DATA_BASEPATH = path.join(PROJECT_ROOT_PATH, "data/processed")

# Regex extracting "YYYYMMDDhhmm"-style timestamps from string columns.
# NOTE(review): consider a raw string (r"...") to avoid invalid-escape
# warnings on newer Python versions.
DATETIME_FORMAT = "(?P<year>\d{4})(?P<month>\d{1,2})(?P<day>\d{1,2})(?P<hour>\d{2})(?P<minute>\d{2})"

# Datetime ranges of the train/test splits at 10-minute resolution.
TRAIN_DATE_RANGE = (
    pd.to_datetime("2012-01-01 00:10:00"),
    pd.to_datetime("2016-01-01 00:00:00")
)
TEST_DATE_RANGE = (
    pd.to_datetime("2016-01-01 00:10:00"),
    pd.to_datetime("2017-04-01 00:00:00")
)

# Baseline keyword arguments for TSV I/O with pandas.
KWARGS_READ_CSV_BASE = {
    "sep": "\t",
    "header": 0,
    "na_values": ['', ' ']
}
KWARGS_TO_CSV_BASE = {
    "sep": "\t"
}
KWARGS_OUTER_MERGE = {
    "how": "outer",
    "left_index": True,
    "right_index": True
}

# Column labels: degree/minute coordinate pairs and their decimal versions.
LABEL_LAT_HOUR, LABEL_LAT_MINUTE = "lat1", "lat2"
LABEL_LNG_HOUR, LABEL_LNG_MINUTE = "lng1", "lng2"
LABEL_LAT_DECIMAL, LABEL_LNG_DECIMAL = "lat_dec", "lng_dec"
class PathHandlerBase(object):
    """Base class exposing the project's data-directory layout plus a
    helper that absolutizes a path and ensures its parent folder exists.
    """

    def __init__(self):
        self.PROJECT_ROOT_PATH = PROJECT_ROOT_PATH
        self.RAW_DATA_BASEPATH = RAW_DATA_BASEPATH
        self.INTERIM_DATA_BASEPATH = INTERIM_DATA_BASEPATH
        self.PROCESSED_DATA_BASEPATH = PROCESSED_DATA_BASEPATH
        self.path = path

    @staticmethod
    def gen_abspath(relpath):
        """Absolutize `relpath`, creating its parent directory if needed."""
        absolute = path.abspath(relpath)
        makedirs(path.dirname(absolute), exist_ok=True)
        return absolute
class BloscpackMixin:
    # Thin wrappers around bloscpack (de)serialization of numpy arrays.

    @staticmethod
    def read_blp(serialized_filepath):
        # Load an ndarray previously packed with `to_blp`.
        return bp.unpack_ndarray_file(serialized_filepath)

    @staticmethod
    def to_blp(ndarray: np.ndarray, serialized_filepath):
        # Persist `ndarray` to `serialized_filepath` in bloscpack format.
        # (annotation fixed: `np.array` is a factory function, not a type)
        bp.pack_ndarray_file(ndarray, serialized_filepath)
class DataFrameHandlerBase(PathHandlerBase):
    """Adds pandas-oriented helpers (TSV I/O kwargs, datetime parsing and
    datetime index generation) on top of the path handling base class.
    """

    def __init__(self):
        super().__init__()
        self.DATETIME_FORMAT = DATETIME_FORMAT
        self.TRAIN_DATE_RANGE = TRAIN_DATE_RANGE
        self.TEST_DATE_RANGE = TEST_DATE_RANGE
        self.KWARGS_READ_CSV_BASE = KWARGS_READ_CSV_BASE
        self.KWARGS_TO_CSV_BASE = KWARGS_TO_CSV_BASE
        self.KWARGS_OUTER_MERGE = KWARGS_OUTER_MERGE

    def gen_read_csv_kwargs(self, kwargs_to_add: dict):
        """Merge `kwargs_to_add` into a copy of the read_csv defaults."""
        merged = deepcopy(self.KWARGS_READ_CSV_BASE)
        if kwargs_to_add is not None:
            merged.update(kwargs_to_add)
        return merged

    def gen_to_csv_kwargs(self, kwargs_to_add: dict):
        """Merge `kwargs_to_add` into a copy of the to_csv defaults."""
        merged = deepcopy(self.KWARGS_TO_CSV_BASE)
        if kwargs_to_add is not None:
            merged.update(kwargs_to_add)
        return merged

    def parse_datetime(self, df):
        """Extract DATETIME_FORMAT timestamps from a string Series and
        convert them to pandas datetimes."""
        return pd.to_datetime(df.str.extract(self.DATETIME_FORMAT, expand=False))

    @staticmethod
    def gen_datetime_index(start, end, freq_min: int = 10):
        """Build a DatetimeIndex covering [start, end] every `freq_min` minutes."""
        return pd.date_range(start, end, freq=pd.offsets.Minute(freq_min))

    @staticmethod
    def gen_norm_datetime(year, month, day, hour, minute, second):
        """Normalize a date plus possibly-overflowing h/m/s (e.g. hour 24)
        into a proper datetime via timedelta arithmetic."""
        base_day = datetime.datetime(year, month, day)
        return base_day + datetime.timedelta(hours=hour, minutes=minute,
                                             seconds=second)

    @staticmethod
    def add_annotations_to_column_names(df, attribute_name, location_name):
        """Suffix every column name with `_<attribute>_<location>`."""
        return [
            '_'.join((str(column_name), attribute_name, location_name))
            for column_name in df.columns
        ]
class LocationHandlerBase(DataFrameHandlerBase):
    """Loads an observation-point master file and offers helpers to
    convert sexagesimal coordinates and select points inside a mesh.
    """

    def __init__(self, master_filepath, **kwargs_location):
        super().__init__()
        self.location = pd.read_csv(
            master_filepath, **self.gen_read_csv_kwargs(kwargs_location)
        )
        # Derive decimal-degree columns from the degree/minute pairs.
        self.location[LABEL_LAT_DECIMAL] = self.location.apply(
            lambda row: self.cast_60_to_10(row[LABEL_LAT_HOUR],
                                           row[LABEL_LAT_MINUTE]),
            axis=1
        )
        self.location[LABEL_LNG_DECIMAL] = self.location.apply(
            lambda row: self.cast_60_to_10(row[LABEL_LNG_HOUR],
                                           row[LABEL_LNG_MINUTE]),
            axis=1
        )

    def get_near_observation_points(self, lat_mid, lng_mid, half_grid_size):
        """Rows of `self.location` inside the square mesh centered at
        (lat_mid, lng_mid) with half-width `half_grid_size` (inclusive).
        """
        lat_ok = self.location[LABEL_LAT_DECIMAL].apply(
            lambda lat: lat_mid - half_grid_size <= lat <= lat_mid + half_grid_size
        )
        lng_ok = self.location[LABEL_LNG_DECIMAL].apply(
            lambda lng: lng_mid - half_grid_size <= lng <= lng_mid + half_grid_size
        )
        in_mesh = [is_lat and is_lng for (is_lat, is_lng) in zip(lat_ok, lng_ok)]
        return self.location.loc[in_mesh, :]

    @staticmethod
    def cast_60_to_10(hour, minute, second=0):
        """Convert degrees/minutes/seconds to decimal degrees."""
        return hour + (minute / 60) + (second / 3600)
if __name__ == '__main__':
    # Smoke message when executed directly; the module is import-only.
    print("Here is src/data/base.py !")
|
Star2Billing/newfies-dialer | refs/heads/develop | newfies/apirest/tier_serializers.py | 4 | # -*- coding: utf-8 -*-
#
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2015 Star2Billing S.L.
#
# The primary maintainer of this project is
# Arezqui Belaid <info@star2billing.com>
#
from rest_framework import serializers
from callcenter.models import Queue, Tier
from agent.models import AgentProfile
class TierSerializer(serializers.HyperlinkedModelSerializer):
    """REST serializer for call-center :class:`Tier` objects.

    **Create**:

        CURL Usage::

            curl -u username:password --dump-header - -H "Content-Type:application/json" -X POST --data '{"level": "2", "position": "1"}' http://localhost:8000/rest-api/tier/

    **Read**:

        CURL Usage::

            curl -u username:password -H 'Accept: application/json' http://localhost:8000/rest-api/tier/

    **Update**:

        CURL Usage::

            curl -u username:password --dump-header - -H "Content-Type: application/json" -X PUT --data '{"level": "2"}' http://localhost:8000/rest-api/tier/%dtier-id%/

    The agent and queue choices offered to the client are restricted to
    those belonging to the requesting manager (see :meth:`get_fields`).
    """
    # Read-only: the manager is derived from the Tier instance itself.
    manager = serializers.Field(source='manager')

    class Meta:
        model = Tier

    def get_fields(self, *args, **kwargs):
        """Restrict the agent/queue querysets to the requesting manager."""
        fields = super(TierSerializer, self).get_fields(*args, **kwargs)
        current_user = self.context['request'].user
        fields['agent'].queryset = AgentProfile.objects.filter(
            manager=current_user)
        fields['queue'].queryset = Queue.objects.filter(manager=current_user)
        return fields
|
XiaosongWei/crosswalk-test-suite | refs/heads/master | webapi/tct-csp-w3c-tests/inst.xpk.py | 456 | #!/usr/bin/env python
import os
import shutil
import glob
import time
import sys
import subprocess
import string
from optparse import OptionParser, make_option
# Directory containing this script; its basename doubles as the package name
# used to locate the test suite's data directory on the device.
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PKG_NAME = os.path.basename(SCRIPT_DIR)
# Parsed command-line options; populated by main().
PARAMETERS = None
#XW_ENV = "export DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/5000/dbus/user_bus_socket"
# Remote staging directory and per-package data directory on the target
# device; both are filled in by main() once the target user is known.
SRC_DIR = ""
PKG_SRC_DIR = ""
def doCMD(cmd):
# Do not need handle timeout in this short script, let tool do it
print "-->> \"%s\"" % cmd
output = []
cmd_return_code = 1
cmd_proc = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
while True:
output_line = cmd_proc.stdout.readline().strip("\r\n")
cmd_return_code = cmd_proc.poll()
if output_line == '' and cmd_return_code is not None:
break
sys.stdout.write("%s\n" % output_line)
sys.stdout.flush()
output.append(output_line)
return (cmd_return_code, output)
def updateCMD(cmd=None):
    """Wrap pkgcmd invocations so they run as the configured user with the
    session-bus environment (XW_ENV) exported; other commands pass through."""
    if "pkgcmd" not in cmd:
        return cmd
    return "su - %s -c '%s;%s'" % (PARAMETERS.user, XW_ENV, cmd)
def getUSERID():
    """Ask the target device for the numeric uid of the configured user.

    Returns the ``(exit_code, output_lines)`` tuple from doCMD.
    """
    if PARAMETERS.mode == "SDB":
        query = "sdb -s %s shell id -u %s" % (
            PARAMETERS.device, PARAMETERS.user)
    else:
        query = "ssh %s \"id -u %s\"" % (
            PARAMETERS.device, PARAMETERS.user)
    return doCMD(query)
def getPKGID(pkg_name=None):
    """Look up the installed package id for *pkg_name* via ``pkgcmd -l``.

    Returns the pkgid string, or None when the listing fails or the
    package is not installed.
    """
    listing = updateCMD('pkgcmd -l')
    if PARAMETERS.mode == "SDB":
        cmd = "sdb -s %s shell %s" % (PARAMETERS.device, listing)
    else:
        cmd = "ssh %s \"%s\"" % (PARAMETERS.device, listing)
    (code, output) = doCMD(cmd)
    if code != 0:
        return None
    # pkgcmd lists each package bracketed, e.g. "... pkg_name [...] pkgid [id] ..."
    wanted = "[" + pkg_name + "]"
    for line in output:
        if wanted in line:
            tokens = line.split()
            return tokens[tokens.index("pkgid") + 1].strip("[]")
    return None
def doRemoteCMD(cmd=None):
    """Execute *cmd* on the target device over sdb or ssh and return
    doCMD's ``(exit_code, output_lines)`` tuple."""
    wrapped = updateCMD(cmd)
    if PARAMETERS.mode == "SDB":
        remote = "sdb -s %s shell %s" % (PARAMETERS.device, wrapped)
    else:
        remote = "ssh %s \"%s\"" % (PARAMETERS.device, wrapped)
    return doCMD(remote)
def doRemoteCopy(src=None, dest=None):
    """Copy *src* from the host to *dest* on the target device.

    Returns True on success and False on failure.  Callers (instPKGs and
    the commented block there) test ``if not doRemoteCopy(...)`` and treat
    a falsy result as a failed copy, so success MUST map to True.
    """
    if PARAMETERS.mode == "SDB":
        cmd_prefix = "sdb -s %s push" % PARAMETERS.device
        cmd = "%s %s %s" % (cmd_prefix, src, dest)
    else:
        cmd = "scp -r %s %s:/%s" % (src, PARAMETERS.device, dest)
    (return_code, output) = doCMD(cmd)
    # Flush the device filesystem so the pushed file is durable before install.
    doRemoteCMD("sync")
    # BUG FIX: the original returned True on a non-zero (failed) exit code
    # and False on success — inverted relative to every caller's
    # ``if not doRemoteCopy(...)`` check, which made successful copies be
    # recorded as failures.
    return return_code == 0
def uninstPKGs():
    """Uninstall every .xpk bundled beside this script and remove the
    package's data directory on the device.  Returns True on full success."""
    ok = True
    for root, dirs, files in os.walk(SCRIPT_DIR):
        # Media fixtures are plain data, never installed packages.
        if root.endswith("mediasrc"):
            continue
        for name in files:
            if not name.endswith(".xpk"):
                continue
            pkg_id = getPKGID(os.path.basename(os.path.splitext(name)[0]))
            if not pkg_id:
                ok = False
                continue
            (code, output) = doRemoteCMD(
                "pkgcmd -u -t xpk -q -n %s" % pkg_id)
            # pkgcmd reports problems in its output, not its exit code.
            for line in output:
                if "Failure" in line:
                    ok = False
                    break
    (code, output) = doRemoteCMD(
        "rm -rf %s" % PKG_SRC_DIR)
    if code != 0:
        ok = False
    return ok
def instPKGs():
    """Push every bundled .xpk to the device, install it with pkgcmd, and
    delete the pushed file afterwards.  Returns True on full success."""
    ok = True
    (code, output) = doRemoteCMD(
        "mkdir -p %s" % PKG_SRC_DIR)
    if code != 0:
        ok = False
    for root, dirs, files in os.walk(SCRIPT_DIR):
        # Media fixtures are plain data, never installable packages.
        if root.endswith("mediasrc"):
            continue
        for name in files:
            if not name.endswith(".xpk"):
                continue
            if not doRemoteCopy(
                    os.path.join(root, name), "%s/%s" % (SRC_DIR, name)):
                ok = False
            (code, output) = doRemoteCMD(
                "pkgcmd -i -t xpk -q -p %s/%s" % (SRC_DIR, name))
            # The pushed bundle is no longer needed once installed.
            doRemoteCMD("rm -rf %s/%s" % (SRC_DIR, name))
            # pkgcmd reports problems in its output, not its exit code.
            for line in output:
                if "Failure" in line:
                    ok = False
                    break
    # Do some special copy/delete... steps
    '''
    (return_code, output) = doRemoteCMD(
        "mkdir -p %s/tests" % PKG_SRC_DIR)
    if return_code != 0:
        action_status = False
    if not doRemoteCopy("specname/tests", "%s/tests" % PKG_SRC_DIR):
        action_status = False
    '''
    return ok
def main():
    """Parse command-line options, locate the target device, export the
    session-bus environment for the target user, then install (-i) or
    uninstall (-u) the bundled packages.  Exits 1 on any failure."""
    try:
        usage = "usage: inst.py -i"
        opts_parser = OptionParser(usage=usage)
        opts_parser.add_option(
            "-m", dest="mode", action="store", help="Specify mode")
        opts_parser.add_option(
            "-s", dest="device", action="store", help="Specify device")
        opts_parser.add_option(
            "-i", dest="binstpkg", action="store_true", help="Install package")
        opts_parser.add_option(
            "-u", dest="buninstpkg", action="store_true", help="Uninstall package")
        opts_parser.add_option(
            "-a", dest="user", action="store", help="User name")
        global PARAMETERS
        (PARAMETERS, args) = opts_parser.parse_args()
    except Exception as e:
        print "Got wrong option: %s, exit ..." % e
        sys.exit(1)
    # Default to the Tizen application user.
    if not PARAMETERS.user:
        PARAMETERS.user = "app"
    global SRC_DIR, PKG_SRC_DIR
    SRC_DIR = "/home/%s/content" % PARAMETERS.user
    PKG_SRC_DIR = "%s/tct/opt/%s" % (SRC_DIR, PKG_NAME)
    if not PARAMETERS.mode:
        PARAMETERS.mode = "SDB"
    if PARAMETERS.mode == "SDB":
        # No device given: pick the first one "sdb devices" reports.
        if not PARAMETERS.device:
            (return_code, output) = doCMD("sdb devices")
            for line in output:
                if str.find(line, "\tdevice") != -1:
                    PARAMETERS.device = line.split("\t")[0]
                    break
    else:
        # Any mode other than SDB is treated as SSH.
        PARAMETERS.mode = "SSH"
    if not PARAMETERS.device:
        print "No device provided"
        sys.exit(1)
    # Resolve the target user's uid so XW_ENV can point pkgcmd at the
    # right per-user session bus socket.
    user_info = getUSERID()
    re_code = user_info[0]
    if re_code == 0:
        global XW_ENV
        userid = user_info[1][0]
        XW_ENV = "export DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/%s/dbus/user_bus_socket" % str(
            userid)
    else:
        print "[Error] cmd commands error : %s" % str(user_info[1])
        sys.exit(1)
    if PARAMETERS.binstpkg and PARAMETERS.buninstpkg:
        print "-i and -u are conflict"
        sys.exit(1)
    if PARAMETERS.buninstpkg:
        if not uninstPKGs():
            sys.exit(1)
    else:
        # Install is the default action when -u is not given.
        if not instPKGs():
            sys.exit(1)
if __name__ == "__main__":
    main()
    # Only reached when main() did not already sys.exit(1): report success.
    sys.exit(0)
|
patrickstocklin/chattR | refs/heads/master | lib/python2.7/site-packages/nltk/probability.py | 8 | # -*- coding: utf-8 -*-
# Natural Language Toolkit: Probability and Statistics
#
# Copyright (C) 2001-2015 NLTK Project
# Author: Edward Loper <edloper@gmail.com>
# Steven Bird <stevenbird1@gmail.com> (additions)
# Trevor Cohn <tacohn@cs.mu.oz.au> (additions)
# Peter Ljunglöf <peter.ljunglof@heatherleaf.se> (additions)
# Liang Dong <ldong@clemson.edu> (additions)
# Geoffrey Sampson <sampson@cantab.net> (additions)
# Ilia Kurenkov <ilia.kurenkov@gmail.com> (additions)
#
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
"""
Classes for representing and processing probabilistic information.
The ``FreqDist`` class is used to encode "frequency distributions",
which count the number of times that each outcome of an experiment
occurs.
The ``ProbDistI`` class defines a standard interface for "probability
distributions", which encode the probability of each outcome for an
experiment. There are two types of probability distribution:
- "derived probability distributions" are created from frequency
distributions. They attempt to model the probability distribution
that generated the frequency distribution.
- "analytic probability distributions" are created directly from
parameters (such as variance).
The ``ConditionalFreqDist`` class and ``ConditionalProbDistI`` interface
are used to encode conditional distributions. Conditional probability
distributions can be derived or analytic; but currently the only
implementation of the ``ConditionalProbDistI`` interface is
``ConditionalProbDist``, a derived distribution.
"""
from __future__ import print_function, unicode_literals
import math
import random
import warnings
import array
from operator import itemgetter
from collections import defaultdict
from functools import reduce
from nltk import compat
from nltk.compat import Counter
from nltk.internals import raise_unorderable_types
# Finite stand-in for "log probability of zero": -1e300 is effectively
# negative infinity at double precision but survives arithmetic without
# raising or propagating actual inf values.
_NINF = float('-1e300')
##//////////////////////////////////////////////////////
## Frequency Distributions
##//////////////////////////////////////////////////////
@compat.python_2_unicode_compatible
class FreqDist(Counter):
    """
    A frequency distribution for the outcomes of an experiment. A
    frequency distribution records the number of times each outcome of
    an experiment has occurred. For example, a frequency distribution
    could be used to record the frequency of each word type in a
    document. Formally, a frequency distribution can be defined as a
    function mapping from each sample to the number of times that
    sample occurred as an outcome.
    Frequency distributions are generally constructed by running a
    number of experiments, and incrementing the count for a sample
    every time it is an outcome of an experiment. For example, the
    following code will produce a frequency distribution that encodes
    how often each word occurs in a text:
        >>> from nltk.tokenize import word_tokenize
        >>> from nltk.probability import FreqDist
        >>> sent = 'This is an example sentence'
        >>> fdist = FreqDist()
        >>> for word in word_tokenize(sent):
        ... fdist[word.lower()] += 1
    An equivalent way to do this is with the initializer:
        >>> fdist = FreqDist(word.lower() for word in word_tokenize(sent))
    """
    def __init__(self, samples=None):
        """
        Construct a new frequency distribution. If ``samples`` is
        given, then the frequency distribution will be initialized
        with the count of each object in ``samples``; otherwise, it
        will be initialized to be empty.
        In particular, ``FreqDist()`` returns an empty frequency
        distribution; and ``FreqDist(samples)`` first creates an empty
        frequency distribution, and then calls ``update`` with the
        list ``samples``.
        :param samples: The samples to initialize the frequency
            distribution with.
        :type samples: Sequence
        """
        # Counter does all the counting; FreqDist only layers the
        # statistics API (N, B, freq, ...) on top.
        Counter.__init__(self, samples)
    def N(self):
        """
        Return the total number of sample outcomes that have been
        recorded by this FreqDist. For the number of unique
        sample values (or bins) with counts greater than zero, use
        ``FreqDist.B()``.
        :rtype: int
        """
        return sum(self.values())
    def B(self):
        """
        Return the total number of sample values (or "bins") that
        have counts greater than zero. For the total
        number of sample outcomes recorded, use ``FreqDist.N()``.
        (FreqDist.B() is the same as len(FreqDist).)
        :rtype: int
        """
        return len(self)
    def hapaxes(self):
        """
        Return a list of all samples that occur once (hapax legomena)
        :rtype: list
        """
        return [item for item in self if self[item] == 1]
    def Nr(self, r, bins=None):
        # Convenience accessor: the number of samples with frequency
        # exactly ``r`` (see ``r_Nr`` for the full mapping).
        return self.r_Nr(bins)[r]
    def r_Nr(self, bins=None):
        """
        Return the dictionary mapping r to Nr, the number of samples with frequency r, where Nr > 0.
        :type bins: int
        :param bins: The number of possible sample outcomes. ``bins``
            is used to calculate Nr(0). In particular, Nr(0) is
            ``bins-self.B()``. If ``bins`` is not specified, it
            defaults to ``self.B()`` (so Nr(0) will be 0).
        :rtype: defaultdict(int) mapping int -> int
        """
        _r_Nr = defaultdict(int)
        for count in self.values():
            _r_Nr[count] += 1
        # Special case for Nr[0]:
        _r_Nr[0] = bins - self.B() if bins is not None else 0
        return _r_Nr
    def _cumulative_frequencies(self, samples):
        """
        Yield the cumulative frequencies of the specified samples, in
        the order given (this is a generator, not a list).
        :param samples: the samples whose frequencies should be returned.
        :type samples: any
        :rtype: iter(float)
        """
        cf = 0.0
        for sample in samples:
            cf += self[sample]
            yield cf
    # slightly odd nomenclature freq() if FreqDist does counts and ProbDist does probs,
    # here, freq() does probs
    def freq(self, sample):
        """
        Return the frequency of a given sample. The frequency of a
        sample is defined as the count of that sample divided by the
        total number of sample outcomes that have been recorded by
        this FreqDist. The count of a sample is defined as the
        number of times that sample outcome was recorded by this
        FreqDist. Frequencies are always real numbers in the range
        [0, 1].
        :param sample: the sample whose frequency
            should be returned.
        :type sample: any
        :rtype: float
        """
        if self.N() == 0:
            return 0
        return float(self[sample]) / self.N()
    def max(self):
        """
        Return the sample with the greatest number of outcomes in this
        frequency distribution. If two or more samples have the same
        number of outcomes, return one of them; which sample is
        returned is undefined.
        :return: The sample with the maximum number of outcomes in this
            frequency distribution.
        :rtype: any
        :raise ValueError: If this frequency distribution is empty
            (despite older docs, None is NOT returned in that case).
        """
        if len(self) == 0:
            raise ValueError('A FreqDist must have at least one sample before max is defined.')
        return self.most_common(1)[0][0]
    def plot(self, *args, **kwargs):
        """
        Plot samples from the frequency distribution
        displaying the most frequent sample first. If an integer
        parameter is supplied, stop after this many samples have been
        plotted. For a cumulative plot, specify cumulative=True.
        (Requires Matplotlib to be installed.)
        :param title: The title for the graph
        :type title: str
        :param cumulative: A flag to specify whether the plot is cumulative (default = False)
        :type title: bool
        """
        try:
            from matplotlib import pylab
        except ImportError:
            raise ValueError('The plot function requires matplotlib to be installed.'
                         'See http://matplotlib.org/')
        # No positional arg: plot every sample.
        if len(args) == 0:
            args = [len(self)]
        samples = [item for item, _ in self.most_common(*args)]
        cumulative = _get_kwarg(kwargs, 'cumulative', False)
        if cumulative:
            freqs = list(self._cumulative_frequencies(samples))
            ylabel = "Cumulative Counts"
        else:
            freqs = [self[sample] for sample in samples]
            ylabel = "Counts"
        # percents = [f * 100 for f in freqs] only in ProbDist?
        pylab.grid(True, color="silver")
        if not "linewidth" in kwargs:
            kwargs["linewidth"] = 2
        if "title" in kwargs:
            pylab.title(kwargs["title"])
            del kwargs["title"]
        pylab.plot(freqs, **kwargs)
        pylab.xticks(range(len(samples)), [compat.text_type(s) for s in samples], rotation=90)
        pylab.xlabel("Samples")
        pylab.ylabel(ylabel)
        pylab.show()
    def tabulate(self, *args, **kwargs):
        """
        Tabulate the given samples from the frequency distribution (cumulative),
        displaying the most frequent sample first. If an integer
        parameter is supplied, stop after this many samples have been
        plotted.
        :param samples: The samples to plot (default is all samples)
        :type samples: list
        """
        if len(args) == 0:
            args = [len(self)]
        samples = [item for item, _ in self.most_common(*args)]
        cumulative = _get_kwarg(kwargs, 'cumulative', False)
        if cumulative:
            freqs = list(self._cumulative_frequencies(samples))
        else:
            freqs = [self[sample] for sample in samples]
        # percents = [f * 100 for f in freqs] only in ProbDist?
        # First row: the sample labels; second row: their counts.
        for i in range(len(samples)):
            print("%4s" % samples[i], end=' ')
        print()
        for i in range(len(samples)):
            print("%4d" % freqs[i], end=' ')
        print()
    def copy(self):
        """
        Create a copy of this frequency distribution.
        :rtype: FreqDist
        """
        return self.__class__(self)
    def __le__(self, other):
        # Multiset-style partial order: self <= other iff every sample of
        # self also appears in other with at least the same count.
        if not isinstance(other, FreqDist):
            raise_unorderable_types("<=", self, other)
        return set(self).issubset(other) and all(self[key] <= other[key] for key in self)
    # @total_ordering doesn't work here, since the class inherits from a builtin class
    # NOTE(review): for INCOMPARABLE distributions (neither a subset of the
    # other) these definitions make both __ge__ and __gt__ return True,
    # which is not a consistent partial order — confirm callers never rely
    # on comparisons between incomparable FreqDists.
    __ge__ = lambda self, other: not self <= other or self == other
    __lt__ = lambda self, other: self <= other and not self == other
    __gt__ = lambda self, other: not self <= other
    def __repr__(self):
        """
        Return a string representation of this FreqDist.
        :rtype: string
        """
        return self.pformat()
    def pprint(self, maxlen=10, stream=None):
        """
        Print a string representation of this FreqDist to 'stream'
        :param maxlen: The maximum number of items to print
        :type maxlen: int
        :param stream: The stream to print to. stdout by default
        """
        print(self.pformat(maxlen=maxlen), file=stream)
    def pformat(self, maxlen=10):
        """
        Return a string representation of this FreqDist.
        :param maxlen: The maximum number of items to display
        :type maxlen: int
        :rtype: string
        """
        items = ['{0!r}: {1!r}'.format(*item) for item in self.most_common(maxlen)]
        if len(self) > maxlen:
            items.append('...')
        return 'FreqDist({{{0}}})'.format(', '.join(items))
    def __str__(self):
        """
        Return a string representation of this FreqDist.
        :rtype: string
        """
        return '<FreqDist with %d samples and %d outcomes>' % (len(self), self.N())
##//////////////////////////////////////////////////////
## Probability Distributions
##//////////////////////////////////////////////////////
class ProbDistI(object):
    """
    Abstract interface for probability distributions over the outcomes
    of an experiment.  A probability distribution maps each sample to a
    nonnegative real number, with all the numbers summing to 1.0.  A
    ``ProbDist`` is often used to model the experiment that generated
    an observed frequency distribution.
    """
    SUM_TO_ONE = True
    """True if the probabilities of the samples in this probability
    distribution will always sum to one."""
    def __init__(self):
        # The interface itself must never be instantiated directly;
        # concrete subclasses override __init__ as needed.
        if self.__class__ == ProbDistI:
            raise NotImplementedError("Interfaces can't be instantiated")
    def prob(self, sample):
        """
        Return the probability of ``sample``, a real number in [0, 1].
        :param sample: The sample whose probability
            should be returned.
        :type sample: any
        :rtype: float
        """
        raise NotImplementedError()
    def logprob(self, sample):
        """
        Return the base-2 logarithm of the probability of ``sample``.
        :param sample: The sample whose probability
            should be returned.
        :type sample: any
        :rtype: float
        """
        # Generic fallback built on prob(); zero probability maps to the
        # module's finite stand-in for -inf.
        probability = self.prob(sample)
        if probability == 0:
            return _NINF
        return math.log(probability, 2)
    def max(self):
        """
        Return a sample with maximal probability.  Ties are broken
        arbitrarily; which sample is returned is undefined.
        :rtype: any
        """
        raise NotImplementedError()
    def samples(self):
        """
        Return a list of all samples with nonzero probability.
        Use ``prob`` to find the probability of each sample.
        :rtype: list
        """
        raise NotImplementedError()
    # cf self.SUM_TO_ONE
    def discount(self):
        """
        Return the ratio by which counts are discounted on average: c*/c
        :rtype: float
        """
        # The base interface applies no discounting.
        return 0.0
    # Subclasses should define more efficient implementations of this,
    # where possible.
    def generate(self):
        """
        Return a randomly selected sample from this probability
        distribution; each sample ``samp`` is returned with probability
        ``self.prob(samp)``.
        """
        remaining = random.random()
        initial = remaining
        for sample in self.samples():
            remaining -= self.prob(sample)
            if remaining <= 0:
                return sample
            # allow for some rounding error:
            if remaining < .0001:
                return sample
        # Falling through means the probabilities summed to less than 1.
        if self.SUM_TO_ONE:
            warnings.warn("Probability distribution %r sums to %r; generate()"
                          " is returning an arbitrary sample." % (self, initial - remaining))
        return random.choice(list(self.samples()))
@compat.python_2_unicode_compatible
class UniformProbDist(ProbDistI):
    """
    A distribution that gives identical probability to every sample in
    a fixed set, and probability zero to everything else.
    """
    def __init__(self, samples):
        """
        Build a uniform distribution over ``samples``.
        :param samples: The samples that should be given uniform
            probability.
        :type samples: list
        :raise ValueError: If ``samples`` is empty.
        """
        if len(samples) == 0:
            raise ValueError('A Uniform probability distribution must '
                             'have at least one sample.')
        # Deduplicate first; each distinct sample gets 1/|set| mass.
        self._sampleset = set(samples)
        self._prob = 1.0 / len(self._sampleset)
        self._samples = list(self._sampleset)
    def prob(self, sample):
        if sample in self._sampleset:
            return self._prob
        return 0
    def max(self):
        # All samples tie, so any one will do; pick the first.
        return self._samples[0]
    def samples(self):
        return self._samples
    def __repr__(self):
        return '<UniformProbDist with %d samples>' % len(self._sampleset)
@compat.python_2_unicode_compatible
class RandomProbDist(ProbDistI):
    """
    A randomly generated probability distribution: each sample receives
    a random share of the probability mass, with the shares normalized
    to sum to 1 (a draw from a continuous uniform distribution).
    """
    def __init__(self, samples):
        if len(samples) == 0:
            raise ValueError('A probability distribution must '
                             'have at least one sample.')
        self._probs = self.unirand(samples)
        self._samples = list(self._probs.keys())
    @classmethod
    def unirand(cls, samples):
        """
        The key function that creates a randomized initial distribution
        that still sums to 1.  Returned as a dict of sample->prob so it
        can be passed to MutableProbDist and called with syntax identical
        to UniformProbDist.
        """
        weights = [random.random() for _ in range(len(samples))]
        total = sum(weights)
        for idx, w in enumerate(weights):
            weights[idx] = w / total
        # Floating-point renormalisation: any residual difference from 1
        # is tiny (near machine epsilon), so folding it into the last
        # entry cannot push a probability outside (0, 1).
        residual = sum(weights) - 1
        if residual != 0:
            weights[-1] -= residual
        return dict((s, weights[i]) for i, s in enumerate(samples))
    def prob(self, sample):
        return self._probs.get(sample, 0)
    def samples(self):
        return self._samples
    def __repr__(self):
        return '<RandomUniformProbDist with %d samples>' %len(self._probs)
@compat.python_2_unicode_compatible
class DictionaryProbDist(ProbDistI):
    """
    A probability distribution backed by an explicit dictionary mapping
    samples to probabilities (or to base-2 log probabilities when
    ``log`` is true).
    """
    def __init__(self, prob_dict=None, log=False, normalize=False):
        """
        Build a distribution from ``prob_dict``, which maps values to
        probabilities (or to log probabilities when ``log`` is true).
        When ``normalize`` is true, the values are rescaled by a
        constant factor so they sum to 1.
        Called without arguments, the resulting distribution assigns
        zero probability to every value.
        """
        self._prob_dict = {} if prob_dict is None else prob_dict.copy()
        self._log = log
        if not normalize:
            return
        if len(prob_dict) == 0:
            raise ValueError('A DictionaryProbDist must have at least one sample '
                             'before it can be normalized.')
        if log:
            value_sum = sum_logs(list(self._prob_dict.values()))
            if value_sum <= _NINF:
                # Degenerate case: every entry is (effectively) log 0,
                # so fall back to a uniform distribution.
                logp = math.log(1.0 / len(prob_dict), 2)
                for key in prob_dict:
                    self._prob_dict[key] = logp
            else:
                # Dividing by the total is subtracting in log space.
                for key in self._prob_dict:
                    self._prob_dict[key] -= value_sum
        else:
            value_sum = sum(self._prob_dict.values())
            if value_sum == 0:
                # Degenerate case: all mass is zero; go uniform.
                p = 1.0 / len(prob_dict)
                for key in prob_dict:
                    self._prob_dict[key] = p
            else:
                scale = 1.0 / value_sum
                for key in self._prob_dict:
                    self._prob_dict[key] *= scale
    def prob(self, sample):
        if not self._log:
            return self._prob_dict.get(sample, 0)
        if sample in self._prob_dict:
            return 2 ** (self._prob_dict[sample])
        return 0
    def logprob(self, sample):
        if self._log:
            return self._prob_dict.get(sample, _NINF)
        p = self._prob_dict.get(sample, 0)
        if p == 0:
            return _NINF
        return math.log(p, 2)
    def max(self):
        # Cache the argmax; ties are broken by the larger sample value
        # because (prob, sample) pairs are compared lexicographically.
        if not hasattr(self, '_max'):
            self._max = max((p,v) for (v,p) in self._prob_dict.items())[1]
        return self._max
    def samples(self):
        return self._prob_dict.keys()
    def __repr__(self):
        return '<ProbDist with %d samples>' % len(self._prob_dict)
@compat.python_2_unicode_compatible
class MLEProbDist(ProbDistI):
    """
    The maximum likelihood estimate of the distribution that generated
    a frequency distribution: each sample's probability is simply its
    relative frequency in that FreqDist.
    """
    def __init__(self, freqdist, bins=None):
        """
        Create an MLE distribution for the experiment that produced
        ``freqdist``.
        :type freqdist: FreqDist
        :param freqdist: The frequency distribution that the
            probability estimates should be based on.
        """
        # ``bins`` is accepted for interface compatibility with the
        # other estimators but plays no role in the MLE.
        self._freqdist = freqdist
    def freqdist(self):
        """
        Return the frequency distribution this estimate is based on.
        :rtype: FreqDist
        """
        return self._freqdist
    def prob(self, sample):
        # freq() already is count/N, i.e. the maximum likelihood estimate.
        return self._freqdist.freq(sample)
    def max(self):
        return self._freqdist.max()
    def samples(self):
        return self._freqdist.keys()
    def __repr__(self):
        """
        :rtype: str
        :return: A string representation of this ``ProbDist``.
        """
        return '<MLEProbDist based on %d samples>' % self._freqdist.N()
@compat.python_2_unicode_compatible
class LidstoneProbDist(ProbDistI):
    """
    The Lidstone estimate of the distribution that generated a
    frequency distribution, parameterized by a real number *gamma*
    (typically in [0, 1]).  A sample with count *c*, out of *N*
    outcomes and *B* bins, is assigned probability
    *(c+gamma)/(N+B\*gamma)* — equivalent to adding *gamma* to every
    bin's count and taking the MLE of the result.
    """
    SUM_TO_ONE = False
    def __init__(self, freqdist, gamma, bins=None):
        """
        Create a Lidstone estimate for the experiment that produced
        ``freqdist``.
        :type freqdist: FreqDist
        :param freqdist: The frequency distribution that the
            probability estimates should be based on.
        :type gamma: float
        :param gamma: The smoothing constant added to every bin's count.
        :type bins: int
        :param bins: The number of sample values the experiment can
            generate.  Must be set correctly for the probabilities to
            sum to one; defaults to ``freqdist.B()``.
        :raise ValueError: If ``bins`` is zero, implied zero, or smaller
            than the number of bins already present in ``freqdist``.
        """
        # "LidstoneProbDist"[:-8] -> "Lidstone" (subclasses get their own name).
        name = self.__class__.__name__[:-8]
        if (bins == 0) or (bins is None and freqdist.N() == 0):
            raise ValueError('A %s probability distribution '
                             'must have at least one bin.' % name)
        if (bins is not None) and (bins < freqdist.B()):
            raise ValueError('\nThe number of bins in a %s distribution '
                             '(%d) must be greater than or equal to\n'
                             'the number of bins in the FreqDist used '
                             'to create it (%d).' % (name, bins, freqdist.B()))
        self._freqdist = freqdist
        self._gamma = float(gamma)
        self._N = freqdist.N()
        self._bins = freqdist.B() if bins is None else bins
        self._divisor = self._N + self._bins * gamma
        if self._divisor == 0.0:
            # Degenerate case (empty freqdist, gamma == 0): force every
            # probability to 0 instead of dividing by zero — with a zero
            # gamma the numerator is the (zero) count anyway.
            self._gamma = 0
            self._divisor = 1
    def freqdist(self):
        """
        Return the frequency distribution this estimate is based on.
        :rtype: FreqDist
        """
        return self._freqdist
    def prob(self, sample):
        count = self._freqdist[sample]
        return (count + self._gamma) / self._divisor
    def max(self):
        # Probability grows monotonically with frequency, so the most
        # probable sample is simply the most frequent one.
        return self._freqdist.max()
    def samples(self):
        return self._freqdist.keys()
    def discount(self):
        # Total smoothing mass over total mass: (gamma*B) / (N + gamma*B).
        smoothing_mass = self._gamma * self._bins
        return smoothing_mass / (self._N + smoothing_mass)
    def __repr__(self):
        """
        Return a string representation of this ``ProbDist``.
        :rtype: str
        """
        return '<LidstoneProbDist based on %d samples>' % self._freqdist.N()
@compat.python_2_unicode_compatible
class LaplaceProbDist(LidstoneProbDist):
    """
    The Laplace ("add-one") estimate of the distribution that generated
    a frequency distribution: a sample with count *c*, out of *N*
    outcomes and *B* bins, gets probability *(c+1)/(N+B)*.  This is the
    Lidstone estimate with gamma fixed at 1.
    """
    def __init__(self, freqdist, bins=None):
        """
        Create a Laplace estimate for the experiment that produced
        ``freqdist``.
        :type freqdist: FreqDist
        :param freqdist: The frequency distribution that the
            probability estimates should be based on.
        :type bins: int
        :param bins: The number of sample values the experiment can
            generate; defaults to ``freqdist.B()``.
        """
        # Laplace smoothing is just Lidstone with gamma = 1.
        super(LaplaceProbDist, self).__init__(freqdist, 1, bins)
    def __repr__(self):
        """
        :rtype: str
        :return: A string representation of this ``ProbDist``.
        """
        return '<LaplaceProbDist based on %d samples>' % self._freqdist.N()
@compat.python_2_unicode_compatible
class ELEProbDist(LidstoneProbDist):
    """
    The expected likelihood estimate of the distribution that generated
    a frequency distribution: a sample with count *c*, out of *N*
    outcomes and *B* bins, gets probability *(c+0.5)/(N+B/2)*.  This is
    the Lidstone estimate with gamma fixed at 0.5.
    """
    def __init__(self, freqdist, bins=None):
        """
        Create an expected likelihood estimate for the experiment that
        produced ``freqdist``.
        :type freqdist: FreqDist
        :param freqdist: The frequency distribution that the
            probability estimates should be based on.
        :type bins: int
        :param bins: The number of sample values the experiment can
            generate; defaults to ``freqdist.B()``.
        """
        # ELE smoothing is just Lidstone with gamma = 0.5.
        super(ELEProbDist, self).__init__(freqdist, 0.5, bins)
    def __repr__(self):
        """
        Return a string representation of this ``ProbDist``.
        :rtype: str
        """
        return '<ELEProbDist based on %d samples>' % self._freqdist.N()
@compat.python_2_unicode_compatible
class HeldoutProbDist(ProbDistI):
    """
    The heldout estimate of the probability distribution of the
    experiment that produced two frequency distributions: a "base"
    frequency distribution and a "heldout" frequency distribution.
    The heldout estimate uses the heldout distribution to predict the
    probability of each sample, given its count in the base
    distribution.

    In particular, the probability of a sample that occurs *r* times in
    the base distribution is approximated by the average heldout
    frequency of all samples whose base count is *r*.  That average is
    *Tr[r]/(Nr[r].N)*, where:

    - *Tr[r]* is the total heldout count of all samples whose base
      count is *r*.
    - *Nr[r]* is the number of distinct samples whose base count is *r*.
    - *N* is the total number of outcomes recorded by the heldout
      frequency distribution.

    To make ``prob`` efficient, *Tr[r]/(Nr[r].N)* is precomputed for
    every *r* when the ``HeldoutProbDist`` is created, so a probability
    lookup is a simple table access.

    :type _estimate: list(float)
    :ivar _estimate: maps a base count *r* to the probability estimate
        *Tr[r]/(Nr[r].N)* for any sample occurring *r* times in the
        base distribution.
    :type _max_r: int
    :ivar _max_r: the largest count of any sample in the base
        distribution; determines how large ``_estimate`` must be.
    """
    SUM_TO_ONE = False

    def __init__(self, base_fdist, heldout_fdist, bins=None):
        """
        Build the heldout estimate for the experiment that generated
        ``base_fdist`` and ``heldout_fdist``.

        :type base_fdist: FreqDist
        :param base_fdist: The base frequency distribution.
        :type heldout_fdist: FreqDist
        :param heldout_fdist: The heldout frequency distribution.
        :type bins: int
        :param bins: The number of sample values that the experiment
            can generate.  Must be set correctly for the probabilities
            to sum to one; defaults to ``freqdist.B()`` when omitted.
        """
        self._base_fdist = base_fdist
        self._heldout_fdist = heldout_fdist

        # Largest count of any single sample in the base distribution.
        self._max_r = base_fdist[base_fdist.max()]

        # Precompute Tr, Nr and N, then build the per-r estimate table.
        heldout_totals = self._calculate_Tr()
        r_Nr = base_fdist.r_Nr(bins)
        type_counts = [r_Nr[count] for count in range(self._max_r + 1)]
        outcome_total = heldout_fdist.N()
        self._estimate = self._calculate_estimate(heldout_totals,
                                                  type_counts,
                                                  outcome_total)

    def _calculate_Tr(self):
        """
        Return the list *Tr*, where ``Tr[r]`` is the total heldout
        count of all samples whose count in the base distribution is *r*.

        :rtype: list(float)
        """
        totals = [0.0] * (self._max_r + 1)
        for sample in self._heldout_fdist:
            base_count = self._base_fdist[sample]
            totals[base_count] += self._heldout_fdist[sample]
        return totals

    def _calculate_estimate(self, Tr, Nr, N):
        """
        Return the list *estimate*, where ``estimate[r]`` is the
        probability estimate *Tr[r]/(Nr[r].N)* for any sample occurring
        *r* times in the base distribution.  When ``Nr[r]`` is zero the
        entry can never be looked up, so it is stored as None.

        :rtype: list(float)
        :type Tr: list(float)
        :param Tr: total heldout counts, indexed by base count *r*.
        :type Nr: list(float)
        :param Nr: number of samples with each base count *r*.
        :type N: int
        :param N: total number of outcomes recorded by the heldout
            frequency distribution.
        """
        return [None if Nr[r] == 0 else Tr[r] / (Nr[r] * N)
                for r in range(self._max_r + 1)]

    def base_fdist(self):
        """
        Return the base frequency distribution that this probability
        distribution is based on.

        :rtype: FreqDist
        """
        return self._base_fdist

    def heldout_fdist(self):
        """
        Return the heldout frequency distribution that this probability
        distribution is based on.

        :rtype: FreqDist
        """
        return self._heldout_fdist

    def samples(self):
        return self._base_fdist.keys()

    def prob(self, sample):
        # Probability is a precomputed table lookup keyed on the
        # sample's count in the base distribution.
        return self._estimate[self._base_fdist[sample]]

    def max(self):
        # NOTE: heldout estimation is *not* necessarily monotonic, so
        # the base distribution's max is only usually the right answer.
        return self._base_fdist.max()

    def discount(self):
        raise NotImplementedError()

    def __repr__(self):
        """
        :rtype: str
        :return: A string representation of this ``ProbDist``.
        """
        template = '<HeldoutProbDist: %d base samples; %d heldout samples>'
        return template % (self._base_fdist.N(), self._heldout_fdist.N())
@compat.python_2_unicode_compatible
class CrossValidationProbDist(ProbDistI):
    """
    The cross-validation estimate for the probability distribution of
    the experiment used to generate a set of frequency distributions.
    The probability of a sample is the average of the heldout
    estimates for that sample over every ordered pair of the given
    frequency distributions.
    """
    SUM_TO_ONE = False

    def __init__(self, freqdists, bins):
        """
        Build the cross-validation estimate for the experiment that
        generated ``freqdists``.

        :type freqdists: list(FreqDist)
        :param freqdists: A list of the frequency distributions
            generated by the experiment.
        :type bins: int
        :param bins: The number of sample values that the experiment
            can generate.  Must be set correctly for the probabilities
            to sum to one; defaults to ``freqdist.B()`` when omitted.
        """
        self._freqdists = freqdists
        # One heldout distribution per ordered pair of *distinct*
        # frequency distributions.
        self._heldout_probdists = [
            HeldoutProbDist(base, heldout, bins)
            for base in freqdists
            for heldout in freqdists
            if base is not heldout
        ]

    def freqdists(self):
        """
        Return the list of frequency distributions that this ``ProbDist`` is based on.

        :rtype: list(FreqDist)
        """
        return self._freqdists

    def samples(self):
        # [xx] nb: this is not too efficient
        combined = set()
        for fdist in self._freqdists:
            combined.update(fdist)
        return combined

    def prob(self, sample):
        # Average the heldout estimates for this sample.
        estimates = [pd.prob(sample) for pd in self._heldout_probdists]
        return sum(estimates) / len(self._heldout_probdists)

    def discount(self):
        raise NotImplementedError()

    def __repr__(self):
        """
        Return a string representation of this ``ProbDist``.

        :rtype: str
        """
        return '<CrossValidationProbDist: %d-way>' % len(self._freqdists)
@compat.python_2_unicode_compatible
class WittenBellProbDist(ProbDistI):
    """
    The Witten-Bell estimate of a probability distribution.  Uniform
    probability mass is set aside for as-yet-unseen events, equal to
    *T / (N + T)* where *T* is the number of observed event types and
    *N* is the total number of observed events — i.e. the maximum
    likelihood estimate of a new type event occurring.  The remaining
    mass is discounted so that all estimates sum to one:

    - *p = T / Z (N + T)*, if count = 0
    - *p = c / (N + T)*, otherwise
    """
    def __init__(self, freqdist, bins=None):
        """
        Create a distribution of Witten-Bell probability estimates
        based on ``freqdist``.  *T* and *N* are taken from ``freqdist``
        (its ``B()`` and ``N()`` values); the normalizing factor *Z* is
        derived from those values together with ``bins``.

        :param freqdist: The frequency counts upon which to base the
            estimation.
        :type freqdist: FreqDist
        :param bins: The number of possible event types. This must be at least
            as large as the number of bins in the ``freqdist``. If None, then
            it's assumed to be equal to that of the ``freqdist``
        :type bins: int
        """
        assert bins is None or bins >= freqdist.B(), \
            'bins parameter must not be less than %d=freqdist.B()' % freqdist.B()
        if bins is None:
            bins = freqdist.B()
        self._freqdist = freqdist
        # T: observed event types; Z: unseen event types; N: total events.
        self._T = freqdist.B()
        self._Z = bins - freqdist.B()
        self._N = freqdist.N()
        # P(0), the probability of an unseen event, precomputed once.
        if self._N == 0:
            # Empty freqdist: approximate P(0) by a uniform distribution.
            self._P0 = 1.0 / self._Z
        else:
            self._P0 = self._T / float(self._Z * (self._N + self._T))

    def prob(self, sample):
        # inherit docs from ProbDistI
        count = self._freqdist[sample]
        if count == 0:
            return self._P0
        return count / float(self._N + self._T)

    def max(self):
        return self._freqdist.max()

    def samples(self):
        return self._freqdist.keys()

    def freqdist(self):
        return self._freqdist

    def discount(self):
        raise NotImplementedError()

    def __repr__(self):
        """
        Return a string representation of this ``ProbDist``.

        :rtype: str
        """
        return '<WittenBellProbDist based on %d samples>' % self._freqdist.N()
##//////////////////////////////////////////////////////
## Good-Turing Probability Distributions
##//////////////////////////////////////////////////////
# Good-Turing frequency estimation was contributed by Alan Turing and
# his statistical assistant I.J. Good, during their collaboration in
# the WWII. It is a statistical technique for predicting the
# probability of occurrence of objects belonging to an unknown number
# of species, given past observations of such objects and their
# species. (In drawing balls from an urn, the 'objects' would be balls
# and the 'species' would be the distinct colors of the balls, finite
# but unknown in number.)
#
# Good-Turing method calculates the probability mass to assign to
# events with zero or low counts based on the number of events with
# higher counts. It does so by using the adjusted count *c\**:
#
# - *c\* = (c + 1) N(c + 1) / N(c)* for c >= 1
# - *things with frequency zero in training* = N(1) for c == 0
#
# where *c* is the original count, *N(i)* is the number of event types
# observed with count *i*. We can think the count of unseen as the count
# of frequency one (see Jurafsky & Martin 2nd Edition, p101).
#
# This method is problematic because the situation ``N(c+1) == 0``
# is quite common in the original Good-Turing estimation; smoothing or
# interpolation of *N(i)* values is essential in practice.
#
# Bill Gale and Geoffrey Sampson present a simple and effective approach,
# Simple Good-Turing. As a smoothing curve they simply use a power curve:
#
# Nr = a*r^b (with b < -1 to give the appropriate hyperbolic
# relationship)
#
# They estimate a and b by simple linear regression technique on the
# logarithmic form of the equation:
#
# log Nr = a + b*log(r)
#
# However, they suggest that such a simple curve is probably only
# appropriate for high values of r. For low values of r, they use the
# measured Nr directly. (see M&S, p.213)
#
# Gale and Sampson propose to use r while the difference between r and
# r* is 1.96 greater than the standard deviation, and switch to r* if
# it is less or equal:
#
# |r - r*| > 1.96 * sqrt((r + 1)^2 (Nr+1 / Nr^2) (1 + Nr+1 / Nr))
#
# The 1.96 coefficient corresponds to a 0.05 significance criterion,
# some implementations can use a coefficient of 1.65 for a 0.1
# significance criterion.
#
##//////////////////////////////////////////////////////
## Simple Good-Turing Probablity Distributions
##//////////////////////////////////////////////////////
@compat.python_2_unicode_compatible
class SimpleGoodTuringProbDist(ProbDistI):
    """
    SimpleGoodTuring ProbDist approximates from frequency to frequency of
    frequency into a linear line under log space by linear regression.
    Details of Simple Good-Turing algorithm can be found in:

    - Good Turing smoothing without tears" (Gale & Sampson 1995),
      Journal of Quantitative Linguistics, vol. 2 pp. 217-237.
    - "Speech and Language Processing (Jurafsky & Martin),
      2nd Edition, Chapter 4.5 p103 (log(Nc) = a + b*log(c))
    - http://www.grsampson.net/RGoodTur.html

    Given a set of pair (xi, yi), where the xi denotes the frequency and
    yi denotes the frequency of frequency, we want to minimize their
    square variation. E(x) and E(y) represent the mean of xi and yi.

    - slope: b = sigma ((xi-E(x)(yi-E(y))) / sigma ((xi-E(x))(xi-E(x)))
    - intercept: a = E(y) - b.E(x)
    """
    SUM_TO_ONE = False

    def __init__(self, freqdist, bins=None):
        """
        :param freqdist: The frequency counts upon which to base the
            estimation.
        :type freqdist: FreqDist
        :param bins: The number of possible event types. This must be
            larger than the number of bins in the ``freqdist``. If None,
            then it's assumed to be equal to ``freqdist``.B() + 1
        :type bins: int
        """
        assert bins is None or bins > freqdist.B(),\
            'bins parameter must not be less than %d=freqdist.B()+1' % (freqdist.B()+1)
        if bins is None:
            bins = freqdist.B() + 1
        self._freqdist = freqdist
        self._bins = bins
        # Safe default: _renormalize() only assigns self._renormal when
        # some probability mass is observed, so degenerate (e.g. empty)
        # distributions would otherwise hit an AttributeError later.
        self._renormal = 1.0
        r, nr = self._r_Nr()
        self.find_best_fit(r, nr)
        self._switch(r, nr)
        self._renormalize(r, nr)

    def _r_Nr_non_zero(self):
        # Frequency-of-frequency table with the r=0 entry removed.
        r_Nr = self._freqdist.r_Nr()
        del r_Nr[0]
        return r_Nr

    def _r_Nr(self):
        """
        Split the frequency distribution in two list (r, Nr), where Nr(r) > 0
        """
        nonzero = self._r_Nr_non_zero()
        if not nonzero:
            return [], []
        return zip(*sorted(nonzero.items()))

    def find_best_fit(self, r, nr):
        """
        Use simple linear regression to tune parameters self._slope and
        self._intercept in the log-log space based on count and Nr(count)
        (Work in log space to avoid floating point underflow.)
        """
        # For higher sample frequencies the data points becomes horizontal
        # along line Nr=1. To create a more evident linear model in log-log
        # space, we average positive Nr values with the surrounding zero
        # values. (Church and Gale, 1991)
        if not r or not nr:
            # Empty r or nr?
            return

        zr = []
        for j in range(len(r)):
            i = (r[j-1] if j > 0 else 0)
            k = (2 * r[j] - i if j == len(r) - 1 else r[j+1])
            zr_ = 2.0 * nr[j] / (k - i)
            zr.append(zr_)

        log_r = [math.log(i) for i in r]
        log_zr = [math.log(i) for i in zr]

        xy_cov = x_var = 0.0
        x_mean = 1.0 * sum(log_r) / len(log_r)
        y_mean = 1.0 * sum(log_zr) / len(log_zr)
        for (x, y) in zip(log_r, log_zr):
            xy_cov += (x - x_mean) * (y - y_mean)
            x_var += (x - x_mean)**2
        self._slope = (xy_cov / x_var if x_var != 0 else 0.0)
        if self._slope >= -1:
            warnings.warn('SimpleGoodTuring did not find a proper best fit '
                          'line for smoothing probabilities of occurrences. '
                          'The probability estimates are likely to be '
                          'unreliable.')
        self._intercept = y_mean - self._slope * x_mean

    def _switch(self, r, nr):
        """
        Calculate the r frontier where we must switch from Nr to Sr
        when estimating E[Nr].
        """
        for i, r_ in enumerate(r):
            if len(r) == i + 1 or r[i+1] != r_ + 1:
                # We are at the end of r, or there is a gap in r
                self._switch_at = r_
                break

            Sr = self.smoothedNr
            smooth_r_star = (r_ + 1) * Sr(r_+1) / Sr(r_)
            unsmooth_r_star = 1.0 * (r_ + 1) * nr[i+1] / nr[i]

            std = math.sqrt(self._variance(r_, nr[i], nr[i+1]))
            if abs(unsmooth_r_star-smooth_r_star) <= 1.96 * std:
                self._switch_at = r_
                break

    def _variance(self, r, nr, nr_1):
        # Gale & Sampson's variance approximation for the unsmoothed r*.
        r = float(r)
        nr = float(nr)
        nr_1 = float(nr_1)
        return (r + 1.0)**2 * (nr_1 / nr**2) * (1.0 + nr_1 / nr)

    def _renormalize(self, r, nr):
        """
        It is necessary to renormalize all the probability estimates to
        ensure a proper probability distribution results. This can be done
        by keeping the estimate of the probability mass for unseen items as
        N(1)/N and renormalizing all the estimates for previously seen items
        (as Gale and Sampson (1995) propose). (See M&S P.213, 1999)
        """
        prob_cov = 0.0
        for r_, nr_ in zip(r, nr):
            prob_cov += nr_ * self._prob_measure(r_)
        if prob_cov:
            self._renormal = (1 - self._prob_measure(0)) / prob_cov

    def smoothedNr(self, r):
        """
        Return the number of samples with count r.

        :param r: The amount of frequency.
        :type r: int
        :rtype: float
        """
        # Nr = a*r^b (with b < -1 to give the appropriate hyperbolic
        # relationship)
        # Estimate a and b by simple linear regression technique on
        # the logarithmic form of the equation: log Nr = a + b*log(r)
        return math.exp(self._intercept + self._slope * math.log(r))

    def prob(self, sample):
        """
        Return the sample's probability.

        :param sample: sample of the event
        :type sample: str
        :rtype: float
        """
        count = self._freqdist[sample]
        p = self._prob_measure(count)
        if count == 0:
            if self._bins == self._freqdist.B():
                # No room reserved for unseen events.
                p = 0.0
            else:
                # Spread the unseen mass uniformly over the unseen bins.
                p = p / (1.0 * self._bins - self._freqdist.B())
        else:
            p = p * self._renormal
        return p

    def _prob_measure(self, count):
        # Unrenormalized Good-Turing measure: r*/N, using the measured
        # Nr below the switch frontier and the smoothed Sr above it.
        if count == 0 and self._freqdist.N() == 0 :
            return 1.0
        elif count == 0 and self._freqdist.N() != 0:
            return 1.0 * self._freqdist.Nr(1) / self._freqdist.N()

        if self._switch_at > count:
            Er_1 = 1.0 * self._freqdist.Nr(count+1)
            Er = 1.0 * self._freqdist.Nr(count)
        else:
            Er_1 = self.smoothedNr(count+1)
            Er = self.smoothedNr(count)

        r_star = (count + 1) * Er_1 / Er
        return r_star / self._freqdist.N()

    def check(self):
        """
        Diagnostic helper: print the total probability mass allocated by
        this distribution (renormalized mass of all seen samples plus the
        mass reserved for unseen samples).  Should print a value close to
        one.

        Fixed: the previous implementation read ``self._Nr``, an attribute
        that was never assigned, so it always raised AttributeError.
        """
        r, nr = self._r_Nr()
        prob_sum = 0.0
        # Renormalized mass assigned to the seen samples...
        for r_, nr_ in zip(r, nr):
            prob_sum += nr_ * self._prob_measure(r_) * self._renormal
        # ...plus the mass reserved for unseen samples.
        prob_sum += self._prob_measure(0)
        print("Probability Sum:", prob_sum)

    def discount(self):
        """
        This function returns the total mass of probability transfers from the
        seen samples to the unseen samples.
        """
        return 1.0 * self.smoothedNr(1) / self._freqdist.N()

    def max(self):
        return self._freqdist.max()

    def samples(self):
        return self._freqdist.keys()

    def freqdist(self):
        return self._freqdist

    def __repr__(self):
        """
        Return a string representation of this ``ProbDist``.

        :rtype: str
        """
        return '<SimpleGoodTuringProbDist based on %d samples>'\
            % self._freqdist.N()
class MutableProbDist(ProbDistI):
    """
    A mutable probdist whose probabilities may be easily modified.  It
    copies the probabilities out of an existing probdist into a mutable
    array and provides an ``update`` method for changing them.
    """

    def __init__(self, prob_dist, samples, store_logs=True):
        """
        Create the mutable probdist from ``prob_dist``, restricted to the
        given ``samples``.  Values are stored as base-2 log probabilities
        when ``store_logs`` is true.

        :param prob_dist: the distribution from which to garner the
            probabilities
        :type prob_dist: ProbDist
        :param samples: the complete set of samples
        :type samples: sequence of any
        :param store_logs: whether to store the probabilities as logarithms
        :type store_logs: bool
        """
        self._samples = samples
        # Map each sample to its slot in the probability array.
        self._sample_dict = {sample: idx for idx, sample in enumerate(samples)}
        self._data = array.array(str("d"), [0.0]) * len(samples)
        extract = prob_dist.logprob if store_logs else prob_dist.prob
        for idx, sample in enumerate(samples):
            self._data[idx] = extract(sample)
        self._logs = store_logs

    def samples(self):
        # inherit documentation
        return self._samples

    def prob(self, sample):
        # inherit documentation
        idx = self._sample_dict.get(sample)
        if idx is None:
            return 0.0
        value = self._data[idx]
        return 2 ** value if self._logs else value

    def logprob(self, sample):
        # inherit documentation
        idx = self._sample_dict.get(sample)
        if idx is None:
            return float('-inf')
        value = self._data[idx]
        return value if self._logs else math.log(value, 2)

    def update(self, sample, prob, log=True):
        """
        Update the probability for the given sample. This may cause the object
        to stop being the valid probability distribution - the user must
        ensure that they update the sample probabilities such that all samples
        have probabilities between 0 and 1 and that all probabilities sum to
        one.

        :param sample: the sample for which to update the probability
        :type sample: any
        :param prob: the new probability
        :type prob: float
        :param log: is the probability already logged
        :type log: bool
        """
        idx = self._sample_dict.get(sample)
        assert idx is not None
        if self._logs:
            self._data[idx] = prob if log else math.log(prob, 2)
        else:
            self._data[idx] = 2 ** prob if log else prob
##/////////////////////////////////////////////////////
## Kneser-Ney Probability Distribution
##//////////////////////////////////////////////////////
# This method for calculating probabilities was introduced in 1995 by Reinhard
# Kneser and Hermann Ney. It was meant to improve the accuracy of language
# models that use backing-off to deal with sparse data. The authors propose two
# ways of doing so: a marginal distribution constraint on the back-off
# distribution and a leave-one-out distribution. For a start, the first one is
# implemented as a class below.
#
# The idea behind a back-off n-gram model is that we have a series of
# frequency distributions for our n-grams so that in case we have not seen a
# given n-gram during training (and as a result have a 0 probability for it) we
# can 'back off' (hence the name!) and try testing whether we've seen the
# n-1-gram part of the n-gram in training.
#
# The novelty of Kneser and Ney's approach was that they decided to fiddle
# around with the way this latter, backed off probability was being calculated
# whereas their peers seemed to focus on the primary probability.
#
# The implementation below uses one of the techniques described in their paper
# titled "Improved backing-off for n-gram language modeling." In the same paper
# another technique is introduced to attempt to smooth the back-off
# distribution as well as the primary one. There is also a much-cited
# modification of this method proposed by Chen and Goodman.
#
# In order for the implementation of Kneser-Ney to be more efficient, some
# changes have been made to the original algorithm. Namely, the calculation of
# the normalizing function gamma has been significantly simplified and
# combined slightly differently with beta. None of these changes affect the
# nature of the algorithm, but instead aim to cut out unnecessary calculations
# and take advantage of storing and retrieving information in dictionaries
# where possible.
@compat.python_2_unicode_compatible
class KneserNeyProbDist(ProbDistI):
    """
    Kneser-Ney estimate of a probability distribution. This is a version of
    back-off that counts how likely an n-gram is provided the n-1-gram had
    been seen in training. Extends the ProbDistI interface, requires a trigram
    FreqDist instance to train on. Optionally, a different from default discount
    value can be specified. The default discount is set to 0.75.
    """

    def __init__(self, freqdist, bins=None, discount=0.75):
        """
        :param freqdist: The trigram frequency distribution upon which to base
            the estimation
        :type freqdist: FreqDist
        :param bins: Included for compatibility with nltk.tag.hmm
        :type bins: int or float
        :param discount: The discount applied when retrieving counts of
            trigrams
        :type discount: float (preferred, but can be set to int)
        """
        if not bins:
            self._bins = freqdist.B()
        else:
            self._bins = bins
        self._D = discount

        # cache for probability calculation
        self._cache = {}

        # internal bigram and trigram frequency distributions
        self._bigrams = defaultdict(int)
        self._trigrams = freqdist

        # helper dictionaries used to calculate probabilities
        self._wordtypes_after = defaultdict(float)
        self._trigrams_contain = defaultdict(float)
        self._wordtypes_before = defaultdict(float)
        for w0, w1, w2 in freqdist:
            self._bigrams[(w0,w1)] += freqdist[(w0, w1, w2)]
            self._wordtypes_after[(w0,w1)] += 1
            self._trigrams_contain[w1] += 1
            self._wordtypes_before[(w1,w2)] += 1

    def prob(self, trigram):
        """
        Return the Kneser-Ney probability of ``trigram``.

        :param trigram: an iterable of exactly three items (w0, w1, w2)
        :raises ValueError: if ``trigram`` does not have three members
        :rtype: float
        """
        # sample must be a triple
        if len(trigram) != 3:
            raise ValueError('Expected an iterable with 3 members.')
        trigram = tuple(trigram)
        w0, w1, w2 = trigram

        if trigram in self._cache:
            return self._cache[trigram]
        else:
            # if the sample trigram was seen during training
            if trigram in self._trigrams:
                prob = (self._trigrams[trigram]
                        - self.discount())/self._bigrams[(w0, w1)]

            # else if the 'rougher' environment was seen during training
            elif (w0,w1) in self._bigrams and (w1,w2) in self._wordtypes_before:
                aftr = self._wordtypes_after[(w0, w1)]
                bfr = self._wordtypes_before[(w1, w2)]

                # the probability left over from alphas
                leftover_prob = ((aftr * self.discount())
                                 / self._bigrams[(w0, w1)])

                # the beta (including normalization)
                beta = bfr /(self._trigrams_contain[w1] - aftr)

                prob = leftover_prob * beta

            # else the sample was completely unseen during training
            else:
                prob = 0.0

            self._cache[trigram] = prob
            return prob

    def discount(self):
        """
        Return the value by which counts are discounted. By default set to 0.75.

        :rtype: float
        """
        return self._D

    def set_discount(self, discount):
        """
        Set the value by which counts are discounted to the value of discount.

        :param discount: the new value to discount counts by
        :type discount: float (preferred, but int possible)
        :rtype: None
        """
        self._D = discount

    def samples(self):
        return self._trigrams.keys()

    def max(self):
        return self._trigrams.max()

    def __repr__(self):
        '''
        Return a string representation of this ProbDist

        :rtype: str
        '''
        # Fixed: the format string previously lacked the closing '>'.
        return '<KneserNeyProbDist based on {0} trigrams>'.format(self._trigrams.N())
##//////////////////////////////////////////////////////
## Probability Distribution Operations
##//////////////////////////////////////////////////////
def log_likelihood(test_pdist, actual_pdist):
    """
    Return the log likelihood of ``test_pdist`` with respect to
    ``actual_pdist``: the sum, over the samples of the actual
    distribution, of ``p_actual(s) * log2(p_test(s))``.

    :type test_pdist: ProbDistI
    :type actual_pdist: ProbDistI
    :raises ValueError: if either argument is not a ``ProbDistI``
    :rtype: float
    """
    if (not isinstance(test_pdist, ProbDistI) or
        not isinstance(actual_pdist, ProbDistI)):
        raise ValueError('expected a ProbDist.')
    # Iterate via samples(): ProbDistI does not define __iter__, so
    # iterating the probdist directly fails (cf. entropy() below).
    return sum(actual_pdist.prob(s) * math.log(test_pdist.prob(s), 2)
               for s in actual_pdist.samples())
def entropy(pdist):
    """Return the Shannon entropy (in bits) of ``pdist``."""
    total = 0.0
    for s in pdist.samples():
        p = pdist.prob(s)
        total += p * math.log(p, 2)
    return -total
##//////////////////////////////////////////////////////
## Conditional Distributions
##//////////////////////////////////////////////////////
@compat.python_2_unicode_compatible
class ConditionalFreqDist(defaultdict):
    """
    A collection of frequency distributions for a single experiment
    run under different conditions.  Conditional frequency
    distributions are used to record the number of times each sample
    occurred, given the condition under which the experiment was run.
    For example, a conditional frequency distribution could be used to
    record the frequency of each word (type) in a document, given its
    length.  Formally, a conditional frequency distribution can be
    defined as a function that maps from each condition to the
    FreqDist for the experiment under that condition.

    Conditional frequency distributions are typically constructed by
    repeatedly running an experiment under a variety of conditions,
    and incrementing the sample outcome counts for the appropriate
    conditions.  For example, the following code will produce a
    conditional frequency distribution that encodes how often each
    word type occurs, given the length of that word type:

        >>> from nltk.probability import ConditionalFreqDist
        >>> from nltk.tokenize import word_tokenize
        >>> sent = "the the the dog dog some other words that we do not care about"
        >>> cfdist = ConditionalFreqDist()
        >>> for word in word_tokenize(sent):
        ...     condition = len(word)
        ...     cfdist[condition][word] += 1

    An equivalent way to do this is with the initializer:

        >>> cfdist = ConditionalFreqDist((len(word), word) for word in word_tokenize(sent))

    The frequency distribution for each condition is accessed using
    the indexing operator:

        >>> cfdist[3]
        FreqDist({'the': 3, 'dog': 2, 'not': 1})
        >>> cfdist[3].freq('the')
        0.5
        >>> cfdist[3]['dog']
        2

    When the indexing operator is used to access the frequency
    distribution for a condition that has not been accessed before,
    ``ConditionalFreqDist`` creates a new empty FreqDist for that
    condition.
    """
    def __init__(self, cond_samples=None):
        """
        Construct a new empty conditional frequency distribution.  In
        particular, the count for every sample, under every condition,
        is zero.

        :param cond_samples: The samples to initialize the conditional
            frequency distribution with
        :type cond_samples: Sequence of (condition, sample) tuples
        """
        # defaultdict(FreqDist): indexing an unseen condition creates a
        # fresh empty FreqDist for it.
        defaultdict.__init__(self, FreqDist)

        if cond_samples:
            for (cond, sample) in cond_samples:
                self[cond][sample] += 1

    def __reduce__(self):
        # Pickle support: rebuild via the class's no-arg constructor and
        # re-insert the (condition, FreqDist) pairs through the pickle
        # protocol's items iterator (5th element of the reduce tuple).
        kv_pairs = ((cond, self[cond]) for cond in self.conditions())
        return (self.__class__, (), None, None, kv_pairs)

    def conditions(self):
        """
        Return a list of the conditions that have been accessed for
        this ``ConditionalFreqDist``.  Use the indexing operator to
        access the frequency distribution for a given condition.
        Note that the frequency distributions for some conditions
        may contain zero sample outcomes.

        :rtype: list
        """
        return list(self.keys())

    def N(self):
        """
        Return the total number of sample outcomes that have been
        recorded by this ``ConditionalFreqDist``.

        :rtype: int
        """
        return sum(fdist.N() for fdist in compat.itervalues(self))

    def plot(self, *args, **kwargs):
        """
        Plot the given samples from the conditional frequency distribution.
        For a cumulative plot, specify cumulative=True.
        (Requires Matplotlib to be installed.)

        :param samples: The samples to plot
        :type samples: list
        :param title: The title for the graph
        :type title: str
        :param conditions: The conditions to plot (default is all)
        :type conditions: list
        """
        try:
            from matplotlib import pylab
        except ImportError:
            raise ValueError('The plot function requires matplotlib to be installed.'
                         'See http://matplotlib.org/')

        # NOTE: _get_kwarg pops each keyword out of kwargs, so the
        # remaining kwargs can be passed straight through to pylab.plot.
        # 'conditions' must be extracted before 'samples', whose default
        # is computed from it.
        cumulative = _get_kwarg(kwargs, 'cumulative', False)
        conditions = _get_kwarg(kwargs, 'conditions', sorted(self.conditions()))
        title = _get_kwarg(kwargs, 'title', '')
        samples = _get_kwarg(kwargs, 'samples',
                             sorted(set(v for c in conditions for v in self[c])))  # this computation could be wasted
        if not "linewidth" in kwargs:
            kwargs["linewidth"] = 2

        # One plotted line per condition; ylabel/legend_loc are set inside
        # the loop and read after it, so at least one condition is assumed.
        for condition in conditions:
            if cumulative:
                freqs = list(self[condition]._cumulative_frequencies(samples))
                ylabel = "Cumulative Counts"
                legend_loc = 'lower right'
            else:
                freqs = [self[condition][sample] for sample in samples]
                ylabel = "Counts"
                legend_loc = 'upper right'
            # percents = [f * 100 for f in freqs] only in ConditionalProbDist?
            kwargs['label'] = "%s" % condition
            pylab.plot(freqs, *args, **kwargs)

        pylab.legend(loc=legend_loc)
        pylab.grid(True, color="silver")
        pylab.xticks(range(len(samples)), [compat.text_type(s) for s in samples], rotation=90)
        if title:
            pylab.title(title)
        pylab.xlabel("Samples")
        pylab.ylabel(ylabel)
        pylab.show()

    def tabulate(self, *args, **kwargs):
        """
        Tabulate the given samples from the conditional frequency distribution.

        :param samples: The samples to plot
        :type samples: list
        :param title: The title for the graph
        :type title: str
        :param conditions: The conditions to plot (default is all)
        :type conditions: list
        """
        # Same kwarg-popping protocol as plot(); 'conditions' before
        # 'samples' because the samples default depends on it.
        cumulative = _get_kwarg(kwargs, 'cumulative', False)
        conditions = _get_kwarg(kwargs, 'conditions', sorted(self.conditions()))
        samples = _get_kwarg(kwargs, 'samples',
                             sorted(set(v for c in conditions for v in self[c])))  # this computation could be wasted

        # Header row: blank cell as wide as the longest condition label,
        # then one 4-char column per sample.
        condition_size = max(len("%s" % c) for c in conditions)
        print(' ' * condition_size, end=' ')
        for s in samples:
            print("%4s" % s, end=' ')
        print()
        # One row per condition.
        for c in conditions:
            print("%*s" % (condition_size, c), end=' ')
            if cumulative:
                freqs = list(self[c]._cumulative_frequencies(samples))
            else:
                freqs = [self[c][sample] for sample in samples]
            for f in freqs:
                print("%4d" % f, end=' ')
            print()

    # @total_ordering doesn't work here, since the class inherits from a builtin class
    def __le__(self, other):
        # Subset on conditions, and per-condition FreqDist comparison.
        if not isinstance(other, ConditionalFreqDist):
            raise_unorderable_types("<=", self, other)
        return set(self.conditions()).issubset(other.conditions()) \
               and all(self[c] <= other[c] for c in self.conditions())
    def __lt__(self, other):
        if not isinstance(other, ConditionalFreqDist):
            raise_unorderable_types("<", self, other)
        return self <= other and self != other
    def __ge__(self, other):
        if not isinstance(other, ConditionalFreqDist):
            raise_unorderable_types(">=", self, other)
        return other <= self
    def __gt__(self, other):
        if not isinstance(other, ConditionalFreqDist):
            raise_unorderable_types(">", self, other)
        return other < self

    def __repr__(self):
        """
        Return a string representation of this ``ConditionalFreqDist``.

        :rtype: str
        """
        return '<ConditionalFreqDist with %d conditions>' % len(self)
@compat.python_2_unicode_compatible
class ConditionalProbDistI(dict):
    """
    A collection of probability distributions for a single experiment
    run under different conditions.  Conditional probability
    distributions are used to estimate the likelihood of each sample,
    given the condition under which the experiment was run.  Formally,
    a conditional probability distribution maps each condition to the
    ``ProbDist`` for the experiment under that condition.
    """
    def __init__(self):
        # Abstract interface: concrete subclasses supply their own
        # constructor.
        raise NotImplementedError("Interfaces can't be instantiated")

    def conditions(self):
        """
        Return a list of the conditions that are represented by
        this ``ConditionalProbDist``.  Use the indexing operator to
        access the probability distribution for a given condition.

        :rtype: list
        """
        # Iterating a dict yields its keys.
        return list(self)

    def __repr__(self):
        """
        Return a string representation of this ``ConditionalProbDist``.

        :rtype: str
        """
        class_name = type(self).__name__
        return '<%s with %d conditions>' % (class_name, len(self))
class ConditionalProbDist(ConditionalProbDistI):
    """
    A conditional probability distribution modeling the experiments
    that were used to generate a conditional frequency distribution.
    It is built from a ``ConditionalFreqDist`` together with a
    ``ProbDist`` factory:

    - the ``ConditionalFreqDist`` supplies one frequency distribution
      per condition;
    - the factory is any callable (typically a ``ProbDist`` class such
      as ``MLEProbDist`` or ``HeldoutProbDist``) that turns a frequency
      distribution into a probability distribution.

    Extra positional and keyword arguments for the factory may be
    passed to the constructor and are forwarded on every factory call.
    For example, the following builds a ``ConditionalProbDist`` whose
    per-condition distribution is an ``ELEProbDist`` with 10 bins:

        >>> from nltk.corpus import brown
        >>> from nltk.probability import ConditionalFreqDist
        >>> from nltk.probability import ConditionalProbDist, ELEProbDist
        >>> cfdist = ConditionalFreqDist(brown.tagged_words()[:5000])
        >>> cpdist = ConditionalProbDist(cfdist, ELEProbDist, 10)
        >>> cpdist['passed'].max()
        'VBD'
        >>> cpdist['passed'].prob('VBD')
        0.423...
    """
    def __init__(self, cfdist, probdist_factory,
                 *factory_args, **factory_kw_args):
        """
        Construct a new conditional probability distribution.

        :type cfdist: ConditionalFreqDist
        :param cfdist: Specifies the frequency distribution for each
            condition.
        :type probdist_factory: class or function
        :param probdist_factory: Maps a condition's frequency
            distribution to its probability distribution.  Called with
            the frequency distribution as its first argument, followed
            by ``factory_args`` and ``factory_kw_args``.
        :type factory_args: (any)
        :param factory_args: Extra positional arguments for
            ``probdist_factory``, e.g. the number of bins.
        :type factory_kw_args: (any)
        :param factory_kw_args: Extra keyword arguments for
            ``probdist_factory``.
        """
        # Remember the factory and its arguments so that __missing__ can
        # build distributions for unseen conditions on demand.
        self._probdist_factory = probdist_factory
        self._factory_args = factory_args
        self._factory_kw_args = factory_kw_args
        for cond in cfdist:
            self[cond] = probdist_factory(cfdist[cond],
                                          *factory_args, **factory_kw_args)

    def __missing__(self, key):
        # Lazily create a distribution over an empty FreqDist for any
        # condition that was never observed, then memoize it.
        self[key] = self._probdist_factory(FreqDist(),
                                           *self._factory_args,
                                           **self._factory_kw_args)
        return self[key]
class DictionaryConditionalProbDist(ConditionalProbDistI):
    """
    An alternative ``ConditionalProbDistI`` implementation that simply
    wraps an existing mapping of conditions to ``ProbDist`` objects,
    instead of deriving them from ``FreqDist`` counts.
    """
    def __init__(self, probdist_dict):
        """
        :param probdist_dict: a dictionary containing the probdists indexed
            by the conditions
        :type probdist_dict: dict any -> probdist
        """
        self.update(probdist_dict)

    def __missing__(self, key):
        # Any unknown condition gets a fresh, empty DictionaryProbDist,
        # which is memoized for subsequent lookups.
        self[key] = DictionaryProbDist()
        return self[key]
##//////////////////////////////////////////////////////
## Adding in log-space.
##//////////////////////////////////////////////////////

# If the difference is bigger than this, then just take the bigger one:
# 2**_ADD_LOGS_MAX_DIFF (= 1e-30) is far below double precision relative
# to the larger term, so the smaller term cannot affect the sum.
_ADD_LOGS_MAX_DIFF = math.log(1e-30, 2)

def add_logs(logx, logy):
    """
    Given two numbers ``logx`` = *log(x)* and ``logy`` = *log(y)*, return
    *log(x+y)*.  Conceptually this equals
    ``log(2**(logx)+2**(logy))``, but the implementation factors out the
    larger exponent so the intermediate powers cannot overflow.
    """
    # When one term is vastly smaller it contributes nothing at float
    # precision; just return the larger log.
    if logx < logy + _ADD_LOGS_MAX_DIFF:
        return logy
    if logy < logx + _ADD_LOGS_MAX_DIFF:
        return logx
    # Otherwise shift both exponents down by the smaller one before
    # summing, keeping the powers of two near 1.
    smaller = min(logx, logy)
    return smaller + math.log(2 ** (logx - smaller) + 2 ** (logy - smaller), 2)
def sum_logs(logs):
    """Return log(sum(2**v for v in logs)); an empty sequence sums to
    log(0), i.e. negative infinity."""
    if not logs:
        return _NINF
    return reduce(add_logs, logs[1:], logs[0])
##//////////////////////////////////////////////////////
## Probabilistic Mix-in
##//////////////////////////////////////////////////////
class ProbabilisticMixIn(object):
    """
    A mix-in class that attaches a probability to instances of other
    classes (trees, rules, etc.).  To use it, derive a new class from
    both an existing class and ``ProbabilisticMixIn``, and have the new
    constructor explicitly invoke both parent constructors.  For
    example:

        >>> from nltk.probability import ProbabilisticMixIn
        >>> class A:
        ...     def __init__(self, x, y): self.data = (x,y)
        ...
        >>> class ProbabilisticA(A, ProbabilisticMixIn):
        ...     def __init__(self, x, y, **prob_kwarg):
        ...         A.__init__(self, x, y)
        ...         ProbabilisticMixIn.__init__(self, **prob_kwarg)

    See the documentation for the ProbabilisticMixIn
    ``constructor<__init__>`` for information about the arguments it
    expects.  Subclasses should generally also redefine the string
    representation, comparison, and hashing methods.
    """
    def __init__(self, **kwargs):
        """
        Initialize this object's probability.  This initializer should
        be called by subclass constructors.  ``prob`` should generally
        be the first argument for those constructors.

        :param prob: The probability associated with the object.
        :type prob: float
        :param logprob: The log of the probability associated with
            the object.
        :type logprob: float
        """
        has_prob = 'prob' in kwargs
        has_logprob = 'logprob' in kwargs
        if has_prob and has_logprob:
            raise TypeError('Must specify either prob or logprob '
                            '(not both)')
        # Call the mix-in's setters explicitly (not through self) so
        # that immutable subclasses can still be initialized.
        if has_prob:
            ProbabilisticMixIn.set_prob(self, kwargs['prob'])
        elif has_logprob:
            ProbabilisticMixIn.set_logprob(self, kwargs['logprob'])
        else:
            self.__prob = self.__logprob = None

    def set_prob(self, prob):
        """
        Set the probability associated with this object to ``prob``.

        :param prob: The new probability
        :type prob: float
        """
        # Only one representation is cached; the other is derived lazily.
        self.__prob = prob
        self.__logprob = None

    def set_logprob(self, logprob):
        """
        Set the log probability associated with this object to
        ``logprob``.  I.e., set the probability associated with this
        object to ``2**(logprob)``.

        :param logprob: The new log probability
        :type logprob: float
        """
        self.__logprob = logprob
        self.__prob = None

    def prob(self):
        """
        Return the probability associated with this object, or ``None``
        if no probability has been set.

        :rtype: float
        """
        if self.__prob is None:
            if self.__logprob is None:
                return None
            # Derive and cache the linear probability from the log form.
            self.__prob = 2 ** self.__logprob
        return self.__prob

    def logprob(self):
        """
        Return ``log(p)`` (base 2), where ``p`` is the probability
        associated with this object, or ``None`` if unset.

        :rtype: float
        """
        if self.__logprob is None:
            if self.__prob is None:
                return None
            # Derive and cache the log probability from the linear form.
            self.__logprob = math.log(self.__prob, 2)
        return self.__logprob
class ImmutableProbabilisticMixIn(ProbabilisticMixIn):
    """A ``ProbabilisticMixIn`` whose probability cannot be changed after
    construction.

    Both setters raise ``ValueError``; the initial value can still be set
    because ``ProbabilisticMixIn.__init__`` invokes the base-class setters
    directly rather than through ``self``.
    """
    def set_prob(self, prob):
        # Reject any mutation attempt after construction.
        raise ValueError('%s is immutable' % self.__class__.__name__)
    def set_logprob(self, prob):
        # Reject any mutation attempt after construction.
        raise ValueError('%s is immutable' % self.__class__.__name__)
## Helper function for processing keyword arguments
def _get_kwarg(kwargs, key, default):
if key in kwargs:
arg = kwargs[key]
del kwargs[key]
else:
arg = default
return arg
##//////////////////////////////////////////////////////
## Demonstration
##//////////////////////////////////////////////////////
def _create_rand_fdist(numsamples, numoutcomes):
    """
    Create a new frequency distribution, with random samples.  The
    samples are numbers from 1 to ``numsamples``, each generated by
    summing two independent uniformly-distributed draws.
    """
    import random
    fdist = FreqDist()
    for _ in range(numoutcomes):
        # Summing two uniform draws yields a roughly triangular shape.
        # (Both randint calls are kept in the original order so seeded
        # runs remain reproducible.)
        outcome = (random.randint(1, (1 + numsamples) // 2) +
                   random.randint(0, numsamples // 2))
        fdist[outcome] += 1
    return fdist
def _create_sum_pdist(numsamples):
    """
    Return the true probability distribution for the experiment
    ``_create_rand_fdist(numsamples, x)``.
    """
    fdist = FreqDist()
    # Enumerate every possible pair of draws; each pair is equally
    # likely, so counting pair sums gives the exact distribution.
    for first in range(1, (1 + numsamples) // 2 + 1):
        for second in range(numsamples // 2 + 1):
            fdist[first + second] += 1
    return MLEProbDist(fdist)
def demo(numsamples=6, numoutcomes=500):
    """
    A demonstration of frequency distributions and probability
    distributions.  This demonstration creates three frequency
    distributions with, and uses them to sample a random process with
    ``numsamples`` samples.  Each frequency distribution is sampled
    ``numoutcomes`` times.  These three frequency distributions are
    then used to build six probability distributions.  Finally, the
    probability estimates of these distributions are compared to the
    actual probability of each sample.

    :type numsamples: int
    :param numsamples: The number of samples to use in each demo
        frequency distributions.
    :type numoutcomes: int
    :param numoutcomes: The total number of outcomes for each
        demo frequency distribution.  These outcomes are divided into
        ``numsamples`` bins.
    :rtype: None
    """
    # Randomly sample a stochastic process three times.
    fdist1 = _create_rand_fdist(numsamples, numoutcomes)
    fdist2 = _create_rand_fdist(numsamples, numoutcomes)
    fdist3 = _create_rand_fdist(numsamples, numoutcomes)
    # Use our samples to create probability distributions.
    pdists = [
        MLEProbDist(fdist1),
        LidstoneProbDist(fdist1, 0.5, numsamples),
        HeldoutProbDist(fdist1, fdist2, numsamples),
        HeldoutProbDist(fdist2, fdist1, numsamples),
        CrossValidationProbDist([fdist1, fdist2, fdist3], numsamples),
        SimpleGoodTuringProbDist(fdist1),
        SimpleGoodTuringProbDist(fdist1, 7),
        _create_sum_pdist(numsamples),
    ]
    # Find the probability of each sample.
    # Each row is (sample, empirical frequency, one estimate per pdist).
    vals = []
    for n in range(1,numsamples+1):
        vals.append(tuple([n, fdist1.freq(n)] +
                          [pdist.prob(n) for pdist in pdists]))
    # Print the results in a formatted table.
    # Column widths are derived from the number of distributions, so the
    # format strings are rebuilt for each section of the table.
    print(('%d samples (1-%d); %d outcomes were sampled for each FreqDist' %
           (numsamples, numsamples, numoutcomes)))
    print('='*9*(len(pdists)+2))
    FORMATSTR = ' FreqDist '+ '%8s '*(len(pdists)-1) + '| Actual'
    print(FORMATSTR % tuple(repr(pdist)[1:9] for pdist in pdists[:-1]))
    print('-'*9*(len(pdists)+2))
    FORMATSTR = '%3d %8.6f ' + '%8.6f '*(len(pdists)-1) + '| %8.6f'
    for val in vals:
        print(FORMATSTR % val)
    # Print the totals for each column (should all be 1.0)
    # zip(*vals) transposes rows into columns; skip the sample column.
    zvals = list(zip(*vals))
    sums = [sum(val) for val in zvals[1:]]
    print('-'*9*(len(pdists)+2))
    FORMATSTR = 'Total ' + '%8.6f '*(len(pdists)) + '| %8.6f'
    print(FORMATSTR % tuple(sums))
    print('='*9*(len(pdists)+2))
    # Display the distributions themselves, if they're short enough.
    if len("%s" % fdist1) < 70:
        print(' fdist1: %s' % fdist1)
        print(' fdist2: %s' % fdist2)
        print(' fdist3: %s' % fdist3)
        print()
    print('Generating:')
    # Sample each distribution's generator and show the resulting counts.
    for pdist in pdists:
        fdist = FreqDist(pdist.generate() for i in range(5000))
        print('%20s %s' % (pdist.__class__.__name__[:20], ("%s" % fdist)[:55]))
    print()
def gt_demo():
    """Demonstrate Simple Good-Turing smoothing on word frequencies from
    Austen's *Emma*, printing each word's raw count next to its smoothed
    probability (most frequent words first)."""
    from nltk import corpus
    emma_words = corpus.gutenberg.words('austen-emma.txt')
    fd = FreqDist(emma_words)
    sgt = SimpleGoodTuringProbDist(fd)
    # Header row; fixes the "freqency" misspelling in the original output.
    print('%18s %8s %14s'
          % ("word", "frequency", "SimpleGoodTuring"))
    # Iterate samples from most to least frequent.
    fd_keys_sorted = (key for key, value in
                      sorted(fd.items(), key=lambda item: item[1], reverse=True))
    for key in fd_keys_sorted:
        print('%18s %8d %14e'
              % (key, fd[key], sgt.prob(key)))
if __name__ == '__main__':
    # Run the demonstrations when this module is executed as a script.
    demo(6, 10)
    demo(5, 5000)
    gt_demo()
# Explicit public API of this module.
__all__ = ['ConditionalFreqDist', 'ConditionalProbDist',
           'ConditionalProbDistI', 'CrossValidationProbDist',
           'DictionaryConditionalProbDist', 'DictionaryProbDist', 'ELEProbDist',
           'FreqDist', 'SimpleGoodTuringProbDist', 'HeldoutProbDist',
           'ImmutableProbabilisticMixIn', 'LaplaceProbDist', 'LidstoneProbDist',
           'MLEProbDist', 'MutableProbDist', 'KneserNeyProbDist', 'ProbDistI', 'ProbabilisticMixIn',
           'UniformProbDist', 'WittenBellProbDist', 'add_logs',
           'log_likelihood', 'sum_logs', 'entropy']
|
adviti/melange | refs/heads/master | thirdparty/google_appengine/lib/django_1_2/tests/regressiontests/servers/models.py | 12133432 | |
geimer/easybuild-easyconfigs | refs/heads/master | test/easyconfigs/__init__.py | 12133432 | |
tylert/kb_builder | refs/heads/master | lib/__init__.py | 12133432 | |
sivel/ansible-modules-extras | refs/heads/devel | cloud/cloudstack/cs_affinitygroup.py | 48 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2015, René Moser <mail@renemoser.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: cs_affinitygroup
short_description: Manages affinity groups on Apache CloudStack based clouds.
description:
- Create and remove affinity groups.
version_added: '2.0'
author: "René Moser (@resmo)"
options:
name:
description:
- Name of the affinity group.
required: true
affinty_type:
description:
- Type of the affinity group. If not specified, first found affinity type is used.
required: false
default: null
description:
description:
- Description of the affinity group.
required: false
default: null
state:
description:
- State of the affinity group.
required: false
default: 'present'
choices: [ 'present', 'absent' ]
domain:
description:
- Domain the affinity group is related to.
required: false
default: null
account:
description:
- Account the affinity group is related to.
required: false
default: null
project:
description:
- Name of the project the affinity group is related to.
required: false
default: null
poll_async:
description:
- Poll async jobs until job has finished.
required: false
default: true
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
# Create a affinity group
- local_action:
module: cs_affinitygroup
name: haproxy
affinty_type: host anti-affinity
# Remove a affinity group
- local_action:
module: cs_affinitygroup
name: haproxy
state: absent
'''
RETURN = '''
---
id:
description: UUID of the affinity group.
returned: success
type: string
sample: 87b1e0ce-4e01-11e4-bb66-0050569e64b8
name:
description: Name of affinity group.
returned: success
type: string
sample: app
description:
description: Description of affinity group.
returned: success
type: string
sample: application affinity group
affinity_type:
description: Type of affinity group.
returned: success
type: string
sample: host anti-affinity
project:
description: Name of project the affinity group is related to.
returned: success
type: string
sample: Production
domain:
description: Domain the affinity group is related to.
returned: success
type: string
sample: example domain
account:
description: Account the affinity group is related to.
returned: success
type: string
sample: example account
'''
# import cloudstack common
from ansible.module_utils.cloudstack import *
class AnsibleCloudStackAffinityGroup(AnsibleCloudStack):
    """Manage CloudStack affinity groups: lookup, create and remove."""
    def __init__(self, module):
        super(AnsibleCloudStackAffinityGroup, self).__init__(module)
        # Map CloudStack API result keys to the keys returned to Ansible.
        self.returns = {
            'type': 'affinity_type',
        }
        # Lazily-populated cache; see get_affinity_group().
        self.affinity_group = None
    def get_affinity_group(self):
        """Return the existing affinity group dict, or None if it does not
        exist.  The first successful lookup is cached on the instance."""
        if not self.affinity_group:
            args = {
                'projectid': self.get_project(key='id'),
                'account': self.get_account(key='name'),
                'domainid': self.get_domain(key='id'),
                'name': self.module.params.get('name'),
            }
            affinity_groups = self.cs.listAffinityGroups(**args)
            if affinity_groups:
                # The name filter makes the first entry the requested group.
                self.affinity_group = affinity_groups['affinitygroup'][0]
        return self.affinity_group
    def get_affinity_type(self):
        """Resolve the affinity group type.

        Returns the type requested via the (historically misspelled)
        ``affinty_type`` option, or the first type offered by the API when
        the option is unset; fails the module if the requested type is
        unknown.
        """
        affinity_type = self.module.params.get('affinty_type')
        affinity_types = self.cs.listAffinityGroupTypes()
        if affinity_types:
            if not affinity_type:
                return affinity_types['affinityGroupType'][0]['type']
            for a in affinity_types['affinityGroupType']:
                if a['type'] == affinity_type:
                    return a['type']
        self.module.fail_json(msg="affinity group type '%s' not found" % affinity_type)
    def create_affinity_group(self):
        """Create the affinity group if absent (idempotent); honors check
        mode and optionally polls the async job for the final result."""
        affinity_group = self.get_affinity_group()
        if not affinity_group:
            self.result['changed'] = True
            args = {
                'name': self.module.params.get('name'),
                'type': self.get_affinity_type(),
                'description': self.module.params.get('description'),
                'projectid': self.get_project(key='id'),
                'account': self.get_account(key='name'),
                'domainid': self.get_domain(key='id'),
            }
            if not self.module.check_mode:
                res = self.cs.createAffinityGroup(**args)
                if 'errortext' in res:
                    self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
                poll_async = self.module.params.get('poll_async')
                if res and poll_async:
                    # Wait for the async job and return its result payload.
                    affinity_group = self.poll_job(res, 'affinitygroup')
        return affinity_group
    def remove_affinity_group(self):
        """Remove the affinity group if present (idempotent); honors check
        mode and optionally polls the async job to completion."""
        affinity_group = self.get_affinity_group()
        if affinity_group:
            self.result['changed'] = True
            args = {
                'name': self.module.params.get('name'),
                'projectid': self.get_project(key='id'),
                'account': self.get_account(key='name'),
                'domainid': self.get_domain(key='id'),
            }
            if not self.module.check_mode:
                res = self.cs.deleteAffinityGroup(**args)
                if 'errortext' in res:
                    self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
                poll_async = self.module.params.get('poll_async')
                if res and poll_async:
                    self.poll_job(res, 'affinitygroup')
        return affinity_group
def main():
    """Ansible module entry point: build the argument spec, dispatch on
    ``state`` and exit with the resulting affinity-group facts."""
    argument_spec = cs_argument_spec()
    argument_spec.update(dict(
        name=dict(required=True),
        # NOTE: 'affinty_type' (sic) is the documented option name and is
        # kept misspelled for backward compatibility.
        affinty_type=dict(default=None),
        description=dict(default=None),
        state=dict(choices=['present', 'absent'], default='present'),
        domain=dict(default=None),
        account=dict(default=None),
        project=dict(default=None),
        poll_async=dict(type='bool', default=True),
    ))
    module = AnsibleModule(
        argument_spec=argument_spec,
        required_together=cs_required_together(),
        supports_check_mode=True
    )
    try:
        acs_ag = AnsibleCloudStackAffinityGroup(module)
        state = module.params.get('state')
        if state in ['absent']:
            affinity_group = acs_ag.remove_affinity_group()
        else:
            affinity_group = acs_ag.create_affinity_group()
        result = acs_ag.get_result(affinity_group)
    except CloudStackException as e:
        # fail_json exits the process, so 'result' is always bound below.
        module.fail_json(msg='CloudStackException: %s' % str(e))
    module.exit_json(**result)
# import module snippets
# (legacy Ansible convention: the basic utility * import lives at the
# bottom of the module, after all definitions, before main() runs)
from ansible.module_utils.basic import *
if __name__ == '__main__':
    main()
|
ahmadiga/min_edx | refs/heads/master | lms/djangoapps/certificates/tests/test_views.py | 11 | """Tests for certificates views. """
import json
import ddt
from uuid import uuid4
from nose.plugins.attrib import attr
from mock import patch
from django.conf import settings
from django.core.cache import cache
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.client import Client
from django.test.utils import override_settings
from opaque_keys.edx.locator import CourseLocator
from openedx.core.lib.tests.assertions.events import assert_event_matches
from student.tests.factories import UserFactory
from track.tests import EventTrackingTestCase
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from util.testing import UrlResetMixin
from certificates.api import get_certificate_url
from certificates.models import (
ExampleCertificateSet,
ExampleCertificate,
GeneratedCertificate,
CertificateHtmlViewConfiguration,
)
from certificates.tests.factories import (
BadgeAssertionFactory,
)
# Feature-flag variants used with @override_settings in the tests below.
FEATURES_WITH_CERTS_ENABLED = settings.FEATURES.copy()
FEATURES_WITH_CERTS_ENABLED['CERTIFICATES_HTML_VIEW'] = True
FEATURES_WITH_CERTS_DISABLED = settings.FEATURES.copy()
FEATURES_WITH_CERTS_DISABLED['CERTIFICATES_HTML_VIEW'] = False
# Custom-template flag layered on top of the HTML-view-enabled features.
FEATURES_WITH_CUSTOM_CERTS_ENABLED = {
    "CUSTOM_CERTIFICATE_TEMPLATES_ENABLED": True
}
FEATURES_WITH_CUSTOM_CERTS_ENABLED.update(FEATURES_WITH_CERTS_ENABLED)
@attr('shard_1')
@ddt.ddt
class UpdateExampleCertificateViewTest(TestCase):
    """Tests for the XQueue callback that updates example certificates. """
    COURSE_KEY = CourseLocator(org='test', course='test', run='test')
    DESCRIPTION = 'test'
    TEMPLATE = 'test.pdf'
    DOWNLOAD_URL = 'http://www.example.com'
    ERROR_REASON = 'Kaboom!'
    def setUp(self):
        super(UpdateExampleCertificateViewTest, self).setUp()
        self.cert_set = ExampleCertificateSet.objects.create(course_key=self.COURSE_KEY)
        self.cert = ExampleCertificate.objects.create(
            example_cert_set=self.cert_set,
            description=self.DESCRIPTION,
            template=self.TEMPLATE,
        )
        self.url = reverse('certificates.views.update_example_certificate')
        # Since rate limit counts are cached, we need to clear
        # this before each test.
        cache.clear()
    def test_update_example_certificate_success(self):
        # A valid callback carrying a download URL marks the certificate
        # as successfully generated.
        response = self._post_to_view(self.cert, download_url=self.DOWNLOAD_URL)
        self._assert_response(response)
        self.cert = ExampleCertificate.objects.get()
        self.assertEqual(self.cert.status, ExampleCertificate.STATUS_SUCCESS)
        self.assertEqual(self.cert.download_url, self.DOWNLOAD_URL)
    def test_update_example_certificate_invalid_key(self):
        # An unknown access key must not match any certificate -> 404.
        payload = {
            'xqueue_header': json.dumps({
                'lms_key': 'invalid'
            }),
            'xqueue_body': json.dumps({
                'username': self.cert.uuid,
                'url': self.DOWNLOAD_URL
            })
        }
        response = self.client.post(self.url, data=payload)
        self.assertEqual(response.status_code, 404)
    def test_update_example_certificate_error(self):
        # A callback reporting an error stores the error reason.
        response = self._post_to_view(self.cert, error_reason=self.ERROR_REASON)
        self._assert_response(response)
        self.cert = ExampleCertificate.objects.get()
        self.assertEqual(self.cert.status, ExampleCertificate.STATUS_ERROR)
        self.assertEqual(self.cert.error_reason, self.ERROR_REASON)
    @ddt.data('xqueue_header', 'xqueue_body')
    def test_update_example_certificate_invalid_params(self, missing_param):
        # Omitting either XQueue parameter is a client error (400).
        payload = {
            'xqueue_header': json.dumps({
                'lms_key': self.cert.access_key
            }),
            'xqueue_body': json.dumps({
                'username': self.cert.uuid,
                'url': self.DOWNLOAD_URL
            })
        }
        del payload[missing_param]
        response = self.client.post(self.url, data=payload)
        self.assertEqual(response.status_code, 400)
    def test_update_example_certificate_missing_download_url(self):
        # A success callback without a download URL is a client error (400).
        payload = {
            'xqueue_header': json.dumps({
                'lms_key': self.cert.access_key
            }),
            'xqueue_body': json.dumps({
                'username': self.cert.uuid
            })
        }
        response = self.client.post(self.url, data=payload)
        self.assertEqual(response.status_code, 400)
    def test_update_example_cetificate_non_json_param(self):
        # Malformed JSON in either parameter is a client error (400).
        # (The "cetificate" typo in the method name is long-standing.)
        payload = {
            'xqueue_header': '{/invalid',
            'xqueue_body': '{/invalid'
        }
        response = self.client.post(self.url, data=payload)
        self.assertEqual(response.status_code, 400)
    def test_unsupported_http_method(self):
        # Only POST is accepted by this endpoint.
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 405)
    def test_bad_request_rate_limiting(self):
        payload = {
            'xqueue_header': json.dumps({
                'lms_key': 'invalid'
            }),
            'xqueue_body': json.dumps({
                'username': self.cert.uuid,
                'url': self.DOWNLOAD_URL
            })
        }
        # Exceed the rate limit for invalid requests
        # (simulate a DDOS with invalid keys)
        for _ in range(100):
            response = self.client.post(self.url, data=payload)
            if response.status_code == 403:
                break
        # The final status code should indicate that the rate
        # limit was exceeded.
        self.assertEqual(response.status_code, 403)
    def _post_to_view(self, cert, download_url=None, error_reason=None):
        """Simulate a callback from the XQueue to the example certificate end-point. """
        header = {'lms_key': cert.access_key}
        body = {'username': cert.uuid}
        if download_url is not None:
            body['url'] = download_url
        if error_reason is not None:
            # The view keys off the 'error' flag; the reason text is extra.
            body['error'] = 'error'
            body['error_reason'] = self.ERROR_REASON
        payload = {
            'xqueue_header': json.dumps(header),
            'xqueue_body': json.dumps(body)
        }
        return self.client.post(self.url, data=payload)
    def _assert_response(self, response):
        """Check the response from the callback end-point. """
        content = json.loads(response.content)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(content['return_code'], 0)
def fakemicrosite(name, default=None):
    """
    Test double for ``microsite.get_value``: report a fixed microsite
    configuration key and fall back to ``default`` for everything else.
    """
    return 'test_microsite' if name == 'microsite_config_key' else default
@attr('shard_1')
class MicrositeCertificatesViewsTests(ModuleStoreTestCase):
    """
    Tests for the microsite certificates web/html views
    """
    def setUp(self):
        super(MicrositeCertificatesViewsTests, self).setUp()
        self.client = Client()
        self.course = CourseFactory.create(
            org='testorg', number='run1', display_name='refundable course'
        )
        self.course_id = self.course.location.course_key
        self.user = UserFactory.create(
            email='joe_user@edx.org',
            username='joeuser',
            password='foo'
        )
        self.user.profile.name = "Joe User"
        self.user.profile.save()
        self.client.login(username=self.user.username, password='foo')
        # A generated certificate for the user/course is needed so the
        # HTML certificate view has something to render.
        self.cert = GeneratedCertificate.objects.create(
            user=self.user,
            course_id=self.course_id,
            verify_uuid=uuid4(),
            download_uuid=uuid4(),
            grade="0.95",
            key='the_key',
            distinction=True,
            status='generated',
            mode='honor',
            name=self.user.profile.name,
        )
    def _certificate_html_view_configuration(self, configuration_string, enabled=True):
        """
        This will create a certificate html configuration
        """
        config = CertificateHtmlViewConfiguration(enabled=enabled, configuration=configuration_string)
        config.save()
        return config
    def _add_course_certificates(self, count=1, signatory_count=0, is_active=True):
        """
        Create certificate for the course.
        """
        signatories = [
            {
                'name': 'Signatory_Name ' + str(i),
                'title': 'Signatory_Title ' + str(i),
                'organization': 'Signatory_Organization ' + str(i),
                'signature_image_path': '/static/certificates/images/demo-sig{}.png'.format(i),
                'id': i,
            } for i in xrange(signatory_count)
        ]
        certificates = [
            {
                'id': i,
                'name': 'Name ' + str(i),
                'description': 'Description ' + str(i),
                'course_title': 'course_title_' + str(i),
                'signatories': signatories,
                'version': 1,
                'is_active': is_active
            } for i in xrange(count)
        ]
        self.course.certificates = {'certificates': certificates}
        self.course.cert_html_view_enabled = True
        self.course.save()
        self.store.update_item(self.course, self.user.id)
    @patch("microsite_configuration.microsite.get_value", fakemicrosite)
    @override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
    def test_html_view_for_microsite(self):
        # With a microsite-specific configuration present, the certificate
        # page must render the microsite's branding, not the default.
        test_configuration_string = """{
            "default": {
                "accomplishment_class_append": "accomplishment-certificate",
                "platform_name": "edX",
                "company_about_url": "http://www.edx.org/about-us",
                "company_privacy_url": "http://www.edx.org/edx-privacy-policy",
                "company_tos_url": "http://www.edx.org/edx-terms-service",
                "company_verified_certificate_url": "http://www.edx.org/verified-certificate",
                "document_stylesheet_url_application": "/static/certificates/sass/main-ltr.css",
                "logo_src": "/static/certificates/images/logo-edx.svg",
                "logo_url": "http://www.edx.org"
            },
            "test_microsite": {
                "accomplishment_class_append": "accomplishment-certificate",
                "platform_name": "platform_microsite",
                "company_about_url": "http://www.microsite.org/about-us",
                "company_privacy_url": "http://www.microsite.org/edx-privacy-policy",
                "company_tos_url": "http://www.microsite.org/microsite-terms-service",
                "company_verified_certificate_url": "http://www.microsite.org/verified-certificate",
                "document_stylesheet_url_application": "/static/certificates/sass/main-ltr.css",
                "logo_src": "/static/certificates/images/logo-microsite.svg",
                "logo_url": "http://www.microsite.org",
                "company_about_description": "This is special microsite aware company_about_description content",
                "company_about_title": "Microsite title"
            },
            "honor": {
                "certificate_type": "Honor Code"
            }
        }"""
        config = self._certificate_html_view_configuration(configuration_string=test_configuration_string)
        self.assertEquals(config.configuration, test_configuration_string)
        test_url = get_certificate_url(
            user_id=self.user.id,
            course_id=unicode(self.course.id)
        )
        self._add_course_certificates(count=1, signatory_count=2)
        response = self.client.get(test_url)
        self.assertIn('platform_microsite', response.content)
        self.assertIn('http://www.microsite.org', response.content)
        self.assertIn('This is special microsite aware company_about_description content', response.content)
        self.assertIn('Microsite title', response.content)
    @patch("microsite_configuration.microsite.get_value", fakemicrosite)
    @override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
    def test_html_view_microsite_configuration_missing(self):
        # When the configuration lacks a microsite section, the view must
        # fall back to the default branding.
        test_configuration_string = """{
            "default": {
                "accomplishment_class_append": "accomplishment-certificate",
                "platform_name": "edX",
                "company_about_url": "http://www.edx.org/about-us",
                "company_privacy_url": "http://www.edx.org/edx-privacy-policy",
                "company_tos_url": "http://www.edx.org/edx-terms-service",
                "company_verified_certificate_url": "http://www.edx.org/verified-certificate",
                "document_stylesheet_url_application": "/static/certificates/sass/main-ltr.css",
                "logo_src": "/static/certificates/images/logo-edx.svg",
                "logo_url": "http://www.edx.org",
                "company_about_description": "This should not survive being overwritten by static content"
            },
            "honor": {
                "certificate_type": "Honor Code"
            }
        }"""
        config = self._certificate_html_view_configuration(configuration_string=test_configuration_string)
        self.assertEquals(config.configuration, test_configuration_string)
        test_url = get_certificate_url(
            user_id=self.user.id,
            course_id=unicode(self.course.id)
        )
        self._add_course_certificates(count=1, signatory_count=2)
        response = self.client.get(test_url)
        self.assertIn('edX', response.content)
        self.assertNotIn('platform_microsite', response.content)
        self.assertNotIn('http://www.microsite.org', response.content)
        self.assertNotIn('This should not survive being overwritten by static content', response.content)
class TrackShareRedirectTest(UrlResetMixin, ModuleStoreTestCase, EventTrackingTestCase):
    """
    Verifies the badge image share event is sent out.
    """
    @patch.dict(settings.FEATURES, {"ENABLE_OPENBADGES": True})
    def setUp(self):
        super(TrackShareRedirectTest, self).setUp('certificates.urls')
        self.client = Client()
        self.course = CourseFactory.create(
            org='testorg', number='run1', display_name='trackable course'
        )
        # A badge assertion for the user/course that the share-tracker
        # endpoint will redirect to and report on.
        self.assertion = BadgeAssertionFactory(
            user=self.user, course_id=self.course.id, data={
                'image': 'http://www.example.com/image.png',
                'json': {'id': 'http://www.example.com/assertion.json'},
                'issuer': 'http://www.example.com/issuer.json'
            },
        )
    def test_social_event_sent(self):
        # Hitting the share-tracker URL should 302-redirect to the badge
        # image and emit an analytics event describing the share.
        test_url = '/certificates/badge_share_tracker/{}/social_network/{}/'.format(
            unicode(self.course.id),
            self.user.username,
        )
        self.recreate_tracker()
        response = self.client.get(test_url)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response['Location'], 'http://www.example.com/image.png')
        assert_event_matches(
            {
                'name': 'edx.badge.assertion.shared',
                'data': {
                    'course_id': 'testorg/run1/trackable_course',
                    'social_network': 'social_network',
                    # pylint: disable=no-member
                    'assertion_id': self.assertion.id,
                    'assertion_json_url': 'http://www.example.com/assertion.json',
                    'assertion_image_url': 'http://www.example.com/image.png',
                    'user_id': self.user.id,
                    'issuer': 'http://www.example.com/issuer.json',
                    'enrollment_mode': 'honor'
                },
            },
            self.get_event()
        )
|
pschmitt/home-assistant | refs/heads/dev | tests/components/facebox/__init__.py | 36 | """Tests for the facebox component."""
|
mith1979/ansible_automation | refs/heads/master | applied_python/applied_python/lib/python2.7/site-packages/astroid/tests/testdata/python2/data/module1abs/__init__.py | 12 | from __future__ import absolute_import, print_function
from . import core
from .core import *
print(sys.version)
|
ldirer/scikit-learn | refs/heads/master | sklearn/utils/tests/test_random.py | 85 | from __future__ import division
import numpy as np
import scipy.sparse as sp
from scipy.misc import comb as combinations
from numpy.testing import assert_array_almost_equal
from sklearn.utils.random import sample_without_replacement
from sklearn.utils.random import random_choice_csc
from sklearn.utils.testing import (
assert_raises,
assert_equal,
assert_true)
###############################################################################
# test custom sampling without replacement algorithm
###############################################################################
def test_invalid_sample_without_replacement_algorithm():
    """An unrecognized method name must raise ValueError."""
    bad_method = "unknown"
    assert_raises(ValueError, sample_without_replacement, 5, 4, bad_method)
def test_sample_without_replacement_algorithms():
    """Run the generic sampling checks against every available method."""
    for method_name in ("auto", "tracking_selection", "reservoir_sampling",
                        "pool"):
        # Bind the method name as a default argument so the helper does not
        # depend on late binding of the loop variable.
        def sampler(n_population, n_samples, random_state=None,
                    _method=method_name):
            return sample_without_replacement(n_population, n_samples,
                                              method=_method,
                                              random_state=random_state)
        check_edge_case_of_sample_int(sampler)
        check_sample_int(sampler)
        check_sample_int_distribution(sampler)
def check_edge_case_of_sample_int(sample_without_replacement):
    """Boundary conditions for a sample-without-replacement callable."""
    # Oversampling: n_population < n_samples must be rejected.
    for n_population, n_samples in ((0, 1), (1, 2)):
        assert_raises(ValueError, sample_without_replacement,
                      n_population, n_samples)
    # n_population == n_samples (including the empty population).
    assert_equal(sample_without_replacement(0, 0).shape, (0, ))
    assert_equal(sample_without_replacement(1, 1).shape, (1, ))
    # n_population > n_samples.
    assert_equal(sample_without_replacement(5, 0).shape, (0, ))
    assert_equal(sample_without_replacement(5, 1).shape, (1, ))
    # Negative sizes on either side must be rejected.
    for n_population, n_samples in ((-1, 5), (5, -1)):
        assert_raises(ValueError, sample_without_replacement,
                      n_population, n_samples)
def check_sample_int(sample_without_replacement):
    """For every 0 <= n_samples <= n_population, the sample must have the
    requested length and contain only unique, in-range population members.

    Heavily inspired by test_random.py of the Python core test suite.
    """
    n_population = 100
    for n_samples in range(n_population + 1):
        drawn = sample_without_replacement(n_population, n_samples)
        assert_equal(len(drawn), n_samples)
        distinct = np.unique(drawn)
        assert_equal(np.size(distinct), n_samples)
        assert_true(np.all(distinct < n_population))
    # Degenerate case: empty population, empty sample.
    assert_equal(np.size(sample_without_replacement(0, 0)), 0)
def check_sample_int_distribution(sample_without_replacement):
    """Repeated sampling must eventually produce every possible subset.

    Heavily inspired by test_random.py of the Python core test suite.
    Counting combinations (rather than permutations) keeps the check valid
    for algorithms that do not return a random permutation of the subset.
    """
    n_population = 10
    # A large number of trials prevents false negatives without slowing
    # the normal case.
    n_trials = 10000
    for n_samples in range(n_population):
        n_expected = combinations(n_population, n_samples, exact=True)
        seen = set()
        for _ in range(n_trials):
            seen.add(frozenset(sample_without_replacement(n_population,
                                                          n_samples)))
            if len(seen) == n_expected:
                break
        else:
            raise AssertionError(
                "number of combinations != number of expected (%s != %s)" %
                (len(seen), n_expected))
def test_random_choice_csc(n_samples=10000, random_state=24):
    """Empirical class frequencies of random_choice_csc output must match
    the requested probabilities (checked to 1 decimal place, per column).
    """
    # Explicit class probabilities
    classes = [np.array([0, 1]), np.array([0, 1, 2])]
    class_probabilites = [np.array([0.5, 0.5]), np.array([0.6, 0.1, 0.3])]
    got = random_choice_csc(n_samples, classes, class_probabilites,
                            random_state)
    assert_true(sp.issparse(got))
    for k in range(len(classes)):
        # Observed frequency of each class value in column k.
        p = np.bincount(got.getcol(k).toarray().ravel()) / float(n_samples)
        assert_array_almost_equal(class_probabilites[k], p, decimal=1)
    # Implicit class probabilities (uniform over the classes of a column)
    classes = [[0, 1], [1, 2]] # test for array-like support
    class_probabilites = [np.array([0.5, 0.5]), np.array([0, 1/2, 1/2])]
    got = random_choice_csc(n_samples=n_samples,
                            classes=classes,
                            random_state=random_state)
    assert_true(sp.issparse(got))
    for k in range(len(classes)):
        p = np.bincount(got.getcol(k).toarray().ravel()) / float(n_samples)
        assert_array_almost_equal(class_probabilites[k], p, decimal=1)
    # Edge case probabilities 1.0 and 0.0
    classes = [np.array([0, 1]), np.array([0, 1, 2])]
    class_probabilites = [np.array([1.0, 0.0]), np.array([0.0, 1.0, 0.0])]
    got = random_choice_csc(n_samples, classes, class_probabilites,
                            random_state)
    assert_true(sp.issparse(got))
    for k in range(len(classes)):
        # minlength keeps zero-probability classes in the histogram.
        p = np.bincount(got.getcol(k).toarray().ravel(),
                        minlength=len(class_probabilites[k])) / n_samples
        assert_array_almost_equal(class_probabilites[k], p, decimal=1)
    # One class target data
    classes = [[1], [0]] # test for array-like support
    class_probabilites = [np.array([0.0, 1.0]), np.array([1.0])]
    got = random_choice_csc(n_samples=n_samples,
                            classes=classes,
                            random_state=random_state)
    assert_true(sp.issparse(got))
    for k in range(len(classes)):
        p = np.bincount(got.getcol(k).toarray().ravel()) / n_samples
        assert_array_almost_equal(class_probabilites[k], p, decimal=1)
def test_random_choice_csc_errors():
    """random_choice_csc must reject inconsistent or unsupported inputs."""
    bad_inputs = [
        # length mismatch between a classes entry and its probabilities
        ([np.array([0, 1]), np.array([0, 1, 2, 3])],
         [np.array([0.5, 0.5]), np.array([0.6, 0.1, 0.3])]),
        # string class dtype is not supported
        ([np.array(["a", "1"]), np.array(["z", "1", "2"])],
         [np.array([0.5, 0.5]), np.array([0.6, 0.1, 0.3])]),
        # float class dtype is not supported
        ([np.array([4.2, 0.1]), np.array([0.1, 0.2, 9.4])],
         [np.array([0.5, 0.5]), np.array([0.6, 0.1, 0.3])]),
        # probabilities that do not sum to 1
        ([np.array([0, 1]), np.array([0, 1, 2])],
         [np.array([0.5, 0.6]), np.array([0.6, 0.1, 0.3])]),
    ]
    for classes, class_probabilites in bad_inputs:
        assert_raises(ValueError, random_choice_csc, 4, classes,
                      class_probabilites, 1)
|
padmasambhava/tiki2hugo | refs/heads/master | t2h/__init__.py | 12133432 | |
niemmi/algolib | refs/heads/master | algolib/binary_tree/__init__.py | 12133432 | |
beni55/edx-platform | refs/heads/master | lms/djangoapps/instructor/__init__.py | 12133432 | |
teltek/edx-platform | refs/heads/master | lms/djangoapps/badges/apps.py | 35 | """
Badges Application Configuration
Signal handlers are connected here.
"""
from django.apps import AppConfig
class BadgesConfig(AppConfig):
    """
    Application Configuration for Badges.
    """
    # Django application label, used by the app registry.
    name = u'badges'
    def ready(self):
        """
        Connect signal handlers.

        Importing the handlers module registers its signal receivers as a
        side effect; the imported name itself is deliberately unused.
        """
        from . import handlers  # pylint: disable=unused-variable
|
marinho/geraldo | refs/heads/master | site/newsite/site-geraldo/django/core/template_loader.py | 382 | # This module is DEPRECATED!
#
# You should no longer be using django.template_loader.
#
# Use django.template.loader instead.
from django.template.loader import *
|
obiben/profitpy | refs/heads/master | examples/accountkeysdialog.py | 18 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2007 Troy Melhase <troy@gci.net>
# Distributed under the terms of the GNU General Public License v2
from PyQt4.QtCore import Qt
from PyQt4.QtGui import QDialog, QStandardItem, QStandardItemModel
from .ui_accountkeysdialog import Ui_AccountKeysDialog
class AccountKeysModel(QStandardItemModel):
    """Three-column item model (Display, Item, Currency) of account keys."""
    def __init__(self, parent=None):
        QStandardItemModel.__init__(self, parent)
        self.setHorizontalHeaderLabels(['Display', 'Item', 'Currency'])
    def keys(self):
        """Yield ((item, currency), checked) pairs, one per model row."""
        root_index = self.indexFromItem(self.invisibleRootItem())
        for row in range(self.rowCount(root_index)):
            key = str(self.item(row, 1).text())
            currency = str(self.item(row, 2).text())
            checked = self.item(row, 0).checkState() == Qt.Checked
            yield (key, currency), checked
class AccountKeysItem(QStandardItem):
    """Non-editable QStandardItem, optionally with a checkbox.

    :param text: display string for the item
    :param checked: initial check state (truthy -> checked); only used
        when ``checkable`` is true
    :param checkable: whether the item carries a checkbox at all
    """
    def __init__(self, text='', checked=0, checkable=False):
        QStandardItem.__init__(self, text)
        self.setEditable(False)
        self.setCheckable(checkable)
        if checkable:
            # Use the named enum members instead of the magic values
            # Qt.CheckState(2)/Qt.CheckState(0); Qt.Checked == 2 and
            # Qt.Unchecked == 0, so behavior is unchanged.
            self.setCheckState(Qt.Checked if checked else Qt.Unchecked)
class AccountKeysDialog(QDialog, Ui_AccountKeysDialog):
    """Dialog presenting account keys in a table with per-row checkboxes."""
    def __init__(self, parent=None):
        QDialog.__init__(self, parent)
        self.setupUi(self)
        self.dataModel = AccountKeysModel()
        self.tableView.setModel(self.dataModel)
        self.tableView.verticalHeader().hide()
    def setupKeys(self, mapping):
        """Fill the table from an iterable of ((key, currency), checked)."""
        model = self.dataModel
        # sorted() gives a stable, alphabetical row order.
        for (key, currency), checked in sorted(mapping):
            model.appendRow([
                AccountKeysItem(checked=checked, checkable=True),
                AccountKeysItem(key),
                AccountKeysItem(currency)
            ])
        view = self.tableView
        view.resizeRowsToContents()
        view.resizeColumnsToContents()
    def keys(self):
        """Return the model's ((key, currency), checked) generator."""
        return self.dataModel.keys()
|
NeveHanter/conparser | refs/heads/master | tests/test_vcard_30.py | 3 | # -*- coding: utf-8 -*-
__author__ = "NeveHanter <nevehanter@gmail.com>" |
OPM/opm-common | refs/heads/master | python/tests/test_time_vector.py | 5 | import unittest
import datetime
from opm.tools import *
from opm.io.parser import Parser
try:
from tests.utils import test_path, tmp
except ImportError:
from utils import test_path, tmp
class TestTimeVector(unittest.TestCase):
    """Tests for opm.tools.TimeVector: construction, loading schedule
    files/strings, date lookup and deletion of dated keyword sets.
    """
    def setUp(self):
        pass
    def test_create(self):
        # Construction, keyword insertion (including out-of-order dates)
        # and date-based indexing.
        start_date = datetime.date(2018,1,1)
        start_datetime = datetime.datetime(2018,1,1)
        # base_string and base_file are mutually exclusive.
        with self.assertRaises(ValueError):
            tv = TimeVector(start_date, base_string = "string", base_file="XYZ")
        tv = TimeVector( start_date )
        self.assertEqual(len(tv), 1)
        with self.assertRaises(IndexError):
            tv[1]
        # Keywords cannot be added before the start date.
        passed_date = datetime.datetime(2000,1,1)
        with self.assertRaises(ValueError):
            tv.add_keywords(passed_date, [])
        next_date = datetime.datetime(2018,2,1)
        tv.add_keywords(next_date, ["KEY1"])
        self.assertEqual(len(tv), 2)
        # Inserting a date between existing ones keeps the vector ordered.
        middle_date = datetime.datetime(2018,1,15)
        tv.add_keywords(middle_date,[])
        self.assertEqual(len(tv) ,3)
        ts1 = tv[0]
        self.assertEqual(ts1.dt, start_datetime)
        # Adding to an existing date appends to that date's keyword list.
        tv.add_keywords(next_date, ["KEY2"])
        self.assertEqual(len(tv),3)
        ts = tv[-1]
        self.assertEqual(ts.keywords, ["KEY1", "KEY2"])
        self.assertIn(middle_date, tv)
        self.assertEqual(tv.dates, [start_datetime, middle_date, next_date])
        with self.assertRaises(KeyError):
            tv[datetime.datetime(1980,1,1)]
        # datetime and date keys referring to the same day are equivalent.
        ts1 = tv[next_date]
        ts2 = tv[datetime.date(next_date.year, next_date.month, next_date.day)]
        self.assertEqual(ts1,ts2)
    def test_load(self):
        # Loading several schedule fragments merges them into one sorted
        # date vector.
        tv = TimeVector(datetime.date(1997, 11, 6), base_file = test_path("data/schedule/part1.sch"))
        tv.load(test_path("data/schedule/part3.sch"))
        tv.load(test_path("data/schedule/fragment_dates.sch"))
        tv.load(test_path("data/schedule/part2.sch"))
        self.assertEqual(tv.dates, [datetime.datetime(1997, 11, 6),
                                    datetime.datetime(1997, 11, 14, 0, 0, 0, 1000),
                                    datetime.datetime(1997, 12, 1),
                                    datetime.datetime(1997, 12, 17),
                                    datetime.datetime(1998, 1, 1),
                                    datetime.datetime(1998, 2, 1),
                                    datetime.datetime(1998, 3, 1),
                                    datetime.datetime(1998, 3, 29),
                                    datetime.datetime(1998, 3, 30),
                                    datetime.datetime(1998, 4, 1),
                                    datetime.datetime(1998, 4, 23),
                                    datetime.datetime(1998, 5, 1),
                                    datetime.datetime(1998, 5, 26),
                                    datetime.datetime(1998, 5, 27),
                                    datetime.datetime(1998, 6, 1),
                                    datetime.datetime(1998, 8, 1)])
    def test_str(self):
        # str(tv) round-trips: loading the rendered string reproduces the
        # same sequence of dates.
        tv = TimeVector(datetime.date(1997, 11, 6), base_string = open(test_path("data/schedule/part1.sch")).read())
        tv.load(test_path("data/schedule/part3.sch"))
        tv.load(test_path("data/schedule/part2.sch"))
        s = str(tv)
        tv2 = TimeVector(datetime.date(1997, 11, 6))
        tv2.load_string(s, date=datetime.datetime(1997, 11, 6))
        for ts1,ts2 in zip(tv,tv2):
            self.assertEqual(ts1.dt, ts2.dt)
    def test_optional(self):
        tv = TimeVector(datetime.date(1997, 11, 6), base_file = test_path("data/schedule/part1.sch"))
        # Must have a starting date, either as first keyword in loaded file,
        # or alternatively as the optional date argument.
        with self.assertRaises(ValueError):
            tv.load(test_path("data/schedule/fragment.sch"))
        with self.assertRaises(ValueError):
            tv.load(test_path("data/schedule/fragment_dates.sch"), date = datetime.datetime(1998, 1,1))
        tv.load(test_path("data/schedule/fragment.sch"), date = datetime.datetime(1998, 1, 10))
        ts = tv[-1]
        self.assertEqual(ts.dt, datetime.datetime(1998, 1 , 10))
        self.assertEqual(ts.keywords[0].name, "WCONINJE")
    def test_user_test(self):
        # Regression test from a user-reported schedule template.
        tv=TimeVector(datetime.date(1999,12,31))
        tv.load(test_path('data/schedule/TEMPLATE.SCH'), date=datetime.datetime(1999,12,31))
        self.assertListEqual(tv.dates, [datetime.datetime(1999,12,31),
                                        datetime.datetime(2000,1,1),
                                        datetime.datetime(2000,2,1),
                                        datetime.datetime(2000,3,1)])
    def test_no_leading_DATES(self):
        # The rendered string must start with the first real keyword, not a
        # leading DATES record; an empty vector renders as the empty string.
        tv = TimeVector(datetime.date(1997, 11, 6), base_file=test_path("data/schedule/part1.sch"))
        s = str(tv)
        d = Parser().parse_string(s)
        kw0 = d[0]
        self.assertEqual(kw0.name, "WELSPECS")
        tv2 = TimeVector(datetime.date(2000,1,1))
        self.assertEqual("", str(tv2))
    def test_drop_dates(self):
        # delete() removes a date (and its keywords); unknown or already
        # deleted dates raise KeyError.
        tv = TimeVector(datetime.date(1997, 11, 6), base_file=test_path("data/schedule/part1.sch"))
        with self.assertRaises(KeyError):
            tv.delete(datetime.datetime(2019,1,1))
        ts = tv[datetime.datetime(1997,11,14,0,0,0,1000)]
        self.assertTrue("WTEST" in ts)
        tv.delete(datetime.datetime(1997,11,14,0,0,0,1000))
        with self.assertRaises(KeyError):
            tv.delete(datetime.datetime(1997,11,14,0,0,0,1000))
        for ts in tv:
            self.assertFalse("WTEST" in ts)
    def test_drop_dates2(self):
        # Deleting interior and trailing dates from a hand-built vector.
        tv = TimeVector(datetime.datetime(2017,1,1))
        tv.add_keywords(datetime.datetime(2018,1,1), ['FOO18'])
        tv.add_keywords(datetime.datetime(2019,1,1), ['FOO19'])
        tv.add_keywords(datetime.datetime(2020,1,1), ['FOO20'])
        tv.delete(datetime.datetime(2019,1,1))
        tv.delete(datetime.datetime(2020,1,1))
if __name__ == "__main__":
unittest.main()
|
mandeepdhami/nova | refs/heads/master | nova/tests/unit/virt/vmwareapi/stubs.py | 52 | # Copyright (c) 2011 Citrix Systems, Inc.
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Stubouts for the test suite
"""
from oslo_vmware import exceptions as vexc
from nova.tests.unit.virt.vmwareapi import fake
from nova.virt.vmwareapi import driver
from nova.virt.vmwareapi import images
from nova.virt.vmwareapi import network_util
def fake_get_vim_object(arg):
    """Stubs out the VMwareAPISession's get_vim_object method."""
    # 'arg' stands in for the session instance (self) and is unused.
    return fake.FakeVim()
@property
def fake_vim_prop(arg):
    """Stubs out the VMwareAPISession's vim property access method."""
    # Declared with @property at module level so stubs.Set can install it
    # directly as the 'vim' property on the VMwareAPISession class
    # (see set_stubs in this module).
    return fake.get_fake_vim_object(arg)
def fake_is_vim_object(arg, module):
    """Stubs out the VMwareAPISession's is_vim_object method."""
    # 'arg' stands in for the session instance (self) and is unused.
    return isinstance(module, fake.FakeVim)
def fake_temp_method_exception():
    """Raise a VimFaultException representing an unauthenticated session."""
    fault_list = [vexc.NOT_AUTHENTICATED]
    raise vexc.VimFaultException(fault_list,
                                 "Session Empty/Not Authenticated")
def fake_temp_session_exception():
    """Raise a VimConnectionException standing in for a session failure."""
    raise vexc.VimConnectionException("it's a fake!",
                                      "Session Exception")
def fake_session_file_exception():
    """Raise a VimFaultException signalling that the file already exists."""
    raise vexc.VimFaultException([vexc.FILE_ALREADY_EXISTS],
                                 Exception('fake'))
def fake_session_permission_exception():
    """Raise a VimFaultException for a denied-permission fault."""
    details = {'privilegeId': 'Resource.AssignVMToPool', 'object': 'domain-c7'}
    raise vexc.VimFaultException(
        [vexc.NO_PERMISSION],
        'Permission to perform this operation was denied.',
        details=details)
def set_stubs(stubs):
    """Set the stubs."""
    # Replace network, image and session entry points with their fakes so
    # the test suite never contacts a real vCenter.
    stubs.Set(network_util, 'get_network_with_the_name',
              fake.fake_get_network)
    stubs.Set(images, 'upload_image_stream_optimized', fake.fake_upload_image)
    stubs.Set(images, 'fetch_image', fake.fake_fetch_image)
    # Install the module-level property/method fakes defined above.
    stubs.Set(driver.VMwareAPISession, "vim", fake_vim_prop)
    stubs.Set(driver.VMwareAPISession, "_is_vim_object",
              fake_is_vim_object)
|
kaiweifan/horizon | refs/heads/vip2 | openstack_dashboard/test/tests/__init__.py | 12133432 | |
jphilipsen05/zulip | refs/heads/master | analytics/__init__.py | 12133432 | |
hirokiky/oauthlib | refs/heads/master | tests/oauth2/rfc6749/clients/__init__.py | 12133432 | |
flh/odoo | refs/heads/master | addons/mrp_repair/__init__.py | 380 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import mrp_repair
import wizard
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
nervous-laughter/qiime2 | refs/heads/master | qiime2/sdk/tests/test_result.py | 2 | # ----------------------------------------------------------------------------
# Copyright (c) 2016-2017, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
import os
import tempfile
import unittest
import qiime2.core.type
from qiime2.sdk import Result, Artifact, Visualization
from qiime2.sdk.result import ResultMetadata
import qiime2.core.archive as archive
from qiime2.core.testing.type import FourInts
from qiime2.core.testing.util import get_dummy_plugin, ArchiveTestingMixin
from qiime2.core.testing.visualizer import mapping_viz
class TestResult(unittest.TestCase, ArchiveTestingMixin):
    """Tests for the polymorphic Result interface (load/extract/peek/save),
    exercised through both concrete subclasses: Artifact and Visualization.
    """
    def make_provenance_capture(self):
        # You can't actually import a visualization, but I won't tell
        # visualization if you don't...
        return archive.ImportProvenanceCapture()
    def setUp(self):
        # Ignore the returned dummy plugin object, just run this to verify the
        # plugin exists as the tests rely on it being loaded.
        get_dummy_plugin()
        # TODO standardize temporary directories created by QIIME 2
        self.test_dir = tempfile.TemporaryDirectory(prefix='qiime2-test-temp-')
        # Render a small visualization into data_dir for the tests to load.
        self.data_dir = os.path.join(self.test_dir.name, 'viz-output')
        os.mkdir(self.data_dir)
        mapping_viz(self.data_dir,
                    {'abc': 'foo', 'def': 'bar'},
                    {'ghi': 'baz', 'jkl': 'bazz'},
                    key_label='Key', value_label='Value')
    def tearDown(self):
        self.test_dir.cleanup()
    def test_private_constructor(self):
        # Result() may only be created through Result.load and friends.
        with self.assertRaisesRegex(
                NotImplementedError,
                'Result constructor.*private.*Result.load'):
            Result()
    def test_load_artifact(self):
        # Result.load on a .qza returns an Artifact with the saved payload.
        saved_artifact = Artifact.import_data(FourInts, [-1, 42, 0, 43])
        fp = os.path.join(self.test_dir.name, 'artifact.qza')
        saved_artifact.save(fp)
        artifact = Result.load(fp)
        self.assertIsInstance(artifact, Artifact)
        self.assertEqual(artifact.type, FourInts)
        self.assertEqual(artifact.uuid, saved_artifact.uuid)
        self.assertEqual(artifact.view(list), [-1, 42, 0, 43])
    def test_load_visualization(self):
        # Result.load on a .qzv returns a Visualization.
        saved_visualization = Visualization._from_data_dir(
            self.data_dir, self.make_provenance_capture())
        fp = os.path.join(self.test_dir.name, 'visualization.qzv')
        saved_visualization.save(fp)
        visualization = Result.load(fp)
        self.assertIsInstance(visualization, Visualization)
        self.assertEqual(visualization.type, qiime2.core.type.Visualization)
        self.assertEqual(visualization.uuid, saved_visualization.uuid)
    def test_extract_artifact(self):
        # Extraction unpacks into <output_dir>/<uuid>/ with the expected
        # archive members.
        fp = os.path.join(self.test_dir.name, 'artifact.qza')
        artifact = Artifact.import_data(FourInts, [-1, 42, 0, 43])
        artifact.save(fp)
        root_dir = str(artifact.uuid)
        output_dir = os.path.join(self.test_dir.name, 'artifact-extract-test')
        result_dir = Result.extract(fp, output_dir=output_dir)
        self.assertEqual(result_dir, os.path.join(output_dir, root_dir))
        expected = {
            'VERSION',
            'metadata.yaml',
            'data/file1.txt',
            'data/file2.txt',
            'data/nested/file3.txt',
            'data/nested/file4.txt',
            'provenance/metadata.yaml',
            'provenance/VERSION',
            'provenance/action/action.yaml'
        }
        self.assertExtractedArchiveMembers(output_dir, root_dir, expected)
    def test_extract_visualization(self):
        fp = os.path.join(self.test_dir.name, 'visualization.qzv')
        visualization = Visualization._from_data_dir(
            self.data_dir, self.make_provenance_capture())
        visualization.save(fp)
        root_dir = str(visualization.uuid)
        output_dir = os.path.join(self.test_dir.name, 'viz-extract-test')
        result_dir = Result.extract(fp, output_dir=output_dir)
        self.assertEqual(result_dir, os.path.join(output_dir, root_dir))
        expected = {
            'VERSION',
            'metadata.yaml',
            'data/index.html',
            'data/css/style.css',
            'provenance/metadata.yaml',
            'provenance/VERSION',
            'provenance/action/action.yaml'
        }
        self.assertExtractedArchiveMembers(output_dir, root_dir, expected)
    def test_peek_artifact(self):
        # peek reads type/uuid/format without fully loading the archive.
        artifact = Artifact.import_data(FourInts, [0, 0, 42, 1000])
        fp = os.path.join(self.test_dir.name, 'artifact.qza')
        artifact.save(fp)
        metadata = Result.peek(fp)
        self.assertIsInstance(metadata, ResultMetadata)
        self.assertEqual(metadata.type, 'FourInts')
        self.assertEqual(metadata.uuid, str(artifact.uuid))
        self.assertEqual(metadata.format, 'FourIntsDirectoryFormat')
    def test_peek_visualization(self):
        visualization = Visualization._from_data_dir(
            self.data_dir, self.make_provenance_capture())
        fp = os.path.join(self.test_dir.name, 'visualization.qzv')
        visualization.save(fp)
        metadata = Result.peek(fp)
        self.assertIsInstance(metadata, ResultMetadata)
        self.assertEqual(metadata.type, 'Visualization')
        self.assertEqual(metadata.uuid, str(visualization.uuid))
        # Visualizations have no payload format.
        self.assertIsNone(metadata.format)
    def test_save_artifact_auto_extension(self):
        # save() appends '.qza' unless the path already ends with it.
        artifact = Artifact.import_data(FourInts, [0, 0, 42, 1000])
        # No extension.
        fp = os.path.join(self.test_dir.name, 'artifact')
        obs_fp = artifact.save(fp)
        obs_filename = os.path.basename(obs_fp)
        self.assertEqual(obs_filename, 'artifact.qza')
        # Wrong extension.
        fp = os.path.join(self.test_dir.name, 'artifact.zip')
        obs_fp = artifact.save(fp)
        obs_filename = os.path.basename(obs_fp)
        self.assertEqual(obs_filename, 'artifact.zip.qza')
        # Correct extension.
        fp = os.path.join(self.test_dir.name, 'artifact.qza')
        obs_fp = artifact.save(fp)
        obs_filename = os.path.basename(obs_fp)
        self.assertEqual(obs_filename, 'artifact.qza')
    def test_save_visualization_auto_extension(self):
        # save() appends '.qzv' unless the path already ends with it.
        visualization = Visualization._from_data_dir(
            self.data_dir, self.make_provenance_capture())
        # No extension.
        fp = os.path.join(self.test_dir.name, 'visualization')
        obs_fp = visualization.save(fp)
        obs_filename = os.path.basename(obs_fp)
        self.assertEqual(obs_filename, 'visualization.qzv')
        # Wrong extension.
        fp = os.path.join(self.test_dir.name, 'visualization.zip')
        obs_fp = visualization.save(fp)
        obs_filename = os.path.basename(obs_fp)
        self.assertEqual(obs_filename, 'visualization.zip.qzv')
        # Correct extension.
        fp = os.path.join(self.test_dir.name, 'visualization.qzv')
        obs_fp = visualization.save(fp)
        obs_filename = os.path.basename(obs_fp)
        self.assertEqual(obs_filename, 'visualization.qzv')
if __name__ == '__main__':
unittest.main()
|
anhstudios/swganh | refs/heads/develop | data/scripts/templates/object/tangible/loot/collectible/collectible_parts/shared_gong_structure_07.py | 2 | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Build the gong-structure collectible-part Tangible template."""
    tangible = Tangible()
    tangible.template = "object/tangible/loot/collectible/collectible_parts/shared_gong_structure_07.iff"
    tangible.attribute_template_id = -1
    tangible.stfName("collectible_loot_items_n","gong_structure_07")
    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####
    return tangible
jedie/pypyjs-standalone | refs/heads/master | website/js/pypy.js-0.3.0/lib/modules/distutils/fancy_getopt.py | 250 | """distutils.fancy_getopt
Wrapper around the standard getopt module that provides the following
additional features:
* short and long options are tied together
* options have help strings, so fancy_getopt could potentially
create a complete usage summary
* options set attributes of a passed-in object
"""
__revision__ = "$Id$"
import sys
import string
import re
import getopt
from distutils.errors import DistutilsGetoptError, DistutilsArgError
# Much like command_re in distutils.core, this is close to but not quite
# the same as a Python NAME -- except, in the spirit of most GNU
# utilities, we use '-' in place of '_'. (The spirit of LISP lives on!)
# The similarities to NAME are again not a coincidence...
longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)'
longopt_re = re.compile(r'^%s$' % longopt_pat)
# For recognizing "negative alias" options, eg. "quiet=!verbose"
neg_alias_re = re.compile("^(%s)=!(%s)$" % (longopt_pat, longopt_pat))
# This is used to translate long options to legitimate Python identifiers
# (for use as attributes of some object).
longopt_xlate = string.maketrans('-', '_')
class FancyGetopt:
"""Wrapper around the standard 'getopt()' module that provides some
handy extra functionality:
* short and long options are tied together
* options have help strings, and help text can be assembled
from them
* options set attributes of a passed-in object
* boolean options can have "negative aliases" -- eg. if
--quiet is the "negative alias" of --verbose, then "--quiet"
on the command line sets 'verbose' to false
"""
    def __init__ (self, option_table=None):
        """Initialize the parser, optionally from an initial option table.

        'option_table' is a list of 3- or 4-tuples; see the comment below
        for the tuple layout.
        """
        # The option table is (currently) a list of tuples.  The
        # tuples may have 3 or four values:
        #   (long_option, short_option, help_string [, repeatable])
        # if an option takes an argument, its long_option should have '='
        # appended; short_option should just be a single character, no ':'
        # in any case.  If a long_option doesn't have a corresponding
        # short_option, short_option should be None.  All option tuples
        # must have long options.
        self.option_table = option_table
        # 'option_index' maps long option names to entries in the option
        # table (ie. those 3-tuples).
        self.option_index = {}
        if self.option_table:
            self._build_index()
        # 'alias' records (duh) alias options; {'foo': 'bar'} means
        # --foo is an alias for --bar
        self.alias = {}
        # 'negative_alias' keeps track of options that are the boolean
        # opposite of some other option
        self.negative_alias = {}
        # These keep track of the information in the option table.  We
        # don't actually populate these structures until we're ready to
        # parse the command-line, since the 'option_table' passed in here
        # isn't necessarily the final word.
        self.short_opts = []
        self.long_opts = []
        self.short2long = {}
        self.attr_name = {}
        self.takes_arg = {}
        # And 'option_order' is filled up in 'getopt()'; it records the
        # original order of options (and their values) on the command-line,
        # but expands short options, converts aliases, etc.
        self.option_order = []
    # __init__ ()
    def _build_index (self):
        """Rebuild 'option_index' so each long option maps to its tuple."""
        self.option_index.clear()
        for option in self.option_table:
            self.option_index[option[0]] = option
    def set_option_table (self, option_table):
        """Replace the option table wholesale and refresh the index."""
        self.option_table = option_table
        self._build_index()
    def add_option (self, long_option, short_option=None, help_string=None):
        """Append one option tuple, rejecting duplicate long names."""
        if long_option in self.option_index:
            raise DistutilsGetoptError, \
                  "option conflict: already an option '%s'" % long_option
        else:
            option = (long_option, short_option, help_string)
            self.option_table.append(option)
            self.option_index[long_option] = option
    def has_option (self, long_option):
        """Return true if the option table for this parser has an
        option with long name 'long_option'."""
        return long_option in self.option_index
    def get_attr_name (self, long_option):
        """Translate long option name 'long_option' to the form it
        has as an attribute of some object: ie., translate hyphens
        to underscores (via the module-level 'longopt_xlate' table)."""
        return string.translate(long_option, longopt_xlate)
    def _check_alias_dict (self, aliases, what):
        """Verify that both sides of every alias pair name known options.

        'what' is only used in the error messages ("alias" or
        "negative alias").
        """
        assert isinstance(aliases, dict)
        for (alias, opt) in aliases.items():
            if alias not in self.option_index:
                raise DistutilsGetoptError, \
                      ("invalid %s '%s': "
                       "option '%s' not defined") % (what, alias, alias)
            if opt not in self.option_index:
                raise DistutilsGetoptError, \
                      ("invalid %s '%s': "
                       "aliased option '%s' not defined") % (what, alias, opt)
    def set_aliases (self, alias):
        """Set the aliases for this option parser."""
        self._check_alias_dict(alias, "alias")
        self.alias = alias
    def set_negative_aliases (self, negative_alias):
        """Set the negative aliases for this option parser.
        'negative_alias' should be a dictionary mapping option names to
        option names, both the key and value must already be defined
        in the option table."""
        self._check_alias_dict(negative_alias, "negative alias")
        self.negative_alias = negative_alias
def _grok_option_table (self):
"""Populate the various data structures that keep tabs on the
option table. Called by 'getopt()' before it can do anything
worthwhile.
"""
self.long_opts = []
self.short_opts = []
self.short2long.clear()
self.repeat = {}
for option in self.option_table:
if len(option) == 3:
long, short, help = option
repeat = 0
elif len(option) == 4:
long, short, help, repeat = option
else:
# the option table is part of the code, so simply
# assert that it is correct
raise ValueError, "invalid option tuple: %r" % (option,)
# Type- and value-check the option names
if not isinstance(long, str) or len(long) < 2:
raise DistutilsGetoptError, \
("invalid long option '%s': "
"must be a string of length >= 2") % long
if (not ((short is None) or
(isinstance(short, str) and len(short) == 1))):
raise DistutilsGetoptError, \
("invalid short option '%s': "
"must a single character or None") % short
self.repeat[long] = repeat
self.long_opts.append(long)
if long[-1] == '=': # option takes an argument?
if short: short = short + ':'
long = long[0:-1]
self.takes_arg[long] = 1
else:
# Is option is a "negative alias" for some other option (eg.
# "quiet" == "!verbose")?
alias_to = self.negative_alias.get(long)
if alias_to is not None:
if self.takes_arg[alias_to]:
raise DistutilsGetoptError, \
("invalid negative alias '%s': "
"aliased option '%s' takes a value") % \
(long, alias_to)
self.long_opts[-1] = long # XXX redundant?!
self.takes_arg[long] = 0
else:
self.takes_arg[long] = 0
# If this is an alias option, make sure its "takes arg" flag is
# the same as the option it's aliased to.
alias_to = self.alias.get(long)
if alias_to is not None:
if self.takes_arg[long] != self.takes_arg[alias_to]:
raise DistutilsGetoptError, \
("invalid alias '%s': inconsistent with "
"aliased option '%s' (one of them takes a value, "
"the other doesn't") % (long, alias_to)
# Now enforce some bondage on the long option name, so we can
# later translate it to an attribute name on some object. Have
# to do this a bit late to make sure we've removed any trailing
# '='.
if not longopt_re.match(long):
raise DistutilsGetoptError, \
("invalid long option name '%s' " +
"(must be letters, numbers, hyphens only") % long
self.attr_name[long] = self.get_attr_name(long)
if short:
self.short_opts.append(short)
self.short2long[short[0]] = long
# for option_table
# _grok_option_table()
    def getopt (self, args=None, object=None):
        """Parse command-line options in args. Store as attributes on object.
        If 'args' is None or not supplied, uses 'sys.argv[1:]'.  If
        'object' is None or not supplied, creates a new OptionDummy
        object, stores option values there, and returns a tuple (args,
        object).  If 'object' is supplied, it is modified in place and
        'getopt()' just returns 'args'; in both cases, the returned
        'args' is a modified copy of the passed-in 'args' list, which
        is left untouched.
        """
        if args is None:
            args = sys.argv[1:]
        if object is None:
            object = OptionDummy()
            created_object = 1
        else:
            created_object = 0
        # Build the lookup tables from the (possibly updated) option table.
        self._grok_option_table()
        short_opts = string.join(self.short_opts)
        try:
            opts, args = getopt.getopt(args, short_opts, self.long_opts)
        except getopt.error, msg:
            raise DistutilsArgError, msg
        for opt, val in opts:
            if len(opt) == 2 and opt[0] == '-': # it's a short option
                # Canonicalize to the corresponding long option name.
                opt = self.short2long[opt[1]]
            else:
                assert len(opt) > 2 and opt[:2] == '--'
                opt = opt[2:]
            alias = self.alias.get(opt)
            if alias:
                opt = alias
            if not self.takes_arg[opt]: # boolean option?
                assert val == '', "boolean option can't have value"
                # A negative alias flips the target option to false.
                alias = self.negative_alias.get(opt)
                if alias:
                    opt = alias
                    val = 0
                else:
                    val = 1
            attr = self.attr_name[opt]
            # The only repeating option at the moment is 'verbose'.
            # It has a negative option -q quiet, which should set verbose = 0.
            if val and self.repeat.get(attr) is not None:
                val = getattr(object, attr, 0) + 1
            setattr(object, attr, val)
            self.option_order.append((opt, val))
        # for opts
        if created_object:
            return args, object
        else:
            return args
    # getopt()
def get_option_order (self):
    """Return the list of (option, value) tuples recorded by the most
    recent run of 'getopt()'.

    Raises RuntimeError if 'getopt()' has not been called yet (i.e.
    'self.option_order' is still None).
    """
    # Guard clause: refuse to answer before any parsing has happened.
    if self.option_order is None:
        raise RuntimeError("'getopt()' hasn't been called yet")
    return self.option_order
def generate_help (self, header=None):
    """Generate help text (a list of strings, one per suggested line of
    output) from the option table for this FancyGetopt object.

    'header', if supplied, becomes the first line; otherwise a generic
    'Option summary:' heading is used.  Returns the list of lines
    (without trailing newlines).
    """
    # Blithely assume the option table is good: probably wouldn't call
    # 'generate_help()' unless you've already called 'getopt()'.

    # First pass: determine maximum length of long option names
    max_opt = 0
    for option in self.option_table:
        long = option[0]
        short = option[1]
        l = len(long)
        if long[-1] == '=':
            l = l - 1               # the trailing '=' is not displayed
        if short is not None:
            l = l + 5 # " (-x)" where short == 'x'
        if l > max_opt:
            max_opt = l

    opt_width = max_opt + 2 + 2 + 2 # room for indent + dashes + gutter

    # Typical help block looks like this:
    # --foo controls foonabulation
    # Help block for longest option looks like this:
    # --flimflam set the flim-flam level
    # and with wrapped text:
    # --flimflam set the flim-flam level (must be between
    # 0 and 100, except on Tuesdays)
    # Options with short names will have the short name shown (but
    # it doesn't contribute to max_opt):
    # --foo (-f) controls foonabulation
    # If adding the short option would make the left column too wide,
    # we push the explanation off to the next line
    # --flimflam (-l)
    # set the flim-flam level
    # Important parameters:
    # - 2 spaces before option block start lines
    # - 2 dashes for each long option name
    # - min. 2 spaces between option and explanation (gutter)
    # - 5 characters (incl. space) for short option name

    # Now generate lines of help text. (If 80 columns were good enough
    # for Jesus, then 78 columns are good enough for me!)
    line_width = 78
    text_width = line_width - opt_width
    big_indent = ' ' * opt_width
    if header:
        lines = [header]
    else:
        lines = ['Option summary:']

    for option in self.option_table:
        long, short, help = option[:3]
        text = wrap_text(help, text_width)
        if long[-1] == '=':
            long = long[0:-1]

        # Case 1: no short option at all (makes life easy)
        if short is None:
            if text:
                lines.append(" --%-*s %s" % (max_opt, long, text[0]))
            else:
                lines.append(" --%-*s " % (max_opt, long))

        # Case 2: we have a short option, so we have to include it
        # just after the long option
        else:
            opt_names = "%s (-%s)" % (long, short)
            if text:
                lines.append(" --%-*s %s" %
                             (max_opt, opt_names, text[0]))
            else:
                # BUGFIX: '%-*s' consumes TWO arguments (width, value);
                # the original passed only 'opt_names', which raised
                # "TypeError: not enough arguments for format string"
                # for any short option that has no help text.
                lines.append(" --%-*s" % (max_opt, opt_names))

        # Continuation lines of wrapped help text, indented past the
        # option-name column.
        for l in text[1:]:
            lines.append(big_indent + l)
    # for self.option_table

    return lines
# generate_help ()
def print_help (self, header=None, file=None):
    """Write the help text from 'generate_help()' to 'file', one line
    per entry.  'file' defaults to sys.stdout.
    """
    stream = file
    if stream is None:
        stream = sys.stdout
    for text_line in self.generate_help(header):
        stream.write(text_line + "\n")
# class FancyGetopt
def fancy_getopt (options, negative_opt, object, args):
    """Convenience wrapper: parse 'args' with a throwaway FancyGetopt
    instance built from the 'options' table, honouring the
    'negative_opt' alias map, storing results as attributes on
    'object'.  See FancyGetopt.getopt() for the full contract.
    """
    helper = FancyGetopt(options)
    helper.set_negative_aliases(negative_opt)
    return helper.getopt(args, object)
# Translation table mapping every whitespace character (tab, newline,
# carriage return, ...) onto a plain space; used by wrap_text() so the
# wrapping logic only ever has to deal with ' '.
WS_TRANS = string.maketrans(string.whitespace, ' ' * len(string.whitespace))
def wrap_text (text, width):
    """wrap_text(text : string, width : int) -> [string]

    Split 'text' into multiple lines of no more than 'width' characters
    each, and return the list of strings that results.  Returns [] for
    None input, and [text] unchanged when it already fits.  Lines are
    broken greedily at space or hyphen runs; a single chunk longer than
    'width' is hard-split at the line width.
    """
    if text is None:
        return []
    if len(text) <= width:
        return [text]

    # Normalize: expand tabs, then fold all whitespace to ' ' via the
    # module-level WS_TRANS table.
    text = string.expandtabs(text)
    text = string.translate(text, WS_TRANS)
    # Split into words, space runs, and hyphen runs (separators kept,
    # since the capturing group retains them).
    chunks = re.split(r'( +|-+)', text)
    chunks = filter(None, chunks) # ' - ' results in empty strings
    # NOTE: relies on Python 2 filter() returning a *list* -- 'chunks'
    # is mutated with 'del chunks[0]' below.
    lines = []

    while chunks:
        cur_line = [] # list of chunks (to-be-joined)
        cur_len = 0 # length of current line

        # Greedily pull chunks into the current line until full.
        while chunks:
            l = len(chunks[0])
            if cur_len + l <= width: # can squeeze (at least) this chunk in
                cur_line.append(chunks[0])
                del chunks[0]
                cur_len = cur_len + l
            else: # this line is full
                # drop last chunk if all space
                if cur_line and cur_line[-1][0] == ' ':
                    del cur_line[-1]
                break

        if chunks: # any chunks left to process?
            # if the current line is still empty, then we had a single
            # chunk that's too big too fit on a line -- so we break
            # down and break it up at the line width
            if cur_len == 0:
                cur_line.append(chunks[0][0:width])
                chunks[0] = chunks[0][width:]

            # all-whitespace chunks at the end of a line can be discarded
            # (and we know from the re.split above that if a chunk has
            # *any* whitespace, it is *all* whitespace)
            if chunks[0][0] == ' ':
                del chunks[0]

        # and store this line in the list-of-all-lines -- as a single
        # string, of course!
        lines.append(string.join(cur_line, ''))
    # while chunks

    return lines
def translate_longopt(opt):
    """Return 'opt' rewritten as a valid Python identifier: every "-"
    becomes "_" (via the module-level 'longopt_xlate' table).
    """
    identifier = string.translate(opt, longopt_xlate)
    return identifier
class OptionDummy:
    """Dummy class just used as a place to hold command-line option
    values as instance attributes."""

    def __init__ (self, options=()):
        """Create a new OptionDummy instance.  The attributes listed in
        'options' (any iterable of attribute-name strings) will be
        initialized to None.

        The default is an immutable empty tuple rather than the
        classic mutable-default '[]' pitfall; since 'options' is only
        iterated, this is fully backward-compatible.
        """
        for opt in options:
            setattr(self, opt, None)
|
t-amerssonis/okami | refs/heads/master | src/Okami/third-parts/__init__.py | 12133432 | |
hurricup/intellij-community | refs/heads/master | python/testData/refactoring/move/moveSymbolFromStatementList/before/src/b.py | 12133432 | |
mpetyx/psymbiosys-rapidapps-middleware-workersTool | refs/heads/master | hello/__init__.py | 12133432 | |
itswindtw/pyMega | refs/heads/master | megadb/execution/__init__.py | 12133432 | |
saurabh6790/trufil_app | refs/heads/master | hr/doctype/test_name/__init__.py | 12133432 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.