| repo (string, 2-99 chars) | file (string, 13-225 chars) | code (string, 0-18.3M chars) | file_length (int64, 0-18.3M) | avg_line_length (float64, 0-1.36M) | max_line_length (int64, 0-4.26M) | extension_type (stringclasses: 1 value) |
|---|---|---|---|---|---|---|
| grpc | grpc-master/src/python/grpcio_tests/tests/testing/_time_test.py |
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
import threading
import time
import unittest
import grpc_testing
_QUANTUM = 0.3
_MANY = 10000
# Tests that run in real time can either wait for the scheduler to
# eventually run what needs to be run (and risk timing out) or declare
# that the scheduler didn't schedule work reasonably fast enough. We
# choose the latter for this test.
_PATHOLOGICAL_SCHEDULING = "pathological thread scheduling!"
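# _TimeNoter is handed to call_in/call_at below as the callback under test: each
# invocation records the current reading of the Time implementation being exercised.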
class _TimeNoter(object):
def __init__(self, time):
self._condition = threading.Condition()
self._time = time
self._call_times = []
def __call__(self):
with self._condition:
self._call_times.append(self._time.time())
def call_times(self):
with self._condition:
return tuple(self._call_times)
class TimeTest(object):
def test_sleep_for(self):
start_time = self._time.time()
self._time.sleep_for(_QUANTUM)
end_time = self._time.time()
self.assertLessEqual(start_time + _QUANTUM, end_time)
def test_sleep_until(self):
start_time = self._time.time()
self._time.sleep_until(start_time + _QUANTUM)
end_time = self._time.time()
self.assertLessEqual(start_time + _QUANTUM, end_time)
def test_call_in(self):
time_noter = _TimeNoter(self._time)
start_time = self._time.time()
self._time.call_in(time_noter, _QUANTUM)
self._time.sleep_for(_QUANTUM * 2)
call_times = time_noter.call_times()
self.assertTrue(call_times, msg=_PATHOLOGICAL_SCHEDULING)
self.assertLessEqual(start_time + _QUANTUM, call_times[0])
def test_call_at(self):
time_noter = _TimeNoter(self._time)
start_time = self._time.time()
self._time.call_at(time_noter, self._time.time() + _QUANTUM)
self._time.sleep_for(_QUANTUM * 2)
call_times = time_noter.call_times()
self.assertTrue(call_times, msg=_PATHOLOGICAL_SCHEDULING)
self.assertLessEqual(start_time + _QUANTUM, call_times[0])
def test_cancel(self):
time_noter = _TimeNoter(self._time)
future = self._time.call_in(time_noter, _QUANTUM * 2)
self._time.sleep_for(_QUANTUM)
cancelled = future.cancel()
self._time.sleep_for(_QUANTUM * 2)
call_times = time_noter.call_times()
self.assertFalse(call_times, msg=_PATHOLOGICAL_SCHEDULING)
self.assertTrue(cancelled)
self.assertTrue(future.cancelled())
def test_many(self):
test_events = tuple(threading.Event() for _ in range(_MANY))
possibly_cancelled_futures = {}
background_noise_futures = []
for test_event in test_events:
possibly_cancelled_futures[test_event] = self._time.call_in(
test_event.set, _QUANTUM * (2 + random.random())
)
for _ in range(_MANY):
background_noise_futures.append(
self._time.call_in(
threading.Event().set, _QUANTUM * 1000 * random.random()
)
)
self._time.sleep_for(_QUANTUM)
cancelled = set()
for test_event, test_future in possibly_cancelled_futures.items():
if bool(random.randint(0, 1)) and test_future.cancel():
cancelled.add(test_event)
self._time.sleep_for(_QUANTUM * 3)
for test_event in test_events:
(self.assertFalse if test_event in cancelled else self.assertTrue)(
test_event.is_set()
)
for background_noise_future in background_noise_futures:
background_noise_future.cancel()
def test_same_behavior_used_several_times(self):
time_noter = _TimeNoter(self._time)
start_time = self._time.time()
first_future_at_one = self._time.call_in(time_noter, _QUANTUM)
second_future_at_one = self._time.call_in(time_noter, _QUANTUM)
first_future_at_three = self._time.call_in(time_noter, _QUANTUM * 3)
second_future_at_three = self._time.call_in(time_noter, _QUANTUM * 3)
self._time.sleep_for(_QUANTUM * 2)
first_future_at_one_cancelled = first_future_at_one.cancel()
second_future_at_one_cancelled = second_future_at_one.cancel()
first_future_at_three_cancelled = first_future_at_three.cancel()
self._time.sleep_for(_QUANTUM * 2)
second_future_at_three_cancelled = second_future_at_three.cancel()
first_future_at_three_cancelled_again = first_future_at_three.cancel()
call_times = time_noter.call_times()
self.assertEqual(3, len(call_times), msg=_PATHOLOGICAL_SCHEDULING)
self.assertFalse(first_future_at_one_cancelled)
self.assertFalse(second_future_at_one_cancelled)
self.assertTrue(first_future_at_three_cancelled)
self.assertFalse(second_future_at_three_cancelled)
self.assertTrue(first_future_at_three_cancelled_again)
self.assertLessEqual(start_time + _QUANTUM, call_times[0])
self.assertLessEqual(start_time + _QUANTUM, call_times[1])
self.assertLessEqual(start_time + _QUANTUM * 3, call_times[2])
class StrictRealTimeTest(TimeTest, unittest.TestCase):
def setUp(self):
self._time = grpc_testing.strict_real_time()
class StrictFakeTimeTest(TimeTest, unittest.TestCase):
def setUp(self):
self._time = grpc_testing.strict_fake_time(
random.randint(0, int(time.time()))
)
if __name__ == "__main__":
unittest.main(verbosity=2)
| 6,101 | 35.538922 | 79 | py |
| grpc | grpc-master/src/python/grpcio_tests/tests/testing/_application_testing_common.py |
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import grpc_testing
from tests.testing.proto import requests_pb2
from tests.testing.proto import services_pb2
# TODO(https://github.com/grpc/grpc/issues/11657): Eliminate this entirely.
# TODO(https://github.com/protocolbuffers/protobuf/issues/3452): Eliminate this if/else.
if services_pb2.DESCRIPTOR.services_by_name.get("FirstService") is None:
FIRST_SERVICE = "Fix protobuf issue 3452!"
FIRST_SERVICE_UNUN = "Fix protobuf issue 3452!"
FIRST_SERVICE_UNSTRE = "Fix protobuf issue 3452!"
FIRST_SERVICE_STREUN = "Fix protobuf issue 3452!"
FIRST_SERVICE_STRESTRE = "Fix protobuf issue 3452!"
else:
FIRST_SERVICE = services_pb2.DESCRIPTOR.services_by_name["FirstService"]
FIRST_SERVICE_UNUN = FIRST_SERVICE.methods_by_name["UnUn"]
FIRST_SERVICE_UNSTRE = FIRST_SERVICE.methods_by_name["UnStre"]
FIRST_SERVICE_STREUN = FIRST_SERVICE.methods_by_name["StreUn"]
FIRST_SERVICE_STRESTRE = FIRST_SERVICE.methods_by_name["StreStre"]
| 1,542 | 44.382353 | 88 | py |
| grpc | grpc-master/src/python/grpcio_tests/tests/testing/_client_application.py |
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An example gRPC Python-using client-side application."""
import collections
import enum
import threading
import time
import grpc
from tests.testing import _application_common
from tests.testing.proto import requests_pb2
from tests.testing.proto import services_pb2
from tests.testing.proto import services_pb2_grpc
from tests.unit.framework.common import test_constants
@enum.unique
class Scenario(enum.Enum):
UNARY_UNARY = "unary unary"
UNARY_STREAM = "unary stream"
STREAM_UNARY = "stream unary"
STREAM_STREAM = "stream stream"
CONCURRENT_STREAM_UNARY = "concurrent stream unary"
CONCURRENT_STREAM_STREAM = "concurrent stream stream"
CANCEL_UNARY_UNARY = "cancel unary unary"
CANCEL_UNARY_STREAM = "cancel unary stream"
INFINITE_REQUEST_STREAM = "infinite request stream"
class Outcome(collections.namedtuple("Outcome", ("kind", "code", "details"))):
"""Outcome of a client application scenario.
Attributes:
kind: A Kind value describing the overall kind of scenario execution.
code: A grpc.StatusCode value. Only valid if kind is Kind.RPC_ERROR.
details: A status details string. Only valid if kind is Kind.RPC_ERROR.
"""
@enum.unique
class Kind(enum.Enum):
SATISFACTORY = "satisfactory"
UNSATISFACTORY = "unsatisfactory"
RPC_ERROR = "rpc error"
_SATISFACTORY_OUTCOME = Outcome(Outcome.Kind.SATISFACTORY, None, None)
_UNSATISFACTORY_OUTCOME = Outcome(Outcome.Kind.UNSATISFACTORY, None, None)
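# _Pipe is a minimal thread-safe stream of request messages: iterating it blocks
# until a value is add()ed or the pipe is close()d, which lets a test feed a
# request-streaming RPC one message at a time.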
class _Pipe(object):
def __init__(self):
self._condition = threading.Condition()
self._values = []
self._open = True
def __iter__(self):
return self
def _next(self):
with self._condition:
while True:
if self._values:
return self._values.pop(0)
elif not self._open:
raise StopIteration()
else:
self._condition.wait()
def __next__(self): # (Python 3 Iterator Protocol)
return self._next()
def next(self): # (Python 2 Iterator Protocol)
return self._next()
def add(self, value):
with self._condition:
self._values.append(value)
self._condition.notify_all()
def close(self):
with self._condition:
self._open = False
self._condition.notify_all()
def _run_unary_unary(stub):
response = stub.UnUn(_application_common.UNARY_UNARY_REQUEST)
if _application_common.UNARY_UNARY_RESPONSE == response:
return _SATISFACTORY_OUTCOME
else:
return _UNSATISFACTORY_OUTCOME
def _run_unary_stream(stub):
response_iterator = stub.UnStre(_application_common.UNARY_STREAM_REQUEST)
try:
next(response_iterator)
except StopIteration:
return _SATISFACTORY_OUTCOME
else:
return _UNSATISFACTORY_OUTCOME
def _run_stream_unary(stub):
response, call = stub.StreUn.with_call(
iter((_application_common.STREAM_UNARY_REQUEST,) * 3)
)
if (
_application_common.STREAM_UNARY_RESPONSE == response
and call.code() is grpc.StatusCode.OK
):
return _SATISFACTORY_OUTCOME
else:
return _UNSATISFACTORY_OUTCOME
def _run_stream_stream(stub):
request_pipe = _Pipe()
response_iterator = stub.StreStre(iter(request_pipe))
request_pipe.add(_application_common.STREAM_STREAM_REQUEST)
first_responses = next(response_iterator), next(response_iterator)
request_pipe.add(_application_common.STREAM_STREAM_REQUEST)
second_responses = next(response_iterator), next(response_iterator)
request_pipe.close()
try:
next(response_iterator)
except StopIteration:
unexpected_extra_response = False
else:
unexpected_extra_response = True
if (
first_responses == _application_common.TWO_STREAM_STREAM_RESPONSES
and second_responses == _application_common.TWO_STREAM_STREAM_RESPONSES
and not unexpected_extra_response
):
return _SATISFACTORY_OUTCOME
else:
return _UNSATISFACTORY_OUTCOME
def _run_concurrent_stream_unary(stub):
future_calls = tuple(
stub.StreUn.future(
iter((_application_common.STREAM_UNARY_REQUEST,) * 3)
)
for _ in range(test_constants.THREAD_CONCURRENCY)
)
for future_call in future_calls:
if future_call.code() is grpc.StatusCode.OK:
response = future_call.result()
if _application_common.STREAM_UNARY_RESPONSE != response:
return _UNSATISFACTORY_OUTCOME
else:
return _UNSATISFACTORY_OUTCOME
else:
return _SATISFACTORY_OUTCOME
def _run_concurrent_stream_stream(stub):
condition = threading.Condition()
outcomes = [None] * test_constants.RPC_CONCURRENCY
def run_stream_stream(index):
outcome = _run_stream_stream(stub)
with condition:
outcomes[index] = outcome
condition.notify()
for index in range(test_constants.RPC_CONCURRENCY):
thread = threading.Thread(target=run_stream_stream, args=(index,))
thread.start()
with condition:
while True:
if all(outcomes):
for outcome in outcomes:
if outcome.kind is not Outcome.Kind.SATISFACTORY:
return _UNSATISFACTORY_OUTCOME
else:
return _SATISFACTORY_OUTCOME
else:
condition.wait()
def _run_cancel_unary_unary(stub):
response_future_call = stub.UnUn.future(
_application_common.UNARY_UNARY_REQUEST
)
initial_metadata = response_future_call.initial_metadata()
cancelled = response_future_call.cancel()
if initial_metadata is not None and cancelled:
return _SATISFACTORY_OUTCOME
else:
return _UNSATISFACTORY_OUTCOME
def _run_infinite_request_stream(stub):
def infinite_request_iterator():
while True:
yield _application_common.STREAM_UNARY_REQUEST
response_future_call = stub.StreUn.future(
infinite_request_iterator(),
timeout=_application_common.INFINITE_REQUEST_STREAM_TIMEOUT,
)
if response_future_call.code() is grpc.StatusCode.DEADLINE_EXCEEDED:
return _SATISFACTORY_OUTCOME
else:
return _UNSATISFACTORY_OUTCOME
_IMPLEMENTATIONS = {
Scenario.UNARY_UNARY: _run_unary_unary,
Scenario.UNARY_STREAM: _run_unary_stream,
Scenario.STREAM_UNARY: _run_stream_unary,
Scenario.STREAM_STREAM: _run_stream_stream,
Scenario.CONCURRENT_STREAM_UNARY: _run_concurrent_stream_unary,
Scenario.CONCURRENT_STREAM_STREAM: _run_concurrent_stream_stream,
Scenario.CANCEL_UNARY_UNARY: _run_cancel_unary_unary,
Scenario.INFINITE_REQUEST_STREAM: _run_infinite_request_stream,
}
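# run() dispatches on the scenario table above and converts any grpc.RpcError
# raised while driving the stub into an RPC_ERROR Outcome.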
def run(scenario, channel):
stub = services_pb2_grpc.FirstServiceStub(channel)
try:
return _IMPLEMENTATIONS[scenario](stub)
except grpc.RpcError as rpc_error:
return Outcome(
Outcome.Kind.RPC_ERROR, rpc_error.code(), rpc_error.details()
)
| 7,765 | 30.827869 | 79 | py |
| grpc | grpc-master/src/python/grpcio_tests/tests/testing/__init__.py |
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577 | 40.285714 | 74 | py |
| grpc | grpc-master/src/python/grpcio_tests/tests/testing/_application_common.py |
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An example gRPC Python-using application's common code elements."""
from tests.testing.proto import requests_pb2
from tests.testing.proto import services_pb2
SERVICE_NAME = "tests_of_grpc_testing.FirstService"
UNARY_UNARY_METHOD_NAME = "UnUn"
UNARY_STREAM_METHOD_NAME = "UnStre"
STREAM_UNARY_METHOD_NAME = "StreUn"
STREAM_STREAM_METHOD_NAME = "StreStre"
UNARY_UNARY_REQUEST = requests_pb2.Up(first_up_field=2)
ERRONEOUS_UNARY_UNARY_REQUEST = requests_pb2.Up(first_up_field=3)
UNARY_UNARY_RESPONSE = services_pb2.Down(first_down_field=5)
ERRONEOUS_UNARY_UNARY_RESPONSE = services_pb2.Down(first_down_field=7)
UNARY_STREAM_REQUEST = requests_pb2.Charm(first_charm_field=11)
STREAM_UNARY_REQUEST = requests_pb2.Charm(first_charm_field=13)
STREAM_UNARY_RESPONSE = services_pb2.Strange(first_strange_field=17)
STREAM_STREAM_REQUEST = requests_pb2.Top(first_top_field=19)
STREAM_STREAM_RESPONSE = services_pb2.Bottom(first_bottom_field=23)
TWO_STREAM_STREAM_RESPONSES = (STREAM_STREAM_RESPONSE,) * 2
ABORT_REQUEST = requests_pb2.Up(first_up_field=42)
ABORT_SUCCESS_QUERY = requests_pb2.Up(first_up_field=43)
ABORT_NO_STATUS_RESPONSE = services_pb2.Down(first_down_field=50)
ABORT_SUCCESS_RESPONSE = services_pb2.Down(first_down_field=51)
ABORT_FAILURE_RESPONSE = services_pb2.Down(first_down_field=52)
STREAM_STREAM_MUTATING_REQUEST = requests_pb2.Top(first_top_field=24601)
STREAM_STREAM_MUTATING_COUNT = 2
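# Used as the RPC timeout by the infinite-request-stream scenarios, which expect
# the call to end with DEADLINE_EXCEEDED.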
INFINITE_REQUEST_STREAM_TIMEOUT = 0.2
| 2,024 | 45.022727 | 74 | py |
| grpc | grpc-master/src/python/grpcio_tests/tests/testing/_client_test.py |
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from concurrent import futures
import time
import unittest
import grpc
from grpc.framework.foundation import logging_pool
import grpc_testing
from tests.testing import _application_common
from tests.testing import _application_testing_common
from tests.testing import _client_application
from tests.testing.proto import requests_pb2
from tests.testing.proto import services_pb2
from tests.unit.framework.common import test_constants
# TODO(https://github.com/protocolbuffers/protobuf/issues/3452): Drop this skip.
@unittest.skipIf(
services_pb2.DESCRIPTOR.services_by_name.get("FirstService") is None,
"Fix protobuf issue 3452!",
)
class ClientTest(unittest.TestCase):
def setUp(self):
# In this test the client-side application under test executes in
# a separate thread while we retain use of the test thread to "play
# server".
self._client_execution_thread_pool = logging_pool.pool(1)
self._fake_time = grpc_testing.strict_fake_time(time.time())
self._real_time = grpc_testing.strict_real_time()
self._fake_time_channel = grpc_testing.channel(
services_pb2.DESCRIPTOR.services_by_name.values(), self._fake_time
)
self._real_time_channel = grpc_testing.channel(
services_pb2.DESCRIPTOR.services_by_name.values(), self._real_time
)
def tearDown(self):
self._client_execution_thread_pool.shutdown(wait=True)
def test_successful_unary_unary(self):
application_future = self._client_execution_thread_pool.submit(
_client_application.run,
_client_application.Scenario.UNARY_UNARY,
self._real_time_channel,
)
(
invocation_metadata,
request,
rpc,
) = self._real_time_channel.take_unary_unary(
_application_testing_common.FIRST_SERVICE_UNUN
)
rpc.send_initial_metadata(())
rpc.terminate(
_application_common.UNARY_UNARY_RESPONSE, (), grpc.StatusCode.OK, ""
)
application_return_value = application_future.result()
self.assertEqual(_application_common.UNARY_UNARY_REQUEST, request)
self.assertIs(
application_return_value.kind,
_client_application.Outcome.Kind.SATISFACTORY,
)
def test_successful_unary_stream(self):
application_future = self._client_execution_thread_pool.submit(
_client_application.run,
_client_application.Scenario.UNARY_STREAM,
self._fake_time_channel,
)
(
invocation_metadata,
request,
rpc,
) = self._fake_time_channel.take_unary_stream(
_application_testing_common.FIRST_SERVICE_UNSTRE
)
rpc.send_initial_metadata(())
rpc.terminate((), grpc.StatusCode.OK, "")
application_return_value = application_future.result()
self.assertEqual(_application_common.UNARY_STREAM_REQUEST, request)
self.assertIs(
application_return_value.kind,
_client_application.Outcome.Kind.SATISFACTORY,
)
def test_successful_stream_unary(self):
application_future = self._client_execution_thread_pool.submit(
_client_application.run,
_client_application.Scenario.STREAM_UNARY,
self._real_time_channel,
)
invocation_metadata, rpc = self._real_time_channel.take_stream_unary(
_application_testing_common.FIRST_SERVICE_STREUN
)
rpc.send_initial_metadata(())
first_request = rpc.take_request()
second_request = rpc.take_request()
third_request = rpc.take_request()
rpc.requests_closed()
rpc.terminate(
_application_common.STREAM_UNARY_RESPONSE,
(),
grpc.StatusCode.OK,
"",
)
application_return_value = application_future.result()
self.assertEqual(
_application_common.STREAM_UNARY_REQUEST, first_request
)
self.assertEqual(
_application_common.STREAM_UNARY_REQUEST, second_request
)
self.assertEqual(
_application_common.STREAM_UNARY_REQUEST, third_request
)
self.assertIs(
application_return_value.kind,
_client_application.Outcome.Kind.SATISFACTORY,
)
def test_successful_stream_stream(self):
application_future = self._client_execution_thread_pool.submit(
_client_application.run,
_client_application.Scenario.STREAM_STREAM,
self._fake_time_channel,
)
invocation_metadata, rpc = self._fake_time_channel.take_stream_stream(
_application_testing_common.FIRST_SERVICE_STRESTRE
)
first_request = rpc.take_request()
rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
second_request = rpc.take_request()
rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
rpc.requests_closed()
rpc.terminate((), grpc.StatusCode.OK, "")
application_return_value = application_future.result()
self.assertEqual(
_application_common.STREAM_STREAM_REQUEST, first_request
)
self.assertEqual(
_application_common.STREAM_STREAM_REQUEST, second_request
)
self.assertIs(
application_return_value.kind,
_client_application.Outcome.Kind.SATISFACTORY,
)
def test_concurrent_stream_stream(self):
application_future = self._client_execution_thread_pool.submit(
_client_application.run,
_client_application.Scenario.CONCURRENT_STREAM_STREAM,
self._real_time_channel,
)
rpcs = []
for _ in range(test_constants.RPC_CONCURRENCY):
(
invocation_metadata,
rpc,
) = self._real_time_channel.take_stream_stream(
_application_testing_common.FIRST_SERVICE_STRESTRE
)
rpcs.append(rpc)
requests = {}
for rpc in rpcs:
requests[rpc] = [rpc.take_request()]
for rpc in rpcs:
rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
for rpc in rpcs:
requests[rpc].append(rpc.take_request())
for rpc in rpcs:
rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
for rpc in rpcs:
rpc.requests_closed()
for rpc in rpcs:
rpc.terminate((), grpc.StatusCode.OK, "")
application_return_value = application_future.result()
for requests_of_one_rpc in requests.values():
for request in requests_of_one_rpc:
self.assertEqual(
_application_common.STREAM_STREAM_REQUEST, request
)
self.assertIs(
application_return_value.kind,
_client_application.Outcome.Kind.SATISFACTORY,
)
def test_cancelled_unary_unary(self):
application_future = self._client_execution_thread_pool.submit(
_client_application.run,
_client_application.Scenario.CANCEL_UNARY_UNARY,
self._fake_time_channel,
)
(
invocation_metadata,
request,
rpc,
) = self._fake_time_channel.take_unary_unary(
_application_testing_common.FIRST_SERVICE_UNUN
)
rpc.send_initial_metadata(())
rpc.cancelled()
application_return_value = application_future.result()
self.assertEqual(_application_common.UNARY_UNARY_REQUEST, request)
self.assertIs(
application_return_value.kind,
_client_application.Outcome.Kind.SATISFACTORY,
)
def test_status_stream_unary(self):
application_future = self._client_execution_thread_pool.submit(
_client_application.run,
_client_application.Scenario.CONCURRENT_STREAM_UNARY,
self._fake_time_channel,
)
rpcs = tuple(
self._fake_time_channel.take_stream_unary(
_application_testing_common.FIRST_SERVICE_STREUN
)[1]
for _ in range(test_constants.THREAD_CONCURRENCY)
)
for rpc in rpcs:
rpc.take_request()
rpc.take_request()
rpc.take_request()
rpc.requests_closed()
rpc.send_initial_metadata(
(
(
"my_metadata_key",
"My Metadata Value!",
),
)
)
for rpc in rpcs[:-1]:
rpc.terminate(
_application_common.STREAM_UNARY_RESPONSE,
(),
grpc.StatusCode.OK,
"",
)
rpcs[-1].terminate(
_application_common.STREAM_UNARY_RESPONSE,
(),
grpc.StatusCode.RESOURCE_EXHAUSTED,
"nope; not able to handle all those RPCs!",
)
application_return_value = application_future.result()
self.assertIs(
application_return_value.kind,
_client_application.Outcome.Kind.UNSATISFACTORY,
)
def test_status_stream_stream(self):
code = grpc.StatusCode.DEADLINE_EXCEEDED
details = "test deadline exceeded!"
application_future = self._client_execution_thread_pool.submit(
_client_application.run,
_client_application.Scenario.STREAM_STREAM,
self._real_time_channel,
)
invocation_metadata, rpc = self._real_time_channel.take_stream_stream(
_application_testing_common.FIRST_SERVICE_STRESTRE
)
first_request = rpc.take_request()
rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
second_request = rpc.take_request()
rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
rpc.requests_closed()
rpc.terminate((), code, details)
application_return_value = application_future.result()
self.assertEqual(
_application_common.STREAM_STREAM_REQUEST, first_request
)
self.assertEqual(
_application_common.STREAM_STREAM_REQUEST, second_request
)
self.assertIs(
application_return_value.kind,
_client_application.Outcome.Kind.RPC_ERROR,
)
self.assertIs(application_return_value.code, code)
self.assertEqual(application_return_value.details, details)
def test_misbehaving_server_unary_unary(self):
application_future = self._client_execution_thread_pool.submit(
_client_application.run,
_client_application.Scenario.UNARY_UNARY,
self._fake_time_channel,
)
(
invocation_metadata,
request,
rpc,
) = self._fake_time_channel.take_unary_unary(
_application_testing_common.FIRST_SERVICE_UNUN
)
rpc.send_initial_metadata(())
rpc.terminate(
_application_common.ERRONEOUS_UNARY_UNARY_RESPONSE,
(),
grpc.StatusCode.OK,
"",
)
application_return_value = application_future.result()
self.assertEqual(_application_common.UNARY_UNARY_REQUEST, request)
self.assertIs(
application_return_value.kind,
_client_application.Outcome.Kind.UNSATISFACTORY,
)
def test_misbehaving_server_stream_stream(self):
application_future = self._client_execution_thread_pool.submit(
_client_application.run,
_client_application.Scenario.STREAM_STREAM,
self._real_time_channel,
)
invocation_metadata, rpc = self._real_time_channel.take_stream_stream(
_application_testing_common.FIRST_SERVICE_STRESTRE
)
first_request = rpc.take_request()
rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
second_request = rpc.take_request()
rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
rpc.requests_closed()
rpc.terminate((), grpc.StatusCode.OK, "")
application_return_value = application_future.result()
self.assertEqual(
_application_common.STREAM_STREAM_REQUEST, first_request
)
self.assertEqual(
_application_common.STREAM_STREAM_REQUEST, second_request
)
self.assertIs(
application_return_value.kind,
_client_application.Outcome.Kind.UNSATISFACTORY,
)
def test_infinite_request_stream_real_time(self):
application_future = self._client_execution_thread_pool.submit(
_client_application.run,
_client_application.Scenario.INFINITE_REQUEST_STREAM,
self._real_time_channel,
)
invocation_metadata, rpc = self._real_time_channel.take_stream_unary(
_application_testing_common.FIRST_SERVICE_STREUN
)
rpc.send_initial_metadata(())
first_request = rpc.take_request()
second_request = rpc.take_request()
third_request = rpc.take_request()
self._real_time.sleep_for(
_application_common.INFINITE_REQUEST_STREAM_TIMEOUT
)
rpc.terminate(
_application_common.STREAM_UNARY_RESPONSE,
(),
grpc.StatusCode.DEADLINE_EXCEEDED,
"",
)
application_return_value = application_future.result()
self.assertEqual(
_application_common.STREAM_UNARY_REQUEST, first_request
)
self.assertEqual(
_application_common.STREAM_UNARY_REQUEST, second_request
)
self.assertEqual(
_application_common.STREAM_UNARY_REQUEST, third_request
)
self.assertIs(
application_return_value.kind,
_client_application.Outcome.Kind.SATISFACTORY,
)
if __name__ == "__main__":
unittest.main(verbosity=2)
| 15,493 | 36.33494 | 80 | py |
| grpc | grpc-master/src/python/grpcio_tests/tests/testing/_server_application.py |
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An example gRPC Python-using server-side application."""
import threading
import grpc
# requests_pb2 is a semantic dependency of this module.
from tests.testing import _application_common
from tests.testing.proto import requests_pb2 # pylint: disable=unused-import
from tests.testing.proto import services_pb2
from tests.testing.proto import services_pb2_grpc
class FirstServiceServicer(services_pb2_grpc.FirstServiceServicer):
"""Services RPCs."""
def __init__(self):
self._abort_lock = threading.RLock()
self._abort_response = _application_common.ABORT_NO_STATUS_RESPONSE
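# UnUn returns the canonical unary-unary response, exercises context.abort() for
# the abort-test requests, and fails any other request with INVALID_ARGUMENT.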
def UnUn(self, request, context):
if request == _application_common.UNARY_UNARY_REQUEST:
return _application_common.UNARY_UNARY_RESPONSE
elif request == _application_common.ABORT_REQUEST:
with self._abort_lock:
try:
context.abort(
grpc.StatusCode.PERMISSION_DENIED,
"Denying permission to test abort.",
)
except Exception as e: # pylint: disable=broad-except
self._abort_response = (
_application_common.ABORT_SUCCESS_RESPONSE
)
else:
self._abort_response = (
_application_common.ABORT_FAILURE_RESPONSE
)
return None # NOTE: For the linter.
elif request == _application_common.ABORT_SUCCESS_QUERY:
with self._abort_lock:
return self._abort_response
else:
context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
context.set_details("Something is wrong with your request!")
return services_pb2.Down()
def UnStre(self, request, context):
if _application_common.UNARY_STREAM_REQUEST != request:
context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
context.set_details("Something is wrong with your request!")
return
yield services_pb2.Strange() # pylint: disable=unreachable
def StreUn(self, request_iterator, context):
context.send_initial_metadata(
(
(
"server_application_metadata_key",
"Hi there!",
),
)
)
for request in request_iterator:
if request != _application_common.STREAM_UNARY_REQUEST:
context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
context.set_details("Something is wrong with your request!")
return services_pb2.Strange()
elif not context.is_active():
return services_pb2.Strange()
else:
return _application_common.STREAM_UNARY_RESPONSE
def StreStre(self, request_iterator, context):
valid_requests = (
_application_common.STREAM_STREAM_REQUEST,
_application_common.STREAM_STREAM_MUTATING_REQUEST,
)
for request in request_iterator:
if request not in valid_requests:
context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
context.set_details("Something is wrong with your request!")
return
elif not context.is_active():
return
elif request == _application_common.STREAM_STREAM_REQUEST:
yield _application_common.STREAM_STREAM_RESPONSE
yield _application_common.STREAM_STREAM_RESPONSE
elif request == _application_common.STREAM_STREAM_MUTATING_REQUEST:
response = services_pb2.Bottom()
for i in range(
_application_common.STREAM_STREAM_MUTATING_COUNT
):
response.first_bottom_field = i
yield response
| 4,482 | 40.12844 | 79 | py |
| grpc | grpc-master/src/python/grpcio_tests/tests/testing/_server_test.py |
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import unittest
import grpc
import grpc_testing
from tests.testing import _application_common
from tests.testing import _application_testing_common
from tests.testing import _server_application
from tests.testing.proto import services_pb2
class FirstServiceServicerTest(unittest.TestCase):
def setUp(self):
self._real_time = grpc_testing.strict_real_time()
self._fake_time = grpc_testing.strict_fake_time(time.time())
servicer = _server_application.FirstServiceServicer()
descriptors_to_servicers = {
_application_testing_common.FIRST_SERVICE: servicer
}
self._real_time_server = grpc_testing.server_from_dictionary(
descriptors_to_servicers, self._real_time
)
self._fake_time_server = grpc_testing.server_from_dictionary(
descriptors_to_servicers, self._fake_time
)
def test_successful_unary_unary(self):
rpc = self._real_time_server.invoke_unary_unary(
_application_testing_common.FIRST_SERVICE_UNUN,
(),
_application_common.UNARY_UNARY_REQUEST,
None,
)
initial_metadata = rpc.initial_metadata()
response, trailing_metadata, code, details = rpc.termination()
self.assertEqual(_application_common.UNARY_UNARY_RESPONSE, response)
self.assertIs(code, grpc.StatusCode.OK)
def test_successful_unary_stream(self):
rpc = self._real_time_server.invoke_unary_stream(
_application_testing_common.FIRST_SERVICE_UNSTRE,
(),
_application_common.UNARY_STREAM_REQUEST,
None,
)
initial_metadata = rpc.initial_metadata()
trailing_metadata, code, details = rpc.termination()
self.assertIs(code, grpc.StatusCode.OK)
def test_successful_stream_unary(self):
rpc = self._real_time_server.invoke_stream_unary(
_application_testing_common.FIRST_SERVICE_STREUN, (), None
)
rpc.send_request(_application_common.STREAM_UNARY_REQUEST)
rpc.send_request(_application_common.STREAM_UNARY_REQUEST)
rpc.send_request(_application_common.STREAM_UNARY_REQUEST)
rpc.requests_closed()
initial_metadata = rpc.initial_metadata()
response, trailing_metadata, code, details = rpc.termination()
self.assertEqual(_application_common.STREAM_UNARY_RESPONSE, response)
self.assertIs(code, grpc.StatusCode.OK)
def test_successful_stream_stream(self):
rpc = self._real_time_server.invoke_stream_stream(
_application_testing_common.FIRST_SERVICE_STRESTRE, (), None
)
rpc.send_request(_application_common.STREAM_STREAM_REQUEST)
initial_metadata = rpc.initial_metadata()
responses = [
rpc.take_response(),
rpc.take_response(),
]
rpc.send_request(_application_common.STREAM_STREAM_REQUEST)
rpc.send_request(_application_common.STREAM_STREAM_REQUEST)
responses.extend(
[
rpc.take_response(),
rpc.take_response(),
rpc.take_response(),
rpc.take_response(),
]
)
rpc.requests_closed()
trailing_metadata, code, details = rpc.termination()
for response in responses:
self.assertEqual(
_application_common.STREAM_STREAM_RESPONSE, response
)
self.assertIs(code, grpc.StatusCode.OK)
def test_mutating_stream_stream(self):
rpc = self._real_time_server.invoke_stream_stream(
_application_testing_common.FIRST_SERVICE_STRESTRE, (), None
)
rpc.send_request(_application_common.STREAM_STREAM_MUTATING_REQUEST)
initial_metadata = rpc.initial_metadata()
responses = [
rpc.take_response()
for _ in range(_application_common.STREAM_STREAM_MUTATING_COUNT)
]
rpc.send_request(_application_common.STREAM_STREAM_MUTATING_REQUEST)
responses.extend(
[
rpc.take_response()
for _ in range(_application_common.STREAM_STREAM_MUTATING_COUNT)
]
)
rpc.requests_closed()
_, _, _ = rpc.termination()
expected_responses = (
services_pb2.Bottom(first_bottom_field=0),
services_pb2.Bottom(first_bottom_field=1),
services_pb2.Bottom(first_bottom_field=0),
services_pb2.Bottom(first_bottom_field=1),
)
self.assertSequenceEqual(expected_responses, responses)
def test_server_rpc_idempotence(self):
rpc = self._real_time_server.invoke_unary_unary(
_application_testing_common.FIRST_SERVICE_UNUN,
(),
_application_common.UNARY_UNARY_REQUEST,
None,
)
first_initial_metadata = rpc.initial_metadata()
second_initial_metadata = rpc.initial_metadata()
third_initial_metadata = rpc.initial_metadata()
first_termination = rpc.termination()
second_termination = rpc.termination()
third_termination = rpc.termination()
for later_initial_metadata in (
second_initial_metadata,
third_initial_metadata,
):
self.assertEqual(first_initial_metadata, later_initial_metadata)
response = first_termination[0]
terminal_metadata = first_termination[1]
code = first_termination[2]
details = first_termination[3]
for later_termination in (
second_termination,
third_termination,
):
self.assertEqual(response, later_termination[0])
self.assertEqual(terminal_metadata, later_termination[1])
self.assertIs(code, later_termination[2])
self.assertEqual(details, later_termination[3])
self.assertEqual(_application_common.UNARY_UNARY_RESPONSE, response)
self.assertIs(code, grpc.StatusCode.OK)
def test_misbehaving_client_unary_unary(self):
rpc = self._real_time_server.invoke_unary_unary(
_application_testing_common.FIRST_SERVICE_UNUN,
(),
_application_common.ERRONEOUS_UNARY_UNARY_REQUEST,
None,
)
initial_metadata = rpc.initial_metadata()
response, trailing_metadata, code, details = rpc.termination()
self.assertIsNot(code, grpc.StatusCode.OK)
def test_infinite_request_stream_real_time(self):
rpc = self._real_time_server.invoke_stream_unary(
_application_testing_common.FIRST_SERVICE_STREUN,
(),
_application_common.INFINITE_REQUEST_STREAM_TIMEOUT,
)
rpc.send_request(_application_common.STREAM_UNARY_REQUEST)
rpc.send_request(_application_common.STREAM_UNARY_REQUEST)
rpc.send_request(_application_common.STREAM_UNARY_REQUEST)
initial_metadata = rpc.initial_metadata()
self._real_time.sleep_for(
_application_common.INFINITE_REQUEST_STREAM_TIMEOUT * 2
)
rpc.send_request(_application_common.STREAM_UNARY_REQUEST)
response, trailing_metadata, code, details = rpc.termination()
self.assertIs(code, grpc.StatusCode.DEADLINE_EXCEEDED)
def test_infinite_request_stream_fake_time(self):
rpc = self._fake_time_server.invoke_stream_unary(
_application_testing_common.FIRST_SERVICE_STREUN,
(),
_application_common.INFINITE_REQUEST_STREAM_TIMEOUT,
)
rpc.send_request(_application_common.STREAM_UNARY_REQUEST)
rpc.send_request(_application_common.STREAM_UNARY_REQUEST)
rpc.send_request(_application_common.STREAM_UNARY_REQUEST)
initial_metadata = rpc.initial_metadata()
self._fake_time.sleep_for(
_application_common.INFINITE_REQUEST_STREAM_TIMEOUT * 2
)
rpc.send_request(_application_common.STREAM_UNARY_REQUEST)
response, trailing_metadata, code, details = rpc.termination()
self.assertIs(code, grpc.StatusCode.DEADLINE_EXCEEDED)
def test_servicer_context_abort(self):
rpc = self._real_time_server.invoke_unary_unary(
_application_testing_common.FIRST_SERVICE_UNUN,
(),
_application_common.ABORT_REQUEST,
None,
)
_, _, code, _ = rpc.termination()
self.assertIs(code, grpc.StatusCode.PERMISSION_DENIED)
rpc = self._real_time_server.invoke_unary_unary(
_application_testing_common.FIRST_SERVICE_UNUN,
(),
_application_common.ABORT_SUCCESS_QUERY,
None,
)
response, _, code, _ = rpc.termination()
self.assertEqual(_application_common.ABORT_SUCCESS_RESPONSE, response)
self.assertIs(code, grpc.StatusCode.OK)
if __name__ == "__main__":
unittest.main(verbosity=2)
| 9,538 | 38.580913 | 80 | py |
| grpc | grpc-master/src/python/grpcio_tests/tests/testing/proto/__init__.py |
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577 | 40.285714 | 74 | py |
| grpc | grpc-master/src/python/grpcio_tests/tests/csds/test_csds.py |
# Copyright 2021 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A simple test to ensure that the Python wrapper can get xDS config."""
from concurrent.futures import ThreadPoolExecutor
import logging
import os
import queue
import sys
import time
import unittest
from envoy.service.status.v3 import csds_pb2
from envoy.service.status.v3 import csds_pb2_grpc
from google.protobuf import json_format
import grpc
import grpc_csds
_DUMMY_XDS_ADDRESS = "xds:///foo.bar"
_DUMMY_BOOTSTRAP_FILE = """
{
\"xds_servers\": [
{
\"server_uri\": \"fake:///xds_server\",
\"channel_creds\": [
{
\"type\": \"fake\"
}
],
\"server_features\": [\"xds_v3\"]
}
],
\"node\": {
\"id\": \"python_test_csds\",
\"cluster\": \"test\",
\"metadata\": {
\"foo\": \"bar\"
},
\"locality\": {
\"region\": \"corp\",
\"zone\": \"svl\",
\"sub_zone\": \"mp3\"
}
}
}\
"""
@unittest.skipIf(
sys.version_info[0] < 3, "ProtoBuf descriptor has moved on from Python2"
)
class TestCsds(unittest.TestCase):
def setUp(self):
os.environ["GRPC_XDS_BOOTSTRAP_CONFIG"] = _DUMMY_BOOTSTRAP_FILE
self._server = grpc.server(ThreadPoolExecutor())
port = self._server.add_insecure_port("localhost:0")
grpc_csds.add_csds_servicer(self._server)
self._server.start()
self._channel = grpc.insecure_channel("localhost:%s" % port)
self._stub = csds_pb2_grpc.ClientStatusDiscoveryServiceStub(
self._channel
)
def tearDown(self):
self._channel.close()
self._server.stop(0)
os.environ.pop("GRPC_XDS_BOOTSTRAP_CONFIG", None)
def get_xds_config_dump(self):
return self._stub.FetchClientStatus(csds_pb2.ClientStatusRequest())
def test_has_node(self):
resp = self.get_xds_config_dump()
self.assertEqual(1, len(resp.config))
self.assertEqual("python_test_csds", resp.config[0].node.id)
self.assertEqual("test", resp.config[0].node.cluster)
def test_no_lds_found(self):
dummy_channel = grpc.insecure_channel(_DUMMY_XDS_ADDRESS)
# Force the XdsClient to initialize and request a resource
with self.assertRaises(grpc.RpcError) as rpc_error:
dummy_channel.unary_unary("")(b"", wait_for_ready=False, timeout=1)
self.assertEqual(
grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.exception.code()
)
# The resource request will fail with DOES_NOT_EXIST (after 15s)
while True:
resp = self.get_xds_config_dump()
config = json_format.MessageToDict(resp)
ok = False
try:
for xds_config in config["config"][0].get("xdsConfig", []):
if "listenerConfig" in xds_config:
listener = xds_config["listenerConfig"][
"dynamicListeners"
][0]
if listener["clientStatus"] == "REQUESTED":
ok = True
break
for generic_xds_config in config["config"][0].get(
"genericXdsConfigs", []
):
if "Listener" in generic_xds_config["typeUrl"]:
if generic_xds_config["clientStatus"] == "REQUESTED":
ok = True
break
except KeyError as e:
logging.debug("Invalid config: %s\n%s: %s", config, type(e), e)
pass
if ok:
break
time.sleep(1)
dummy_channel.close()
@unittest.skipIf(
sys.version_info[0] < 3, "ProtoBuf descriptor has moved on from Python2"
)
class TestCsdsStream(TestCsds):
def get_xds_config_dump(self):
if not hasattr(self, "request_queue"):
request_queue = queue.Queue()
response_iterator = self._stub.StreamClientStatus(
iter(request_queue.get, None)
)
request_queue.put(csds_pb2.ClientStatusRequest())
return next(response_iterator)
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)
| 4,793 | 31.612245 | 79 | py |
| grpc | grpc-master/src/python/grpcio_tests/tests/unit/_tcp_proxy.py |
# Copyright 2019 the gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Proxies a TCP connection between a single client-server pair.
This proxy is not suitable for production, but should work well for cases in
which a test needs to spy on the bytes put on the wire between a server and
a client.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import datetime
import select
import socket
import threading
from tests.unit.framework.common import get_socket
_TCP_PROXY_BUFFER_SIZE = 1024
_TCP_PROXY_TIMEOUT = datetime.timedelta(milliseconds=500)
def _init_proxy_socket(gateway_address, gateway_port):
proxy_socket = socket.create_connection((gateway_address, gateway_port))
return proxy_socket
class TcpProxy(object):
"""Proxies a TCP connection between one client and one server."""
def __init__(self, bind_address, gateway_address, gateway_port):
self._bind_address = bind_address
self._gateway_address = gateway_address
self._gateway_port = gateway_port
self._byte_count_lock = threading.RLock()
self._sent_byte_count = 0
self._received_byte_count = 0
self._stop_event = threading.Event()
self._port = None
self._listen_socket = None
self._proxy_socket = None
# The following three attributes are owned by the serving thread.
self._northbound_data = b""
self._southbound_data = b""
self._client_sockets = []
self._thread = threading.Thread(target=self._run_proxy)
def start(self):
_, self._port, self._listen_socket = get_socket(
bind_address=self._bind_address
)
self._proxy_socket = _init_proxy_socket(
self._gateway_address, self._gateway_port
)
self._thread.start()
def get_port(self):
return self._port
def _handle_reads(self, sockets_to_read):
for socket_to_read in sockets_to_read:
if socket_to_read is self._listen_socket:
client_socket, client_address = socket_to_read.accept()
self._client_sockets.append(client_socket)
elif socket_to_read is self._proxy_socket:
data = socket_to_read.recv(_TCP_PROXY_BUFFER_SIZE)
with self._byte_count_lock:
self._received_byte_count += len(data)
self._northbound_data += data
elif socket_to_read in self._client_sockets:
data = socket_to_read.recv(_TCP_PROXY_BUFFER_SIZE)
if data:
with self._byte_count_lock:
self._sent_byte_count += len(data)
self._southbound_data += data
else:
self._client_sockets.remove(socket_to_read)
else:
raise RuntimeError("Unidentified socket appeared in read set.")
def _handle_writes(self, sockets_to_write):
for socket_to_write in sockets_to_write:
if socket_to_write is self._proxy_socket:
if self._southbound_data:
self._proxy_socket.sendall(self._southbound_data)
self._southbound_data = b""
elif socket_to_write in self._client_sockets:
if self._northbound_data:
socket_to_write.sendall(self._northbound_data)
self._northbound_data = b""
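# Main loop: multiplex the listen socket, the upstream proxy socket, and every
# accepted client socket with select(), copying bytes southbound (client to
# server) and northbound (server to client) until stop() is requested.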
def _run_proxy(self):
while not self._stop_event.is_set():
expected_reads = (self._listen_socket, self._proxy_socket) + tuple(
self._client_sockets
)
expected_writes = expected_reads
sockets_to_read, sockets_to_write, _ = select.select(
expected_reads,
expected_writes,
(),
_TCP_PROXY_TIMEOUT.total_seconds(),
)
self._handle_reads(sockets_to_read)
self._handle_writes(sockets_to_write)
for client_socket in self._client_sockets:
client_socket.close()
def stop(self):
self._stop_event.set()
self._thread.join()
self._listen_socket.close()
self._proxy_socket.close()
def get_byte_count(self):
with self._byte_count_lock:
return self._sent_byte_count, self._received_byte_count
def reset_byte_count(self):
with self._byte_count_lock:
self._sent_byte_count = 0
self._received_byte_count = 0
def __enter__(self):
self.start()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.stop()
| 5,205 | 34.175676 | 79 | py |
| grpc | grpc-master/src/python/grpcio_tests/tests/unit/_channel_args_test.py |
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests of channel arguments on client/server side."""
from concurrent import futures
import logging
import unittest
import grpc
class TestPointerWrapper(object):
def __int__(self):
return 123456
TEST_CHANNEL_ARGS = (
("arg1", b"bytes_val"),
("arg2", "str_val"),
("arg3", 1),
(b"arg4", "str_val"),
("arg6", TestPointerWrapper()),
)
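# None of the following is a valid options value: channel options must be an
# iterable of (key, value) two-tuples, so each shape should raise ValueError.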
INVALID_TEST_CHANNEL_ARGS = [
{"foo": "bar"},
(("key",),),
"str",
]
class ChannelArgsTest(unittest.TestCase):
def test_client(self):
grpc.insecure_channel("localhost:8080", options=TEST_CHANNEL_ARGS)
def test_server(self):
grpc.server(
futures.ThreadPoolExecutor(max_workers=1), options=TEST_CHANNEL_ARGS
)
def test_invalid_client_args(self):
for invalid_arg in INVALID_TEST_CHANNEL_ARGS:
self.assertRaises(
ValueError,
grpc.insecure_channel,
"localhost:8080",
options=invalid_arg,
)
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
| 1,679 | 24.846154 | 80 | py |
| grpc | grpc-master/src/python/grpcio_tests/tests/unit/_metadata_code_details_test.py |
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests application-provided metadata, status code, and details."""
import logging
import threading
import unittest
import grpc
from tests.unit import test_common
from tests.unit.framework.common import test_constants
from tests.unit.framework.common import test_control
_SERIALIZED_REQUEST = b"\x46\x47\x48"
_SERIALIZED_RESPONSE = b"\x49\x50\x51"
_REQUEST_SERIALIZER = lambda unused_request: _SERIALIZED_REQUEST
_REQUEST_DESERIALIZER = lambda unused_serialized_request: object()
_RESPONSE_SERIALIZER = lambda unused_response: _SERIALIZED_RESPONSE
_RESPONSE_DESERIALIZER = lambda unused_serialized_response: object()
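# The (de)serializers above ignore their inputs and return fixed values, so these
# tests never depend on real protobuf message types crossing the wire.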
_SERVICE = "test.TestService"
_UNARY_UNARY = "UnaryUnary"
_UNARY_STREAM = "UnaryStream"
_STREAM_UNARY = "StreamUnary"
_STREAM_STREAM = "StreamStream"
_CLIENT_METADATA = (
("client-md-key", "client-md-key"),
("client-md-key-bin", b"\x00\x01"),
)
_SERVER_INITIAL_METADATA = (
("server-initial-md-key", "server-initial-md-value"),
("server-initial-md-key-bin", b"\x00\x02"),
)
_SERVER_TRAILING_METADATA = (
("server-trailing-md-key", "server-trailing-md-value"),
("server-trailing-md-key-bin", b"\x00\x03"),
)
_NON_OK_CODE = grpc.StatusCode.NOT_FOUND
_DETAILS = "Test details!"
# calling abort should always fail an RPC, even for "invalid" codes
_ABORT_CODES = (_NON_OK_CODE, 3, grpc.StatusCode.OK)
_EXPECTED_CLIENT_CODES = (
_NON_OK_CODE,
grpc.StatusCode.UNKNOWN,
grpc.StatusCode.UNKNOWN,
)
_EXPECTED_DETAILS = (_DETAILS, _DETAILS, "")
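# The three tuples above are parallel: aborting with _ABORT_CODES[i] should be
# observed by the client as _EXPECTED_CLIENT_CODES[i] with _EXPECTED_DETAILS[i].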
class _Servicer(object):
def __init__(self):
self._lock = threading.Lock()
self._abort_call = False
self._code = None
self._details = None
self._exception = False
self._return_none = False
self._received_client_metadata = None
def unary_unary(self, request, context):
with self._lock:
self._received_client_metadata = context.invocation_metadata()
context.send_initial_metadata(_SERVER_INITIAL_METADATA)
context.set_trailing_metadata(_SERVER_TRAILING_METADATA)
if self._abort_call:
context.abort(self._code, self._details)
else:
if self._code is not None:
context.set_code(self._code)
if self._details is not None:
context.set_details(self._details)
if self._exception:
raise test_control.Defect()
else:
return None if self._return_none else object()
def unary_stream(self, request, context):
with self._lock:
self._received_client_metadata = context.invocation_metadata()
context.send_initial_metadata(_SERVER_INITIAL_METADATA)
context.set_trailing_metadata(_SERVER_TRAILING_METADATA)
if self._abort_call:
context.abort(self._code, self._details)
else:
if self._code is not None:
context.set_code(self._code)
if self._details is not None:
context.set_details(self._details)
for _ in range(test_constants.STREAM_LENGTH // 2):
yield _SERIALIZED_RESPONSE
if self._exception:
raise test_control.Defect()
def stream_unary(self, request_iterator, context):
with self._lock:
self._received_client_metadata = context.invocation_metadata()
context.send_initial_metadata(_SERVER_INITIAL_METADATA)
context.set_trailing_metadata(_SERVER_TRAILING_METADATA)
# TODO(https://github.com/grpc/grpc/issues/6891): just ignore the
# request iterator.
list(request_iterator)
if self._abort_call:
context.abort(self._code, self._details)
else:
if self._code is not None:
context.set_code(self._code)
if self._details is not None:
context.set_details(self._details)
if self._exception:
raise test_control.Defect()
else:
return None if self._return_none else _SERIALIZED_RESPONSE
def stream_stream(self, request_iterator, context):
with self._lock:
self._received_client_metadata = context.invocation_metadata()
context.send_initial_metadata(_SERVER_INITIAL_METADATA)
context.set_trailing_metadata(_SERVER_TRAILING_METADATA)
# TODO(https://github.com/grpc/grpc/issues/6891): just ignore the
# request iterator.
list(request_iterator)
if self._abort_call:
context.abort(self._code, self._details)
else:
if self._code is not None:
context.set_code(self._code)
if self._details is not None:
context.set_details(self._details)
for _ in range(test_constants.STREAM_LENGTH // 3):
yield object()
if self._exception:
raise test_control.Defect()
def set_abort_call(self):
with self._lock:
self._abort_call = True
def set_code(self, code):
with self._lock:
self._code = code
def set_details(self, details):
with self._lock:
self._details = details
def set_exception(self):
with self._lock:
self._exception = True
def set_return_none(self):
with self._lock:
self._return_none = True
def received_client_metadata(self):
with self._lock:
return self._received_client_metadata
def _generic_handler(servicer):
method_handlers = {
_UNARY_UNARY: grpc.unary_unary_rpc_method_handler(
servicer.unary_unary,
request_deserializer=_REQUEST_DESERIALIZER,
response_serializer=_RESPONSE_SERIALIZER,
),
_UNARY_STREAM: grpc.unary_stream_rpc_method_handler(
servicer.unary_stream
),
_STREAM_UNARY: grpc.stream_unary_rpc_method_handler(
servicer.stream_unary
),
_STREAM_STREAM: grpc.stream_stream_rpc_method_handler(
servicer.stream_stream,
request_deserializer=_REQUEST_DESERIALIZER,
response_serializer=_RESPONSE_SERIALIZER,
),
}
return grpc.method_handlers_generic_handler(_SERVICE, method_handlers)
class MetadataCodeDetailsTest(unittest.TestCase):
def setUp(self):
self._servicer = _Servicer()
self._server = test_common.test_server()
self._server.add_generic_rpc_handlers(
(_generic_handler(self._servicer),)
)
port = self._server.add_insecure_port("[::]:0")
self._server.start()
self._channel = grpc.insecure_channel("localhost:{}".format(port))
self._unary_unary = self._channel.unary_unary(
"/".join(
(
"",
_SERVICE,
_UNARY_UNARY,
)
),
request_serializer=_REQUEST_SERIALIZER,
response_deserializer=_RESPONSE_DESERIALIZER,
)
self._unary_stream = self._channel.unary_stream(
"/".join(
(
"",
_SERVICE,
_UNARY_STREAM,
)
),
)
self._stream_unary = self._channel.stream_unary(
"/".join(
(
"",
_SERVICE,
_STREAM_UNARY,
)
),
)
self._stream_stream = self._channel.stream_stream(
"/".join(
(
"",
_SERVICE,
_STREAM_STREAM,
)
),
request_serializer=_REQUEST_SERIALIZER,
response_deserializer=_RESPONSE_DESERIALIZER,
)
def tearDown(self):
self._server.stop(None)
self._channel.close()
def testSuccessfulUnaryUnary(self):
self._servicer.set_details(_DETAILS)
unused_response, call = self._unary_unary.with_call(
object(), metadata=_CLIENT_METADATA
)
self.assertTrue(
test_common.metadata_transmitted(
_CLIENT_METADATA, self._servicer.received_client_metadata()
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_INITIAL_METADATA, call.initial_metadata()
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_TRAILING_METADATA, call.trailing_metadata()
)
)
self.assertIs(grpc.StatusCode.OK, call.code())
def testSuccessfulUnaryStream(self):
self._servicer.set_details(_DETAILS)
response_iterator_call = self._unary_stream(
_SERIALIZED_REQUEST, metadata=_CLIENT_METADATA
)
received_initial_metadata = response_iterator_call.initial_metadata()
list(response_iterator_call)
self.assertTrue(
test_common.metadata_transmitted(
_CLIENT_METADATA, self._servicer.received_client_metadata()
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_INITIAL_METADATA, received_initial_metadata
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_TRAILING_METADATA,
response_iterator_call.trailing_metadata(),
)
)
self.assertIs(grpc.StatusCode.OK, response_iterator_call.code())
def testSuccessfulStreamUnary(self):
self._servicer.set_details(_DETAILS)
unused_response, call = self._stream_unary.with_call(
iter([_SERIALIZED_REQUEST] * test_constants.STREAM_LENGTH),
metadata=_CLIENT_METADATA,
)
self.assertTrue(
test_common.metadata_transmitted(
_CLIENT_METADATA, self._servicer.received_client_metadata()
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_INITIAL_METADATA, call.initial_metadata()
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_TRAILING_METADATA, call.trailing_metadata()
)
)
self.assertIs(grpc.StatusCode.OK, call.code())
def testSuccessfulStreamStream(self):
self._servicer.set_details(_DETAILS)
response_iterator_call = self._stream_stream(
iter([object()] * test_constants.STREAM_LENGTH),
metadata=_CLIENT_METADATA,
)
received_initial_metadata = response_iterator_call.initial_metadata()
list(response_iterator_call)
self.assertTrue(
test_common.metadata_transmitted(
_CLIENT_METADATA, self._servicer.received_client_metadata()
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_INITIAL_METADATA, received_initial_metadata
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_TRAILING_METADATA,
response_iterator_call.trailing_metadata(),
)
)
self.assertIs(grpc.StatusCode.OK, response_iterator_call.code())
def testAbortedUnaryUnary(self):
test_cases = zip(
_ABORT_CODES, _EXPECTED_CLIENT_CODES, _EXPECTED_DETAILS
)
for abort_code, expected_code, expected_details in test_cases:
self._servicer.set_code(abort_code)
self._servicer.set_details(_DETAILS)
self._servicer.set_abort_call()
with self.assertRaises(grpc.RpcError) as exception_context:
self._unary_unary.with_call(object(), metadata=_CLIENT_METADATA)
self.assertTrue(
test_common.metadata_transmitted(
_CLIENT_METADATA, self._servicer.received_client_metadata()
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_INITIAL_METADATA,
exception_context.exception.initial_metadata(),
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_TRAILING_METADATA,
exception_context.exception.trailing_metadata(),
)
)
self.assertIs(expected_code, exception_context.exception.code())
self.assertEqual(
expected_details, exception_context.exception.details()
)
def testAbortedUnaryStream(self):
test_cases = zip(
_ABORT_CODES, _EXPECTED_CLIENT_CODES, _EXPECTED_DETAILS
)
for abort_code, expected_code, expected_details in test_cases:
self._servicer.set_code(abort_code)
self._servicer.set_details(_DETAILS)
self._servicer.set_abort_call()
response_iterator_call = self._unary_stream(
_SERIALIZED_REQUEST, metadata=_CLIENT_METADATA
)
received_initial_metadata = (
response_iterator_call.initial_metadata()
)
with self.assertRaises(grpc.RpcError):
self.assertEqual(len(list(response_iterator_call)), 0)
self.assertTrue(
test_common.metadata_transmitted(
_CLIENT_METADATA, self._servicer.received_client_metadata()
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_INITIAL_METADATA, received_initial_metadata
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_TRAILING_METADATA,
response_iterator_call.trailing_metadata(),
)
)
self.assertIs(expected_code, response_iterator_call.code())
self.assertEqual(expected_details, response_iterator_call.details())
def testAbortedStreamUnary(self):
test_cases = zip(
_ABORT_CODES, _EXPECTED_CLIENT_CODES, _EXPECTED_DETAILS
)
for abort_code, expected_code, expected_details in test_cases:
self._servicer.set_code(abort_code)
self._servicer.set_details(_DETAILS)
self._servicer.set_abort_call()
with self.assertRaises(grpc.RpcError) as exception_context:
self._stream_unary.with_call(
iter([_SERIALIZED_REQUEST] * test_constants.STREAM_LENGTH),
metadata=_CLIENT_METADATA,
)
self.assertTrue(
test_common.metadata_transmitted(
_CLIENT_METADATA, self._servicer.received_client_metadata()
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_INITIAL_METADATA,
exception_context.exception.initial_metadata(),
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_TRAILING_METADATA,
exception_context.exception.trailing_metadata(),
)
)
self.assertIs(expected_code, exception_context.exception.code())
self.assertEqual(
expected_details, exception_context.exception.details()
)
def testAbortedStreamStream(self):
test_cases = zip(
_ABORT_CODES, _EXPECTED_CLIENT_CODES, _EXPECTED_DETAILS
)
for abort_code, expected_code, expected_details in test_cases:
self._servicer.set_code(abort_code)
self._servicer.set_details(_DETAILS)
self._servicer.set_abort_call()
response_iterator_call = self._stream_stream(
iter([object()] * test_constants.STREAM_LENGTH),
metadata=_CLIENT_METADATA,
)
received_initial_metadata = (
response_iterator_call.initial_metadata()
)
with self.assertRaises(grpc.RpcError):
self.assertEqual(len(list(response_iterator_call)), 0)
self.assertTrue(
test_common.metadata_transmitted(
_CLIENT_METADATA, self._servicer.received_client_metadata()
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_INITIAL_METADATA, received_initial_metadata
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_TRAILING_METADATA,
response_iterator_call.trailing_metadata(),
)
)
self.assertIs(expected_code, response_iterator_call.code())
self.assertEqual(expected_details, response_iterator_call.details())
def testCustomCodeUnaryUnary(self):
self._servicer.set_code(_NON_OK_CODE)
self._servicer.set_details(_DETAILS)
with self.assertRaises(grpc.RpcError) as exception_context:
self._unary_unary.with_call(object(), metadata=_CLIENT_METADATA)
self.assertTrue(
test_common.metadata_transmitted(
_CLIENT_METADATA, self._servicer.received_client_metadata()
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_INITIAL_METADATA,
exception_context.exception.initial_metadata(),
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_TRAILING_METADATA,
exception_context.exception.trailing_metadata(),
)
)
self.assertIs(_NON_OK_CODE, exception_context.exception.code())
self.assertEqual(_DETAILS, exception_context.exception.details())
def testCustomCodeUnaryStream(self):
self._servicer.set_code(_NON_OK_CODE)
self._servicer.set_details(_DETAILS)
response_iterator_call = self._unary_stream(
_SERIALIZED_REQUEST, metadata=_CLIENT_METADATA
)
received_initial_metadata = response_iterator_call.initial_metadata()
with self.assertRaises(grpc.RpcError):
list(response_iterator_call)
self.assertTrue(
test_common.metadata_transmitted(
_CLIENT_METADATA, self._servicer.received_client_metadata()
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_INITIAL_METADATA, received_initial_metadata
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_TRAILING_METADATA,
response_iterator_call.trailing_metadata(),
)
)
self.assertIs(_NON_OK_CODE, response_iterator_call.code())
self.assertEqual(_DETAILS, response_iterator_call.details())
def testCustomCodeStreamUnary(self):
self._servicer.set_code(_NON_OK_CODE)
self._servicer.set_details(_DETAILS)
with self.assertRaises(grpc.RpcError) as exception_context:
self._stream_unary.with_call(
iter([_SERIALIZED_REQUEST] * test_constants.STREAM_LENGTH),
metadata=_CLIENT_METADATA,
)
self.assertTrue(
test_common.metadata_transmitted(
_CLIENT_METADATA, self._servicer.received_client_metadata()
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_INITIAL_METADATA,
exception_context.exception.initial_metadata(),
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_TRAILING_METADATA,
exception_context.exception.trailing_metadata(),
)
)
self.assertIs(_NON_OK_CODE, exception_context.exception.code())
self.assertEqual(_DETAILS, exception_context.exception.details())
def testCustomCodeStreamStream(self):
self._servicer.set_code(_NON_OK_CODE)
self._servicer.set_details(_DETAILS)
response_iterator_call = self._stream_stream(
iter([object()] * test_constants.STREAM_LENGTH),
metadata=_CLIENT_METADATA,
)
received_initial_metadata = response_iterator_call.initial_metadata()
with self.assertRaises(grpc.RpcError) as exception_context:
list(response_iterator_call)
self.assertTrue(
test_common.metadata_transmitted(
_CLIENT_METADATA, self._servicer.received_client_metadata()
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_INITIAL_METADATA, received_initial_metadata
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_TRAILING_METADATA,
exception_context.exception.trailing_metadata(),
)
)
self.assertIs(_NON_OK_CODE, exception_context.exception.code())
self.assertEqual(_DETAILS, exception_context.exception.details())
def testCustomCodeExceptionUnaryUnary(self):
self._servicer.set_code(_NON_OK_CODE)
self._servicer.set_details(_DETAILS)
self._servicer.set_exception()
with self.assertRaises(grpc.RpcError) as exception_context:
self._unary_unary.with_call(object(), metadata=_CLIENT_METADATA)
self.assertTrue(
test_common.metadata_transmitted(
_CLIENT_METADATA, self._servicer.received_client_metadata()
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_INITIAL_METADATA,
exception_context.exception.initial_metadata(),
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_TRAILING_METADATA,
exception_context.exception.trailing_metadata(),
)
)
self.assertIs(_NON_OK_CODE, exception_context.exception.code())
self.assertEqual(_DETAILS, exception_context.exception.details())
def testCustomCodeExceptionUnaryStream(self):
self._servicer.set_code(_NON_OK_CODE)
self._servicer.set_details(_DETAILS)
self._servicer.set_exception()
response_iterator_call = self._unary_stream(
_SERIALIZED_REQUEST, metadata=_CLIENT_METADATA
)
received_initial_metadata = response_iterator_call.initial_metadata()
with self.assertRaises(grpc.RpcError):
list(response_iterator_call)
self.assertTrue(
test_common.metadata_transmitted(
_CLIENT_METADATA, self._servicer.received_client_metadata()
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_INITIAL_METADATA, received_initial_metadata
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_TRAILING_METADATA,
response_iterator_call.trailing_metadata(),
)
)
self.assertIs(_NON_OK_CODE, response_iterator_call.code())
self.assertEqual(_DETAILS, response_iterator_call.details())
def testCustomCodeExceptionStreamUnary(self):
self._servicer.set_code(_NON_OK_CODE)
self._servicer.set_details(_DETAILS)
self._servicer.set_exception()
with self.assertRaises(grpc.RpcError) as exception_context:
self._stream_unary.with_call(
iter([_SERIALIZED_REQUEST] * test_constants.STREAM_LENGTH),
metadata=_CLIENT_METADATA,
)
self.assertTrue(
test_common.metadata_transmitted(
_CLIENT_METADATA, self._servicer.received_client_metadata()
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_INITIAL_METADATA,
exception_context.exception.initial_metadata(),
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_TRAILING_METADATA,
exception_context.exception.trailing_metadata(),
)
)
self.assertIs(_NON_OK_CODE, exception_context.exception.code())
self.assertEqual(_DETAILS, exception_context.exception.details())
def testCustomCodeExceptionStreamStream(self):
self._servicer.set_code(_NON_OK_CODE)
self._servicer.set_details(_DETAILS)
self._servicer.set_exception()
response_iterator_call = self._stream_stream(
iter([object()] * test_constants.STREAM_LENGTH),
metadata=_CLIENT_METADATA,
)
received_initial_metadata = response_iterator_call.initial_metadata()
with self.assertRaises(grpc.RpcError):
list(response_iterator_call)
self.assertTrue(
test_common.metadata_transmitted(
_CLIENT_METADATA, self._servicer.received_client_metadata()
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_INITIAL_METADATA, received_initial_metadata
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_TRAILING_METADATA,
response_iterator_call.trailing_metadata(),
)
)
self.assertIs(_NON_OK_CODE, response_iterator_call.code())
self.assertEqual(_DETAILS, response_iterator_call.details())
def testCustomCodeReturnNoneUnaryUnary(self):
self._servicer.set_code(_NON_OK_CODE)
self._servicer.set_details(_DETAILS)
self._servicer.set_return_none()
with self.assertRaises(grpc.RpcError) as exception_context:
self._unary_unary.with_call(object(), metadata=_CLIENT_METADATA)
self.assertTrue(
test_common.metadata_transmitted(
_CLIENT_METADATA, self._servicer.received_client_metadata()
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_INITIAL_METADATA,
exception_context.exception.initial_metadata(),
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_TRAILING_METADATA,
exception_context.exception.trailing_metadata(),
)
)
self.assertIs(_NON_OK_CODE, exception_context.exception.code())
self.assertEqual(_DETAILS, exception_context.exception.details())
def testCustomCodeReturnNoneStreamUnary(self):
self._servicer.set_code(_NON_OK_CODE)
self._servicer.set_details(_DETAILS)
self._servicer.set_return_none()
with self.assertRaises(grpc.RpcError) as exception_context:
self._stream_unary.with_call(
iter([_SERIALIZED_REQUEST] * test_constants.STREAM_LENGTH),
metadata=_CLIENT_METADATA,
)
self.assertTrue(
test_common.metadata_transmitted(
_CLIENT_METADATA, self._servicer.received_client_metadata()
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_INITIAL_METADATA,
exception_context.exception.initial_metadata(),
)
)
self.assertTrue(
test_common.metadata_transmitted(
_SERVER_TRAILING_METADATA,
exception_context.exception.trailing_metadata(),
)
)
self.assertIs(_NON_OK_CODE, exception_context.exception.code())
self.assertEqual(_DETAILS, exception_context.exception.details())
class _InspectServicer(_Servicer):
def __init__(self):
super(_InspectServicer, self).__init__()
self.actual_code = None
self.actual_details = None
self.actual_trailing_metadata = None
def unary_unary(self, request, context):
super(_InspectServicer, self).unary_unary(request, context)
self.actual_code = context.code()
self.actual_details = context.details()
self.actual_trailing_metadata = context.trailing_metadata()
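# Hedged sketch (added for exposition): the overrides above exercise the
# read-back accessors on grpc.ServicerContext that the test below inspects.
# The pattern, with an illustrative status value, is simply:
#
#     context.set_code(grpc.StatusCode.NOT_FOUND)   # illustrative value
#     observed_code = context.code()                # read it back later
#
# Note that, as the decode("utf-8") in the test below suggests, the details
# read back on the server side may be bytes rather than str.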
class InspectContextTest(unittest.TestCase):
def setUp(self):
self._servicer = _InspectServicer()
self._server = test_common.test_server()
self._server.add_generic_rpc_handlers(
(_generic_handler(self._servicer),)
)
port = self._server.add_insecure_port("[::]:0")
self._server.start()
self._channel = grpc.insecure_channel("localhost:{}".format(port))
self._unary_unary = self._channel.unary_unary(
"/".join(
(
"",
_SERVICE,
_UNARY_UNARY,
)
),
request_serializer=_REQUEST_SERIALIZER,
response_deserializer=_RESPONSE_DESERIALIZER,
)
def tearDown(self):
self._server.stop(None)
self._channel.close()
def testCodeDetailsInContext(self):
self._servicer.set_code(_NON_OK_CODE)
self._servicer.set_details(_DETAILS)
with self.assertRaises(grpc.RpcError) as exc_info:
self._unary_unary.with_call(object(), metadata=_CLIENT_METADATA)
err = exc_info.exception
self.assertEqual(_NON_OK_CODE, err.code())
self.assertEqual(self._servicer.actual_code, _NON_OK_CODE)
self.assertEqual(
self._servicer.actual_details.decode("utf-8"), _DETAILS
)
self.assertEqual(
self._servicer.actual_trailing_metadata, _SERVER_TRAILING_METADATA
)
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
| 31,133
| 34.827388
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/resources.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Constants and functions for data used in testing."""
import os
import pkgutil
_ROOT_CERTIFICATES_RESOURCE_PATH = "credentials/ca.pem"
_PRIVATE_KEY_RESOURCE_PATH = "credentials/server1.key"
_CERTIFICATE_CHAIN_RESOURCE_PATH = "credentials/server1.pem"
def test_root_certificates():
return pkgutil.get_data(__name__, _ROOT_CERTIFICATES_RESOURCE_PATH)
def private_key():
return pkgutil.get_data(__name__, _PRIVATE_KEY_RESOURCE_PATH)
def certificate_chain():
return pkgutil.get_data(__name__, _CERTIFICATE_CHAIN_RESOURCE_PATH)
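# Illustrative usage (not part of this module): the three helpers above are
# typically combined into TLS credentials in tests, e.g.
#
#     server_credentials = grpc.ssl_server_credentials(
#         ((private_key(), certificate_chain()),)
#     )
#     channel_credentials = grpc.ssl_channel_credentials(
#         root_certificates=test_root_certificates()
#     )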
def cert_hier_1_root_ca_cert():
return pkgutil.get_data(
__name__, "credentials/certificate_hierarchy_1/certs/ca.cert.pem"
)
def cert_hier_1_intermediate_ca_cert():
return pkgutil.get_data(
__name__,
"credentials/certificate_hierarchy_1/intermediate/certs/intermediate.cert.pem",
)
def cert_hier_1_client_1_key():
return pkgutil.get_data(
__name__,
"credentials/certificate_hierarchy_1/intermediate/private/client.key.pem",
)
def cert_hier_1_client_1_cert():
return pkgutil.get_data(
__name__,
"credentials/certificate_hierarchy_1/intermediate/certs/client.cert.pem",
)
def cert_hier_1_server_1_key():
return pkgutil.get_data(
__name__,
"credentials/certificate_hierarchy_1/intermediate/private/localhost-1.key.pem",
)
def cert_hier_1_server_1_cert():
return pkgutil.get_data(
__name__,
"credentials/certificate_hierarchy_1/intermediate/certs/localhost-1.cert.pem",
)
def cert_hier_2_root_ca_cert():
return pkgutil.get_data(
__name__, "credentials/certificate_hierarchy_2/certs/ca.cert.pem"
)
def cert_hier_2_intermediate_ca_cert():
return pkgutil.get_data(
__name__,
"credentials/certificate_hierarchy_2/intermediate/certs/intermediate.cert.pem",
)
def cert_hier_2_client_1_key():
return pkgutil.get_data(
__name__,
"credentials/certificate_hierarchy_2/intermediate/private/client.key.pem",
)
def cert_hier_2_client_1_cert():
return pkgutil.get_data(
__name__,
"credentials/certificate_hierarchy_2/intermediate/certs/client.cert.pem",
)
def cert_hier_2_server_1_key():
return pkgutil.get_data(
__name__,
"credentials/certificate_hierarchy_2/intermediate/private/localhost-1.key.pem",
)
def cert_hier_2_server_1_cert():
return pkgutil.get_data(
__name__,
"credentials/certificate_hierarchy_2/intermediate/certs/localhost-1.cert.pem",
)
| 3,154
| 26.198276
| 87
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_abort_test.py
|
# Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests server context abort mechanism"""
import collections
import gc
import logging
import unittest
import weakref
import grpc
from tests.unit import test_common
from tests.unit.framework.common import test_constants
_ABORT = "/test/abort"
_ABORT_WITH_STATUS = "/test/AbortWithStatus"
_INVALID_CODE = "/test/InvalidCode"
_REQUEST = b"\x00\x00\x00"
_RESPONSE = b"\x00\x00\x00"
_ABORT_DETAILS = "Abandon ship!"
_ABORT_METADATA = (("a-trailing-metadata", "42"),)
class _Status(
collections.namedtuple("_Status", ("code", "details", "trailing_metadata")),
grpc.Status,
):
pass
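# _Status satisfies the grpc.Status interface simply by exposing the three
# attributes the namedtuple declares; an illustrative instance (field values
# here are arbitrary examples):
#
#     _Status(
#         code=grpc.StatusCode.INTERNAL,
#         details="something went wrong",
#         trailing_metadata=(("a-trailing-metadata", "42"),),
#     )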
class _Object(object):
pass
do_not_leak_me = _Object()
def abort_unary_unary(request, servicer_context):
this_should_not_be_leaked = do_not_leak_me
servicer_context.abort(
grpc.StatusCode.INTERNAL,
_ABORT_DETAILS,
)
raise Exception("This line should not be executed!")
def abort_with_status_unary_unary(request, servicer_context):
servicer_context.abort_with_status(
_Status(
code=grpc.StatusCode.INTERNAL,
details=_ABORT_DETAILS,
trailing_metadata=_ABORT_METADATA,
)
)
raise Exception("This line should not be executed!")
def invalid_code_unary_unary(request, servicer_context):
servicer_context.abort(
42,
_ABORT_DETAILS,
)
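# Note: 42 is deliberately not a grpc.StatusCode member; test_invalid_code
# below expects the client to observe StatusCode.UNKNOWN while the details
# string is still propagated.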
class _GenericHandler(grpc.GenericRpcHandler):
def service(self, handler_call_details):
if handler_call_details.method == _ABORT:
return grpc.unary_unary_rpc_method_handler(abort_unary_unary)
elif handler_call_details.method == _ABORT_WITH_STATUS:
return grpc.unary_unary_rpc_method_handler(
abort_with_status_unary_unary
)
elif handler_call_details.method == _INVALID_CODE:
return grpc.stream_stream_rpc_method_handler(
invalid_code_unary_unary
)
else:
return None
class AbortTest(unittest.TestCase):
def setUp(self):
self._server = test_common.test_server()
port = self._server.add_insecure_port("[::]:0")
self._server.add_generic_rpc_handlers((_GenericHandler(),))
self._server.start()
self._channel = grpc.insecure_channel("localhost:%d" % port)
def tearDown(self):
self._channel.close()
self._server.stop(0)
def test_abort(self):
with self.assertRaises(grpc.RpcError) as exception_context:
self._channel.unary_unary(_ABORT)(_REQUEST)
rpc_error = exception_context.exception
self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL)
self.assertEqual(rpc_error.details(), _ABORT_DETAILS)
# This test ensures that abort() does not store the raised exception, which
# on Python 3 (via the `__traceback__` attribute) holds a reference to
# all local vars. Storing the raised exception can prevent GC and stop the
# grpc_call from being unref'ed, even after server shutdown.
@unittest.skip("https://github.com/grpc/grpc/issues/17927")
def test_abort_does_not_leak_local_vars(self):
global do_not_leak_me # pylint: disable=global-statement
weak_ref = weakref.ref(do_not_leak_me)
# Servicer will abort() after creating a local ref to do_not_leak_me.
with self.assertRaises(grpc.RpcError):
self._channel.unary_unary(_ABORT)(_REQUEST)
# Server may still have a stack frame reference to the exception even
# after client sees error, so ensure server has shutdown.
self._server.stop(None)
do_not_leak_me = None
self.assertIsNone(weak_ref())
def test_abort_with_status(self):
with self.assertRaises(grpc.RpcError) as exception_context:
self._channel.unary_unary(_ABORT_WITH_STATUS)(_REQUEST)
rpc_error = exception_context.exception
self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL)
self.assertEqual(rpc_error.details(), _ABORT_DETAILS)
self.assertEqual(rpc_error.trailing_metadata(), _ABORT_METADATA)
def test_invalid_code(self):
with self.assertRaises(grpc.RpcError) as exception_context:
self._channel.unary_unary(_INVALID_CODE)(_REQUEST)
rpc_error = exception_context.exception
self.assertEqual(rpc_error.code(), grpc.StatusCode.UNKNOWN)
self.assertEqual(rpc_error.details(), _ABORT_DETAILS)
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
| 5,100
| 31.698718
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_rpc_part_1_test.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test of RPCs made against gRPC Python's application-layer API."""
from concurrent import futures
import itertools
import logging
import threading
import unittest
import grpc
from grpc.framework.foundation import logging_pool
from tests.unit._rpc_test_helpers import (
stream_stream_non_blocking_multi_callable,
)
from tests.unit._rpc_test_helpers import (
unary_stream_non_blocking_multi_callable,
)
from tests.unit._rpc_test_helpers import BaseRPCTest
from tests.unit._rpc_test_helpers import Callback
from tests.unit._rpc_test_helpers import TIMEOUT_SHORT
from tests.unit._rpc_test_helpers import stream_stream_multi_callable
from tests.unit._rpc_test_helpers import stream_unary_multi_callable
from tests.unit._rpc_test_helpers import unary_stream_multi_callable
from tests.unit._rpc_test_helpers import unary_unary_multi_callable
from tests.unit.framework.common import test_constants
class RPCPart1Test(BaseRPCTest, unittest.TestCase):
def testExpiredStreamRequestBlockingUnaryResponse(self):
requests = tuple(
b"\x07\x08" for _ in range(test_constants.STREAM_LENGTH)
)
request_iterator = iter(requests)
multi_callable = stream_unary_multi_callable(self._channel)
with self._control.pause():
with self.assertRaises(grpc.RpcError) as exception_context:
multi_callable(
request_iterator,
timeout=TIMEOUT_SHORT,
metadata=(
("test", "ExpiredStreamRequestBlockingUnaryResponse"),
),
)
self.assertIsInstance(exception_context.exception, grpc.RpcError)
self.assertIsInstance(exception_context.exception, grpc.Call)
self.assertIsNotNone(exception_context.exception.initial_metadata())
self.assertIs(
grpc.StatusCode.DEADLINE_EXCEEDED,
exception_context.exception.code(),
)
self.assertIsNotNone(exception_context.exception.details())
self.assertIsNotNone(exception_context.exception.trailing_metadata())
def testExpiredStreamRequestFutureUnaryResponse(self):
requests = tuple(
b"\x07\x18" for _ in range(test_constants.STREAM_LENGTH)
)
request_iterator = iter(requests)
callback = Callback()
multi_callable = stream_unary_multi_callable(self._channel)
with self._control.pause():
response_future = multi_callable.future(
request_iterator,
timeout=TIMEOUT_SHORT,
metadata=(("test", "ExpiredStreamRequestFutureUnaryResponse"),),
)
with self.assertRaises(grpc.FutureTimeoutError):
response_future.result(timeout=TIMEOUT_SHORT / 2.0)
response_future.add_done_callback(callback)
value_passed_to_callback = callback.value()
with self.assertRaises(grpc.RpcError) as exception_context:
response_future.result()
self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED, response_future.code())
self.assertIs(
grpc.StatusCode.DEADLINE_EXCEEDED,
exception_context.exception.code(),
)
self.assertIsInstance(response_future.exception(), grpc.RpcError)
self.assertIsNotNone(response_future.traceback())
self.assertIs(response_future, value_passed_to_callback)
self.assertIsNotNone(response_future.initial_metadata())
self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED, response_future.code())
self.assertIsNotNone(response_future.details())
self.assertIsNotNone(response_future.trailing_metadata())
def testExpiredStreamRequestStreamResponse(self):
self._expired_stream_request_stream_response(
stream_stream_multi_callable(self._channel)
)
def testExpiredStreamRequestStreamResponseNonBlocking(self):
self._expired_stream_request_stream_response(
stream_stream_non_blocking_multi_callable(self._channel)
)
def testFailedUnaryRequestBlockingUnaryResponse(self):
request = b"\x37\x17"
multi_callable = unary_unary_multi_callable(self._channel)
with self._control.fail():
with self.assertRaises(grpc.RpcError) as exception_context:
multi_callable.with_call(
request,
metadata=(
("test", "FailedUnaryRequestBlockingUnaryResponse"),
),
)
self.assertIs(
grpc.StatusCode.UNKNOWN, exception_context.exception.code()
)
        # Sanity checks to make sure the returned string contains the default
        # members of the error.
debug_error_string = exception_context.exception.debug_error_string()
self.assertIn("grpc_status", debug_error_string)
self.assertIn("grpc_message", debug_error_string)
def testFailedUnaryRequestFutureUnaryResponse(self):
request = b"\x37\x17"
callback = Callback()
multi_callable = unary_unary_multi_callable(self._channel)
with self._control.fail():
response_future = multi_callable.future(
request,
metadata=(("test", "FailedUnaryRequestFutureUnaryResponse"),),
)
response_future.add_done_callback(callback)
value_passed_to_callback = callback.value()
self.assertIsInstance(response_future, grpc.Future)
self.assertIsInstance(response_future, grpc.Call)
with self.assertRaises(grpc.RpcError) as exception_context:
response_future.result()
self.assertIs(
grpc.StatusCode.UNKNOWN, exception_context.exception.code()
)
self.assertIsInstance(response_future.exception(), grpc.RpcError)
self.assertIsNotNone(response_future.traceback())
self.assertIs(
grpc.StatusCode.UNKNOWN, response_future.exception().code()
)
self.assertIs(response_future, value_passed_to_callback)
def testFailedUnaryRequestStreamResponse(self):
self._failed_unary_request_stream_response(
unary_stream_multi_callable(self._channel)
)
def testFailedUnaryRequestStreamResponseNonBlocking(self):
self._failed_unary_request_stream_response(
unary_stream_non_blocking_multi_callable(self._channel)
)
def testFailedStreamRequestBlockingUnaryResponse(self):
requests = tuple(
b"\x47\x58" for _ in range(test_constants.STREAM_LENGTH)
)
request_iterator = iter(requests)
multi_callable = stream_unary_multi_callable(self._channel)
with self._control.fail():
with self.assertRaises(grpc.RpcError) as exception_context:
multi_callable(
request_iterator,
metadata=(
("test", "FailedStreamRequestBlockingUnaryResponse"),
),
)
self.assertIs(
grpc.StatusCode.UNKNOWN, exception_context.exception.code()
)
def testFailedStreamRequestFutureUnaryResponse(self):
requests = tuple(
b"\x07\x18" for _ in range(test_constants.STREAM_LENGTH)
)
request_iterator = iter(requests)
callback = Callback()
multi_callable = stream_unary_multi_callable(self._channel)
with self._control.fail():
response_future = multi_callable.future(
request_iterator,
metadata=(("test", "FailedStreamRequestFutureUnaryResponse"),),
)
response_future.add_done_callback(callback)
value_passed_to_callback = callback.value()
with self.assertRaises(grpc.RpcError) as exception_context:
response_future.result()
self.assertIs(grpc.StatusCode.UNKNOWN, response_future.code())
self.assertIs(
grpc.StatusCode.UNKNOWN, exception_context.exception.code()
)
self.assertIsInstance(response_future.exception(), grpc.RpcError)
self.assertIsNotNone(response_future.traceback())
self.assertIs(response_future, value_passed_to_callback)
def testFailedStreamRequestStreamResponse(self):
self._failed_stream_request_stream_response(
stream_stream_multi_callable(self._channel)
)
def testFailedStreamRequestStreamResponseNonBlocking(self):
self._failed_stream_request_stream_response(
stream_stream_non_blocking_multi_callable(self._channel)
)
def testIgnoredUnaryRequestFutureUnaryResponse(self):
request = b"\x37\x17"
multi_callable = unary_unary_multi_callable(self._channel)
multi_callable.future(
request,
metadata=(("test", "IgnoredUnaryRequestFutureUnaryResponse"),),
)
def testIgnoredUnaryRequestStreamResponse(self):
self._ignored_unary_stream_request_future_unary_response(
unary_stream_multi_callable(self._channel)
)
def testIgnoredUnaryRequestStreamResponseNonBlocking(self):
self._ignored_unary_stream_request_future_unary_response(
unary_stream_non_blocking_multi_callable(self._channel)
)
def testIgnoredStreamRequestFutureUnaryResponse(self):
requests = tuple(
b"\x07\x18" for _ in range(test_constants.STREAM_LENGTH)
)
request_iterator = iter(requests)
multi_callable = stream_unary_multi_callable(self._channel)
multi_callable.future(
request_iterator,
metadata=(("test", "IgnoredStreamRequestFutureUnaryResponse"),),
)
def testIgnoredStreamRequestStreamResponse(self):
self._ignored_stream_request_stream_response(
stream_stream_multi_callable(self._channel)
)
def testIgnoredStreamRequestStreamResponseNonBlocking(self):
self._ignored_stream_request_stream_response(
stream_stream_non_blocking_multi_callable(self._channel)
)
if __name__ == "__main__":
logging.basicConfig()
    unittest.main(verbosity=2)
| 10,789
| 38.52381
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_channel_connectivity_test.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests of grpc._channel.Channel connectivity."""
import logging
import threading
import time
import unittest
import grpc
from tests.unit import thread_pool
from tests.unit.framework.common import test_constants
def _ready_in_connectivities(connectivities):
return grpc.ChannelConnectivity.READY in connectivities
def _last_connectivity_is_not_ready(connectivities):
return connectivities[-1] is not grpc.ChannelConnectivity.READY
class _Callback(object):
def __init__(self):
self._condition = threading.Condition()
self._connectivities = []
def update(self, connectivity):
with self._condition:
self._connectivities.append(connectivity)
self._condition.notify()
def connectivities(self):
with self._condition:
return tuple(self._connectivities)
def block_until_connectivities_satisfy(self, predicate):
with self._condition:
while True:
connectivities = tuple(self._connectivities)
if predicate(connectivities):
return connectivities
else:
self._condition.wait()
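# A minimal sketch of the subscription pattern the tests below exercise
# (the channel target here is arbitrary):
#
#     callback = _Callback()
#     channel = grpc.insecure_channel("localhost:12345")
#     channel.subscribe(callback.update, try_to_connect=True)
#     callback.block_until_connectivities_satisfy(_ready_in_connectivities)
#     channel.unsubscribe(callback.update)
#     channel.close()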
class ChannelConnectivityTest(unittest.TestCase):
def test_lonely_channel_connectivity(self):
callback = _Callback()
channel = grpc.insecure_channel("localhost:12345")
channel.subscribe(callback.update, try_to_connect=False)
first_connectivities = callback.block_until_connectivities_satisfy(bool)
channel.subscribe(callback.update, try_to_connect=True)
second_connectivities = callback.block_until_connectivities_satisfy(
lambda connectivities: 2 <= len(connectivities)
)
# Wait for a connection that will never happen.
time.sleep(test_constants.SHORT_TIMEOUT)
third_connectivities = callback.connectivities()
channel.unsubscribe(callback.update)
fourth_connectivities = callback.connectivities()
channel.unsubscribe(callback.update)
fifth_connectivities = callback.connectivities()
channel.close()
self.assertSequenceEqual(
(grpc.ChannelConnectivity.IDLE,), first_connectivities
)
self.assertNotIn(grpc.ChannelConnectivity.READY, second_connectivities)
self.assertNotIn(grpc.ChannelConnectivity.READY, third_connectivities)
self.assertNotIn(grpc.ChannelConnectivity.READY, fourth_connectivities)
self.assertNotIn(grpc.ChannelConnectivity.READY, fifth_connectivities)
def test_immediately_connectable_channel_connectivity(self):
recording_thread_pool = thread_pool.RecordingThreadPool(
max_workers=None
)
server = grpc.server(
recording_thread_pool, options=(("grpc.so_reuseport", 0),)
)
port = server.add_insecure_port("[::]:0")
server.start()
first_callback = _Callback()
second_callback = _Callback()
channel = grpc.insecure_channel("localhost:{}".format(port))
channel.subscribe(first_callback.update, try_to_connect=False)
first_connectivities = (
first_callback.block_until_connectivities_satisfy(bool)
)
# Wait for a connection that will never happen because try_to_connect=True
# has not yet been passed.
time.sleep(test_constants.SHORT_TIMEOUT)
second_connectivities = first_callback.connectivities()
channel.subscribe(second_callback.update, try_to_connect=True)
third_connectivities = (
first_callback.block_until_connectivities_satisfy(
lambda connectivities: 2 <= len(connectivities)
)
)
fourth_connectivities = (
second_callback.block_until_connectivities_satisfy(bool)
)
# Wait for a connection that will happen (or may already have happened).
first_callback.block_until_connectivities_satisfy(
_ready_in_connectivities
)
second_callback.block_until_connectivities_satisfy(
_ready_in_connectivities
)
channel.close()
server.stop(None)
self.assertSequenceEqual(
(grpc.ChannelConnectivity.IDLE,), first_connectivities
)
self.assertSequenceEqual(
(grpc.ChannelConnectivity.IDLE,), second_connectivities
)
self.assertNotIn(
grpc.ChannelConnectivity.TRANSIENT_FAILURE, third_connectivities
)
self.assertNotIn(
grpc.ChannelConnectivity.SHUTDOWN, third_connectivities
)
self.assertNotIn(
grpc.ChannelConnectivity.TRANSIENT_FAILURE, fourth_connectivities
)
self.assertNotIn(
grpc.ChannelConnectivity.SHUTDOWN, fourth_connectivities
)
self.assertFalse(recording_thread_pool.was_used())
def test_reachable_then_unreachable_channel_connectivity(self):
recording_thread_pool = thread_pool.RecordingThreadPool(
max_workers=None
)
server = grpc.server(
recording_thread_pool, options=(("grpc.so_reuseport", 0),)
)
port = server.add_insecure_port("[::]:0")
server.start()
callback = _Callback()
channel = grpc.insecure_channel("localhost:{}".format(port))
channel.subscribe(callback.update, try_to_connect=True)
callback.block_until_connectivities_satisfy(_ready_in_connectivities)
# Now take down the server and confirm that channel readiness is repudiated.
server.stop(None)
callback.block_until_connectivities_satisfy(
_last_connectivity_is_not_ready
)
channel.unsubscribe(callback.update)
channel.close()
self.assertFalse(recording_thread_pool.was_used())
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
| 6,498
| 36.137143
| 84
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_invocation_defects_test.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import unittest
import grpc
from tests.unit import test_common
from tests.unit.framework.common import test_constants
from tests.unit.framework.common import test_control
_SERIALIZE_REQUEST = lambda bytestring: bytestring * 2
_DESERIALIZE_REQUEST = lambda bytestring: bytestring[len(bytestring) // 2 :]
_SERIALIZE_RESPONSE = lambda bytestring: bytestring * 3
_DESERIALIZE_RESPONSE = lambda bytestring: bytestring[: len(bytestring) // 3]
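# The serializers and deserializers above are intentionally asymmetric but
# round-trip cleanly; e.g. for b = b"\x07\x08":
#
#     _DESERIALIZE_REQUEST(_SERIALIZE_REQUEST(b)) == b
#     _DESERIALIZE_RESPONSE(_SERIALIZE_RESPONSE(b)) == b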
_UNARY_UNARY = "/test/UnaryUnary"
_UNARY_UNARY_NESTED_EXCEPTION = "/test/UnaryUnaryNestedException"
_UNARY_STREAM = "/test/UnaryStream"
_STREAM_UNARY = "/test/StreamUnary"
_STREAM_STREAM = "/test/StreamStream"
_DEFECTIVE_GENERIC_RPC_HANDLER = "/test/DefectiveGenericRpcHandler"
class _Handler(object):
def __init__(self, control):
self._control = control
def handle_unary_unary(self, request, servicer_context):
self._control.control()
if servicer_context is not None:
servicer_context.set_trailing_metadata(
(
(
"testkey",
"testvalue",
),
)
)
return request
def handle_unary_unary_with_nested_exception(
self, request, servicer_context
):
raise test_control.NestedDefect()
def handle_unary_stream(self, request, servicer_context):
for _ in range(test_constants.STREAM_LENGTH):
self._control.control()
yield request
self._control.control()
if servicer_context is not None:
servicer_context.set_trailing_metadata(
(
(
"testkey",
"testvalue",
),
)
)
def handle_stream_unary(self, request_iterator, servicer_context):
if servicer_context is not None:
servicer_context.invocation_metadata()
self._control.control()
response_elements = []
for request in request_iterator:
self._control.control()
response_elements.append(request)
self._control.control()
if servicer_context is not None:
servicer_context.set_trailing_metadata(
(
(
"testkey",
"testvalue",
),
)
)
return b"".join(response_elements)
def handle_stream_stream(self, request_iterator, servicer_context):
self._control.control()
if servicer_context is not None:
servicer_context.set_trailing_metadata(
(
(
"testkey",
"testvalue",
),
)
)
for request in request_iterator:
self._control.control()
yield request
self._control.control()
def defective_generic_rpc_handler(self):
raise test_control.Defect()
class _MethodHandler(grpc.RpcMethodHandler):
def __init__(
self,
request_streaming,
response_streaming,
request_deserializer,
response_serializer,
unary_unary,
unary_stream,
stream_unary,
stream_stream,
):
self.request_streaming = request_streaming
self.response_streaming = response_streaming
self.request_deserializer = request_deserializer
self.response_serializer = response_serializer
self.unary_unary = unary_unary
self.unary_stream = unary_stream
self.stream_unary = stream_unary
self.stream_stream = stream_stream
class _GenericHandler(grpc.GenericRpcHandler):
def __init__(self, handler):
self._handler = handler
def service(self, handler_call_details):
if handler_call_details.method == _UNARY_UNARY:
return _MethodHandler(
False,
False,
None,
None,
self._handler.handle_unary_unary,
None,
None,
None,
)
elif handler_call_details.method == _UNARY_STREAM:
return _MethodHandler(
False,
True,
_DESERIALIZE_REQUEST,
_SERIALIZE_RESPONSE,
None,
self._handler.handle_unary_stream,
None,
None,
)
elif handler_call_details.method == _STREAM_UNARY:
return _MethodHandler(
True,
False,
_DESERIALIZE_REQUEST,
_SERIALIZE_RESPONSE,
None,
None,
self._handler.handle_stream_unary,
None,
)
elif handler_call_details.method == _STREAM_STREAM:
return _MethodHandler(
True,
True,
None,
None,
None,
None,
None,
self._handler.handle_stream_stream,
)
elif handler_call_details.method == _DEFECTIVE_GENERIC_RPC_HANDLER:
return self._handler.defective_generic_rpc_handler()
elif handler_call_details.method == _UNARY_UNARY_NESTED_EXCEPTION:
return _MethodHandler(
False,
False,
None,
None,
self._handler.handle_unary_unary_with_nested_exception,
None,
None,
None,
)
else:
return None
class FailAfterFewIterationsCounter(object):
def __init__(self, high, bytestring):
self._current = 0
self._high = high
self._bytestring = bytestring
def __iter__(self):
return self
def __next__(self):
if self._current >= self._high:
raise test_control.Defect()
else:
self._current += 1
return self._bytestring
next = __next__
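# FailAfterFewIterationsCounter yields `bytestring` `high` times and then
# raises test_control.Defect(), simulating a request iterator that breaks
# mid-stream.  A hedged usage sketch mirroring the test further below:
#
#     requests = FailAfterFewIterationsCounter(5, b"\x07\x08")
#     for request in requests:   # raises Defect after the fifth item
#         ...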
def _unary_unary_multi_callable(channel):
return channel.unary_unary(_UNARY_UNARY)
def _unary_stream_multi_callable(channel):
return channel.unary_stream(
_UNARY_STREAM,
request_serializer=_SERIALIZE_REQUEST,
response_deserializer=_DESERIALIZE_RESPONSE,
)
def _stream_unary_multi_callable(channel):
return channel.stream_unary(
_STREAM_UNARY,
request_serializer=_SERIALIZE_REQUEST,
response_deserializer=_DESERIALIZE_RESPONSE,
)
def _stream_stream_multi_callable(channel):
return channel.stream_stream(_STREAM_STREAM)
def _defective_handler_multi_callable(channel):
return channel.unary_unary(_DEFECTIVE_GENERIC_RPC_HANDLER)
def _defective_nested_exception_handler_multi_callable(channel):
return channel.unary_unary(_UNARY_UNARY_NESTED_EXCEPTION)
class InvocationDefectsTest(unittest.TestCase):
"""Tests the handling of exception-raising user code on the client-side."""
def setUp(self):
self._control = test_control.PauseFailControl()
self._handler = _Handler(self._control)
self._server = test_common.test_server()
port = self._server.add_insecure_port("[::]:0")
self._server.add_generic_rpc_handlers((_GenericHandler(self._handler),))
self._server.start()
self._channel = grpc.insecure_channel("localhost:%d" % port)
def tearDown(self):
self._server.stop(0)
self._channel.close()
def testIterableStreamRequestBlockingUnaryResponse(self):
requests = object()
multi_callable = _stream_unary_multi_callable(self._channel)
with self.assertRaises(grpc.RpcError) as exception_context:
multi_callable(
requests,
metadata=(
("test", "IterableStreamRequestBlockingUnaryResponse"),
),
)
self.assertIs(
grpc.StatusCode.UNKNOWN, exception_context.exception.code()
)
def testIterableStreamRequestFutureUnaryResponse(self):
requests = object()
multi_callable = _stream_unary_multi_callable(self._channel)
response_future = multi_callable.future(
requests,
metadata=(("test", "IterableStreamRequestFutureUnaryResponse"),),
)
with self.assertRaises(grpc.RpcError) as exception_context:
response_future.result()
self.assertIs(
grpc.StatusCode.UNKNOWN, exception_context.exception.code()
)
def testIterableStreamRequestStreamResponse(self):
requests = object()
multi_callable = _stream_stream_multi_callable(self._channel)
response_iterator = multi_callable(
requests,
metadata=(("test", "IterableStreamRequestStreamResponse"),),
)
with self.assertRaises(grpc.RpcError) as exception_context:
next(response_iterator)
self.assertIs(
grpc.StatusCode.UNKNOWN, exception_context.exception.code()
)
def testIteratorStreamRequestStreamResponse(self):
requests_iterator = FailAfterFewIterationsCounter(
test_constants.STREAM_LENGTH // 2, b"\x07\x08"
)
multi_callable = _stream_stream_multi_callable(self._channel)
response_iterator = multi_callable(
requests_iterator,
metadata=(("test", "IteratorStreamRequestStreamResponse"),),
)
with self.assertRaises(grpc.RpcError) as exception_context:
for _ in range(test_constants.STREAM_LENGTH // 2 + 1):
next(response_iterator)
self.assertIs(
grpc.StatusCode.UNKNOWN, exception_context.exception.code()
)
def testDefectiveGenericRpcHandlerUnaryResponse(self):
request = b"\x07\x08"
multi_callable = _defective_handler_multi_callable(self._channel)
with self.assertRaises(grpc.RpcError) as exception_context:
multi_callable(
request, metadata=(("test", "DefectiveGenericRpcHandlerUnary"),)
)
self.assertIs(
grpc.StatusCode.UNKNOWN, exception_context.exception.code()
)
def testNestedExceptionGenericRpcHandlerUnaryResponse(self):
request = b"\x07\x08"
multi_callable = _defective_nested_exception_handler_multi_callable(
self._channel
)
with self.assertRaises(grpc.RpcError) as exception_context:
multi_callable(
request, metadata=(("test", "DefectiveGenericRpcHandlerUnary"),)
)
self.assertIs(
grpc.StatusCode.UNKNOWN, exception_context.exception.code()
)
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
| 11,547
| 30.46594
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_api_test.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test of gRPC Python's application-layer API."""
import logging
import unittest
import grpc
from tests.unit import _from_grpc_import_star
class AllTest(unittest.TestCase):
def testAll(self):
expected_grpc_code_elements = (
"FutureTimeoutError",
"FutureCancelledError",
"Future",
"ChannelConnectivity",
"Compression",
"StatusCode",
"Status",
"RpcError",
"RpcContext",
"Call",
"ChannelCredentials",
"CallCredentials",
"AuthMetadataContext",
"AuthMetadataPluginCallback",
"AuthMetadataPlugin",
"ServerCertificateConfiguration",
"ServerCredentials",
"UnaryUnaryMultiCallable",
"UnaryStreamMultiCallable",
"StreamUnaryMultiCallable",
"StreamStreamMultiCallable",
"UnaryUnaryClientInterceptor",
"UnaryStreamClientInterceptor",
"StreamUnaryClientInterceptor",
"StreamStreamClientInterceptor",
"Channel",
"ServicerContext",
"RpcMethodHandler",
"HandlerCallDetails",
"GenericRpcHandler",
"ServiceRpcHandler",
"Server",
"ServerInterceptor",
"LocalConnectionType",
"local_channel_credentials",
"local_server_credentials",
"alts_channel_credentials",
"alts_server_credentials",
"unary_unary_rpc_method_handler",
"unary_stream_rpc_method_handler",
"stream_unary_rpc_method_handler",
"ClientCallDetails",
"stream_stream_rpc_method_handler",
"method_handlers_generic_handler",
"ssl_channel_credentials",
"metadata_call_credentials",
"access_token_call_credentials",
"composite_call_credentials",
"composite_channel_credentials",
"compute_engine_channel_credentials",
"ssl_server_credentials",
"ssl_server_certificate_configuration",
"dynamic_ssl_server_credentials",
"channel_ready_future",
"insecure_channel",
"secure_channel",
"intercept_channel",
"server",
"protos",
"services",
"protos_and_services",
"xds_channel_credentials",
"xds_server_credentials",
"insecure_server_credentials",
)
self.assertCountEqual(
expected_grpc_code_elements, _from_grpc_import_star.GRPC_ELEMENTS
)
class ChannelConnectivityTest(unittest.TestCase):
def testChannelConnectivity(self):
self.assertSequenceEqual(
(
grpc.ChannelConnectivity.IDLE,
grpc.ChannelConnectivity.CONNECTING,
grpc.ChannelConnectivity.READY,
grpc.ChannelConnectivity.TRANSIENT_FAILURE,
grpc.ChannelConnectivity.SHUTDOWN,
),
tuple(grpc.ChannelConnectivity),
)
class ChannelTest(unittest.TestCase):
def test_secure_channel(self):
channel_credentials = grpc.ssl_channel_credentials()
channel = grpc.secure_channel("google.com:443", channel_credentials)
channel.close()
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
| 4,059
| 32.278689
| 77
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_credentials_test.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests of credentials."""
import logging
import unittest
import grpc
class CredentialsTest(unittest.TestCase):
def test_call_credentials_composition(self):
first = grpc.access_token_call_credentials("abc")
second = grpc.access_token_call_credentials("def")
third = grpc.access_token_call_credentials("ghi")
first_and_second = grpc.composite_call_credentials(first, second)
first_second_and_third = grpc.composite_call_credentials(
first, second, third
)
self.assertIsInstance(first_and_second, grpc.CallCredentials)
self.assertIsInstance(first_second_and_third, grpc.CallCredentials)
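        # Illustrative follow-on (not exercised in this test): composite call
        # credentials would typically be supplied per-RPC over a secure
        # channel, e.g.
        #
        #     stub.SomeMethod(request, credentials=first_and_second)
        #
        # where `stub` and `SomeMethod` are hypothetical names.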
def test_channel_credentials_composition(self):
first_call_credentials = grpc.access_token_call_credentials("abc")
second_call_credentials = grpc.access_token_call_credentials("def")
third_call_credentials = grpc.access_token_call_credentials("ghi")
channel_credentials = grpc.ssl_channel_credentials()
channel_and_first = grpc.composite_channel_credentials(
channel_credentials, first_call_credentials
)
channel_first_and_second = grpc.composite_channel_credentials(
channel_credentials, first_call_credentials, second_call_credentials
)
channel_first_second_and_third = grpc.composite_channel_credentials(
channel_credentials,
first_call_credentials,
second_call_credentials,
third_call_credentials,
)
self.assertIsInstance(channel_and_first, grpc.ChannelCredentials)
self.assertIsInstance(channel_first_and_second, grpc.ChannelCredentials)
self.assertIsInstance(
channel_first_second_and_third, grpc.ChannelCredentials
)
def test_invalid_string_certificate(self):
self.assertRaises(
TypeError,
grpc.ssl_channel_credentials,
root_certificates="A Certificate",
private_key=None,
certificate_chain=None,
)
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
| 2,725
| 35.837838
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_exit_scenarios.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines a number of module-scope gRPC scenarios to test clean exit."""
import argparse
import logging
import threading
import time
import grpc
from tests.unit.framework.common import test_constants
WAIT_TIME = 1000
REQUEST = b"request"
UNSTARTED_SERVER = "unstarted_server"
RUNNING_SERVER = "running_server"
POLL_CONNECTIVITY_NO_SERVER = "poll_connectivity_no_server"
POLL_CONNECTIVITY = "poll_connectivity"
IN_FLIGHT_UNARY_UNARY_CALL = "in_flight_unary_unary_call"
IN_FLIGHT_UNARY_STREAM_CALL = "in_flight_unary_stream_call"
IN_FLIGHT_STREAM_UNARY_CALL = "in_flight_stream_unary_call"
IN_FLIGHT_STREAM_STREAM_CALL = "in_flight_stream_stream_call"
IN_FLIGHT_PARTIAL_UNARY_STREAM_CALL = "in_flight_partial_unary_stream_call"
IN_FLIGHT_PARTIAL_STREAM_UNARY_CALL = "in_flight_partial_stream_unary_call"
IN_FLIGHT_PARTIAL_STREAM_STREAM_CALL = "in_flight_partial_stream_stream_call"
UNARY_UNARY = b"/test/UnaryUnary"
UNARY_STREAM = b"/test/UnaryStream"
STREAM_UNARY = b"/test/StreamUnary"
STREAM_STREAM = b"/test/StreamStream"
PARTIAL_UNARY_STREAM = b"/test/PartialUnaryStream"
PARTIAL_STREAM_UNARY = b"/test/PartialStreamUnary"
PARTIAL_STREAM_STREAM = b"/test/PartialStreamStream"
TEST_TO_METHOD = {
IN_FLIGHT_UNARY_UNARY_CALL: UNARY_UNARY,
IN_FLIGHT_UNARY_STREAM_CALL: UNARY_STREAM,
IN_FLIGHT_STREAM_UNARY_CALL: STREAM_UNARY,
IN_FLIGHT_STREAM_STREAM_CALL: STREAM_STREAM,
IN_FLIGHT_PARTIAL_UNARY_STREAM_CALL: PARTIAL_UNARY_STREAM,
IN_FLIGHT_PARTIAL_STREAM_UNARY_CALL: PARTIAL_STREAM_UNARY,
IN_FLIGHT_PARTIAL_STREAM_STREAM_CALL: PARTIAL_STREAM_STREAM,
}
def hang_unary_unary(request, servicer_context):
time.sleep(WAIT_TIME)
def hang_unary_stream(request, servicer_context):
time.sleep(WAIT_TIME)
def hang_partial_unary_stream(request, servicer_context):
for _ in range(test_constants.STREAM_LENGTH // 2):
yield request
time.sleep(WAIT_TIME)
def hang_stream_unary(request_iterator, servicer_context):
time.sleep(WAIT_TIME)
def hang_partial_stream_unary(request_iterator, servicer_context):
for _ in range(test_constants.STREAM_LENGTH // 2):
next(request_iterator)
time.sleep(WAIT_TIME)
def hang_stream_stream(request_iterator, servicer_context):
time.sleep(WAIT_TIME)
def hang_partial_stream_stream(request_iterator, servicer_context):
for _ in range(test_constants.STREAM_LENGTH // 2):
yield next(request_iterator) # pylint: disable=stop-iteration-return
time.sleep(WAIT_TIME)
class MethodHandler(grpc.RpcMethodHandler):
def __init__(self, request_streaming, response_streaming, partial_hang):
self.request_streaming = request_streaming
self.response_streaming = response_streaming
self.request_deserializer = None
self.response_serializer = None
self.unary_unary = None
self.unary_stream = None
self.stream_unary = None
self.stream_stream = None
if self.request_streaming and self.response_streaming:
if partial_hang:
self.stream_stream = hang_partial_stream_stream
else:
self.stream_stream = hang_stream_stream
elif self.request_streaming:
if partial_hang:
self.stream_unary = hang_partial_stream_unary
else:
self.stream_unary = hang_stream_unary
elif self.response_streaming:
if partial_hang:
self.unary_stream = hang_partial_unary_stream
else:
self.unary_stream = hang_unary_stream
else:
self.unary_unary = hang_unary_unary
class GenericHandler(grpc.GenericRpcHandler):
def service(self, handler_call_details):
if handler_call_details.method == UNARY_UNARY:
return MethodHandler(False, False, False)
elif handler_call_details.method == UNARY_STREAM:
return MethodHandler(False, True, False)
elif handler_call_details.method == STREAM_UNARY:
return MethodHandler(True, False, False)
elif handler_call_details.method == STREAM_STREAM:
return MethodHandler(True, True, False)
elif handler_call_details.method == PARTIAL_UNARY_STREAM:
return MethodHandler(False, True, True)
elif handler_call_details.method == PARTIAL_STREAM_UNARY:
return MethodHandler(True, False, True)
elif handler_call_details.method == PARTIAL_STREAM_STREAM:
return MethodHandler(True, True, True)
else:
return None
# Traditional executors will not exit until all their
# current jobs complete. Because we submit jobs that will
# never finish, we don't want to block exit on these jobs.
class DaemonPool(object):
def submit(self, fn, *args, **kwargs):
thread = threading.Thread(target=fn, args=args, kwargs=kwargs)
thread.daemon = True
thread.start()
def shutdown(self, wait=True):
pass
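# Illustrative contrast (not part of the original module): a standard executor
# such as concurrent.futures.ThreadPoolExecutor joins its worker threads at
# interpreter exit, so a server whose handlers sleep forever would keep the
# process alive; DaemonPool's daemon threads let the process exit immediately,
# e.g.
#   server = grpc.server(DaemonPool(), options=(("grpc.so_reuseport", 0),))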
def infinite_request_iterator():
while True:
yield REQUEST
if __name__ == "__main__":
logging.basicConfig()
parser = argparse.ArgumentParser()
parser.add_argument("scenario", type=str)
parser.add_argument(
"--wait_for_interrupt", dest="wait_for_interrupt", action="store_true"
)
args = parser.parse_args()
if args.scenario == UNSTARTED_SERVER:
server = grpc.server(DaemonPool(), options=(("grpc.so_reuseport", 0),))
if args.wait_for_interrupt:
time.sleep(WAIT_TIME)
elif args.scenario == RUNNING_SERVER:
server = grpc.server(DaemonPool(), options=(("grpc.so_reuseport", 0),))
port = server.add_insecure_port("[::]:0")
server.start()
if args.wait_for_interrupt:
time.sleep(WAIT_TIME)
elif args.scenario == POLL_CONNECTIVITY_NO_SERVER:
channel = grpc.insecure_channel("localhost:12345")
def connectivity_callback(connectivity):
pass
channel.subscribe(connectivity_callback, try_to_connect=True)
if args.wait_for_interrupt:
time.sleep(WAIT_TIME)
elif args.scenario == POLL_CONNECTIVITY:
server = grpc.server(DaemonPool(), options=(("grpc.so_reuseport", 0),))
port = server.add_insecure_port("[::]:0")
server.start()
channel = grpc.insecure_channel("localhost:%d" % port)
def connectivity_callback(connectivity):
pass
channel.subscribe(connectivity_callback, try_to_connect=True)
if args.wait_for_interrupt:
time.sleep(WAIT_TIME)
else:
handler = GenericHandler()
server = grpc.server(DaemonPool(), options=(("grpc.so_reuseport", 0),))
port = server.add_insecure_port("[::]:0")
server.add_generic_rpc_handlers((handler,))
server.start()
channel = grpc.insecure_channel("localhost:%d" % port)
method = TEST_TO_METHOD[args.scenario]
if args.scenario == IN_FLIGHT_UNARY_UNARY_CALL:
multi_callable = channel.unary_unary(method)
future = multi_callable.future(REQUEST)
result, call = multi_callable.with_call(REQUEST)
elif (
args.scenario == IN_FLIGHT_UNARY_STREAM_CALL
or args.scenario == IN_FLIGHT_PARTIAL_UNARY_STREAM_CALL
):
multi_callable = channel.unary_stream(method)
response_iterator = multi_callable(REQUEST)
for response in response_iterator:
pass
elif (
args.scenario == IN_FLIGHT_STREAM_UNARY_CALL
or args.scenario == IN_FLIGHT_PARTIAL_STREAM_UNARY_CALL
):
multi_callable = channel.stream_unary(method)
future = multi_callable.future(infinite_request_iterator())
result, call = multi_callable.with_call(
iter([REQUEST] * test_constants.STREAM_LENGTH)
)
elif (
args.scenario == IN_FLIGHT_STREAM_STREAM_CALL
or args.scenario == IN_FLIGHT_PARTIAL_STREAM_STREAM_CALL
):
multi_callable = channel.stream_stream(method)
response_iterator = multi_callable(infinite_request_iterator())
for response in response_iterator:
pass
| 8,835
| 35.6639
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_channel_close_test.py
|
# Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests server and client side compression."""
import itertools
import logging
import threading
import time
import unittest
import grpc
from tests.unit import test_common
from tests.unit.framework.common import test_constants
_BEAT = 0.5
_SOME_TIME = 5
_MORE_TIME = 10
_STREAM_URI = "Meffod"
_UNARY_URI = "MeffodMan"
class _StreamingMethodHandler(grpc.RpcMethodHandler):
request_streaming = True
response_streaming = True
request_deserializer = None
response_serializer = None
def stream_stream(self, request_iterator, servicer_context):
for request in request_iterator:
yield request * 2
class _UnaryMethodHandler(grpc.RpcMethodHandler):
request_streaming = False
response_streaming = False
request_deserializer = None
response_serializer = None
def unary_unary(self, request, servicer_context):
return request * 2
_STREAMING_METHOD_HANDLER = _StreamingMethodHandler()
_UNARY_METHOD_HANDLER = _UnaryMethodHandler()
class _GenericHandler(grpc.GenericRpcHandler):
def service(self, handler_call_details):
if handler_call_details.method == _STREAM_URI:
return _STREAMING_METHOD_HANDLER
else:
return _UNARY_METHOD_HANDLER
_GENERIC_HANDLER = _GenericHandler()
class _Pipe(object):
def __init__(self, values):
self._condition = threading.Condition()
self._values = list(values)
self._open = True
def __iter__(self):
return self
def _next(self):
with self._condition:
while not self._values and self._open:
self._condition.wait()
if self._values:
return self._values.pop(0)
else:
raise StopIteration()
def next(self):
return self._next()
def __next__(self):
return self._next()
def add(self, value):
with self._condition:
self._values.append(value)
self._condition.notify()
def close(self):
with self._condition:
self._open = False
self._condition.notify()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
class ChannelCloseTest(unittest.TestCase):
def setUp(self):
self._server = test_common.test_server(
max_workers=test_constants.THREAD_CONCURRENCY
)
self._server.add_generic_rpc_handlers((_GENERIC_HANDLER,))
self._port = self._server.add_insecure_port("[::]:0")
self._server.start()
def tearDown(self):
self._server.stop(None)
def test_close_immediately_after_call_invocation(self):
channel = grpc.insecure_channel("localhost:{}".format(self._port))
multi_callable = channel.stream_stream(_STREAM_URI)
request_iterator = _Pipe(())
response_iterator = multi_callable(request_iterator)
channel.close()
request_iterator.close()
self.assertIs(response_iterator.code(), grpc.StatusCode.CANCELLED)
def test_close_while_call_active(self):
channel = grpc.insecure_channel("localhost:{}".format(self._port))
multi_callable = channel.stream_stream(_STREAM_URI)
request_iterator = _Pipe((b"abc",))
response_iterator = multi_callable(request_iterator)
next(response_iterator)
channel.close()
request_iterator.close()
self.assertIs(response_iterator.code(), grpc.StatusCode.CANCELLED)
def test_context_manager_close_while_call_active(self):
with grpc.insecure_channel(
"localhost:{}".format(self._port)
) as channel: # pylint: disable=bad-continuation
multi_callable = channel.stream_stream(_STREAM_URI)
request_iterator = _Pipe((b"abc",))
response_iterator = multi_callable(request_iterator)
next(response_iterator)
request_iterator.close()
self.assertIs(response_iterator.code(), grpc.StatusCode.CANCELLED)
def test_context_manager_close_while_many_calls_active(self):
with grpc.insecure_channel(
"localhost:{}".format(self._port)
) as channel: # pylint: disable=bad-continuation
multi_callable = channel.stream_stream(_STREAM_URI)
request_iterators = tuple(
_Pipe((b"abc",))
for _ in range(test_constants.THREAD_CONCURRENCY)
)
response_iterators = []
for request_iterator in request_iterators:
response_iterator = multi_callable(request_iterator)
next(response_iterator)
response_iterators.append(response_iterator)
for request_iterator in request_iterators:
request_iterator.close()
for response_iterator in response_iterators:
self.assertIs(response_iterator.code(), grpc.StatusCode.CANCELLED)
def test_many_concurrent_closes(self):
channel = grpc.insecure_channel("localhost:{}".format(self._port))
multi_callable = channel.stream_stream(_STREAM_URI)
request_iterator = _Pipe((b"abc",))
response_iterator = multi_callable(request_iterator)
next(response_iterator)
start = time.time()
end = start + _MORE_TIME
def sleep_some_time_then_close():
time.sleep(_SOME_TIME)
channel.close()
for _ in range(test_constants.THREAD_CONCURRENCY):
close_thread = threading.Thread(target=sleep_some_time_then_close)
close_thread.start()
while True:
request_iterator.add(b"def")
time.sleep(_BEAT)
if end < time.time():
break
request_iterator.close()
self.assertIs(response_iterator.code(), grpc.StatusCode.CANCELLED)
def test_exception_in_callback(self):
with grpc.insecure_channel(
"localhost:{}".format(self._port)
) as channel:
stream_multi_callable = channel.stream_stream(_STREAM_URI)
endless_iterator = itertools.repeat(b"abc")
stream_response_iterator = stream_multi_callable(endless_iterator)
future = channel.unary_unary(_UNARY_URI).future(b"abc")
def on_done_callback(future):
raise Exception("This should not cause a deadlock.")
future.add_done_callback(on_done_callback)
future.result()
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
| 7,130
| 31.266968
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/test_common.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common code used throughout tests of gRPC."""
import collections
from concurrent import futures
import threading
import grpc
INVOCATION_INITIAL_METADATA = (
("0", "abc"),
("1", "def"),
("2", "ghi"),
)
SERVICE_INITIAL_METADATA = (
("3", "jkl"),
("4", "mno"),
("5", "pqr"),
)
SERVICE_TERMINAL_METADATA = (
("6", "stu"),
("7", "vwx"),
("8", "yza"),
)
DETAILS = "test details"
def metadata_transmitted(original_metadata, transmitted_metadata):
"""Judges whether or not metadata was acceptably transmitted.
gRPC is allowed to insert key-value pairs into the metadata values given by
applications and to reorder key-value pairs with different keys but it is not
allowed to alter existing key-value pairs or to reorder key-value pairs with
the same key.
Args:
original_metadata: A metadata value used in a test of gRPC. An iterable over
iterables of length 2.
transmitted_metadata: A metadata value corresponding to original_metadata
after having been transmitted via gRPC. An iterable over iterables of
length 2.
Returns:
A boolean indicating whether transmitted_metadata accurately reflects
original_metadata after having been transmitted via gRPC.
"""
original = collections.defaultdict(list)
for key, value in original_metadata:
original[key].append(value)
transmitted = collections.defaultdict(list)
for key, value in transmitted_metadata:
transmitted[key].append(value)
for key, values in original.items():
transmitted_values = transmitted[key]
transmitted_iterator = iter(transmitted_values)
try:
for value in values:
while True:
transmitted_value = next(transmitted_iterator)
if value == transmitted_value:
break
except StopIteration:
return False
else:
return True
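# Illustrative examples (not part of the original module) of the rule encoded
# above: values sharing a key must keep their relative order, while extra
# pairs and reordering across different keys are tolerated.
#   metadata_transmitted((("k", "1"), ("k", "2")),
#                        (("k", "1"), ("x", "y"), ("k", "2")))  # -> True
#   metadata_transmitted((("k", "1"), ("k", "2")),
#                        (("k", "2"), ("k", "1")))              # -> False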
def test_secure_channel(target, channel_credentials, server_host_override):
"""Creates an insecure Channel to a remote host.
Args:
host: The name of the remote host to which to connect.
port: The port of the remote host to which to connect.
channel_credentials: The implementations.ChannelCredentials with which to
connect.
server_host_override: The target name used for SSL host name checking.
Returns:
An implementations.Channel to the remote host through which RPCs may be
conducted.
"""
channel = grpc.secure_channel(
target,
channel_credentials,
(
(
"grpc.ssl_target_name_override",
server_host_override,
),
),
)
return channel
def test_server(max_workers=10, reuse_port=False):
"""Creates an insecure grpc server.
These servers have SO_REUSEPORT disabled to prevent cross-talk.
"""
return grpc.server(
futures.ThreadPoolExecutor(max_workers=max_workers),
options=(("grpc.so_reuseport", int(reuse_port)),),
)
class WaitGroup(object):
def __init__(self, n=0):
self.count = n
self.cv = threading.Condition()
def add(self, n):
self.cv.acquire()
self.count += n
self.cv.release()
def done(self):
self.cv.acquire()
self.count -= 1
if self.count == 0:
self.cv.notify_all()
self.cv.release()
def wait(self):
self.cv.acquire()
while self.count > 0:
self.cv.wait()
self.cv.release()
def running_under_gevent():
try:
from gevent import monkey
import gevent.socket
except ImportError:
return False
else:
import socket
return socket.socket is gevent.socket.socket
| 4,417
| 27.875817
| 82
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_resource_exhausted_test.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests server responding with RESOURCE_EXHAUSTED."""
import logging
import threading
import unittest
import grpc
from grpc import _channel
from grpc.framework.foundation import logging_pool
from tests.unit import test_common
from tests.unit.framework.common import test_constants
_REQUEST = b"\x00\x00\x00"
_RESPONSE = b"\x00\x00\x00"
_UNARY_UNARY = "/test/UnaryUnary"
_UNARY_STREAM = "/test/UnaryStream"
_STREAM_UNARY = "/test/StreamUnary"
_STREAM_STREAM = "/test/StreamStream"
class _TestTrigger(object):
def __init__(self, total_call_count):
self._total_call_count = total_call_count
self._pending_calls = 0
self._triggered = False
self._finish_condition = threading.Condition()
self._start_condition = threading.Condition()
    # Wait for all calls to be blocked in their handlers
def await_calls(self):
with self._start_condition:
while self._pending_calls < self._total_call_count:
self._start_condition.wait()
# Block in a response handler and wait for a trigger
def await_trigger(self):
with self._start_condition:
self._pending_calls += 1
self._start_condition.notify()
with self._finish_condition:
if not self._triggered:
self._finish_condition.wait()
# Finish all response handlers
def trigger(self):
with self._finish_condition:
self._triggered = True
self._finish_condition.notify_all()
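# Illustrative call sequence (an assumed summary, mirroring the test below):
#   trigger = _TestTrigger(test_constants.THREAD_CONCURRENCY)
#   # ...start THREAD_CONCURRENCY RPCs so every server thread parks in
#   # await_trigger()...
#   trigger.await_calls()   # wait until every handler is blocked
#   # ...verify that one extra RPC is rejected with RESOURCE_EXHAUSTED...
#   trigger.trigger()       # release the parked handlers so the RPCs finish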
def handle_unary_unary(trigger, request, servicer_context):
trigger.await_trigger()
return _RESPONSE
def handle_unary_stream(trigger, request, servicer_context):
trigger.await_trigger()
for _ in range(test_constants.STREAM_LENGTH):
yield _RESPONSE
def handle_stream_unary(trigger, request_iterator, servicer_context):
trigger.await_trigger()
# TODO(issue:#6891) We should be able to remove this loop
for request in request_iterator:
pass
return _RESPONSE
def handle_stream_stream(trigger, request_iterator, servicer_context):
trigger.await_trigger()
# TODO(issue:#6891) We should be able to remove this loop,
# and replace with return; yield
for request in request_iterator:
yield _RESPONSE
class _MethodHandler(grpc.RpcMethodHandler):
def __init__(self, trigger, request_streaming, response_streaming):
self.request_streaming = request_streaming
self.response_streaming = response_streaming
self.request_deserializer = None
self.response_serializer = None
self.unary_unary = None
self.unary_stream = None
self.stream_unary = None
self.stream_stream = None
if self.request_streaming and self.response_streaming:
self.stream_stream = lambda x, y: handle_stream_stream(
trigger, x, y
)
elif self.request_streaming:
self.stream_unary = lambda x, y: handle_stream_unary(trigger, x, y)
elif self.response_streaming:
self.unary_stream = lambda x, y: handle_unary_stream(trigger, x, y)
else:
self.unary_unary = lambda x, y: handle_unary_unary(trigger, x, y)
class _GenericHandler(grpc.GenericRpcHandler):
def __init__(self, trigger):
self._trigger = trigger
def service(self, handler_call_details):
if handler_call_details.method == _UNARY_UNARY:
return _MethodHandler(self._trigger, False, False)
elif handler_call_details.method == _UNARY_STREAM:
return _MethodHandler(self._trigger, False, True)
elif handler_call_details.method == _STREAM_UNARY:
return _MethodHandler(self._trigger, True, False)
elif handler_call_details.method == _STREAM_STREAM:
return _MethodHandler(self._trigger, True, True)
else:
return None
class ResourceExhaustedTest(unittest.TestCase):
def setUp(self):
self._server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
self._trigger = _TestTrigger(test_constants.THREAD_CONCURRENCY)
self._server = grpc.server(
self._server_pool,
handlers=(_GenericHandler(self._trigger),),
options=(("grpc.so_reuseport", 0),),
maximum_concurrent_rpcs=test_constants.THREAD_CONCURRENCY,
)
port = self._server.add_insecure_port("[::]:0")
self._server.start()
self._channel = grpc.insecure_channel("localhost:%d" % port)
def tearDown(self):
self._server.stop(0)
self._channel.close()
def testUnaryUnary(self):
multi_callable = self._channel.unary_unary(_UNARY_UNARY)
futures = []
for _ in range(test_constants.THREAD_CONCURRENCY):
futures.append(multi_callable.future(_REQUEST))
self._trigger.await_calls()
with self.assertRaises(grpc.RpcError) as exception_context:
multi_callable(_REQUEST)
self.assertEqual(
grpc.StatusCode.RESOURCE_EXHAUSTED,
exception_context.exception.code(),
)
future_exception = multi_callable.future(_REQUEST)
self.assertEqual(
grpc.StatusCode.RESOURCE_EXHAUSTED,
future_exception.exception().code(),
)
self._trigger.trigger()
for future in futures:
self.assertEqual(_RESPONSE, future.result())
# Ensure a new request can be handled
self.assertEqual(_RESPONSE, multi_callable(_REQUEST))
def testUnaryStream(self):
multi_callable = self._channel.unary_stream(_UNARY_STREAM)
calls = []
for _ in range(test_constants.THREAD_CONCURRENCY):
calls.append(multi_callable(_REQUEST))
self._trigger.await_calls()
with self.assertRaises(grpc.RpcError) as exception_context:
next(multi_callable(_REQUEST))
self.assertEqual(
grpc.StatusCode.RESOURCE_EXHAUSTED,
exception_context.exception.code(),
)
self._trigger.trigger()
for call in calls:
for response in call:
self.assertEqual(_RESPONSE, response)
# Ensure a new request can be handled
new_call = multi_callable(_REQUEST)
for response in new_call:
self.assertEqual(_RESPONSE, response)
def testStreamUnary(self):
multi_callable = self._channel.stream_unary(_STREAM_UNARY)
futures = []
request = iter([_REQUEST] * test_constants.STREAM_LENGTH)
for _ in range(test_constants.THREAD_CONCURRENCY):
futures.append(multi_callable.future(request))
self._trigger.await_calls()
with self.assertRaises(grpc.RpcError) as exception_context:
multi_callable(request)
self.assertEqual(
grpc.StatusCode.RESOURCE_EXHAUSTED,
exception_context.exception.code(),
)
future_exception = multi_callable.future(request)
self.assertEqual(
grpc.StatusCode.RESOURCE_EXHAUSTED,
future_exception.exception().code(),
)
self._trigger.trigger()
for future in futures:
self.assertEqual(_RESPONSE, future.result())
# Ensure a new request can be handled
self.assertEqual(_RESPONSE, multi_callable(request))
def testStreamStream(self):
multi_callable = self._channel.stream_stream(_STREAM_STREAM)
calls = []
request = iter([_REQUEST] * test_constants.STREAM_LENGTH)
for _ in range(test_constants.THREAD_CONCURRENCY):
calls.append(multi_callable(request))
self._trigger.await_calls()
with self.assertRaises(grpc.RpcError) as exception_context:
next(multi_callable(request))
self.assertEqual(
grpc.StatusCode.RESOURCE_EXHAUSTED,
exception_context.exception.code(),
)
self._trigger.trigger()
for call in calls:
for response in call:
self.assertEqual(_RESPONSE, response)
# Ensure a new request can be handled
new_call = multi_callable(request)
for response in new_call:
self.assertEqual(_RESPONSE, response)
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
| 8,946
| 32.137037
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_signal_handling_test.py
|
# Copyright 2019 the gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test of responsiveness to signals."""
import logging
import os
import signal
import subprocess
import sys
import tempfile
import threading
import unittest
import grpc
from tests.unit import _signal_client
from tests.unit import test_common
_CLIENT_PATH = None
if sys.executable is not None:
_CLIENT_PATH = os.path.abspath(os.path.realpath(_signal_client.__file__))
else:
# NOTE(rbellevi): For compatibility with internal testing.
if len(sys.argv) != 2:
raise RuntimeError("Must supply path to executable client.")
client_name = sys.argv[1].split("/")[-1]
del sys.argv[1] # For compatibility with test runner.
_CLIENT_PATH = os.path.realpath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), client_name)
)
_HOST = "localhost"
# The gevent test harness cannot run the monkeypatch code for the child process,
# so we need to instrument it manually.
_GEVENT_ARG = ("--gevent",) if test_common.running_under_gevent() else ()
class _GenericHandler(grpc.GenericRpcHandler):
def __init__(self):
self._connected_clients_lock = threading.RLock()
self._connected_clients_event = threading.Event()
self._connected_clients = 0
self._unary_unary_handler = grpc.unary_unary_rpc_method_handler(
self._handle_unary_unary
)
self._unary_stream_handler = grpc.unary_stream_rpc_method_handler(
self._handle_unary_stream
)
def _on_client_connect(self):
with self._connected_clients_lock:
self._connected_clients += 1
self._connected_clients_event.set()
def _on_client_disconnect(self):
with self._connected_clients_lock:
self._connected_clients -= 1
if self._connected_clients == 0:
self._connected_clients_event.clear()
def await_connected_client(self):
"""Blocks until a client connects to the server."""
self._connected_clients_event.wait()
def _handle_unary_unary(self, request, servicer_context):
"""Handles a unary RPC.
Blocks until the client disconnects and then echoes.
"""
stop_event = threading.Event()
def on_rpc_end():
self._on_client_disconnect()
stop_event.set()
servicer_context.add_callback(on_rpc_end)
self._on_client_connect()
stop_event.wait()
return request
def _handle_unary_stream(self, request, servicer_context):
"""Handles a server streaming RPC.
Blocks until the client disconnects and then echoes.
"""
stop_event = threading.Event()
def on_rpc_end():
self._on_client_disconnect()
stop_event.set()
servicer_context.add_callback(on_rpc_end)
self._on_client_connect()
stop_event.wait()
yield request
def service(self, handler_call_details):
if handler_call_details.method == _signal_client.UNARY_UNARY:
return self._unary_unary_handler
elif handler_call_details.method == _signal_client.UNARY_STREAM:
return self._unary_stream_handler
else:
return None
def _read_stream(stream):
stream.seek(0)
return stream.read()
def _start_client(args, stdout, stderr):
invocation = None
if sys.executable is not None:
invocation = (sys.executable, _CLIENT_PATH) + tuple(args)
else:
invocation = (_CLIENT_PATH,) + tuple(args)
return subprocess.Popen(invocation, stdout=stdout, stderr=stderr)
class SignalHandlingTest(unittest.TestCase):
def setUp(self):
self._server = test_common.test_server()
self._port = self._server.add_insecure_port("{}:0".format(_HOST))
self._handler = _GenericHandler()
self._server.add_generic_rpc_handlers((self._handler,))
self._server.start()
def tearDown(self):
self._server.stop(None)
@unittest.skipIf(os.name == "nt", "SIGINT not supported on windows")
def testUnary(self):
"""Tests that the server unary code path does not stall signal handlers."""
server_target = "{}:{}".format(_HOST, self._port)
with tempfile.TemporaryFile(mode="r") as client_stdout:
with tempfile.TemporaryFile(mode="r") as client_stderr:
client = _start_client(
(server_target, "unary") + _GEVENT_ARG,
client_stdout,
client_stderr,
)
self._handler.await_connected_client()
client.send_signal(signal.SIGINT)
self.assertFalse(client.wait(), msg=_read_stream(client_stderr))
client_stdout.seek(0)
self.assertIn(
_signal_client.SIGTERM_MESSAGE, client_stdout.read()
)
@unittest.skipIf(os.name == "nt", "SIGINT not supported on windows")
def testStreaming(self):
"""Tests that the server streaming code path does not stall signal handlers."""
server_target = "{}:{}".format(_HOST, self._port)
with tempfile.TemporaryFile(mode="r") as client_stdout:
with tempfile.TemporaryFile(mode="r") as client_stderr:
client = _start_client(
(server_target, "streaming") + _GEVENT_ARG,
client_stdout,
client_stderr,
)
self._handler.await_connected_client()
client.send_signal(signal.SIGINT)
self.assertFalse(client.wait(), msg=_read_stream(client_stderr))
client_stdout.seek(0)
self.assertIn(
_signal_client.SIGTERM_MESSAGE, client_stdout.read()
)
@unittest.skipIf(os.name == "nt", "SIGINT not supported on windows")
def testUnaryWithException(self):
server_target = "{}:{}".format(_HOST, self._port)
with tempfile.TemporaryFile(mode="r") as client_stdout:
with tempfile.TemporaryFile(mode="r") as client_stderr:
client = _start_client(
("--exception", server_target, "unary") + _GEVENT_ARG,
client_stdout,
client_stderr,
)
self._handler.await_connected_client()
client.send_signal(signal.SIGINT)
client.wait()
self.assertEqual(0, client.returncode)
@unittest.skipIf(os.name == "nt", "SIGINT not supported on windows")
def testStreamingHandlerWithException(self):
server_target = "{}:{}".format(_HOST, self._port)
with tempfile.TemporaryFile(mode="r") as client_stdout:
with tempfile.TemporaryFile(mode="r") as client_stderr:
client = _start_client(
("--exception", server_target, "streaming") + _GEVENT_ARG,
client_stdout,
client_stderr,
)
self._handler.await_connected_client()
client.send_signal(signal.SIGINT)
client.wait()
print(_read_stream(client_stderr))
self.assertEqual(0, client.returncode)
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
| 7,891
| 35.368664
| 87
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_dns_resolver_test.py
|
# Copyright 2019 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for an actual dns resolution."""
import logging
import unittest
import grpc
from tests.unit import test_common
from tests.unit.framework.common import test_constants
_METHOD = "/ANY/METHOD"
_REQUEST = b"\x00\x00\x00"
_RESPONSE = _REQUEST
class GenericHandler(grpc.GenericRpcHandler):
def service(self, unused_handler_details):
return grpc.unary_unary_rpc_method_handler(
lambda request, unused_context: request,
)
class DNSResolverTest(unittest.TestCase):
def setUp(self):
self._server = test_common.test_server()
self._server.add_generic_rpc_handlers((GenericHandler(),))
self._port = self._server.add_insecure_port("[::]:0")
self._server.start()
def tearDown(self):
self._server.stop(None)
def test_connect_loopback(self):
# NOTE(https://github.com/grpc/grpc/issues/18422)
# In short, Gevent + C-Ares = Segfault. The C-Ares driver is not
        # supported by custom I/O managers like "gevent".
# NOTE(b/201064791): use loopback46.unittest.grpc.io since
# it returns the expected responses even when DNS64 dns servers
# are used on the test worker (and for purposes of this
# test the use of loopback4 vs loopback46 makes no difference).
with grpc.insecure_channel(
"loopback46.unittest.grpc.io:%d" % self._port
) as channel:
self.assertEqual(
channel.unary_unary(_METHOD)(
_REQUEST,
timeout=10,
),
_RESPONSE,
)
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
| 2,269
| 31.898551
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_invalid_metadata_test.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test of RPCs made against gRPC Python's application-layer API."""
import logging
import unittest
import grpc
from tests.unit.framework.common import test_constants
_SERIALIZE_REQUEST = lambda bytestring: bytestring * 2
_DESERIALIZE_REQUEST = lambda bytestring: bytestring[len(bytestring) // 2 :]
_SERIALIZE_RESPONSE = lambda bytestring: bytestring * 3
_DESERIALIZE_RESPONSE = lambda bytestring: bytestring[: len(bytestring) // 3]
_UNARY_UNARY = "/test/UnaryUnary"
_UNARY_STREAM = "/test/UnaryStream"
_STREAM_UNARY = "/test/StreamUnary"
_STREAM_STREAM = "/test/StreamStream"
def _unary_unary_multi_callable(channel):
return channel.unary_unary(_UNARY_UNARY)
def _unary_stream_multi_callable(channel):
return channel.unary_stream(
_UNARY_STREAM,
request_serializer=_SERIALIZE_REQUEST,
response_deserializer=_DESERIALIZE_RESPONSE,
)
def _stream_unary_multi_callable(channel):
return channel.stream_unary(
_STREAM_UNARY,
request_serializer=_SERIALIZE_REQUEST,
response_deserializer=_DESERIALIZE_RESPONSE,
)
def _stream_stream_multi_callable(channel):
return channel.stream_stream(_STREAM_STREAM)
class InvalidMetadataTest(unittest.TestCase):
def setUp(self):
self._channel = grpc.insecure_channel("localhost:8080")
self._unary_unary = _unary_unary_multi_callable(self._channel)
self._unary_stream = _unary_stream_multi_callable(self._channel)
self._stream_unary = _stream_unary_multi_callable(self._channel)
self._stream_stream = _stream_stream_multi_callable(self._channel)
def tearDown(self):
self._channel.close()
def testUnaryRequestBlockingUnaryResponse(self):
request = b"\x07\x08"
metadata = (("InVaLiD", "UnaryRequestBlockingUnaryResponse"),)
expected_error_details = "metadata was invalid: %s" % metadata
with self.assertRaises(ValueError) as exception_context:
self._unary_unary(request, metadata=metadata)
self.assertIn(expected_error_details, str(exception_context.exception))
def testUnaryRequestBlockingUnaryResponseWithCall(self):
request = b"\x07\x08"
metadata = (("InVaLiD", "UnaryRequestBlockingUnaryResponseWithCall"),)
expected_error_details = "metadata was invalid: %s" % metadata
with self.assertRaises(ValueError) as exception_context:
self._unary_unary.with_call(request, metadata=metadata)
self.assertIn(expected_error_details, str(exception_context.exception))
def testUnaryRequestFutureUnaryResponse(self):
request = b"\x07\x08"
metadata = (("InVaLiD", "UnaryRequestFutureUnaryResponse"),)
expected_error_details = "metadata was invalid: %s" % metadata
with self.assertRaises(ValueError) as exception_context:
            self._unary_unary.future(request, metadata=metadata)
        self.assertIn(expected_error_details, str(exception_context.exception))
def testUnaryRequestStreamResponse(self):
request = b"\x37\x58"
metadata = (("InVaLiD", "UnaryRequestStreamResponse"),)
expected_error_details = "metadata was invalid: %s" % metadata
with self.assertRaises(ValueError) as exception_context:
self._unary_stream(request, metadata=metadata)
self.assertIn(expected_error_details, str(exception_context.exception))
def testStreamRequestBlockingUnaryResponse(self):
request_iterator = (
b"\x07\x08" for _ in range(test_constants.STREAM_LENGTH)
)
metadata = (("InVaLiD", "StreamRequestBlockingUnaryResponse"),)
expected_error_details = "metadata was invalid: %s" % metadata
with self.assertRaises(ValueError) as exception_context:
self._stream_unary(request_iterator, metadata=metadata)
self.assertIn(expected_error_details, str(exception_context.exception))
def testStreamRequestBlockingUnaryResponseWithCall(self):
request_iterator = (
b"\x07\x08" for _ in range(test_constants.STREAM_LENGTH)
)
metadata = (("InVaLiD", "StreamRequestBlockingUnaryResponseWithCall"),)
expected_error_details = "metadata was invalid: %s" % metadata
multi_callable = _stream_unary_multi_callable(self._channel)
with self.assertRaises(ValueError) as exception_context:
multi_callable.with_call(request_iterator, metadata=metadata)
self.assertIn(expected_error_details, str(exception_context.exception))
def testStreamRequestFutureUnaryResponse(self):
request_iterator = (
b"\x07\x08" for _ in range(test_constants.STREAM_LENGTH)
)
metadata = (("InVaLiD", "StreamRequestFutureUnaryResponse"),)
expected_error_details = "metadata was invalid: %s" % metadata
with self.assertRaises(ValueError) as exception_context:
self._stream_unary.future(request_iterator, metadata=metadata)
self.assertIn(expected_error_details, str(exception_context.exception))
def testStreamRequestStreamResponse(self):
request_iterator = (
b"\x07\x08" for _ in range(test_constants.STREAM_LENGTH)
)
metadata = (("InVaLiD", "StreamRequestStreamResponse"),)
expected_error_details = "metadata was invalid: %s" % metadata
with self.assertRaises(ValueError) as exception_context:
self._stream_stream(request_iterator, metadata=metadata)
self.assertIn(expected_error_details, str(exception_context.exception))
def testInvalidMetadata(self):
self.assertRaises(TypeError, self._unary_unary, b"", metadata=42)
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
| 6,242
| 41.182432
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_rpc_test_helpers.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test helpers for RPC invocation tests."""
import datetime
import threading
import grpc
from grpc.framework.foundation import logging_pool
from tests.unit import test_common
from tests.unit import thread_pool
from tests.unit.framework.common import test_constants
from tests.unit.framework.common import test_control
_SERIALIZE_REQUEST = lambda bytestring: bytestring * 2
_DESERIALIZE_REQUEST = lambda bytestring: bytestring[len(bytestring) // 2 :]
_SERIALIZE_RESPONSE = lambda bytestring: bytestring * 3
_DESERIALIZE_RESPONSE = lambda bytestring: bytestring[: len(bytestring) // 3]
_UNARY_UNARY = "/test/UnaryUnary"
_UNARY_STREAM = "/test/UnaryStream"
_UNARY_STREAM_NON_BLOCKING = "/test/UnaryStreamNonBlocking"
_STREAM_UNARY = "/test/StreamUnary"
_STREAM_STREAM = "/test/StreamStream"
_STREAM_STREAM_NON_BLOCKING = "/test/StreamStreamNonBlocking"
TIMEOUT_SHORT = datetime.timedelta(seconds=4).total_seconds()
class Callback(object):
def __init__(self):
self._condition = threading.Condition()
self._value = None
self._called = False
def __call__(self, value):
with self._condition:
self._value = value
self._called = True
self._condition.notify_all()
def value(self):
with self._condition:
while not self._called:
self._condition.wait()
return self._value
class _Handler(object):
def __init__(self, control, thread_pool):
self._control = control
self._thread_pool = thread_pool
non_blocking_functions = (
self.handle_unary_stream_non_blocking,
self.handle_stream_stream_non_blocking,
)
for non_blocking_function in non_blocking_functions:
non_blocking_function.__func__.experimental_non_blocking = True
non_blocking_function.__func__.experimental_thread_pool = (
self._thread_pool
)
def handle_unary_unary(self, request, servicer_context):
self._control.control()
if servicer_context is not None:
servicer_context.set_trailing_metadata(
(
(
"testkey",
"testvalue",
),
)
)
# TODO(https://github.com/grpc/grpc/issues/8483): test the values
# returned by these methods rather than only "smoke" testing that
            # they return after having been called.
servicer_context.is_active()
servicer_context.time_remaining()
return request
def handle_unary_stream(self, request, servicer_context):
for _ in range(test_constants.STREAM_LENGTH):
self._control.control()
yield request
self._control.control()
if servicer_context is not None:
servicer_context.set_trailing_metadata(
(
(
"testkey",
"testvalue",
),
)
)
def handle_unary_stream_non_blocking(
self, request, servicer_context, on_next
):
for _ in range(test_constants.STREAM_LENGTH):
self._control.control()
on_next(request)
self._control.control()
if servicer_context is not None:
servicer_context.set_trailing_metadata(
(
(
"testkey",
"testvalue",
),
)
)
on_next(None)
def handle_stream_unary(self, request_iterator, servicer_context):
if servicer_context is not None:
servicer_context.invocation_metadata()
self._control.control()
response_elements = []
for request in request_iterator:
self._control.control()
response_elements.append(request)
self._control.control()
if servicer_context is not None:
servicer_context.set_trailing_metadata(
(
(
"testkey",
"testvalue",
),
)
)
return b"".join(response_elements)
def handle_stream_stream(self, request_iterator, servicer_context):
self._control.control()
if servicer_context is not None:
servicer_context.set_trailing_metadata(
(
(
"testkey",
"testvalue",
),
)
)
for request in request_iterator:
self._control.control()
yield request
self._control.control()
def handle_stream_stream_non_blocking(
self, request_iterator, servicer_context, on_next
):
self._control.control()
if servicer_context is not None:
servicer_context.set_trailing_metadata(
(
(
"testkey",
"testvalue",
),
)
)
for request in request_iterator:
self._control.control()
on_next(request)
self._control.control()
on_next(None)
class _MethodHandler(grpc.RpcMethodHandler):
def __init__(
self,
request_streaming,
response_streaming,
request_deserializer,
response_serializer,
unary_unary,
unary_stream,
stream_unary,
stream_stream,
):
self.request_streaming = request_streaming
self.response_streaming = response_streaming
self.request_deserializer = request_deserializer
self.response_serializer = response_serializer
self.unary_unary = unary_unary
self.unary_stream = unary_stream
self.stream_unary = stream_unary
self.stream_stream = stream_stream
class _GenericHandler(grpc.GenericRpcHandler):
def __init__(self, handler):
self._handler = handler
def service(self, handler_call_details):
if handler_call_details.method == _UNARY_UNARY:
return _MethodHandler(
False,
False,
None,
None,
self._handler.handle_unary_unary,
None,
None,
None,
)
elif handler_call_details.method == _UNARY_STREAM:
return _MethodHandler(
False,
True,
_DESERIALIZE_REQUEST,
_SERIALIZE_RESPONSE,
None,
self._handler.handle_unary_stream,
None,
None,
)
elif handler_call_details.method == _UNARY_STREAM_NON_BLOCKING:
return _MethodHandler(
False,
True,
_DESERIALIZE_REQUEST,
_SERIALIZE_RESPONSE,
None,
self._handler.handle_unary_stream_non_blocking,
None,
None,
)
elif handler_call_details.method == _STREAM_UNARY:
return _MethodHandler(
True,
False,
_DESERIALIZE_REQUEST,
_SERIALIZE_RESPONSE,
None,
None,
self._handler.handle_stream_unary,
None,
)
elif handler_call_details.method == _STREAM_STREAM:
return _MethodHandler(
True,
True,
None,
None,
None,
None,
None,
self._handler.handle_stream_stream,
)
elif handler_call_details.method == _STREAM_STREAM_NON_BLOCKING:
return _MethodHandler(
True,
True,
None,
None,
None,
None,
None,
self._handler.handle_stream_stream_non_blocking,
)
else:
return None
def unary_unary_multi_callable(channel):
return channel.unary_unary(_UNARY_UNARY)
def unary_stream_multi_callable(channel):
return channel.unary_stream(
_UNARY_STREAM,
request_serializer=_SERIALIZE_REQUEST,
response_deserializer=_DESERIALIZE_RESPONSE,
)
def unary_stream_non_blocking_multi_callable(channel):
return channel.unary_stream(
_UNARY_STREAM_NON_BLOCKING,
request_serializer=_SERIALIZE_REQUEST,
response_deserializer=_DESERIALIZE_RESPONSE,
)
def stream_unary_multi_callable(channel):
return channel.stream_unary(
_STREAM_UNARY,
request_serializer=_SERIALIZE_REQUEST,
response_deserializer=_DESERIALIZE_RESPONSE,
)
def stream_stream_multi_callable(channel):
return channel.stream_stream(_STREAM_STREAM)
def stream_stream_non_blocking_multi_callable(channel):
return channel.stream_stream(_STREAM_STREAM_NON_BLOCKING)
class BaseRPCTest(object):
def setUp(self):
self._control = test_control.PauseFailControl()
self._thread_pool = thread_pool.RecordingThreadPool(max_workers=None)
self._handler = _Handler(self._control, self._thread_pool)
self._server = test_common.test_server()
port = self._server.add_insecure_port("[::]:0")
self._server.add_generic_rpc_handlers((_GenericHandler(self._handler),))
self._server.start()
self._channel = grpc.insecure_channel("localhost:%d" % port)
def tearDown(self):
self._server.stop(None)
self._channel.close()
def _consume_one_stream_response_unary_request(self, multi_callable):
request = b"\x57\x38"
response_iterator = multi_callable(
request,
metadata=(("test", "ConsumingOneStreamResponseUnaryRequest"),),
)
next(response_iterator)
def _consume_some_but_not_all_stream_responses_unary_request(
self, multi_callable
):
request = b"\x57\x38"
response_iterator = multi_callable(
request,
metadata=(
("test", "ConsumingSomeButNotAllStreamResponsesUnaryRequest"),
),
)
for _ in range(test_constants.STREAM_LENGTH // 2):
next(response_iterator)
def _consume_some_but_not_all_stream_responses_stream_request(
self, multi_callable
):
requests = tuple(
b"\x67\x88" for _ in range(test_constants.STREAM_LENGTH)
)
request_iterator = iter(requests)
response_iterator = multi_callable(
request_iterator,
metadata=(
("test", "ConsumingSomeButNotAllStreamResponsesStreamRequest"),
),
)
for _ in range(test_constants.STREAM_LENGTH // 2):
next(response_iterator)
def _consume_too_many_stream_responses_stream_request(self, multi_callable):
requests = tuple(
b"\x67\x88" for _ in range(test_constants.STREAM_LENGTH)
)
request_iterator = iter(requests)
response_iterator = multi_callable(
request_iterator,
metadata=(
("test", "ConsumingTooManyStreamResponsesStreamRequest"),
),
)
for _ in range(test_constants.STREAM_LENGTH):
next(response_iterator)
for _ in range(test_constants.STREAM_LENGTH):
with self.assertRaises(StopIteration):
next(response_iterator)
self.assertIsNotNone(response_iterator.initial_metadata())
self.assertIs(grpc.StatusCode.OK, response_iterator.code())
self.assertIsNotNone(response_iterator.details())
self.assertIsNotNone(response_iterator.trailing_metadata())
def _cancelled_unary_request_stream_response(self, multi_callable):
request = b"\x07\x19"
with self._control.pause():
response_iterator = multi_callable(
request,
metadata=(("test", "CancelledUnaryRequestStreamResponse"),),
)
self._control.block_until_paused()
response_iterator.cancel()
with self.assertRaises(grpc.RpcError) as exception_context:
next(response_iterator)
self.assertIs(
grpc.StatusCode.CANCELLED, exception_context.exception.code()
)
self.assertIsNotNone(response_iterator.initial_metadata())
self.assertIs(grpc.StatusCode.CANCELLED, response_iterator.code())
self.assertIsNotNone(response_iterator.details())
self.assertIsNotNone(response_iterator.trailing_metadata())
def _cancelled_stream_request_stream_response(self, multi_callable):
requests = tuple(
b"\x07\x08" for _ in range(test_constants.STREAM_LENGTH)
)
request_iterator = iter(requests)
with self._control.pause():
response_iterator = multi_callable(
request_iterator,
metadata=(("test", "CancelledStreamRequestStreamResponse"),),
)
response_iterator.cancel()
with self.assertRaises(grpc.RpcError):
next(response_iterator)
self.assertIsNotNone(response_iterator.initial_metadata())
self.assertIs(grpc.StatusCode.CANCELLED, response_iterator.code())
self.assertIsNotNone(response_iterator.details())
self.assertIsNotNone(response_iterator.trailing_metadata())
def _expired_unary_request_stream_response(self, multi_callable):
request = b"\x07\x19"
with self._control.pause():
with self.assertRaises(grpc.RpcError) as exception_context:
response_iterator = multi_callable(
request,
timeout=test_constants.SHORT_TIMEOUT,
metadata=(("test", "ExpiredUnaryRequestStreamResponse"),),
)
next(response_iterator)
self.assertIs(
grpc.StatusCode.DEADLINE_EXCEEDED,
exception_context.exception.code(),
)
self.assertIs(
grpc.StatusCode.DEADLINE_EXCEEDED, response_iterator.code()
)
def _expired_stream_request_stream_response(self, multi_callable):
requests = tuple(
b"\x67\x18" for _ in range(test_constants.STREAM_LENGTH)
)
request_iterator = iter(requests)
with self._control.pause():
with self.assertRaises(grpc.RpcError) as exception_context:
response_iterator = multi_callable(
request_iterator,
timeout=test_constants.SHORT_TIMEOUT,
metadata=(("test", "ExpiredStreamRequestStreamResponse"),),
)
next(response_iterator)
self.assertIs(
grpc.StatusCode.DEADLINE_EXCEEDED,
exception_context.exception.code(),
)
self.assertIs(
grpc.StatusCode.DEADLINE_EXCEEDED, response_iterator.code()
)
def _failed_unary_request_stream_response(self, multi_callable):
request = b"\x37\x17"
with self.assertRaises(grpc.RpcError) as exception_context:
with self._control.fail():
response_iterator = multi_callable(
request,
metadata=(("test", "FailedUnaryRequestStreamResponse"),),
)
next(response_iterator)
self.assertIs(
grpc.StatusCode.UNKNOWN, exception_context.exception.code()
)
def _failed_stream_request_stream_response(self, multi_callable):
requests = tuple(
b"\x67\x88" for _ in range(test_constants.STREAM_LENGTH)
)
request_iterator = iter(requests)
with self._control.fail():
with self.assertRaises(grpc.RpcError) as exception_context:
response_iterator = multi_callable(
request_iterator,
metadata=(("test", "FailedStreamRequestStreamResponse"),),
)
tuple(response_iterator)
self.assertIs(
grpc.StatusCode.UNKNOWN, exception_context.exception.code()
)
self.assertIs(grpc.StatusCode.UNKNOWN, response_iterator.code())
def _ignored_unary_stream_request_future_unary_response(
self, multi_callable
):
request = b"\x37\x17"
multi_callable(
request, metadata=(("test", "IgnoredUnaryRequestStreamResponse"),)
)
def _ignored_stream_request_stream_response(self, multi_callable):
requests = tuple(
b"\x67\x88" for _ in range(test_constants.STREAM_LENGTH)
)
request_iterator = iter(requests)
multi_callable(
request_iterator,
metadata=(("test", "IgnoredStreamRequestStreamResponse"),),
)
| 17,716
| 32.302632
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_auth_test.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests of standard AuthMetadataPlugins."""
import collections
import logging
import threading
import unittest
from grpc import _auth
class MockGoogleCreds(object):
def get_access_token(self):
token = collections.namedtuple(
"MockAccessTokenInfo", ("access_token", "expires_in")
)
token.access_token = "token"
return token
class MockExceptionGoogleCreds(object):
def get_access_token(self):
raise Exception()
class GoogleCallCredentialsTest(unittest.TestCase):
def test_google_call_credentials_success(self):
callback_event = threading.Event()
def mock_callback(metadata, error):
self.assertEqual(metadata, (("authorization", "Bearer token"),))
self.assertIsNone(error)
callback_event.set()
call_creds = _auth.GoogleCallCredentials(MockGoogleCreds())
call_creds(None, mock_callback)
self.assertTrue(callback_event.wait(1.0))
def test_google_call_credentials_error(self):
callback_event = threading.Event()
def mock_callback(metadata, error):
self.assertIsNotNone(error)
callback_event.set()
call_creds = _auth.GoogleCallCredentials(MockExceptionGoogleCreds())
call_creds(None, mock_callback)
self.assertTrue(callback_event.wait(1.0))
class AccessTokenAuthMetadataPluginTest(unittest.TestCase):
def test_google_call_credentials_success(self):
callback_event = threading.Event()
def mock_callback(metadata, error):
self.assertEqual(metadata, (("authorization", "Bearer token"),))
self.assertIsNone(error)
callback_event.set()
metadata_plugin = _auth.AccessTokenAuthMetadataPlugin("token")
metadata_plugin(None, mock_callback)
self.assertTrue(callback_event.wait(1.0))
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
| 2,535
| 30.7
| 76
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_session_cache_test.py
|
# Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests experimental TLS Session Resumption API"""
import logging
import pickle
import unittest
import grpc
from grpc import _channel
from grpc.experimental import session_cache
from tests.unit import resources
from tests.unit import test_common
_REQUEST = b"\x00\x00\x00"
_RESPONSE = b"\x00\x00\x00"
_UNARY_UNARY = "/test/UnaryUnary"
_SERVER_HOST_OVERRIDE = "foo.test.google.fr"
_ID = "id"
_ID_KEY = "id_key"
_AUTH_CTX = "auth_ctx"
_PRIVATE_KEY = resources.private_key()
_CERTIFICATE_CHAIN = resources.certificate_chain()
_TEST_ROOT_CERTIFICATES = resources.test_root_certificates()
_SERVER_CERTS = ((_PRIVATE_KEY, _CERTIFICATE_CHAIN),)
_PROPERTY_OPTIONS = (
(
"grpc.ssl_target_name_override",
_SERVER_HOST_OVERRIDE,
),
)
def handle_unary_unary(request, servicer_context):
return pickle.dumps(
{
_ID: servicer_context.peer_identities(),
_ID_KEY: servicer_context.peer_identity_key(),
_AUTH_CTX: servicer_context.auth_context(),
}
)
def start_secure_server():
handler = grpc.method_handlers_generic_handler(
"test",
{"UnaryUnary": grpc.unary_unary_rpc_method_handler(handle_unary_unary)},
)
server = test_common.test_server()
server.add_generic_rpc_handlers((handler,))
server_cred = grpc.ssl_server_credentials(_SERVER_CERTS)
port = server.add_secure_port("[::]:0", server_cred)
server.start()
return server, port
class SSLSessionCacheTest(unittest.TestCase):
def _do_one_shot_client_rpc(
self, channel_creds, channel_options, port, expect_ssl_session_reused
):
channel = grpc.secure_channel(
"localhost:{}".format(port), channel_creds, options=channel_options
)
response = channel.unary_unary(_UNARY_UNARY)(_REQUEST)
auth_data = pickle.loads(response)
self.assertEqual(
expect_ssl_session_reused,
auth_data[_AUTH_CTX]["ssl_session_reused"],
)
channel.close()
def testSSLSessionCacheLRU(self):
server_1, port_1 = start_secure_server()
cache = session_cache.ssl_session_cache_lru(1)
channel_creds = grpc.ssl_channel_credentials(
root_certificates=_TEST_ROOT_CERTIFICATES
)
channel_options = _PROPERTY_OPTIONS + (
("grpc.ssl_session_cache", cache),
)
# Initial connection has no session to resume
self._do_one_shot_client_rpc(
channel_creds,
channel_options,
port_1,
expect_ssl_session_reused=[b"false"],
)
# Connection to server_1 resumes from initial session
self._do_one_shot_client_rpc(
channel_creds,
channel_options,
port_1,
expect_ssl_session_reused=[b"true"],
)
# Connection to a different server with the same name overwrites the cache entry
server_2, port_2 = start_secure_server()
self._do_one_shot_client_rpc(
channel_creds,
channel_options,
port_2,
expect_ssl_session_reused=[b"false"],
)
self._do_one_shot_client_rpc(
channel_creds,
channel_options,
port_2,
expect_ssl_session_reused=[b"true"],
)
server_2.stop(None)
# Connection to server_1 now falls back to full TLS handshake
self._do_one_shot_client_rpc(
channel_creds,
channel_options,
port_1,
expect_ssl_session_reused=[b"false"],
)
# Re-creating server_1 causes old sessions to become invalid
server_1.stop(None)
server_1, port_1 = start_secure_server()
# Old sessions should no longer be valid
self._do_one_shot_client_rpc(
channel_creds,
channel_options,
port_1,
expect_ssl_session_reused=[b"false"],
)
# Resumption should work for subsequent connections
self._do_one_shot_client_rpc(
channel_creds,
channel_options,
port_1,
expect_ssl_session_reused=[b"true"],
)
server_1.stop(None)
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
| 4,903
| 28.902439
| 88
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_rpc_part_2_test.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test of RPCs made against gRPC Python's application-layer API."""
from concurrent import futures
import itertools
import logging
import threading
import unittest
import grpc
from grpc.framework.foundation import logging_pool
from tests.unit._rpc_test_helpers import (
stream_stream_non_blocking_multi_callable,
)
from tests.unit._rpc_test_helpers import (
unary_stream_non_blocking_multi_callable,
)
from tests.unit._rpc_test_helpers import BaseRPCTest
from tests.unit._rpc_test_helpers import Callback
from tests.unit._rpc_test_helpers import TIMEOUT_SHORT
from tests.unit._rpc_test_helpers import stream_stream_multi_callable
from tests.unit._rpc_test_helpers import stream_unary_multi_callable
from tests.unit._rpc_test_helpers import unary_stream_multi_callable
from tests.unit._rpc_test_helpers import unary_unary_multi_callable
from tests.unit.framework.common import test_constants
class RPCPart2Test(BaseRPCTest, unittest.TestCase):
def testDefaultThreadPoolIsUsed(self):
self._consume_one_stream_response_unary_request(
unary_stream_multi_callable(self._channel)
)
self.assertFalse(self._thread_pool.was_used())
def testExperimentalThreadPoolIsUsed(self):
self._consume_one_stream_response_unary_request(
unary_stream_non_blocking_multi_callable(self._channel)
)
self.assertTrue(self._thread_pool.was_used())
def testUnrecognizedMethod(self):
request = b"abc"
with self.assertRaises(grpc.RpcError) as exception_context:
self._channel.unary_unary("NoSuchMethod")(request)
self.assertEqual(
grpc.StatusCode.UNIMPLEMENTED, exception_context.exception.code()
)
def testSuccessfulUnaryRequestBlockingUnaryResponse(self):
request = b"\x07\x08"
expected_response = self._handler.handle_unary_unary(request, None)
multi_callable = unary_unary_multi_callable(self._channel)
response = multi_callable(
request,
metadata=(("test", "SuccessfulUnaryRequestBlockingUnaryResponse"),),
)
self.assertEqual(expected_response, response)
def testSuccessfulUnaryRequestBlockingUnaryResponseWithCall(self):
request = b"\x07\x08"
expected_response = self._handler.handle_unary_unary(request, None)
multi_callable = unary_unary_multi_callable(self._channel)
response, call = multi_callable.with_call(
request,
metadata=(
("test", "SuccessfulUnaryRequestBlockingUnaryResponseWithCall"),
),
)
self.assertEqual(expected_response, response)
self.assertIs(grpc.StatusCode.OK, call.code())
self.assertEqual("", call.debug_error_string())
def testSuccessfulUnaryRequestFutureUnaryResponse(self):
request = b"\x07\x08"
expected_response = self._handler.handle_unary_unary(request, None)
multi_callable = unary_unary_multi_callable(self._channel)
response_future = multi_callable.future(
request,
metadata=(("test", "SuccessfulUnaryRequestFutureUnaryResponse"),),
)
response = response_future.result()
self.assertIsInstance(response_future, grpc.Future)
self.assertIsInstance(response_future, grpc.Call)
self.assertEqual(expected_response, response)
self.assertIsNone(response_future.exception())
self.assertIsNone(response_future.traceback())
def testSuccessfulUnaryRequestStreamResponse(self):
request = b"\x37\x58"
expected_responses = tuple(
self._handler.handle_unary_stream(request, None)
)
multi_callable = unary_stream_multi_callable(self._channel)
response_iterator = multi_callable(
request,
metadata=(("test", "SuccessfulUnaryRequestStreamResponse"),),
)
responses = tuple(response_iterator)
self.assertSequenceEqual(expected_responses, responses)
def testSuccessfulStreamRequestBlockingUnaryResponse(self):
requests = tuple(
b"\x07\x08" for _ in range(test_constants.STREAM_LENGTH)
)
expected_response = self._handler.handle_stream_unary(
iter(requests), None
)
request_iterator = iter(requests)
multi_callable = stream_unary_multi_callable(self._channel)
response = multi_callable(
request_iterator,
metadata=(
("test", "SuccessfulStreamRequestBlockingUnaryResponse"),
),
)
self.assertEqual(expected_response, response)
def testSuccessfulStreamRequestBlockingUnaryResponseWithCall(self):
requests = tuple(
b"\x07\x08" for _ in range(test_constants.STREAM_LENGTH)
)
expected_response = self._handler.handle_stream_unary(
iter(requests), None
)
request_iterator = iter(requests)
multi_callable = stream_unary_multi_callable(self._channel)
response, call = multi_callable.with_call(
request_iterator,
metadata=(
(
"test",
"SuccessfulStreamRequestBlockingUnaryResponseWithCall",
),
),
)
self.assertEqual(expected_response, response)
self.assertIs(grpc.StatusCode.OK, call.code())
def testSuccessfulStreamRequestFutureUnaryResponse(self):
requests = tuple(
b"\x07\x08" for _ in range(test_constants.STREAM_LENGTH)
)
expected_response = self._handler.handle_stream_unary(
iter(requests), None
)
request_iterator = iter(requests)
multi_callable = stream_unary_multi_callable(self._channel)
response_future = multi_callable.future(
request_iterator,
metadata=(("test", "SuccessfulStreamRequestFutureUnaryResponse"),),
)
response = response_future.result()
self.assertEqual(expected_response, response)
self.assertIsNone(response_future.exception())
self.assertIsNone(response_future.traceback())
def testSuccessfulStreamRequestStreamResponse(self):
requests = tuple(
b"\x77\x58" for _ in range(test_constants.STREAM_LENGTH)
)
expected_responses = tuple(
self._handler.handle_stream_stream(iter(requests), None)
)
request_iterator = iter(requests)
multi_callable = stream_stream_multi_callable(self._channel)
response_iterator = multi_callable(
request_iterator,
metadata=(("test", "SuccessfulStreamRequestStreamResponse"),),
)
responses = tuple(response_iterator)
self.assertSequenceEqual(expected_responses, responses)
def testSequentialInvocations(self):
first_request = b"\x07\x08"
second_request = b"\x0809"
expected_first_response = self._handler.handle_unary_unary(
first_request, None
)
expected_second_response = self._handler.handle_unary_unary(
second_request, None
)
multi_callable = unary_unary_multi_callable(self._channel)
first_response = multi_callable(
first_request, metadata=(("test", "SequentialInvocations"),)
)
second_response = multi_callable(
second_request, metadata=(("test", "SequentialInvocations"),)
)
self.assertEqual(expected_first_response, first_response)
self.assertEqual(expected_second_response, second_response)
def testConcurrentBlockingInvocations(self):
pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
requests = tuple(
b"\x07\x08" for _ in range(test_constants.STREAM_LENGTH)
)
expected_response = self._handler.handle_stream_unary(
iter(requests), None
)
expected_responses = [
expected_response
] * test_constants.THREAD_CONCURRENCY
response_futures = [None] * test_constants.THREAD_CONCURRENCY
multi_callable = stream_unary_multi_callable(self._channel)
for index in range(test_constants.THREAD_CONCURRENCY):
request_iterator = iter(requests)
response_future = pool.submit(
multi_callable,
request_iterator,
metadata=(("test", "ConcurrentBlockingInvocations"),),
)
response_futures[index] = response_future
responses = tuple(
response_future.result() for response_future in response_futures
)
pool.shutdown(wait=True)
self.assertSequenceEqual(expected_responses, responses)
def testConcurrentFutureInvocations(self):
requests = tuple(
b"\x07\x08" for _ in range(test_constants.STREAM_LENGTH)
)
expected_response = self._handler.handle_stream_unary(
iter(requests), None
)
expected_responses = [
expected_response
] * test_constants.THREAD_CONCURRENCY
response_futures = [None] * test_constants.THREAD_CONCURRENCY
multi_callable = stream_unary_multi_callable(self._channel)
for index in range(test_constants.THREAD_CONCURRENCY):
request_iterator = iter(requests)
response_future = multi_callable.future(
request_iterator,
metadata=(("test", "ConcurrentFutureInvocations"),),
)
response_futures[index] = response_future
responses = tuple(
response_future.result() for response_future in response_futures
)
self.assertSequenceEqual(expected_responses, responses)
def testWaitingForSomeButNotAllConcurrentFutureInvocations(self):
pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
request = b"\x67\x68"
expected_response = self._handler.handle_unary_unary(request, None)
response_futures = [None] * test_constants.THREAD_CONCURRENCY
lock = threading.Lock()
test_is_running_cell = [True]
def wrap_future(future):
def wrap():
try:
return future.result()
except grpc.RpcError:
with lock:
if test_is_running_cell[0]:
raise
return None
return wrap
multi_callable = unary_unary_multi_callable(self._channel)
for index in range(test_constants.THREAD_CONCURRENCY):
inner_response_future = multi_callable.future(
request,
metadata=(
(
"test",
"WaitingForSomeButNotAllConcurrentFutureInvocations",
),
),
)
outer_response_future = pool.submit(
wrap_future(inner_response_future)
)
response_futures[index] = outer_response_future
some_completed_response_futures_iterator = itertools.islice(
futures.as_completed(response_futures),
test_constants.THREAD_CONCURRENCY // 2,
)
for response_future in some_completed_response_futures_iterator:
self.assertEqual(expected_response, response_future.result())
with lock:
test_is_running_cell[0] = False
def testConsumingOneStreamResponseUnaryRequest(self):
self._consume_one_stream_response_unary_request(
unary_stream_multi_callable(self._channel)
)
def testConsumingOneStreamResponseUnaryRequestNonBlocking(self):
self._consume_one_stream_response_unary_request(
unary_stream_non_blocking_multi_callable(self._channel)
)
def testConsumingSomeButNotAllStreamResponsesUnaryRequest(self):
self._consume_some_but_not_all_stream_responses_unary_request(
unary_stream_multi_callable(self._channel)
)
def testConsumingSomeButNotAllStreamResponsesUnaryRequestNonBlocking(self):
self._consume_some_but_not_all_stream_responses_unary_request(
unary_stream_non_blocking_multi_callable(self._channel)
)
def testConsumingSomeButNotAllStreamResponsesStreamRequest(self):
self._consume_some_but_not_all_stream_responses_stream_request(
stream_stream_multi_callable(self._channel)
)
def testConsumingSomeButNotAllStreamResponsesStreamRequestNonBlocking(self):
self._consume_some_but_not_all_stream_responses_stream_request(
stream_stream_non_blocking_multi_callable(self._channel)
)
def testConsumingTooManyStreamResponsesStreamRequest(self):
self._consume_too_many_stream_responses_stream_request(
stream_stream_multi_callable(self._channel)
)
def testConsumingTooManyStreamResponsesStreamRequestNonBlocking(self):
self._consume_too_many_stream_responses_stream_request(
stream_stream_non_blocking_multi_callable(self._channel)
)
def testCancelledUnaryRequestUnaryResponse(self):
request = b"\x07\x17"
multi_callable = unary_unary_multi_callable(self._channel)
with self._control.pause():
response_future = multi_callable.future(
request,
metadata=(("test", "CancelledUnaryRequestUnaryResponse"),),
)
response_future.cancel()
self.assertIs(grpc.StatusCode.CANCELLED, response_future.code())
self.assertTrue(response_future.cancelled())
with self.assertRaises(grpc.FutureCancelledError):
response_future.result()
with self.assertRaises(grpc.FutureCancelledError):
response_future.exception()
with self.assertRaises(grpc.FutureCancelledError):
response_future.traceback()
def testCancelledUnaryRequestStreamResponse(self):
self._cancelled_unary_request_stream_response(
unary_stream_multi_callable(self._channel)
)
def testCancelledUnaryRequestStreamResponseNonBlocking(self):
self._cancelled_unary_request_stream_response(
unary_stream_non_blocking_multi_callable(self._channel)
)
def testCancelledStreamRequestUnaryResponse(self):
requests = tuple(
b"\x07\x08" for _ in range(test_constants.STREAM_LENGTH)
)
request_iterator = iter(requests)
multi_callable = stream_unary_multi_callable(self._channel)
with self._control.pause():
response_future = multi_callable.future(
request_iterator,
metadata=(("test", "CancelledStreamRequestUnaryResponse"),),
)
self._control.block_until_paused()
response_future.cancel()
self.assertIs(grpc.StatusCode.CANCELLED, response_future.code())
self.assertTrue(response_future.cancelled())
with self.assertRaises(grpc.FutureCancelledError):
response_future.result()
with self.assertRaises(grpc.FutureCancelledError):
response_future.exception()
with self.assertRaises(grpc.FutureCancelledError):
response_future.traceback()
self.assertIsNotNone(response_future.initial_metadata())
self.assertIsNotNone(response_future.details())
self.assertIsNotNone(response_future.trailing_metadata())
def testCancelledStreamRequestStreamResponse(self):
self._cancelled_stream_request_stream_response(
stream_stream_multi_callable(self._channel)
)
def testCancelledStreamRequestStreamResponseNonBlocking(self):
self._cancelled_stream_request_stream_response(
stream_stream_non_blocking_multi_callable(self._channel)
)
def testExpiredUnaryRequestBlockingUnaryResponse(self):
request = b"\x07\x17"
multi_callable = unary_unary_multi_callable(self._channel)
with self._control.pause():
with self.assertRaises(grpc.RpcError) as exception_context:
multi_callable.with_call(
request,
timeout=TIMEOUT_SHORT,
metadata=(
("test", "ExpiredUnaryRequestBlockingUnaryResponse"),
),
)
self.assertIsInstance(exception_context.exception, grpc.Call)
self.assertIsNotNone(exception_context.exception.initial_metadata())
self.assertIs(
grpc.StatusCode.DEADLINE_EXCEEDED,
exception_context.exception.code(),
)
self.assertIsNotNone(exception_context.exception.details())
self.assertIsNotNone(exception_context.exception.trailing_metadata())
def testExpiredUnaryRequestFutureUnaryResponse(self):
request = b"\x07\x17"
callback = Callback()
multi_callable = unary_unary_multi_callable(self._channel)
with self._control.pause():
response_future = multi_callable.future(
request,
timeout=TIMEOUT_SHORT,
metadata=(("test", "ExpiredUnaryRequestFutureUnaryResponse"),),
)
response_future.add_done_callback(callback)
value_passed_to_callback = callback.value()
self.assertIs(response_future, value_passed_to_callback)
self.assertIsNotNone(response_future.initial_metadata())
self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED, response_future.code())
self.assertIsNotNone(response_future.details())
self.assertIsNotNone(response_future.trailing_metadata())
with self.assertRaises(grpc.RpcError) as exception_context:
response_future.result()
self.assertIs(
grpc.StatusCode.DEADLINE_EXCEEDED,
exception_context.exception.code(),
)
self.assertIsInstance(response_future.exception(), grpc.RpcError)
self.assertIsNotNone(response_future.traceback())
self.assertIs(
grpc.StatusCode.DEADLINE_EXCEEDED,
response_future.exception().code(),
)
def testExpiredUnaryRequestStreamResponse(self):
self._expired_unary_request_stream_response(
unary_stream_multi_callable(self._channel)
)
def testExpiredUnaryRequestStreamResponseNonBlocking(self):
self._expired_unary_request_stream_response(
unary_stream_non_blocking_multi_callable(self._channel)
)
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
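# Illustrative sketch (not one of the tests above): the "wait for some but not
# all" pattern in testWaitingForSomeButNotAllConcurrentFutureInvocations boils
# down to slicing futures.as_completed, which yields futures in completion
# order.
def _first_n_results(response_futures, n):
    completed = itertools.islice(futures.as_completed(response_futures), n)
    return [response_future.result() for response_future in completed]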
| 19,302
| 37.452191
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_metadata_test.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests server and client side metadata API."""
import logging
import unittest
import weakref
import grpc
from grpc import _channel
from tests.unit import test_common
from tests.unit.framework.common import test_constants
_CHANNEL_ARGS = (
("grpc.primary_user_agent", "primary-agent"),
("grpc.secondary_user_agent", "secondary-agent"),
)
_REQUEST = b"\x00\x00\x00"
_RESPONSE = b"\x00\x00\x00"
_UNARY_UNARY = "/test/UnaryUnary"
_UNARY_STREAM = "/test/UnaryStream"
_STREAM_UNARY = "/test/StreamUnary"
_STREAM_STREAM = "/test/StreamStream"
_INVOCATION_METADATA = (
(
b"invocation-md-key",
"invocation-md-value",
),
(
"invocation-md-key-bin",
b"\x00\x01",
),
)
_EXPECTED_INVOCATION_METADATA = (
(
"invocation-md-key",
"invocation-md-value",
),
(
"invocation-md-key-bin",
b"\x00\x01",
),
)
_INITIAL_METADATA = (
(b"initial-md-key", "initial-md-value"),
("initial-md-key-bin", b"\x00\x02"),
)
_EXPECTED_INITIAL_METADATA = (
(
"initial-md-key",
"initial-md-value",
),
(
"initial-md-key-bin",
b"\x00\x02",
),
)
_TRAILING_METADATA = (
(
"server-trailing-md-key",
"server-trailing-md-value",
),
(
"server-trailing-md-key-bin",
b"\x00\x03",
),
)
_EXPECTED_TRAILING_METADATA = _TRAILING_METADATA
def _user_agent(metadata):
for key, val in metadata:
if key == "user-agent":
return val
raise KeyError("No user agent!")
def validate_client_metadata(test, servicer_context):
invocation_metadata = servicer_context.invocation_metadata()
test.assertTrue(
test_common.metadata_transmitted(
_EXPECTED_INVOCATION_METADATA, invocation_metadata
)
)
user_agent = _user_agent(invocation_metadata)
test.assertTrue(
user_agent.startswith("primary-agent " + _channel._USER_AGENT)
)
test.assertTrue(user_agent.endswith("secondary-agent"))
def handle_unary_unary(test, request, servicer_context):
validate_client_metadata(test, servicer_context)
servicer_context.send_initial_metadata(_INITIAL_METADATA)
servicer_context.set_trailing_metadata(_TRAILING_METADATA)
return _RESPONSE
def handle_unary_stream(test, request, servicer_context):
validate_client_metadata(test, servicer_context)
servicer_context.send_initial_metadata(_INITIAL_METADATA)
servicer_context.set_trailing_metadata(_TRAILING_METADATA)
for _ in range(test_constants.STREAM_LENGTH):
yield _RESPONSE
def handle_stream_unary(test, request_iterator, servicer_context):
validate_client_metadata(test, servicer_context)
servicer_context.send_initial_metadata(_INITIAL_METADATA)
servicer_context.set_trailing_metadata(_TRAILING_METADATA)
# TODO(issue:#6891) We should be able to remove this loop
for request in request_iterator:
pass
return _RESPONSE
def handle_stream_stream(test, request_iterator, servicer_context):
validate_client_metadata(test, servicer_context)
servicer_context.send_initial_metadata(_INITIAL_METADATA)
servicer_context.set_trailing_metadata(_TRAILING_METADATA)
# TODO(issue:#6891) We should be able to remove this loop,
# and replace with return; yield
for request in request_iterator:
yield _RESPONSE
class _MethodHandler(grpc.RpcMethodHandler):
def __init__(self, test, request_streaming, response_streaming):
self.request_streaming = request_streaming
self.response_streaming = response_streaming
self.request_deserializer = None
self.response_serializer = None
self.unary_unary = None
self.unary_stream = None
self.stream_unary = None
self.stream_stream = None
if self.request_streaming and self.response_streaming:
self.stream_stream = lambda x, y: handle_stream_stream(test, x, y)
elif self.request_streaming:
self.stream_unary = lambda x, y: handle_stream_unary(test, x, y)
elif self.response_streaming:
self.unary_stream = lambda x, y: handle_unary_stream(test, x, y)
else:
self.unary_unary = lambda x, y: handle_unary_unary(test, x, y)
class _GenericHandler(grpc.GenericRpcHandler):
def __init__(self, test):
self._test = test
def service(self, handler_call_details):
if handler_call_details.method == _UNARY_UNARY:
return _MethodHandler(self._test, False, False)
elif handler_call_details.method == _UNARY_STREAM:
return _MethodHandler(self._test, False, True)
elif handler_call_details.method == _STREAM_UNARY:
return _MethodHandler(self._test, True, False)
elif handler_call_details.method == _STREAM_STREAM:
return _MethodHandler(self._test, True, True)
else:
return None
class MetadataTest(unittest.TestCase):
def setUp(self):
self._server = test_common.test_server()
self._server.add_generic_rpc_handlers(
(_GenericHandler(weakref.proxy(self)),)
)
port = self._server.add_insecure_port("[::]:0")
self._server.start()
self._channel = grpc.insecure_channel(
"localhost:%d" % port, options=_CHANNEL_ARGS
)
def tearDown(self):
self._server.stop(0)
self._channel.close()
def testUnaryUnary(self):
multi_callable = self._channel.unary_unary(_UNARY_UNARY)
unused_response, call = multi_callable.with_call(
_REQUEST, metadata=_INVOCATION_METADATA
)
self.assertTrue(
test_common.metadata_transmitted(
_EXPECTED_INITIAL_METADATA, call.initial_metadata()
)
)
self.assertTrue(
test_common.metadata_transmitted(
_EXPECTED_TRAILING_METADATA, call.trailing_metadata()
)
)
def testUnaryStream(self):
multi_callable = self._channel.unary_stream(_UNARY_STREAM)
call = multi_callable(_REQUEST, metadata=_INVOCATION_METADATA)
self.assertTrue(
test_common.metadata_transmitted(
_EXPECTED_INITIAL_METADATA, call.initial_metadata()
)
)
for _ in call:
pass
self.assertTrue(
test_common.metadata_transmitted(
_EXPECTED_TRAILING_METADATA, call.trailing_metadata()
)
)
def testStreamUnary(self):
multi_callable = self._channel.stream_unary(_STREAM_UNARY)
unused_response, call = multi_callable.with_call(
iter([_REQUEST] * test_constants.STREAM_LENGTH),
metadata=_INVOCATION_METADATA,
)
self.assertTrue(
test_common.metadata_transmitted(
_EXPECTED_INITIAL_METADATA, call.initial_metadata()
)
)
self.assertTrue(
test_common.metadata_transmitted(
_EXPECTED_TRAILING_METADATA, call.trailing_metadata()
)
)
def testStreamStream(self):
multi_callable = self._channel.stream_stream(_STREAM_STREAM)
call = multi_callable(
iter([_REQUEST] * test_constants.STREAM_LENGTH),
metadata=_INVOCATION_METADATA,
)
self.assertTrue(
test_common.metadata_transmitted(
_EXPECTED_INITIAL_METADATA, call.initial_metadata()
)
)
for _ in call:
pass
self.assertTrue(
test_common.metadata_transmitted(
_EXPECTED_TRAILING_METADATA, call.trailing_metadata()
)
)
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
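# Illustrative client-side pattern exercised by the tests above: send
# invocation metadata with the call, then read the metadata the server attached
# to the response. Reuses the module-level constants defined in this file.
def _example_metadata_round_trip(channel):
    multi_callable = channel.unary_unary(_UNARY_UNARY)
    _, call = multi_callable.with_call(_REQUEST, metadata=_INVOCATION_METADATA)
    return call.initial_metadata(), call.trailing_metadata()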
| 8,397
| 30.219331
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_error_message_encoding_test.py
|
# Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests 'utf-8' encoded error message."""
import unittest
import weakref
import grpc
from tests.unit import test_common
from tests.unit.framework.common import test_constants
_UNICODE_ERROR_MESSAGES = [
b"\xe2\x80\x9d".decode("utf-8"),
b"abc\x80\xd0\xaf".decode("latin-1"),
b"\xc3\xa9".decode("utf-8"),
]
_REQUEST = b"\x00\x00\x00"
_RESPONSE = b"\x00\x00\x00"
_UNARY_UNARY = "/test/UnaryUnary"
class _MethodHandler(grpc.RpcMethodHandler):
def __init__(self, request_streaming=None, response_streaming=None):
self.request_streaming = request_streaming
self.response_streaming = response_streaming
self.request_deserializer = None
self.response_serializer = None
self.unary_stream = None
self.stream_unary = None
self.stream_stream = None
def unary_unary(self, request, servicer_context):
servicer_context.set_code(grpc.StatusCode.UNKNOWN)
servicer_context.set_details(request.decode("utf-8"))
return _RESPONSE
class _GenericHandler(grpc.GenericRpcHandler):
def __init__(self, test):
self._test = test
def service(self, handler_call_details):
return _MethodHandler()
class ErrorMessageEncodingTest(unittest.TestCase):
def setUp(self):
self._server = test_common.test_server()
self._server.add_generic_rpc_handlers(
(_GenericHandler(weakref.proxy(self)),)
)
port = self._server.add_insecure_port("[::]:0")
self._server.start()
self._channel = grpc.insecure_channel("localhost:%d" % port)
def tearDown(self):
self._server.stop(0)
self._channel.close()
def testMessageEncoding(self):
for message in _UNICODE_ERROR_MESSAGES:
multi_callable = self._channel.unary_unary(_UNARY_UNARY)
with self.assertRaises(grpc.RpcError) as cm:
multi_callable(message.encode("utf-8"))
self.assertEqual(cm.exception.code(), grpc.StatusCode.UNKNOWN)
self.assertEqual(cm.exception.details(), message)
if __name__ == "__main__":
unittest.main(verbosity=2)
| 2,713
| 30.55814
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_signal_client.py
|
# Copyright 2019 the gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client for testing responsiveness to signals."""
from __future__ import print_function
import argparse
import functools
import logging
import signal
import sys
import grpc
SIGTERM_MESSAGE = "Handling sigterm!"
UNARY_UNARY = "/test/Unary"
UNARY_STREAM = "/test/ServerStreaming"
_MESSAGE = b"\x00\x00\x00"
_ASSERTION_MESSAGE = "Control flow should never reach here."
# NOTE(gnossen): We use a global variable here so that the signal handler can be
# installed before the RPC begins. If we do not do this, then we may receive the
# SIGINT before the signal handler is installed. I'm not happy with per-process
# global state, but the per-process global state that is signal handlers
# somewhat forces my hand.
per_process_rpc_future = None
def handle_sigint(unused_signum, unused_frame):
print(SIGTERM_MESSAGE)
if per_process_rpc_future is not None:
per_process_rpc_future.cancel()
sys.stderr.flush()
# This sys.exit(0) avoids an exception caused by the cancelled RPC.
sys.exit(0)
def main_unary(server_target):
"""Initiate a unary RPC to be interrupted by a SIGINT."""
global per_process_rpc_future # pylint: disable=global-statement
with grpc.insecure_channel(server_target) as channel:
multicallable = channel.unary_unary(UNARY_UNARY)
signal.signal(signal.SIGINT, handle_sigint)
per_process_rpc_future = multicallable.future(
_MESSAGE, wait_for_ready=True
)
result = per_process_rpc_future.result()
assert False, _ASSERTION_MESSAGE
def main_streaming(server_target):
"""Initiate a streaming RPC to be interrupted by a SIGINT."""
global per_process_rpc_future # pylint: disable=global-statement
with grpc.insecure_channel(server_target) as channel:
signal.signal(signal.SIGINT, handle_sigint)
per_process_rpc_future = channel.unary_stream(UNARY_STREAM)(
_MESSAGE, wait_for_ready=True
)
for result in per_process_rpc_future:
pass
assert False, _ASSERTION_MESSAGE
def main_unary_with_exception(server_target):
"""Initiate a unary RPC with a signal handler that will raise."""
channel = grpc.insecure_channel(server_target)
try:
channel.unary_unary(UNARY_UNARY)(_MESSAGE, wait_for_ready=True)
except KeyboardInterrupt:
sys.stderr.write("Running signal handler.\n")
sys.stderr.flush()
# This call should not freeze.
channel.close()
def main_streaming_with_exception(server_target):
"""Initiate a streaming RPC with a signal handler that will raise."""
channel = grpc.insecure_channel(server_target)
try:
for _ in channel.unary_stream(UNARY_STREAM)(
_MESSAGE, wait_for_ready=True
):
pass
except KeyboardInterrupt:
sys.stderr.write("Running signal handler.\n")
sys.stderr.flush()
# This call should not freeze.
channel.close()
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Signal test client.")
parser.add_argument("server", help="Server target")
parser.add_argument("arity", help="Arity", choices=("unary", "streaming"))
parser.add_argument(
"--exception",
help="Whether the signal throws an exception",
action="store_true",
)
parser.add_argument(
"--gevent", help="Whether to run under gevent.", action="store_true"
)
args = parser.parse_args()
if args.gevent:
from gevent import monkey
import gevent.util
monkey.patch_all()
import grpc.experimental.gevent
grpc.experimental.gevent.init_gevent()
if args.arity == "unary" and not args.exception:
main_unary(args.server)
elif args.arity == "streaming" and not args.exception:
main_streaming(args.server)
elif args.arity == "unary" and args.exception:
main_unary_with_exception(args.server)
else:
main_streaming_with_exception(args.server)
| 4,581
| 32.202899
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_server_shutdown_scenarios.py
|
# Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines a number of module-scope gRPC scenarios to test server shutdown."""
import argparse
from concurrent import futures
import logging
import os
import queue
import threading
import time
import grpc
from tests.unit import test_common
WAIT_TIME = 1000
REQUEST = b"request"
RESPONSE = b"response"
SERVER_RAISES_EXCEPTION = "server_raises_exception"
SERVER_DEALLOCATED = "server_deallocated"
SERVER_FORK_CAN_EXIT = "server_fork_can_exit"
FORK_EXIT = "/test/ForkExit"
def fork_and_exit(request, servicer_context):
pid = os.fork()
if pid == 0:
os._exit(0)
return RESPONSE
class GenericHandler(grpc.GenericRpcHandler):
def service(self, handler_call_details):
if handler_call_details.method == FORK_EXIT:
return grpc.unary_unary_rpc_method_handler(fork_and_exit)
else:
return None
def run_server(port_queue):
server = test_common.test_server()
port = server.add_insecure_port("[::]:0")
port_queue.put(port)
server.add_generic_rpc_handlers((GenericHandler(),))
server.start()
# threading.Event.wait() does not exhibit the bug identified in
# https://github.com/grpc/grpc/issues/17093, sleep instead
time.sleep(WAIT_TIME)
def run_test(args):
if args.scenario == SERVER_RAISES_EXCEPTION:
server = test_common.test_server()
server.start()
raise Exception()
elif args.scenario == SERVER_DEALLOCATED:
server = test_common.test_server()
server.start()
server.__del__()
while server._state.stage != grpc._server._ServerStage.STOPPED:
pass
elif args.scenario == SERVER_FORK_CAN_EXIT:
port_queue = queue.Queue()
thread = threading.Thread(target=run_server, args=(port_queue,))
thread.daemon = True
thread.start()
port = port_queue.get()
channel = grpc.insecure_channel("localhost:%d" % port)
multi_callable = channel.unary_unary(FORK_EXIT)
result, call = multi_callable.with_call(REQUEST, wait_for_ready=True)
os.wait()
else:
raise ValueError("unknown test scenario")
if __name__ == "__main__":
logging.basicConfig()
parser = argparse.ArgumentParser()
parser.add_argument("scenario", type=str)
args = parser.parse_args()
run_test(args)
| 2,900
| 28.907216
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/thread_pool.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from concurrent import futures
import threading
class RecordingThreadPool(futures.ThreadPoolExecutor):
"""A thread pool that records if used."""
def __init__(self, max_workers):
self._tp_executor = futures.ThreadPoolExecutor(max_workers=max_workers)
self._lock = threading.Lock()
self._was_used = False
def submit(self, fn, *args, **kwargs): # pylint: disable=arguments-differ
with self._lock:
self._was_used = True
self._tp_executor.submit(fn, *args, **kwargs)
def was_used(self):
with self._lock:
return self._was_used
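# Minimal usage sketch (illustrative only): hand the recording pool to
# grpc.server and check was_used() afterwards, as the unit tests in this
# package do.
def _example_usage():
    import grpc  # imported locally; the class above only needs concurrent.futures
    pool = RecordingThreadPool(max_workers=None)
    server = grpc.server(pool, options=(("grpc.so_reuseport", 0),))
    server.add_insecure_port("[::]:0")
    server.start()
    server.stop(None)
    return pool.was_used()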
| 1,193
| 33.114286
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_channel_ready_future_test.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests of grpc.channel_ready_future."""
import logging
import threading
import unittest
import grpc
from tests.unit import thread_pool
from tests.unit.framework.common import test_constants
class _Callback(object):
def __init__(self):
self._condition = threading.Condition()
self._value = None
def accept_value(self, value):
with self._condition:
self._value = value
self._condition.notify_all()
def block_until_called(self):
with self._condition:
while self._value is None:
self._condition.wait()
return self._value
class ChannelReadyFutureTest(unittest.TestCase):
def test_lonely_channel_connectivity(self):
channel = grpc.insecure_channel("localhost:12345")
callback = _Callback()
ready_future = grpc.channel_ready_future(channel)
ready_future.add_done_callback(callback.accept_value)
with self.assertRaises(grpc.FutureTimeoutError):
ready_future.result(timeout=test_constants.SHORT_TIMEOUT)
self.assertFalse(ready_future.cancelled())
self.assertFalse(ready_future.done())
self.assertTrue(ready_future.running())
ready_future.cancel()
value_passed_to_callback = callback.block_until_called()
self.assertIs(ready_future, value_passed_to_callback)
self.assertTrue(ready_future.cancelled())
self.assertTrue(ready_future.done())
self.assertFalse(ready_future.running())
channel.close()
def test_immediately_connectable_channel_connectivity(self):
recording_thread_pool = thread_pool.RecordingThreadPool(
max_workers=None
)
server = grpc.server(
recording_thread_pool, options=(("grpc.so_reuseport", 0),)
)
port = server.add_insecure_port("[::]:0")
server.start()
channel = grpc.insecure_channel("localhost:{}".format(port))
callback = _Callback()
ready_future = grpc.channel_ready_future(channel)
ready_future.add_done_callback(callback.accept_value)
self.assertIsNone(
ready_future.result(timeout=test_constants.LONG_TIMEOUT)
)
value_passed_to_callback = callback.block_until_called()
self.assertIs(ready_future, value_passed_to_callback)
self.assertFalse(ready_future.cancelled())
self.assertTrue(ready_future.done())
self.assertFalse(ready_future.running())
# Cancellation after maturity has no effect.
ready_future.cancel()
self.assertFalse(ready_future.cancelled())
self.assertTrue(ready_future.done())
self.assertFalse(ready_future.running())
self.assertFalse(recording_thread_pool.was_used())
channel.close()
server.stop(None)
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
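# Illustrative helper (not part of the tests above): a typical production use
# of grpc.channel_ready_future is to block until the channel connects or give
# up after a timeout.
def _wait_until_ready(target, timeout):
    channel = grpc.insecure_channel(target)
    try:
        grpc.channel_ready_future(channel).result(timeout=timeout)
        return channel
    except grpc.FutureTimeoutError:
        channel.close()
        raise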
| 3,496
| 33.97
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_logging_test.py
|
# Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test of gRPC Python's interaction with the python logging module"""
import logging
import subprocess
import sys
import unittest
import grpc
INTERPRETER = sys.executable
class LoggingTest(unittest.TestCase):
def test_logger_not_occupied(self):
script = """if True:
import logging
import grpc
if len(logging.getLogger().handlers) != 0:
raise Exception('expected 0 logging handlers')
"""
self._verifyScriptSucceeds(script)
def test_handler_found(self):
script = """if True:
import logging
import grpc
"""
out, err = self._verifyScriptSucceeds(script)
self.assertEqual(0, len(err), "unexpected output to stderr")
def test_can_configure_logger(self):
script = """if True:
import logging
import grpc
import io
intended_stream = io.StringIO()
logging.basicConfig(stream=intended_stream)
if len(logging.getLogger().handlers) != 1:
raise Exception('expected 1 logging handler')
if logging.getLogger().handlers[0].stream is not intended_stream:
raise Exception('wrong handler stream')
"""
self._verifyScriptSucceeds(script)
def test_grpc_logger(self):
script = """if True:
import logging
import grpc
if "grpc" not in logging.Logger.manager.loggerDict:
raise Exception('grpc logger not found')
root_logger = logging.getLogger("grpc")
if len(root_logger.handlers) != 1:
raise Exception('expected 1 root logger handler')
if not isinstance(root_logger.handlers[0], logging.NullHandler):
raise Exception('expected logging.NullHandler')
"""
self._verifyScriptSucceeds(script)
def _verifyScriptSucceeds(self, script):
process = subprocess.Popen(
[INTERPRETER, "-c", script],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
out, err = process.communicate()
self.assertEqual(
0,
process.returncode,
"process failed with exit code %d (stdout: %s, stderr: %s)"
% (process.returncode, out, err),
)
return out, err
if __name__ == "__main__":
unittest.main(verbosity=2)
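# Illustrative sketch: because gRPC Python only installs a NullHandler on the
# "grpc" logger (verified above), an application that wants to see gRPC's log
# output attaches its own handler and level.
def _enable_grpc_log_output():
    grpc_logger = logging.getLogger("grpc")
    grpc_logger.setLevel(logging.DEBUG)
    grpc_logger.addHandler(logging.StreamHandler())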
| 3,024
| 28.368932
| 77
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_xds_credentials_test.py
|
# Copyright 2021 The gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests xDS server and channel credentials."""
from concurrent import futures
import contextlib
import logging
import unittest
import grpc
import grpc.experimental
from tests.unit import resources
from tests.unit import test_common
class _GenericHandler(grpc.GenericRpcHandler):
def service(self, handler_call_details):
return grpc.unary_unary_rpc_method_handler(
lambda request, unused_context: request
)
@contextlib.contextmanager
def xds_channel_server_without_xds(server_fallback_creds):
server = grpc.server(futures.ThreadPoolExecutor())
server.add_generic_rpc_handlers((_GenericHandler(),))
server_server_fallback_creds = grpc.ssl_server_credentials(
((resources.private_key(), resources.certificate_chain()),)
)
server_creds = grpc.xds_server_credentials(server_fallback_creds)
port = server.add_secure_port("localhost:0", server_creds)
server.start()
try:
yield "localhost:{}".format(port)
finally:
server.stop(None)
class XdsCredentialsTest(unittest.TestCase):
def test_xds_creds_fallback_ssl(self):
# Since there is no xDS server, the fallback credentials will be used.
# In this case, SSL credentials.
server_fallback_creds = grpc.ssl_server_credentials(
((resources.private_key(), resources.certificate_chain()),)
)
with xds_channel_server_without_xds(
server_fallback_creds
) as server_address:
override_options = (
("grpc.ssl_target_name_override", "foo.test.google.fr"),
)
channel_fallback_creds = grpc.ssl_channel_credentials(
root_certificates=resources.test_root_certificates(),
private_key=resources.private_key(),
certificate_chain=resources.certificate_chain(),
)
channel_creds = grpc.xds_channel_credentials(channel_fallback_creds)
with grpc.secure_channel(
server_address, channel_creds, options=override_options
) as channel:
request = b"abc"
response = channel.unary_unary("/test/method")(
request, wait_for_ready=True
)
self.assertEqual(response, request)
def test_xds_creds_fallback_insecure(self):
# Since there is no xDS server, the fallback credentials will be used.
# In this case, insecure.
server_fallback_creds = grpc.insecure_server_credentials()
with xds_channel_server_without_xds(
server_fallback_creds
) as server_address:
channel_fallback_creds = (
grpc.experimental.insecure_channel_credentials()
)
channel_creds = grpc.xds_channel_credentials(channel_fallback_creds)
with grpc.secure_channel(server_address, channel_creds) as channel:
request = b"abc"
response = channel.unary_unary("/test/method")(
request, wait_for_ready=True
)
self.assertEqual(response, request)
def test_start_xds_server(self):
server = grpc.server(futures.ThreadPoolExecutor(), xds=True)
server.add_generic_rpc_handlers((_GenericHandler(),))
server_fallback_creds = grpc.insecure_server_credentials()
server_creds = grpc.xds_server_credentials(server_fallback_creds)
port = server.add_secure_port("localhost:0", server_creds)
server.start()
server.stop(None)
# No exceptions thrown. A more comprehensive suite of tests will be
# provided by the interop tests.
if __name__ == "__main__":
logging.basicConfig()
unittest.main()
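# Minimal illustrative sketch of the client-side wiring exercised above: an
# xDS-capable channel that falls back to insecure credentials when no xDS
# control plane is configured.
def _example_xds_channel(target):
    fallback_creds = grpc.experimental.insecure_channel_credentials()
    return grpc.secure_channel(
        target, grpc.xds_channel_credentials(fallback_creds)
    )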
| 4,339
| 37.75
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_server_ssl_cert_config_test.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests server certificate rotation.
Here we test various aspects of gRPC Python's (and, by extension, in some
cases gRPC Core's) support for server certificate rotation.
* ServerSSLCertReloadTestWithClientAuth: test ability to rotate
server's SSL cert for use in future channels with clients while not
affecting any existing channel. The server requires client
authentication.
* ServerSSLCertReloadTestWithoutClientAuth: like
ServerSSLCertReloadTestWithClientAuth except that the server does
not authenticate the client.
* ServerSSLCertReloadTestCertConfigReuse: tests gRPC Python's ability
to deal with user's reuse of ServerCertificateConfiguration instances.
"""
import abc
import collections
from concurrent import futures
import logging
import os
import threading
import unittest
import grpc
from tests.testing import _application_common
from tests.testing import _server_application
from tests.testing.proto import services_pb2_grpc
from tests.unit import resources
from tests.unit import test_common
CA_1_PEM = resources.cert_hier_1_root_ca_cert()
CA_2_PEM = resources.cert_hier_2_root_ca_cert()
CLIENT_KEY_1_PEM = resources.cert_hier_1_client_1_key()
CLIENT_CERT_CHAIN_1_PEM = (
resources.cert_hier_1_client_1_cert()
+ resources.cert_hier_1_intermediate_ca_cert()
)
CLIENT_KEY_2_PEM = resources.cert_hier_2_client_1_key()
CLIENT_CERT_CHAIN_2_PEM = (
resources.cert_hier_2_client_1_cert()
+ resources.cert_hier_2_intermediate_ca_cert()
)
SERVER_KEY_1_PEM = resources.cert_hier_1_server_1_key()
SERVER_CERT_CHAIN_1_PEM = (
resources.cert_hier_1_server_1_cert()
+ resources.cert_hier_1_intermediate_ca_cert()
)
SERVER_KEY_2_PEM = resources.cert_hier_2_server_1_key()
SERVER_CERT_CHAIN_2_PEM = (
resources.cert_hier_2_server_1_cert()
+ resources.cert_hier_2_intermediate_ca_cert()
)
# For use with the CertConfigFetcher below: a minimal hand-rolled mock record
# of a single certificate-config fetch attempt.
Call = collections.namedtuple("Call", ["did_raise", "returned_cert_config"])
def _create_channel(port, credentials):
return grpc.secure_channel("localhost:{}".format(port), credentials)
def _create_client_stub(channel, expect_success):
if expect_success:
# per Nathaniel: there's some robustness issue if we start
# using a channel without waiting for it to be actually ready
grpc.channel_ready_future(channel).result(timeout=10)
return services_pb2_grpc.FirstServiceStub(channel)
class CertConfigFetcher(object):
def __init__(self):
self._lock = threading.Lock()
self._calls = []
self._should_raise = False
self._cert_config = None
def reset(self):
with self._lock:
self._calls = []
self._should_raise = False
self._cert_config = None
def configure(self, should_raise, cert_config):
assert not (should_raise and cert_config), (
"should not specify both should_raise and a cert_config at the same"
" time"
)
with self._lock:
self._should_raise = should_raise
self._cert_config = cert_config
def getCalls(self):
with self._lock:
return self._calls
def __call__(self):
with self._lock:
if self._should_raise:
self._calls.append(Call(True, None))
raise ValueError("just for fun, should not affect the test")
else:
self._calls.append(Call(False, self._cert_config))
return self._cert_config
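# A condensed illustrative sketch of the rotation mechanism exercised below:
# the server is created once with dynamic credentials, and changing the value
# returned by the fetcher rotates the certificate configuration for new
# handshakes without restarting the server.
def _example_dynamic_server(initial_cert_config, fetcher):
    server_credentials = grpc.dynamic_ssl_server_credentials(
        initial_cert_config,
        fetcher,
        require_client_authentication=True,
    )
    server = test_common.test_server()
    port = server.add_secure_port("[::]:0", server_credentials)
    server.start()
    return server, port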
class _ServerSSLCertReloadTest(unittest.TestCase, metaclass=abc.ABCMeta):
def __init__(self, *args, **kwargs):
super(_ServerSSLCertReloadTest, self).__init__(*args, **kwargs)
self.server = None
self.port = None
@abc.abstractmethod
def require_client_auth(self):
raise NotImplementedError()
def setUp(self):
self.server = test_common.test_server()
services_pb2_grpc.add_FirstServiceServicer_to_server(
_server_application.FirstServiceServicer(), self.server
)
switch_cert_on_client_num = 10
initial_cert_config = grpc.ssl_server_certificate_configuration(
[(SERVER_KEY_1_PEM, SERVER_CERT_CHAIN_1_PEM)],
root_certificates=CA_2_PEM,
)
self.cert_config_fetcher = CertConfigFetcher()
server_credentials = grpc.dynamic_ssl_server_credentials(
initial_cert_config,
self.cert_config_fetcher,
require_client_authentication=self.require_client_auth(),
)
self.port = self.server.add_secure_port("[::]:0", server_credentials)
self.server.start()
def tearDown(self):
if self.server:
self.server.stop(None)
def _perform_rpc(self, client_stub, expect_success):
        # We don't care about the actual response of the RPC, only whether it
        # can be performed; if it cannot, the status code must be UNAVAILABLE
        # or UNKNOWN (see the TLS version note below).
request = _application_common.UNARY_UNARY_REQUEST
if expect_success:
response = client_stub.UnUn(request)
self.assertEqual(response, _application_common.UNARY_UNARY_RESPONSE)
else:
with self.assertRaises(grpc.RpcError) as exception_context:
client_stub.UnUn(request)
# If TLS 1.2 is used, then the client receives an alert message
# before the handshake is complete, so the status is UNAVAILABLE. If
# TLS 1.3 is used, then the client receives the alert message after
# the handshake is complete, so the TSI handshaker returns the
# TSI_PROTOCOL_FAILURE result. This result does not have a
# corresponding status code, so this yields an UNKNOWN status.
self.assertTrue(
exception_context.exception.code()
in [grpc.StatusCode.UNAVAILABLE, grpc.StatusCode.UNKNOWN]
)
def _do_one_shot_client_rpc(
self,
expect_success,
root_certificates=None,
private_key=None,
certificate_chain=None,
):
credentials = grpc.ssl_channel_credentials(
root_certificates=root_certificates,
private_key=private_key,
certificate_chain=certificate_chain,
)
with _create_channel(self.port, credentials) as client_channel:
client_stub = _create_client_stub(client_channel, expect_success)
self._perform_rpc(client_stub, expect_success)
def _test(self):
# things should work...
self.cert_config_fetcher.configure(False, None)
self._do_one_shot_client_rpc(
True,
root_certificates=CA_1_PEM,
private_key=CLIENT_KEY_2_PEM,
certificate_chain=CLIENT_CERT_CHAIN_2_PEM,
)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertEqual(len(actual_calls), 1)
self.assertFalse(actual_calls[0].did_raise)
self.assertIsNone(actual_calls[0].returned_cert_config)
# client should reject server...
# fails because client trusts ca2 and so will reject server
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, None)
self._do_one_shot_client_rpc(
False,
root_certificates=CA_2_PEM,
private_key=CLIENT_KEY_2_PEM,
certificate_chain=CLIENT_CERT_CHAIN_2_PEM,
)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertGreaterEqual(len(actual_calls), 1)
self.assertFalse(actual_calls[0].did_raise)
for i, call in enumerate(actual_calls):
self.assertFalse(call.did_raise, "i= {}".format(i))
self.assertIsNone(call.returned_cert_config, "i= {}".format(i))
# should work again...
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(True, None)
self._do_one_shot_client_rpc(
True,
root_certificates=CA_1_PEM,
private_key=CLIENT_KEY_2_PEM,
certificate_chain=CLIENT_CERT_CHAIN_2_PEM,
)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertEqual(len(actual_calls), 1)
self.assertTrue(actual_calls[0].did_raise)
self.assertIsNone(actual_calls[0].returned_cert_config)
# if with_client_auth, then client should be rejected by
# server because client uses key/cert1, but server trusts ca2,
# so server will reject
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, None)
self._do_one_shot_client_rpc(
not self.require_client_auth(),
root_certificates=CA_1_PEM,
private_key=CLIENT_KEY_1_PEM,
certificate_chain=CLIENT_CERT_CHAIN_1_PEM,
)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertGreaterEqual(len(actual_calls), 1)
for i, call in enumerate(actual_calls):
self.assertFalse(call.did_raise, "i= {}".format(i))
self.assertIsNone(call.returned_cert_config, "i= {}".format(i))
# should work again...
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, None)
self._do_one_shot_client_rpc(
True,
root_certificates=CA_1_PEM,
private_key=CLIENT_KEY_2_PEM,
certificate_chain=CLIENT_CERT_CHAIN_2_PEM,
)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertEqual(len(actual_calls), 1)
self.assertFalse(actual_calls[0].did_raise)
self.assertIsNone(actual_calls[0].returned_cert_config)
# now create the "persistent" clients
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, None)
channel_A = _create_channel(
self.port,
grpc.ssl_channel_credentials(
root_certificates=CA_1_PEM,
private_key=CLIENT_KEY_2_PEM,
certificate_chain=CLIENT_CERT_CHAIN_2_PEM,
),
)
persistent_client_stub_A = _create_client_stub(channel_A, True)
self._perform_rpc(persistent_client_stub_A, True)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertEqual(len(actual_calls), 1)
self.assertFalse(actual_calls[0].did_raise)
self.assertIsNone(actual_calls[0].returned_cert_config)
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, None)
channel_B = _create_channel(
self.port,
grpc.ssl_channel_credentials(
root_certificates=CA_1_PEM,
private_key=CLIENT_KEY_2_PEM,
certificate_chain=CLIENT_CERT_CHAIN_2_PEM,
),
)
persistent_client_stub_B = _create_client_stub(channel_B, True)
self._perform_rpc(persistent_client_stub_B, True)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertEqual(len(actual_calls), 1)
self.assertFalse(actual_calls[0].did_raise)
self.assertIsNone(actual_calls[0].returned_cert_config)
        # Moment of truth: the client should now reject the server because the
        # server has switched certificates...
cert_config = grpc.ssl_server_certificate_configuration(
[(SERVER_KEY_2_PEM, SERVER_CERT_CHAIN_2_PEM)],
root_certificates=CA_1_PEM,
)
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, cert_config)
self._do_one_shot_client_rpc(
False,
root_certificates=CA_1_PEM,
private_key=CLIENT_KEY_2_PEM,
certificate_chain=CLIENT_CERT_CHAIN_2_PEM,
)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertGreaterEqual(len(actual_calls), 1)
self.assertFalse(actual_calls[0].did_raise)
for i, call in enumerate(actual_calls):
self.assertFalse(call.did_raise, "i= {}".format(i))
self.assertEqual(
call.returned_cert_config, cert_config, "i= {}".format(i)
)
# now should work again...
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, None)
self._do_one_shot_client_rpc(
True,
root_certificates=CA_2_PEM,
private_key=CLIENT_KEY_1_PEM,
certificate_chain=CLIENT_CERT_CHAIN_1_PEM,
)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertEqual(len(actual_calls), 1)
self.assertFalse(actual_calls[0].did_raise)
self.assertIsNone(actual_calls[0].returned_cert_config)
# client should be rejected by server if with_client_auth
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, None)
self._do_one_shot_client_rpc(
not self.require_client_auth(),
root_certificates=CA_2_PEM,
private_key=CLIENT_KEY_2_PEM,
certificate_chain=CLIENT_CERT_CHAIN_2_PEM,
)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertGreaterEqual(len(actual_calls), 1)
for i, call in enumerate(actual_calls):
self.assertFalse(call.did_raise, "i= {}".format(i))
self.assertIsNone(call.returned_cert_config, "i= {}".format(i))
# here client should reject server...
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, None)
self._do_one_shot_client_rpc(
False,
root_certificates=CA_1_PEM,
private_key=CLIENT_KEY_2_PEM,
certificate_chain=CLIENT_CERT_CHAIN_2_PEM,
)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertGreaterEqual(len(actual_calls), 1)
for i, call in enumerate(actual_calls):
self.assertFalse(call.did_raise, "i= {}".format(i))
self.assertIsNone(call.returned_cert_config, "i= {}".format(i))
# persistent clients should continue to work
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, None)
self._perform_rpc(persistent_client_stub_A, True)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertEqual(len(actual_calls), 0)
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, None)
self._perform_rpc(persistent_client_stub_B, True)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertEqual(len(actual_calls), 0)
channel_A.close()
channel_B.close()
class ServerSSLCertConfigFetcherParamsChecks(unittest.TestCase):
def test_check_on_initial_config(self):
with self.assertRaises(TypeError):
grpc.dynamic_ssl_server_credentials(None, str)
with self.assertRaises(TypeError):
grpc.dynamic_ssl_server_credentials(1, str)
def test_check_on_config_fetcher(self):
cert_config = grpc.ssl_server_certificate_configuration(
[(SERVER_KEY_2_PEM, SERVER_CERT_CHAIN_2_PEM)],
root_certificates=CA_1_PEM,
)
with self.assertRaises(TypeError):
grpc.dynamic_ssl_server_credentials(cert_config, None)
with self.assertRaises(TypeError):
grpc.dynamic_ssl_server_credentials(cert_config, 1)
class ServerSSLCertReloadTestWithClientAuth(_ServerSSLCertReloadTest):
def require_client_auth(self):
return True
test = _ServerSSLCertReloadTest._test
class ServerSSLCertReloadTestWithoutClientAuth(_ServerSSLCertReloadTest):
def require_client_auth(self):
return False
test = _ServerSSLCertReloadTest._test
class ServerSSLCertReloadTestCertConfigReuse(_ServerSSLCertReloadTest):
"""Ensures that `ServerCertificateConfiguration` instances can be reused.
    Because gRPC Core takes ownership of the
    `grpc_ssl_server_certificate_config` encapsulated by a
    `ServerCertificateConfiguration`, this test reuses the same
    `ServerCertificateConfiguration` instances several times to verify that
    gRPC Python keeps them valid, so that user applications can safely reuse
    such instances.
"""
def require_client_auth(self):
return True
def setUp(self):
self.server = test_common.test_server()
services_pb2_grpc.add_FirstServiceServicer_to_server(
_server_application.FirstServiceServicer(), self.server
)
self.cert_config_A = grpc.ssl_server_certificate_configuration(
[(SERVER_KEY_1_PEM, SERVER_CERT_CHAIN_1_PEM)],
root_certificates=CA_2_PEM,
)
self.cert_config_B = grpc.ssl_server_certificate_configuration(
[(SERVER_KEY_2_PEM, SERVER_CERT_CHAIN_2_PEM)],
root_certificates=CA_1_PEM,
)
self.cert_config_fetcher = CertConfigFetcher()
server_credentials = grpc.dynamic_ssl_server_credentials(
self.cert_config_A,
self.cert_config_fetcher,
require_client_authentication=True,
)
self.port = self.server.add_secure_port("[::]:0", server_credentials)
self.server.start()
def test_cert_config_reuse(self):
# succeed with A
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, self.cert_config_A)
self._do_one_shot_client_rpc(
True,
root_certificates=CA_1_PEM,
private_key=CLIENT_KEY_2_PEM,
certificate_chain=CLIENT_CERT_CHAIN_2_PEM,
)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertEqual(len(actual_calls), 1)
self.assertFalse(actual_calls[0].did_raise)
self.assertEqual(
actual_calls[0].returned_cert_config, self.cert_config_A
)
# fail with A
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, self.cert_config_A)
self._do_one_shot_client_rpc(
False,
root_certificates=CA_2_PEM,
private_key=CLIENT_KEY_1_PEM,
certificate_chain=CLIENT_CERT_CHAIN_1_PEM,
)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertGreaterEqual(len(actual_calls), 1)
self.assertFalse(actual_calls[0].did_raise)
for i, call in enumerate(actual_calls):
self.assertFalse(call.did_raise, "i= {}".format(i))
self.assertEqual(
call.returned_cert_config, self.cert_config_A, "i= {}".format(i)
)
# succeed again with A
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, self.cert_config_A)
self._do_one_shot_client_rpc(
True,
root_certificates=CA_1_PEM,
private_key=CLIENT_KEY_2_PEM,
certificate_chain=CLIENT_CERT_CHAIN_2_PEM,
)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertEqual(len(actual_calls), 1)
self.assertFalse(actual_calls[0].did_raise)
self.assertEqual(
actual_calls[0].returned_cert_config, self.cert_config_A
)
# succeed with B
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, self.cert_config_B)
self._do_one_shot_client_rpc(
True,
root_certificates=CA_2_PEM,
private_key=CLIENT_KEY_1_PEM,
certificate_chain=CLIENT_CERT_CHAIN_1_PEM,
)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertEqual(len(actual_calls), 1)
self.assertFalse(actual_calls[0].did_raise)
self.assertEqual(
actual_calls[0].returned_cert_config, self.cert_config_B
)
# fail with B
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, self.cert_config_B)
self._do_one_shot_client_rpc(
False,
root_certificates=CA_1_PEM,
private_key=CLIENT_KEY_2_PEM,
certificate_chain=CLIENT_CERT_CHAIN_2_PEM,
)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertGreaterEqual(len(actual_calls), 1)
self.assertFalse(actual_calls[0].did_raise)
for i, call in enumerate(actual_calls):
self.assertFalse(call.did_raise, "i= {}".format(i))
self.assertEqual(
call.returned_cert_config, self.cert_config_B, "i= {}".format(i)
)
# succeed again with B
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, self.cert_config_B)
self._do_one_shot_client_rpc(
True,
root_certificates=CA_2_PEM,
private_key=CLIENT_KEY_1_PEM,
certificate_chain=CLIENT_CERT_CHAIN_1_PEM,
)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertEqual(len(actual_calls), 1)
self.assertFalse(actual_calls[0].did_raise)
self.assertEqual(
actual_calls[0].returned_cert_config, self.cert_config_B
)
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
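# Illustrative sketch, not part of the test module above: how an application
# might reuse a single `ServerCertificateConfiguration` across fetcher
# invocations, as described in the ServerSSLCertReloadTestCertConfigReuse
# docstring. The helper name and PEM arguments are hypothetical; `grpc` is
# imported as elsewhere in this module.
def _example_reuse_cert_config(server_key_pem, server_cert_chain_pem, ca_pem):
    cert_config = grpc.ssl_server_certificate_configuration(
        [(server_key_pem, server_cert_chain_pem)], root_certificates=ca_pem
    )
    def fetch_cert_config():
        # Returning the same instance on every call is safe; gRPC Python
        # keeps the configuration valid so it can be served repeatedly.
        return cert_config
    return grpc.dynamic_ssl_server_credentials(
        cert_config, fetch_cert_config, require_client_authentication=True
    )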
| 22,003
| 37.739437
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_version_test.py
|
# Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test for grpc.__version__"""
import logging
import unittest
import grpc
from grpc import _grpcio_metadata
class VersionTest(unittest.TestCase):
def test_get_version(self):
self.assertEqual(grpc.__version__, _grpcio_metadata.__version__)
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
| 919
| 28.677419
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_metadata_flags_test.py
|
# Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests metadata flags feature by testing wait-for-ready semantics"""
import logging
import queue
import socket
import threading
import time
import unittest
import weakref
import grpc
from tests.unit import test_common
import tests.unit.framework.common
from tests.unit.framework.common import get_socket
from tests.unit.framework.common import test_constants
_UNARY_UNARY = "/test/UnaryUnary"
_UNARY_STREAM = "/test/UnaryStream"
_STREAM_UNARY = "/test/StreamUnary"
_STREAM_STREAM = "/test/StreamStream"
_REQUEST = b"\x00\x00\x00"
_RESPONSE = b"\x00\x00\x00"
def handle_unary_unary(test, request, servicer_context):
return _RESPONSE
def handle_unary_stream(test, request, servicer_context):
for _ in range(test_constants.STREAM_LENGTH):
yield _RESPONSE
def handle_stream_unary(test, request_iterator, servicer_context):
for _ in request_iterator:
pass
return _RESPONSE
def handle_stream_stream(test, request_iterator, servicer_context):
for _ in request_iterator:
yield _RESPONSE
class _MethodHandler(grpc.RpcMethodHandler):
def __init__(self, test, request_streaming, response_streaming):
self.request_streaming = request_streaming
self.response_streaming = response_streaming
self.request_deserializer = None
self.response_serializer = None
self.unary_unary = None
self.unary_stream = None
self.stream_unary = None
self.stream_stream = None
if self.request_streaming and self.response_streaming:
self.stream_stream = lambda req, ctx: handle_stream_stream(
test, req, ctx
)
elif self.request_streaming:
self.stream_unary = lambda req, ctx: handle_stream_unary(
test, req, ctx
)
elif self.response_streaming:
self.unary_stream = lambda req, ctx: handle_unary_stream(
test, req, ctx
)
else:
self.unary_unary = lambda req, ctx: handle_unary_unary(
test, req, ctx
)
class _GenericHandler(grpc.GenericRpcHandler):
def __init__(self, test):
self._test = test
def service(self, handler_call_details):
if handler_call_details.method == _UNARY_UNARY:
return _MethodHandler(self._test, False, False)
elif handler_call_details.method == _UNARY_STREAM:
return _MethodHandler(self._test, False, True)
elif handler_call_details.method == _STREAM_UNARY:
return _MethodHandler(self._test, True, False)
elif handler_call_details.method == _STREAM_STREAM:
return _MethodHandler(self._test, True, True)
else:
return None
def create_phony_channel():
"""Creating phony channels is a workaround for retries"""
host, port, sock = get_socket(sock_options=(socket.SO_REUSEADDR,))
sock.close()
return grpc.insecure_channel("{}:{}".format(host, port))
def perform_unary_unary_call(channel, wait_for_ready=None):
channel.unary_unary(_UNARY_UNARY).__call__(
_REQUEST,
timeout=test_constants.LONG_TIMEOUT,
wait_for_ready=wait_for_ready,
)
def perform_unary_unary_with_call(channel, wait_for_ready=None):
channel.unary_unary(_UNARY_UNARY).with_call(
_REQUEST,
timeout=test_constants.LONG_TIMEOUT,
wait_for_ready=wait_for_ready,
)
def perform_unary_unary_future(channel, wait_for_ready=None):
channel.unary_unary(_UNARY_UNARY).future(
_REQUEST,
timeout=test_constants.LONG_TIMEOUT,
wait_for_ready=wait_for_ready,
).result(timeout=test_constants.LONG_TIMEOUT)
def perform_unary_stream_call(channel, wait_for_ready=None):
response_iterator = channel.unary_stream(_UNARY_STREAM).__call__(
_REQUEST,
timeout=test_constants.LONG_TIMEOUT,
wait_for_ready=wait_for_ready,
)
for _ in response_iterator:
pass
def perform_stream_unary_call(channel, wait_for_ready=None):
channel.stream_unary(_STREAM_UNARY).__call__(
iter([_REQUEST] * test_constants.STREAM_LENGTH),
timeout=test_constants.LONG_TIMEOUT,
wait_for_ready=wait_for_ready,
)
def perform_stream_unary_with_call(channel, wait_for_ready=None):
channel.stream_unary(_STREAM_UNARY).with_call(
iter([_REQUEST] * test_constants.STREAM_LENGTH),
timeout=test_constants.LONG_TIMEOUT,
wait_for_ready=wait_for_ready,
)
def perform_stream_unary_future(channel, wait_for_ready=None):
channel.stream_unary(_STREAM_UNARY).future(
iter([_REQUEST] * test_constants.STREAM_LENGTH),
timeout=test_constants.LONG_TIMEOUT,
wait_for_ready=wait_for_ready,
).result(timeout=test_constants.LONG_TIMEOUT)
def perform_stream_stream_call(channel, wait_for_ready=None):
response_iterator = channel.stream_stream(_STREAM_STREAM).__call__(
iter([_REQUEST] * test_constants.STREAM_LENGTH),
timeout=test_constants.LONG_TIMEOUT,
wait_for_ready=wait_for_ready,
)
for _ in response_iterator:
pass
_ALL_CALL_CASES = [
perform_unary_unary_call,
perform_unary_unary_with_call,
perform_unary_unary_future,
perform_unary_stream_call,
perform_stream_unary_call,
perform_stream_unary_with_call,
perform_stream_unary_future,
perform_stream_stream_call,
]
class MetadataFlagsTest(unittest.TestCase):
def check_connection_does_failfast(self, fn, channel, wait_for_ready=None):
try:
fn(channel, wait_for_ready)
self.fail("The Call should fail")
except BaseException as e: # pylint: disable=broad-except
self.assertIs(grpc.StatusCode.UNAVAILABLE, e.code())
def test_call_wait_for_ready_default(self):
for perform_call in _ALL_CALL_CASES:
with create_phony_channel() as channel:
self.check_connection_does_failfast(perform_call, channel)
def test_call_wait_for_ready_disabled(self):
for perform_call in _ALL_CALL_CASES:
with create_phony_channel() as channel:
self.check_connection_does_failfast(
perform_call, channel, wait_for_ready=False
)
def test_call_wait_for_ready_enabled(self):
# To test the wait mechanism, Python threads are required so that the
# clients can be set up first without handling each case separately.
# Also, Python threads don't propagate unhandled exceptions to the
# main thread, so another mechanism is needed to store the exceptions
# and re-raise them in the main thread.
unhandled_exceptions = queue.Queue()
# We just need an unused TCP port
host, port, sock = get_socket(sock_options=(socket.SO_REUSEADDR,))
sock.close()
addr = "{}:{}".format(host, port)
wg = test_common.WaitGroup(len(_ALL_CALL_CASES))
def wait_for_transient_failure(channel_connectivity):
if (
channel_connectivity
== grpc.ChannelConnectivity.TRANSIENT_FAILURE
):
wg.done()
def test_call(perform_call):
with grpc.insecure_channel(addr) as channel:
try:
channel.subscribe(wait_for_transient_failure)
perform_call(channel, wait_for_ready=True)
except BaseException as e: # pylint: disable=broad-except
# If the call fails, the thread is destroyed. The channel
# object can then be collected before the callback is invoked,
# which would result in a deadlock.
wg.done()
unhandled_exceptions.put(e, True)
test_threads = []
for perform_call in _ALL_CALL_CASES:
test_thread = threading.Thread(
target=test_call, args=(perform_call,)
)
test_thread.daemon = True
test_thread.exception = None
test_thread.start()
test_threads.append(test_thread)
# Start the server after the connections are waiting
wg.wait()
server = test_common.test_server(reuse_port=True)
server.add_generic_rpc_handlers((_GenericHandler(weakref.proxy(self)),))
server.add_insecure_port(addr)
server.start()
for test_thread in test_threads:
test_thread.join()
# Stop the server to make test end properly
server.stop(0)
if not unhandled_exceptions.empty():
raise unhandled_exceptions.get(True)
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)
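# Illustrative sketch, not part of the test module above: the wait-for-ready
# flag as an application would typically pass it. With wait_for_ready=True the
# RPC is queued until the channel becomes READY (or the deadline expires)
# rather than failing fast with UNAVAILABLE. The helper name and 10-second
# timeout are hypothetical; the other names come from this module.
def _example_call_with_wait_for_ready(channel):
    return channel.unary_unary(_UNARY_UNARY)(
        _REQUEST, timeout=10, wait_for_ready=True
    )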
| 9,329
| 32.804348
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_local_credentials_test.py
|
# Copyright 2019 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test of RPCs made using local credentials."""
from concurrent.futures import ThreadPoolExecutor
import os
import unittest
import grpc
class _GenericHandler(grpc.GenericRpcHandler):
def service(self, handler_call_details):
return grpc.unary_unary_rpc_method_handler(
lambda request, unused_context: request
)
class LocalCredentialsTest(unittest.TestCase):
def _create_server(self):
server = grpc.server(ThreadPoolExecutor())
server.add_generic_rpc_handlers((_GenericHandler(),))
return server
@unittest.skipIf(
os.name == "nt", "TODO(https://github.com/grpc/grpc/issues/20078)"
)
def test_local_tcp(self):
server_addr = "localhost:{}"
channel_creds = grpc.local_channel_credentials(
grpc.LocalConnectionType.LOCAL_TCP
)
server_creds = grpc.local_server_credentials(
grpc.LocalConnectionType.LOCAL_TCP
)
server = self._create_server()
port = server.add_secure_port(server_addr.format(0), server_creds)
server.start()
with grpc.secure_channel(
server_addr.format(port), channel_creds
) as channel:
self.assertEqual(
b"abc",
channel.unary_unary("/test/method")(
b"abc", wait_for_ready=True
),
)
server.stop(None)
@unittest.skipIf(
os.name == "nt", "Unix Domain Socket is not supported on Windows"
)
def test_uds(self):
server_addr = "unix:/tmp/grpc_fullstack_test"
channel_creds = grpc.local_channel_credentials(
grpc.LocalConnectionType.UDS
)
server_creds = grpc.local_server_credentials(
grpc.LocalConnectionType.UDS
)
server = self._create_server()
server.add_secure_port(server_addr, server_creds)
server.start()
with grpc.secure_channel(server_addr, channel_creds) as channel:
self.assertEqual(
b"abc",
channel.unary_unary("/test/method")(
b"abc", wait_for_ready=True
),
)
server.stop(None)
if __name__ == "__main__":
unittest.main()
| 2,846
| 30.988764
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_auth_context_test.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests exposure of SSL auth context"""
import logging
import pickle
import unittest
import grpc
from grpc import _channel
from grpc.experimental import session_cache
from tests.unit import resources
from tests.unit import test_common
_REQUEST = b"\x00\x00\x00"
_RESPONSE = b"\x00\x00\x00"
_UNARY_UNARY = "/test/UnaryUnary"
_SERVER_HOST_OVERRIDE = "foo.test.google.fr"
_CLIENT_IDS = (
b"*.test.google.fr",
b"waterzooi.test.google.be",
b"*.test.youtube.com",
b"192.168.1.3",
)
_ID = "id"
_ID_KEY = "id_key"
_AUTH_CTX = "auth_ctx"
_PRIVATE_KEY = resources.private_key()
_CERTIFICATE_CHAIN = resources.certificate_chain()
_TEST_ROOT_CERTIFICATES = resources.test_root_certificates()
_SERVER_CERTS = ((_PRIVATE_KEY, _CERTIFICATE_CHAIN),)
_PROPERTY_OPTIONS = (
(
"grpc.ssl_target_name_override",
_SERVER_HOST_OVERRIDE,
),
)
def handle_unary_unary(request, servicer_context):
return pickle.dumps(
{
_ID: servicer_context.peer_identities(),
_ID_KEY: servicer_context.peer_identity_key(),
_AUTH_CTX: servicer_context.auth_context(),
}
)
class AuthContextTest(unittest.TestCase):
def testInsecure(self):
handler = grpc.method_handlers_generic_handler(
"test",
{
"UnaryUnary": grpc.unary_unary_rpc_method_handler(
handle_unary_unary
)
},
)
server = test_common.test_server()
server.add_generic_rpc_handlers((handler,))
port = server.add_insecure_port("[::]:0")
server.start()
with grpc.insecure_channel("localhost:%d" % port) as channel:
response = channel.unary_unary(_UNARY_UNARY)(_REQUEST)
server.stop(None)
auth_data = pickle.loads(response)
self.assertIsNone(auth_data[_ID])
self.assertIsNone(auth_data[_ID_KEY])
self.assertDictEqual(
{
"security_level": [b"TSI_SECURITY_NONE"],
"transport_security_type": [b"insecure"],
},
auth_data[_AUTH_CTX],
)
def testSecureNoCert(self):
handler = grpc.method_handlers_generic_handler(
"test",
{
"UnaryUnary": grpc.unary_unary_rpc_method_handler(
handle_unary_unary
)
},
)
server = test_common.test_server()
server.add_generic_rpc_handlers((handler,))
server_cred = grpc.ssl_server_credentials(_SERVER_CERTS)
port = server.add_secure_port("[::]:0", server_cred)
server.start()
channel_creds = grpc.ssl_channel_credentials(
root_certificates=_TEST_ROOT_CERTIFICATES
)
channel = grpc.secure_channel(
"localhost:{}".format(port),
channel_creds,
options=_PROPERTY_OPTIONS,
)
response = channel.unary_unary(_UNARY_UNARY)(_REQUEST)
channel.close()
server.stop(None)
auth_data = pickle.loads(response)
self.assertIsNone(auth_data[_ID])
self.assertIsNone(auth_data[_ID_KEY])
self.assertDictEqual(
{
"security_level": [b"TSI_PRIVACY_AND_INTEGRITY"],
"transport_security_type": [b"ssl"],
"ssl_session_reused": [b"false"],
},
auth_data[_AUTH_CTX],
)
def testSecureClientCert(self):
handler = grpc.method_handlers_generic_handler(
"test",
{
"UnaryUnary": grpc.unary_unary_rpc_method_handler(
handle_unary_unary
)
},
)
server = test_common.test_server()
server.add_generic_rpc_handlers((handler,))
server_cred = grpc.ssl_server_credentials(
_SERVER_CERTS,
root_certificates=_TEST_ROOT_CERTIFICATES,
require_client_auth=True,
)
port = server.add_secure_port("[::]:0", server_cred)
server.start()
channel_creds = grpc.ssl_channel_credentials(
root_certificates=_TEST_ROOT_CERTIFICATES,
private_key=_PRIVATE_KEY,
certificate_chain=_CERTIFICATE_CHAIN,
)
channel = grpc.secure_channel(
"localhost:{}".format(port),
channel_creds,
options=_PROPERTY_OPTIONS,
)
response = channel.unary_unary(_UNARY_UNARY)(_REQUEST)
channel.close()
server.stop(None)
auth_data = pickle.loads(response)
auth_ctx = auth_data[_AUTH_CTX]
self.assertCountEqual(_CLIENT_IDS, auth_data[_ID])
self.assertEqual("x509_subject_alternative_name", auth_data[_ID_KEY])
self.assertSequenceEqual([b"ssl"], auth_ctx["transport_security_type"])
self.assertSequenceEqual(
[b"*.test.google.com"], auth_ctx["x509_common_name"]
)
def _do_one_shot_client_rpc(
self, channel_creds, channel_options, port, expect_ssl_session_reused
):
channel = grpc.secure_channel(
"localhost:{}".format(port), channel_creds, options=channel_options
)
response = channel.unary_unary(_UNARY_UNARY)(_REQUEST)
auth_data = pickle.loads(response)
self.assertEqual(
expect_ssl_session_reused,
auth_data[_AUTH_CTX]["ssl_session_reused"],
)
channel.close()
def testSessionResumption(self):
# Set up a secure server
handler = grpc.method_handlers_generic_handler(
"test",
{
"UnaryUnary": grpc.unary_unary_rpc_method_handler(
handle_unary_unary
)
},
)
server = test_common.test_server()
server.add_generic_rpc_handlers((handler,))
server_cred = grpc.ssl_server_credentials(_SERVER_CERTS)
port = server.add_secure_port("[::]:0", server_cred)
server.start()
# Create a cache for TLS session tickets
cache = session_cache.ssl_session_cache_lru(1)
channel_creds = grpc.ssl_channel_credentials(
root_certificates=_TEST_ROOT_CERTIFICATES
)
channel_options = _PROPERTY_OPTIONS + (
("grpc.ssl_session_cache", cache),
)
# Initial connection has no session to resume
self._do_one_shot_client_rpc(
channel_creds,
channel_options,
port,
expect_ssl_session_reused=[b"false"],
)
# Subsequent connections resume sessions
self._do_one_shot_client_rpc(
channel_creds,
channel_options,
port,
expect_ssl_session_reused=[b"true"],
)
server.stop(None)
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
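# Illustrative sketch, not part of the test module above: enabling TLS session
# resumption in an application by sharing one session cache across channels
# through the "grpc.ssl_session_cache" channel option, mirroring
# testSessionResumption. The helper name, cache size, and arguments are
# hypothetical; `grpc` and `session_cache` are imported in this module.
def _example_channel_with_session_cache(target, channel_creds):
    cache = session_cache.ssl_session_cache_lru(1024)
    return grpc.secure_channel(
        target,
        channel_creds,
        options=(("grpc.ssl_session_cache", cache),),
    )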
| 7,493
| 30.620253
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_empty_message_test.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import unittest
import grpc
from tests.unit import test_common
from tests.unit.framework.common import test_constants
_REQUEST = b""
_RESPONSE = b""
_UNARY_UNARY = "/test/UnaryUnary"
_UNARY_STREAM = "/test/UnaryStream"
_STREAM_UNARY = "/test/StreamUnary"
_STREAM_STREAM = "/test/StreamStream"
def handle_unary_unary(request, servicer_context):
return _RESPONSE
def handle_unary_stream(request, servicer_context):
for _ in range(test_constants.STREAM_LENGTH):
yield _RESPONSE
def handle_stream_unary(request_iterator, servicer_context):
for request in request_iterator:
pass
return _RESPONSE
def handle_stream_stream(request_iterator, servicer_context):
for request in request_iterator:
yield _RESPONSE
class _MethodHandler(grpc.RpcMethodHandler):
def __init__(self, request_streaming, response_streaming):
self.request_streaming = request_streaming
self.response_streaming = response_streaming
self.request_deserializer = None
self.response_serializer = None
self.unary_unary = None
self.unary_stream = None
self.stream_unary = None
self.stream_stream = None
if self.request_streaming and self.response_streaming:
self.stream_stream = handle_stream_stream
elif self.request_streaming:
self.stream_unary = handle_stream_unary
elif self.response_streaming:
self.unary_stream = handle_unary_stream
else:
self.unary_unary = handle_unary_unary
class _GenericHandler(grpc.GenericRpcHandler):
def service(self, handler_call_details):
if handler_call_details.method == _UNARY_UNARY:
return _MethodHandler(False, False)
elif handler_call_details.method == _UNARY_STREAM:
return _MethodHandler(False, True)
elif handler_call_details.method == _STREAM_UNARY:
return _MethodHandler(True, False)
elif handler_call_details.method == _STREAM_STREAM:
return _MethodHandler(True, True)
else:
return None
class EmptyMessageTest(unittest.TestCase):
def setUp(self):
self._server = test_common.test_server()
self._server.add_generic_rpc_handlers((_GenericHandler(),))
port = self._server.add_insecure_port("[::]:0")
self._server.start()
self._channel = grpc.insecure_channel("localhost:%d" % port)
def tearDown(self):
self._server.stop(0)
self._channel.close()
def testUnaryUnary(self):
response = self._channel.unary_unary(_UNARY_UNARY)(_REQUEST)
self.assertEqual(_RESPONSE, response)
def testUnaryStream(self):
response_iterator = self._channel.unary_stream(_UNARY_STREAM)(_REQUEST)
self.assertSequenceEqual(
[_RESPONSE] * test_constants.STREAM_LENGTH, list(response_iterator)
)
def testStreamUnary(self):
response = self._channel.stream_unary(_STREAM_UNARY)(
iter([_REQUEST] * test_constants.STREAM_LENGTH)
)
self.assertEqual(_RESPONSE, response)
def testStreamStream(self):
response_iterator = self._channel.stream_stream(_STREAM_STREAM)(
iter([_REQUEST] * test_constants.STREAM_LENGTH)
)
self.assertSequenceEqual(
[_RESPONSE] * test_constants.STREAM_LENGTH, list(response_iterator)
)
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
| 4,091
| 31.47619
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_interceptor_test.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test of gRPC Python interceptors."""
import collections
from concurrent import futures
import itertools
import logging
import os
import threading
import unittest
import grpc
from grpc.framework.foundation import logging_pool
from tests.unit import test_common
from tests.unit.framework.common import test_constants
from tests.unit.framework.common import test_control
_SERIALIZE_REQUEST = lambda bytestring: bytestring * 2
_DESERIALIZE_REQUEST = lambda bytestring: bytestring[len(bytestring) // 2 :]
_SERIALIZE_RESPONSE = lambda bytestring: bytestring * 3
_DESERIALIZE_RESPONSE = lambda bytestring: bytestring[: len(bytestring) // 3]
_EXCEPTION_REQUEST = b"\x09\x0a"
_UNARY_UNARY = "/test/UnaryUnary"
_UNARY_STREAM = "/test/UnaryStream"
_STREAM_UNARY = "/test/StreamUnary"
_STREAM_STREAM = "/test/StreamStream"
class _ApplicationErrorStandin(Exception):
pass
class _Callback(object):
def __init__(self):
self._condition = threading.Condition()
self._value = None
self._called = False
def __call__(self, value):
with self._condition:
self._value = value
self._called = True
self._condition.notify_all()
def value(self):
with self._condition:
while not self._called:
self._condition.wait()
return self._value
class _Handler(object):
def __init__(self, control):
self._control = control
def handle_unary_unary(self, request, servicer_context):
self._control.control()
if servicer_context is not None:
servicer_context.set_trailing_metadata(
(
(
"testkey",
"testvalue",
),
)
)
if request == _EXCEPTION_REQUEST:
raise _ApplicationErrorStandin()
return request
def handle_unary_stream(self, request, servicer_context):
if request == _EXCEPTION_REQUEST:
raise _ApplicationErrorStandin()
for _ in range(test_constants.STREAM_LENGTH):
self._control.control()
yield request
self._control.control()
if servicer_context is not None:
servicer_context.set_trailing_metadata(
(
(
"testkey",
"testvalue",
),
)
)
def handle_stream_unary(self, request_iterator, servicer_context):
if servicer_context is not None:
servicer_context.invocation_metadata()
self._control.control()
response_elements = []
for request in request_iterator:
self._control.control()
response_elements.append(request)
self._control.control()
if servicer_context is not None:
servicer_context.set_trailing_metadata(
(
(
"testkey",
"testvalue",
),
)
)
if _EXCEPTION_REQUEST in response_elements:
raise _ApplicationErrorStandin()
return b"".join(response_elements)
def handle_stream_stream(self, request_iterator, servicer_context):
self._control.control()
if servicer_context is not None:
servicer_context.set_trailing_metadata(
(
(
"testkey",
"testvalue",
),
)
)
for request in request_iterator:
if request == _EXCEPTION_REQUEST:
raise _ApplicationErrorStandin()
self._control.control()
yield request
self._control.control()
class _MethodHandler(grpc.RpcMethodHandler):
def __init__(
self,
request_streaming,
response_streaming,
request_deserializer,
response_serializer,
unary_unary,
unary_stream,
stream_unary,
stream_stream,
):
self.request_streaming = request_streaming
self.response_streaming = response_streaming
self.request_deserializer = request_deserializer
self.response_serializer = response_serializer
self.unary_unary = unary_unary
self.unary_stream = unary_stream
self.stream_unary = stream_unary
self.stream_stream = stream_stream
class _GenericHandler(grpc.GenericRpcHandler):
def __init__(self, handler):
self._handler = handler
def service(self, handler_call_details):
if handler_call_details.method == _UNARY_UNARY:
return _MethodHandler(
False,
False,
None,
None,
self._handler.handle_unary_unary,
None,
None,
None,
)
elif handler_call_details.method == _UNARY_STREAM:
return _MethodHandler(
False,
True,
_DESERIALIZE_REQUEST,
_SERIALIZE_RESPONSE,
None,
self._handler.handle_unary_stream,
None,
None,
)
elif handler_call_details.method == _STREAM_UNARY:
return _MethodHandler(
True,
False,
_DESERIALIZE_REQUEST,
_SERIALIZE_RESPONSE,
None,
None,
self._handler.handle_stream_unary,
None,
)
elif handler_call_details.method == _STREAM_STREAM:
return _MethodHandler(
True,
True,
None,
None,
None,
None,
None,
self._handler.handle_stream_stream,
)
else:
return None
def _unary_unary_multi_callable(channel):
return channel.unary_unary(_UNARY_UNARY)
def _unary_stream_multi_callable(channel):
return channel.unary_stream(
_UNARY_STREAM,
request_serializer=_SERIALIZE_REQUEST,
response_deserializer=_DESERIALIZE_RESPONSE,
)
def _stream_unary_multi_callable(channel):
return channel.stream_unary(
_STREAM_UNARY,
request_serializer=_SERIALIZE_REQUEST,
response_deserializer=_DESERIALIZE_RESPONSE,
)
def _stream_stream_multi_callable(channel):
return channel.stream_stream(_STREAM_STREAM)
class _ClientCallDetails(
collections.namedtuple(
"_ClientCallDetails", ("method", "timeout", "metadata", "credentials")
),
grpc.ClientCallDetails,
):
pass
class _GenericClientInterceptor(
grpc.UnaryUnaryClientInterceptor,
grpc.UnaryStreamClientInterceptor,
grpc.StreamUnaryClientInterceptor,
grpc.StreamStreamClientInterceptor,
):
def __init__(self, interceptor_function):
self._fn = interceptor_function
def intercept_unary_unary(self, continuation, client_call_details, request):
new_details, new_request_iterator, postprocess = self._fn(
client_call_details, iter((request,)), False, False
)
response = continuation(new_details, next(new_request_iterator))
return postprocess(response) if postprocess else response
def intercept_unary_stream(
self, continuation, client_call_details, request
):
new_details, new_request_iterator, postprocess = self._fn(
client_call_details, iter((request,)), False, True
)
response_it = continuation(new_details, new_request_iterator)
return postprocess(response_it) if postprocess else response_it
def intercept_stream_unary(
self, continuation, client_call_details, request_iterator
):
new_details, new_request_iterator, postprocess = self._fn(
client_call_details, request_iterator, True, False
)
response = continuation(new_details, next(new_request_iterator))
return postprocess(response) if postprocess else response
def intercept_stream_stream(
self, continuation, client_call_details, request_iterator
):
new_details, new_request_iterator, postprocess = self._fn(
client_call_details, request_iterator, True, True
)
response_it = continuation(new_details, new_request_iterator)
return postprocess(response_it) if postprocess else response_it
class _LoggingInterceptor(
grpc.ServerInterceptor,
grpc.UnaryUnaryClientInterceptor,
grpc.UnaryStreamClientInterceptor,
grpc.StreamUnaryClientInterceptor,
grpc.StreamStreamClientInterceptor,
):
def __init__(self, tag, record):
self.tag = tag
self.record = record
def intercept_service(self, continuation, handler_call_details):
self.record.append(self.tag + ":intercept_service")
return continuation(handler_call_details)
def intercept_unary_unary(self, continuation, client_call_details, request):
self.record.append(self.tag + ":intercept_unary_unary")
result = continuation(client_call_details, request)
assert isinstance(
result, grpc.Call
), "{} ({}) is not an instance of grpc.Call".format(
result, type(result)
)
assert isinstance(
result, grpc.Future
), "{} ({}) is not an instance of grpc.Future".format(
result, type(result)
)
return result
def intercept_unary_stream(
self, continuation, client_call_details, request
):
self.record.append(self.tag + ":intercept_unary_stream")
return continuation(client_call_details, request)
def intercept_stream_unary(
self, continuation, client_call_details, request_iterator
):
self.record.append(self.tag + ":intercept_stream_unary")
result = continuation(client_call_details, request_iterator)
assert isinstance(
result, grpc.Call
), "{} is not an instance of grpc.Call".format(result)
assert isinstance(
result, grpc.Future
), "{} is not an instance of grpc.Future".format(result)
return result
def intercept_stream_stream(
self, continuation, client_call_details, request_iterator
):
self.record.append(self.tag + ":intercept_stream_stream")
return continuation(client_call_details, request_iterator)
class _DefectiveClientInterceptor(grpc.UnaryUnaryClientInterceptor):
def intercept_unary_unary(
self, ignored_continuation, ignored_client_call_details, ignored_request
):
raise test_control.Defect()
def _wrap_request_iterator_stream_interceptor(wrapper):
def intercept_call(
client_call_details,
request_iterator,
request_streaming,
ignored_response_streaming,
):
if request_streaming:
return client_call_details, wrapper(request_iterator), None
else:
return client_call_details, request_iterator, None
return _GenericClientInterceptor(intercept_call)
def _append_request_header_interceptor(header, value):
def intercept_call(
client_call_details,
request_iterator,
ignored_request_streaming,
ignored_response_streaming,
):
metadata = []
if client_call_details.metadata:
metadata = list(client_call_details.metadata)
metadata.append(
(
header,
value,
)
)
client_call_details = _ClientCallDetails(
client_call_details.method,
client_call_details.timeout,
metadata,
client_call_details.credentials,
)
return client_call_details, request_iterator, None
return _GenericClientInterceptor(intercept_call)
class _GenericServerInterceptor(grpc.ServerInterceptor):
def __init__(self, fn):
self._fn = fn
def intercept_service(self, continuation, handler_call_details):
return self._fn(continuation, handler_call_details)
def _filter_server_interceptor(condition, interceptor):
def intercept_service(continuation, handler_call_details):
if condition(handler_call_details):
return interceptor.intercept_service(
continuation, handler_call_details
)
return continuation(handler_call_details)
return _GenericServerInterceptor(intercept_service)
class InterceptorTest(unittest.TestCase):
def setUp(self):
self._control = test_control.PauseFailControl()
self._handler = _Handler(self._control)
self._server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
self._record = []
conditional_interceptor = _filter_server_interceptor(
lambda x: ("secret", "42") in x.invocation_metadata,
_LoggingInterceptor("s3", self._record),
)
self._server = grpc.server(
self._server_pool,
options=(("grpc.so_reuseport", 0),),
interceptors=(
_LoggingInterceptor("s1", self._record),
conditional_interceptor,
_LoggingInterceptor("s2", self._record),
),
)
port = self._server.add_insecure_port("[::]:0")
self._server.add_generic_rpc_handlers((_GenericHandler(self._handler),))
self._server.start()
self._channel = grpc.insecure_channel("localhost:%d" % port)
def tearDown(self):
self._server.stop(None)
self._server_pool.shutdown(wait=True)
self._channel.close()
def testTripleRequestMessagesClientInterceptor(self):
def triple(request_iterator):
while True:
try:
item = next(request_iterator)
yield item
yield item
yield item
except StopIteration:
break
interceptor = _wrap_request_iterator_stream_interceptor(triple)
channel = grpc.intercept_channel(self._channel, interceptor)
requests = tuple(
b"\x07\x08" for _ in range(test_constants.STREAM_LENGTH)
)
multi_callable = _stream_stream_multi_callable(channel)
response_iterator = multi_callable(
iter(requests),
metadata=(
(
"test",
"InterceptedStreamRequestBlockingUnaryResponseWithCall",
),
),
)
responses = tuple(response_iterator)
self.assertEqual(len(responses), 3 * test_constants.STREAM_LENGTH)
multi_callable = _stream_stream_multi_callable(self._channel)
response_iterator = multi_callable(
iter(requests),
metadata=(
(
"test",
"InterceptedStreamRequestBlockingUnaryResponseWithCall",
),
),
)
responses = tuple(response_iterator)
self.assertEqual(len(responses), test_constants.STREAM_LENGTH)
def testDefectiveClientInterceptor(self):
interceptor = _DefectiveClientInterceptor()
defective_channel = grpc.intercept_channel(self._channel, interceptor)
request = b"\x07\x08"
multi_callable = _unary_unary_multi_callable(defective_channel)
call_future = multi_callable.future(
request,
metadata=(
("test", "InterceptedUnaryRequestBlockingUnaryResponse"),
),
)
self.assertIsNotNone(call_future.exception())
self.assertEqual(call_future.code(), grpc.StatusCode.INTERNAL)
def testInterceptedHeaderManipulationWithServerSideVerification(self):
request = b"\x07\x08"
channel = grpc.intercept_channel(
self._channel, _append_request_header_interceptor("secret", "42")
)
channel = grpc.intercept_channel(
channel,
_LoggingInterceptor("c1", self._record),
_LoggingInterceptor("c2", self._record),
)
self._record[:] = []
multi_callable = _unary_unary_multi_callable(channel)
multi_callable.with_call(
request,
metadata=(
(
"test",
"InterceptedUnaryRequestBlockingUnaryResponseWithCall",
),
),
)
self.assertSequenceEqual(
self._record,
[
"c1:intercept_unary_unary",
"c2:intercept_unary_unary",
"s1:intercept_service",
"s3:intercept_service",
"s2:intercept_service",
],
)
def testInterceptedUnaryRequestBlockingUnaryResponse(self):
request = b"\x07\x08"
self._record[:] = []
channel = grpc.intercept_channel(
self._channel,
_LoggingInterceptor("c1", self._record),
_LoggingInterceptor("c2", self._record),
)
multi_callable = _unary_unary_multi_callable(channel)
multi_callable(
request,
metadata=(
("test", "InterceptedUnaryRequestBlockingUnaryResponse"),
),
)
self.assertSequenceEqual(
self._record,
[
"c1:intercept_unary_unary",
"c2:intercept_unary_unary",
"s1:intercept_service",
"s2:intercept_service",
],
)
def testInterceptedUnaryRequestBlockingUnaryResponseWithError(self):
request = _EXCEPTION_REQUEST
self._record[:] = []
channel = grpc.intercept_channel(
self._channel,
_LoggingInterceptor("c1", self._record),
_LoggingInterceptor("c2", self._record),
)
multi_callable = _unary_unary_multi_callable(channel)
with self.assertRaises(grpc.RpcError) as exception_context:
multi_callable(
request,
metadata=(
("test", "InterceptedUnaryRequestBlockingUnaryResponse"),
),
)
exception = exception_context.exception
self.assertFalse(exception.cancelled())
self.assertFalse(exception.running())
self.assertTrue(exception.done())
with self.assertRaises(grpc.RpcError):
exception.result()
self.assertIsInstance(exception.exception(), grpc.RpcError)
def testInterceptedUnaryRequestBlockingUnaryResponseWithCall(self):
request = b"\x07\x08"
channel = grpc.intercept_channel(
self._channel,
_LoggingInterceptor("c1", self._record),
_LoggingInterceptor("c2", self._record),
)
self._record[:] = []
multi_callable = _unary_unary_multi_callable(channel)
multi_callable.with_call(
request,
metadata=(
(
"test",
"InterceptedUnaryRequestBlockingUnaryResponseWithCall",
),
),
)
self.assertSequenceEqual(
self._record,
[
"c1:intercept_unary_unary",
"c2:intercept_unary_unary",
"s1:intercept_service",
"s2:intercept_service",
],
)
def testInterceptedUnaryRequestFutureUnaryResponse(self):
request = b"\x07\x08"
self._record[:] = []
channel = grpc.intercept_channel(
self._channel,
_LoggingInterceptor("c1", self._record),
_LoggingInterceptor("c2", self._record),
)
multi_callable = _unary_unary_multi_callable(channel)
response_future = multi_callable.future(
request,
metadata=(("test", "InterceptedUnaryRequestFutureUnaryResponse"),),
)
response_future.result()
self.assertSequenceEqual(
self._record,
[
"c1:intercept_unary_unary",
"c2:intercept_unary_unary",
"s1:intercept_service",
"s2:intercept_service",
],
)
def testInterceptedUnaryRequestStreamResponse(self):
request = b"\x37\x58"
self._record[:] = []
channel = grpc.intercept_channel(
self._channel,
_LoggingInterceptor("c1", self._record),
_LoggingInterceptor("c2", self._record),
)
multi_callable = _unary_stream_multi_callable(channel)
response_iterator = multi_callable(
request,
metadata=(("test", "InterceptedUnaryRequestStreamResponse"),),
)
tuple(response_iterator)
self.assertSequenceEqual(
self._record,
[
"c1:intercept_unary_stream",
"c2:intercept_unary_stream",
"s1:intercept_service",
"s2:intercept_service",
],
)
def testInterceptedUnaryRequestStreamResponseWithError(self):
request = _EXCEPTION_REQUEST
self._record[:] = []
channel = grpc.intercept_channel(
self._channel,
_LoggingInterceptor("c1", self._record),
_LoggingInterceptor("c2", self._record),
)
multi_callable = _unary_stream_multi_callable(channel)
response_iterator = multi_callable(
request,
metadata=(("test", "InterceptedUnaryRequestStreamResponse"),),
)
with self.assertRaises(grpc.RpcError) as exception_context:
tuple(response_iterator)
exception = exception_context.exception
self.assertFalse(exception.cancelled())
self.assertFalse(exception.running())
self.assertTrue(exception.done())
with self.assertRaises(grpc.RpcError):
exception.result()
self.assertIsInstance(exception.exception(), grpc.RpcError)
def testInterceptedStreamRequestBlockingUnaryResponse(self):
requests = tuple(
b"\x07\x08" for _ in range(test_constants.STREAM_LENGTH)
)
request_iterator = iter(requests)
self._record[:] = []
channel = grpc.intercept_channel(
self._channel,
_LoggingInterceptor("c1", self._record),
_LoggingInterceptor("c2", self._record),
)
multi_callable = _stream_unary_multi_callable(channel)
multi_callable(
request_iterator,
metadata=(
("test", "InterceptedStreamRequestBlockingUnaryResponse"),
),
)
self.assertSequenceEqual(
self._record,
[
"c1:intercept_stream_unary",
"c2:intercept_stream_unary",
"s1:intercept_service",
"s2:intercept_service",
],
)
def testInterceptedStreamRequestBlockingUnaryResponseWithCall(self):
requests = tuple(
b"\x07\x08" for _ in range(test_constants.STREAM_LENGTH)
)
request_iterator = iter(requests)
self._record[:] = []
channel = grpc.intercept_channel(
self._channel,
_LoggingInterceptor("c1", self._record),
_LoggingInterceptor("c2", self._record),
)
multi_callable = _stream_unary_multi_callable(channel)
multi_callable.with_call(
request_iterator,
metadata=(
(
"test",
"InterceptedStreamRequestBlockingUnaryResponseWithCall",
),
),
)
self.assertSequenceEqual(
self._record,
[
"c1:intercept_stream_unary",
"c2:intercept_stream_unary",
"s1:intercept_service",
"s2:intercept_service",
],
)
def testInterceptedStreamRequestFutureUnaryResponse(self):
requests = tuple(
b"\x07\x08" for _ in range(test_constants.STREAM_LENGTH)
)
request_iterator = iter(requests)
self._record[:] = []
channel = grpc.intercept_channel(
self._channel,
_LoggingInterceptor("c1", self._record),
_LoggingInterceptor("c2", self._record),
)
multi_callable = _stream_unary_multi_callable(channel)
response_future = multi_callable.future(
request_iterator,
metadata=(("test", "InterceptedStreamRequestFutureUnaryResponse"),),
)
response_future.result()
self.assertSequenceEqual(
self._record,
[
"c1:intercept_stream_unary",
"c2:intercept_stream_unary",
"s1:intercept_service",
"s2:intercept_service",
],
)
def testInterceptedStreamRequestFutureUnaryResponseWithError(self):
requests = tuple(
_EXCEPTION_REQUEST for _ in range(test_constants.STREAM_LENGTH)
)
request_iterator = iter(requests)
self._record[:] = []
channel = grpc.intercept_channel(
self._channel,
_LoggingInterceptor("c1", self._record),
_LoggingInterceptor("c2", self._record),
)
multi_callable = _stream_unary_multi_callable(channel)
response_future = multi_callable.future(
request_iterator,
metadata=(("test", "InterceptedStreamRequestFutureUnaryResponse"),),
)
with self.assertRaises(grpc.RpcError) as exception_context:
response_future.result()
exception = exception_context.exception
self.assertFalse(exception.cancelled())
self.assertFalse(exception.running())
self.assertTrue(exception.done())
with self.assertRaises(grpc.RpcError):
exception.result()
self.assertIsInstance(exception.exception(), grpc.RpcError)
def testInterceptedStreamRequestStreamResponse(self):
requests = tuple(
b"\x77\x58" for _ in range(test_constants.STREAM_LENGTH)
)
request_iterator = iter(requests)
self._record[:] = []
channel = grpc.intercept_channel(
self._channel,
_LoggingInterceptor("c1", self._record),
_LoggingInterceptor("c2", self._record),
)
multi_callable = _stream_stream_multi_callable(channel)
response_iterator = multi_callable(
request_iterator,
metadata=(("test", "InterceptedStreamRequestStreamResponse"),),
)
tuple(response_iterator)
self.assertSequenceEqual(
self._record,
[
"c1:intercept_stream_stream",
"c2:intercept_stream_stream",
"s1:intercept_service",
"s2:intercept_service",
],
)
def testInterceptedStreamRequestStreamResponseWithError(self):
requests = tuple(
_EXCEPTION_REQUEST for _ in range(test_constants.STREAM_LENGTH)
)
request_iterator = iter(requests)
self._record[:] = []
channel = grpc.intercept_channel(
self._channel,
_LoggingInterceptor("c1", self._record),
_LoggingInterceptor("c2", self._record),
)
multi_callable = _stream_stream_multi_callable(channel)
response_iterator = multi_callable(
request_iterator,
metadata=(("test", "InterceptedStreamRequestStreamResponse"),),
)
with self.assertRaises(grpc.RpcError) as exception_context:
tuple(response_iterator)
exception = exception_context.exception
self.assertFalse(exception.cancelled())
self.assertFalse(exception.running())
self.assertTrue(exception.done())
with self.assertRaises(grpc.RpcError):
exception.result()
self.assertIsInstance(exception.exception(), grpc.RpcError)
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
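# Illustrative sketch, not part of the test module above: a minimal client
# interceptor wired onto a channel with grpc.intercept_channel, the same way
# the tests above build their intercepted channels. The class and helper
# names are hypothetical.
class _ExampleLoggingUnaryUnaryInterceptor(grpc.UnaryUnaryClientInterceptor):
    def intercept_unary_unary(self, continuation, client_call_details, request):
        # Record the method being invoked, then delegate to the next handler.
        logging.debug("invoking %s", client_call_details.method)
        return continuation(client_call_details, request)
def _example_intercepted_channel(channel):
    return grpc.intercept_channel(
        channel, _ExampleLoggingUnaryUnaryInterceptor()
    )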
| 29,024
| 31.178492
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_exit_test.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests clean exit of server/client on Python Interpreter exit/sigint.
The tests in this module spawn a subprocess for each test case, the
test is considered successful if it doesn't freeze/timeout.
"""
import atexit
import datetime
import logging
import os
import signal
import subprocess
import sys
import threading
import time
import unittest
from tests.unit import _exit_scenarios
SCENARIO_FILE = os.path.abspath(
os.path.join(
os.path.dirname(os.path.realpath(__file__)), "_exit_scenarios.py"
)
)
INTERPRETER = sys.executable
BASE_COMMAND = [INTERPRETER, SCENARIO_FILE]
BASE_SIGTERM_COMMAND = BASE_COMMAND + ["--wait_for_interrupt"]
INIT_TIME = datetime.timedelta(seconds=1)
WAIT_CHECK_INTERVAL = datetime.timedelta(milliseconds=100)
WAIT_CHECK_DEFAULT_TIMEOUT = datetime.timedelta(seconds=5)
processes = []
process_lock = threading.Lock()
# Make sure we attempt to clean up any
# processes we may have left running
def cleanup_processes():
with process_lock:
for process in processes:
try:
process.kill()
except Exception: # pylint: disable=broad-except
pass
atexit.register(cleanup_processes)
def _process_wait_with_timeout(process, timeout=WAIT_CHECK_DEFAULT_TIMEOUT):
"""A funciton to mimic 3.3+ only timeout argument in process.wait."""
deadline = datetime.datetime.now() + timeout
while (process.poll() is None) and (datetime.datetime.now() < deadline):
time.sleep(WAIT_CHECK_INTERVAL.total_seconds())
if process.returncode is None:
raise RuntimeError("Process failed to exit within %s" % timeout)
def interrupt_and_wait(process):
with process_lock:
processes.append(process)
time.sleep(INIT_TIME.total_seconds())
os.kill(process.pid, signal.SIGINT)
_process_wait_with_timeout(process)
def wait(process):
with process_lock:
processes.append(process)
_process_wait_with_timeout(process)
# TODO(lidiz) enable exit tests once the root cause is found.
@unittest.skip("https://github.com/grpc/grpc/issues/23982")
@unittest.skip("https://github.com/grpc/grpc/issues/23028")
class ExitTest(unittest.TestCase):
def test_unstarted_server(self):
process = subprocess.Popen(
BASE_COMMAND + [_exit_scenarios.UNSTARTED_SERVER],
stdout=sys.stdout,
stderr=sys.stderr,
)
wait(process)
def test_unstarted_server_terminate(self):
process = subprocess.Popen(
BASE_SIGTERM_COMMAND + [_exit_scenarios.UNSTARTED_SERVER],
stdout=sys.stdout,
)
interrupt_and_wait(process)
def test_running_server(self):
process = subprocess.Popen(
BASE_COMMAND + [_exit_scenarios.RUNNING_SERVER],
stdout=sys.stdout,
stderr=sys.stderr,
)
wait(process)
def test_running_server_terminate(self):
process = subprocess.Popen(
BASE_SIGTERM_COMMAND + [_exit_scenarios.RUNNING_SERVER],
stdout=sys.stdout,
stderr=sys.stderr,
)
interrupt_and_wait(process)
def test_poll_connectivity_no_server(self):
process = subprocess.Popen(
BASE_COMMAND + [_exit_scenarios.POLL_CONNECTIVITY_NO_SERVER],
stdout=sys.stdout,
stderr=sys.stderr,
)
wait(process)
def test_poll_connectivity_no_server_terminate(self):
process = subprocess.Popen(
BASE_SIGTERM_COMMAND
+ [_exit_scenarios.POLL_CONNECTIVITY_NO_SERVER],
stdout=sys.stdout,
stderr=sys.stderr,
)
interrupt_and_wait(process)
def test_poll_connectivity(self):
process = subprocess.Popen(
BASE_COMMAND + [_exit_scenarios.POLL_CONNECTIVITY],
stdout=sys.stdout,
stderr=sys.stderr,
)
wait(process)
def test_poll_connectivity_terminate(self):
process = subprocess.Popen(
BASE_SIGTERM_COMMAND + [_exit_scenarios.POLL_CONNECTIVITY],
stdout=sys.stdout,
stderr=sys.stderr,
)
interrupt_and_wait(process)
@unittest.skipIf(
os.name == "nt", "os.kill does not have required permission on Windows"
)
def test_in_flight_unary_unary_call(self):
process = subprocess.Popen(
BASE_COMMAND + [_exit_scenarios.IN_FLIGHT_UNARY_UNARY_CALL],
stdout=sys.stdout,
stderr=sys.stderr,
)
interrupt_and_wait(process)
@unittest.skipIf(
os.name == "nt", "os.kill does not have required permission on Windows"
)
def test_in_flight_unary_stream_call(self):
process = subprocess.Popen(
BASE_COMMAND + [_exit_scenarios.IN_FLIGHT_UNARY_STREAM_CALL],
stdout=sys.stdout,
stderr=sys.stderr,
)
interrupt_and_wait(process)
@unittest.skipIf(
os.name == "nt", "os.kill does not have required permission on Windows"
)
def test_in_flight_stream_unary_call(self):
process = subprocess.Popen(
BASE_COMMAND + [_exit_scenarios.IN_FLIGHT_STREAM_UNARY_CALL],
stdout=sys.stdout,
stderr=sys.stderr,
)
interrupt_and_wait(process)
@unittest.skipIf(
os.name == "nt", "os.kill does not have required permission on Windows"
)
def test_in_flight_stream_stream_call(self):
process = subprocess.Popen(
BASE_COMMAND + [_exit_scenarios.IN_FLIGHT_STREAM_STREAM_CALL],
stdout=sys.stdout,
stderr=sys.stderr,
)
interrupt_and_wait(process)
@unittest.skipIf(
os.name == "nt", "os.kill does not have required permission on Windows"
)
def test_in_flight_partial_unary_stream_call(self):
process = subprocess.Popen(
BASE_COMMAND
+ [_exit_scenarios.IN_FLIGHT_PARTIAL_UNARY_STREAM_CALL],
stdout=sys.stdout,
stderr=sys.stderr,
)
interrupt_and_wait(process)
@unittest.skipIf(
os.name == "nt", "os.kill does not have required permission on Windows"
)
def test_in_flight_partial_stream_unary_call(self):
process = subprocess.Popen(
BASE_COMMAND
+ [_exit_scenarios.IN_FLIGHT_PARTIAL_STREAM_UNARY_CALL],
stdout=sys.stdout,
stderr=sys.stderr,
)
interrupt_and_wait(process)
@unittest.skipIf(
os.name == "nt", "os.kill does not have required permission on Windows"
)
def test_in_flight_partial_stream_stream_call(self):
process = subprocess.Popen(
BASE_COMMAND
+ [_exit_scenarios.IN_FLIGHT_PARTIAL_STREAM_STREAM_CALL],
stdout=sys.stdout,
stderr=sys.stderr,
)
interrupt_and_wait(process)
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)
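# Illustrative note, not part of the test module above: on Python 3.3+ the
# polling loop in _process_wait_with_timeout can be replaced by the built-in
# timeout support of subprocess.Popen.wait, sketched here with a hypothetical
# helper name.
def _example_wait_with_builtin_timeout(process, timeout_seconds=5):
    try:
        process.wait(timeout=timeout_seconds)
    except subprocess.TimeoutExpired:
        raise RuntimeError(
            "Process failed to exit within %s seconds" % timeout_seconds
        )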
| 7,576
| 30.702929
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_server_shutdown_test.py
|
# Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests clean shutdown of server on various interpreter exit conditions.
The tests in this module spawn a subprocess for each test case; a test
is considered successful if the subprocess doesn't freeze or time out.
"""
import atexit
import logging
import os
import subprocess
import sys
import threading
import unittest
from tests.unit import _server_shutdown_scenarios
SCENARIO_FILE = os.path.abspath(
os.path.join(
os.path.dirname(os.path.realpath(__file__)),
"_server_shutdown_scenarios.py",
)
)
INTERPRETER = sys.executable
BASE_COMMAND = [INTERPRETER, SCENARIO_FILE]
processes = []
process_lock = threading.Lock()
# Make sure we attempt to clean up any
# processes we may have left running
def cleanup_processes():
with process_lock:
for process in processes:
try:
process.kill()
except Exception: # pylint: disable=broad-except
pass
atexit.register(cleanup_processes)
def wait(process):
with process_lock:
processes.append(process)
process.wait()
class ServerShutdown(unittest.TestCase):
# Currently we shut down a server (if possible) after the Python server
# instance is garbage collected. This behavior may change in the future.
def test_deallocated_server_stops(self):
process = subprocess.Popen(
BASE_COMMAND + [_server_shutdown_scenarios.SERVER_DEALLOCATED],
stdout=sys.stdout,
stderr=sys.stderr,
)
wait(process)
def test_server_exception_exits(self):
process = subprocess.Popen(
BASE_COMMAND + [_server_shutdown_scenarios.SERVER_RAISES_EXCEPTION],
stdout=sys.stdout,
stderr=sys.stderr,
)
wait(process)
@unittest.skipIf(os.name == "nt", "fork not supported on windows")
def test_server_fork_can_exit(self):
process = subprocess.Popen(
BASE_COMMAND + [_server_shutdown_scenarios.SERVER_FORK_CAN_EXIT],
stdout=sys.stdout,
stderr=sys.stderr,
)
wait(process)
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
| 2,746
| 27.915789
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_contextvars_propagation_test.py
|
# Copyright 2020 The gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test of propagation of contextvars to AuthMetadataPlugin threads.."""
import contextlib
import logging
import os
import queue
import sys
import threading
import unittest
import grpc
from tests.unit import test_common
_UNARY_UNARY = "/test/UnaryUnary"
_REQUEST = b"0000"
def _unary_unary_handler(request, context):
return request
def contextvars_supported():
try:
import contextvars
return True
except ImportError:
return False
class _GenericHandler(grpc.GenericRpcHandler):
def service(self, handler_call_details):
if handler_call_details.method == _UNARY_UNARY:
return grpc.unary_unary_rpc_method_handler(_unary_unary_handler)
else:
raise NotImplementedError()
@contextlib.contextmanager
def _server():
try:
server = test_common.test_server()
target = "localhost:0"
port = server.add_insecure_port(target)
server.add_generic_rpc_handlers((_GenericHandler(),))
server.start()
yield port
finally:
server.stop(None)
if contextvars_supported():
import contextvars
_EXPECTED_VALUE = 24601
test_var = contextvars.ContextVar("test_var", default=None)
def set_up_expected_context():
test_var.set(_EXPECTED_VALUE)
class TestCallCredentials(grpc.AuthMetadataPlugin):
def __call__(self, context, callback):
if (
test_var.get() != _EXPECTED_VALUE
and not test_common.running_under_gevent()
):
# contextvars do not work under gevent, but the rest of this
# test is still valuable as a test of concurrent runs of the
# metadata credentials code path.
raise AssertionError(
"{} != {}".format(test_var.get(), _EXPECTED_VALUE)
)
callback((), None)
def assert_called(self, test):
test.assertTrue(self._invoked)
test.assertEqual(_EXPECTED_VALUE, self._recorded_value)
else:
def set_up_expected_context():
pass
class TestCallCredentials(grpc.AuthMetadataPlugin):
def __call__(self, context, callback):
callback((), None)
# TODO(https://github.com/grpc/grpc/issues/22257)
@unittest.skipIf(os.name == "nt", "LocalCredentials not supported on Windows.")
class ContextVarsPropagationTest(unittest.TestCase):
def test_propagation_to_auth_plugin(self):
set_up_expected_context()
with _server() as port:
target = "localhost:{}".format(port)
local_credentials = grpc.local_channel_credentials()
test_call_credentials = TestCallCredentials()
call_credentials = grpc.metadata_call_credentials(
test_call_credentials, "test call credentials"
)
composite_credentials = grpc.composite_channel_credentials(
local_credentials, call_credentials
)
with grpc.secure_channel(target, composite_credentials) as channel:
stub = channel.unary_unary(_UNARY_UNARY)
response = stub(_REQUEST, wait_for_ready=True)
self.assertEqual(_REQUEST, response)
def test_concurrent_propagation(self):
_THREAD_COUNT = 32
_RPC_COUNT = 32
set_up_expected_context()
with _server() as port:
target = "localhost:{}".format(port)
local_credentials = grpc.local_channel_credentials()
test_call_credentials = TestCallCredentials()
call_credentials = grpc.metadata_call_credentials(
test_call_credentials, "test call credentials"
)
composite_credentials = grpc.composite_channel_credentials(
local_credentials, call_credentials
)
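            # All threads rendezvous on the wait group after creating their
            # channels so that the RPCs below run concurrently.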
wait_group = test_common.WaitGroup(_THREAD_COUNT)
def _run_on_thread(exception_queue):
try:
with grpc.secure_channel(
target, composite_credentials
) as channel:
stub = channel.unary_unary(_UNARY_UNARY)
wait_group.done()
wait_group.wait()
for i in range(_RPC_COUNT):
response = stub(_REQUEST, wait_for_ready=True)
self.assertEqual(_REQUEST, response)
except Exception as e: # pylint: disable=broad-except
exception_queue.put(e)
threads = []
for _ in range(_THREAD_COUNT):
q = queue.Queue()
thread = threading.Thread(target=_run_on_thread, args=(q,))
                thread.daemon = True
thread.start()
threads.append((thread, q))
for thread, q in threads:
thread.join()
if not q.empty():
raise q.get()
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
| 5,676
| 32.005814
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_grpc_shutdown_test.py
|
# Copyright 2019 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests the gRPC Core shutdown path."""
import datetime
import threading
import time
import unittest
import grpc
_TIMEOUT_FOR_SEGFAULT = datetime.timedelta(seconds=10)
class GrpcShutdownTest(unittest.TestCase):
def test_channel_close_with_connectivity_watcher(self):
"""Originated by https://github.com/grpc/grpc/issues/20299.
The grpc_shutdown happens synchronously, but there might be Core object
references left in Cython which might lead to ABORT or SIGSEGV.
"""
connection_failed = threading.Event()
def on_state_change(state):
if state in (
grpc.ChannelConnectivity.TRANSIENT_FAILURE,
grpc.ChannelConnectivity.SHUTDOWN,
):
connection_failed.set()
        # Connects to a void address and subscribes to state changes.
channel = grpc.insecure_channel("0.1.1.1:12345")
channel.subscribe(on_state_change, True)
deadline = datetime.datetime.now() + _TIMEOUT_FOR_SEGFAULT
while datetime.datetime.now() < deadline:
time.sleep(0.1)
if connection_failed.is_set():
channel.close()
if __name__ == "__main__":
unittest.main(verbosity=2)
| 1,822
| 31.553571
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_reconnect_test.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests that a channel will reconnect if a connection is dropped"""
import logging
import socket
import time
import unittest
import grpc
from grpc.framework.foundation import logging_pool
from tests.unit.framework.common import bound_socket
from tests.unit.framework.common import test_constants
_REQUEST = b"\x00\x00\x00"
_RESPONSE = b"\x00\x00\x01"
_UNARY_UNARY = "/test/UnaryUnary"
def _handle_unary_unary(unused_request, unused_servicer_context):
return _RESPONSE
class ReconnectTest(unittest.TestCase):
def test_reconnect(self):
server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
handler = grpc.method_handlers_generic_handler(
"test",
{
"UnaryUnary": grpc.unary_unary_rpc_method_handler(
_handle_unary_unary
)
},
)
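        # SO_REUSEPORT allows the restarted server below to bind the exact
        # same address as the original server.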
options = (("grpc.so_reuseport", 1),)
with bound_socket() as (host, port):
addr = "{}:{}".format(host, port)
server = grpc.server(server_pool, (handler,), options=options)
server.add_insecure_port(addr)
server.start()
channel = grpc.insecure_channel(addr)
multi_callable = channel.unary_unary(_UNARY_UNARY)
self.assertEqual(_RESPONSE, multi_callable(_REQUEST))
server.stop(None)
            # By default, the channel connectivity is checked every 5s.
# GRPC_CLIENT_CHANNEL_BACKUP_POLL_INTERVAL_MS can be set to change
# this.
time.sleep(5.1)
server = grpc.server(server_pool, (handler,), options=options)
server.add_insecure_port(addr)
server.start()
self.assertEqual(_RESPONSE, multi_callable(_REQUEST))
server.stop(None)
channel.close()
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
| 2,432
| 32.328767
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_server_test.py
|
# Copyright 2018 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from concurrent import futures
import logging
import unittest
import grpc
from tests.unit import resources
class _ActualGenericRpcHandler(grpc.GenericRpcHandler):
def service(self, handler_call_details):
return None
class ServerTest(unittest.TestCase):
def test_not_a_generic_rpc_handler_at_construction(self):
with self.assertRaises(AttributeError) as exception_context:
grpc.server(
futures.ThreadPoolExecutor(max_workers=5),
handlers=[
_ActualGenericRpcHandler(),
object(),
],
)
self.assertIn(
"grpc.GenericRpcHandler", str(exception_context.exception)
)
def test_not_a_generic_rpc_handler_after_construction(self):
server = grpc.server(futures.ThreadPoolExecutor(max_workers=5))
with self.assertRaises(AttributeError) as exception_context:
server.add_generic_rpc_handlers(
[
_ActualGenericRpcHandler(),
object(),
]
)
self.assertIn(
"grpc.GenericRpcHandler", str(exception_context.exception)
)
def test_failed_port_binding_exception(self):
server = grpc.server(None, options=(("grpc.so_reuseport", 0),))
port = server.add_insecure_port("localhost:0")
bind_address = "localhost:%d" % port
with self.assertRaises(RuntimeError):
server.add_insecure_port(bind_address)
server_credentials = grpc.ssl_server_credentials(
[(resources.private_key(), resources.certificate_chain())]
)
with self.assertRaises(RuntimeError):
server.add_secure_port(bind_address, server_credentials)
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
| 2,456
| 32.202703
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_server_wait_for_termination_test.py
|
# Copyright 2019 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division
from concurrent import futures
import datetime
import threading
import time
import unittest
import grpc
from tests.unit.framework.common import test_constants
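# Interval the tests sleep to let the helper thread block inside
# wait_for_termination() before unblocking it.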
_WAIT_FOR_BLOCKING = datetime.timedelta(seconds=1)
def _block_on_waiting(server, termination_event, timeout=None):
server.start()
server.wait_for_termination(timeout=timeout)
termination_event.set()
class ServerWaitForTerminationTest(unittest.TestCase):
def test_unblock_by_invoking_stop(self):
termination_event = threading.Event()
server = grpc.server(futures.ThreadPoolExecutor())
wait_thread = threading.Thread(
target=_block_on_waiting,
args=(
server,
termination_event,
),
)
wait_thread.daemon = True
wait_thread.start()
time.sleep(_WAIT_FOR_BLOCKING.total_seconds())
server.stop(None)
termination_event.wait(timeout=test_constants.SHORT_TIMEOUT)
self.assertTrue(termination_event.is_set())
def test_unblock_by_del(self):
termination_event = threading.Event()
server = grpc.server(futures.ThreadPoolExecutor())
wait_thread = threading.Thread(
target=_block_on_waiting,
args=(
server,
termination_event,
),
)
wait_thread.daemon = True
wait_thread.start()
time.sleep(_WAIT_FOR_BLOCKING.total_seconds())
        # Invoke __del__ manually here; in Python 2 it would otherwise be
        # invoked by the GC at some unspecified time.
server.__del__()
termination_event.wait(timeout=test_constants.SHORT_TIMEOUT)
self.assertTrue(termination_event.is_set())
def test_unblock_by_timeout(self):
termination_event = threading.Event()
server = grpc.server(futures.ThreadPoolExecutor())
wait_thread = threading.Thread(
target=_block_on_waiting,
args=(
server,
termination_event,
test_constants.SHORT_TIMEOUT / 2,
),
)
wait_thread.daemon = True
wait_thread.start()
termination_event.wait(timeout=test_constants.SHORT_TIMEOUT)
self.assertTrue(termination_event.is_set())
if __name__ == "__main__":
unittest.main(verbosity=2)
| 2,930
| 29.216495
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_dynamic_stubs_test.py
|
# Copyright 2019 The gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test of dynamic stub import API."""
import contextlib
import functools
import logging
import multiprocessing
import os
import sys
import unittest
from tests.unit import test_common
_DATA_DIR = os.path.join("tests", "unit", "data")
@contextlib.contextmanager
def _grpc_tools_unimportable():
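    # Temporarily strip grpcio_tools entries from sys.path; skip the test if
    # grpc_tools is still importable (e.g. installed outside those paths).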
original_sys_path = sys.path
sys.path = [path for path in sys.path if "grpcio_tools" not in path]
try:
import grpc_tools
except ImportError:
pass
else:
del grpc_tools
sys.path = original_sys_path
raise unittest.SkipTest("Failed to make grpc_tools unimportable.")
try:
yield
finally:
sys.path = original_sys_path
def _collect_errors(fn):
@functools.wraps(fn)
def _wrapped(error_queue):
try:
fn()
except Exception as e:
error_queue.put(e)
raise
return _wrapped
def _python3_check(fn):
@functools.wraps(fn)
def _wrapped():
if sys.version_info[0] == 3:
fn()
else:
_assert_unimplemented("Python 3")
return _wrapped
def _run_in_subprocess(test_case):
sys.path.insert(
0, os.path.join(os.path.realpath(os.path.dirname(__file__)), "..")
)
error_queue = multiprocessing.Queue()
proc = multiprocessing.Process(target=test_case, args=(error_queue,))
proc.start()
proc.join()
sys.path.pop(0)
if not error_queue.empty():
raise error_queue.get()
assert proc.exitcode == 0, "Process exited with code {}".format(
proc.exitcode
)
def _assert_unimplemented(msg_substr):
import grpc
try:
protos, services = grpc.protos_and_services(
"tests/unit/data/foo/bar.proto"
)
except NotImplementedError as e:
assert msg_substr in str(e), "{} was not in '{}'".format(
msg_substr, str(e)
)
else:
assert False, "Did not raise NotImplementedError"
@_collect_errors
@_python3_check
def _test_sunny_day():
import grpc
protos, services = grpc.protos_and_services(
os.path.join(_DATA_DIR, "foo", "bar.proto")
)
assert protos.BarMessage is not None
assert services.BarStub is not None
@_collect_errors
@_python3_check
def _test_well_known_types():
import grpc
protos, services = grpc.protos_and_services(
os.path.join(_DATA_DIR, "foo", "bar_with_wkt.proto")
)
assert protos.BarMessage is not None
assert services.BarStub is not None
@_collect_errors
@_python3_check
def _test_grpc_tools_unimportable():
with _grpc_tools_unimportable():
_assert_unimplemented("grpcio-tools")
# NOTE(rbellevi): multiprocessing.Process fails to pickle function objects
# when they do not come from the "__main__" module, so this test passes
# if run directly on Windows, but not if started by the test runner.
@unittest.skipIf(os.name == "nt", "Windows multiprocessing unsupported")
class DynamicStubTest(unittest.TestCase):
def test_sunny_day(self):
_run_in_subprocess(_test_sunny_day)
def test_well_known_types(self):
_run_in_subprocess(_test_well_known_types)
def test_grpc_tools_unimportable(self):
_run_in_subprocess(_test_grpc_tools_unimportable)
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
| 3,932
| 25.22
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_from_grpc_import_star.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
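# Snapshot the module globals before and after the wildcard import so that
# GRPC_ELEMENTS contains exactly the names exported by the grpc package.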
_BEFORE_IMPORT = tuple(globals())
from grpc import * # pylint: disable=wildcard-import,unused-wildcard-import
_AFTER_IMPORT = tuple(globals())
GRPC_ELEMENTS = tuple(
element
for element in _AFTER_IMPORT
if element not in _BEFORE_IMPORT and element != "_BEFORE_IMPORT"
)
| 864
| 32.269231
| 76
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_compression_test.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests server and client side compression."""
from concurrent import futures
import contextlib
import functools
import itertools
import logging
import os
import unittest
import grpc
from grpc import _grpcio_metadata
from tests.unit import _tcp_proxy
from tests.unit.framework.common import test_constants
_UNARY_UNARY = "/test/UnaryUnary"
_UNARY_STREAM = "/test/UnaryStream"
_STREAM_UNARY = "/test/StreamUnary"
_STREAM_STREAM = "/test/StreamStream"
# Cut down on test time.
_STREAM_LENGTH = test_constants.STREAM_LENGTH // 16
_HOST = "localhost"
_REQUEST = b"\x00" * 100
_COMPRESSION_RATIO_THRESHOLD = 0.05
_COMPRESSION_METHODS = (
None,
# Disabled for test tractability.
# grpc.Compression.NoCompression,
# grpc.Compression.Deflate,
grpc.Compression.Gzip,
)
_COMPRESSION_NAMES = {
None: "Uncompressed",
grpc.Compression.NoCompression: "NoCompression",
grpc.Compression.Deflate: "DeflateCompression",
grpc.Compression.Gzip: "GzipCompression",
}
_TEST_OPTIONS = {
"client_streaming": (True, False),
"server_streaming": (True, False),
"channel_compression": _COMPRESSION_METHODS,
"multicallable_compression": _COMPRESSION_METHODS,
"server_compression": _COMPRESSION_METHODS,
"server_call_compression": _COMPRESSION_METHODS,
}
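# One test method is generated per element of the Cartesian product of the
# options above; see the setattr() loop at the bottom of this module.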
def _make_handle_unary_unary(pre_response_callback):
def _handle_unary(request, servicer_context):
if pre_response_callback:
pre_response_callback(request, servicer_context)
return request
return _handle_unary
def _make_handle_unary_stream(pre_response_callback):
def _handle_unary_stream(request, servicer_context):
if pre_response_callback:
pre_response_callback(request, servicer_context)
for _ in range(_STREAM_LENGTH):
yield request
return _handle_unary_stream
def _make_handle_stream_unary(pre_response_callback):
def _handle_stream_unary(request_iterator, servicer_context):
if pre_response_callback:
pre_response_callback(request_iterator, servicer_context)
response = None
for request in request_iterator:
if not response:
response = request
return response
return _handle_stream_unary
def _make_handle_stream_stream(pre_response_callback):
def _handle_stream(request_iterator, servicer_context):
# TODO(issue:#6891) We should be able to remove this loop,
# and replace with return; yield
for request in request_iterator:
if pre_response_callback:
pre_response_callback(request, servicer_context)
yield request
return _handle_stream
def set_call_compression(
compression_method, request_or_iterator, servicer_context
):
del request_or_iterator
servicer_context.set_compression(compression_method)
def disable_next_compression(request, servicer_context):
del request
servicer_context.disable_next_message_compression()
def disable_first_compression(request, servicer_context):
if int(request.decode("ascii")) == 0:
servicer_context.disable_next_message_compression()
class _MethodHandler(grpc.RpcMethodHandler):
def __init__(
self, request_streaming, response_streaming, pre_response_callback
):
self.request_streaming = request_streaming
self.response_streaming = response_streaming
self.request_deserializer = None
self.response_serializer = None
self.unary_unary = None
self.unary_stream = None
self.stream_unary = None
self.stream_stream = None
if self.request_streaming and self.response_streaming:
self.stream_stream = _make_handle_stream_stream(
pre_response_callback
)
elif not self.request_streaming and not self.response_streaming:
self.unary_unary = _make_handle_unary_unary(pre_response_callback)
elif not self.request_streaming and self.response_streaming:
self.unary_stream = _make_handle_unary_stream(pre_response_callback)
else:
self.stream_unary = _make_handle_stream_unary(pre_response_callback)
class _GenericHandler(grpc.GenericRpcHandler):
def __init__(self, pre_response_callback):
self._pre_response_callback = pre_response_callback
def service(self, handler_call_details):
if handler_call_details.method == _UNARY_UNARY:
return _MethodHandler(False, False, self._pre_response_callback)
elif handler_call_details.method == _UNARY_STREAM:
return _MethodHandler(False, True, self._pre_response_callback)
elif handler_call_details.method == _STREAM_UNARY:
return _MethodHandler(True, False, self._pre_response_callback)
elif handler_call_details.method == _STREAM_STREAM:
return _MethodHandler(True, True, self._pre_response_callback)
else:
return None
@contextlib.contextmanager
def _instrumented_client_server_pair(
channel_kwargs, server_kwargs, server_handler
):
server = grpc.server(futures.ThreadPoolExecutor(), **server_kwargs)
server.add_generic_rpc_handlers((server_handler,))
server_port = server.add_insecure_port("{}:0".format(_HOST))
server.start()
with _tcp_proxy.TcpProxy(_HOST, _HOST, server_port) as proxy:
proxy_port = proxy.get_port()
with grpc.insecure_channel(
"{}:{}".format(_HOST, proxy_port), **channel_kwargs
) as client_channel:
try:
yield client_channel, proxy, server
finally:
server.stop(None)
def _get_byte_counts(
channel_kwargs,
multicallable_kwargs,
client_function,
server_kwargs,
server_handler,
message,
):
with _instrumented_client_server_pair(
channel_kwargs, server_kwargs, server_handler
) as pipeline:
client_channel, proxy, server = pipeline
client_function(client_channel, multicallable_kwargs, message)
return proxy.get_byte_count()
def _get_compression_ratios(
client_function,
first_channel_kwargs,
first_multicallable_kwargs,
first_server_kwargs,
first_server_handler,
second_channel_kwargs,
second_multicallable_kwargs,
second_server_kwargs,
second_server_handler,
message,
):
first_bytes_sent, first_bytes_received = _get_byte_counts(
first_channel_kwargs,
first_multicallable_kwargs,
client_function,
first_server_kwargs,
first_server_handler,
message,
)
second_bytes_sent, second_bytes_received = _get_byte_counts(
second_channel_kwargs,
second_multicallable_kwargs,
client_function,
second_server_kwargs,
second_server_handler,
message,
)
return (
(second_bytes_sent - first_bytes_sent) / float(first_bytes_sent),
(second_bytes_received - first_bytes_received)
/ float(first_bytes_received),
)
def _unary_unary_client(channel, multicallable_kwargs, message):
multi_callable = channel.unary_unary(_UNARY_UNARY)
response = multi_callable(message, **multicallable_kwargs)
if response != message:
raise RuntimeError(
"Request '{}' != Response '{}'".format(message, response)
)
def _unary_stream_client(channel, multicallable_kwargs, message):
multi_callable = channel.unary_stream(_UNARY_STREAM)
response_iterator = multi_callable(message, **multicallable_kwargs)
for response in response_iterator:
if response != message:
raise RuntimeError(
"Request '{}' != Response '{}'".format(message, response)
)
def _stream_unary_client(channel, multicallable_kwargs, message):
multi_callable = channel.stream_unary(_STREAM_UNARY)
requests = (_REQUEST for _ in range(_STREAM_LENGTH))
response = multi_callable(requests, **multicallable_kwargs)
if response != message:
raise RuntimeError(
"Request '{}' != Response '{}'".format(message, response)
)
def _stream_stream_client(channel, multicallable_kwargs, message):
multi_callable = channel.stream_stream(_STREAM_STREAM)
request_prefix = str(0).encode("ascii") * 100
requests = (
request_prefix + str(i).encode("ascii") for i in range(_STREAM_LENGTH)
)
response_iterator = multi_callable(requests, **multicallable_kwargs)
for i, response in enumerate(response_iterator):
if int(response.decode("ascii")) != i:
raise RuntimeError(
"Request '{}' != Response '{}'".format(i, response)
)
class CompressionTest(unittest.TestCase):
def assertCompressed(self, compression_ratio):
self.assertLess(
compression_ratio,
-1.0 * _COMPRESSION_RATIO_THRESHOLD,
msg="Actual compression ratio: {}".format(compression_ratio),
)
def assertNotCompressed(self, compression_ratio):
self.assertGreaterEqual(
compression_ratio,
-1.0 * _COMPRESSION_RATIO_THRESHOLD,
msg="Actual compession ratio: {}".format(compression_ratio),
)
def assertConfigurationCompressed(
self,
client_streaming,
server_streaming,
channel_compression,
multicallable_compression,
server_compression,
server_call_compression,
):
client_side_compressed = (
channel_compression or multicallable_compression
)
server_side_compressed = server_compression or server_call_compression
channel_kwargs = (
{
"compression": channel_compression,
}
if channel_compression
else {}
)
multicallable_kwargs = (
{
"compression": multicallable_compression,
}
if multicallable_compression
else {}
)
client_function = None
if not client_streaming and not server_streaming:
client_function = _unary_unary_client
elif not client_streaming and server_streaming:
client_function = _unary_stream_client
elif client_streaming and not server_streaming:
client_function = _stream_unary_client
else:
client_function = _stream_stream_client
server_kwargs = (
{
"compression": server_compression,
}
if server_compression
else {}
)
server_handler = (
_GenericHandler(
functools.partial(set_call_compression, grpc.Compression.Gzip)
)
if server_call_compression
else _GenericHandler(None)
)
_get_compression_ratios(
client_function,
{},
{},
{},
_GenericHandler(None),
channel_kwargs,
multicallable_kwargs,
server_kwargs,
server_handler,
_REQUEST,
)
def testDisableNextCompressionStreaming(self):
server_kwargs = {
"compression": grpc.Compression.Deflate,
}
_get_compression_ratios(
_stream_stream_client,
{},
{},
{},
_GenericHandler(None),
{},
{},
server_kwargs,
_GenericHandler(disable_next_compression),
_REQUEST,
)
def testDisableNextCompressionStreamingResets(self):
server_kwargs = {
"compression": grpc.Compression.Deflate,
}
_get_compression_ratios(
_stream_stream_client,
{},
{},
{},
_GenericHandler(None),
{},
{},
server_kwargs,
_GenericHandler(disable_first_compression),
_REQUEST,
)
def _get_compression_str(name, value):
return "{}{}".format(name, _COMPRESSION_NAMES[value])
def _get_compression_test_name(
client_streaming,
server_streaming,
channel_compression,
multicallable_compression,
server_compression,
server_call_compression,
):
client_arity = "Stream" if client_streaming else "Unary"
server_arity = "Stream" if server_streaming else "Unary"
arity = "{}{}".format(client_arity, server_arity)
channel_compression_str = _get_compression_str(
"Channel", channel_compression
)
multicallable_compression_str = _get_compression_str(
"Multicallable", multicallable_compression
)
server_compression_str = _get_compression_str("Server", server_compression)
server_call_compression_str = _get_compression_str(
"ServerCall", server_call_compression
)
return "test{}{}{}{}{}".format(
arity,
channel_compression_str,
multicallable_compression_str,
server_compression_str,
server_call_compression_str,
)
def _test_options():
for test_parameters in itertools.product(*_TEST_OPTIONS.values()):
yield dict(zip(_TEST_OPTIONS.keys(), test_parameters))
for options in _test_options():
def test_compression(**kwargs):
def _test_compression(self):
self.assertConfigurationCompressed(**kwargs)
return _test_compression
setattr(
CompressionTest,
_get_compression_test_name(**options),
test_compression(**options),
)
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
| 14,226
| 30.406181
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/beta/test_utilities.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test-appropriate entry points into the gRPC Python Beta API."""
import grpc
from grpc.beta import implementations
def not_really_secure_channel(
host, port, channel_credentials, server_host_override
):
"""Creates an insecure Channel to a remote host.
Args:
host: The name of the remote host to which to connect.
port: The port of the remote host to which to connect.
channel_credentials: The implementations.ChannelCredentials with which to
connect.
server_host_override: The target name used for SSL host name checking.
Returns:
An implementations.Channel to the remote host through which RPCs may be
conducted.
"""
target = "%s:%d" % (host, port)
channel = grpc.secure_channel(
target,
channel_credentials,
(
(
"grpc.ssl_target_name_override",
server_host_override,
),
),
)
return implementations.Channel(channel)
| 1,571
| 31.75
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/beta/_implementations_test.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests the implementations module of the gRPC Python Beta API."""
import datetime
import unittest
from grpc.beta import implementations
from oauth2client import client as oauth2client_client
from tests.unit import resources
class ChannelCredentialsTest(unittest.TestCase):
def test_runtime_provided_root_certificates(self):
channel_credentials = implementations.ssl_channel_credentials()
self.assertIsInstance(
channel_credentials, implementations.ChannelCredentials
)
def test_application_provided_root_certificates(self):
channel_credentials = implementations.ssl_channel_credentials(
resources.test_root_certificates()
)
self.assertIsInstance(
channel_credentials, implementations.ChannelCredentials
)
class CallCredentialsTest(unittest.TestCase):
def test_google_call_credentials(self):
creds = oauth2client_client.GoogleCredentials(
"token",
"client_id",
"secret",
"refresh_token",
datetime.datetime(2008, 6, 24),
"https://refresh.uri.com/",
"user_agent",
)
call_creds = implementations.google_call_credentials(creds)
self.assertIsInstance(call_creds, implementations.CallCredentials)
def test_access_token_call_credentials(self):
call_creds = implementations.access_token_call_credentials("token")
self.assertIsInstance(call_creds, implementations.CallCredentials)
if __name__ == "__main__":
unittest.main(verbosity=2)
| 2,159
| 33.83871
| 75
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/beta/_utilities_test.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests of grpc.beta.utilities."""
import threading
import time
import unittest
from grpc.beta import implementations
from grpc.beta import utilities
from grpc.framework.foundation import future
from tests.unit.framework.common import test_constants
class _Callback(object):
def __init__(self):
self._condition = threading.Condition()
self._value = None
def accept_value(self, value):
with self._condition:
self._value = value
self._condition.notify_all()
def block_until_called(self):
with self._condition:
while self._value is None:
self._condition.wait()
return self._value
@unittest.skip("https://github.com/grpc/grpc/issues/16134")
class ChannelConnectivityTest(unittest.TestCase):
def test_lonely_channel_connectivity(self):
channel = implementations.insecure_channel("localhost", 12345)
callback = _Callback()
ready_future = utilities.channel_ready_future(channel)
ready_future.add_done_callback(callback.accept_value)
with self.assertRaises(future.TimeoutError):
ready_future.result(timeout=test_constants.SHORT_TIMEOUT)
self.assertFalse(ready_future.cancelled())
self.assertFalse(ready_future.done())
self.assertTrue(ready_future.running())
ready_future.cancel()
value_passed_to_callback = callback.block_until_called()
self.assertIs(ready_future, value_passed_to_callback)
self.assertTrue(ready_future.cancelled())
self.assertTrue(ready_future.done())
self.assertFalse(ready_future.running())
def test_immediately_connectable_channel_connectivity(self):
server = implementations.server({})
port = server.add_insecure_port("[::]:0")
server.start()
channel = implementations.insecure_channel("localhost", port)
callback = _Callback()
try:
ready_future = utilities.channel_ready_future(channel)
ready_future.add_done_callback(callback.accept_value)
self.assertIsNone(
ready_future.result(timeout=test_constants.LONG_TIMEOUT)
)
value_passed_to_callback = callback.block_until_called()
self.assertIs(ready_future, value_passed_to_callback)
self.assertFalse(ready_future.cancelled())
self.assertTrue(ready_future.done())
self.assertFalse(ready_future.running())
# Cancellation after maturity has no effect.
ready_future.cancel()
self.assertFalse(ready_future.cancelled())
self.assertTrue(ready_future.done())
self.assertFalse(ready_future.running())
finally:
ready_future.cancel()
server.stop(0)
if __name__ == "__main__":
unittest.main(verbosity=2)
| 3,454
| 35.755319
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/beta/_not_found_test.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests of RPC-method-not-found behavior."""
import unittest
from grpc.beta import implementations
from grpc.beta import interfaces
from grpc.framework.interfaces.face import face
from tests.unit.framework.common import test_constants
class NotFoundTest(unittest.TestCase):
def setUp(self):
self._server = implementations.server({})
port = self._server.add_insecure_port("[::]:0")
channel = implementations.insecure_channel("localhost", port)
self._generic_stub = implementations.generic_stub(channel)
self._server.start()
def tearDown(self):
self._server.stop(0).wait()
self._generic_stub = None
def test_blocking_unary_unary_not_found(self):
with self.assertRaises(face.LocalError) as exception_assertion_context:
self._generic_stub.blocking_unary_unary(
"groop",
"meffod",
b"abc",
test_constants.LONG_TIMEOUT,
with_call=True,
)
self.assertIs(
exception_assertion_context.exception.code,
interfaces.StatusCode.UNIMPLEMENTED,
)
def test_future_stream_unary_not_found(self):
rpc_future = self._generic_stub.future_stream_unary(
"grupe", "mevvod", iter([b"def"]), test_constants.LONG_TIMEOUT
)
with self.assertRaises(face.LocalError) as exception_assertion_context:
rpc_future.result()
self.assertIs(
exception_assertion_context.exception.code,
interfaces.StatusCode.UNIMPLEMENTED,
)
self.assertIs(
rpc_future.exception().code, interfaces.StatusCode.UNIMPLEMENTED
)
if __name__ == "__main__":
unittest.main(verbosity=2)
| 2,351
| 33.588235
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/beta/_connectivity_channel_test.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests of grpc.beta._connectivity_channel."""
import unittest
from grpc.beta import interfaces
class ConnectivityStatesTest(unittest.TestCase):
def testBetaConnectivityStates(self):
self.assertIsNotNone(interfaces.ChannelConnectivity.IDLE)
self.assertIsNotNone(interfaces.ChannelConnectivity.CONNECTING)
self.assertIsNotNone(interfaces.ChannelConnectivity.READY)
self.assertIsNotNone(interfaces.ChannelConnectivity.TRANSIENT_FAILURE)
self.assertIsNotNone(interfaces.ChannelConnectivity.FATAL_FAILURE)
if __name__ == "__main__":
unittest.main(verbosity=2)
| 1,188
| 36.15625
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/beta/_beta_features_test.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests Face interface compliance of the gRPC Python Beta API."""
import threading
import unittest
from grpc.beta import implementations
from grpc.beta import interfaces
from grpc.framework.common import cardinality
from grpc.framework.interfaces.face import utilities
from tests.unit import resources
from tests.unit.beta import test_utilities
from tests.unit.framework.common import test_constants
_SERVER_HOST_OVERRIDE = "foo.test.google.fr"
_PER_RPC_CREDENTIALS_METADATA_KEY = b"my-call-credentials-metadata-key"
_PER_RPC_CREDENTIALS_METADATA_VALUE = b"my-call-credentials-metadata-value"
_GROUP = "group"
_UNARY_UNARY = "unary-unary"
_UNARY_STREAM = "unary-stream"
_STREAM_UNARY = "stream-unary"
_STREAM_STREAM = "stream-stream"
_REQUEST = b"abc"
_RESPONSE = b"123"
class _Servicer(object):
def __init__(self):
self._condition = threading.Condition()
self._peer = None
self._serviced = False
def unary_unary(self, request, context):
with self._condition:
self._request = request
self._peer = context.protocol_context().peer()
self._invocation_metadata = context.invocation_metadata()
context.protocol_context().disable_next_response_compression()
self._serviced = True
self._condition.notify_all()
return _RESPONSE
def unary_stream(self, request, context):
with self._condition:
self._request = request
self._peer = context.protocol_context().peer()
self._invocation_metadata = context.invocation_metadata()
context.protocol_context().disable_next_response_compression()
self._serviced = True
self._condition.notify_all()
return
yield # pylint: disable=unreachable
def stream_unary(self, request_iterator, context):
for request in request_iterator:
self._request = request
with self._condition:
self._peer = context.protocol_context().peer()
self._invocation_metadata = context.invocation_metadata()
context.protocol_context().disable_next_response_compression()
self._serviced = True
self._condition.notify_all()
return _RESPONSE
def stream_stream(self, request_iterator, context):
for request in request_iterator:
with self._condition:
self._peer = context.protocol_context().peer()
context.protocol_context().disable_next_response_compression()
yield _RESPONSE
with self._condition:
self._invocation_metadata = context.invocation_metadata()
self._serviced = True
self._condition.notify_all()
def peer(self):
with self._condition:
return self._peer
def block_until_serviced(self):
with self._condition:
while not self._serviced:
self._condition.wait()
class _BlockingIterator(object):
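    """Wraps an iterator, releasing one upstream item per allow() call.
    This lets the tests interleave per-message protocol options (such as
    disabling compression for the next request) with individual requests.
    """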
def __init__(self, upstream):
self._condition = threading.Condition()
self._upstream = upstream
self._allowed = []
def __iter__(self):
return self
def __next__(self):
return self.next()
def next(self):
with self._condition:
while True:
if self._allowed is None:
raise StopIteration()
elif self._allowed:
return self._allowed.pop(0)
else:
self._condition.wait()
def allow(self):
with self._condition:
try:
self._allowed.append(next(self._upstream))
except StopIteration:
self._allowed = None
self._condition.notify_all()
def _metadata_plugin(context, callback):
callback(
[
(
_PER_RPC_CREDENTIALS_METADATA_KEY,
_PER_RPC_CREDENTIALS_METADATA_VALUE,
)
],
None,
)
class BetaFeaturesTest(unittest.TestCase):
def setUp(self):
self._servicer = _Servicer()
method_implementations = {
(_GROUP, _UNARY_UNARY): utilities.unary_unary_inline(
self._servicer.unary_unary
),
(_GROUP, _UNARY_STREAM): utilities.unary_stream_inline(
self._servicer.unary_stream
),
(_GROUP, _STREAM_UNARY): utilities.stream_unary_inline(
self._servicer.stream_unary
),
(_GROUP, _STREAM_STREAM): utilities.stream_stream_inline(
self._servicer.stream_stream
),
}
cardinalities = {
_UNARY_UNARY: cardinality.Cardinality.UNARY_UNARY,
_UNARY_STREAM: cardinality.Cardinality.UNARY_STREAM,
_STREAM_UNARY: cardinality.Cardinality.STREAM_UNARY,
_STREAM_STREAM: cardinality.Cardinality.STREAM_STREAM,
}
server_options = implementations.server_options(
thread_pool_size=test_constants.POOL_SIZE
)
self._server = implementations.server(
method_implementations, options=server_options
)
server_credentials = implementations.ssl_server_credentials(
[
(
resources.private_key(),
resources.certificate_chain(),
),
]
)
port = self._server.add_secure_port("[::]:0", server_credentials)
self._server.start()
self._channel_credentials = implementations.ssl_channel_credentials(
resources.test_root_certificates()
)
self._call_credentials = implementations.metadata_call_credentials(
_metadata_plugin
)
channel = test_utilities.not_really_secure_channel(
"localhost", port, self._channel_credentials, _SERVER_HOST_OVERRIDE
)
stub_options = implementations.stub_options(
thread_pool_size=test_constants.POOL_SIZE
)
self._dynamic_stub = implementations.dynamic_stub(
channel, _GROUP, cardinalities, options=stub_options
)
def tearDown(self):
self._dynamic_stub = None
self._server.stop(test_constants.SHORT_TIMEOUT).wait()
def test_unary_unary(self):
call_options = interfaces.grpc_call_options(
disable_compression=True, credentials=self._call_credentials
)
response = getattr(self._dynamic_stub, _UNARY_UNARY)(
_REQUEST, test_constants.LONG_TIMEOUT, protocol_options=call_options
)
self.assertEqual(_RESPONSE, response)
self.assertIsNotNone(self._servicer.peer())
invocation_metadata = [
(metadatum.key, metadatum.value)
for metadatum in self._servicer._invocation_metadata
]
self.assertIn(
(
_PER_RPC_CREDENTIALS_METADATA_KEY,
_PER_RPC_CREDENTIALS_METADATA_VALUE,
),
invocation_metadata,
)
def test_unary_stream(self):
call_options = interfaces.grpc_call_options(
disable_compression=True, credentials=self._call_credentials
)
response_iterator = getattr(self._dynamic_stub, _UNARY_STREAM)(
_REQUEST, test_constants.LONG_TIMEOUT, protocol_options=call_options
)
self._servicer.block_until_serviced()
self.assertIsNotNone(self._servicer.peer())
invocation_metadata = [
(metadatum.key, metadatum.value)
for metadatum in self._servicer._invocation_metadata
]
self.assertIn(
(
_PER_RPC_CREDENTIALS_METADATA_KEY,
_PER_RPC_CREDENTIALS_METADATA_VALUE,
),
invocation_metadata,
)
def test_stream_unary(self):
call_options = interfaces.grpc_call_options(
credentials=self._call_credentials
)
request_iterator = _BlockingIterator(iter((_REQUEST,)))
response_future = getattr(self._dynamic_stub, _STREAM_UNARY).future(
request_iterator,
test_constants.LONG_TIMEOUT,
protocol_options=call_options,
)
response_future.protocol_context().disable_next_request_compression()
request_iterator.allow()
response_future.protocol_context().disable_next_request_compression()
request_iterator.allow()
self._servicer.block_until_serviced()
self.assertIsNotNone(self._servicer.peer())
self.assertEqual(_RESPONSE, response_future.result())
invocation_metadata = [
(metadatum.key, metadatum.value)
for metadatum in self._servicer._invocation_metadata
]
self.assertIn(
(
_PER_RPC_CREDENTIALS_METADATA_KEY,
_PER_RPC_CREDENTIALS_METADATA_VALUE,
),
invocation_metadata,
)
def test_stream_stream(self):
call_options = interfaces.grpc_call_options(
credentials=self._call_credentials
)
request_iterator = _BlockingIterator(iter((_REQUEST,)))
response_iterator = getattr(self._dynamic_stub, _STREAM_STREAM)(
request_iterator,
test_constants.SHORT_TIMEOUT,
protocol_options=call_options,
)
response_iterator.protocol_context().disable_next_request_compression()
request_iterator.allow()
response = next(response_iterator)
response_iterator.protocol_context().disable_next_request_compression()
request_iterator.allow()
self._servicer.block_until_serviced()
self.assertIsNotNone(self._servicer.peer())
self.assertEqual(_RESPONSE, response)
invocation_metadata = [
(metadatum.key, metadatum.value)
for metadatum in self._servicer._invocation_metadata
]
self.assertIn(
(
_PER_RPC_CREDENTIALS_METADATA_KEY,
_PER_RPC_CREDENTIALS_METADATA_VALUE,
),
invocation_metadata,
)
class ContextManagementAndLifecycleTest(unittest.TestCase):
def setUp(self):
self._servicer = _Servicer()
self._method_implementations = {
(_GROUP, _UNARY_UNARY): utilities.unary_unary_inline(
self._servicer.unary_unary
),
(_GROUP, _UNARY_STREAM): utilities.unary_stream_inline(
self._servicer.unary_stream
),
(_GROUP, _STREAM_UNARY): utilities.stream_unary_inline(
self._servicer.stream_unary
),
(_GROUP, _STREAM_STREAM): utilities.stream_stream_inline(
self._servicer.stream_stream
),
}
self._cardinalities = {
_UNARY_UNARY: cardinality.Cardinality.UNARY_UNARY,
_UNARY_STREAM: cardinality.Cardinality.UNARY_STREAM,
_STREAM_UNARY: cardinality.Cardinality.STREAM_UNARY,
_STREAM_STREAM: cardinality.Cardinality.STREAM_STREAM,
}
self._server_options = implementations.server_options(
thread_pool_size=test_constants.POOL_SIZE
)
self._server_credentials = implementations.ssl_server_credentials(
[
(
resources.private_key(),
resources.certificate_chain(),
),
]
)
self._channel_credentials = implementations.ssl_channel_credentials(
resources.test_root_certificates()
)
self._stub_options = implementations.stub_options(
thread_pool_size=test_constants.POOL_SIZE
)
def test_stub_context(self):
server = implementations.server(
self._method_implementations, options=self._server_options
)
port = server.add_secure_port("[::]:0", self._server_credentials)
server.start()
channel = test_utilities.not_really_secure_channel(
"localhost", port, self._channel_credentials, _SERVER_HOST_OVERRIDE
)
dynamic_stub = implementations.dynamic_stub(
channel, _GROUP, self._cardinalities, options=self._stub_options
)
for _ in range(100):
with dynamic_stub:
pass
for _ in range(10):
with dynamic_stub:
call_options = interfaces.grpc_call_options(
disable_compression=True
)
response = getattr(dynamic_stub, _UNARY_UNARY)(
_REQUEST,
test_constants.LONG_TIMEOUT,
protocol_options=call_options,
)
self.assertEqual(_RESPONSE, response)
self.assertIsNotNone(self._servicer.peer())
server.stop(test_constants.SHORT_TIMEOUT).wait()
def test_server_lifecycle(self):
for _ in range(100):
server = implementations.server(
self._method_implementations, options=self._server_options
)
port = server.add_secure_port("[::]:0", self._server_credentials)
server.start()
server.stop(test_constants.SHORT_TIMEOUT).wait()
for _ in range(100):
server = implementations.server(
self._method_implementations, options=self._server_options
)
server.add_secure_port("[::]:0", self._server_credentials)
server.add_insecure_port("[::]:0")
with server:
server.stop(test_constants.SHORT_TIMEOUT)
server.stop(test_constants.SHORT_TIMEOUT)
if __name__ == "__main__":
unittest.main(verbosity=2)
| 14,444
| 34.843672
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/beta/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_cython/test_utilities.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import threading
from grpc._cython import cygrpc
class SimpleFuture(object):
"""A simple future mechanism."""
def __init__(self, function, *args, **kwargs):
def wrapped_function():
try:
self._result = function(*args, **kwargs)
except Exception as error: # pylint: disable=broad-except
self._error = error
self._result = None
self._error = None
self._thread = threading.Thread(target=wrapped_function)
self._thread.start()
def result(self):
"""The resulting value of this future.
Re-raises any exceptions.
"""
self._thread.join()
if self._error:
# TODO(atash): re-raise exceptions in a way that preserves tracebacks
raise self._error # pylint: disable=raising-bad-type
return self._result
class CompletionQueuePollFuture(SimpleFuture):
def __init__(self, completion_queue, deadline):
super(CompletionQueuePollFuture, self).__init__(
lambda: completion_queue.poll(deadline=deadline)
)
| 1,684
| 31.403846
| 81
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_cython/cygrpc_test.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import platform
import threading
import time
import unittest
from grpc._cython import cygrpc
from tests.unit import resources
from tests.unit import test_common
from tests.unit._cython import test_utilities
_SSL_HOST_OVERRIDE = b"foo.test.google.fr"
_CALL_CREDENTIALS_METADATA_KEY = "call-creds-key"
_CALL_CREDENTIALS_METADATA_VALUE = "call-creds-value"
_EMPTY_FLAGS = 0
def _metadata_plugin(context, callback):
callback(
(
(
_CALL_CREDENTIALS_METADATA_KEY,
_CALL_CREDENTIALS_METADATA_VALUE,
),
),
cygrpc.StatusCode.ok,
b"",
)
class TypeSmokeTest(unittest.TestCase):
def testCompletionQueueUpDown(self):
completion_queue = cygrpc.CompletionQueue()
del completion_queue
def testServerUpDown(self):
server = cygrpc.Server(
set(
[
(
b"grpc.so_reuseport",
0,
)
]
),
False,
)
del server
def testChannelUpDown(self):
channel = cygrpc.Channel(b"[::]:0", None, None)
channel.close(cygrpc.StatusCode.cancelled, "Test method anyway!")
def test_metadata_plugin_call_credentials_up_down(self):
cygrpc.MetadataPluginCallCredentials(
_metadata_plugin, b"test plugin name!"
)
def testServerStartNoExplicitShutdown(self):
server = cygrpc.Server(
[
(
b"grpc.so_reuseport",
0,
)
],
False,
)
completion_queue = cygrpc.CompletionQueue()
server.register_completion_queue(completion_queue)
port = server.add_http2_port(b"[::]:0")
self.assertIsInstance(port, int)
server.start()
del server
def testServerStartShutdown(self):
completion_queue = cygrpc.CompletionQueue()
server = cygrpc.Server(
[
(
b"grpc.so_reuseport",
0,
),
],
False,
)
server.add_http2_port(b"[::]:0")
server.register_completion_queue(completion_queue)
server.start()
shutdown_tag = object()
server.shutdown(completion_queue, shutdown_tag)
event = completion_queue.poll()
self.assertEqual(
cygrpc.CompletionType.operation_complete, event.completion_type
)
self.assertIs(shutdown_tag, event.tag)
del server
del completion_queue
class ServerClientMixin(object):
def setUpMixin(self, server_credentials, client_credentials, host_override):
self.server_completion_queue = cygrpc.CompletionQueue()
self.server = cygrpc.Server(
[
(
b"grpc.so_reuseport",
0,
)
],
False,
)
self.server.register_completion_queue(self.server_completion_queue)
if server_credentials:
self.port = self.server.add_http2_port(
b"[::]:0", server_credentials
)
else:
self.port = self.server.add_http2_port(b"[::]:0")
self.server.start()
self.client_completion_queue = cygrpc.CompletionQueue()
if client_credentials:
client_channel_arguments = (
(
cygrpc.ChannelArgKey.ssl_target_name_override,
host_override,
),
)
self.client_channel = cygrpc.Channel(
"localhost:{}".format(self.port).encode(),
client_channel_arguments,
client_credentials,
)
else:
self.client_channel = cygrpc.Channel(
"localhost:{}".format(self.port).encode(), set(), None
)
if host_override:
self.host_argument = None # default host
self.expected_host = host_override
else:
# arbitrary host name necessitating no further identification
self.host_argument = b"hostess"
self.expected_host = self.host_argument
def tearDownMixin(self):
self.client_channel.close(cygrpc.StatusCode.ok, "test being torn down!")
del self.client_channel
del self.server
del self.client_completion_queue
del self.server_completion_queue
def _perform_queue_operations(
self, operations, call, queue, deadline, description
):
"""Perform the operations with given call, queue, and deadline.
        Invocation errors are reported as an exception with `description`
in the message. Performs the operations asynchronously, returning a
future.
"""
def performer():
tag = object()
try:
call_result = call.start_client_batch(operations, tag)
self.assertEqual(cygrpc.CallError.ok, call_result)
event = queue.poll(deadline=deadline)
self.assertEqual(
cygrpc.CompletionType.operation_complete,
event.completion_type,
)
self.assertTrue(event.success)
self.assertIs(tag, event.tag)
except Exception as error:
raise Exception(
"Error in '{}': {}".format(description, error.message)
)
return event
return test_utilities.SimpleFuture(performer)
def test_echo(self):
DEADLINE = time.time() + 5
DEADLINE_TOLERANCE = 0.25
CLIENT_METADATA_ASCII_KEY = "key"
CLIENT_METADATA_ASCII_VALUE = "val"
CLIENT_METADATA_BIN_KEY = "key-bin"
CLIENT_METADATA_BIN_VALUE = b"\0" * 1000
SERVER_INITIAL_METADATA_KEY = "init_me_me_me"
SERVER_INITIAL_METADATA_VALUE = "whodawha?"
SERVER_TRAILING_METADATA_KEY = "california_is_in_a_drought"
SERVER_TRAILING_METADATA_VALUE = "zomg it is"
SERVER_STATUS_CODE = cygrpc.StatusCode.ok
SERVER_STATUS_DETAILS = "our work is never over"
REQUEST = b"in death a member of project mayhem has a name"
RESPONSE = b"his name is robert paulson"
METHOD = b"twinkies"
server_request_tag = object()
request_call_result = self.server.request_call(
self.server_completion_queue,
self.server_completion_queue,
server_request_tag,
)
self.assertEqual(cygrpc.CallError.ok, request_call_result)
client_call_tag = object()
client_initial_metadata = (
(
CLIENT_METADATA_ASCII_KEY,
CLIENT_METADATA_ASCII_VALUE,
),
(
CLIENT_METADATA_BIN_KEY,
CLIENT_METADATA_BIN_VALUE,
),
)
client_call = self.client_channel.integrated_call(
0,
METHOD,
self.host_argument,
DEADLINE,
client_initial_metadata,
None,
[
(
[
cygrpc.SendInitialMetadataOperation(
client_initial_metadata, _EMPTY_FLAGS
),
cygrpc.SendMessageOperation(REQUEST, _EMPTY_FLAGS),
cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS),
cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS),
cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),
cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS),
],
client_call_tag,
),
],
)
client_event_future = test_utilities.SimpleFuture(
self.client_channel.next_call_event
)
request_event = self.server_completion_queue.poll(deadline=DEADLINE)
self.assertEqual(
cygrpc.CompletionType.operation_complete,
request_event.completion_type,
)
self.assertIsInstance(request_event.call, cygrpc.Call)
self.assertIs(server_request_tag, request_event.tag)
self.assertTrue(
test_common.metadata_transmitted(
client_initial_metadata, request_event.invocation_metadata
)
)
self.assertEqual(METHOD, request_event.call_details.method)
self.assertEqual(self.expected_host, request_event.call_details.host)
self.assertLess(
abs(DEADLINE - request_event.call_details.deadline),
DEADLINE_TOLERANCE,
)
server_call_tag = object()
server_call = request_event.call
server_start_batch_result = server_call.start_server_batch(
[
cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),
],
server_call_tag,
)
self.assertEqual(cygrpc.CallError.ok, server_start_batch_result)
server_message_event = self.server_completion_queue.poll(
deadline=DEADLINE
)
server_call_tag = object()
server_initial_metadata = (
(
SERVER_INITIAL_METADATA_KEY,
SERVER_INITIAL_METADATA_VALUE,
),
)
server_trailing_metadata = (
(
SERVER_TRAILING_METADATA_KEY,
SERVER_TRAILING_METADATA_VALUE,
),
)
server_start_batch_result = server_call.start_server_batch(
[
cygrpc.SendInitialMetadataOperation(
server_initial_metadata, _EMPTY_FLAGS
),
cygrpc.SendMessageOperation(RESPONSE, _EMPTY_FLAGS),
cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS),
cygrpc.SendStatusFromServerOperation(
server_trailing_metadata,
SERVER_STATUS_CODE,
SERVER_STATUS_DETAILS,
_EMPTY_FLAGS,
),
],
server_call_tag,
)
self.assertEqual(cygrpc.CallError.ok, server_start_batch_result)
server_event = self.server_completion_queue.poll(deadline=DEADLINE)
client_event = client_event_future.result()
self.assertEqual(6, len(client_event.batch_operations))
found_client_op_types = set()
for client_result in client_event.batch_operations:
# we expect each op type to be unique
self.assertNotIn(client_result.type(), found_client_op_types)
found_client_op_types.add(client_result.type())
if (
client_result.type()
== cygrpc.OperationType.receive_initial_metadata
):
self.assertTrue(
test_common.metadata_transmitted(
server_initial_metadata,
client_result.initial_metadata(),
)
)
elif client_result.type() == cygrpc.OperationType.receive_message:
self.assertEqual(RESPONSE, client_result.message())
elif (
client_result.type()
== cygrpc.OperationType.receive_status_on_client
):
self.assertTrue(
test_common.metadata_transmitted(
server_trailing_metadata,
client_result.trailing_metadata(),
)
)
self.assertEqual(SERVER_STATUS_DETAILS, client_result.details())
self.assertEqual(SERVER_STATUS_CODE, client_result.code())
self.assertEqual(
set(
[
cygrpc.OperationType.send_initial_metadata,
cygrpc.OperationType.send_message,
cygrpc.OperationType.send_close_from_client,
cygrpc.OperationType.receive_initial_metadata,
cygrpc.OperationType.receive_message,
cygrpc.OperationType.receive_status_on_client,
]
),
found_client_op_types,
)
self.assertEqual(1, len(server_message_event.batch_operations))
found_server_op_types = set()
for server_result in server_message_event.batch_operations:
self.assertNotIn(server_result.type(), found_server_op_types)
found_server_op_types.add(server_result.type())
if server_result.type() == cygrpc.OperationType.receive_message:
self.assertEqual(REQUEST, server_result.message())
elif (
server_result.type()
== cygrpc.OperationType.receive_close_on_server
):
self.assertFalse(server_result.cancelled())
self.assertEqual(
set(
[
cygrpc.OperationType.receive_message,
]
),
found_server_op_types,
)
self.assertEqual(4, len(server_event.batch_operations))
found_server_op_types = set()
for server_result in server_event.batch_operations:
self.assertNotIn(server_result.type(), found_server_op_types)
found_server_op_types.add(server_result.type())
if server_result.type() == cygrpc.OperationType.receive_message:
self.assertEqual(REQUEST, server_result.message())
elif (
server_result.type()
== cygrpc.OperationType.receive_close_on_server
):
self.assertFalse(server_result.cancelled())
self.assertEqual(
set(
[
cygrpc.OperationType.send_initial_metadata,
cygrpc.OperationType.send_message,
cygrpc.OperationType.receive_close_on_server,
cygrpc.OperationType.send_status_from_server,
]
),
found_server_op_types,
)
del client_call
del server_call
def test_6522(self):
DEADLINE = time.time() + 5
DEADLINE_TOLERANCE = 0.25
METHOD = b"twinkies"
empty_metadata = ()
# Prologue
server_request_tag = object()
self.server.request_call(
self.server_completion_queue,
self.server_completion_queue,
server_request_tag,
)
client_call = self.client_channel.segregated_call(
0,
METHOD,
self.host_argument,
DEADLINE,
None,
None,
(
[
(
[
cygrpc.SendInitialMetadataOperation(
empty_metadata, _EMPTY_FLAGS
),
cygrpc.ReceiveInitialMetadataOperation(
_EMPTY_FLAGS
),
],
object(),
),
(
[
cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS),
],
object(),
),
]
),
)
client_initial_metadata_event_future = test_utilities.SimpleFuture(
client_call.next_event
)
request_event = self.server_completion_queue.poll(deadline=DEADLINE)
server_call = request_event.call
def perform_server_operations(operations, description):
return self._perform_queue_operations(
operations,
server_call,
self.server_completion_queue,
DEADLINE,
description,
)
server_event_future = perform_server_operations(
[
cygrpc.SendInitialMetadataOperation(
empty_metadata, _EMPTY_FLAGS
),
],
"Server prologue",
)
client_initial_metadata_event_future.result() # force completion
server_event_future.result()
# Messaging
for _ in range(10):
client_call.operate(
[
cygrpc.SendMessageOperation(b"", _EMPTY_FLAGS),
cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),
],
"Client message",
)
client_message_event_future = test_utilities.SimpleFuture(
client_call.next_event
)
server_event_future = perform_server_operations(
[
cygrpc.SendMessageOperation(b"", _EMPTY_FLAGS),
cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),
],
"Server receive",
)
client_message_event_future.result() # force completion
server_event_future.result()
# Epilogue
client_call.operate(
[
cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS),
],
"Client epilogue",
)
# One for ReceiveStatusOnClient, one for SendCloseFromClient.
client_events_future = test_utilities.SimpleFuture(
lambda: {
client_call.next_event(),
client_call.next_event(),
}
)
server_event_future = perform_server_operations(
[
cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS),
cygrpc.SendStatusFromServerOperation(
empty_metadata, cygrpc.StatusCode.ok, b"", _EMPTY_FLAGS
),
],
"Server epilogue",
)
client_events_future.result() # force completion
server_event_future.result()
class InsecureServerInsecureClient(unittest.TestCase, ServerClientMixin):
def setUp(self):
self.setUpMixin(None, None, None)
def tearDown(self):
self.tearDownMixin()
class SecureServerSecureClient(unittest.TestCase, ServerClientMixin):
def setUp(self):
server_credentials = cygrpc.server_credentials_ssl(
None,
[
cygrpc.SslPemKeyCertPair(
resources.private_key(), resources.certificate_chain()
)
],
False,
)
client_credentials = cygrpc.SSLChannelCredentials(
resources.test_root_certificates(), None, None
)
self.setUpMixin(
server_credentials, client_credentials, _SSL_HOST_OVERRIDE
)
def tearDown(self):
self.tearDownMixin()
if __name__ == "__main__":
unittest.main(verbosity=2)
| 19,660
| 33.133681
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_cython/_no_messages_single_server_completion_queue_test.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test a corner-case at the level of the Cython API."""
import threading
import unittest
from grpc._cython import cygrpc
from tests.unit._cython import _common
from tests.unit._cython import test_utilities
class Test(_common.RpcTest, unittest.TestCase):
def _do_rpcs(self):
server_request_call_tag = "server_request_call_tag"
server_send_initial_metadata_tag = "server_send_initial_metadata_tag"
server_complete_rpc_tag = "server_complete_rpc_tag"
with self.server_condition:
server_request_call_start_batch_result = self.server.request_call(
self.server_completion_queue,
self.server_completion_queue,
server_request_call_tag,
)
self.server_driver.add_due(
{
server_request_call_tag,
}
)
client_receive_initial_metadata_tag = (
"client_receive_initial_metadata_tag"
)
client_complete_rpc_tag = "client_complete_rpc_tag"
client_call = self.channel.integrated_call(
_common.EMPTY_FLAGS,
b"/twinkies",
None,
None,
_common.INVOCATION_METADATA,
None,
[
(
[
cygrpc.SendInitialMetadataOperation(
_common.INVOCATION_METADATA, _common.EMPTY_FLAGS
),
cygrpc.SendCloseFromClientOperation(
_common.EMPTY_FLAGS
),
cygrpc.ReceiveStatusOnClientOperation(
_common.EMPTY_FLAGS
),
],
client_complete_rpc_tag,
),
],
)
client_call.operate(
[
cygrpc.ReceiveInitialMetadataOperation(_common.EMPTY_FLAGS),
],
client_receive_initial_metadata_tag,
)
client_events_future = test_utilities.SimpleFuture(
lambda: [
self.channel.next_call_event(),
self.channel.next_call_event(),
]
)
server_request_call_event = self.server_driver.event_with_tag(
server_request_call_tag
)
with self.server_condition:
server_send_initial_metadata_start_batch_result = (
server_request_call_event.call.start_server_batch(
[
cygrpc.SendInitialMetadataOperation(
_common.INITIAL_METADATA, _common.EMPTY_FLAGS
),
],
server_send_initial_metadata_tag,
)
)
self.server_driver.add_due(
{
server_send_initial_metadata_tag,
}
)
server_send_initial_metadata_event = self.server_driver.event_with_tag(
server_send_initial_metadata_tag
)
with self.server_condition:
server_complete_rpc_start_batch_result = (
server_request_call_event.call.start_server_batch(
[
cygrpc.ReceiveCloseOnServerOperation(
_common.EMPTY_FLAGS
),
cygrpc.SendStatusFromServerOperation(
_common.TRAILING_METADATA,
cygrpc.StatusCode.ok,
"test details",
_common.EMPTY_FLAGS,
),
],
server_complete_rpc_tag,
)
)
self.server_driver.add_due(
{
server_complete_rpc_tag,
}
)
server_complete_rpc_event = self.server_driver.event_with_tag(
server_complete_rpc_tag
)
client_events = client_events_future.result()
client_receive_initial_metadata_event = client_events[0]
client_complete_rpc_event = client_events[1]
return (
_common.OperationResult(
server_request_call_start_batch_result,
server_request_call_event.completion_type,
server_request_call_event.success,
),
_common.OperationResult(
cygrpc.CallError.ok,
client_receive_initial_metadata_event.completion_type,
client_receive_initial_metadata_event.success,
),
_common.OperationResult(
cygrpc.CallError.ok,
client_complete_rpc_event.completion_type,
client_complete_rpc_event.success,
),
_common.OperationResult(
server_send_initial_metadata_start_batch_result,
server_send_initial_metadata_event.completion_type,
server_send_initial_metadata_event.success,
),
_common.OperationResult(
server_complete_rpc_start_batch_result,
server_complete_rpc_event.completion_type,
server_complete_rpc_event.success,
),
)
def test_rpcs(self):
expecteds = [
(_common.SUCCESSFUL_OPERATION_RESULT,) * 5
] * _common.RPC_COUNT
actuallys = _common.execute_many_times(self._do_rpcs)
self.assertSequenceEqual(expecteds, actuallys)
if __name__ == "__main__":
unittest.main(verbosity=2)
| 6,262
| 34.585227
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_cython/_read_some_but_not_all_responses_test.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test a corner-case at the level of the Cython API."""
import threading
import unittest
from grpc._cython import cygrpc
from tests.unit._cython import test_utilities
_EMPTY_FLAGS = 0
_EMPTY_METADATA = ()
class _ServerDriver(object):
def __init__(self, completion_queue, shutdown_tag):
self._condition = threading.Condition()
self._completion_queue = completion_queue
self._shutdown_tag = shutdown_tag
self._events = []
self._saw_shutdown_tag = False
def start(self):
def in_thread():
while True:
event = self._completion_queue.poll()
with self._condition:
self._events.append(event)
self._condition.notify()
if event.tag is self._shutdown_tag:
self._saw_shutdown_tag = True
break
thread = threading.Thread(target=in_thread)
thread.start()
def done(self):
with self._condition:
return self._saw_shutdown_tag
def first_event(self):
with self._condition:
while not self._events:
self._condition.wait()
return self._events[0]
def events(self):
with self._condition:
while not self._saw_shutdown_tag:
self._condition.wait()
return tuple(self._events)
class _QueueDriver(object):
def __init__(self, condition, completion_queue, due):
self._condition = condition
self._completion_queue = completion_queue
self._due = due
self._events = []
self._returned = False
def start(self):
def in_thread():
while True:
event = self._completion_queue.poll()
with self._condition:
self._events.append(event)
self._due.remove(event.tag)
self._condition.notify_all()
if not self._due:
self._returned = True
return
thread = threading.Thread(target=in_thread)
thread.start()
def done(self):
with self._condition:
return self._returned
def event_with_tag(self, tag):
with self._condition:
while True:
for event in self._events:
if event.tag is tag:
return event
self._condition.wait()
def events(self):
with self._condition:
while not self._returned:
self._condition.wait()
return tuple(self._events)
class ReadSomeButNotAllResponsesTest(unittest.TestCase):
def testReadSomeButNotAllResponses(self):
server_completion_queue = cygrpc.CompletionQueue()
server = cygrpc.Server(
[
(
b"grpc.so_reuseport",
0,
)
],
False,
)
server.register_completion_queue(server_completion_queue)
port = server.add_http2_port(b"[::]:0")
server.start()
channel = cygrpc.Channel(
"localhost:{}".format(port).encode(), set(), None
)
server_shutdown_tag = "server_shutdown_tag"
server_driver = _ServerDriver(
server_completion_queue, server_shutdown_tag
)
server_driver.start()
client_condition = threading.Condition()
client_due = set()
server_call_condition = threading.Condition()
server_send_initial_metadata_tag = "server_send_initial_metadata_tag"
server_send_first_message_tag = "server_send_first_message_tag"
server_send_second_message_tag = "server_send_second_message_tag"
server_complete_rpc_tag = "server_complete_rpc_tag"
server_call_due = set(
(
server_send_initial_metadata_tag,
server_send_first_message_tag,
server_send_second_message_tag,
server_complete_rpc_tag,
)
)
server_call_completion_queue = cygrpc.CompletionQueue()
server_call_driver = _QueueDriver(
server_call_condition, server_call_completion_queue, server_call_due
)
server_call_driver.start()
server_rpc_tag = "server_rpc_tag"
request_call_result = server.request_call(
server_call_completion_queue,
server_completion_queue,
server_rpc_tag,
)
client_receive_initial_metadata_tag = (
"client_receive_initial_metadata_tag"
)
client_complete_rpc_tag = "client_complete_rpc_tag"
client_call = channel.segregated_call(
_EMPTY_FLAGS,
b"/twinkies",
None,
None,
_EMPTY_METADATA,
None,
(
(
[
cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS),
],
client_receive_initial_metadata_tag,
),
(
[
cygrpc.SendInitialMetadataOperation(
_EMPTY_METADATA, _EMPTY_FLAGS
),
cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS),
cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS),
],
client_complete_rpc_tag,
),
),
)
client_receive_initial_metadata_event_future = (
test_utilities.SimpleFuture(client_call.next_event)
)
server_rpc_event = server_driver.first_event()
with server_call_condition:
server_send_initial_metadata_start_batch_result = (
server_rpc_event.call.start_server_batch(
[
cygrpc.SendInitialMetadataOperation(
_EMPTY_METADATA, _EMPTY_FLAGS
),
],
server_send_initial_metadata_tag,
)
)
server_send_first_message_start_batch_result = (
server_rpc_event.call.start_server_batch(
[
cygrpc.SendMessageOperation(b"\x07", _EMPTY_FLAGS),
],
server_send_first_message_tag,
)
)
server_send_initial_metadata_event = server_call_driver.event_with_tag(
server_send_initial_metadata_tag
)
server_send_first_message_event = server_call_driver.event_with_tag(
server_send_first_message_tag
)
with server_call_condition:
server_send_second_message_start_batch_result = (
server_rpc_event.call.start_server_batch(
[
cygrpc.SendMessageOperation(b"\x07", _EMPTY_FLAGS),
],
server_send_second_message_tag,
)
)
server_complete_rpc_start_batch_result = (
server_rpc_event.call.start_server_batch(
[
cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS),
cygrpc.SendStatusFromServerOperation(
(),
cygrpc.StatusCode.ok,
b"test details",
_EMPTY_FLAGS,
),
],
server_complete_rpc_tag,
)
)
server_send_second_message_event = server_call_driver.event_with_tag(
server_send_second_message_tag
)
server_complete_rpc_event = server_call_driver.event_with_tag(
server_complete_rpc_tag
)
server_call_driver.events()
client_receive_initial_metadata_event = (
client_receive_initial_metadata_event_future.result()
)
client_receive_first_message_tag = "client_receive_first_message_tag"
client_call.operate(
[
cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),
],
client_receive_first_message_tag,
)
client_receive_first_message_event = client_call.next_event()
client_call_cancel_result = client_call.cancel(
cygrpc.StatusCode.cancelled, "Cancelled during test!"
)
client_complete_rpc_event = client_call.next_event()
channel.close(cygrpc.StatusCode.unknown, "Channel closed!")
server.shutdown(server_completion_queue, server_shutdown_tag)
server.cancel_all_calls()
server_driver.events()
self.assertEqual(cygrpc.CallError.ok, request_call_result)
self.assertEqual(
cygrpc.CallError.ok, server_send_initial_metadata_start_batch_result
)
self.assertIs(server_rpc_tag, server_rpc_event.tag)
self.assertEqual(
cygrpc.CompletionType.operation_complete,
server_rpc_event.completion_type,
)
self.assertIsInstance(server_rpc_event.call, cygrpc.Call)
if __name__ == "__main__":
unittest.main(verbosity=2)
| 9,960
| 33.230241
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_cython/_cancel_many_calls_test.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test making many calls and immediately cancelling most of them."""
import threading
import unittest
from grpc._cython import cygrpc
from grpc.framework.foundation import logging_pool
from tests.unit._cython import test_utilities
from tests.unit.framework.common import test_constants
_EMPTY_FLAGS = 0
_EMPTY_METADATA = ()
_SERVER_SHUTDOWN_TAG = "server_shutdown"
_REQUEST_CALL_TAG = "request_call"
_RECEIVE_CLOSE_ON_SERVER_TAG = "receive_close_on_server"
_RECEIVE_MESSAGE_TAG = "receive_message"
_SERVER_COMPLETE_CALL_TAG = "server_complete_call"
_SUCCESS_CALL_FRACTION = 1.0 / 8.0
_SUCCESSFUL_CALLS = int(test_constants.RPC_CONCURRENCY * _SUCCESS_CALL_FRACTION)
_UNSUCCESSFUL_CALLS = test_constants.RPC_CONCURRENCY - _SUCCESSFUL_CALLS
class _State(object):
def __init__(self):
self.condition = threading.Condition()
self.handlers_released = False
self.parked_handlers = 0
self.handled_rpcs = 0
def _is_cancellation_event(event):
return (
event.tag is _RECEIVE_CLOSE_ON_SERVER_TAG
and event.batch_operations[0].cancelled()
)
class _Handler(object):
def __init__(self, state, completion_queue, rpc_event):
self._state = state
self._lock = threading.Lock()
self._completion_queue = completion_queue
self._call = rpc_event.call
def __call__(self):
with self._state.condition:
self._state.parked_handlers += 1
if self._state.parked_handlers == test_constants.THREAD_CONCURRENCY:
self._state.condition.notify_all()
while not self._state.handlers_released:
self._state.condition.wait()
with self._lock:
self._call.start_server_batch(
(cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS),),
_RECEIVE_CLOSE_ON_SERVER_TAG,
)
self._call.start_server_batch(
(cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),),
_RECEIVE_MESSAGE_TAG,
)
first_event = self._completion_queue.poll()
if _is_cancellation_event(first_event):
self._completion_queue.poll()
else:
with self._lock:
operations = (
cygrpc.SendInitialMetadataOperation(
_EMPTY_METADATA, _EMPTY_FLAGS
),
cygrpc.SendMessageOperation(b"\x79\x57", _EMPTY_FLAGS),
cygrpc.SendStatusFromServerOperation(
_EMPTY_METADATA,
cygrpc.StatusCode.ok,
b"test details!",
_EMPTY_FLAGS,
),
)
self._call.start_server_batch(
operations, _SERVER_COMPLETE_CALL_TAG
)
self._completion_queue.poll()
self._completion_queue.poll()
def _serve(state, server, server_completion_queue, thread_pool):
for _ in range(test_constants.RPC_CONCURRENCY):
call_completion_queue = cygrpc.CompletionQueue()
server.request_call(
call_completion_queue, server_completion_queue, _REQUEST_CALL_TAG
)
rpc_event = server_completion_queue.poll()
thread_pool.submit(_Handler(state, call_completion_queue, rpc_event))
with state.condition:
state.handled_rpcs += 1
if test_constants.RPC_CONCURRENCY <= state.handled_rpcs:
state.condition.notify_all()
server_completion_queue.poll()
class _QueueDriver(object):
def __init__(self, condition, completion_queue, due):
self._condition = condition
self._completion_queue = completion_queue
self._due = due
self._events = []
self._returned = False
def start(self):
def in_thread():
while True:
event = self._completion_queue.poll()
with self._condition:
self._events.append(event)
self._due.remove(event.tag)
self._condition.notify_all()
if not self._due:
self._returned = True
return
thread = threading.Thread(target=in_thread)
thread.start()
def events(self, at_least):
with self._condition:
while len(self._events) < at_least:
self._condition.wait()
return tuple(self._events)
class CancelManyCallsTest(unittest.TestCase):
def testCancelManyCalls(self):
server_thread_pool = logging_pool.pool(
test_constants.THREAD_CONCURRENCY
)
server_completion_queue = cygrpc.CompletionQueue()
server = cygrpc.Server(
[
(
b"grpc.so_reuseport",
0,
)
],
False,
)
server.register_completion_queue(server_completion_queue)
port = server.add_http2_port(b"[::]:0")
server.start()
channel = cygrpc.Channel(
"localhost:{}".format(port).encode(), None, None
)
state = _State()
server_thread_args = (
state,
server,
server_completion_queue,
server_thread_pool,
)
server_thread = threading.Thread(target=_serve, args=server_thread_args)
server_thread.start()
client_condition = threading.Condition()
client_due = set()
with client_condition:
client_calls = []
for index in range(test_constants.RPC_CONCURRENCY):
tag = "client_complete_call_{0:04d}_tag".format(index)
client_call = channel.integrated_call(
_EMPTY_FLAGS,
b"/twinkies",
None,
None,
_EMPTY_METADATA,
None,
(
(
(
cygrpc.SendInitialMetadataOperation(
_EMPTY_METADATA, _EMPTY_FLAGS
),
cygrpc.SendMessageOperation(
b"\x45\x56", _EMPTY_FLAGS
),
cygrpc.SendCloseFromClientOperation(
_EMPTY_FLAGS
),
cygrpc.ReceiveInitialMetadataOperation(
_EMPTY_FLAGS
),
cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),
cygrpc.ReceiveStatusOnClientOperation(
_EMPTY_FLAGS
),
),
tag,
),
),
)
client_due.add(tag)
client_calls.append(client_call)
client_events_future = test_utilities.SimpleFuture(
lambda: tuple(
channel.next_call_event() for _ in range(_SUCCESSFUL_CALLS)
)
)
with state.condition:
while True:
if state.parked_handlers < test_constants.THREAD_CONCURRENCY:
state.condition.wait()
elif state.handled_rpcs < test_constants.RPC_CONCURRENCY:
state.condition.wait()
else:
state.handlers_released = True
state.condition.notify_all()
break
client_events_future.result()
with client_condition:
for client_call in client_calls:
client_call.cancel(cygrpc.StatusCode.cancelled, "Cancelled!")
for _ in range(_UNSUCCESSFUL_CALLS):
channel.next_call_event()
channel.close(cygrpc.StatusCode.unknown, "Cancelled on channel close!")
with state.condition:
server.shutdown(server_completion_queue, _SERVER_SHUTDOWN_TAG)
if __name__ == "__main__":
unittest.main(verbosity=2)
| 8,882
| 34.110672
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_cython/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_cython/_channel_test.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import threading
import time
import unittest
from grpc._cython import cygrpc
from tests.unit.framework.common import test_constants
def _channel():
return cygrpc.Channel(b"localhost:54321", (), None)
def _connectivity_loop(channel):
for _ in range(100):
connectivity = channel.check_connectivity_state(True)
channel.watch_connectivity_state(connectivity, time.time() + 0.2)
def _create_loop_destroy():
channel = _channel()
_connectivity_loop(channel)
channel.close(cygrpc.StatusCode.ok, "Channel close!")
def _in_parallel(behavior, arguments):
threads = tuple(
threading.Thread(target=behavior, args=arguments)
for _ in range(test_constants.THREAD_CONCURRENCY)
)
for thread in threads:
thread.start()
for thread in threads:
thread.join()
class ChannelTest(unittest.TestCase):
def test_single_channel_lonely_connectivity(self):
channel = _channel()
_connectivity_loop(channel)
channel.close(cygrpc.StatusCode.ok, "Channel close!")
def test_multiple_channels_lonely_connectivity(self):
_in_parallel(_create_loop_destroy, ())
def test_negative_deadline_connectivity(self):
channel = _channel()
connectivity = channel.check_connectivity_state(True)
channel.watch_connectivity_state(connectivity, -3.14)
channel.close(cygrpc.StatusCode.ok, "Channel close!")
# NOTE(lidiz) The negative timeout should not trigger SIGABRT.
# Bug report: https://github.com/grpc/grpc/issues/18244
if __name__ == "__main__":
unittest.main(verbosity=2)
| 2,200
| 30
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_cython/_no_messages_server_completion_queue_per_call_test.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test a corner-case at the level of the Cython API."""
import threading
import unittest
from grpc._cython import cygrpc
from tests.unit._cython import _common
from tests.unit._cython import test_utilities
class Test(_common.RpcTest, unittest.TestCase):
def _do_rpcs(self):
server_call_condition = threading.Condition()
server_call_completion_queue = cygrpc.CompletionQueue()
server_call_driver = _common.QueueDriver(
server_call_condition, server_call_completion_queue
)
server_request_call_tag = "server_request_call_tag"
server_send_initial_metadata_tag = "server_send_initial_metadata_tag"
server_complete_rpc_tag = "server_complete_rpc_tag"
with self.server_condition:
server_request_call_start_batch_result = self.server.request_call(
server_call_completion_queue,
self.server_completion_queue,
server_request_call_tag,
)
self.server_driver.add_due(
{
server_request_call_tag,
}
)
client_receive_initial_metadata_tag = (
"client_receive_initial_metadata_tag"
)
client_complete_rpc_tag = "client_complete_rpc_tag"
client_call = self.channel.integrated_call(
_common.EMPTY_FLAGS,
b"/twinkies",
None,
None,
_common.INVOCATION_METADATA,
None,
[
(
[
cygrpc.ReceiveInitialMetadataOperation(
_common.EMPTY_FLAGS
),
],
client_receive_initial_metadata_tag,
)
],
)
client_call.operate(
[
cygrpc.SendInitialMetadataOperation(
_common.INVOCATION_METADATA, _common.EMPTY_FLAGS
),
cygrpc.SendCloseFromClientOperation(_common.EMPTY_FLAGS),
cygrpc.ReceiveStatusOnClientOperation(_common.EMPTY_FLAGS),
],
client_complete_rpc_tag,
)
client_events_future = test_utilities.SimpleFuture(
lambda: [
self.channel.next_call_event(),
self.channel.next_call_event(),
]
)
server_request_call_event = self.server_driver.event_with_tag(
server_request_call_tag
)
with server_call_condition:
server_send_initial_metadata_start_batch_result = (
server_request_call_event.call.start_server_batch(
[
cygrpc.SendInitialMetadataOperation(
_common.INITIAL_METADATA, _common.EMPTY_FLAGS
),
],
server_send_initial_metadata_tag,
)
)
server_call_driver.add_due(
{
server_send_initial_metadata_tag,
}
)
server_send_initial_metadata_event = server_call_driver.event_with_tag(
server_send_initial_metadata_tag
)
with server_call_condition:
server_complete_rpc_start_batch_result = (
server_request_call_event.call.start_server_batch(
[
cygrpc.ReceiveCloseOnServerOperation(
_common.EMPTY_FLAGS
),
cygrpc.SendStatusFromServerOperation(
_common.TRAILING_METADATA,
cygrpc.StatusCode.ok,
b"test details",
_common.EMPTY_FLAGS,
),
],
server_complete_rpc_tag,
)
)
server_call_driver.add_due(
{
server_complete_rpc_tag,
}
)
server_complete_rpc_event = server_call_driver.event_with_tag(
server_complete_rpc_tag
)
client_events = client_events_future.result()
if client_events[0].tag is client_receive_initial_metadata_tag:
client_receive_initial_metadata_event = client_events[0]
client_complete_rpc_event = client_events[1]
else:
client_complete_rpc_event = client_events[0]
client_receive_initial_metadata_event = client_events[1]
return (
_common.OperationResult(
server_request_call_start_batch_result,
server_request_call_event.completion_type,
server_request_call_event.success,
),
_common.OperationResult(
cygrpc.CallError.ok,
client_receive_initial_metadata_event.completion_type,
client_receive_initial_metadata_event.success,
),
_common.OperationResult(
cygrpc.CallError.ok,
client_complete_rpc_event.completion_type,
client_complete_rpc_event.success,
),
_common.OperationResult(
server_send_initial_metadata_start_batch_result,
server_send_initial_metadata_event.completion_type,
server_send_initial_metadata_event.success,
),
_common.OperationResult(
server_complete_rpc_start_batch_result,
server_complete_rpc_event.completion_type,
server_complete_rpc_event.success,
),
)
def test_rpcs(self):
expecteds = [
(_common.SUCCESSFUL_OPERATION_RESULT,) * 5
] * _common.RPC_COUNT
actuallys = _common.execute_many_times(self._do_rpcs)
self.assertSequenceEqual(expecteds, actuallys)
if __name__ == "__main__":
unittest.main(verbosity=2)
| 6,640
| 34.897297
| 79
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_cython/_common.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common utilities for tests of the Cython layer of gRPC Python."""
import collections
import threading
from grpc._cython import cygrpc
RPC_COUNT = 4000
EMPTY_FLAGS = 0
INVOCATION_METADATA = (
("client-md-key", "client-md-key"),
("client-md-key-bin", b"\x00\x01" * 3000),
)
INITIAL_METADATA = (
("server-initial-md-key", "server-initial-md-value"),
("server-initial-md-key-bin", b"\x00\x02" * 3000),
)
TRAILING_METADATA = (
("server-trailing-md-key", "server-trailing-md-value"),
("server-trailing-md-key-bin", b"\x00\x03" * 3000),
)
class QueueDriver(object):
def __init__(self, condition, completion_queue):
self._condition = condition
self._completion_queue = completion_queue
self._due = collections.defaultdict(int)
self._events = collections.defaultdict(list)
def add_due(self, tags):
if not self._due:
def in_thread():
while True:
event = self._completion_queue.poll()
with self._condition:
self._events[event.tag].append(event)
self._due[event.tag] -= 1
self._condition.notify_all()
if self._due[event.tag] <= 0:
self._due.pop(event.tag)
if not self._due:
return
thread = threading.Thread(target=in_thread)
thread.start()
for tag in tags:
self._due[tag] += 1
def event_with_tag(self, tag):
with self._condition:
while True:
if self._events[tag]:
return self._events[tag].pop(0)
else:
self._condition.wait()
def execute_many_times(behavior):
return tuple(behavior() for _ in range(RPC_COUNT))
class OperationResult(
collections.namedtuple(
"OperationResult",
(
"start_batch_result",
"completion_type",
"success",
),
)
):
pass
SUCCESSFUL_OPERATION_RESULT = OperationResult(
cygrpc.CallError.ok, cygrpc.CompletionType.operation_complete, True
)
class RpcTest(object):
def setUp(self):
self.server_completion_queue = cygrpc.CompletionQueue()
self.server = cygrpc.Server([(b"grpc.so_reuseport", 0)], False)
self.server.register_completion_queue(self.server_completion_queue)
port = self.server.add_http2_port(b"[::]:0")
self.server.start()
self.channel = cygrpc.Channel(
"localhost:{}".format(port).encode(), [], None
)
self._server_shutdown_tag = "server_shutdown_tag"
self.server_condition = threading.Condition()
self.server_driver = QueueDriver(
self.server_condition, self.server_completion_queue
)
with self.server_condition:
self.server_driver.add_due(
{
self._server_shutdown_tag,
}
)
self.client_condition = threading.Condition()
self.client_completion_queue = cygrpc.CompletionQueue()
self.client_driver = QueueDriver(
self.client_condition, self.client_completion_queue
)
def tearDown(self):
self.server.shutdown(
self.server_completion_queue, self._server_shutdown_tag
)
self.server.cancel_all_calls()
| 4,047
| 29.43609
| 75
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_cython/_fork_test.py
|
# Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import threading
import unittest
from grpc._cython import cygrpc
def _get_number_active_threads():
return cygrpc._fork_state.active_thread_count._num_active_threads
@unittest.skipIf(os.name == "nt", "Posix-specific tests")
class ForkPosixTester(unittest.TestCase):
def setUp(self):
self._saved_fork_support_flag = cygrpc._GRPC_ENABLE_FORK_SUPPORT
cygrpc._GRPC_ENABLE_FORK_SUPPORT = True
def testForkManagedThread(self):
def cb():
self.assertEqual(1, _get_number_active_threads())
thread = cygrpc.ForkManagedThread(cb)
thread.start()
thread.join()
self.assertEqual(0, _get_number_active_threads())
def testForkManagedThreadThrowsException(self):
def cb():
self.assertEqual(1, _get_number_active_threads())
raise Exception("expected exception")
thread = cygrpc.ForkManagedThread(cb)
thread.start()
thread.join()
self.assertEqual(0, _get_number_active_threads())
def tearDown(self):
cygrpc._GRPC_ENABLE_FORK_SUPPORT = self._saved_fork_support_flag
@unittest.skipUnless(os.name == "nt", "Windows-specific tests")
class ForkWindowsTester(unittest.TestCase):
def testForkManagedThreadIsNoOp(self):
def cb():
pass
thread = cygrpc.ForkManagedThread(cb)
thread.start()
thread.join()
if __name__ == "__main__":
unittest.main(verbosity=2)
| 2,043
| 29.058824
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/_cython/_server_test.py
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test servers at the level of the Cython API."""
import threading
import time
import unittest
from grpc._cython import cygrpc
class Test(unittest.TestCase):
def test_lonely_server(self):
server_call_completion_queue = cygrpc.CompletionQueue()
server_shutdown_completion_queue = cygrpc.CompletionQueue()
server = cygrpc.Server(None, False)
server.register_completion_queue(server_call_completion_queue)
server.register_completion_queue(server_shutdown_completion_queue)
port = server.add_http2_port(b"[::]:0")
server.start()
server_request_call_tag = "server_request_call_tag"
server_request_call_start_batch_result = server.request_call(
server_call_completion_queue,
server_call_completion_queue,
server_request_call_tag,
)
time.sleep(4)
server_shutdown_tag = "server_shutdown_tag"
server_shutdown_result = server.shutdown(
server_shutdown_completion_queue, server_shutdown_tag
)
server_request_call_event = server_call_completion_queue.poll()
server_shutdown_event = server_shutdown_completion_queue.poll()
if __name__ == "__main__":
unittest.main(verbosity=2)
| 1,835
| 34.307692
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/framework/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/framework/common/test_constants.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Constants shared among tests throughout RPC Framework."""
# Value for maximum duration in seconds that a test is allowed for its actual
# behavioral logic, excluding all time spent deliberately waiting in the test.
TIME_ALLOWANCE = 10
# Value for maximum duration in seconds of RPCs that may time out as part of a
# test.
SHORT_TIMEOUT = 4
# Absurdly large value for maximum duration in seconds for should-not-time-out
# RPCs made during tests.
LONG_TIMEOUT = 3000
# Values to supply on construction of an object that will service RPCs; these
# should not be used as the actual timeout values of any RPCs made during tests.
DEFAULT_TIMEOUT = 300
MAXIMUM_TIMEOUT = 3600
# The number of payloads to transmit in streaming tests.
STREAM_LENGTH = 200
# The size of payloads to transmit in tests.
PAYLOAD_SIZE = 256 * 1024 + 17
# The concurrency to use in tests of concurrent RPCs that will not create as
# many threads as RPCs.
RPC_CONCURRENCY = 200
# The concurrency to use in tests of concurrent RPCs that will create as many
# threads as RPCs.
THREAD_CONCURRENCY = 25
# The size of thread pools to use in tests.
POOL_SIZE = 10
| 1,711
| 36.217391
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/framework/common/test_control.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Code for instructing systems under test to block or fail."""
import abc
import contextlib
import threading
class Defect(Exception):
"""Simulates a programming defect raised into in a system under test.
Use of a standard exception type is too easily misconstrued as an actual
defect in either the test infrastructure or the system under test.
"""
class NestedDefect(Exception):
"""Simulates a nested programming defect raised into in a system under test."""
def __str__(self):
raise Exception("Nested Exception")
class Control(abc.ABC):
"""An object that accepts program control from a system under test.
A system under test passed a Control should call its control() method
frequently during execution. The control() method may block, raise an
exception, or do nothing, all according to the enclosing test's desire for
the system under test to simulate freezing, failing, or functioning.
"""
@abc.abstractmethod
def control(self):
"""Potentially does anything."""
raise NotImplementedError()
class PauseFailControl(Control):
"""A Control that can be used to pause or fail code under control.
This object is only safe for use from two threads: one of the system under
test calling control and the other from the test system calling pause,
block_until_paused, and fail.
"""
def __init__(self):
self._condition = threading.Condition()
self._pause = False
self._paused = False
self._fail = False
def control(self):
with self._condition:
if self._fail:
raise Defect()
while self._pause:
self._paused = True
self._condition.notify_all()
self._condition.wait()
self._paused = False
@contextlib.contextmanager
def pause(self):
"""Pauses code under control while controlling code is in context."""
with self._condition:
self._pause = True
yield
with self._condition:
self._pause = False
self._condition.notify_all()
def block_until_paused(self):
"""Blocks controlling code until code under control is paused.
May only be called within the context of a pause call.
"""
with self._condition:
while not self._paused:
self._condition.wait()
@contextlib.contextmanager
def fail(self):
"""Fails code under control while controlling code is in context."""
with self._condition:
self._fail = True
yield
with self._condition:
self._fail = False
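# Illustrative usage sketch added for clarity; it is not part of the original
# gRPC test utilities. It exercises the pause()/block_until_paused() protocol
# described in the PauseFailControl docstring. The helper name
# _example_pause_fail_control_usage is an assumption made for this sketch only.
def _example_pause_fail_control_usage():
    control = PauseFailControl()
    finished = []

    def system_under_test():
        # Code under control is expected to call control() frequently; this
        # sketch calls it exactly once.
        control.control()
        finished.append(True)

    worker = threading.Thread(target=system_under_test)
    with control.pause():
        worker.start()
        # Blocks until the worker thread is parked inside control().
        control.block_until_paused()
        assert not finished
    # Exiting the pause() context releases the worker.
    worker.join()
    assert finished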
| 3,279
| 30.84466
| 83
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/framework/common/__init__.py
|
# Copyright 2019 The gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import errno
import os
import socket
_DEFAULT_SOCK_OPTIONS = (
(socket.SO_REUSEADDR, socket.SO_REUSEPORT)
if os.name != "nt"
else (socket.SO_REUSEADDR,)
)
_UNRECOVERABLE_ERRNOS = (errno.EADDRINUSE, errno.ENOSR)
def get_socket(
bind_address="localhost",
port=0,
listen=True,
sock_options=_DEFAULT_SOCK_OPTIONS,
):
"""Opens a socket.
Useful for reserving a port for a system-under-test.
Args:
bind_address: The host to which to bind.
port: The port to which to bind.
listen: A boolean value indicating whether or not to listen on the socket.
sock_options: A sequence of socket options to apply to the socket.
Returns:
A tuple containing:
- the address to which the socket is bound
- the port to which the socket is bound
- the socket object itself
"""
_sock_options = sock_options if sock_options else []
if socket.has_ipv6:
address_families = (socket.AF_INET6, socket.AF_INET)
else:
address_families = (socket.AF_INET,)
for address_family in address_families:
try:
sock = socket.socket(address_family, socket.SOCK_STREAM)
for sock_option in _sock_options:
sock.setsockopt(socket.SOL_SOCKET, sock_option, 1)
sock.bind((bind_address, port))
if listen:
sock.listen(1)
return bind_address, sock.getsockname()[1], sock
except OSError as os_error:
sock.close()
if os_error.errno in _UNRECOVERABLE_ERRNOS:
raise
else:
continue
# For PY2, socket.error is a child class of IOError; for PY3, it is
# pointing to OSError. We need this catch to make it 2/3 agnostic.
except socket.error: # pylint: disable=duplicate-except
sock.close()
continue
raise RuntimeError(
"Failed to bind to {} with sock_options {}".format(
bind_address, sock_options
)
)
@contextlib.contextmanager
def bound_socket(
bind_address="localhost",
port=0,
listen=True,
sock_options=_DEFAULT_SOCK_OPTIONS,
):
"""Opens a socket bound to an arbitrary port.
Useful for reserving a port for a system-under-test.
Args:
bind_address: The host to which to bind.
port: The port to which to bind.
listen: A boolean value indicating whether or not to listen on the socket.
sock_options: A sequence of socket options to apply to the socket.
Yields:
A tuple containing:
- the address to which the socket is bound
- the port to which the socket is bound
"""
host, port, sock = get_socket(
bind_address=bind_address,
port=port,
listen=listen,
sock_options=sock_options,
)
try:
yield host, port
finally:
sock.close()
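# Illustrative usage sketch added for clarity; it is not part of the original
# module. It shows the intended pattern of reserving a port for a
# system-under-test with bound_socket(). The function name
# _example_bound_socket_usage is an assumption made for this sketch only.
def _example_bound_socket_usage():
    # Reserve an arbitrary free port on localhost without listening on it.
    with bound_socket(listen=False) as (host, port):
        # While the context is open, the port stays bound, so no other test
        # can claim it; a server under test would be pointed at this address.
        return "{}:{}".format(host, port)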
| 3,508
| 29.780702
| 80
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/framework/common/test_coverage.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Governs coverage for tests of RPCs throughout RPC Framework."""
import abc
# This code is designed for use with the unittest module.
# pylint: disable=invalid-name
class Coverage(abc.ABC):
"""Specification of test coverage."""
@abc.abstractmethod
def testSuccessfulUnaryRequestUnaryResponse(self):
raise NotImplementedError()
@abc.abstractmethod
def testSuccessfulUnaryRequestStreamResponse(self):
raise NotImplementedError()
@abc.abstractmethod
def testSuccessfulStreamRequestUnaryResponse(self):
raise NotImplementedError()
@abc.abstractmethod
def testSuccessfulStreamRequestStreamResponse(self):
raise NotImplementedError()
@abc.abstractmethod
def testSequentialInvocations(self):
raise NotImplementedError()
@abc.abstractmethod
def testParallelInvocations(self):
raise NotImplementedError()
@abc.abstractmethod
def testWaitingForSomeButNotAllParallelInvocations(self):
raise NotImplementedError()
@abc.abstractmethod
def testCancelledUnaryRequestUnaryResponse(self):
raise NotImplementedError()
@abc.abstractmethod
def testCancelledUnaryRequestStreamResponse(self):
raise NotImplementedError()
@abc.abstractmethod
def testCancelledStreamRequestUnaryResponse(self):
raise NotImplementedError()
@abc.abstractmethod
def testCancelledStreamRequestStreamResponse(self):
raise NotImplementedError()
@abc.abstractmethod
def testExpiredUnaryRequestUnaryResponse(self):
raise NotImplementedError()
@abc.abstractmethod
def testExpiredUnaryRequestStreamResponse(self):
raise NotImplementedError()
@abc.abstractmethod
def testExpiredStreamRequestUnaryResponse(self):
raise NotImplementedError()
@abc.abstractmethod
def testExpiredStreamRequestStreamResponse(self):
raise NotImplementedError()
@abc.abstractmethod
def testFailedUnaryRequestUnaryResponse(self):
raise NotImplementedError()
@abc.abstractmethod
def testFailedUnaryRequestStreamResponse(self):
raise NotImplementedError()
@abc.abstractmethod
def testFailedStreamRequestUnaryResponse(self):
raise NotImplementedError()
@abc.abstractmethod
def testFailedStreamRequestStreamResponse(self):
raise NotImplementedError()
| 2,980
| 28.81
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/framework/foundation/stream_testing.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for testing stream-related code."""
from grpc.framework.foundation import stream
class TestConsumer(stream.Consumer):
"""A stream.Consumer instrumented for testing.
Attributes:
calls: A sequence of value-termination pairs describing the history of calls
made on this object.
"""
def __init__(self):
self.calls = []
def consume(self, value):
"""See stream.Consumer.consume for specification."""
self.calls.append((value, False))
def terminate(self):
"""See stream.Consumer.terminate for specification."""
self.calls.append((None, True))
def consume_and_terminate(self, value):
"""See stream.Consumer.consume_and_terminate for specification."""
self.calls.append((value, True))
def is_legal(self):
"""Reports whether or not a legal sequence of calls has been made."""
terminated = False
for value, terminal in self.calls:
if terminated:
return False
elif terminal:
terminated = True
elif value is None:
return False
else: # pylint: disable=useless-else-on-loop
return True
def values(self):
"""Returns the sequence of values that have been passed to this Consumer."""
return [value for value, _ in self.calls if value]
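# Illustrative usage sketch added for clarity; it is not part of the original
# module. It records a legal consume/terminate sequence on a TestConsumer and
# checks the recorded history. _example_test_consumer_usage is an assumed name.
def _example_test_consumer_usage():
    consumer = TestConsumer()
    consumer.consume("first value")
    consumer.consume_and_terminate("final value")
    # calls now holds [("first value", False), ("final value", True)].
    assert consumer.values() == ["first value", "final value"]
    assert consumer.is_legal()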
| 1,968
| 32.948276
| 84
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/framework/foundation/_logging_pool_test.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for grpc.framework.foundation.logging_pool."""
import threading
import unittest
from grpc.framework.foundation import logging_pool
_POOL_SIZE = 16
class _CallableObject(object):
def __init__(self):
self._lock = threading.Lock()
self._passed_values = []
def __call__(self, value):
with self._lock:
self._passed_values.append(value)
def passed_values(self):
with self._lock:
return tuple(self._passed_values)
class LoggingPoolTest(unittest.TestCase):
def testUpAndDown(self):
pool = logging_pool.pool(_POOL_SIZE)
pool.shutdown(wait=True)
with logging_pool.pool(_POOL_SIZE) as pool:
self.assertIsNotNone(pool)
def testTaskExecuted(self):
test_list = []
with logging_pool.pool(_POOL_SIZE) as pool:
pool.submit(lambda: test_list.append(object())).result()
self.assertTrue(test_list)
def testException(self):
with logging_pool.pool(_POOL_SIZE) as pool:
raised_exception = pool.submit(lambda: 1 / 0).exception()
self.assertIsNotNone(raised_exception)
def testCallableObjectExecuted(self):
callable_object = _CallableObject()
passed_object = object()
with logging_pool.pool(_POOL_SIZE) as pool:
future = pool.submit(callable_object, passed_object)
self.assertIsNone(future.result())
self.assertSequenceEqual(
(passed_object,), callable_object.passed_values()
)
if __name__ == "__main__":
unittest.main(verbosity=2)
| 2,171
| 28.753425
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/unit/framework/foundation/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/channelz/_channelz_servicer_test.py
|
# Copyright 2018 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests of grpc_channelz.v1.channelz."""
from concurrent import futures
import sys
import unittest
import grpc
from grpc_channelz.v1 import channelz
from grpc_channelz.v1 import channelz_pb2
from grpc_channelz.v1 import channelz_pb2_grpc
from tests.unit import test_common
from tests.unit.framework.common import test_constants
_SUCCESSFUL_UNARY_UNARY = "/test/SuccessfulUnaryUnary"
_FAILED_UNARY_UNARY = "/test/FailedUnaryUnary"
_SUCCESSFUL_STREAM_STREAM = "/test/SuccessfulStreamStream"
_REQUEST = b"\x00\x00\x00"
_RESPONSE = b"\x01\x01\x01"
_DISABLE_REUSE_PORT = (("grpc.so_reuseport", 0),)
_ENABLE_CHANNELZ = (("grpc.enable_channelz", 1),)
_DISABLE_CHANNELZ = (("grpc.enable_channelz", 0),)
def _successful_unary_unary(request, servicer_context):
return _RESPONSE
def _failed_unary_unary(request, servicer_context):
servicer_context.set_code(grpc.StatusCode.INTERNAL)
servicer_context.set_details("Channelz Test Intended Failure")
def _successful_stream_stream(request_iterator, servicer_context):
for _ in request_iterator:
yield _RESPONSE
class _GenericHandler(grpc.GenericRpcHandler):
def service(self, handler_call_details):
if handler_call_details.method == _SUCCESSFUL_UNARY_UNARY:
return grpc.unary_unary_rpc_method_handler(_successful_unary_unary)
elif handler_call_details.method == _FAILED_UNARY_UNARY:
return grpc.unary_unary_rpc_method_handler(_failed_unary_unary)
elif handler_call_details.method == _SUCCESSFUL_STREAM_STREAM:
return grpc.stream_stream_rpc_method_handler(
_successful_stream_stream
)
else:
return None
class _ChannelServerPair(object):
def __init__(self):
# Server will enable channelz service
self.server = grpc.server(
futures.ThreadPoolExecutor(max_workers=3),
options=_DISABLE_REUSE_PORT + _ENABLE_CHANNELZ,
)
port = self.server.add_insecure_port("[::]:0")
self.server.add_generic_rpc_handlers((_GenericHandler(),))
self.server.start()
# Channel will enable channelz service...
self.channel = grpc.insecure_channel(
"localhost:%d" % port, _ENABLE_CHANNELZ
)
def _generate_channel_server_pairs(n):
return [_ChannelServerPair() for i in range(n)]
def _close_channel_server_pairs(pairs):
for pair in pairs:
pair.server.stop(None)
pair.channel.close()
@unittest.skipIf(
sys.version_info[0] < 3, "ProtoBuf descriptor has moved on from Python2"
)
class ChannelzServicerTest(unittest.TestCase):
def _send_successful_unary_unary(self, idx):
_, r = (
self._pairs[idx]
.channel.unary_unary(_SUCCESSFUL_UNARY_UNARY)
.with_call(_REQUEST)
)
self.assertEqual(r.code(), grpc.StatusCode.OK)
def _send_failed_unary_unary(self, idx):
try:
self._pairs[idx].channel.unary_unary(_FAILED_UNARY_UNARY).with_call(
_REQUEST
)
except grpc.RpcError:
return
else:
self.fail("This call supposed to fail")
def _send_successful_stream_stream(self, idx):
response_iterator = (
self._pairs[idx]
.channel.stream_stream(_SUCCESSFUL_STREAM_STREAM)
.__call__(iter([_REQUEST] * test_constants.STREAM_LENGTH))
)
cnt = 0
for _ in response_iterator:
cnt += 1
self.assertEqual(cnt, test_constants.STREAM_LENGTH)
def _get_channel_id(self, idx):
"""Channel id may not be consecutive"""
resp = self._channelz_stub.GetTopChannels(
channelz_pb2.GetTopChannelsRequest(start_channel_id=0)
)
self.assertGreater(len(resp.channel), idx)
return resp.channel[idx].ref.channel_id
def setUp(self):
self._pairs = []
        # This server is for fetching Channelz info only;
        # it should not itself enable Channelz.
self._server = grpc.server(
futures.ThreadPoolExecutor(max_workers=3),
options=_DISABLE_REUSE_PORT + _DISABLE_CHANNELZ,
)
port = self._server.add_insecure_port("[::]:0")
channelz.add_channelz_servicer(self._server)
self._server.start()
# This channel is used to fetch Channelz info only
# Channelz should not be enabled
self._channel = grpc.insecure_channel(
"localhost:%d" % port, _DISABLE_CHANNELZ
)
self._channelz_stub = channelz_pb2_grpc.ChannelzStub(self._channel)
def tearDown(self):
self._server.stop(None)
self._channel.close()
_close_channel_server_pairs(self._pairs)
def test_get_top_channels_basic(self):
self._pairs = _generate_channel_server_pairs(1)
resp = self._channelz_stub.GetTopChannels(
channelz_pb2.GetTopChannelsRequest(start_channel_id=0)
)
self.assertEqual(len(resp.channel), 1)
self.assertEqual(resp.end, True)
def test_get_top_channels_high_start_id(self):
self._pairs = _generate_channel_server_pairs(1)
resp = self._channelz_stub.GetTopChannels(
channelz_pb2.GetTopChannelsRequest(start_channel_id=10000)
)
self.assertEqual(len(resp.channel), 0)
self.assertEqual(resp.end, True)
def test_successful_request(self):
self._pairs = _generate_channel_server_pairs(1)
self._send_successful_unary_unary(0)
resp = self._channelz_stub.GetChannel(
channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0))
)
self.assertEqual(resp.channel.data.calls_started, 1)
self.assertEqual(resp.channel.data.calls_succeeded, 1)
self.assertEqual(resp.channel.data.calls_failed, 0)
def test_failed_request(self):
self._pairs = _generate_channel_server_pairs(1)
self._send_failed_unary_unary(0)
resp = self._channelz_stub.GetChannel(
channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0))
)
self.assertEqual(resp.channel.data.calls_started, 1)
self.assertEqual(resp.channel.data.calls_succeeded, 0)
self.assertEqual(resp.channel.data.calls_failed, 1)
def test_many_requests(self):
self._pairs = _generate_channel_server_pairs(1)
k_success = 7
k_failed = 9
for i in range(k_success):
self._send_successful_unary_unary(0)
for i in range(k_failed):
self._send_failed_unary_unary(0)
resp = self._channelz_stub.GetChannel(
channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0))
)
self.assertEqual(resp.channel.data.calls_started, k_success + k_failed)
self.assertEqual(resp.channel.data.calls_succeeded, k_success)
self.assertEqual(resp.channel.data.calls_failed, k_failed)
def test_many_channel(self):
k_channels = 4
self._pairs = _generate_channel_server_pairs(k_channels)
resp = self._channelz_stub.GetTopChannels(
channelz_pb2.GetTopChannelsRequest(start_channel_id=0)
)
self.assertEqual(len(resp.channel), k_channels)
def test_many_requests_many_channel(self):
k_channels = 4
self._pairs = _generate_channel_server_pairs(k_channels)
k_success = 11
k_failed = 13
for i in range(k_success):
self._send_successful_unary_unary(0)
self._send_successful_unary_unary(2)
for i in range(k_failed):
self._send_failed_unary_unary(1)
self._send_failed_unary_unary(2)
# The first channel saw only successes
resp = self._channelz_stub.GetChannel(
channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0))
)
self.assertEqual(resp.channel.data.calls_started, k_success)
self.assertEqual(resp.channel.data.calls_succeeded, k_success)
self.assertEqual(resp.channel.data.calls_failed, 0)
# The second channel saw only failures
resp = self._channelz_stub.GetChannel(
channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(1))
)
self.assertEqual(resp.channel.data.calls_started, k_failed)
self.assertEqual(resp.channel.data.calls_succeeded, 0)
self.assertEqual(resp.channel.data.calls_failed, k_failed)
# The third channel saw both successes and failures
resp = self._channelz_stub.GetChannel(
channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(2))
)
self.assertEqual(resp.channel.data.calls_started, k_success + k_failed)
self.assertEqual(resp.channel.data.calls_succeeded, k_success)
self.assertEqual(resp.channel.data.calls_failed, k_failed)
# The fourth channel saw nothing
resp = self._channelz_stub.GetChannel(
channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(3))
)
self.assertEqual(resp.channel.data.calls_started, 0)
self.assertEqual(resp.channel.data.calls_succeeded, 0)
self.assertEqual(resp.channel.data.calls_failed, 0)
def test_many_subchannels(self):
k_channels = 4
self._pairs = _generate_channel_server_pairs(k_channels)
k_success = 17
k_failed = 19
for i in range(k_success):
self._send_successful_unary_unary(0)
self._send_successful_unary_unary(2)
for i in range(k_failed):
self._send_failed_unary_unary(1)
self._send_failed_unary_unary(2)
gtc_resp = self._channelz_stub.GetTopChannels(
channelz_pb2.GetTopChannelsRequest(start_channel_id=0)
)
self.assertEqual(len(gtc_resp.channel), k_channels)
for i in range(k_channels):
            # If no call was performed on the channel, there shouldn't be any subchannel
if gtc_resp.channel[i].data.calls_started == 0:
self.assertEqual(len(gtc_resp.channel[i].subchannel_ref), 0)
continue
# Otherwise, the subchannel should exist
self.assertGreater(len(gtc_resp.channel[i].subchannel_ref), 0)
gsc_resp = self._channelz_stub.GetSubchannel(
channelz_pb2.GetSubchannelRequest(
subchannel_id=gtc_resp.channel[i]
.subchannel_ref[0]
.subchannel_id
)
)
self.assertEqual(
gtc_resp.channel[i].data.calls_started,
gsc_resp.subchannel.data.calls_started,
)
self.assertEqual(
gtc_resp.channel[i].data.calls_succeeded,
gsc_resp.subchannel.data.calls_succeeded,
)
self.assertEqual(
gtc_resp.channel[i].data.calls_failed,
gsc_resp.subchannel.data.calls_failed,
)
def test_server_basic(self):
self._pairs = _generate_channel_server_pairs(1)
resp = self._channelz_stub.GetServers(
channelz_pb2.GetServersRequest(start_server_id=0)
)
self.assertEqual(len(resp.server), 1)
def test_get_one_server(self):
self._pairs = _generate_channel_server_pairs(1)
gss_resp = self._channelz_stub.GetServers(
channelz_pb2.GetServersRequest(start_server_id=0)
)
self.assertEqual(len(gss_resp.server), 1)
gs_resp = self._channelz_stub.GetServer(
channelz_pb2.GetServerRequest(
server_id=gss_resp.server[0].ref.server_id
)
)
self.assertEqual(
gss_resp.server[0].ref.server_id, gs_resp.server.ref.server_id
)
def test_server_call(self):
self._pairs = _generate_channel_server_pairs(1)
k_success = 23
k_failed = 29
for i in range(k_success):
self._send_successful_unary_unary(0)
for i in range(k_failed):
self._send_failed_unary_unary(0)
resp = self._channelz_stub.GetServers(
channelz_pb2.GetServersRequest(start_server_id=0)
)
self.assertEqual(len(resp.server), 1)
self.assertEqual(
resp.server[0].data.calls_started, k_success + k_failed
)
self.assertEqual(resp.server[0].data.calls_succeeded, k_success)
self.assertEqual(resp.server[0].data.calls_failed, k_failed)
def test_many_subchannels_and_sockets(self):
k_channels = 4
self._pairs = _generate_channel_server_pairs(k_channels)
k_success = 3
k_failed = 5
for i in range(k_success):
self._send_successful_unary_unary(0)
self._send_successful_unary_unary(2)
for i in range(k_failed):
self._send_failed_unary_unary(1)
self._send_failed_unary_unary(2)
gtc_resp = self._channelz_stub.GetTopChannels(
channelz_pb2.GetTopChannelsRequest(start_channel_id=0)
)
self.assertEqual(len(gtc_resp.channel), k_channels)
for i in range(k_channels):
            # If no call was performed on the channel, there shouldn't be any subchannel
if gtc_resp.channel[i].data.calls_started == 0:
self.assertEqual(len(gtc_resp.channel[i].subchannel_ref), 0)
continue
# Otherwise, the subchannel should exist
self.assertGreater(len(gtc_resp.channel[i].subchannel_ref), 0)
gsc_resp = self._channelz_stub.GetSubchannel(
channelz_pb2.GetSubchannelRequest(
subchannel_id=gtc_resp.channel[i]
.subchannel_ref[0]
.subchannel_id
)
)
self.assertEqual(len(gsc_resp.subchannel.socket_ref), 1)
gs_resp = self._channelz_stub.GetSocket(
channelz_pb2.GetSocketRequest(
socket_id=gsc_resp.subchannel.socket_ref[0].socket_id
)
)
self.assertEqual(
gsc_resp.subchannel.data.calls_started,
gs_resp.socket.data.streams_started,
)
self.assertEqual(
gsc_resp.subchannel.data.calls_started,
gs_resp.socket.data.streams_succeeded,
)
# Calls started == messages sent, only valid for unary calls
self.assertEqual(
gsc_resp.subchannel.data.calls_started,
gs_resp.socket.data.messages_sent,
)
# Only receive responses when the RPC was successful
self.assertEqual(
gsc_resp.subchannel.data.calls_succeeded,
gs_resp.socket.data.messages_received,
)
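            # channelz reports TCP/IP addresses as raw bytes: 4 bytes for an
            # IPv4 address and 16 bytes for an IPv6 address.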
if gs_resp.socket.remote.HasField("tcpip_address"):
address = gs_resp.socket.remote.tcpip_address.ip_address
self.assertTrue(
len(address) == 4 or len(address) == 16, address
)
if gs_resp.socket.local.HasField("tcpip_address"):
address = gs_resp.socket.local.tcpip_address.ip_address
self.assertTrue(
len(address) == 4 or len(address) == 16, address
)
def test_streaming_rpc(self):
self._pairs = _generate_channel_server_pairs(1)
        # In C++, the argument to _send_successful_stream_stream is the message length.
        # Here the argument is still the channel index, as in the other two helpers.
self._send_successful_stream_stream(0)
gc_resp = self._channelz_stub.GetChannel(
channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0))
)
self.assertEqual(gc_resp.channel.data.calls_started, 1)
self.assertEqual(gc_resp.channel.data.calls_succeeded, 1)
self.assertEqual(gc_resp.channel.data.calls_failed, 0)
# Subchannel exists
self.assertGreater(len(gc_resp.channel.subchannel_ref), 0)
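        # channelz stats can lag slightly behind client-observed completion,
        # so poll until the subchannel has accounted for the finished call.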
while True:
gsc_resp = self._channelz_stub.GetSubchannel(
channelz_pb2.GetSubchannelRequest(
subchannel_id=gc_resp.channel.subchannel_ref[
0
].subchannel_id
)
)
if (
gsc_resp.subchannel.data.calls_started
== gsc_resp.subchannel.data.calls_succeeded
+ gsc_resp.subchannel.data.calls_failed
):
break
self.assertEqual(gsc_resp.subchannel.data.calls_started, 1)
self.assertEqual(gsc_resp.subchannel.data.calls_failed, 0)
self.assertEqual(gsc_resp.subchannel.data.calls_succeeded, 1)
# Socket exists
self.assertEqual(len(gsc_resp.subchannel.socket_ref), 1)
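        # Likewise, poll until the socket's stream accounting has settled.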
while True:
gs_resp = self._channelz_stub.GetSocket(
channelz_pb2.GetSocketRequest(
socket_id=gsc_resp.subchannel.socket_ref[0].socket_id
)
)
if (
gs_resp.socket.data.streams_started
== gs_resp.socket.data.streams_succeeded
+ gs_resp.socket.data.streams_failed
):
break
self.assertEqual(gs_resp.socket.data.streams_started, 1)
self.assertEqual(gs_resp.socket.data.streams_succeeded, 1)
self.assertEqual(gs_resp.socket.data.streams_failed, 0)
self.assertEqual(
gs_resp.socket.data.messages_sent, test_constants.STREAM_LENGTH
)
self.assertEqual(
gs_resp.socket.data.messages_received, test_constants.STREAM_LENGTH
)
def test_server_sockets(self):
self._pairs = _generate_channel_server_pairs(1)
self._send_successful_unary_unary(0)
self._send_failed_unary_unary(0)
gs_resp = self._channelz_stub.GetServers(
channelz_pb2.GetServersRequest(start_server_id=0)
)
self.assertEqual(len(gs_resp.server), 1)
self.assertEqual(gs_resp.server[0].data.calls_started, 2)
self.assertEqual(gs_resp.server[0].data.calls_succeeded, 1)
self.assertEqual(gs_resp.server[0].data.calls_failed, 1)
gss_resp = self._channelz_stub.GetServerSockets(
channelz_pb2.GetServerSocketsRequest(
server_id=gs_resp.server[0].ref.server_id, start_socket_id=0
)
)
        # If the RPC call had failed, it would have raised a grpc.RpcError,
        # so the test passes as long as no exception is raised.
def test_server_listen_sockets(self):
self._pairs = _generate_channel_server_pairs(1)
gss_resp = self._channelz_stub.GetServers(
channelz_pb2.GetServersRequest(start_server_id=0)
)
self.assertEqual(len(gss_resp.server), 1)
self.assertEqual(len(gss_resp.server[0].listen_socket), 1)
gs_resp = self._channelz_stub.GetSocket(
channelz_pb2.GetSocketRequest(
socket_id=gss_resp.server[0].listen_socket[0].socket_id
)
)
        # If the RPC call had failed, it would have raised a grpc.RpcError,
        # so the test passes as long as no exception is raised.
def test_invalid_query_get_server(self):
try:
self._channelz_stub.GetServer(
channelz_pb2.GetServerRequest(server_id=10000)
)
except BaseException as e:
self.assertIn("StatusCode.NOT_FOUND", str(e))
else:
self.fail("Invalid query not detected")
def test_invalid_query_get_channel(self):
try:
self._channelz_stub.GetChannel(
channelz_pb2.GetChannelRequest(channel_id=10000)
)
except BaseException as e:
self.assertIn("StatusCode.NOT_FOUND", str(e))
else:
self.fail("Invalid query not detected")
def test_invalid_query_get_subchannel(self):
try:
self._channelz_stub.GetSubchannel(
channelz_pb2.GetSubchannelRequest(subchannel_id=10000)
)
except BaseException as e:
self.assertIn("StatusCode.NOT_FOUND", str(e))
else:
self.fail("Invalid query not detected")
def test_invalid_query_get_socket(self):
try:
self._channelz_stub.GetSocket(
channelz_pb2.GetSocketRequest(socket_id=10000)
)
except BaseException as e:
self.assertIn("StatusCode.NOT_FOUND", str(e))
else:
self.fail("Invalid query not detected")
def test_invalid_query_get_server_sockets(self):
try:
self._channelz_stub.GetServerSockets(
channelz_pb2.GetServerSocketsRequest(
server_id=10000,
start_socket_id=0,
)
)
except BaseException as e:
self.assertIn("StatusCode.NOT_FOUND", str(e))
else:
self.fail("Invalid query not detected")
if __name__ == "__main__":
unittest.main(verbosity=2)
| 21,786
| 37.357394
| 86
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/channelz/__init__.py
|
# Copyright 2018 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 580
| 40.5
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/protoc_plugin/beta_python_plugin_test.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import importlib
import itertools
import os
from os import path
import pkgutil
import shutil
import sys
import tempfile
import threading
import unittest
from grpc.beta import implementations
from grpc.beta import interfaces
from grpc.framework.foundation import future
from grpc.framework.interfaces.face import face
from grpc_tools import protoc
from tests.unit.framework.common import test_constants
_RELATIVE_PROTO_PATH = "relative_proto_path"
_RELATIVE_PYTHON_OUT = "relative_python_out"
_PROTO_FILES_PATH_COMPONENTS = (
(
"beta_grpc_plugin_test",
"payload",
"test_payload.proto",
),
(
"beta_grpc_plugin_test",
"requests",
"r",
"test_requests.proto",
),
(
"beta_grpc_plugin_test",
"responses",
"test_responses.proto",
),
(
"beta_grpc_plugin_test",
"service",
"test_service.proto",
),
)
_PAYLOAD_PB2 = "beta_grpc_plugin_test.payload.test_payload_pb2"
_REQUESTS_PB2 = "beta_grpc_plugin_test.requests.r.test_requests_pb2"
_RESPONSES_PB2 = "beta_grpc_plugin_test.responses.test_responses_pb2"
_SERVICE_PB2 = "beta_grpc_plugin_test.service.test_service_pb2"
# Identifiers of entities we expect to find in the generated module.
SERVICER_IDENTIFIER = "BetaTestServiceServicer"
STUB_IDENTIFIER = "BetaTestServiceStub"
SERVER_FACTORY_IDENTIFIER = "beta_create_TestService_server"
STUB_FACTORY_IDENTIFIER = "beta_create_TestService_stub"
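# Temporarily inserts the given paths near the front of sys.path so that
# modules generated into the temporary output directory can be imported;
# sys.path is restored on exit.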
@contextlib.contextmanager
def _system_path(path_insertion):
old_system_path = sys.path[:]
sys.path = sys.path[0:1] + path_insertion + sys.path[1:]
yield
sys.path = old_system_path
def _create_directory_tree(root, path_components_sequence):
created = set()
for path_components in path_components_sequence:
thus_far = ""
for path_component in path_components:
relative_path = path.join(thus_far, path_component)
if relative_path not in created:
os.makedirs(path.join(root, relative_path))
created.add(relative_path)
thus_far = path.join(thus_far, path_component)
def _massage_proto_content(raw_proto_content):
imports_substituted = raw_proto_content.replace(
b'import "tests/protoc_plugin/protos/',
b'import "beta_grpc_plugin_test/',
)
package_statement_substituted = imports_substituted.replace(
b"package grpc_protoc_plugin;", b"package beta_grpc_protoc_plugin;"
)
return package_statement_substituted
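# Illustrative example (not part of the original test): with the replacements
# above, b'import "tests/protoc_plugin/protos/payload/test_payload.proto";'
# becomes b'import "beta_grpc_plugin_test/payload/test_payload.proto";' and the
# package statement becomes b"package beta_grpc_protoc_plugin;".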
def _packagify(directory):
for subdirectory, _, _ in os.walk(directory):
init_file_name = path.join(subdirectory, "__init__.py")
with open(init_file_name, "wb") as init_file:
init_file.write(b"")
class _ServicerMethods(object):
def __init__(self, payload_pb2, responses_pb2):
self._condition = threading.Condition()
self._paused = False
self._fail = False
self._payload_pb2 = payload_pb2
self._responses_pb2 = responses_pb2
@contextlib.contextmanager
def pause(self): # pylint: disable=invalid-name
with self._condition:
self._paused = True
yield
with self._condition:
self._paused = False
self._condition.notify_all()
@contextlib.contextmanager
def fail(self): # pylint: disable=invalid-name
with self._condition:
self._fail = True
yield
with self._condition:
self._fail = False
def _control(self): # pylint: disable=invalid-name
with self._condition:
if self._fail:
raise ValueError()
while self._paused:
self._condition.wait()
def UnaryCall(self, request, unused_rpc_context):
response = self._responses_pb2.SimpleResponse()
response.payload.payload_type = self._payload_pb2.COMPRESSABLE
response.payload.payload_compressable = "a" * request.response_size
self._control()
return response
def StreamingOutputCall(self, request, unused_rpc_context):
for parameter in request.response_parameters:
response = self._responses_pb2.StreamingOutputCallResponse()
response.payload.payload_type = self._payload_pb2.COMPRESSABLE
response.payload.payload_compressable = "a" * parameter.size
self._control()
yield response
def StreamingInputCall(self, request_iter, unused_rpc_context):
response = self._responses_pb2.StreamingInputCallResponse()
aggregated_payload_size = 0
for request in request_iter:
aggregated_payload_size += len(request.payload.payload_compressable)
response.aggregated_payload_size = aggregated_payload_size
self._control()
return response
def FullDuplexCall(self, request_iter, unused_rpc_context):
for request in request_iter:
for parameter in request.response_parameters:
response = self._responses_pb2.StreamingOutputCallResponse()
response.payload.payload_type = self._payload_pb2.COMPRESSABLE
response.payload.payload_compressable = "a" * parameter.size
self._control()
yield response
def HalfDuplexCall(self, request_iter, unused_rpc_context):
responses = []
for request in request_iter:
for parameter in request.response_parameters:
response = self._responses_pb2.StreamingOutputCallResponse()
response.payload.payload_type = self._payload_pb2.COMPRESSABLE
response.payload.payload_compressable = "a" * parameter.size
self._control()
responses.append(response)
for response in responses:
yield response
@contextlib.contextmanager
def _CreateService(payload_pb2, responses_pb2, service_pb2):
"""Provides a servicer backend and a stub.
The servicer is just the implementation of the actual servicer passed to the
    face layer of the Python RPC implementation; the two are detached.
Yields:
A (servicer_methods, stub) pair where servicer_methods is the back-end of
the service bound to the stub and stub is the stub on which to invoke
RPCs.
"""
servicer_methods = _ServicerMethods(payload_pb2, responses_pb2)
class Servicer(getattr(service_pb2, SERVICER_IDENTIFIER)):
def UnaryCall(self, request, context):
return servicer_methods.UnaryCall(request, context)
def StreamingOutputCall(self, request, context):
return servicer_methods.StreamingOutputCall(request, context)
def StreamingInputCall(self, request_iter, context):
return servicer_methods.StreamingInputCall(request_iter, context)
def FullDuplexCall(self, request_iter, context):
return servicer_methods.FullDuplexCall(request_iter, context)
def HalfDuplexCall(self, request_iter, context):
return servicer_methods.HalfDuplexCall(request_iter, context)
servicer = Servicer()
server = getattr(service_pb2, SERVER_FACTORY_IDENTIFIER)(servicer)
port = server.add_insecure_port("[::]:0")
server.start()
channel = implementations.insecure_channel("localhost", port)
stub = getattr(service_pb2, STUB_FACTORY_IDENTIFIER)(channel)
yield servicer_methods, stub
server.stop(0)
@contextlib.contextmanager
def _CreateIncompleteService(service_pb2):
"""Provides a servicer backend that fails to implement methods and its stub.
The servicer is just the implementation of the actual servicer passed to the
    face layer of the Python RPC implementation; the two are detached.
Args:
service_pb2: The service_pb2 module generated by this test.
Yields:
A (servicer_methods, stub) pair where servicer_methods is the back-end of
the service bound to the stub and stub is the stub on which to invoke
RPCs.
"""
class Servicer(getattr(service_pb2, SERVICER_IDENTIFIER)):
pass
servicer = Servicer()
server = getattr(service_pb2, SERVER_FACTORY_IDENTIFIER)(servicer)
port = server.add_insecure_port("[::]:0")
server.start()
channel = implementations.insecure_channel("localhost", port)
stub = getattr(service_pb2, STUB_FACTORY_IDENTIFIER)(channel)
yield None, stub
server.stop(0)
def _streaming_input_request_iterator(payload_pb2, requests_pb2):
for _ in range(3):
request = requests_pb2.StreamingInputCallRequest()
request.payload.payload_type = payload_pb2.COMPRESSABLE
request.payload.payload_compressable = "a"
yield request
def _streaming_output_request(requests_pb2):
request = requests_pb2.StreamingOutputCallRequest()
sizes = [1, 2, 3]
request.response_parameters.add(size=sizes[0], interval_us=0)
request.response_parameters.add(size=sizes[1], interval_us=0)
request.response_parameters.add(size=sizes[2], interval_us=0)
return request
def _full_duplex_request_iterator(requests_pb2):
request = requests_pb2.StreamingOutputCallRequest()
request.response_parameters.add(size=1, interval_us=0)
yield request
request = requests_pb2.StreamingOutputCallRequest()
request.response_parameters.add(size=2, interval_us=0)
request.response_parameters.add(size=3, interval_us=0)
yield request
class PythonPluginTest(unittest.TestCase):
"""Test case for the gRPC Python protoc-plugin.
While reading these tests, remember that the futures API
(`stub.method.future()`) only gives futures for the *response-unary*
methods and does not exist for response-streaming methods.
"""
def setUp(self):
self._directory = tempfile.mkdtemp(dir=".")
self._proto_path = path.join(self._directory, _RELATIVE_PROTO_PATH)
self._python_out = path.join(self._directory, _RELATIVE_PYTHON_OUT)
os.makedirs(self._proto_path)
os.makedirs(self._python_out)
directories_path_components = {
proto_file_path_components[:-1]
for proto_file_path_components in _PROTO_FILES_PATH_COMPONENTS
}
_create_directory_tree(self._proto_path, directories_path_components)
self._proto_file_names = set()
for proto_file_path_components in _PROTO_FILES_PATH_COMPONENTS:
raw_proto_content = pkgutil.get_data(
"tests.protoc_plugin.protos",
path.join(*proto_file_path_components[1:]),
)
massaged_proto_content = _massage_proto_content(raw_proto_content)
proto_file_name = path.join(
self._proto_path, *proto_file_path_components
)
with open(proto_file_name, "wb") as proto_file:
proto_file.write(massaged_proto_content)
self._proto_file_names.add(proto_file_name)
def tearDown(self):
shutil.rmtree(self._directory)
def _protoc(self):
args = [
"",
"--proto_path={}".format(self._proto_path),
"--python_out={}".format(self._python_out),
"--grpc_python_out=grpc_1_0:{}".format(self._python_out),
] + list(self._proto_file_names)
protoc_exit_code = protoc.main(args)
self.assertEqual(0, protoc_exit_code)
_packagify(self._python_out)
with _system_path([self._python_out]):
self._payload_pb2 = importlib.import_module(_PAYLOAD_PB2)
self._requests_pb2 = importlib.import_module(_REQUESTS_PB2)
self._responses_pb2 = importlib.import_module(_RESPONSES_PB2)
self._service_pb2 = importlib.import_module(_SERVICE_PB2)
def testImportAttributes(self):
self._protoc()
# check that we can access the generated module and its members.
self.assertIsNotNone(
getattr(self._service_pb2, SERVICER_IDENTIFIER, None)
)
self.assertIsNotNone(getattr(self._service_pb2, STUB_IDENTIFIER, None))
self.assertIsNotNone(
getattr(self._service_pb2, SERVER_FACTORY_IDENTIFIER, None)
)
self.assertIsNotNone(
getattr(self._service_pb2, STUB_FACTORY_IDENTIFIER, None)
)
def testUpDown(self):
self._protoc()
with _CreateService(
self._payload_pb2, self._responses_pb2, self._service_pb2
):
self._requests_pb2.SimpleRequest(response_size=13)
def testIncompleteServicer(self):
self._protoc()
with _CreateIncompleteService(self._service_pb2) as (_, stub):
request = self._requests_pb2.SimpleRequest(response_size=13)
try:
stub.UnaryCall(request, test_constants.LONG_TIMEOUT)
except face.AbortionError as error:
self.assertEqual(
interfaces.StatusCode.UNIMPLEMENTED, error.code
)
def testUnaryCall(self):
self._protoc()
with _CreateService(
self._payload_pb2, self._responses_pb2, self._service_pb2
) as (methods, stub):
request = self._requests_pb2.SimpleRequest(response_size=13)
response = stub.UnaryCall(request, test_constants.LONG_TIMEOUT)
expected_response = methods.UnaryCall(request, "not a real context!")
self.assertEqual(expected_response, response)
def testUnaryCallFuture(self):
self._protoc()
with _CreateService(
self._payload_pb2, self._responses_pb2, self._service_pb2
) as (methods, stub):
request = self._requests_pb2.SimpleRequest(response_size=13)
# Check that the call does not block waiting for the server to respond.
with methods.pause():
response_future = stub.UnaryCall.future(
request, test_constants.LONG_TIMEOUT
)
response = response_future.result()
expected_response = methods.UnaryCall(request, "not a real RpcContext!")
self.assertEqual(expected_response, response)
def testUnaryCallFutureExpired(self):
self._protoc()
with _CreateService(
self._payload_pb2, self._responses_pb2, self._service_pb2
) as (methods, stub):
request = self._requests_pb2.SimpleRequest(response_size=13)
with methods.pause():
response_future = stub.UnaryCall.future(
request, test_constants.SHORT_TIMEOUT
)
with self.assertRaises(face.ExpirationError):
response_future.result()
def testUnaryCallFutureCancelled(self):
self._protoc()
with _CreateService(
self._payload_pb2, self._responses_pb2, self._service_pb2
) as (methods, stub):
request = self._requests_pb2.SimpleRequest(response_size=13)
with methods.pause():
response_future = stub.UnaryCall.future(request, 1)
response_future.cancel()
self.assertTrue(response_future.cancelled())
def testUnaryCallFutureFailed(self):
self._protoc()
with _CreateService(
self._payload_pb2, self._responses_pb2, self._service_pb2
) as (methods, stub):
request = self._requests_pb2.SimpleRequest(response_size=13)
with methods.fail():
response_future = stub.UnaryCall.future(
request, test_constants.LONG_TIMEOUT
)
self.assertIsNotNone(response_future.exception())
def testStreamingOutputCall(self):
self._protoc()
with _CreateService(
self._payload_pb2, self._responses_pb2, self._service_pb2
) as (methods, stub):
request = _streaming_output_request(self._requests_pb2)
responses = stub.StreamingOutputCall(
request, test_constants.LONG_TIMEOUT
)
expected_responses = methods.StreamingOutputCall(
request, "not a real RpcContext!"
)
for expected_response, response in itertools.zip_longest(
expected_responses, responses
):
self.assertEqual(expected_response, response)
def testStreamingOutputCallExpired(self):
self._protoc()
with _CreateService(
self._payload_pb2, self._responses_pb2, self._service_pb2
) as (methods, stub):
request = _streaming_output_request(self._requests_pb2)
with methods.pause():
responses = stub.StreamingOutputCall(
request, test_constants.SHORT_TIMEOUT
)
with self.assertRaises(face.ExpirationError):
list(responses)
def testStreamingOutputCallCancelled(self):
self._protoc()
with _CreateService(
self._payload_pb2, self._responses_pb2, self._service_pb2
) as (methods, stub):
request = _streaming_output_request(self._requests_pb2)
responses = stub.StreamingOutputCall(
request, test_constants.LONG_TIMEOUT
)
next(responses)
responses.cancel()
with self.assertRaises(face.CancellationError):
next(responses)
def testStreamingOutputCallFailed(self):
self._protoc()
with _CreateService(
self._payload_pb2, self._responses_pb2, self._service_pb2
) as (methods, stub):
request = _streaming_output_request(self._requests_pb2)
with methods.fail():
responses = stub.StreamingOutputCall(request, 1)
self.assertIsNotNone(responses)
with self.assertRaises(face.RemoteError):
next(responses)
def testStreamingInputCall(self):
self._protoc()
with _CreateService(
self._payload_pb2, self._responses_pb2, self._service_pb2
) as (methods, stub):
response = stub.StreamingInputCall(
_streaming_input_request_iterator(
self._payload_pb2, self._requests_pb2
),
test_constants.LONG_TIMEOUT,
)
expected_response = methods.StreamingInputCall(
_streaming_input_request_iterator(
self._payload_pb2, self._requests_pb2
),
"not a real RpcContext!",
)
self.assertEqual(expected_response, response)
def testStreamingInputCallFuture(self):
self._protoc()
with _CreateService(
self._payload_pb2, self._responses_pb2, self._service_pb2
) as (methods, stub):
with methods.pause():
response_future = stub.StreamingInputCall.future(
_streaming_input_request_iterator(
self._payload_pb2, self._requests_pb2
),
test_constants.LONG_TIMEOUT,
)
response = response_future.result()
expected_response = methods.StreamingInputCall(
_streaming_input_request_iterator(
self._payload_pb2, self._requests_pb2
),
"not a real RpcContext!",
)
self.assertEqual(expected_response, response)
def testStreamingInputCallFutureExpired(self):
self._protoc()
with _CreateService(
self._payload_pb2, self._responses_pb2, self._service_pb2
) as (methods, stub):
with methods.pause():
response_future = stub.StreamingInputCall.future(
_streaming_input_request_iterator(
self._payload_pb2, self._requests_pb2
),
test_constants.SHORT_TIMEOUT,
)
with self.assertRaises(face.ExpirationError):
response_future.result()
self.assertIsInstance(
response_future.exception(), face.ExpirationError
)
def testStreamingInputCallFutureCancelled(self):
self._protoc()
with _CreateService(
self._payload_pb2, self._responses_pb2, self._service_pb2
) as (methods, stub):
with methods.pause():
response_future = stub.StreamingInputCall.future(
_streaming_input_request_iterator(
self._payload_pb2, self._requests_pb2
),
test_constants.LONG_TIMEOUT,
)
response_future.cancel()
self.assertTrue(response_future.cancelled())
with self.assertRaises(future.CancelledError):
response_future.result()
def testStreamingInputCallFutureFailed(self):
self._protoc()
with _CreateService(
self._payload_pb2, self._responses_pb2, self._service_pb2
) as (methods, stub):
with methods.fail():
response_future = stub.StreamingInputCall.future(
_streaming_input_request_iterator(
self._payload_pb2, self._requests_pb2
),
test_constants.LONG_TIMEOUT,
)
self.assertIsNotNone(response_future.exception())
def testFullDuplexCall(self):
self._protoc()
with _CreateService(
self._payload_pb2, self._responses_pb2, self._service_pb2
) as (methods, stub):
responses = stub.FullDuplexCall(
_full_duplex_request_iterator(self._requests_pb2),
test_constants.LONG_TIMEOUT,
)
expected_responses = methods.FullDuplexCall(
_full_duplex_request_iterator(self._requests_pb2),
"not a real RpcContext!",
)
for expected_response, response in itertools.zip_longest(
expected_responses, responses
):
self.assertEqual(expected_response, response)
def testFullDuplexCallExpired(self):
self._protoc()
request_iterator = _full_duplex_request_iterator(self._requests_pb2)
with _CreateService(
self._payload_pb2, self._responses_pb2, self._service_pb2
) as (methods, stub):
with methods.pause():
responses = stub.FullDuplexCall(
request_iterator, test_constants.SHORT_TIMEOUT
)
with self.assertRaises(face.ExpirationError):
list(responses)
def testFullDuplexCallCancelled(self):
self._protoc()
with _CreateService(
self._payload_pb2, self._responses_pb2, self._service_pb2
) as (methods, stub):
request_iterator = _full_duplex_request_iterator(self._requests_pb2)
responses = stub.FullDuplexCall(
request_iterator, test_constants.LONG_TIMEOUT
)
next(responses)
responses.cancel()
with self.assertRaises(face.CancellationError):
next(responses)
def testFullDuplexCallFailed(self):
self._protoc()
request_iterator = _full_duplex_request_iterator(self._requests_pb2)
with _CreateService(
self._payload_pb2, self._responses_pb2, self._service_pb2
) as (methods, stub):
with methods.fail():
responses = stub.FullDuplexCall(
request_iterator, test_constants.LONG_TIMEOUT
)
self.assertIsNotNone(responses)
with self.assertRaises(face.RemoteError):
next(responses)
def testHalfDuplexCall(self):
self._protoc()
with _CreateService(
self._payload_pb2, self._responses_pb2, self._service_pb2
) as (methods, stub):
def half_duplex_request_iterator():
request = self._requests_pb2.StreamingOutputCallRequest()
request.response_parameters.add(size=1, interval_us=0)
yield request
request = self._requests_pb2.StreamingOutputCallRequest()
request.response_parameters.add(size=2, interval_us=0)
request.response_parameters.add(size=3, interval_us=0)
yield request
responses = stub.HalfDuplexCall(
half_duplex_request_iterator(), test_constants.LONG_TIMEOUT
)
expected_responses = methods.HalfDuplexCall(
half_duplex_request_iterator(), "not a real RpcContext!"
)
for check in itertools.zip_longest(expected_responses, responses):
expected_response, response = check
self.assertEqual(expected_response, response)
def testHalfDuplexCallWedged(self):
self._protoc()
condition = threading.Condition()
wait_cell = [False]
@contextlib.contextmanager
def wait(): # pylint: disable=invalid-name
# Where's Python 3's 'nonlocal' statement when you need it?
with condition:
wait_cell[0] = True
yield
with condition:
wait_cell[0] = False
condition.notify_all()
def half_duplex_request_iterator():
request = self._requests_pb2.StreamingOutputCallRequest()
request.response_parameters.add(size=1, interval_us=0)
yield request
with condition:
while wait_cell[0]:
condition.wait()
with _CreateService(
self._payload_pb2, self._responses_pb2, self._service_pb2
) as (methods, stub):
with wait():
responses = stub.HalfDuplexCall(
half_duplex_request_iterator(), test_constants.SHORT_TIMEOUT
)
                # Half-duplex waits for the client to send all of its requests first.
with self.assertRaises(face.ExpirationError):
next(responses)
if __name__ == "__main__":
unittest.main(verbosity=2)
| 26,959
| 36.34072
| 83
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/protoc_plugin/_split_definitions_test.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
import contextlib
import importlib
import os
from os import path
import pkgutil
import platform
import shutil
import sys
import tempfile
import unittest
import grpc
from grpc_tools import protoc
import pkg_resources
from tests.unit import test_common
_MESSAGES_IMPORT = b'import "messages.proto";'
_SPLIT_NAMESPACE = b"package grpc_protoc_plugin.invocation_testing.split;"
_COMMON_NAMESPACE = b"package grpc_protoc_plugin.invocation_testing;"
_RELATIVE_PROTO_PATH = "relative_proto_path"
_RELATIVE_PYTHON_OUT = "relative_python_out"
_TEST_DIR = os.path.dirname(os.path.realpath(__file__))
@contextlib.contextmanager
def _system_path(path_insertion):
old_system_path = sys.path[:]
sys.path = sys.path[0:1] + path_insertion + sys.path[1:]
yield
sys.path = old_system_path
# NOTE(nathaniel): https://twitter.com/exoplaneteer/status/677259364256747520
# Life lesson "just always default to idempotence" reinforced.
def _create_directory_tree(root, path_components_sequence):
created = set()
for path_components in path_components_sequence:
thus_far = ""
for path_component in path_components:
relative_path = path.join(thus_far, path_component)
if relative_path not in created:
os.makedirs(path.join(root, relative_path))
created.add(relative_path)
thus_far = path.join(thus_far, path_component)
def _massage_proto_content(
proto_content, test_name_bytes, messages_proto_relative_file_name_bytes
):
package_substitution = (
b"package grpc_protoc_plugin.invocation_testing."
+ test_name_bytes
+ b";"
)
common_namespace_substituted = proto_content.replace(
_COMMON_NAMESPACE, package_substitution
)
split_namespace_substituted = common_namespace_substituted.replace(
_SPLIT_NAMESPACE, package_substitution
)
message_import_replaced = split_namespace_substituted.replace(
_MESSAGES_IMPORT,
b'import "' + messages_proto_relative_file_name_bytes + b'";',
)
return message_import_replaced
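# Illustrative note (not part of the original test): the bare messages import is
# rewritten to the per-test relative path (for the split layout,
# "split_messages/sub/messages.proto") and both package statements are replaced
# with a single per-test package name.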
def _packagify(directory):
for subdirectory, _, _ in os.walk(directory):
init_file_name = path.join(subdirectory, "__init__.py")
with open(init_file_name, "wb") as init_file:
init_file.write(b"")
class _Servicer(object):
def __init__(self, response_class):
self._response_class = response_class
def Call(self, request, context):
return self._response_class()
def _protoc(
proto_path,
python_out,
grpc_python_out_flag,
grpc_python_out,
absolute_proto_file_names,
):
args = [
"",
"--proto_path={}".format(proto_path),
]
if python_out is not None:
args.append("--python_out={}".format(python_out))
if grpc_python_out is not None:
args.append(
"--grpc_python_out={}:{}".format(
grpc_python_out_flag, grpc_python_out
)
)
args.extend(absolute_proto_file_names)
return protoc.main(args)
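# For reference (not part of the original file): with the "grpc_2_0" flag used
# by most styles below, the invocation above is roughly equivalent to
#     protoc --proto_path=<proto_path> --python_out=<python_out> \
#            --grpc_python_out=grpc_2_0:<python_out> <proto files...>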
class _Mid2016ProtocStyle(object):
def name(self):
return "Mid2016ProtocStyle"
def grpc_in_pb2_expected(self):
return True
def protoc(self, proto_path, python_out, absolute_proto_file_names):
return (
_protoc(
proto_path,
python_out,
"grpc_1_0",
python_out,
absolute_proto_file_names,
),
)
class _SingleProtocExecutionProtocStyle(object):
def name(self):
return "SingleProtocExecutionProtocStyle"
def grpc_in_pb2_expected(self):
return False
def protoc(self, proto_path, python_out, absolute_proto_file_names):
return (
_protoc(
proto_path,
python_out,
"grpc_2_0",
python_out,
absolute_proto_file_names,
),
)
class _ProtoBeforeGrpcProtocStyle(object):
def name(self):
return "ProtoBeforeGrpcProtocStyle"
def grpc_in_pb2_expected(self):
return False
def protoc(self, proto_path, python_out, absolute_proto_file_names):
pb2_protoc_exit_code = _protoc(
proto_path, python_out, None, None, absolute_proto_file_names
)
pb2_grpc_protoc_exit_code = _protoc(
proto_path, None, "grpc_2_0", python_out, absolute_proto_file_names
)
return pb2_protoc_exit_code, pb2_grpc_protoc_exit_code
class _GrpcBeforeProtoProtocStyle(object):
def name(self):
return "GrpcBeforeProtoProtocStyle"
def grpc_in_pb2_expected(self):
return False
def protoc(self, proto_path, python_out, absolute_proto_file_names):
pb2_grpc_protoc_exit_code = _protoc(
proto_path, None, "grpc_2_0", python_out, absolute_proto_file_names
)
pb2_protoc_exit_code = _protoc(
proto_path, python_out, None, None, absolute_proto_file_names
)
return pb2_grpc_protoc_exit_code, pb2_protoc_exit_code
_PROTOC_STYLES = (
_Mid2016ProtocStyle(),
_SingleProtocExecutionProtocStyle(),
_ProtoBeforeGrpcProtocStyle(),
_GrpcBeforeProtoProtocStyle(),
)
@unittest.skipIf(
platform.python_implementation() == "PyPy", "Skip test if run with PyPy!"
)
class _Test(unittest.TestCase, metaclass=abc.ABCMeta):
def setUp(self):
self._directory = tempfile.mkdtemp(suffix=self.NAME, dir=".")
self._proto_path = path.join(self._directory, _RELATIVE_PROTO_PATH)
self._python_out = path.join(self._directory, _RELATIVE_PYTHON_OUT)
os.makedirs(self._proto_path)
os.makedirs(self._python_out)
proto_directories_and_names = {
(
self.MESSAGES_PROTO_RELATIVE_DIRECTORY_NAMES,
self.MESSAGES_PROTO_FILE_NAME,
),
(
self.SERVICES_PROTO_RELATIVE_DIRECTORY_NAMES,
self.SERVICES_PROTO_FILE_NAME,
),
}
messages_proto_relative_file_name_forward_slashes = "/".join(
self.MESSAGES_PROTO_RELATIVE_DIRECTORY_NAMES
+ (self.MESSAGES_PROTO_FILE_NAME,)
)
_create_directory_tree(
self._proto_path,
(
relative_proto_directory_names
for relative_proto_directory_names, _ in proto_directories_and_names
),
)
self._absolute_proto_file_names = set()
for relative_directory_names, file_name in proto_directories_and_names:
absolute_proto_file_name = path.join(
self._proto_path, *relative_directory_names + (file_name,)
)
raw_proto_content = pkgutil.get_data(
"tests.protoc_plugin.protos.invocation_testing",
path.join(*relative_directory_names + (file_name,)),
)
massaged_proto_content = _massage_proto_content(
raw_proto_content,
self.NAME.encode(),
messages_proto_relative_file_name_forward_slashes.encode(),
)
with open(absolute_proto_file_name, "wb") as proto_file:
proto_file.write(massaged_proto_content)
self._absolute_proto_file_names.add(absolute_proto_file_name)
def tearDown(self):
shutil.rmtree(self._directory)
def _protoc(self):
protoc_exit_codes = self.PROTOC_STYLE.protoc(
self._proto_path, self._python_out, self._absolute_proto_file_names
)
for protoc_exit_code in protoc_exit_codes:
self.assertEqual(0, protoc_exit_code)
_packagify(self._python_out)
generated_modules = {}
expected_generated_full_module_names = {
self.EXPECTED_MESSAGES_PB2,
self.EXPECTED_SERVICES_PB2,
self.EXPECTED_SERVICES_PB2_GRPC,
}
with _system_path([self._python_out]):
for full_module_name in expected_generated_full_module_names:
module = importlib.import_module(full_module_name)
generated_modules[full_module_name] = module
self._messages_pb2 = generated_modules[self.EXPECTED_MESSAGES_PB2]
self._services_pb2 = generated_modules[self.EXPECTED_SERVICES_PB2]
self._services_pb2_grpc = generated_modules[
self.EXPECTED_SERVICES_PB2_GRPC
]
def _services_modules(self):
if self.PROTOC_STYLE.grpc_in_pb2_expected():
return self._services_pb2, self._services_pb2_grpc
else:
return (self._services_pb2_grpc,)
def test_imported_attributes(self):
self._protoc()
self._messages_pb2.Request
self._messages_pb2.Response
self._services_pb2.DESCRIPTOR.services_by_name["TestService"]
for services_module in self._services_modules():
services_module.TestServiceStub
services_module.TestServiceServicer
services_module.add_TestServiceServicer_to_server
def test_call(self):
self._protoc()
for services_module in self._services_modules():
server = test_common.test_server()
services_module.add_TestServiceServicer_to_server(
_Servicer(self._messages_pb2.Response), server
)
port = server.add_insecure_port("[::]:0")
server.start()
channel = grpc.insecure_channel("localhost:{}".format(port))
stub = services_module.TestServiceStub(channel)
response = stub.Call(self._messages_pb2.Request())
self.assertEqual(self._messages_pb2.Response(), response)
server.stop(None)
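# The concrete test case classes are created dynamically below: one class per
# (split_proto, protoc_style) combination, named e.g.
# "SplitProtoMid2016ProtocStyleTest".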
def _create_test_case_class(split_proto, protoc_style):
attributes = {}
name = "{}{}".format(
"SplitProto" if split_proto else "SameProto", protoc_style.name()
)
attributes["NAME"] = name
if split_proto:
attributes["MESSAGES_PROTO_RELATIVE_DIRECTORY_NAMES"] = (
"split_messages",
"sub",
)
attributes["MESSAGES_PROTO_FILE_NAME"] = "messages.proto"
attributes["SERVICES_PROTO_RELATIVE_DIRECTORY_NAMES"] = (
"split_services",
)
attributes["SERVICES_PROTO_FILE_NAME"] = "services.proto"
attributes["EXPECTED_MESSAGES_PB2"] = "split_messages.sub.messages_pb2"
attributes["EXPECTED_SERVICES_PB2"] = "split_services.services_pb2"
attributes[
"EXPECTED_SERVICES_PB2_GRPC"
] = "split_services.services_pb2_grpc"
else:
attributes["MESSAGES_PROTO_RELATIVE_DIRECTORY_NAMES"] = ()
attributes["MESSAGES_PROTO_FILE_NAME"] = "same.proto"
attributes["SERVICES_PROTO_RELATIVE_DIRECTORY_NAMES"] = ()
attributes["SERVICES_PROTO_FILE_NAME"] = "same.proto"
attributes["EXPECTED_MESSAGES_PB2"] = "same_pb2"
attributes["EXPECTED_SERVICES_PB2"] = "same_pb2"
attributes["EXPECTED_SERVICES_PB2_GRPC"] = "same_pb2_grpc"
attributes["PROTOC_STYLE"] = protoc_style
attributes["__module__"] = _Test.__module__
return type("{}Test".format(name), (_Test,), attributes)
def _create_test_case_classes():
for split_proto in (
False,
True,
):
for protoc_style in _PROTOC_STYLES:
yield _create_test_case_class(split_proto, protoc_style)
class WellKnownTypesTest(unittest.TestCase):
def testWellKnownTypes(self):
os.chdir(_TEST_DIR)
out_dir = tempfile.mkdtemp(suffix="wkt_test", dir=".")
well_known_protos_include = pkg_resources.resource_filename(
"grpc_tools", "_proto"
)
args = [
"grpc_tools.protoc",
"--proto_path=protos",
"--proto_path={}".format(well_known_protos_include),
"--python_out={}".format(out_dir),
"--grpc_python_out={}".format(out_dir),
"protos/invocation_testing/compiler.proto",
]
rc = protoc.main(args)
self.assertEqual(0, rc)
def load_tests(loader, tests, pattern):
tests = tuple(
loader.loadTestsFromTestCase(test_case_class)
for test_case_class in _create_test_case_classes()
) + tuple(loader.loadTestsFromTestCase(WellKnownTypesTest))
return unittest.TestSuite(tests=tests)
if __name__ == "__main__":
unittest.main(verbosity=2)
| 13,032
| 31.994937
| 84
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/protoc_plugin/_python_plugin_test.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import contextlib
import distutils.spawn
import errno
import itertools
import os
import shutil
import subprocess
import sys
import tempfile
import threading
import unittest
import grpc
import grpc.experimental
import tests.protoc_plugin.protos.payload.test_payload_pb2 as payload_pb2
import tests.protoc_plugin.protos.requests.r.test_requests_pb2 as request_pb2
import tests.protoc_plugin.protos.responses.test_responses_pb2 as response_pb2
import tests.protoc_plugin.protos.service.test_service_pb2_grpc as service_pb2_grpc
from tests.unit import test_common
from tests.unit.framework.common import test_constants
# Identifiers of entities we expect to find in the generated module.
STUB_IDENTIFIER = "TestServiceStub"
SERVICER_IDENTIFIER = "TestServiceServicer"
ADD_SERVICER_TO_SERVER_IDENTIFIER = "add_TestServiceServicer_to_server"
class _ServicerMethods(object):
def __init__(self):
self._condition = threading.Condition()
self._paused = False
self._fail = False
@contextlib.contextmanager
def pause(self): # pylint: disable=invalid-name
with self._condition:
self._paused = True
yield
with self._condition:
self._paused = False
self._condition.notify_all()
@contextlib.contextmanager
def fail(self): # pylint: disable=invalid-name
with self._condition:
self._fail = True
yield
with self._condition:
self._fail = False
def _control(self): # pylint: disable=invalid-name
with self._condition:
if self._fail:
raise ValueError()
while self._paused:
self._condition.wait()
def UnaryCall(self, request, unused_rpc_context):
response = response_pb2.SimpleResponse()
response.payload.payload_type = payload_pb2.COMPRESSABLE
response.payload.payload_compressable = "a" * request.response_size
self._control()
return response
def StreamingOutputCall(self, request, unused_rpc_context):
for parameter in request.response_parameters:
response = response_pb2.StreamingOutputCallResponse()
response.payload.payload_type = payload_pb2.COMPRESSABLE
response.payload.payload_compressable = "a" * parameter.size
self._control()
yield response
def StreamingInputCall(self, request_iter, unused_rpc_context):
response = response_pb2.StreamingInputCallResponse()
aggregated_payload_size = 0
for request in request_iter:
aggregated_payload_size += len(request.payload.payload_compressable)
response.aggregated_payload_size = aggregated_payload_size
self._control()
return response
def FullDuplexCall(self, request_iter, unused_rpc_context):
for request in request_iter:
for parameter in request.response_parameters:
response = response_pb2.StreamingOutputCallResponse()
response.payload.payload_type = payload_pb2.COMPRESSABLE
response.payload.payload_compressable = "a" * parameter.size
self._control()
yield response
def HalfDuplexCall(self, request_iter, unused_rpc_context):
responses = []
for request in request_iter:
for parameter in request.response_parameters:
response = response_pb2.StreamingOutputCallResponse()
response.payload.payload_type = payload_pb2.COMPRESSABLE
response.payload.payload_compressable = "a" * parameter.size
self._control()
responses.append(response)
for response in responses:
yield response
class _Service(
collections.namedtuple(
"_Service",
(
"servicer_methods",
"server",
"stub",
),
)
):
"""A live and running service.
Attributes:
servicer_methods: The _ServicerMethods servicing RPCs.
server: The grpc.Server servicing RPCs.
stub: A stub on which to invoke RPCs.
"""
def _CreateService():
"""Provides a servicer backend and a stub.
Returns:
A _Service with which to test RPCs.
"""
servicer_methods = _ServicerMethods()
class Servicer(getattr(service_pb2_grpc, SERVICER_IDENTIFIER)):
def UnaryCall(self, request, context):
return servicer_methods.UnaryCall(request, context)
def StreamingOutputCall(self, request, context):
return servicer_methods.StreamingOutputCall(request, context)
def StreamingInputCall(self, request_iterator, context):
return servicer_methods.StreamingInputCall(
request_iterator, context
)
def FullDuplexCall(self, request_iterator, context):
return servicer_methods.FullDuplexCall(request_iterator, context)
def HalfDuplexCall(self, request_iterator, context):
return servicer_methods.HalfDuplexCall(request_iterator, context)
server = test_common.test_server()
getattr(service_pb2_grpc, ADD_SERVICER_TO_SERVER_IDENTIFIER)(
Servicer(), server
)
port = server.add_insecure_port("[::]:0")
server.start()
channel = grpc.insecure_channel("localhost:{}".format(port))
stub = getattr(service_pb2_grpc, STUB_IDENTIFIER)(channel)
return _Service(servicer_methods, server, stub)
def _CreateIncompleteService():
"""Provides a servicer backend that fails to implement methods and its stub.
Returns:
A _Service with which to test RPCs. The returned _Service's
servicer_methods implements none of the methods required of it.
"""
class Servicer(getattr(service_pb2_grpc, SERVICER_IDENTIFIER)):
pass
server = test_common.test_server()
getattr(service_pb2_grpc, ADD_SERVICER_TO_SERVER_IDENTIFIER)(
Servicer(), server
)
port = server.add_insecure_port("[::]:0")
server.start()
channel = grpc.insecure_channel("localhost:{}".format(port))
stub = getattr(service_pb2_grpc, STUB_IDENTIFIER)(channel)
return _Service(None, server, stub)
def _streaming_input_request_iterator():
for _ in range(3):
request = request_pb2.StreamingInputCallRequest()
request.payload.payload_type = payload_pb2.COMPRESSABLE
request.payload.payload_compressable = "a"
yield request
def _streaming_output_request():
request = request_pb2.StreamingOutputCallRequest()
sizes = [1, 2, 3]
request.response_parameters.add(size=sizes[0], interval_us=0)
request.response_parameters.add(size=sizes[1], interval_us=0)
request.response_parameters.add(size=sizes[2], interval_us=0)
return request
def _full_duplex_request_iterator():
request = request_pb2.StreamingOutputCallRequest()
request.response_parameters.add(size=1, interval_us=0)
yield request
request = request_pb2.StreamingOutputCallRequest()
request.response_parameters.add(size=2, interval_us=0)
request.response_parameters.add(size=3, interval_us=0)
yield request
class PythonPluginTest(unittest.TestCase):
"""Test case for the gRPC Python protoc-plugin.
While reading these tests, remember that the futures API
(`stub.method.future()`) only gives futures for the *response-unary*
methods and does not exist for response-streaming methods.
"""
def testImportAttributes(self):
# check that we can access the generated module and its members.
self.assertIsNotNone(getattr(service_pb2_grpc, STUB_IDENTIFIER, None))
self.assertIsNotNone(
getattr(service_pb2_grpc, SERVICER_IDENTIFIER, None)
)
self.assertIsNotNone(
getattr(service_pb2_grpc, ADD_SERVICER_TO_SERVER_IDENTIFIER, None)
)
def testUpDown(self):
service = _CreateService()
self.assertIsNotNone(service.servicer_methods)
self.assertIsNotNone(service.server)
self.assertIsNotNone(service.stub)
service.server.stop(None)
def testIncompleteServicer(self):
service = _CreateIncompleteService()
request = request_pb2.SimpleRequest(response_size=13)
with self.assertRaises(grpc.RpcError) as exception_context:
service.stub.UnaryCall(request)
self.assertIs(
exception_context.exception.code(), grpc.StatusCode.UNIMPLEMENTED
)
service.server.stop(None)
def testUnaryCall(self):
service = _CreateService()
request = request_pb2.SimpleRequest(response_size=13)
response = service.stub.UnaryCall(request)
expected_response = service.servicer_methods.UnaryCall(
request, "not a real context!"
)
self.assertEqual(expected_response, response)
service.server.stop(None)
def testUnaryCallFuture(self):
service = _CreateService()
request = request_pb2.SimpleRequest(response_size=13)
# Check that the call does not block waiting for the server to respond.
with service.servicer_methods.pause():
response_future = service.stub.UnaryCall.future(request)
response = response_future.result()
expected_response = service.servicer_methods.UnaryCall(
request, "not a real RpcContext!"
)
self.assertEqual(expected_response, response)
service.server.stop(None)
def testUnaryCallFutureExpired(self):
service = _CreateService()
request = request_pb2.SimpleRequest(response_size=13)
with service.servicer_methods.pause():
response_future = service.stub.UnaryCall.future(
request, timeout=test_constants.SHORT_TIMEOUT
)
with self.assertRaises(grpc.RpcError) as exception_context:
response_future.result()
self.assertIs(
exception_context.exception.code(),
grpc.StatusCode.DEADLINE_EXCEEDED,
)
self.assertIs(response_future.code(), grpc.StatusCode.DEADLINE_EXCEEDED)
service.server.stop(None)
def testUnaryCallFutureCancelled(self):
service = _CreateService()
request = request_pb2.SimpleRequest(response_size=13)
with service.servicer_methods.pause():
response_future = service.stub.UnaryCall.future(request)
response_future.cancel()
self.assertTrue(response_future.cancelled())
self.assertIs(response_future.code(), grpc.StatusCode.CANCELLED)
service.server.stop(None)
def testUnaryCallFutureFailed(self):
service = _CreateService()
request = request_pb2.SimpleRequest(response_size=13)
with service.servicer_methods.fail():
response_future = service.stub.UnaryCall.future(request)
self.assertIsNotNone(response_future.exception())
self.assertIs(response_future.code(), grpc.StatusCode.UNKNOWN)
service.server.stop(None)
def testStreamingOutputCall(self):
service = _CreateService()
request = _streaming_output_request()
responses = service.stub.StreamingOutputCall(request)
expected_responses = service.servicer_methods.StreamingOutputCall(
request, "not a real RpcContext!"
)
for expected_response, response in itertools.zip_longest(
expected_responses, responses
):
self.assertEqual(expected_response, response)
service.server.stop(None)
def testStreamingOutputCallExpired(self):
service = _CreateService()
request = _streaming_output_request()
with service.servicer_methods.pause():
responses = service.stub.StreamingOutputCall(
request, timeout=test_constants.SHORT_TIMEOUT
)
with self.assertRaises(grpc.RpcError) as exception_context:
list(responses)
self.assertIs(
exception_context.exception.code(),
grpc.StatusCode.DEADLINE_EXCEEDED,
)
service.server.stop(None)
def testStreamingOutputCallCancelled(self):
service = _CreateService()
request = _streaming_output_request()
responses = service.stub.StreamingOutputCall(request)
next(responses)
responses.cancel()
with self.assertRaises(grpc.RpcError) as exception_context:
next(responses)
self.assertIs(responses.code(), grpc.StatusCode.CANCELLED)
service.server.stop(None)
def testStreamingOutputCallFailed(self):
service = _CreateService()
request = _streaming_output_request()
with service.servicer_methods.fail():
responses = service.stub.StreamingOutputCall(request)
self.assertIsNotNone(responses)
with self.assertRaises(grpc.RpcError) as exception_context:
next(responses)
self.assertIs(
exception_context.exception.code(), grpc.StatusCode.UNKNOWN
)
service.server.stop(None)
def testStreamingInputCall(self):
service = _CreateService()
response = service.stub.StreamingInputCall(
_streaming_input_request_iterator()
)
expected_response = service.servicer_methods.StreamingInputCall(
_streaming_input_request_iterator(), "not a real RpcContext!"
)
self.assertEqual(expected_response, response)
service.server.stop(None)
def testStreamingInputCallFuture(self):
service = _CreateService()
with service.servicer_methods.pause():
response_future = service.stub.StreamingInputCall.future(
_streaming_input_request_iterator()
)
response = response_future.result()
expected_response = service.servicer_methods.StreamingInputCall(
_streaming_input_request_iterator(), "not a real RpcContext!"
)
self.assertEqual(expected_response, response)
service.server.stop(None)
def testStreamingInputCallFutureExpired(self):
service = _CreateService()
with service.servicer_methods.pause():
response_future = service.stub.StreamingInputCall.future(
_streaming_input_request_iterator(),
timeout=test_constants.SHORT_TIMEOUT,
)
with self.assertRaises(grpc.RpcError) as exception_context:
response_future.result()
self.assertIsInstance(response_future.exception(), grpc.RpcError)
self.assertIs(
response_future.exception().code(),
grpc.StatusCode.DEADLINE_EXCEEDED,
)
self.assertIs(
exception_context.exception.code(),
grpc.StatusCode.DEADLINE_EXCEEDED,
)
service.server.stop(None)
def testStreamingInputCallFutureCancelled(self):
service = _CreateService()
with service.servicer_methods.pause():
response_future = service.stub.StreamingInputCall.future(
_streaming_input_request_iterator()
)
response_future.cancel()
self.assertTrue(response_future.cancelled())
with self.assertRaises(grpc.FutureCancelledError):
response_future.result()
service.server.stop(None)
def testStreamingInputCallFutureFailed(self):
service = _CreateService()
with service.servicer_methods.fail():
response_future = service.stub.StreamingInputCall.future(
_streaming_input_request_iterator()
)
self.assertIsNotNone(response_future.exception())
self.assertIs(response_future.code(), grpc.StatusCode.UNKNOWN)
service.server.stop(None)
def testFullDuplexCall(self):
service = _CreateService()
responses = service.stub.FullDuplexCall(_full_duplex_request_iterator())
expected_responses = service.servicer_methods.FullDuplexCall(
_full_duplex_request_iterator(), "not a real RpcContext!"
)
for expected_response, response in itertools.zip_longest(
expected_responses, responses
):
self.assertEqual(expected_response, response)
service.server.stop(None)
def testFullDuplexCallExpired(self):
request_iterator = _full_duplex_request_iterator()
service = _CreateService()
with service.servicer_methods.pause():
responses = service.stub.FullDuplexCall(
request_iterator, timeout=test_constants.SHORT_TIMEOUT
)
with self.assertRaises(grpc.RpcError) as exception_context:
list(responses)
self.assertIs(
exception_context.exception.code(),
grpc.StatusCode.DEADLINE_EXCEEDED,
)
service.server.stop(None)
def testFullDuplexCallCancelled(self):
service = _CreateService()
request_iterator = _full_duplex_request_iterator()
responses = service.stub.FullDuplexCall(request_iterator)
next(responses)
responses.cancel()
with self.assertRaises(grpc.RpcError) as exception_context:
next(responses)
self.assertIs(
exception_context.exception.code(), grpc.StatusCode.CANCELLED
)
service.server.stop(None)
def testFullDuplexCallFailed(self):
request_iterator = _full_duplex_request_iterator()
service = _CreateService()
with service.servicer_methods.fail():
responses = service.stub.FullDuplexCall(request_iterator)
with self.assertRaises(grpc.RpcError) as exception_context:
next(responses)
self.assertIs(
exception_context.exception.code(), grpc.StatusCode.UNKNOWN
)
service.server.stop(None)
def testHalfDuplexCall(self):
service = _CreateService()
def half_duplex_request_iterator():
request = request_pb2.StreamingOutputCallRequest()
request.response_parameters.add(size=1, interval_us=0)
yield request
request = request_pb2.StreamingOutputCallRequest()
request.response_parameters.add(size=2, interval_us=0)
request.response_parameters.add(size=3, interval_us=0)
yield request
responses = service.stub.HalfDuplexCall(half_duplex_request_iterator())
expected_responses = service.servicer_methods.HalfDuplexCall(
half_duplex_request_iterator(), "not a real RpcContext!"
)
for expected_response, response in itertools.zip_longest(
expected_responses, responses
):
self.assertEqual(expected_response, response)
service.server.stop(None)
def testHalfDuplexCallWedged(self):
condition = threading.Condition()
wait_cell = [False]
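        # The wait() context manager below "wedges" the request iterator:
        # while inside wait(), wait_cell[0] is True, so the iterator blocks
        # after yielding its first request. A half-duplex call needs the
        # complete request stream before responding, so the short deadline
        # expires first.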
@contextlib.contextmanager
def wait(): # pylint: disable=invalid-name
# Where's Python 3's 'nonlocal' statement when you need it?
with condition:
wait_cell[0] = True
yield
with condition:
wait_cell[0] = False
condition.notify_all()
def half_duplex_request_iterator():
request = request_pb2.StreamingOutputCallRequest()
request.response_parameters.add(size=1, interval_us=0)
yield request
with condition:
while wait_cell[0]:
condition.wait()
service = _CreateService()
with wait():
responses = service.stub.HalfDuplexCall(
half_duplex_request_iterator(),
timeout=test_constants.SHORT_TIMEOUT,
)
            # A half-duplex call waits for the client to finish sending
            # requests before it responds.
with self.assertRaises(grpc.RpcError) as exception_context:
next(responses)
self.assertIs(
exception_context.exception.code(),
grpc.StatusCode.DEADLINE_EXCEEDED,
)
service.server.stop(None)
@unittest.skipIf(
    sys.version_info < (3, 6),
    "Requires Python 3.6 or later.",
)
class SimpleStubsPluginTest(unittest.TestCase):
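    # These tests exercise the module-level "simple stubs" invocation API, in
    # which the generated service methods are called directly with a target
    # and channel credentials rather than through an explicitly created
    # channel and stub.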
servicer_methods = _ServicerMethods()
class Servicer(service_pb2_grpc.TestServiceServicer):
def UnaryCall(self, request, context):
return SimpleStubsPluginTest.servicer_methods.UnaryCall(
request, context
)
def StreamingOutputCall(self, request, context):
return SimpleStubsPluginTest.servicer_methods.StreamingOutputCall(
request, context
)
def StreamingInputCall(self, request_iterator, context):
return SimpleStubsPluginTest.servicer_methods.StreamingInputCall(
request_iterator, context
)
def FullDuplexCall(self, request_iterator, context):
return SimpleStubsPluginTest.servicer_methods.FullDuplexCall(
request_iterator, context
)
def HalfDuplexCall(self, request_iterator, context):
return SimpleStubsPluginTest.servicer_methods.HalfDuplexCall(
request_iterator, context
)
def setUp(self):
super(SimpleStubsPluginTest, self).setUp()
self._server = test_common.test_server()
service_pb2_grpc.add_TestServiceServicer_to_server(
self.Servicer(), self._server
)
self._port = self._server.add_insecure_port("[::]:0")
self._server.start()
self._target = "localhost:{}".format(self._port)
def tearDown(self):
self._server.stop(None)
super(SimpleStubsPluginTest, self).tearDown()
def testUnaryCall(self):
request = request_pb2.SimpleRequest(response_size=13)
response = service_pb2_grpc.TestService.UnaryCall(
request,
self._target,
channel_credentials=grpc.experimental.insecure_channel_credentials(),
wait_for_ready=True,
)
expected_response = self.servicer_methods.UnaryCall(
request, "not a real context!"
)
self.assertEqual(expected_response, response)
def testUnaryCallInsecureSugar(self):
request = request_pb2.SimpleRequest(response_size=13)
response = service_pb2_grpc.TestService.UnaryCall(
request, self._target, insecure=True, wait_for_ready=True
)
expected_response = self.servicer_methods.UnaryCall(
request, "not a real context!"
)
self.assertEqual(expected_response, response)
def testStreamingOutputCall(self):
request = _streaming_output_request()
expected_responses = self.servicer_methods.StreamingOutputCall(
request, "not a real RpcContext!"
)
responses = service_pb2_grpc.TestService.StreamingOutputCall(
request,
self._target,
channel_credentials=grpc.experimental.insecure_channel_credentials(),
wait_for_ready=True,
)
for expected_response, response in itertools.zip_longest(
expected_responses, responses
):
self.assertEqual(expected_response, response)
def testStreamingInputCall(self):
response = service_pb2_grpc.TestService.StreamingInputCall(
_streaming_input_request_iterator(),
self._target,
channel_credentials=grpc.experimental.insecure_channel_credentials(),
wait_for_ready=True,
)
expected_response = self.servicer_methods.StreamingInputCall(
_streaming_input_request_iterator(), "not a real RpcContext!"
)
self.assertEqual(expected_response, response)
def testFullDuplexCall(self):
responses = service_pb2_grpc.TestService.FullDuplexCall(
_full_duplex_request_iterator(),
self._target,
channel_credentials=grpc.experimental.insecure_channel_credentials(),
wait_for_ready=True,
)
expected_responses = self.servicer_methods.FullDuplexCall(
_full_duplex_request_iterator(), "not a real RpcContext!"
)
for expected_response, response in itertools.zip_longest(
expected_responses, responses
):
self.assertEqual(expected_response, response)
def testHalfDuplexCall(self):
def half_duplex_request_iterator():
request = request_pb2.StreamingOutputCallRequest()
request.response_parameters.add(size=1, interval_us=0)
yield request
request = request_pb2.StreamingOutputCallRequest()
request.response_parameters.add(size=2, interval_us=0)
request.response_parameters.add(size=3, interval_us=0)
yield request
responses = service_pb2_grpc.TestService.HalfDuplexCall(
half_duplex_request_iterator(),
self._target,
channel_credentials=grpc.experimental.insecure_channel_credentials(),
wait_for_ready=True,
)
expected_responses = self.servicer_methods.HalfDuplexCall(
half_duplex_request_iterator(), "not a real RpcContext!"
)
for expected_response, response in itertools.zip_longest(
expected_responses, responses
):
self.assertEqual(expected_response, response)
class ModuleMainTest(unittest.TestCase):
"""Test case for running `python -m grpc_tools.protoc`."""
def test_clean_output(self):
if sys.executable is None:
raise unittest.SkipTest(
"Running on a interpreter that cannot be invoked from the CLI."
)
proto_dir_path = os.path.join("src", "proto")
test_proto_path = os.path.join(
proto_dir_path, "grpc", "testing", "empty.proto"
)
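        # Capture stdout and stderr in temporary files so the test can assert
        # that a clean compilation of empty.proto produces no output at all
        # and exits with status 0.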
streams = tuple(tempfile.TemporaryFile() for _ in range(2))
work_dir = tempfile.mkdtemp()
try:
invocation = (
sys.executable,
"-m",
"grpc_tools.protoc",
"--proto_path",
proto_dir_path,
"--python_out",
work_dir,
"--grpc_python_out",
work_dir,
test_proto_path,
)
proc = subprocess.Popen(
invocation, stdout=streams[0], stderr=streams[1]
)
proc.wait()
outs = []
for stream in streams:
stream.seek(0)
self.assertEqual(0, len(stream.read()))
self.assertEqual(0, proc.returncode)
        finally:
            shutil.rmtree(work_dir)
if __name__ == "__main__":
unittest.main(verbosity=2)
| 27,568
| 36.869505
| 83
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/protoc_plugin/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/protoc_plugin/protos/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/protoc_plugin/protos/service/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/protoc_plugin/protos/responses/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/protoc_plugin/protos/invocation_testing/__init__.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/protoc_plugin/protos/requests/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/protoc_plugin/protos/requests/r/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/protoc_plugin/protos/payload/__init__.py
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/status/_grpc_status_test.py
|
# Copyright 2018 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests of grpc_status.
isort:skip_file
"""
# NOTE(lidiz) This module only exists in the Bazel BUILD file; for more
# details, please refer to the comments in the "bazel_namespace_package_hack"
# module.
try:
from tests import bazel_namespace_package_hack
bazel_namespace_package_hack.sys_path_to_site_dir_hack()
except ImportError:
pass
import unittest
import logging
import traceback
import sys
import grpc
from grpc_status import rpc_status
from tests.unit import test_common
from google.protobuf import any_pb2
from google.rpc import code_pb2, status_pb2, error_details_pb2
_STATUS_OK = "/test/StatusOK"
_STATUS_NOT_OK = "/test/StatusNotOk"
_ERROR_DETAILS = "/test/ErrorDetails"
_INCONSISTENT = "/test/Inconsistent"
_INVALID_CODE = "/test/InvalidCode"
_REQUEST = b"\x00\x00\x00"
_RESPONSE = b"\x01\x01\x01"
_GRPC_DETAILS_METADATA_KEY = "grpc-status-details-bin"
_STATUS_DETAILS = "This is an error detail"
_STATUS_DETAILS_ANOTHER = "This is another error detail"
def _ok_unary_unary(request, servicer_context):
return _RESPONSE
def _not_ok_unary_unary(request, servicer_context):
servicer_context.abort(grpc.StatusCode.INTERNAL, _STATUS_DETAILS)
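# The handler below exercises the rich-status flow: a DebugInfo message is
# packed into a google.protobuf.Any, attached to a google.rpc.Status, and the
# RPC is aborted with that status so the details reach the client via the
# "grpc-status-details-bin" trailing metadata.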
def _error_details_unary_unary(request, servicer_context):
details = any_pb2.Any()
details.Pack(
error_details_pb2.DebugInfo(
stack_entries=traceback.format_stack(),
detail="Intentionally invoked",
)
)
rich_status = status_pb2.Status(
code=code_pb2.INTERNAL,
message=_STATUS_DETAILS,
details=[details],
)
servicer_context.abort_with_status(rpc_status.to_status(rich_status))
def _inconsistent_unary_unary(request, servicer_context):
rich_status = status_pb2.Status(
code=code_pb2.INTERNAL,
message=_STATUS_DETAILS,
)
servicer_context.set_code(grpc.StatusCode.NOT_FOUND)
servicer_context.set_details(_STATUS_DETAILS_ANOTHER)
    # The user has put inconsistent status information in the trailing
    # metadata.
servicer_context.set_trailing_metadata(
((_GRPC_DETAILS_METADATA_KEY, rich_status.SerializeToString()),)
)
def _invalid_code_unary_unary(request, servicer_context):
rich_status = status_pb2.Status(
code=42,
message="Invalid code",
)
servicer_context.abort_with_status(rpc_status.to_status(rich_status))
class _GenericHandler(grpc.GenericRpcHandler):
def service(self, handler_call_details):
if handler_call_details.method == _STATUS_OK:
return grpc.unary_unary_rpc_method_handler(_ok_unary_unary)
elif handler_call_details.method == _STATUS_NOT_OK:
return grpc.unary_unary_rpc_method_handler(_not_ok_unary_unary)
elif handler_call_details.method == _ERROR_DETAILS:
return grpc.unary_unary_rpc_method_handler(
_error_details_unary_unary
)
elif handler_call_details.method == _INCONSISTENT:
return grpc.unary_unary_rpc_method_handler(
_inconsistent_unary_unary
)
elif handler_call_details.method == _INVALID_CODE:
return grpc.unary_unary_rpc_method_handler(
_invalid_code_unary_unary
)
else:
return None
@unittest.skipIf(
sys.version_info[0] < 3, "ProtoBuf descriptor has moved on from Python2"
)
class StatusTest(unittest.TestCase):
def setUp(self):
self._server = test_common.test_server()
self._server.add_generic_rpc_handlers((_GenericHandler(),))
port = self._server.add_insecure_port("[::]:0")
self._server.start()
self._channel = grpc.insecure_channel("localhost:%d" % port)
def tearDown(self):
self._server.stop(None)
self._channel.close()
def test_status_ok(self):
_, call = self._channel.unary_unary(_STATUS_OK).with_call(_REQUEST)
        # A successful RPC does not carry a rich status payload.
status = rpc_status.from_call(call)
self.assertIs(status, None)
def test_status_not_ok(self):
with self.assertRaises(grpc.RpcError) as exception_context:
self._channel.unary_unary(_STATUS_NOT_OK).with_call(_REQUEST)
rpc_error = exception_context.exception
self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL)
        # A failed RPC does not automatically generate a rich status payload.
status = rpc_status.from_call(rpc_error)
self.assertIs(status, None)
def test_error_details(self):
with self.assertRaises(grpc.RpcError) as exception_context:
self._channel.unary_unary(_ERROR_DETAILS).with_call(_REQUEST)
rpc_error = exception_context.exception
status = rpc_status.from_call(rpc_error)
self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL)
self.assertEqual(status.code, code_pb2.Code.Value("INTERNAL"))
# Check if the underlying proto message is intact
self.assertEqual(
status.details[0].Is(error_details_pb2.DebugInfo.DESCRIPTOR), True
)
info = error_details_pb2.DebugInfo()
status.details[0].Unpack(info)
self.assertIn("_error_details_unary_unary", info.stack_entries[-1])
def test_code_message_validation(self):
with self.assertRaises(grpc.RpcError) as exception_context:
self._channel.unary_unary(_INCONSISTENT).with_call(_REQUEST)
rpc_error = exception_context.exception
self.assertEqual(rpc_error.code(), grpc.StatusCode.NOT_FOUND)
# Code/Message validation failed
self.assertRaises(ValueError, rpc_status.from_call, rpc_error)
def test_invalid_code(self):
with self.assertRaises(grpc.RpcError) as exception_context:
self._channel.unary_unary(_INVALID_CODE).with_call(_REQUEST)
rpc_error = exception_context.exception
self.assertEqual(rpc_error.code(), grpc.StatusCode.UNKNOWN)
        # Invalid status code exception raised during conversion
self.assertIn("Invalid status code", rpc_error.details())
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
| 6,688
| 33.302564
| 78
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/status/__init__.py
|
# Copyright 2018 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 580
| 40.5
| 74
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/health_check/_health_servicer_test.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests of grpc_health.v1.health."""
import logging
import queue
import sys
import threading
import time
import unittest
import grpc
from grpc_health.v1 import health
from grpc_health.v1 import health_pb2
from grpc_health.v1 import health_pb2_grpc
from tests.unit import test_common
from tests.unit import thread_pool
from tests.unit.framework.common import test_constants
_SERVING_SERVICE = "grpc.test.TestServiceServing"
_UNKNOWN_SERVICE = "grpc.test.TestServiceUnknown"
_NOT_SERVING_SERVICE = "grpc.test.TestServiceNotServing"
_WATCH_SERVICE = "grpc.test.WatchService"
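# Watch is a response-streaming RPC, so each test drains it into a queue on a
# background thread and then asserts on the queued responses with a timeout.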
def _consume_responses(response_iterator, response_queue):
for response in response_iterator:
response_queue.put(response)
class BaseWatchTests(object):
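    # The watch tests are nested in this holder class so that unittest does not
    # collect them directly; the concrete subclasses at the bottom of the file
    # run them against both the blocking and the non-blocking servicer.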
@unittest.skipIf(
sys.version_info[0] < 3, "ProtoBuf descriptor has moved on from Python2"
)
class WatchTests(unittest.TestCase):
def start_server(self, non_blocking=False, thread_pool=None):
self._thread_pool = thread_pool
self._servicer = health.HealthServicer(
experimental_non_blocking=non_blocking,
experimental_thread_pool=thread_pool,
)
self._servicer.set(
_SERVING_SERVICE, health_pb2.HealthCheckResponse.SERVING
)
self._servicer.set(
_UNKNOWN_SERVICE, health_pb2.HealthCheckResponse.UNKNOWN
)
self._servicer.set(
_NOT_SERVING_SERVICE, health_pb2.HealthCheckResponse.NOT_SERVING
)
self._server = test_common.test_server()
port = self._server.add_insecure_port("[::]:0")
health_pb2_grpc.add_HealthServicer_to_server(
self._servicer, self._server
)
self._server.start()
self._channel = grpc.insecure_channel("localhost:%d" % port)
self._stub = health_pb2_grpc.HealthStub(self._channel)
def tearDown(self):
self._server.stop(None)
self._channel.close()
def test_watch_empty_service(self):
request = health_pb2.HealthCheckRequest(service="")
response_queue = queue.Queue()
rendezvous = self._stub.Watch(request)
thread = threading.Thread(
target=_consume_responses, args=(rendezvous, response_queue)
)
thread.start()
response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT)
self.assertEqual(
health_pb2.HealthCheckResponse.SERVING, response.status
)
rendezvous.cancel()
thread.join()
self.assertTrue(response_queue.empty())
if self._thread_pool is not None:
self.assertTrue(self._thread_pool.was_used())
def test_watch_new_service(self):
request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
response_queue = queue.Queue()
rendezvous = self._stub.Watch(request)
thread = threading.Thread(
target=_consume_responses, args=(rendezvous, response_queue)
)
thread.start()
response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT)
self.assertEqual(
health_pb2.HealthCheckResponse.SERVICE_UNKNOWN, response.status
)
self._servicer.set(
_WATCH_SERVICE, health_pb2.HealthCheckResponse.SERVING
)
response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT)
self.assertEqual(
health_pb2.HealthCheckResponse.SERVING, response.status
)
self._servicer.set(
_WATCH_SERVICE, health_pb2.HealthCheckResponse.NOT_SERVING
)
response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT)
self.assertEqual(
health_pb2.HealthCheckResponse.NOT_SERVING, response.status
)
rendezvous.cancel()
thread.join()
self.assertTrue(response_queue.empty())
def test_watch_service_isolation(self):
request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
response_queue = queue.Queue()
rendezvous = self._stub.Watch(request)
thread = threading.Thread(
target=_consume_responses, args=(rendezvous, response_queue)
)
thread.start()
response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT)
self.assertEqual(
health_pb2.HealthCheckResponse.SERVICE_UNKNOWN, response.status
)
self._servicer.set(
"some-other-service", health_pb2.HealthCheckResponse.SERVING
)
with self.assertRaises(queue.Empty):
response_queue.get(timeout=test_constants.SHORT_TIMEOUT)
rendezvous.cancel()
thread.join()
self.assertTrue(response_queue.empty())
def test_two_watchers(self):
request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
response_queue1 = queue.Queue()
response_queue2 = queue.Queue()
rendezvous1 = self._stub.Watch(request)
rendezvous2 = self._stub.Watch(request)
thread1 = threading.Thread(
target=_consume_responses, args=(rendezvous1, response_queue1)
)
thread2 = threading.Thread(
target=_consume_responses, args=(rendezvous2, response_queue2)
)
thread1.start()
thread2.start()
response1 = response_queue1.get(
timeout=test_constants.SHORT_TIMEOUT
)
response2 = response_queue2.get(
timeout=test_constants.SHORT_TIMEOUT
)
self.assertEqual(
health_pb2.HealthCheckResponse.SERVICE_UNKNOWN, response1.status
)
self.assertEqual(
health_pb2.HealthCheckResponse.SERVICE_UNKNOWN, response2.status
)
self._servicer.set(
_WATCH_SERVICE, health_pb2.HealthCheckResponse.SERVING
)
response1 = response_queue1.get(
timeout=test_constants.SHORT_TIMEOUT
)
response2 = response_queue2.get(
timeout=test_constants.SHORT_TIMEOUT
)
self.assertEqual(
health_pb2.HealthCheckResponse.SERVING, response1.status
)
self.assertEqual(
health_pb2.HealthCheckResponse.SERVING, response2.status
)
rendezvous1.cancel()
rendezvous2.cancel()
thread1.join()
thread2.join()
self.assertTrue(response_queue1.empty())
self.assertTrue(response_queue2.empty())
@unittest.skip("https://github.com/grpc/grpc/issues/18127")
def test_cancelled_watch_removed_from_watch_list(self):
request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
response_queue = queue.Queue()
rendezvous = self._stub.Watch(request)
thread = threading.Thread(
target=_consume_responses, args=(rendezvous, response_queue)
)
thread.start()
response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT)
self.assertEqual(
health_pb2.HealthCheckResponse.SERVICE_UNKNOWN, response.status
)
rendezvous.cancel()
self._servicer.set(
_WATCH_SERVICE, health_pb2.HealthCheckResponse.SERVING
)
thread.join()
# Wait, if necessary, for serving thread to process client cancellation
timeout = time.time() + test_constants.TIME_ALLOWANCE
while (
time.time() < timeout
and self._servicer._send_response_callbacks[_WATCH_SERVICE]
):
time.sleep(1)
self.assertFalse(
self._servicer._send_response_callbacks[_WATCH_SERVICE],
"watch set should be empty",
)
self.assertTrue(response_queue.empty())
def test_graceful_shutdown(self):
request = health_pb2.HealthCheckRequest(service="")
response_queue = queue.Queue()
rendezvous = self._stub.Watch(request)
thread = threading.Thread(
target=_consume_responses, args=(rendezvous, response_queue)
)
thread.start()
response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT)
self.assertEqual(
health_pb2.HealthCheckResponse.SERVING, response.status
)
self._servicer.enter_graceful_shutdown()
response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT)
self.assertEqual(
health_pb2.HealthCheckResponse.NOT_SERVING, response.status
)
# This should be a no-op.
self._servicer.set("", health_pb2.HealthCheckResponse.SERVING)
rendezvous.cancel()
thread.join()
self.assertTrue(response_queue.empty())
@unittest.skipIf(
sys.version_info[0] < 3, "ProtoBuf descriptor has moved on from Python2"
)
class HealthServicerTest(BaseWatchTests.WatchTests):
def setUp(self):
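        # A recording thread pool is supplied so the tests can verify that the
        # non-blocking servicer actually scheduled its callbacks on it (see the
        # was_used() check in test_watch_empty_service).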
self._thread_pool = thread_pool.RecordingThreadPool(max_workers=None)
super(HealthServicerTest, self).start_server(
non_blocking=True, thread_pool=self._thread_pool
)
def test_check_empty_service(self):
request = health_pb2.HealthCheckRequest()
resp = self._stub.Check(request)
self.assertEqual(health_pb2.HealthCheckResponse.SERVING, resp.status)
def test_check_serving_service(self):
request = health_pb2.HealthCheckRequest(service=_SERVING_SERVICE)
resp = self._stub.Check(request)
self.assertEqual(health_pb2.HealthCheckResponse.SERVING, resp.status)
def test_check_unknown_service(self):
request = health_pb2.HealthCheckRequest(service=_UNKNOWN_SERVICE)
resp = self._stub.Check(request)
self.assertEqual(health_pb2.HealthCheckResponse.UNKNOWN, resp.status)
def test_check_not_serving_service(self):
request = health_pb2.HealthCheckRequest(service=_NOT_SERVING_SERVICE)
resp = self._stub.Check(request)
self.assertEqual(
health_pb2.HealthCheckResponse.NOT_SERVING, resp.status
)
def test_check_not_found_service(self):
request = health_pb2.HealthCheckRequest(service="not-found")
with self.assertRaises(grpc.RpcError) as context:
resp = self._stub.Check(request)
self.assertEqual(grpc.StatusCode.NOT_FOUND, context.exception.code())
def test_health_service_name(self):
self.assertEqual(health.SERVICE_NAME, "grpc.health.v1.Health")
@unittest.skipIf(
sys.version_info[0] < 3, "ProtoBuf descriptor has moved on from Python2"
)
class HealthServicerBackwardsCompatibleWatchTest(BaseWatchTests.WatchTests):
def setUp(self):
super(HealthServicerBackwardsCompatibleWatchTest, self).start_server(
non_blocking=False, thread_pool=None
)
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)
| 12,149
| 36.384615
| 83
|
py
|
grpc
|
grpc-master/src/python/grpcio_tests/tests/health_check/__init__.py
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 577
| 40.285714
| 74
|
py
|