Skip to content

Commit 3b29113

Browse files
committed
feat: add client debug logging support for unary-stream gRPC calls
1 parent 7fbd5fd commit 3b29113

File tree

2 files changed

+57
-3
lines changed

google/api_core/grpc_helpers.py

+28-1
Original file line numberDiff line numberDiff line change
@@ -17,9 +17,13 @@
1717

1818
import collections
1919
import functools
20+
import logging
21+
import pickle
2022
import warnings
2123

24+
import google.protobuf.json_format
2225
import grpc
26+
import proto
2327

2428
from google.api_core import exceptions
2529
import google.auth
@@ -48,6 +52,7 @@
4852
else:
4953
HAS_GRPC_GCP = False
5054

55+
_LOGGER = logging.getLogger(__name__)
5156

5257
# The list of gRPC Callable interfaces that return iterators.
5358
_STREAM_WRAP_CLASSES = (grpc.UnaryStreamMultiCallable, grpc.StreamStreamMultiCallable)
@@ -113,7 +118,29 @@ def __next__(self) -> P:
113118
result = self._stored_first_result
114119
del self._stored_first_result
115120
return result
116-
return next(self._wrapped)
121+
result = next(self._wrapped)
122+
123+
logging_enabled = _LOGGER.isEnabledFor(logging.DEBUG)
124+
if logging_enabled: # pragma: NO COVER
125+
if isinstance(result, proto.Message):
126+
response_payload = type(result).to_json(result)
127+
elif isinstance(result, google.protobuf.message.Message):
128+
response_payload = google.protobuf.json_format.MessageToJson(result)
129+
else:
130+
response_payload = (
131+
f"{type(result).__name__}: {pickle.dumps(result)}"
132+
)
133+
grpc_response = {
134+
"payload": response_payload,
135+
"status": "OK",
136+
}
137+
_LOGGER.debug(
138+
f"Received response of type {type(result)} via gRPC stream",
139+
extra={
140+
"response": grpc_response,
141+
},
142+
)
143+
return result
117144
except grpc.RpcError as exc:
118145
# If the stream has already returned data, we cannot recover here.
119146
raise exceptions.from_grpc_error(exc) from exc

google/api_core/grpc_helpers_async.py

+29-2
Original file line numberDiff line numberDiff line change
@@ -20,17 +20,23 @@
2020

2121
import asyncio
2222
import functools
23+
import logging
24+
import pickle
2325

2426
from typing import AsyncGenerator, Generic, Iterator, Optional, TypeVar
2527

28+
import google.protobuf.json_format
2629
import grpc
2730
from grpc import aio
31+
import proto
2832

2933
from google.api_core import exceptions, grpc_helpers
3034

3135
# denotes the proto response type for grpc calls
3236
P = TypeVar("P")
3337

38+
_LOGGER = logging.getLogger(__name__)
39+
3440
# NOTE(lidiz) Alternatively, we can hack "__getattribute__" to perform
3541
# automatic patching for us. But that means the overhead of creating an
3642
# extra Python function spreads to every single send and receive.
@@ -94,7 +100,28 @@ def __init__(self):
94100

95101
async def read(self) -> P:
    """Read one response message from the wrapped streaming gRPC call.

    When DEBUG logging is enabled for this module, a log record describing
    the received message (payload rendered as JSON where possible) is
    emitted before the message is returned unchanged.

    Returns:
        P: The next response message from the underlying ``self._call``.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: Mapped from any
            :class:`grpc.RpcError` raised by the underlying call.
    """
    try:
        result = await self._call.read()
        # Only build the (potentially expensive) JSON payload when DEBUG
        # logging is actually enabled.
        if _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
            # Explicit import: `import google.protobuf.json_format` alone is
            # not guaranteed to bind the `google.protobuf.message` submodule
            # attribute referenced below; importing it here makes the
            # isinstance check robust. Scoped locally so the cost is only
            # paid when debug logging is on.
            import google.protobuf.message

            if isinstance(result, proto.Message):
                response_payload = type(result).to_json(result)
            elif isinstance(result, google.protobuf.message.Message):
                response_payload = google.protobuf.json_format.MessageToJson(result)
            else:
                # Fallback for non-protobuf results. NOTE: pickle is used
                # only to *serialize* for display, never to load data.
                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
            grpc_response = {
                "payload": response_payload,
                "status": "OK",
            }
            # Lazy %-style args: formatting is deferred until a handler
            # actually emits the record.
            _LOGGER.debug(
                "Received response of type %s via gRPC stream",
                type(result),
                extra={"response": grpc_response},
            )
        return result
    except grpc.RpcError as rpc_error:
        # Surface gRPC failures as google.api_core exceptions for callers.
        raise exceptions.from_grpc_error(rpc_error) from rpc_error
100127

@@ -219,7 +246,7 @@ def create_channel(
219246
default_host=None,
220247
compression=None,
221248
attempt_direct_path: Optional[bool] = False,
222-
**kwargs
249+
**kwargs,
223250
):
224251
"""Create an AsyncIO secure channel with credentials.
225252

0 commit comments

Comments
 (0)