I'm trying to send fairly large audio files to the server and am getting a RESOURCE_EXHAUSTED error. Is there any way to configure the server to increase the maximum allowed message size?
Here's the stack trace:
2020-12-24 23:30:14.941839:cortex:pid-2247:INFO:500 Internal Server Error POST /
2020-12-24 23:30:14.942071:cortex:pid-2247:ERROR:Exception in ASGI application
Traceback (most recent call last):
File "/opt/conda/envs/env/lib/python3.6/site-packages/uvicorn/protocols/http/httptools_impl.py", line
390, in run_asgi
result = await app(self.scope, self.receive, self.send)
File "/opt/conda/envs/env/lib/python3.6/site-packages/uvicorn/middleware/proxy_headers.py", line 45, in __call__
return await self.app(scope, receive, send)
File "/opt/conda/envs/env/lib/python3.6/site-packages/fastapi/applications.py", line 181, in __call__
await super().__call__(scope, receive, send) # pragma: no cover
File "/opt/conda/envs/env/lib/python3.6/site-packages/starlette/applications.py", line 111, in __call__
await self.middleware_stack(scope, receive, send)
File "/opt/conda/envs/env/lib/python3.6/site-packages/starlette/middleware/errors.py", line 181, in __call__
raise exc from None
File "/opt/conda/envs/env/lib/python3.6/site-packages/starlette/middleware/errors.py", line 159, in __call__
await self.app(scope, receive, _send)
File "/opt/conda/envs/env/lib/python3.6/site-packages/starlette/middleware/base.py", line 25, in __call__
response = await self.dispatch_func(request, self.call_next)
File "/opt/conda/envs/env/lib/python3.6/site-packages/cortex_internal/serve/serve.py", line 187, in parse_payload
return await call_next(request)
File "/opt/conda/envs/env/lib/python3.6/site-packages/starlette/middleware/base.py", line 45, in call_next
task.result()
File "/opt/conda/envs/env/lib/python3.6/site-packages/starlette/middleware/base.py", line 38, in coro
await self.app(scope, receive, send)
File "/opt/conda/envs/env/lib/python3.6/site-packages/starlette/middleware/base.py", line 25, in __call__
response = await self.dispatch_func(request, self.call_next)
File "/opt/conda/envs/env/lib/python3.6/site-packages/cortex_internal/serve/serve.py", line 134, in register_request
response = await call_next(request)
File "/opt/conda/envs/env/lib/python3.6/site-packages/starlette/middleware/base.py", line 45, in call_next
task.result()
File "/opt/conda/envs/env/lib/python3.6/site-packages/starlette/middleware/base.py", line 38, in coro
await self.app(scope, receive, send)
File "/opt/conda/envs/env/lib/python3.6/site-packages/starlette/exceptions.py", line 82, in __call__
raise exc from None
File "/opt/conda/envs/env/lib/python3.6/site-packages/starlette/exceptions.py", line 71, in __call__
await self.app(scope, receive, sender)
File "/opt/conda/envs/env/lib/python3.6/site-packages/starlette/routing.py", line 566, in __call__
await route.handle(scope, receive, send)
File "/opt/conda/envs/env/lib/python3.6/site-packages/starlette/routing.py", line 227, in handle
await self.app(scope, receive, send)
File "/opt/conda/envs/env/lib/python3.6/site-packages/starlette/routing.py", line 41, in app
response = await func(request)
File "/opt/conda/envs/env/lib/python3.6/site-packages/fastapi/routing.py", line 183, in app
dependant=dependant, values=values, is_coroutine=is_coroutine
File "/opt/conda/envs/env/lib/python3.6/site-packages/fastapi/routing.py", line 135, in run_endpoint_function
return await run_in_threadpool(dependant.call, **values)
File "/opt/conda/envs/env/lib/python3.6/site-packages/starlette/concurrency.py", line 34, in run_in_threadpool
return await loop.run_in_executor(None, func, *args)
File "/opt/conda/envs/env/lib/python3.6/concurrent/futures/thread.py", line 56, in run
result = self.fn(*self.args, **self.kwargs)
File "/opt/conda/envs/env/lib/python3.6/site-packages/cortex_internal/serve/serve.py", line 200, in predict
prediction = predictor_impl.predict(**kwargs)
File "/mnt/project/serving/cortex_server.py", line 10, in predict
return self.client.predict({"waveform": np.array(payload["audio"]).astype("float32")})
File "/opt/conda/envs/env/lib/python3.6/site-packages/cortex_internal/lib/client/tensorflow.py", line
114, in predict
return self._run_inference(model_input, consts.SINGLE_MODEL_NAME, model_version)
File "/opt/conda/envs/env/lib/python3.6/site-packages/cortex_internal/lib/client/tensorflow.py", line
164, in _run_inference
return self._client.predict(model_input, model_name, model_version)
File "/opt/conda/envs/env/lib/python3.6/site-packages/cortex_internal/lib/model/tfs.py", line 376, in
predict
response_proto = self._pred.Predict(prediction_request, timeout=timeout)
File "/opt/conda/envs/env/lib/python3.6/site-packages/grpc/_channel.py", line 826, in __call__
return _end_unary_response_blocking(state, call, False, None)
File "/opt/conda/envs/env/lib/python3.6/site-packages/grpc/_channel.py", line 729, in _end_unary_response_blocking
raise _InactiveRpcError(state)
grpc._channel._InactiveRpcError: <_InactiveRpcError of RPC that terminated with:
status = StatusCode.RESOURCE_EXHAUSTED
details = "Received message larger than max (102484524 vs. 4194304)"
debug_error_string = "{"created":"@1608852614.937822193","description":"Received message larger
than max (102484524 vs. 4194304)","file":"src/core/ext/filters/message_size/message_size_filter.cc","file_line":203,"grpc_status":8}"
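From the error it looks like the gRPC channel between the predictor and TensorFlow Serving is still using gRPC's default 4 MB limit (my payload is ~102 MB vs. the 4194304-byte max). For reference, this is the kind of channel option I'd expect to need somewhere in that chain; it's just the standard grpc-python options, not Cortex's actual code, and the target address below is a placeholder:

import grpc

# Standard grpc-python way to raise the default 4 MB message size limit.
# Whether/where Cortex exposes this for its TensorFlow Serving client is
# exactly what I'm asking about.
MAX_MESSAGE_BYTES = 128 * 1024 * 1024  # comfortably above my ~100 MB payloads

channel = grpc.insecure_channel(
    "localhost:9000",  # placeholder TensorFlow Serving gRPC address
    options=[
        ("grpc.max_send_message_length", MAX_MESSAGE_BYTES),
        ("grpc.max_receive_message_length", MAX_MESSAGE_BYTES),
    ],
)

Is there a config field (or environment variable) that lets me set something equivalent on the server side, or do I need to chunk/compress the audio before sending it?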