Runtime error: the Groq model `llama-3.1-70b-versatile` has been decommissioned, so the OpenAI-compatible chat completions call returns HTTP 400 (`model_decommissioned`). Fix: switch to a currently supported model per https://console.groq.com/docs/deprecations.

Exit code: 1. Reason: __init__.py", line 336, in wrapped_f return copy(f, *args, **kw) File "/usr/local/lib/python3.10/site-packages/tenacity/__init__.py", line 475, in __call__ do = self.iter(retry_state=retry_state) File "/usr/local/lib/python3.10/site-packages/tenacity/__init__.py", line 376, in iter result = action(retry_state) File "/usr/local/lib/python3.10/site-packages/tenacity/__init__.py", line 398, in <lambda> self._add_action_func(lambda rs: rs.outcome.result()) File "/usr/local/lib/python3.10/concurrent/futures/_base.py", line 451, in result return self.__get_result() File "/usr/local/lib/python3.10/concurrent/futures/_base.py", line 403, in __get_result raise self._exception File "/usr/local/lib/python3.10/site-packages/tenacity/__init__.py", line 478, in __call__ result = fn(*args, **kwargs) File "/usr/local/lib/python3.10/site-packages/llama_index/llms/openai/base.py", line 429, in _chat response = client.chat.completions.create( File "/usr/local/lib/python3.10/site-packages/openai/_utils/_utils.py", line 275, in wrapper return func(*args, **kwargs) File "/usr/local/lib/python3.10/site-packages/openai/resources/chat/completions.py", line 829, in create return self._post( File "/usr/local/lib/python3.10/site-packages/openai/_base_client.py", line 1280, in post return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)) File "/usr/local/lib/python3.10/site-packages/openai/_base_client.py", line 957, in request return self._request( File "/usr/local/lib/python3.10/site-packages/openai/_base_client.py", line 1061, in _request raise self._make_status_error_from_response(err.response) from None openai.BadRequestError: Error code: 400 - {'error': {'message': 'The model `llama-3.1-70b-versatile` has been decommissioned and is no longer supported. Please refer to https://console.groq.com/docs/deprecations for a recommendation on which model to use instead.', 'type': 'invalid_request_error', 'code': 'model_decommissioned'}}

Container logs:

Fetching error logs...