llm:
  type: azure_openai_chat
  api_base: ${AZURE_OPENAI_API_BASE}
  api_version: ${AZURE_OPENAI_CHAT_API_VERSION}
  auth_type: api_key # or azure_managed_identity
  api_key: ${AZURE_OPENAI_CHAT_API_KEY}
  # audience: "https://cognitiveservices.azure.com/.default"
  # organization: <organization_id>
  model: ${AZURE_OPENAI_CHAT_MODEL}
  deployment_name: ${AZURE_OPENAI_CHAT_DEPLOYMENT_NAME}
  # encoding_model: cl100k_base # automatically set by tiktoken if left undefined
  model_supports_json: true # recommended if this is available for your model.
  concurrent_requests: 25 # max number of simultaneous LLM requests allowed
  async_mode: threaded # or asyncio
  retry_strategy: native
  max_retries: 10

embeddings:
  llm:
    type: azure_openai_embedding
    api_base: ${AZURE_OPENAI_API_BASE}
    api_version: ${AZURE_OPENAI_EMBEDDING_API_VERSION}
    auth_type: api_key # or azure_managed_identity
    api_key: ${AZURE_OPENAI_EMBEDDING_API_KEY}
    # audience: "https://cognitiveservices.azure.com/.default"
    # organization: <organization_id>
    model: ${AZURE_OPENAI_EMBEDDING_MODEL}
    deployment_name: ${AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME}
    # encoding_model: cl100k_base # automatically set by tiktoken if left undefined
    model_supports_json: true # recommended if this is available for your model.
    concurrent_requests: 25 # max number of simultaneous LLM requests allowed
    async_mode: threaded # or asyncio
    retry_strategy: native
    max_retries: 10

### Input settings ###