diff --git a/README.md b/README.md
index 764c265b..41620df3 100644
--- a/README.md
+++ b/README.md
@@ -77,6 +77,7 @@ Our library is LLM-agnostic: you could switch the example above to any inference
4+ inference providers via HfApiModel
+
```py
from smolagents import HfApiModel
@@ -88,6 +89,7 @@ model = HfApiModel(
Any LLM via [LiteLLM](litellm.ai)
+
```py
from smolagents import LiteLLMModel
@@ -100,6 +102,7 @@ model = LiteLLMModel(
OpenAI chat server
+
```py
from smolagents import OpenAIServerModel
@@ -112,12 +115,30 @@ model = OpenAIServerModel(
Local `transformers` model
+
```py
from smolagents import TransformersModel
model = TransformersModel(model_id="Qwen/Qwen2.5-Coder-32B-Instruct", max_new_tokens=4096)
```
+
+Azure models
+
+```py
+import os
+
+from smolagents import AzureOpenAIServerModel
+
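+# Deployment name, endpoint, API key, and API version are read from environment variables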
+model = AzureOpenAIServerModel(
+    model_id=os.environ.get("AZURE_OPENAI_MODEL"),
+ azure_endpoint=os.environ.get("AZURE_OPENAI_ENDPOINT"),
+ api_key=os.environ.get("AZURE_OPENAI_API_KEY"),
+ api_version=os.environ.get("OPENAI_API_VERSION")
+)
+```
+
## Command Line Interface