From 7ba3b3df146a541988148746ca111b8586ef2b9e Mon Sep 17 00:00:00 2001
From: Nuno Bispo
Date: Thu, 15 Jan 2026 15:12:02 +0100
Subject: [PATCH 1/5] Add requirements for python-dotenv and requests

---
 .../requirements.txt | 2 ++
 1 file changed, 2 insertions(+)
 create mode 100644 how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/requirements.txt

diff --git a/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/requirements.txt b/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/requirements.txt
new file mode 100644
index 0000000000..d2c6cdac80
--- /dev/null
+++ b/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/requirements.txt
@@ -0,0 +1,2 @@
+python-dotenv==1.2.1
+requests==2.32.5

From e124b30e4afea92aa832a3f9c8620227bc43dd56 Mon Sep 17 00:00:00 2001
From: Nuno Bispo
Date: Thu, 5 Feb 2026 10:29:17 +0100
Subject: [PATCH 2/5] Added the materials

---
 .../ask_auto_model.py     | 20 ++++++++++++
 .../ask_specific_model.py | 24 ++++++++++++++
 .../fallback_models.py    | 32 +++++++++++++++++++
 .../get_models.py         | 14 ++++++++
 .../requirements.txt      |  3 +-
 .../route_requests.py     | 32 +++++++++++++++++++
 6 files changed, 123 insertions(+), 2 deletions(-)
 create mode 100644 how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/ask_auto_model.py
 create mode 100644 how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/ask_specific_model.py
 create mode 100644 how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/fallback_models.py
 create mode 100644 how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/get_models.py
 create mode 100644 how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/route_requests.py

diff --git a/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/ask_auto_model.py b/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/ask_auto_model.py
new file mode 100644
index 0000000000..f4b61f2aaa
--- /dev/null
+++ b/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/ask_auto_model.py
@@ -0,0 +1,20 @@
+import os
+import requests
+
+OPENROUTER_API_URL = "https://openrouter.ai/api/v1/chat/completions"
+
+api_key = os.getenv("OPENROUTER_API_KEY")
+
+headers = {
+    "Authorization": f"Bearer {api_key}",
+    "Content-Type": "application/json"
+}
+payload = {
+    "model": "openrouter/auto",
+    "messages": [{"role": "user", "content": "Say hello in one sentence."}]
+}
+response = requests.post(OPENROUTER_API_URL, headers=headers, json=payload)
+data = response.json()
+
+print(f"Model: {data.get('model')}")
+print(f"Response: {data['choices'][0]['message']['content']}")
\ No newline at end of file
diff --git a/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/ask_specific_model.py b/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/ask_specific_model.py
new file mode 100644
index 0000000000..094c8fd874
--- /dev/null
+++ b/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/ask_specific_model.py
@@ -0,0 +1,24 @@
+import os
+import requests
+
+OPENROUTER_API_URL = "https://openrouter.ai/api/v1/chat/completions"
+
+api_key = os.getenv("OPENROUTER_API_KEY")
+
+headers = {
+    "Authorization": f"Bearer {api_key}",
+    "Content-Type": "application/json"
+}
+payload = {
+    "model": "openai/gpt-3.5-turbo",
+    "messages": [{"role": "user", "content": "Say hello in one sentence."}]
+}
+response = requests.post(OPENROUTER_API_URL, headers=headers, json=payload)
+data = response.json()
+
+if model := data.get('model'):
+    print(f"Model: {model} by {data['provider']}")
+    print(f"Response: {data['choices'][0]['message']['content']}")
+else:
+    print("No model found in the response.")
+    print(f"Response: {data}")
\ No newline at end of file
diff --git a/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/fallback_models.py b/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/fallback_models.py
new file mode 100644
index 0000000000..b0bd6181a1
--- /dev/null
+++ b/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/fallback_models.py
@@ -0,0 +1,32 @@
+import os
+import requests
+
+OPENROUTER_API_URL = "https://openrouter.ai/api/v1/chat/completions"
+
+api_key = os.getenv("OPENROUTER_API_KEY")
+
+def make_request_with_fallback(models_list, messages):
+    headers = {
+        "Authorization": f"Bearer {api_key}",
+        "Content-Type": "application/json"
+    }
+    payload = {"models": models_list, "messages": messages}
+
+    return requests.post(OPENROUTER_API_URL, headers=headers, json=payload)
+
+response = make_request_with_fallback(
+    models_list=[
+        "openai/gpt-5",
+        "openai/gpt-3.5-turbo",
+        "openai/gpt-3.5-turbo-16k"
+    ],
+    messages=[{"role": "user", "content": "What is the capital of France?"}]
+)
+
+data = response.json()
+if model := data.get('model'):
+    print(f"Model: {model} by {data['provider']}")
+    print(f"Response: {data['choices'][0]['message']['content']}")
+else:
+    print("No model found in the response.")
+    print(f"Response: {data}")
\ No newline at end of file
diff --git a/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/get_models.py b/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/get_models.py
new file mode 100644
index 0000000000..f4dcfe4616
--- /dev/null
+++ b/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/get_models.py
@@ -0,0 +1,14 @@
+import os
+import requests
+
+OPENROUTER_MODELS_URL = "https://openrouter.ai/api/v1/models"
+
+api_key = os.getenv("OPENROUTER_API_KEY")
+
+headers = {"Authorization": f"Bearer {api_key}"}
+response = requests.get(OPENROUTER_MODELS_URL, headers=headers)
+data = response.json()
+
+models = data.get("data", [])
+print(f"Success! Found {len(models)} models via OpenRouter.")
+print(f"Examples: {', '.join(m['id'] for m in models[:5])}")
\ No newline at end of file
diff --git a/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/requirements.txt b/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/requirements.txt
index d2c6cdac80..663bd1f6a2 100644
--- a/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/requirements.txt
+++ b/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/requirements.txt
@@ -1,2 +1 @@
-python-dotenv==1.2.1
-requests==2.32.5
+requests
\ No newline at end of file
diff --git a/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/route_requests.py b/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/route_requests.py
new file mode 100644
index 0000000000..04a423292b
--- /dev/null
+++ b/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/route_requests.py
@@ -0,0 +1,32 @@
+import os
+import requests
+
+OPENROUTER_API_URL = "https://openrouter.ai/api/v1/chat/completions"
+
+api_key = os.getenv("OPENROUTER_API_KEY")
+
+def make_request(model, messages, provider_config=None):
+    headers = {
+        "Authorization": f"Bearer {api_key}",
+        "Content-Type": "application/json"
+    }
+    payload = {"model": model, "messages": messages}
+    if provider_config:
+        payload["provider"] = provider_config
+
+    response = requests.post(OPENROUTER_API_URL, headers=headers, json=payload)
+    response.raise_for_status()
+    return response.json()
+
+data = make_request(
+    model="meta-llama/llama-3.1-70b-instruct",
+    messages=[{"role": "user", "content": "Explain AI in one sentence."}],
+    provider_config={"sort": "price"}
+)
+
+if model := data.get('model'):
+    print(f"Model: {model} by {data['provider']}")
+    print(f"Response: {data['choices'][0]['message']['content']}")
+else:
+    print("No model found in the response.")
+    print(f"Response: {data}")
\ No newline at end of file

From bbb1efd5157e39d14b627be47d494bcc3389bb7d Mon Sep 17 00:00:00 2001
From: Philipp Acsany <68116180+acsany@users.noreply.github.com>
Date: Fri, 6 Feb 2026 09:29:01 +0000
Subject: [PATCH 3/5] Rename folder and add README

---
 openrouter-api/README.md  | 3 +++
 .../ask_auto_model.py     | 0
 .../ask_specific_model.py | 0
 .../fallback_models.py    | 0
 .../get_models.py         | 0
 .../requirements.txt      | 0
 .../route_requests.py     | 0
 7 files changed, 3 insertions(+)
 create mode 100644 openrouter-api/README.md
 rename {how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script => openrouter-api}/ask_auto_model.py (100%)
 rename {how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script => openrouter-api}/ask_specific_model.py (100%)
 rename {how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script => openrouter-api}/fallback_models.py (100%)
 rename {how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script => openrouter-api}/get_models.py (100%)
 rename {how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script => openrouter-api}/requirements.txt (100%)
 rename {how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script => openrouter-api}/route_requests.py (100%)

diff --git a/openrouter-api/README.md b/openrouter-api/README.md
new file mode 100644
index 0000000000..df390af6a7
--- /dev/null
+++ b/openrouter-api/README.md
@@ -0,0 +1,3 @@
+# How to Use the OpenRouter API to Access Multiple AI Models via Python
+
+This folder contains supporting materials for the Real Python tutorial [How to Use the OpenRouter API to Access Multiple AI Models via Python](https://realpython.com/openrouter-api/).
diff --git a/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/ask_auto_model.py b/openrouter-api/ask_auto_model.py
similarity index 100%
rename from how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/ask_auto_model.py
rename to openrouter-api/ask_auto_model.py
diff --git a/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/ask_specific_model.py b/openrouter-api/ask_specific_model.py
similarity index 100%
rename from how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/ask_specific_model.py
rename to openrouter-api/ask_specific_model.py
diff --git a/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/fallback_models.py b/openrouter-api/fallback_models.py
similarity index 100%
rename from how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/fallback_models.py
rename to openrouter-api/fallback_models.py
diff --git a/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/get_models.py b/openrouter-api/get_models.py
similarity index 100%
rename from how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/get_models.py
rename to openrouter-api/get_models.py
diff --git a/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/requirements.txt b/openrouter-api/requirements.txt
similarity index 100%
rename from how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/requirements.txt
rename to openrouter-api/requirements.txt
diff --git a/how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/route_requests.py b/openrouter-api/route_requests.py
similarity index 100%
rename from how-to-use-openrouter-to-access-multiple-ai-models-in-one-python-script/route_requests.py
rename to openrouter-api/route_requests.py

From 50d142f512dd4b374932b8d7427232aeeacc4e05 Mon Sep 17 00:00:00 2001
From: Philipp Acsany <68116180+acsany@users.noreply.github.com>
Date: Fri, 6 Feb 2026 09:33:55 +0000
Subject: [PATCH 4/5] Format files

---
 openrouter-api/ask_auto_model.py     |  9 +++------
 openrouter-api/ask_specific_model.py | 11 ++++-------
 openrouter-api/fallback_models.py    | 19 +++++++------------
 openrouter-api/get_models.py         |  2 +-
 openrouter-api/route_requests.py     | 13 ++++++-------
 5 files changed, 21 insertions(+), 33 deletions(-)

diff --git a/openrouter-api/ask_auto_model.py b/openrouter-api/ask_auto_model.py
index f4b61f2aaa..b95a50d9c2 100644
--- a/openrouter-api/ask_auto_model.py
+++ b/openrouter-api/ask_auto_model.py
@@ -5,16 +5,13 @@
 
 api_key = os.getenv("OPENROUTER_API_KEY")
 
-headers = {
-    "Authorization": f"Bearer {api_key}",
-    "Content-Type": "application/json"
-}
+headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}
 payload = {
     "model": "openrouter/auto",
-    "messages": [{"role": "user", "content": "Say hello in one sentence."}]
+    "messages": [{"role": "user", "content": "Say hello in one sentence."}],
 }
 response = requests.post(OPENROUTER_API_URL, headers=headers, json=payload)
 data = response.json()
 
 print(f"Model: {data.get('model')}")
-print(f"Response: {data['choices'][0]['message']['content']}")
\ No newline at end of file
+print(f"Response: {data['choices'][0]['message']['content']}")
diff --git a/openrouter-api/ask_specific_model.py b/openrouter-api/ask_specific_model.py
index 094c8fd874..2b25bb9e8f 100644
--- a/openrouter-api/ask_specific_model.py
+++ b/openrouter-api/ask_specific_model.py
@@ -5,20 +5,17 @@
 
 api_key = os.getenv("OPENROUTER_API_KEY")
 
-headers = {
-    "Authorization": f"Bearer {api_key}",
-    "Content-Type": "application/json"
-}
+headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}
 payload = {
     "model": "openai/gpt-3.5-turbo",
-    "messages": [{"role": "user", "content": "Say hello in one sentence."}]
+    "messages": [{"role": "user", "content": "Say hello in one sentence."}],
 }
 response = requests.post(OPENROUTER_API_URL, headers=headers, json=payload)
 data = response.json()
 
-if model := data.get('model'):
+if model := data.get("model"):
     print(f"Model: {model} by {data['provider']}")
     print(f"Response: {data['choices'][0]['message']['content']}")
 else:
     print("No model found in the response.")
-    print(f"Response: {data}")
\ No newline at end of file
+    print(f"Response: {data}")
diff --git a/openrouter-api/fallback_models.py b/openrouter-api/fallback_models.py
index b0bd6181a1..6363c28b54 100644
--- a/openrouter-api/fallback_models.py
+++ b/openrouter-api/fallback_models.py
@@ -5,28 +5,23 @@
 
 api_key = os.getenv("OPENROUTER_API_KEY")
 
+
 def make_request_with_fallback(models_list, messages):
-    headers = {
-        "Authorization": f"Bearer {api_key}",
-        "Content-Type": "application/json"
-    }
+    headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}
     payload = {"models": models_list, "messages": messages}
 
     return requests.post(OPENROUTER_API_URL, headers=headers, json=payload)
 
+
 response = make_request_with_fallback(
-    models_list=[
-        "openai/gpt-5",
-        "openai/gpt-3.5-turbo",
-        "openai/gpt-3.5-turbo-16k"
-    ],
-    messages=[{"role": "user", "content": "What is the capital of France?"}]
+    models_list=["openai/gpt-5", "openai/gpt-3.5-turbo", "openai/gpt-3.5-turbo-16k"],
+    messages=[{"role": "user", "content": "What is the capital of France?"}],
 )
 
 data = response.json()
-if model := data.get('model'):
+if model := data.get("model"):
     print(f"Model: {model} by {data['provider']}")
     print(f"Response: {data['choices'][0]['message']['content']}")
 else:
     print("No model found in the response.")
-    print(f"Response: {data}")
\ No newline at end of file
+    print(f"Response: {data}")
diff --git a/openrouter-api/get_models.py b/openrouter-api/get_models.py
index f4dcfe4616..b533d6d7e4 100644
--- a/openrouter-api/get_models.py
+++ b/openrouter-api/get_models.py
@@ -11,4 +11,4 @@
 
 models = data.get("data", [])
 print(f"Success! Found {len(models)} models via OpenRouter.")
-print(f"Examples: {', '.join(m['id'] for m in models[:5])}")
\ No newline at end of file
+print(f"Examples: {', '.join(m['id'] for m in models[:5])}")
diff --git a/openrouter-api/route_requests.py b/openrouter-api/route_requests.py
index 04a423292b..c02ee33dc9 100644
--- a/openrouter-api/route_requests.py
+++ b/openrouter-api/route_requests.py
@@ -5,11 +5,9 @@
 
 api_key = os.getenv("OPENROUTER_API_KEY")
 
+
 def make_request(model, messages, provider_config=None):
-    headers = {
-        "Authorization": f"Bearer {api_key}",
-        "Content-Type": "application/json"
-    }
+    headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}
     payload = {"model": model, "messages": messages}
     if provider_config:
         payload["provider"] = provider_config
@@ -18,15 +16,16 @@ def make_request(model, messages, provider_config=None):
     response.raise_for_status()
     return response.json()
 
+
 data = make_request(
     model="meta-llama/llama-3.1-70b-instruct",
     messages=[{"role": "user", "content": "Explain AI in one sentence."}],
-    provider_config={"sort": "price"}
+    provider_config={"sort": "price"},
 )
 
-if model := data.get('model'):
+if model := data.get("model"):
     print(f"Model: {model} by {data['provider']}")
     print(f"Response: {data['choices'][0]['message']['content']}")
 else:
     print("No model found in the response.")
-    print(f"Response: {data}")
\ No newline at end of file
+    print(f"Response: {data}")

From e3e3653c0c836b7e1f7268659db8ceca02928fd9 Mon Sep 17 00:00:00 2001
From: Philipp
Date: Fri, 6 Feb 2026 10:40:10 +0100
Subject: [PATCH 5/5] Format again

---
 openrouter-api/ask_auto_model.py     |  5 ++++-
 openrouter-api/ask_specific_model.py |  5 ++++-
 openrouter-api/fallback_models.py    | 11 +++++++++--
 openrouter-api/route_requests.py     |  5 ++++-
 4 files changed, 21 insertions(+), 5 deletions(-)

diff --git a/openrouter-api/ask_auto_model.py b/openrouter-api/ask_auto_model.py
index b95a50d9c2..517b767354 100644
--- a/openrouter-api/ask_auto_model.py
+++ b/openrouter-api/ask_auto_model.py
@@ -5,7 +5,10 @@
 
 api_key = os.getenv("OPENROUTER_API_KEY")
 
-headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}
+headers = {
+    "Authorization": f"Bearer {api_key}",
+    "Content-Type": "application/json",
+}
 payload = {
     "model": "openrouter/auto",
     "messages": [{"role": "user", "content": "Say hello in one sentence."}],
diff --git a/openrouter-api/ask_specific_model.py b/openrouter-api/ask_specific_model.py
index 2b25bb9e8f..441b60dfd5 100644
--- a/openrouter-api/ask_specific_model.py
+++ b/openrouter-api/ask_specific_model.py
@@ -5,7 +5,10 @@
 
 api_key = os.getenv("OPENROUTER_API_KEY")
 
-headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}
+headers = {
+    "Authorization": f"Bearer {api_key}",
+    "Content-Type": "application/json",
+}
 payload = {
     "model": "openai/gpt-3.5-turbo",
     "messages": [{"role": "user", "content": "Say hello in one sentence."}],
diff --git a/openrouter-api/fallback_models.py b/openrouter-api/fallback_models.py
index 6363c28b54..5427554095 100644
--- a/openrouter-api/fallback_models.py
+++ b/openrouter-api/fallback_models.py
@@ -7,14 +7,21 @@
 
 
 def make_request_with_fallback(models_list, messages):
-    headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}
+    headers = {
+        "Authorization": f"Bearer {api_key}",
+        "Content-Type": "application/json",
+    }
     payload = {"models": models_list, "messages": messages}
 
     return requests.post(OPENROUTER_API_URL, headers=headers, json=payload)
 
 
 response = make_request_with_fallback(
-    models_list=["openai/gpt-5", "openai/gpt-3.5-turbo", "openai/gpt-3.5-turbo-16k"],
+    models_list=[
+        "openai/gpt-5",
+        "openai/gpt-3.5-turbo",
+        "openai/gpt-3.5-turbo-16k",
+    ],
     messages=[{"role": "user", "content": "What is the capital of France?"}],
 )
 
diff --git a/openrouter-api/route_requests.py b/openrouter-api/route_requests.py
index c02ee33dc9..0203322295 100644
--- a/openrouter-api/route_requests.py
+++ b/openrouter-api/route_requests.py
@@ -7,7 +7,10 @@
 
 
 def make_request(model, messages, provider_config=None):
-    headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}
+    headers = {
+        "Authorization": f"Bearer {api_key}",
+        "Content-Type": "application/json",
+    }
     payload = {"model": model, "messages": messages}
     if provider_config:
         payload["provider"] = provider_config