From 8516f167ea44c4b77cb0a6a8a8e5ac65bea0be30 Mon Sep 17 00:00:00 2001
From: michael m <55236695+michaelbrinkworth@users.noreply.github.com>
Date: Thu, 11 Dec 2025 08:05:14 +1000
Subject: [PATCH] feat: add AI Badgr as OpenAI-compatible provider

- Add AIBadgr provider class extending OpenaiTemplate
- API endpoint: https://aibadgr.com/api/v1
- Full support for streaming, system messages, and message history
- Add example usage script in etc/examples/aibadgr.py
- Provider requires API key authentication
---
 etc/examples/aibadgr.py             | 50 +++++++++++++++++++++++++++++
 g4f/Provider/needs_auth/AIBadgr.py  | 14 ++++++++
 g4f/Provider/needs_auth/__init__.py |  1 +
 3 files changed, 65 insertions(+)
 create mode 100644 etc/examples/aibadgr.py
 create mode 100644 g4f/Provider/needs_auth/AIBadgr.py

diff --git a/etc/examples/aibadgr.py b/etc/examples/aibadgr.py
new file mode 100644
index 00000000..d839b8a8
--- /dev/null
+++ b/etc/examples/aibadgr.py
@@ -0,0 +1,50 @@
+"""
+Example usage of AI Badgr provider.
+AI Badgr is an OpenAI-compatible API provider.
+Get your API key at: https://aibadgr.com/api-keys
+
+Usage:
+    export AIBADGR_API_KEY="your-api-key-here"
+    python aibadgr.py
+"""
+
+from g4f.client import Client
+from g4f.Provider import AIBadgr
+
+# Using AI Badgr with the g4f client
+client = Client(
+    provider=AIBadgr,
+    api_key="your-api-key-here"  # Or set AIBADGR_API_KEY environment variable
+)
+
+# Example 1: Simple chat completion
+print("Example 1: Simple chat completion")
+response = client.chat.completions.create(
+    model="gpt-4o-mini",  # AI Badgr supports OpenAI-compatible models
+    messages=[{"role": "user", "content": "Hello! What can you help me with?"}]
+)
+print(response.choices[0].message.content)
+print()
+
+# Example 2: Streaming response
+print("Example 2: Streaming response")
+response = client.chat.completions.create(
+    model="gpt-4o-mini",
+    messages=[{"role": "user", "content": "Count from 1 to 5"}],
+    stream=True
+)
+for chunk in response:
+    if chunk.choices[0].delta.content:
+        print(chunk.choices[0].delta.content, end="", flush=True)
+print("\n")
+
+# Example 3: With system message
+print("Example 3: With system message")
+response = client.chat.completions.create(
+    model="gpt-4o-mini",
+    messages=[
+        {"role": "system", "content": "You are a helpful assistant that speaks like a pirate."},
+        {"role": "user", "content": "Tell me about the weather"}
+    ]
+)
+print(response.choices[0].message.content)
diff --git a/g4f/Provider/needs_auth/AIBadgr.py b/g4f/Provider/needs_auth/AIBadgr.py
new file mode 100644
index 00000000..9580ebac
--- /dev/null
+++ b/g4f/Provider/needs_auth/AIBadgr.py
@@ -0,0 +1,14 @@
+from __future__ import annotations
+
+from ..template import OpenaiTemplate
+
+class AIBadgr(OpenaiTemplate):
+    label = "AI Badgr"
+    url = "https://aibadgr.com"
+    login_url = "https://aibadgr.com/api-keys"
+    api_base = "https://aibadgr.com/api/v1"
+    working = True
+    needs_auth = True
+    supports_stream = True
+    supports_system_message = True
+    supports_message_history = True
diff --git a/g4f/Provider/needs_auth/__init__.py b/g4f/Provider/needs_auth/__init__.py
index 0ded523c..fbd2c4f1 100644
--- a/g4f/Provider/needs_auth/__init__.py
+++ b/g4f/Provider/needs_auth/__init__.py
@@ -1,3 +1,4 @@
+from .AIBadgr import AIBadgr
 from .Anthropic import Anthropic
 from .Azure import Azure
 from .BingCreateImages import BingCreateImages