Mirror of https://github.com/xtekky/gpt4free.git, synced 2025-12-24 13:07:53 +08:00.
Add StringableInference provider and update OpenaiTemplate for model retrieval
This commit is contained in:
31
g4f/Provider/StringableInference.py
Normal file
31
g4f/Provider/StringableInference.py
Normal file
@@ -0,0 +1,31 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import secrets
|
||||
import string
|
||||
|
||||
from .template import OpenaiTemplate
|
||||
|
||||
class StringableInference(OpenaiTemplate):
    """Provider for the Stringable Inference OpenAI-compatible API."""

    label = "Stringable Inference"
    url = "https://stringable-inference.onrender.com"
    api_base = "https://stringableinf.com/api"
    api_endpoint = "https://stringableinf.com/api/v1/chat/completions"

    working = True
    active_by_default = True
    default_model = "deepseek-v3.2"
    default_vision_model = "gpt-oss-120b"

    @classmethod
    def get_headers(cls, stream: bool, api_key: str = None, headers: dict = None) -> dict:
        """Assemble HTTP headers for a request against the API.

        Args:
            stream: When True, advertise SSE support via the Accept header.
            api_key: Optional bearer token; added as an Authorization header
                only when truthy.
            headers: Optional extra headers; these are merged last and so
                override any of the defaults above.

        Returns:
            A dict of request headers.
        """
        # Start from the fixed defaults, then layer optional entries on top.
        result = {
            "Accept": "text/event-stream" if stream else "application/json",
            "Content-Type": "application/json",
            "HTTP-Referer": "https://g4f.dev/",
            "X-Title": "G4F Python",
        }
        if api_key:
            result["Authorization"] = f"Bearer {api_key}"
        # Caller-supplied headers win over the defaults, matching the
        # original spread order.
        if headers is not None:
            result.update(headers)
        return result
|
||||
@@ -57,6 +57,7 @@ from .PollinationsAI import PollinationsAI
|
||||
from .PollinationsImage import PollinationsImage
|
||||
from .Startnest import Startnest
|
||||
from .Qwen import Qwen
|
||||
from .StringableInference import StringableInference
|
||||
from .TeachAnything import TeachAnything
|
||||
from .WeWordle import WeWordle
|
||||
from .Yqcloud import Yqcloud
|
||||
|
||||
@@ -46,7 +46,7 @@ class OpenaiTemplate(AsyncGeneratorProvider, ProviderModelMixin, RaiseErrorMixin
|
||||
response = requests.get(f"{api_base}/models", headers=cls.get_headers(False, api_key), verify=cls.ssl)
|
||||
raise_for_status(response)
|
||||
data = response.json()
|
||||
data = data.get("data") if isinstance(data, dict) else data
|
||||
data = data.get("data", data.get("models")) if isinstance(data, dict) else data
|
||||
if (not cls.needs_auth or cls.models_needs_auth or api_key) and data:
|
||||
cls.live += 1
|
||||
cls.image_models = [model.get("name") if cls.use_model_names else model.get("id", model.get("name")) for model in data if model.get("image") or model.get("type") == "image" or model.get("supports_images")]
|
||||
@@ -144,10 +144,10 @@ class OpenaiTemplate(AsyncGeneratorProvider, ProviderModelMixin, RaiseErrorMixin
|
||||
**extra_body
|
||||
)
|
||||
if api_endpoint is None:
|
||||
if api_base:
|
||||
api_endpoint = f"{api_base.rstrip('/')}/chat/completions"
|
||||
if api_endpoint is None:
|
||||
api_endpoint = cls.api_endpoint
|
||||
if api_endpoint is None:
|
||||
api_endpoint = f"{api_base.rstrip('/')}/chat/completions"
|
||||
yield JsonRequest.from_dict(data)
|
||||
async with session.post(api_endpoint, json=data, ssl=cls.ssl) as response:
|
||||
async for chunk in read_response(response, stream, prompt, cls.get_dict(), download_media):
|
||||
|
||||
@@ -27,6 +27,7 @@ from .Provider import (
|
||||
PollinationsAI,
|
||||
PollinationsImage,
|
||||
Qwen,
|
||||
StringableInference,
|
||||
TeachAnything,
|
||||
Together,
|
||||
WeWordle,
|
||||
@@ -154,15 +155,14 @@ default = Model(
|
||||
name = "",
|
||||
base_provider = "",
|
||||
best_provider = IterListProvider([
|
||||
StringableInference,
|
||||
OIVSCodeSer0501,
|
||||
OIVSCodeSer2,
|
||||
Blackbox,
|
||||
Copilot,
|
||||
DeepInfra,
|
||||
OperaAria,
|
||||
Startnest,
|
||||
GLM,
|
||||
Kimi,
|
||||
PollinationsAI,
|
||||
Qwen,
|
||||
Together,
|
||||
@@ -179,7 +179,7 @@ default_vision = VisionModel(
|
||||
name = "",
|
||||
base_provider = "",
|
||||
best_provider = IterListProvider([
|
||||
Blackbox,
|
||||
StringableInference,
|
||||
DeepInfra,
|
||||
OIVSCodeSer0501,
|
||||
OIVSCodeSer2,
|
||||
|
||||
Reference in New Issue
Block a user