New provider added ChatHub

This commit is contained in:
kqlio67
2024-09-27 12:13:11 +03:00
parent cfe5acc152
commit 105b4aac75
5 changed files with 141 additions and 32 deletions

View File

@@ -28,6 +28,7 @@
|[chatgpt4online.org](https://chatgpt4online.org)|`g4f.Provider.Chatgpt4Online`|`gpt-4`|❌|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
|[chatgpt4o.one](https://chatgpt4o.one)|`g4f.Provider.Chatgpt4o`|`gpt-4o-mini`|❌|❌|❌|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
|[chatgptfree.ai](https://chatgptfree.ai)|`g4f.Provider.ChatgptFree`|`gpt-4o-mini`|❌|❌|?|![Cloudflare](https://img.shields.io/badge/Cloudflare-f48d37)|❌|
|[app.chathub.gg](https://app.chathub.gg)|`g4f.Provider.ChatHub`|`llama-3.1-8b, mixtral-8x7b, gemma-2, sonar-online`|❌|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
|[duckduckgo.com](https://duckduckgo.com/duckchat/v1/chat)|`g4f.Provider.DDG`|`gpt-4o-mini, claude-3-haiku, llama-3.1-70b, mixtral-8x7b`|❌|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
|[deepinfra.com](https://deepinfra.com)|`g4f.Provider.DeepInfra`|✔|❌|❌|✔|![Unknown](https://img.shields.io/badge/Unknown-grey)|✔|
|[deepinfra.com/chat](https://deepinfra.com/chat)|`g4f.Provider.DeepInfraChat`|`llama-3.1-405b, llama-3.1-70b, Llama-3.1-8B, mixtral-8x22b, mixtral-8x7b, wizardlm-2-8x22b, wizardlm-2-7b, qwen-2-72b, phi-3-medium-4k, gemma-2b-27b, minicpm-llama-3-v2.5, mistral-7b, lzlv_70b, openchat-3.6-8b, phind-codellama-34b-v2, dolphin-2.9.1-llama-3-70b`|❌|`minicpm-llama-3-v2.5`|❌|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
@@ -55,7 +56,7 @@
|[chatgpt.com](https://chatgpt.com/)|`g4f.Provider.OpenaiChat`|`gpt-4o, gpt-4o-mini, gpt-4`|❌|✔|?|![Unknown](https://img.shields.io/badge/Unknown-grey)|✔|
|[www.perplexity.ai](https://www.perplexity.ai)|`g4f.Provider.PerplexityAi`|✔|❌|❌|?|![Disabled](https://img.shields.io/badge/Disabled-red)|❌|
|[perplexity.ai](https://www.perplexity.ai)|`g4f.Provider.PerplexityApi`|✔|❌|❌|?|![Unknown](https://img.shields.io/badge/Unknown-grey)|✔|
|[labs.perplexity.ai](https://labs.perplexity.ai)|`g4f.Provider.PerplexityLabs`|`llama-3.1-8b, llama-3.1-70b`|❌|❌|?|![Cloudflare](https://img.shields.io/badge/Cloudflare-f48d37)|❌|
|[labs.perplexity.ai](https://labs.perplexity.ai)|`g4f.Provider.PerplexityLabs`|`sonar-online, sonar-chat, llama-3.1-8b, llama-3.1-70b`|❌|❌|?|![Cloudflare](https://img.shields.io/badge/Cloudflare-f48d37)|❌|
|[pi.ai/talk](https://pi.ai/talk)|`g4f.Provider.Pi`|`pi`|❌|❌|?|![Unknown](https://img.shields.io/badge/Unknown-grey)|❌|
|[]()|`g4f.Provider.Pizzagpt`|`gpt-4o-mini`|❌|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
|[poe.com](https://poe.com)|`g4f.Provider.Poe`|✔|❌|❌|?|![Unknown](https://img.shields.io/badge/Unknown-grey)|✔|

84
g4f/Provider/ChatHub.py Normal file
View File

@@ -0,0 +1,84 @@
from __future__ import annotations
import json
from aiohttp import ClientSession
from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from .helper import format_prompt
class ChatHub(AsyncGeneratorProvider, ProviderModelMixin):
    """Async streaming provider backed by app.chathub.gg's chat completion API."""
    label = "ChatHub"
    url = "https://app.chathub.gg"
    api_endpoint = "https://app.chathub.gg/api/v3/chat/completions"
    working = True
    supports_stream = True
    supports_system_message = True
    supports_message_history = True

    default_model = 'meta/llama3.1-8b'
    models = [
        'meta/llama3.1-8b',
        'mistral/mixtral-8x7b',
        'google/gemma-2',
        'perplexity/sonar-online',
    ]

    # Short user-facing names mapped to the provider's internal model ids.
    model_aliases = {
        "llama-3.1-8b": "meta/llama3.1-8b",
        "mixtral-8x7b": "mistral/mixtral-8x7b",
        "gemma-2": "google/gemma-2",
        "sonar-online": "perplexity/sonar-online",
    }

    @classmethod
    def get_model(cls, model: str) -> str:
        """Resolve *model* (full id or alias) to a supported model id.

        Unknown names silently fall back to ``default_model``.
        """
        if model in cls.models:
            return model
        return cls.model_aliases.get(model, cls.default_model)

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        """Yield response text chunks streamed from the ChatHub endpoint.

        Args:
            model: Model name or alias; resolved via :meth:`get_model`.
            messages: Conversation history, flattened into a single prompt
                with :func:`format_prompt`.
            proxy: Optional proxy URL forwarded to aiohttp.

        Raises:
            aiohttp.ClientResponseError: If the server answers with a
                non-success HTTP status.
        """
        model = cls.get_model(model)

        headers = {
            'accept': '*/*',
            'accept-language': 'en-US,en;q=0.9',
            'content-type': 'application/json',
            'origin': cls.url,
            'referer': f"{cls.url}/chat/cloud-llama3.1-8b",
            'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36',
            'x-app-id': 'web'
        }

        async with ClientSession(headers=headers) as session:
            prompt = format_prompt(messages)
            # Renamed from `data` so the request payload is not shadowed by
            # the parsed SSE events below.
            payload = {
                "model": model,
                "messages": [{"role": "user", "content": prompt}],
                "tools": []
            }
            async with session.post(cls.api_endpoint, json=payload, proxy=proxy) as response:
                response.raise_for_status()
                # The endpoint streams server-sent events: lines of "data:{json}".
                async for line in response.content:
                    if not line:
                        continue
                    decoded_line = line.decode('utf-8')
                    if not decoded_line.startswith('data:'):
                        continue
                    try:
                        event = json.loads(decoded_line[5:])
                        if event['type'] == 'text-delta':
                            yield event['textDelta']
                        elif event['type'] == 'done':
                            break
                    except (json.JSONDecodeError, KeyError):
                        # Skip malformed or unexpected events instead of
                        # aborting the whole stream (KeyError was previously
                        # uncaught and could kill the generator mid-response).
                        continue

View File

@@ -24,10 +24,10 @@ class PerplexityLabs(AsyncGeneratorProvider, ProviderModelMixin):
]
model_aliases = {
"llama-3.1-8b": "llama-3.1-sonar-large-128k-online",
"llama-3.1-8b": "sonar-small-128k-online",
"llama-3.1-8b": "llama-3.1-sonar-large-128k-chat",
"llama-3.1-8b": "llama-3.1-sonar-small-128k-chat",
"sonar-online": "llama-3.1-sonar-large-128k-online",
"sonar-online": "sonar-small-128k-online",
"sonar-chat": "llama-3.1-sonar-large-128k-chat",
"sonar-chat": "llama-3.1-sonar-small-128k-chat",
"llama-3.1-8b": "llama-3.1-8b-instruct",
"llama-3.1-70b": "llama-3.1-70b-instruct",
}

View File

@@ -27,6 +27,7 @@ from .Chatgpt4Online import Chatgpt4Online
from .Chatgpt4o import Chatgpt4o
from .ChatGptEs import ChatGptEs
from .ChatgptFree import ChatgptFree
from .ChatHub import ChatHub
from .DDG import DDG
from .DeepInfra import DeepInfra
from .DeepInfraChat import DeepInfraChat

View File

@@ -16,6 +16,7 @@ from .Provider import (
Chatgpt4Online,
ChatGptEs,
ChatgptFree,
ChatHub,
DDG,
DeepInfra,
DeepInfraChat,
@@ -96,9 +97,7 @@ default = Model(
gpt_3 = Model(
name = 'gpt-3',
base_provider = 'OpenAI',
best_provider = IterListProvider([
Nexra,
])
best_provider = Nexra
)
# gpt-3.5
@@ -167,7 +166,7 @@ meta = Model(
llama_2_13b = Model(
name = "llama-2-13b",
base_provider = "Meta Llama",
best_provider = IterListProvider([Airforce])
best_provider = Airforce
)
# llama 3
@@ -193,7 +192,7 @@ llama_3 = Model(
llama_3_1_8b = Model(
name = "llama-3.1-8b",
base_provider = "Meta Llama",
best_provider = IterListProvider([Blackbox, DeepInfraChat, Airforce, PerplexityLabs])
best_provider = IterListProvider([Blackbox, DeepInfraChat, ChatHub, Airforce, PerplexityLabs])
)
llama_3_1_70b = Model(
@@ -225,7 +224,7 @@ mistral_7b = Model(
mixtral_8x7b = Model(
name = "mixtral-8x7b",
base_provider = "Mistral",
best_provider = IterListProvider([DDG, ReplicateHome, DeepInfraChat, Airforce, DeepInfra])
best_provider = IterListProvider([DDG, ReplicateHome, DeepInfraChat, ChatHub, Airforce, DeepInfra])
)
mixtral_8x22b = Model(
@@ -245,7 +244,7 @@ mistral_nemo = Model(
mixtral_8x7b_dpo = Model(
name = "mixtral-8x7b-dpo",
base_provider = "NousResearch",
best_provider = IterListProvider([Airforce])
best_provider = Airforce
)
hermes_3 = Model(
@@ -259,7 +258,7 @@ hermes_3 = Model(
phi_3_medium_4k = Model(
name = "phi-3-medium-4k",
base_provider = "Microsoft",
best_provider = IterListProvider([DeepInfraChat])
best_provider = DeepInfraChat
)
phi_3_5_mini = Model(
@@ -295,7 +294,7 @@ gemini = Model(
gemma_2b_9b = Model(
name = 'gemma-2b-9b',
base_provider = 'Google',
best_provider = IterListProvider([Airforce])
best_provider = Airforce
)
gemma_2b_27b = Model(
@@ -313,37 +312,43 @@ gemma_2b = Model(
])
)
gemma_2 = Model(
name = 'gemma-2',
base_provider = 'Google',
best_provider = ChatHub
)
### Anthropic ###
claude_2 = Model(
name = 'claude-2',
base_provider = 'Anthropic',
best_provider = IterListProvider([You])
best_provider = You
)
claude_2_0 = Model(
name = 'claude-2.0',
base_provider = 'Anthropic',
best_provider = IterListProvider([Liaobots])
best_provider = Liaobots
)
claude_2_1 = Model(
name = 'claude-2.1',
base_provider = 'Anthropic',
best_provider = IterListProvider([Liaobots])
best_provider = Liaobots
)
# claude 3
claude_3_opus = Model(
name = 'claude-3-opus',
base_provider = 'Anthropic',
best_provider = IterListProvider([Liaobots])
best_provider = Liaobots
)
claude_3_sonnet = Model(
name = 'claude-3-sonnet',
base_provider = 'Anthropic',
best_provider = IterListProvider([Liaobots])
best_provider = Liaobots
)
claude_3_haiku = Model(
@@ -390,7 +395,7 @@ reka_core = Model(
blackbox = Model(
name = 'blackbox',
base_provider = 'Blackbox AI',
best_provider = IterListProvider([Blackbox])
best_provider = Blackbox
)
@@ -406,7 +411,7 @@ dbrx_instruct = Model(
command_r_plus = Model(
name = 'command-r-plus',
base_provider = 'CohereForAI',
best_provider = IterListProvider([HuggingChat])
best_provider = HuggingChat
)
@@ -422,19 +427,19 @@ sparkdesk_v1_1 = Model(
qwen_1_5_14b = Model(
name = 'qwen-1.5-14b',
base_provider = 'Qwen',
best_provider = IterListProvider([FreeChatgpt])
best_provider = FreeChatgpt
)
qwen_1_5_72b = Model(
name = 'qwen-1.5-72b',
base_provider = 'Qwen',
best_provider = IterListProvider([Airforce])
best_provider = Airforce
)
qwen_1_5_110b = Model(
name = 'qwen-1.5-110b',
base_provider = 'Qwen',
best_provider = IterListProvider([Airforce])
best_provider = Airforce
)
qwen_2_72b = Model(
@@ -446,7 +451,7 @@ qwen_2_72b = Model(
qwen_turbo = Model(
name = 'qwen-turbo',
base_provider = 'Qwen',
best_provider = IterListProvider([Bixin123])
best_provider = Bixin123
)
qwen = Model(
@@ -462,13 +467,13 @@ qwen = Model(
glm_3_6b = Model(
name = 'glm-3-6b',
base_provider = 'Zhipu AI',
best_provider = IterListProvider([FreeChatgpt])
best_provider = FreeChatgpt
)
glm_4_9b = Model(
name = 'glm-4-9B',
base_provider = 'Zhipu AI',
best_provider = IterListProvider([FreeChatgpt])
best_provider = FreeChatgpt
)
glm_4 = Model(
@@ -484,13 +489,13 @@ glm_4 = Model(
yi_1_5_9b = Model(
name = 'yi-1.5-9b',
base_provider = '01-ai',
best_provider = IterListProvider([FreeChatgpt])
best_provider = FreeChatgpt
)
yi_34b = Model(
name = 'yi-34b',
base_provider = '01-ai',
best_provider = IterListProvider([Airforce])
best_provider = Airforce
)
@@ -498,7 +503,7 @@ yi_34b = Model(
solar_1_mini = Model(
name = 'solar-1-mini',
base_provider = 'Upstage',
best_provider = IterListProvider([Upstage])
best_provider = Upstage
)
solar_10_7b = Model(
@@ -525,14 +530,14 @@ pi = Model(
deepseek = Model(
name = 'deepseek',
base_provider = 'DeepSeek',
best_provider = IterListProvider([Airforce])
best_provider = Airforce
)
### WizardLM ###
wizardlm_2_7b = Model(
name = 'wizardlm-2-7b',
base_provider = 'WizardLM',
best_provider = IterListProvider([DeepInfraChat])
best_provider = DeepInfraChat
)
wizardlm_2_8x22b = Model(
@@ -610,6 +615,19 @@ grok_2_mini = Model(
best_provider = Liaobots
)
# Perplexity
sonar_online = Model(
name = 'sonar-online',
base_provider = 'Perplexity',
best_provider = IterListProvider([ChatHub, PerplexityLabs])
)
sonar_chat = Model(
name = 'sonar-chat',
base_provider = 'Perplexity',
best_provider = PerplexityLabs
)
#############
@@ -817,6 +835,7 @@ class ModelUtils:
'gemma-2b': gemma_2b,
'gemma-2b-9b': gemma_2b_9b,
'gemma-2b-27b': gemma_2b_27b,
'gemma-2': gemma_2,
### Anthropic ###
@@ -928,6 +947,10 @@ class ModelUtils:
'grok-2': grok_2,
'grok-2-mini': grok_2_mini,
### Perplexity ###
'sonar-online': sonar_online,
'sonar-chat': sonar_chat,
#############