Mirror of https://github.com/xtekky/gpt4free.git
New provider added: ChatHub
@@ -28,6 +28,7 @@
 |[chatgpt4online.org](https://chatgpt4online.org)|`g4f.Provider.Chatgpt4Online`|`gpt-4`|❌|❌|✔||❌|
 |[chatgpt4o.one](https://chatgpt4o.one)|`g4f.Provider.Chatgpt4o`|`gpt-4o-mini`|❌|❌|❌||❌|
 |[chatgptfree.ai](https://chatgptfree.ai)|`g4f.Provider.ChatgptFree`|`gpt-4o-mini`|❌|❌|?||❌|
+|[app.chathub.gg](https://app.chathub.gg)|`g4f.Provider.ChatHub`|`llama-3.1-8b, mixtral-8x7b, gemma-2, sonar-online`|❌|❌|✔||❌|
 |[duckduckgo.com](https://duckduckgo.com/duckchat/v1/chat)|`g4f.Provider.DDG`|`gpt-4o-mini, claude-3-haiku, llama-3.1-70b, mixtral-8x7b`|❌|❌|✔||❌|
 |[deepinfra.com](https://deepinfra.com)|`g4f.Provider.DeepInfra`|✔|❌|❌|✔||✔|
 |[deepinfra.com/chat](https://deepinfra.com/chat)|`g4f.Provider.DeepInfraChat`|`llama-3.1-405b, llama-3.1-70b, Llama-3.1-8B, mixtral-8x22b, mixtral-8x7b, wizardlm-2-8x22b, wizardlm-2-7b, qwen-2-72b, phi-3-medium-4k, gemma-2b-27b, minicpm-llama-3-v2.5, mistral-7b, lzlv_70b, openchat-3.6-8b, phind-codellama-34b-v2, dolphin-2.9.1-llama-3-70b`|❌|`minicpm-llama-3-v2.5`|❌||❌|
@@ -55,7 +56,7 @@
 |[chatgpt.com](https://chatgpt.com/)|`g4f.Provider.OpenaiChat`|`gpt-4o, gpt-4o-mini, gpt-4`|❌|✔|||✔|
 |[www.perplexity.ai)](https://www.perplexity.ai)|`g4f.Provider.PerplexityAi`|✔|❌|❌|?||❌|
 |[perplexity.ai](https://www.perplexity.ai)|`g4f.Provider.PerplexityApi`|✔|❌|❌|?||✔|
-|[labs.perplexity.ai](https://labs.perplexity.ai)|`g4f.Provider.PerplexityLabs`|`llama-3.1-8b, llama-3.1-70b`|❌|❌|?||❌|
+|[labs.perplexity.ai](https://labs.perplexity.ai)|`g4f.Provider.PerplexityLabs`|`sonar-online, sonar-chat, llama-3.1-8b, llama-3.1-70b`|❌|❌|?||❌|
 |[pi.ai/talk](https://pi.ai/talk)|`g4f.Provider.Pi`|`pi`|❌|❌|?||❌|
 |[]()|`g4f.Provider.Pizzagpt`|`gpt-4o-mini`|❌|❌|✔||❌|
 |[poe.com](https://poe.com)|`g4f.Provider.Poe`|✔|❌|❌|?||✔|
g4f/Provider/ChatHub.py (new file, 84 lines)
@@ -0,0 +1,84 @@
+from __future__ import annotations
+
+import json
+from aiohttp import ClientSession
+
+from ..typing import AsyncResult, Messages
+from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
+from .helper import format_prompt
+
+class ChatHub(AsyncGeneratorProvider, ProviderModelMixin):
+    label = "ChatHub"
+    url = "https://app.chathub.gg"
+    api_endpoint = "https://app.chathub.gg/api/v3/chat/completions"
+    working = True
+    supports_stream = True
+    supports_system_message = True
+    supports_message_history = True
+
+    default_model = 'meta/llama3.1-8b'
+    models = [
+        'meta/llama3.1-8b',
+        'mistral/mixtral-8x7b',
+        'google/gemma-2',
+        'perplexity/sonar-online',
+    ]
+
+    model_aliases = {
+        "llama-3.1-8b": "meta/llama3.1-8b",
+        "mixtral-8x7b": "mistral/mixtral-8x7b",
+        "gemma-2": "google/gemma-2",
+        "sonar-online": "perplexity/sonar-online",
+    }
+
+    @classmethod
+    def get_model(cls, model: str) -> str:
+        if model in cls.models:
+            return model
+        elif model in cls.model_aliases:
+            return cls.model_aliases[model]
+        else:
+            return cls.default_model
+
+    @classmethod
+    async def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        proxy: str = None,
+        **kwargs
+    ) -> AsyncResult:
+        model = cls.get_model(model)
+
+        headers = {
+            'accept': '*/*',
+            'accept-language': 'en-US,en;q=0.9',
+            'content-type': 'application/json',
+            'origin': cls.url,
+            'referer': f"{cls.url}/chat/cloud-llama3.1-8b",
+            'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36',
+            'x-app-id': 'web'
+        }
+
+        async with ClientSession(headers=headers) as session:
+            prompt = format_prompt(messages)
+            data = {
+                "model": model,
+                "messages": [{"role": "user", "content": prompt}],
+                "tools": []
+            }
+
+            async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
+                response.raise_for_status()
+                async for line in response.content:
+                    if line:
+                        decoded_line = line.decode('utf-8')
+                        if decoded_line.startswith('data:'):
+                            try:
+                                data = json.loads(decoded_line[5:])
+                                if data['type'] == 'text-delta':
+                                    yield data['textDelta']
+                                elif data['type'] == 'done':
+                                    break
+                            except json.JSONDecodeError:
+                                continue
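Editorial note, not part of the commit: the endpoint streams SSE-style `data: {...}` lines, which the generator above parses for `text-delta` chunks until a `done` event. A minimal usage sketch, assuming g4f's public `ChatCompletion.create_async` API as it existed around this commit:

```python
# Hedged sketch: selects the new provider explicitly; the "llama-3.1-8b"
# alias is mapped to "meta/llama3.1-8b" by ChatHub.get_model.
import asyncio

import g4f
from g4f.Provider import ChatHub

async def main():
    answer = await g4f.ChatCompletion.create_async(
        model="llama-3.1-8b",
        messages=[{"role": "user", "content": "Say hello in one sentence."}],
        provider=ChatHub,
    )
    print(answer)

asyncio.run(main())
```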
g4f/Provider/PerplexityLabs.py
@@ -24,10 +24,10 @@ class PerplexityLabs(AsyncGeneratorProvider, ProviderModelMixin):
     ]
 
     model_aliases = {
-        "llama-3.1-8b": "llama-3.1-sonar-large-128k-online",
-        "llama-3.1-8b": "sonar-small-128k-online",
-        "llama-3.1-8b": "llama-3.1-sonar-large-128k-chat",
-        "llama-3.1-8b": "llama-3.1-sonar-small-128k-chat",
+        "sonar-online": "llama-3.1-sonar-large-128k-online",
+        "sonar-online": "sonar-small-128k-online",
+        "sonar-chat": "llama-3.1-sonar-large-128k-chat",
+        "sonar-chat": "llama-3.1-sonar-small-128k-chat",
         "llama-3.1-8b": "llama-3.1-8b-instruct",
         "llama-3.1-70b": "llama-3.1-70b-instruct",
     }
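Editorial note: a Python dict literal silently keeps only the last value for a repeated key, so the duplicated `sonar-online` and `sonar-chat` keys added above collapse, and each alias ends up pointing at its second mapping. A quick illustration:

```python
# Duplicate keys are legal in a dict literal, but construction keeps
# only the final assignment; earlier values are discarded.
aliases = {
    "sonar-online": "llama-3.1-sonar-large-128k-online",
    "sonar-online": "sonar-small-128k-online",  # this one wins
}
print(aliases["sonar-online"])  # -> sonar-small-128k-online
print(len(aliases))             # -> 1
```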
g4f/Provider/__init__.py
@@ -27,6 +27,7 @@ from .Chatgpt4Online   import Chatgpt4Online
 from .Chatgpt4o        import Chatgpt4o
 from .ChatGptEs        import ChatGptEs
 from .ChatgptFree      import ChatgptFree
+from .ChatHub          import ChatHub
 from .DDG              import DDG
 from .DeepInfra        import DeepInfra
 from .DeepInfraChat    import DeepInfraChat
g4f/models.py
@@ -16,6 +16,7 @@ from .Provider import (
     Chatgpt4Online,
     ChatGptEs,
     ChatgptFree,
+    ChatHub,
     DDG,
     DeepInfra,
     DeepInfraChat,
@@ -96,9 +97,7 @@ default = Model(
 gpt_3 = Model(
     name          = 'gpt-3',
     base_provider = 'OpenAI',
-    best_provider = IterListProvider([
-        Nexra,
-    ])
+    best_provider = Nexra
 )
 
 # gpt-3.5
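Editorial note on the pattern repeated through the rest of `g4f/models.py` in this commit: one-element `IterListProvider([...])` wrappers are unwrapped to the bare provider class, which behaves the same for provider selection since there is nothing to rotate through. A sketch, under the assumption that `IterListProvider` lives in `g4f.providers.retry_provider`:

```python
# Sketch only; the import path for IterListProvider is assumed.
from g4f.Provider import Nexra
from g4f.providers.retry_provider import IterListProvider

# Before: a fallback list with a single entry to iterate over.
best_provider_wrapped = IterListProvider([Nexra])
# After: the bare provider class, with no redundant rotation wrapper.
best_provider_bare = Nexra
```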
@@ -167,7 +166,7 @@ meta = Model(
 llama_2_13b = Model(
     name          = "llama-2-13b",
     base_provider = "Meta Llama",
-    best_provider = IterListProvider([Airforce])
+    best_provider = Airforce
 )
 
 # llama 3
@@ -193,7 +192,7 @@ llama_3 = Model(
 llama_3_1_8b = Model(
     name          = "llama-3.1-8b",
     base_provider = "Meta Llama",
-    best_provider = IterListProvider([Blackbox, DeepInfraChat, Airforce, PerplexityLabs])
+    best_provider = IterListProvider([Blackbox, DeepInfraChat, ChatHub, Airforce, PerplexityLabs])
 )
 
 llama_3_1_70b = Model(
@@ -225,7 +224,7 @@ mistral_7b = Model(
 mixtral_8x7b = Model(
     name          = "mixtral-8x7b",
     base_provider = "Mistral",
-    best_provider = IterListProvider([DDG, ReplicateHome, DeepInfraChat, Airforce, DeepInfra])
+    best_provider = IterListProvider([DDG, ReplicateHome, DeepInfraChat, ChatHub, Airforce, DeepInfra])
 )
 
 mixtral_8x22b = Model(
@@ -245,7 +244,7 @@ mistral_nemo = Model(
 mixtral_8x7b_dpo = Model(
     name          = "mixtral-8x7b-dpo",
     base_provider = "NousResearch",
-    best_provider = IterListProvider([Airforce])
+    best_provider = Airforce
 )
 
 hermes_3 = Model(
@@ -259,7 +258,7 @@ hermes_3 = Model(
 phi_3_medium_4k = Model(
     name          = "phi-3-medium-4k",
     base_provider = "Microsoft",
-    best_provider = IterListProvider([DeepInfraChat])
+    best_provider = DeepInfraChat
 )
 
 phi_3_5_mini = Model(
@@ -295,7 +294,7 @@ gemini = Model(
 gemma_2b_9b = Model(
     name          = 'gemma-2b-9b',
     base_provider = 'Google',
-    best_provider = IterListProvider([Airforce])
+    best_provider = Airforce
 )
 
 gemma_2b_27b = Model(
@@ -313,37 +312,43 @@ gemma_2b = Model(
    ])
 )
 
+gemma_2 = Model(
+    name          = 'gemma-2',
+    base_provider = 'Google',
+    best_provider = ChatHub
+)
+
+
 ### Anthropic ###
 claude_2 = Model(
     name          = 'claude-2',
     base_provider = 'Anthropic',
-    best_provider = IterListProvider([You])
+    best_provider = You
 )
 
 claude_2_0 = Model(
     name          = 'claude-2.0',
     base_provider = 'Anthropic',
-    best_provider = IterListProvider([Liaobots])
+    best_provider = Liaobots
 )
 
 claude_2_1 = Model(
     name          = 'claude-2.1',
     base_provider = 'Anthropic',
-    best_provider = IterListProvider([Liaobots])
+    best_provider = Liaobots
 )
 
 # claude 3
 claude_3_opus = Model(
     name          = 'claude-3-opus',
     base_provider = 'Anthropic',
-    best_provider = IterListProvider([Liaobots])
+    best_provider = Liaobots
 )
 
 claude_3_sonnet = Model(
     name          = 'claude-3-sonnet',
     base_provider = 'Anthropic',
-    best_provider = IterListProvider([Liaobots])
+    best_provider = Liaobots
 )
 
 claude_3_haiku = Model(
@@ -390,7 +395,7 @@ reka_core = Model(
 blackbox = Model(
     name = 'blackbox',
     base_provider = 'Blackbox AI',
-    best_provider = IterListProvider([Blackbox])
+    best_provider = Blackbox
 )
 
 
@@ -406,7 +411,7 @@ dbrx_instruct = Model(
 command_r_plus = Model(
     name = 'command-r-plus',
     base_provider = 'CohereForAI',
-    best_provider = IterListProvider([HuggingChat])
+    best_provider = HuggingChat
 )
 
 
@@ -422,19 +427,19 @@ sparkdesk_v1_1 = Model(
 qwen_1_5_14b = Model(
     name = 'qwen-1.5-14b',
     base_provider = 'Qwen',
-    best_provider = IterListProvider([FreeChatgpt])
+    best_provider = FreeChatgpt
 )
 
 qwen_1_5_72b = Model(
     name = 'qwen-1.5-72b',
     base_provider = 'Qwen',
-    best_provider = IterListProvider([Airforce])
+    best_provider = Airforce
 )
 
 qwen_1_5_110b = Model(
     name = 'qwen-1.5-110b',
     base_provider = 'Qwen',
-    best_provider = IterListProvider([Airforce])
+    best_provider = Airforce
 )
 
 qwen_2_72b = Model(
@@ -446,7 +451,7 @@ qwen_2_72b = Model(
 qwen_turbo = Model(
     name = 'qwen-turbo',
     base_provider = 'Qwen',
-    best_provider = IterListProvider([Bixin123])
+    best_provider = Bixin123
 )
 
 qwen = Model(
@@ -462,13 +467,13 @@ qwen = Model(
 glm_3_6b = Model(
     name = 'glm-3-6b',
     base_provider = 'Zhipu AI',
-    best_provider = IterListProvider([FreeChatgpt])
+    best_provider = FreeChatgpt
 )
 
 glm_4_9b = Model(
     name = 'glm-4-9B',
     base_provider = 'Zhipu AI',
-    best_provider = IterListProvider([FreeChatgpt])
+    best_provider = FreeChatgpt
 )
 
 glm_4 = Model(
@@ -484,13 +489,13 @@ glm_4 = Model(
 yi_1_5_9b = Model(
     name = 'yi-1.5-9b',
     base_provider = '01-ai',
-    best_provider = IterListProvider([FreeChatgpt])
+    best_provider = FreeChatgpt
 )
 
 yi_34b = Model(
     name = 'yi-34b',
     base_provider = '01-ai',
-    best_provider = IterListProvider([Airforce])
+    best_provider = Airforce
 )
 
 
@@ -498,7 +503,7 @@ yi_34b = Model(
 solar_1_mini = Model(
     name = 'solar-1-mini',
     base_provider = 'Upstage',
-    best_provider = IterListProvider([Upstage])
+    best_provider = Upstage
 )
 
 solar_10_7b = Model(
@@ -525,14 +530,14 @@ pi = Model(
 deepseek = Model(
     name = 'deepseek',
     base_provider = 'DeepSeek',
-    best_provider = IterListProvider([Airforce])
+    best_provider = Airforce
 )
 
 ### WizardLM ###
 wizardlm_2_7b = Model(
     name = 'wizardlm-2-7b',
     base_provider = 'WizardLM',
-    best_provider = IterListProvider([DeepInfraChat])
+    best_provider = DeepInfraChat
 )
 
 wizardlm_2_8x22b = Model(
@@ -610,6 +615,19 @@ grok_2_mini = Model(
     best_provider = Liaobots
 )
 
+# Perplexity
+sonar_online = Model(
+    name = 'sonar-online',
+    base_provider = 'Perplexity',
+    best_provider = IterListProvider([ChatHub, PerplexityLabs])
+)
+
+sonar_chat = Model(
+    name = 'sonar-chat',
+    base_provider = 'Perplexity',
+    best_provider = PerplexityLabs
+)
+
 
 
 #############
@@ -817,6 +835,7 @@ class ModelUtils:
 'gemma-2b': gemma_2b,
 'gemma-2b-9b': gemma_2b_9b,
 'gemma-2b-27b': gemma_2b_27b,
+'gemma-2': gemma_2,
 
 
 ### Anthropic ###
@@ -928,6 +947,10 @@ class ModelUtils:
 'grok-2': grok_2,
 'grok-2-mini': grok_2_mini,
 
+### Perplexity ###
+'sonar-online': sonar_online,
+'sonar-chat': sonar_chat,
+
 
 
 #############
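Editorial note: once registered in `ModelUtils`, the new names resolve like any other model string. A hedged check, assuming `ModelUtils.convert` is g4f's name-to-`Model` lookup table (the dict the hunks above extend):

```python
# Sketch: inspect how the new Perplexity entries resolve.
from g4f.models import ModelUtils

model = ModelUtils.convert["sonar-online"]
print(model.name)           # -> sonar-online
print(model.base_provider)  # -> Perplexity
```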
Author: kqlio67