Merge pull request #1630 from hlohaus/flow
Custom api_base for GeminiPro
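This PR adds an optional api_base parameter so GeminiPro can target a proxy or self-hosted gateway instead of Google's default endpoint. A minimal usage sketch, assuming the usual g4f entry point of this era; the gateway URL is a hypothetical placeholder:

import g4f
from g4f.Provider import GeminiPro

# Without api_base, requests go to generativelanguage.googleapis.com and the
# key is passed as Google's ?key= query parameter. With api_base set, the key
# is sent as an Authorization: Bearer header instead.
response = g4f.ChatCompletion.create(
    model="gemini-pro",
    provider=GeminiPro,
    messages=[{"role": "user", "content": "Hello"}],
    api_key="YOUR_API_KEY",                        # required, else MissingAuthError
    api_base="https://my-gateway.example/v1beta",  # hypothetical custom endpoint
)
print(response)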
@@ -13,6 +13,7 @@ class GeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
     url = "https://ai.google.dev"
     working = True
     supports_message_history = True
     needs_auth = True
     default_model = "gemini-pro"
     models = ["gemini-pro", "gemini-pro-vision"]
 
@@ -24,19 +25,27 @@ class GeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
         stream: bool = False,
         proxy: str = None,
         api_key: str = None,
+        api_base: str = None,
         image: ImageType = None,
         **kwargs
     ) -> AsyncResult:
         model = "gemini-pro-vision" if not model and image else model
         model = cls.get_model(model)
 
         if not api_key:
-            raise MissingAuthError('Missing "api_key" for auth')
-        headers = {
-            "Content-Type": "application/json",
-        }
+            raise MissingAuthError('Missing "api_key"')
+
+        method = "streamGenerateContent" if stream else "generateContent"
+        headers = None
+        if api_base:
+            headers = {"Authorization": f"Bearer {api_key}"}
+        else:
+            api_base = "https://generativelanguage.googleapis.com/v1beta"
+        url = f"{api_base.rstrip('/')}/models/{model}:{method}"
+        if headers is None:
+            url += f"?key={api_key}"
+
         async with ClientSession(headers=headers) as session:
-            method = "streamGenerateContent" if stream else "generateContent"
-            url = f"https://generativelanguage.googleapis.com/v1beta/models/{model}:{method}"
             contents = [
                 {
                     "role": "model" if message["role"] == "assistant" else message["role"],
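The hunk above collapses endpoint and auth selection into one place. A standalone sketch of the routing rule, with names local to this example:

def build_request(model: str, stream: bool, api_key: str, api_base: str = None):
    # A caller-supplied api_base authenticates with a Bearer header;
    # the default Google endpoint keeps the documented ?key= parameter.
    method = "streamGenerateContent" if stream else "generateContent"
    headers = None
    if api_base:
        headers = {"Authorization": f"Bearer {api_key}"}
    else:
        api_base = "https://generativelanguage.googleapis.com/v1beta"
    url = f"{api_base.rstrip('/')}/models/{model}:{method}"
    if headers is None:
        url += f"?key={api_key}"
    return url, headers

For example, build_request("gemini-pro", True, "KEY") yields the official streamGenerateContent URL with ?key=KEY and no headers, while passing an api_base returns the custom URL plus the Bearer header.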
@@ -62,7 +71,7 @@ class GeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
                     "topK": kwargs.get("top_k"),
                 }
             }
-            async with session.post(url, params={"key": api_key}, json=data, proxy=proxy) as response:
+            async with session.post(url, json=data, proxy=proxy) as response:
                 if not response.ok:
                     data = await response.json()
                     raise RuntimeError(data[0]["error"]["message"])
@@ -73,12 +82,11 @@ class GeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
                         lines = [b"{\n"]
                     elif chunk == b",\r\n" or chunk == b"]":
                         try:
-                            data = b"".join(lines)
-                            data = json.loads(data)
+                            data = json.loads(b"".join(lines))
                             yield data["candidates"][0]["content"]["parts"][0]["text"]
                         except:
-                            data = data.decode() if isinstance(data, bytes) else data
-                            raise RuntimeError(f"Read text failed. data: {data}")
+                            raise RuntimeError(f"Read chunk failed: {data}")
                         lines = []
                     else:
                         lines.append(chunk)
 
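The streaming endpoint answers with a single JSON array, so the loop above reassembles one array element at a time from raw byte chunks. A self-contained sketch of the same parse; the frame that opens the array is outside this hunk, so the b"[{\n" check here is an assumption:

import json

def iter_stream_texts(chunks):
    lines = []
    for chunk in chunks:
        if chunk == b"[{\n":                        # assumed opening frame
            lines = [b"{\n"]
        elif chunk == b",\r\n" or chunk == b"]":
            # A separator or the closing bracket ends one element.
            data = json.loads(b"".join(lines))
            yield data["candidates"][0]["content"]["parts"][0]["text"]
            lines = []
        else:
            lines.append(chunk)

frames = [
    b"[{\n", b'"candidates": [{"content": {"parts": [{"text": "Hi"}]}}]}',
    b",\r\n", b'{"candidates": [{"content": {"parts": [{"text": "!"}]}}]}',
    b"]",
]
for text in iter_stream_texts(frames):
    print(text)  # "Hi", then "!"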
@@ -78,7 +78,7 @@ class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
     supports_gpt_35_turbo = True
     supports_gpt_4 = True
     default_model = "gpt-3.5-turbo"
-    models = [m for m in models]
+    models = list(models)
     model_aliases = {
         "claude-v2": "claude-2"
     }
@@ -541,7 +541,6 @@ label[for="camera"] {
     display: flex;
     align-items: center;
     gap: 16px;
-    padding-right: 15px
 }
 
 .field .about {
@@ -569,7 +568,16 @@ select {
     padding: 8px 16px;
 
     appearance: none;
-    width: 250px;
+    width: 160px;
 }
 
+@media only screen and (min-width: 40em) {
+    select {
+        width: 200px;
+    }
+    .field {
+        padding-right: 15px
+    }
+}
+
 .input-box {
@@ -3,7 +3,6 @@ const markdown = window.markdownit();
 const message_box = document.getElementById(`messages`);
 const message_input = document.getElementById(`message-input`);
 const box_conversations = document.querySelector(`.top`);
-const spinner = box_conversations.querySelector(".spinner");
 const stop_generating = document.querySelector(`.stop_generating`);
 const regenerate = document.querySelector(`.regenerate`);
 const send_button = document.querySelector(`#send-button`);
@@ -71,6 +70,7 @@ const handle_ask = async () => {
     message_input.style.height = `82px`;
     message_input.focus();
     window.scrollTo(0, 0);
+
     message = message_input.value
     if (message.length > 0) {
         message_input.value = '';
@@ -268,6 +268,11 @@ const ask_gpt = async () => {
             }
         }
+        if (!error) {
+            // Remove cursor
         html = markdown_render(text);
         content_inner.innerHTML = html;
         highlight(content_inner);
+        }
 
+        if (imageInput) imageInput.value = "";
+        if (cameraInput) cameraInput.value = "";
+        if (fileInput) fileInput.value = "";
@@ -275,26 +280,28 @@ const ask_gpt = async () => {
     } catch (e) {
         console.error(e);
 
-        if (e.name != `AbortError`) {
-            text = `oops ! something went wrong, please try again / reload. [stacktrace in console]`;
+        if (e.name != "AbortError") {
+            error = true;
+            text = "oops ! something went wrong, please try again / reload. [stacktrace in console]";
             content_inner.innerHTML = text;
         } else {
             content_inner.innerHTML += ` [aborted]`;
+            text += ` [aborted]`
         }
     }
+    let cursorDiv = document.getElementById(`cursor`);
+    if (cursorDiv) cursorDiv.parentNode.removeChild(cursorDiv);
     if (text) {
-        await add_message(window.conversation_id, "assistant", text, provider);
-        await load_conversation(window.conversation_id);
-    } else {
-        let cursorDiv = document.getElementById(`cursor`);
-        if (cursorDiv) cursorDiv.parentNode.removeChild(cursorDiv);
+        if (!error) {
+            await add_message(window.conversation_id, "assistant", text, provider);
+        }
     }
+    await load_conversation(window.conversation_id);
     message_box.scrollTop = message_box.scrollHeight;
     await remove_cancel_button();
     await register_remove_message();
     prompt_lock = false;
     window.scrollTo(0, 0);
-    await load_conversations(20, 0);
+    await load_conversations();
     regenerate.classList.remove(`regenerate-hidden`);
 };
 
@@ -353,7 +360,7 @@ const delete_conversation = async (conversation_id) => {
         await new_conversation();
     }
 
-    await load_conversations(20, 0, true);
+    await load_conversations();
 };
 
 const set_conversation = async (conversation_id) => {
@@ -362,7 +369,7 @@ const set_conversation = async (conversation_id) => {
 
     await clear_conversation();
     await load_conversation(conversation_id);
-    await load_conversations(20, 0, true);
+    await load_conversations();
 };
 
 const new_conversation = async () => {
@@ -370,7 +377,7 @@ const new_conversation = async () => {
     window.conversation_id = uuid();
 
     await clear_conversation();
-    await load_conversations(20, 0, true);
+    await load_conversations();
 
     await say_hello()
 };
@@ -435,14 +442,14 @@ function count_words(text) {
 }
 
 function count_tokens(model, text) {
-    if (model.startsWith("gpt-3") || model.startsWith("gpt-4")) {
-        return GPTTokenizer_cl100k_base?.encode(text).length
+    if (model.startsWith("gpt-3") || model.startsWith("gpt-4") || model.startsWith("text-davinci")) {
+        return GPTTokenizer_cl100k_base?.encode(text).length;
     }
     if (model.startsWith("llama2") || model.startsWith("codellama")) {
-        return llamaTokenizer?.encode(text).length
+        return llamaTokenizer?.encode(text).length;
     }
     if (model.startsWith("mistral") || model.startsWith("mixtral")) {
-        return mistralTokenizer?.encode(text).length
+        return mistralTokenizer?.encode(text).length;
     }
 }
 
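count_tokens maps model-name prefixes to a matching tokenizer. The same dispatch in Python form, as a sketch; the tokenizer objects stand in for the browser globals used above:

def count_tokens(model: str, text: str, tokenizers: dict):
    # str.startswith accepts a tuple, so each row lists its prefixes.
    table = {
        ("gpt-3", "gpt-4", "text-davinci"): "cl100k_base",
        ("llama2", "codellama"): "llama",
        ("mistral", "mixtral"): "mistral",
    }
    for prefixes, name in table.items():
        if model.startswith(prefixes):
            tokenizer = tokenizers.get(name)
            return len(tokenizer.encode(text)) if tokenizer else None
    return None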
@@ -526,7 +533,7 @@ const add_message = async (conversation_id, role, content, provider) => {
     return conversation.items.length - 1;
 };
 
-const load_conversations = async (limit, offset, loader) => {
+const load_conversations = async () => {
     let conversations = [];
     for (let i = 0; i < localStorage.length; i++) {
         if (localStorage.key(i).startsWith("conversation:")) {
@@ -550,7 +557,6 @@ const load_conversations = async (limit, offset, loader) => {
             </div>
         `;
     }
-
 };
 
 document.getElementById(`cancelButton`).addEventListener(`click`, async () => {
@@ -693,10 +699,8 @@ window.onload = async () => {
         }
     }
 
-    if (conversations == 0) localStorage.clear();
-
     await setTimeout(() => {
-        load_conversations(20, 0);
+        load_conversations();
     }, 1);
 
     if (/\/chat\/.+/.test(window.location.href)) {
@@ -776,15 +780,17 @@ observer.observe(message_input, { attributes: true });
     versions = await response.json()
 
     document.title = 'g4f - gui - ' + versions["version"];
-    text = "version ~ "
+    let text = "version ~ "
     if (versions["version"] != versions["latest_version"]) {
-        release_url = 'https://github.com/xtekky/gpt4free/releases/tag/' + versions["latest_version"];
-        text += '<a href="' + release_url +'" target="_blank" title="New version: ' + versions["latest_version"] +'">' + versions["version"] + ' 🆕</a>';
+        let release_url = 'https://github.com/xtekky/gpt4free/releases/tag/' + versions["latest_version"];
+        let title = `New version: ${versions["latest_version"]}`;
+        text += `<a href="${release_url}" target="_blank" title="${title}">${versions["version"]} 🆕</a>`;
     } else {
         text += versions["version"];
     }
     document.getElementById("version_text").innerHTML = text
 })()
 
 for (const el of [imageInput, cameraInput]) {
     el.addEventListener('click', async () => {
         el.value = '';
@@ -794,6 +800,7 @@ for (const el of [imageInput, cameraInput]) {
         }
     });
 }
 
 fileInput.addEventListener('click', async (event) => {
     fileInput.value = '';
+    delete fileInput.dataset.text;