
Commit

update version 3.8
binary-husky committed Aug 2, 2024
1 parent 58e732c commit da8b2d6
Showing 4 changed files with 26 additions and 23 deletions.
39 changes: 22 additions & 17 deletions crazy_functions/Internet_GPT.py
@@ -1,17 +1,17 @@
from toolbox import CatchException, update_ui, get_conf
from .crazy_utils import request_gpt_model_in_new_thread_with_ui_alive, input_clipping
import requests
from bs4 import BeautifulSoup
from request_llms.bridge_all import model_info
import random
from functools import lru_cache
from check_proxy import check_proxy
from request_llms.bridge_all import predict_no_ui_long_connection
from .prompts.Internet_GPT import Search_optimizer, Search_academic_optimizer
import time
import re
import json
from bs4 import BeautifulSoup
from functools import lru_cache
from itertools import zip_longest
from check_proxy import check_proxy
from toolbox import CatchException, update_ui, get_conf
from crazy_functions.crazy_utils import request_gpt_model_in_new_thread_with_ui_alive, input_clipping
from request_llms.bridge_all import model_info
from request_llms.bridge_all import predict_no_ui_long_connection
from crazy_functions.prompts.internet import SearchOptimizerPrompt, SearchAcademicOptimizerPrompt

def search_optimizer(
query,
@@ -36,15 +36,15 @@ def search_optimizer(
else:
his += f"A: {h}\n"
if categories == "general":
sys_prompt = Search_optimizer.format(query=query, history=his, num=4)
sys_prompt = SearchOptimizerPrompt.format(query=query, history=his, num=4)
elif categories == "science":
sys_prompt = Search_academic_optimizer.format(query=query, history=his, num=4)
sys_prompt = SearchAcademicOptimizerPrompt.format(query=query, history=his, num=4)
else:
his = " "
if categories == "general":
sys_prompt = Search_optimizer.format(query=query, history=his, num=3)
sys_prompt = SearchOptimizerPrompt.format(query=query, history=his, num=3)
elif categories == "science":
sys_prompt = Search_academic_optimizer.format(query=query, history=his, num=3)
sys_prompt = SearchAcademicOptimizerPrompt.format(query=query, history=his, num=3)

mutable = ["", time.time(), ""]
llm_kwargs["temperature"] = 0.8
@@ -104,13 +104,15 @@ def search_optimizer(
result.append(item)
return result

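The renamed prompt constants are plain string templates; the sketch below (not part of this commit, helper names hypothetical) shows how they are formatted and how the model's reply could be turned back into a list of search terms, assuming the placeholders are {query}, {history} and {num} as the .format() calls above imply, and that the model answers with a JSON array of strings as the prompt requests.

import json
from crazy_functions.prompts.internet import SearchOptimizerPrompt

def build_optimizer_prompt(query, history_text, num=3):
    # Fill the template the same way search_optimizer does above.
    return SearchOptimizerPrompt.format(query=query, history=history_text, num=num)

def parse_search_terms(llm_reply, fallback_query):
    # The prompt asks for a bare JSON list of at most {num} search terms;
    # fall back to the original query if the reply is not usable JSON.
    try:
        terms = json.loads(llm_reply)
    except json.JSONDecodeError:
        return [fallback_query]
    if not isinstance(terms, list):
        return [fallback_query]
    return [t for t in terms if isinstance(t, str)] or [fallback_query]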

@lru_cache
def get_auth_ip():
ip = check_proxy(None, return_ip=True)
if ip is None:
return '114.114.114.' + str(random.randint(1, 10))
return ip

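A side note on the decorator above: functools.lru_cache on a zero-argument function memoizes its single result, so check_proxy(None, return_ip=True) runs at most once per process and later calls to get_auth_ip return the cached value (or the random 114.114.114.x fallback). A minimal sketch of the same pattern, not part of this commit:

from functools import lru_cache

@lru_cache
def expensive_lookup():
    print("resolving ...")      # printed only on the first call
    return "203.0.113.7"        # placeholder address from the TEST-NET-3 range

expensive_lookup()              # performs the lookup
expensive_lookup()              # served from the cache, no second print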

def searxng_request(query, proxies, categories='general', searxng_url=None, engines=None):
if searxng_url is None:
url = get_conf("SEARXNG_URL")
@@ -162,6 +164,7 @@ def searxng_request(query, proxies, categories='general', searxng_url=None, engi
else:
raise ValueError("在线搜索失败,状态码: " + str(response.status_code) + '\t' + response.content.decode('utf-8'))

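Only the error branch of searxng_request is visible here; the sketch below (not part of this commit) shows how a SearXNG instance is commonly queried over its JSON API, assuming format=json is enabled on the instance — the exact parameters and request method the plugin uses are not shown in this hunk.

import requests

def searxng_search(query, searxng_url, categories="general", engines=None, proxies=None):
    # SearXNG accepts q/format/categories/engines as query parameters.
    params = {"q": query, "format": "json", "categories": categories}
    if engines:
        params["engines"] = engines if isinstance(engines, str) else ",".join(engines)
    response = requests.get(searxng_url, params=params, proxies=proxies, timeout=30)
    if response.status_code != 200:
        # Mirror the error handling above: surface the status code and body.
        raise ValueError(f"search failed, status {response.status_code}: {response.text}")
    results = response.json().get("results", [])
    return [{"title": r.get("title"), "link": r.get("url"), "content": r.get("content")}
            for r in results]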

def scrape_text(url, proxies) -> str:
"""Scrape text from a webpage
@@ -189,6 +192,7 @@ def scrape_text(url, proxies) -> str:
text = "\n".join(chunk for chunk in chunks if chunk)
return text

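Only the last line of scrape_text survives in this view; the sketch below (not part of this commit) reconstructs the usual requests + BeautifulSoup pattern that line implies, assuming the hidden part drops script/style tags and splits the page text into stripped chunks.

import requests
from bs4 import BeautifulSoup

def scrape_text_sketch(url, proxies=None):
    # Fetch the page and reduce the markup to readable text.
    resp = requests.get(url, proxies=proxies, timeout=30)
    resp.encoding = resp.apparent_encoding             # best-effort charset detection
    soup = BeautifulSoup(resp.text, "html.parser")
    for tag in soup(["script", "style"]):              # drop non-content nodes
        tag.extract()
    lines = (line.strip() for line in soup.get_text().splitlines())
    chunks = (phrase.strip() for line in lines for phrase in line.split("  "))
    return "\n".join(chunk for chunk in chunks if chunk)   # same join as above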

@CatchException
def 连接网络回答问题(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request):
optimizer_history = history[:-8]
@@ -202,8 +206,8 @@ def 连接网络回答问题(txt, llm_kwargs, plugin_kwargs, chatbot, history, s
categories = plugin_kwargs.get('categories', 'general')
searxng_url = plugin_kwargs.get('searxng_url', None)
engines = plugin_kwargs.get('engine', None)
optimizer = plugin_kwargs.get('optimizer', 0)
if optimizer == 0:
optimizer = plugin_kwargs.get('optimizer', "关闭")
if optimizer == "关闭":
urls = searxng_request(txt, proxies, categories, searxng_url, engines=engines)
else:
urls = search_optimizer(txt, proxies, optimizer_history, llm_kwargs, optimizer, categories, searxng_url, engines)
@@ -213,10 +217,10 @@ def 连接网络回答问题(txt, llm_kwargs, plugin_kwargs, chatbot, history, s
"[Local Message] 受到限制,无法从searxng获取信息!请尝试更换搜索引擎。"))
yield from update_ui(chatbot=chatbot, history=history) # 刷新界面
return

# ------------- < 第2步:依次访问网页 > -------------
max_search_result = 5 # 最多收纳多少个网页的结果
if optimizer == 2:
if optimizer == "开启(增强)":
max_search_result = 8
chatbot.append(["联网检索中 ...", None])
for index, url in enumerate(urls[:max_search_result]):
@@ -228,7 +232,7 @@ def 连接网络回答问题(txt, llm_kwargs, plugin_kwargs, chatbot, history, s
yield from update_ui(chatbot=chatbot, history=history) # 刷新界面

# ------------- < 第3步:ChatGPT综合 > -------------
if (optimizer == 0 or optimizer == 1):
if (optimizer != "开启(增强)"):
i_say = f"从以上搜索结果中抽取信息,然后回答问题:{txt}"
i_say, history = input_clipping( # 裁剪输入,从最长的条目开始裁剪,防止爆token
inputs=i_say,
Expand All @@ -243,6 +247,7 @@ def 连接网络回答问题(txt, llm_kwargs, plugin_kwargs, chatbot, history, s
chatbot[-1] = (i_say, gpt_say)
history.append(i_say);history.append(gpt_say)
yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 # 界面更新

#* 或者使用搜索优化器,这样可以保证后续问答能读取到有效的历史记录
else:
i_say = f"从以上搜索结果中抽取与问题:{txt} 相关的信息:"
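
Taken together, the hunks above make 连接网络回答问题 branch on the optimizer dropdown label directly ("关闭" / "开启" / "开启(增强)") instead of an integer index, while keeping the three-step flow: query SearXNG (optionally rewriting the query with search_optimizer), scrape the top results, then let the model synthesize an answer. A minimal sketch of that control flow, not part of this commit and with hypothetical callables standing in for the real helpers:

def answer_with_web_search(question, plugin_kwargs, search, scrape, ask_llm):
    optimizer = plugin_kwargs.get("optimizer", "关闭")

    # Step 1: collect candidate URLs, optionally rewriting the query first.
    urls = search(question, optimized=(optimizer != "关闭"))

    # Step 2: visit the top pages; the enhanced mode keeps more of them.
    max_results = 8 if optimizer == "开启(增强)" else 5
    pages = [scrape(u) for u in urls[:max_results]]

    # Step 3: let the model answer from the scraped material.
    prompt = "从以上搜索结果中抽取信息,然后回答问题:" + question
    return ask_llm(prompt, context=pages)
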
4 changes: 1 addition & 3 deletions crazy_functions/Internet_GPT_Wrap.py
@@ -26,7 +26,7 @@ def define_arg_selection_menu(self):
"categories":
ArgProperty(title="搜索分类", options=["网页", "学术论文"], default_value="网页", description="无", type="dropdown").model_dump_json(),
"engine":
ArgProperty(title="选择搜索引擎", options=["Mixed", "bing", "google", "duckduckgo"], default_value="Mixed", description="无", type="dropdown").model_dump_json(),
ArgProperty(title="选择搜索引擎", options=["Mixed", "bing", "google", "duckduckgo"], default_value="google", description="无", type="dropdown").model_dump_json(),
"optimizer":
ArgProperty(title="搜索优化", options=["关闭", "开启", "开启(增强)"], default_value="关闭", description="是否使用搜索增强。注意这可能会消耗较多token", type="dropdown").model_dump_json(),
"searxng_url":
@@ -41,7 +41,5 @@ def execute(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, use
"""
if plugin_kwargs["categories"] == "网页": plugin_kwargs["categories"] = "general"
if plugin_kwargs["categories"] == "学术论文": plugin_kwargs["categories"] = "science"
optimizer_options=["关闭", "开启", "开启(增强)"]
plugin_kwargs["optimizer"] = optimizer_options.index(plugin_kwargs["optimizer"])
yield from 连接网络回答问题(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request)

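With the index mapping removed, execute now forwards the dropdown label in plugin_kwargs["optimizer"] unchanged, and Internet_GPT.py compares against the same strings; the default engine also changes from Mixed to google. A minimal sketch (not part of this commit) of the kwargs the menu above would hand to 连接网络回答问题:

plugin_kwargs = {
    "categories": "学术论文",     # mapped to "science" inside execute
    "engine": "google",           # new default value of the engine dropdown
    "optimizer": "开启(增强)",    # label string, no longer converted to an index
    "searxng_url": None,          # assumed default; falls back to the SEARXNG_URL config
}
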
4 changes: 2 additions & 2 deletions crazy_functions/prompts/internet.py
@@ -1,4 +1,4 @@
Search_optimizer="""作为一个网页搜索助手,你的任务是结合历史记录,从不同角度,为“原问题”生成个不同版本的“检索词”,从而提高网页检索的精度。生成的问题要求指向对象清晰明确,并与“原问题语言相同”。例如:
SearchOptimizerPrompt="""作为一个网页搜索助手,你的任务是结合历史记录,从不同角度,为“原问题”生成个不同版本的“检索词”,从而提高网页检索的精度。生成的问题要求指向对象清晰明确,并与“原问题语言相同”。例如:
历史记录:
"
Q: 对话背景。
@@ -42,7 +42,7 @@
直接给出最多{num}个检索词,必须以json形式给出,不得有多余字符:
"""

Search_academic_optimizer="""作为一个学术论文搜索助手,你的任务是结合历史记录,从不同角度,为“原问题”生成个不同版本的“检索词”,从而提高学术论文检索的精度。生成的问题要求指向对象清晰明确,并与“原问题语言相同”。例如:
SearchAcademicOptimizerPrompt="""作为一个学术论文搜索助手,你的任务是结合历史记录,从不同角度,为“原问题”生成个不同版本的“检索词”,从而提高学术论文检索的精度。生成的问题要求指向对象清晰明确,并与“原问题语言相同”。例如:
历史记录:
"
Q: 对话背景。
2 changes: 1 addition & 1 deletion version
@@ -1,5 +1,5 @@
{
"version": 3.83,
"show_feature": true,
"new_feature": "优化图像生成插件 <-> 添加紫东太初大模型支持 <-> 保留主题选择 <-> 支持更复杂的插件框架 <-> 上传文件时显示进度条"
"new_feature": "增加欢迎页面 <-> 优化图像生成插件 <-> 添加紫东太初大模型支持 <-> 保留主题选择 <-> 支持更复杂的插件框架 <-> 上传文件时显示进度条"
}

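The version file is plain JSON; a minimal sketch (not part of this commit) of how a client could read it, assuming it simply loads the file with the standard json module and splits the "<->"-separated highlight string.

import json

with open("version", "r", encoding="utf-8") as f:
    meta = json.load(f)

print(meta["version"])                    # 3.83 after this commit
if meta.get("show_feature"):
    highlights = [s.strip() for s in meta["new_feature"].split("<->")]
    print("\n".join(highlights))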