Browse Source

支持本地ollama服务

xiongweixp 8 months ago
parent
commit
832f0867f4
3 changed files with 75 additions and 28 deletions
  1. 1 1
      .vscode/launch.json
  2. 6 1
      README.md
  3. 68 26
      erniecli.py

+ 1 - 1
.vscode/launch.json

@@ -9,7 +9,7 @@
             "type": "debugpy",
             "request": "launch",
             "program": "erniecli.py",
-            "args": ["command","显示当前时间"],
+            "args": ["command","用百度的源pip3安装numpy"],
             "console": "integratedTerminal"
         },
         {

+ 6 - 1
README.md

@@ -3,7 +3,10 @@
 ## 简介
 基于大语言模型的命令行助手,可以通过对话的方式生成Linux命令,并执行。节省指令查询时间。
 ## 安装
-pip3 install requets
+pip3 install requests <br/>
+可选本地部署ollama服务,使用qwen 7b模型 <br/>
+ollama serve <br/>
+ollama pull qwen:7b
 ## 使用方法
 需要注册百度千帆账号,并获取API KEY及SECRET KEY。<br/>
 https://console.bce.baidu.com/qianfan <br/>
@@ -33,5 +36,7 @@ BACKEND:可选
     1.ernie:百度千帆
     2.openai:OpenAI兼容接口模型
 
+LOCAL:是否启用本地ollama服务
+
 
 

+ 68 - 26
erniecli.py

@@ -17,6 +17,10 @@ OPENAI_MODEL = ""
 # 后端可选“openai”或“ernie”
 BACKEND = ""
 
+# 是否启用本地模式
+USE_LOCAL = False
+LOCAL_PORT = 11434
+
 PROMPT = """
 你是一个Linux命令行专家,请将我的请求转换成一条可执行的bash命令。
 只给一条命令,不要做任何解释或说明。
@@ -58,6 +62,9 @@ def set_config():
     elif bidx == 2:
         BACKEND = "openai"
     
+    choose_local = input("\033[1;35m是否需要配置本地模式?(y/N)")
+    if choose_local == "y":
+        USE_LOCAL = True
 
     choice_ernie = input("\033[1;34m是否需要配置ernie?(y/N)")
     if choice_ernie == "y":
@@ -84,6 +91,7 @@ def set_config():
         # 写入所有配置
         f.write("[GLOBAL]\n")
         f.write("BACKEND="+BACKEND+"\n")
+        f.write("LOCAL="+str(USE_LOCAL)+"\n")
         f.write("\n[ERNIE]\n")
         f.write("API_KEY="+ERNIE_API_KEY+"\n")
         f.write("SECRET_KEY="+ERNIE_SECRET_KEY+"\n")
@@ -96,7 +104,7 @@ def set_config():
 
 # 读取$HOME/.config/erniecli/.ernierc文件中API_KEY和SECRET_KEY
 def get_config():
-    global ERNIE_API_KEY, ERNIE_SECRET_KEY, OPENAI_API_KEY, OPENAI_MODEL, OPENAI_URL, BACKEND
+    global ERNIE_API_KEY, ERNIE_SECRET_KEY, OPENAI_API_KEY, OPENAI_MODEL, OPENAI_URL, BACKEND, USE_LOCAL
     config = os.path.expanduser('~/.config/erniecli/.ernierc')
     if not os.path.exists(config):
         print("\033[1;31m请进行使用erniecli进行配置\033[0m")
@@ -117,6 +125,8 @@ def get_config():
                 key, value = line.split('=')
                 if key.strip() == "BACKEND":
                     BACKEND = value.strip()
+                if key.strip() == "LOCAL":
+                    USE_LOCAL = value.strip() == "True"
             if group == "ERNIE":
                 key, value = line.split('=')
                 if key.strip() == "API_KEY":
@@ -198,13 +208,39 @@ def askOpenAI(quest):
     
     return response.text
 
-def parse(answer):
+# 查询本地
+def askLocal(quest):
+    res = requests.post(
+        "http://localhost:{0}/api/chat".format(LOCAL_PORT),
+        json={
+            "model": "qwen:7b",  # use the qwen:7b model
+            "stream": False,  # 禁用流式输出
+            "messages": [
+            
+                {
+                    "role": "system",
+                    "content": PROMPT
+                },
+                {
+                    "role": "user",
+                    "content": quest
+                }
+                           
+            ],
+            "temperature": 0.3
+        },
+    )
+    return res.text
+def parse(answer, isLocal):
     answer = json.loads(answer)
     # 获取第一个结果
-    if BACKEND=="ernie":
-        result = answer['result']
-    elif BACKEND=="openai":
-        result = answer['choices'][0]['message']['content']
+    if isLocal:
+        result = answer['message']['content']
+    else:
+        if BACKEND=="ernie":
+            result = answer['result']
+        elif BACKEND=="openai":
+            result = answer['choices'][0]['message']['content']
     lines = result.split('\n')
     # 获取lines中的```bash项到```项,并拼成一个字符串
     cmd = ''
@@ -220,11 +256,7 @@ def parse(answer):
                 break
             cmd += line+'\n'
     
-    if cmd != '':
-        return cmd
-    else:
-        print("\033[1;31mAI没有找到可执行的命令\033[0m")
-        sys.exit(0)
+    return cmd
 
 
 
@@ -312,22 +344,32 @@ erniecli version          :    show version")
         except Exception as e:
             print(e)
             #使用绿色字体
-        lock[1] ="\033[1;32m终端助理正在思考...\033[0m"
-        
-        # 百度千帆
-        # answer = askERNIE(question)
-        # cmd = parseERNIE(answer)
-        if BACKEND=="ernie":
-            query = askERNIE
-        elif BACKEND=="openai":
-            query = askOpenAI
+        cli = ""
+        if USE_LOCAL:
+            # 本地ollama
+            lock[1] ="\033[1;32m终端助理正在本地思考...\033[0m"
+            answer = askLocal(question)
+            cli = parse(answer, True)
+        if cli== "":
+            lock[1] ="\033[1;32m终端助理正在云端思考...\033[0m"
+
+            # 百度千帆
+            # answer = askERNIE(question)
+            # cmd = parseERNIE(answer)
+            if BACKEND=="ernie":
+                query = askERNIE
+            elif BACKEND=="openai":
+                query = askOpenAI
 
-        # OpenAI API
-        answer = query(question)
-        cmd = parse(answer)
+            # OpenAI API
+            answer = query(question)
+            cli = parse(answer, False)
         lock[0] = False
+        if cli == "":
+            print("\033[1;31mAI没有找到可执行的命令\033[0m")
+            sys.exit(0)
         print('\033[F\033[K',end =  "\033[1;32m❯ \033[0m")
-        print('\033[1;32m{0}\033[0m'.format(cmd))
+        print('\033[1;32m{0}\033[0m'.format(cli))
         while True:
             choice = input('\033[1;32m? \033[0m\033[1;90m是否执行 ⬆ ? [Y/n]\033[0m')
             if choice == 'Y' or choice == 'y' or choice == '':
@@ -337,8 +379,8 @@ erniecli version          :    show version")
                 sys.stdout.flush()
                 # 执行命令,并输出结果
                 # print('')
-                cmd = replaceAlias(cmd)
-                subprocess.run(cmd, shell=True)
+                cmd = replaceAlias(cli)
+                subprocess.run(cli, shell=True)
                 # print('')
                 # os.system(cmd)
                 break