erniecli.py

#!/usr/bin/python3
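"""erniecli: a conversational terminal assistant.

Turns a natural-language request into a single bash command using the Baidu
Qianfan ERNIE API, any OpenAI-compatible endpoint, or a local model served by
ollama, shows the suggested command, and runs it after confirmation.
"""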
import requests
import json
import sys
import time
import _thread
import os
import subprocess
import socket
ERNIE_API_KEY = ""
ERNIE_SECRET_KEY = ""
OPENAI_URL = ""
OPENAI_API_KEY = ""
OPENAI_MODEL = ""
# Backend: "openai" or "ernie"
BACKEND = ""
# Whether to enable local mode
USE_LOCAL = False
LOCAL_PORT = 11434
PROMPT2 = """
你是一个Linux终端,请将我的请求转换成一条最简洁的bash命令。
只给一条命令,不要做任何解释或说明。
示例:
请求:显示系统版本信息。
输出:
```bash
uname -a
```
"""
PROMPT = """
你是一个Linux Bash终端,请将我的请求转化为一句bash指令,
输出格式为
```bash
指令
```
输出尽量简洁,如果有多个指令,请把它们合并成一个输出
"""
# Conversation history: list of [question, answer] pairs
chats = []
# Interactively set API_KEY, SECRET_KEY and the other settings
def set_config():
    global ERNIE_API_KEY, ERNIE_SECRET_KEY, OPENAI_API_KEY, OPENAI_MODEL, OPENAI_URL, BACKEND, USE_LOCAL
    if os.path.exists(os.path.expanduser('~/.config/erniecli/.ernierc')):
        get_config()
    elif not os.path.exists(os.path.expanduser('~/.config/erniecli')):
        os.makedirs(os.path.expanduser('~/.config/erniecli'))
    # Prompt in yellow
    bidx = 0
    if BACKEND == "ernie":
        bidx = 1
    elif BACKEND == "openai":
        bidx = 2
    elif BACKEND == "":
        bidx = 0
    while True:
        choice_bidx = input("\033[1;33m请选择后端(0: None | 1: ernie | 2: openai)[current: {0}]:".format(bidx))
        if choice_bidx != '':
            bidx = int(choice_bidx)
        if bidx == 1 or bidx == 2:
            break
    if bidx == 1:
        BACKEND = "ernie"
    elif bidx == 2:
        BACKEND = "openai"
    choose_local = input("\033[1;35m是否需要配置本地模式?(y/N)")
    if choose_local == "y":
        USE_LOCAL = True
    else:
        USE_LOCAL = False
    choice_ernie = input("\033[1;34m是否需要配置ernie?(y/N)")
    if choice_ernie == "y":
        apikey_value = input("\033[1;34m请输入API_KEY(当前值:"+ERNIE_API_KEY+"):")
        securekey_value = input("请输入SECRET_KEY(当前值"+ERNIE_SECRET_KEY+"):")
        if apikey_value != "":
            ERNIE_API_KEY = apikey_value.strip()
        if securekey_value != "":
            ERNIE_SECRET_KEY = securekey_value.strip()
    choice_openai = input("\033[1;36m是否需要配置openai?(y/N)")
    if choice_openai == "y":
        url_value = input("\033[1;36m请输入BASE URL(当前值:"+OPENAI_URL+"):")
        apikey_value = input("\033[1;36m请输入API_KEY(当前值:"+OPENAI_API_KEY+"):")
        model_value = input("请输入模型(当前值:"+OPENAI_MODEL+"):")
        if url_value != "":
            OPENAI_URL = url_value.strip()
        if apikey_value != "":
            OPENAI_API_KEY = apikey_value.strip()
        if model_value != "":
            OPENAI_MODEL = model_value.strip()
    with open(os.path.expanduser('~/.config/erniecli/.ernierc'), 'w', encoding='utf-8') as f:
        # Write all settings
        f.write("[GLOBAL]\n")
        f.write("BACKEND="+BACKEND+"\n")
        f.write("LOCAL="+str(USE_LOCAL)+"\n")
        f.write("\n[ERNIE]\n")
        f.write("API_KEY="+ERNIE_API_KEY+"\n")
        f.write("SECRET_KEY="+ERNIE_SECRET_KEY+"\n")
        f.write("\n[OPENAI]\n")
        f.write("URL="+OPENAI_URL+"\n")
        f.write("API_KEY="+OPENAI_API_KEY+"\n")
        f.write("MODEL="+OPENAI_MODEL+"\n")
    print("\033[1;32m配置成功\033[0m")
    sys.exit(0)
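# For reference, set_config() above writes ~/.config/erniecli/.ernierc in the
# following layout (the values shown here are placeholders, not defaults):
#
#   [GLOBAL]
#   BACKEND=ernie
#   LOCAL=False
#
#   [ERNIE]
#   API_KEY=<your Qianfan API key>
#   SECRET_KEY=<your Qianfan secret key>
#
#   [OPENAI]
#   URL=<base URL, e.g. https://api.openai.com>
#   API_KEY=<your API key>
#   MODEL=<model name>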
# Read API_KEY, SECRET_KEY and the other settings from $HOME/.config/erniecli/.ernierc
def get_config():
    global ERNIE_API_KEY, ERNIE_SECRET_KEY, OPENAI_API_KEY, OPENAI_MODEL, OPENAI_URL, BACKEND, USE_LOCAL
    config = os.path.expanduser('~/.config/erniecli/.ernierc')
    if not os.path.exists(config):
        print("\033[1;31m请先使用erniecli config进行配置\033[0m")
        sys.exit(0)
    # Parse the config file, tracking the current [SECTION]
    group = "GLOBAL"
    with open(config, 'r', encoding='utf-8') as f:
        for line in f.readlines():
            line = line.strip()
            if len(line) == 0 or line[0] == '#':
                continue
            elif line.startswith("["):
                group = line[1:-1]
                continue
            # [GLOBAL] section
            if group == "GLOBAL":
                key, value = line.split('=', 1)
                if key.strip() == "BACKEND":
                    BACKEND = value.strip()
                if key.strip() == "LOCAL":
                    USE_LOCAL = value.strip() == "True"
            if group == "ERNIE":
                key, value = line.split('=', 1)
                if key.strip() == "API_KEY":
                    ERNIE_API_KEY = value.strip()
                if key.strip() == "SECRET_KEY":
                    ERNIE_SECRET_KEY = value.strip()
            if group == "OPENAI":
                key, value = line.split('=', 1)
                if key.strip() == "API_KEY":
                    OPENAI_API_KEY = value.strip()
                if key.strip() == "MODEL":
                    OPENAI_MODEL = value.strip()
                if key.strip() == "URL":
                    OPENAI_URL = value.strip()
# Query Baidu Qianfan (ERNIE)
def askERNIE(question):
    global chats
    url = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-speed-128k?access_token=" + get_access_token()
    data = {
        "messages": [
        ],
        "temperature": 0.95,
        "top_p": 0.8,
        "penalty_score": 1,
        "disable_search": False,
        "enable_citation": False,
        "response_format": "text"
    }
    # Replay the conversation history; prepend the system prompt to the first turn
    index = 0
    for chat in chats:
        quest = chat[0]
        if index == 0:
            quest = PROMPT + "我的问题是:{0}".format(quest)
        index = index + 1
        data['messages'].append({
            "role": "user",
            "content": quest
        })
        data['messages'].append({
            "role": "assistant",
            "content": chat[1]
        })
    if index == 0:
        question = PROMPT + "我的问题是:{0}".format(question)
    data['messages'].append({
        "role": "user",
        "content": question
    })
    payload = json.dumps(data)
    headers = {
        'Content-Type': 'application/json'
    }
    response = requests.request("POST", url, headers=headers, data=payload)
    return response.text
def get_access_token():
    """
    Generate an access token from the API key (AK) and secret key (SK).
    :return: access_token, or None on error
    """
    url = "https://aip.baidubce.com/oauth/2.0/token"
    params = {"grant_type": "client_credentials", "client_id": ERNIE_API_KEY, "client_secret": ERNIE_SECRET_KEY}
    return str(requests.post(url, params=params).json().get("access_token"))
# Query an OpenAI-compatible chat completions endpoint
def askOpenAI(question):
    global OPENAI_URL, chats
    # Drop a trailing '/' from OPENAI_URL
    if OPENAI_URL[-1] == '/':
        OPENAI_URL = OPENAI_URL[:-1]
    url = "{0}/v1/chat/completions".format(OPENAI_URL)
    data = {
        "model": OPENAI_MODEL,
        "messages": [
            {
                "role": "system",
                "content": PROMPT
            }
        ],
        "temperature": 0.3
    }
    # Replay the conversation history
    for chat in chats:
        data['messages'].append({
            "role": "user",
            "content": chat[0]
        })
        data['messages'].append({
            "role": "assistant",
            "content": chat[1]
        })
    data["messages"].append({
        "role": "user",
        "content": question
    })
    payload = json.dumps(data)
    headers = {
        'Content-Type': 'application/json',
        'Authorization': 'Bearer ' + OPENAI_API_KEY
    }
    response = requests.request("POST", url, headers=headers, data=payload)
    return response.text
# Check whether a local port is accepting connections
def check_port_available(port):
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.settimeout(1)
    result = sock.connect_ex(('localhost', port))
    sock.close()
    return result == 0
# Query a local model served by ollama
def askLocal(question):
    global chats
    data = {
        "model": "codegemma:2b",  # local model name
        "stream": False,  # disable streaming output
        "messages": [
            {
                "role": "system",
                "content": PROMPT
            }
        ],
        "temperature": 0.3
    }
    for chat in chats:
        data['messages'].append({
            "role": "user",
            "content": chat[0]
        })
        data['messages'].append({
            "role": "assistant",
            "content": chat[1]
        })
    data["messages"].append({
        "role": "user",
        "content": question
    })
    res = requests.post(
        "http://localhost:{0}/api/chat".format(LOCAL_PORT),
        json=data,
    )
    return res.text
def parse(answer, isLocal):
    answer = json.loads(answer)
    # Extract the model's reply text
    if isLocal:
        result = answer['message']['content']
    else:
        if BACKEND == "ernie":
            result = answer['result']
        elif BACKEND == "openai":
            result = answer['choices'][0]['message']['content']
    lines = result.split('\n')
    # Collect the lines between the ```bash fence and the closing ``` into one command string
    cmd = ''
    start = False
    for line in lines:
        if line.strip() == '```bash':
            start = True
            continue
        if start:
            if line.strip() == '```':
                # Drop the trailing '\n'
                cmd = cmd[:-1]
                break
            cmd += line + '\n'
    return result, cmd
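# For reference, the response fields read by parse() above (other fields are
# omitted; the command is extracted from the ```bash fenced block):
#   ernie:          {"result": "```bash\nuname -a\n```", ...}
#   openai:         {"choices": [{"message": {"content": "```bash\nuname -a\n```"}}], ...}
#   local (ollama): {"message": {"content": "```bash\nuname -a\n```"}, ...}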
def issueModel(question):
    global lock, LOCAL_PORT, chats
    # Show a spinner while waiting for the model
    lock = [True, '']
    try:
        _thread.start_new_thread(loading, (lock,))
    except Exception as e:
        print(e)
    cli = ""
    if USE_LOCAL:
        # Only use the local model if the local port is reachable
        if check_port_available(LOCAL_PORT):
            # Local ollama
            lock[1] = "\033[1;32m终端助理正在本地思考...\033[0m"
            answer = askLocal(question)
            result, cli = parse(answer, True)
            chats.append([question, result])
    if cli == "":
        lock[1] = "\033[1;32m终端助理正在云端思考...\033[0m"
        # Baidu Qianfan or an OpenAI-compatible API
        if BACKEND == "ernie":
            query = askERNIE
        elif BACKEND == "openai":
            query = askOpenAI
        answer = query(question)
        result, cli = parse(answer, False)
        chats.append([question, result])
    lock[0] = False
    return cli
def loading(lock):
    chars = ['⣾', '⣷', '⣯', '⣟', '⡿', '⢿', '⣻', '⣽']
    i = 0
    print('')
    while lock[0]:
        i = (i + 1) % len(chars)
        print('\033[A%s %s' %
              (chars[i], (lock[1] or '') if len(lock) >= 2 else ''))
        time.sleep(0.1)
# Common shell aliases, returned as a dict (a fixed table standing in for the
# output of the shell's `alias` builtin)
def alias():
    alias_result = {'egrep': 'egrep --color=auto',
                    'grep': 'grep --color=auto',
                    'fgrep': 'fgrep --color=auto',
                    'l': 'ls -CF',
                    'll': 'ls -alF',
                    'la': 'ls -A',
                    'ls': 'ls --color=auto'}
    return alias_result
def replaceAlias(cmd):
    # Look up the first word of the command in the alias table
    alias_result = alias()
    cmd_first = cmd.split(' ')[0]
    # If the first word is an alias, expand it
    if cmd_first in alias_result:
        cmd = alias_result[cmd_first] + cmd[len(cmd_first):]
    return cmd
if __name__ == '__main__':
    # No arguments: print usage
    if len(sys.argv) < 2:
        print("Copyright (c) 2024 Xiongwei Yu. Info-Terminal Copilot v1.0 \n\n \
Usage: \n \
erniecli command question : \"?? question\" or \"? question\" for short, quest a command and run\n \
erniecli config : set your config\n \
erniecli alias : show alias\n \
erniecli version : show version")
        sys.exit(0)
    # First argument is the subcommand
    cmd = sys.argv[1]
    if cmd == "config":
        set_config()
    # Print the ?? / ? alias definitions
    if cmd == "alias":
        print("alias erniecli='erniecli.py'")
        print("alias ??='erniecli.py command'")
        print("alias ?='erniecli.py command'")
    # Show version information
    if cmd == "version":
        # Purple
        print("\033[1;95m终端助理 Version 0.1\n\033[0m")
        # Green
        print("\033[1;32m基于大语言模型的对话式终端助理\n可使用百度千帆文心大模型ERNIE-3.5-8K或其他OpenAI接口的大语言模型\n让命令行插上AI的翅膀🪽\033[0m")
        sys.exit(0)
    # "command": ask the model for a bash command and offer to run it
    if cmd == "command":
        get_config()
        # All remaining arguments form the question; error in red if missing
        if len(sys.argv) < 3:
            print("\033[1;31m请输入你的意图\033[0m")
            sys.exit(0)
        question = ' '.join(sys.argv[2:])
        if question == "":
            print("\033[1;31m请输入你的意图\033[0m")
            sys.exit(0)
        # Ask the model, show the suggested command in green, and confirm before running it
        cli = ""
        while True:
            cli = issueModel(question)
            if cli == "":
                question = input("\033[A\033[2K\033[1;33mAI没有找到可执行的命令[\x1B[4m\033[37mA\x1B[0m\033[1;33mbort/您的需求] \033[0m")
                if question == "A" or question == "a" or question == "":
                    print('已取消')
                    sys.exit(0)
                else:
                    continue
            print('\033[F\033[K', end="\033[1;32m❯ \033[0m")
            print('\033[1;32m{0}\033[0m'.format(cli))
            question = input('\033[1;32m? \033[0m\033[1;90m是否执行 ⬆ ? [\x1B[4m\033[37mC\x1B[0m\033[1;90monfirm/\x1B[4m\033[37ma\x1B[0m\033[1;90mbort/您的需求] \033[0m')
            if question == 'C' or question == 'c' or question == '':
                sys.stdout.write('\033[A\r')
                sys.stdout.flush()
                sys.stdout.write('\033[K')
                sys.stdout.flush()
                # Expand aliases and run the command
                cmd = replaceAlias(cli)
                subprocess.run(cmd, shell=True)
                sys.exit(0)
            elif question == 'A' or question == 'a':
                print('已取消')
                sys.exit(0)
            else:
                continue
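# Example shell setup (illustrative; the rc file and paths depend on your environment):
#   erniecli.py alias >> ~/.bashrc   # append the aliases printed by the "alias" subcommand
#   source ~/.bashrc
#   ?? 显示系统版本信息              # ask for a command and confirm before running it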