erniecli.py 15 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462
  1. #!/usr/bin/python3
  2. import requests
  3. import json
  4. import sys
  5. import time
  6. import _thread
  7. import os
  8. import subprocess
  9. import socket
# Baidu Qianfan (ERNIE) credentials
ERNIE_API_KEY = ""
ERNIE_SECRET_KEY = ""
# OpenAI-compatible endpoint settings (base URL, key, model name)
OPENAI_URL = ""
OPENAI_API_KEY = ""
OPENAI_MODEL = ""
# Cloud backend selector: "openai" or "ernie"
BACKEND = ""
# Whether to try a local model (ollama) before the cloud backend
USE_LOCAL = False
LOCAL_PORT = 11434
PROMPT = """
你是一个Linux终端,请将我的请求转换成一条最简洁的bash命令。
只给一条命令,不要做任何解释或说明。
示例:
请求:显示系统版本信息。
输出:
```bash
uname -a
```
"""
# Conversation history: list of [question, answer] pairs reused on retries
chats = []
  31. # 设置API_KEY和SECRET_KEY
  32. def set_config():
  33. global ERNIE_API_KEY, ERNIE_SECRET_KEY, OPENAI_API_KEY, OPENAI_MODEL, OPENAI_URL, BACKEND
  34. if os.path.exists(os.path.expanduser('~/.config/erniecli/.ernierc')):
  35. get_config()
  36. elif not os.path.exists(os.path.expanduser('~/.config/erniecli')):
  37. os.makedirs(os.path.expanduser('~/.config/erniecli'))
  38. # 用黄色字体
  39. bidx = 0
  40. if BACKEND == "ernie":
  41. bidx = 1
  42. elif BACKEND == "openai":
  43. bidx = 2
  44. elif BACKEND == "":
  45. bidx = 0
  46. while True:
  47. choice_bidx = input("\033[1;33m请选择后端(0: None | 1: ernie | 2: openai)[current: {0}]:".format(bidx))
  48. if choice_bidx == '':
  49. bidx = bidx
  50. else:
  51. bidx = int(choice_bidx)
  52. if bidx == 1 or bidx == 2:
  53. break
  54. if bidx == 1:
  55. BACKEND = "ernie"
  56. elif bidx == 2:
  57. BACKEND = "openai"
  58. choose_local = input("\033[1;35m是否需要配置本地模式?(y/N)")
  59. if choose_local == "y":
  60. USE_LOCAL = True
  61. else:
  62. USE_LOCAL = False
  63. choice_ernie = input("\033[1;34m是否需要配置ernie?(y/N)")
  64. if choice_ernie == "y":
  65. apikey_value = input("\033[1;34m请输入API_KEY(当前值:"+ERNIE_API_KEY+"):")
  66. securekey_value = input("请输入SECRET_KEY(当前值"+ERNIE_SECRET_KEY+"):")
  67. if apikey_value != "":
  68. ERNIE_API_KEY = apikey_value.strip()
  69. if securekey_value != "":
  70. ERNIE_SECRET_KEY = securekey_value.strip()
  71. choice_openai = input("\033[1;36m是否需要配置openai?(y/N)")
  72. if choice_openai == "y":
  73. url_value = input("\033[1;36m请输入BASE URL(当前值:"+OPENAI_URL+"):")
  74. apikey_value = input("\033[1;36m请输入API_KEY(当前值:"+OPENAI_API_KEY+"):")
  75. model_value = input("请输入模型(当前值:"+OPENAI_MODEL+"):")
  76. if url_value != "":
  77. OPENAI_URL = url_value.strip()
  78. if apikey_value != "":
  79. OPENAI_API_KEY = apikey_value.strip()
  80. if model_value != "":
  81. OPENAI_MODEL = model_value.strip()
  82. with open(os.path.expanduser('~/.config/erniecli/.ernierc'), 'w', encoding='utf-8') as f:
  83. # 写入所有配置
  84. f.write("[GLOBAL]\n")
  85. f.write("BACKEND="+BACKEND+"\n")
  86. f.write("LOCAL="+str(USE_LOCAL)+"\n")
  87. f.write("\n[ERNIE]\n")
  88. f.write("API_KEY="+ERNIE_API_KEY+"\n")
  89. f.write("SECRET_KEY="+ERNIE_SECRET_KEY+"\n")
  90. f.write("\n[OPENAI]\n")
  91. f.write("URL="+OPENAI_URL+"\n")
  92. f.write("API_KEY="+OPENAI_API_KEY+"\n")
  93. f.write("MODEL="+OPENAI_MODEL+"\n")
  94. print("\033[1;32m配置成功\033[0m")
  95. sys.exit(0)
  96. # 读取$HOME/.config/erniecli/.ernierc文件中API_KEY和SECRET_KEY
  97. def get_config():
  98. global ERNIE_API_KEY, ERNIE_SECRET_KEY, OPENAI_API_KEY, OPENAI_MODEL, OPENAI_URL, BACKEND, USE_LOCAL
  99. config = os.path.expanduser('~/.config/erniecli/.ernierc')
  100. if not os.path.exists(config):
  101. print("\033[1;31m请进行使用erniecli进行配置\033[0m")
  102. sys.exit(0)
  103. # 读取配置文件,读取[global]的BACKEND
  104. group = "global"
  105. with open(config, 'r', encoding='utf-8') as f:
  106. for line in f.readlines():
  107. line = line.strip()
  108. if len(line) == 0 or line[0] == '#':
  109. continue
  110. elif line.startswith("["):
  111. group = line[1:-1]
  112. continue
  113. # 配置global
  114. if group == "GLOBAL":
  115. key, value = line.split('=')
  116. if key.strip() == "BACKEND":
  117. BACKEND = value.strip()
  118. if key.strip() == "LOCAL":
  119. USE_LOCAL = value.strip() == "True"
  120. if group == "ERNIE":
  121. key, value = line.split('=')
  122. if key.strip() == "API_KEY":
  123. ERNIE_API_KEY = value.strip()
  124. if key.strip() == "SECRET_KEY":
  125. ERNIE_SECRET_KEY = value.strip()
  126. if group == "OPENAI":
  127. key, value = line.split('=')
  128. if key.strip() == "API_KEY":
  129. OPENAI_API_KEY = value.strip()
  130. if key.strip() == "MODEL":
  131. OPENAI_MODEL = value.strip()
  132. if key.strip() == "URL":
  133. OPENAI_URL = value.strip()
  134. # 查询百度千帆
  135. def askERNIE(question):
  136. global chats
  137. url = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-speed-128k?access_token=" + get_access_token()
  138. data = {
  139. "messages": [
  140. ],
  141. "temperature": 0.95,
  142. "top_p": 0.8,
  143. "penalty_score": 1,
  144. "disable_search": False,
  145. "enable_citation": False,
  146. "response_format": "text"
  147. }
  148. index = 0
  149. for chat in chats:
  150. quest = chat[0]
  151. if index == 0:
  152. quest = PROMPT+"我的问题是:{0}".format(quest)
  153. index = index + 1
  154. data['messages'].append({
  155. "role": "user",
  156. "content": quest
  157. })
  158. data['messages'].append({
  159. "role": "assistant",
  160. "content": chat[1]
  161. })
  162. if index == 0:
  163. question = PROMPT+"我的问题是:{0}".format(question)
  164. data['messages'].append({
  165. "role": "user",
  166. "content": question
  167. })
  168. payload = json.dumps(data)
  169. headers = {
  170. 'Content-Type': 'application/json'
  171. }
  172. response = requests.request("POST", url, headers=headers, data=payload)
  173. return response.text
  174. def get_access_token():
  175. """
  176. 使用 AK,SK 生成鉴权签名(Access Token)
  177. :return: access_token,或是None(如果错误)
  178. """
  179. url = "https://aip.baidubce.com/oauth/2.0/token"
  180. params = {"grant_type": "client_credentials", "client_id": ERNIE_API_KEY, "client_secret": ERNIE_SECRET_KEY}
  181. return str(requests.post(url, params=params).json().get("access_token"))
  182. # 查询OpenAI接口,如赞同模型
  183. def askOpenAI(question):
  184. global OPENAI_URL, chats
  185. # 如果OPENAI_URL是/结尾,去掉最后的/
  186. if OPENAI_URL[-1] == '/':
  187. OPENAI_URL = OPENAI_URL[:-1]
  188. url = "{0}/v1/chat/completions".format(OPENAI_URL)
  189. data = {
  190. "model": OPENAI_MODEL,
  191. "messages": [
  192. {
  193. "role": "system",
  194. "content": PROMPT
  195. }
  196. ],
  197. "temperature": 0.3
  198. }
  199. for chat in chats:
  200. data['messages'].append({
  201. "role": "user",
  202. "content": chat[0]
  203. })
  204. data['messages'].append({
  205. "role": "assistant",
  206. "content": chat[1]
  207. })
  208. data["messages"].append({
  209. "role": "user",
  210. "content": question
  211. })
  212. payload = json.dumps(data)
  213. headers = {
  214. 'Content-Type': 'application/json',
  215. 'Authorization': 'Bearer '+OPENAI_API_KEY
  216. }
  217. response = requests.request("POST", url, headers=headers, data=payload)
  218. return response.text
  219. # 检查端口是否可用
  220. def check_port_available(port):
  221. sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
  222. sock.settimeout(1)
  223. result = sock.connect_ex(('localhost', port))
  224. if result == 0:
  225. return True
  226. else:
  227. return False
  228. # 查询本地
  229. def askLocal(question):
  230. global chats
  231. data = {
  232. "model": "codegemma:2b", # 使用 qwen:7b 模型
  233. "stream": False, # 禁用流式输出
  234. "messages": [
  235. {
  236. "role": "system",
  237. "content": PROMPT
  238. }
  239. ],
  240. "temperature": 0.3
  241. }
  242. for chat in chats:
  243. data['messages'].append({
  244. "role": "user",
  245. "content": chat[0]
  246. })
  247. data['messages'].append({
  248. "role": "assistant",
  249. "content": chat[1]
  250. })
  251. data["messages"].append({
  252. "role": "user",
  253. "content": question
  254. })
  255. res = requests.post(
  256. "http://localhost:{0}/api/chat".format(LOCAL_PORT),
  257. json=data,
  258. )
  259. return res.text
  260. def parse(answer, isLocal):
  261. answer = json.loads(answer)
  262. # 获取第一个结果
  263. if isLocal:
  264. result = answer['message']['content']
  265. else:
  266. if BACKEND=="ernie":
  267. result = answer['result']
  268. elif BACKEND=="openai":
  269. result = answer['choices'][0]['message']['content']
  270. lines = result.split('\n')
  271. # 获取lines中的```bash项到```项,并拼成一个字符串
  272. cmd = ''
  273. start = False
  274. for line in lines:
  275. if line.strip() == '```bash':
  276. start = True
  277. continue
  278. if start:
  279. if line.strip() == '```':
  280. # 去掉最后的\n
  281. cmd = cmd[:-1]
  282. break
  283. cmd += line+'\n'
  284. return result, cmd
  285. def issueModel(question):
  286. global lock, LOCAL_PORT, chats
  287. # 显示转圈
  288. lock = [True, '']
  289. try:
  290. _thread.start_new_thread(loading, (lock,))
  291. except Exception as e:
  292. print(e)
  293. cli = ""
  294. if USE_LOCAL:
  295. # 检查11434端口是否可用
  296. if check_port_available(LOCAL_PORT):
  297. # 本地ollama
  298. lock[1] ="\033[1;32m终端助理正在本地思考...\033[0m"
  299. answer = askLocal(question)
  300. chats.append([question, answer])
  301. result, cli = parse(answer, True)
  302. if cli== "":
  303. lock[1] ="\033[1;32m终端助理正在云端思考...\033[0m"
  304. # 百度千帆
  305. if BACKEND=="ernie":
  306. query = askERNIE
  307. elif BACKEND=="openai":
  308. query = askOpenAI
  309. # OpenAI API
  310. answer = query(question)
  311. result, cli = parse(answer, False)
  312. chats.append([question, result])
  313. lock[0] = False
  314. return cli
  315. def loading(lock):
  316. chars = ['⣾', '⣷', '⣯', '⣟', '⡿', '⢿', '⣻', '⣽']
  317. i = 0
  318. print('')
  319. while lock[0]:
  320. i = (i+1) % len(chars)
  321. print('\033[A%s %s' %
  322. (chars[i], lock[1] or '' if len(lock) >= 2 else ''))
  323. time.sleep(0.1)
  324. # 执行alias命令,获取输出结果保存为alias_result
  325. def alias():
  326. # 定义一个dict
  327. alias_result = {'egrep': 'egrep --color=auto'
  328. ,'grep': 'grep --color=auto'
  329. ,'fgrep': 'fgrep --color=auto'
  330. ,'grep': 'grep --color=auto'
  331. ,'l': 'ls -CF'
  332. ,'ll': 'ls -alF'
  333. ,'la': 'ls -A'
  334. ,'ls': 'ls --color=auto'}
  335. return alias_result
  336. def replaceAlias(cmd):
  337. # 获取alias_result
  338. alias_result = alias()
  339. # 获取cmd中的第一个单词
  340. cmd_first = cmd.split(' ')[0]
  341. # 如果cmd_first在alias_result中,则替换
  342. if cmd_first in alias_result:
  343. cmd = alias_result[cmd_first] + ' ' + cmd.replace(cmd_first, '')
  344. return cmd
  345. if __name__ == '__main__':
  346. global lock
  347. # 如果没有参数
  348. if len(sys.argv) < 2:
  349. print("Copyright (c) 2024 Xiongwei Yu. Info-Terminal Copilot v1.0 \n\n \
  350. Usage: \n \
  351. erniecli command question : \"?? question\" or \"? question\" for short, quest a command and run\n \
  352. erniecli config : set your config\n \
  353. erniecli alias : show alias\n \
  354. erniecli version : show version")
  355. sys.exit(0)
  356. # 获取第一个参数
  357. cmd = sys.argv[1]
  358. if cmd == "config":
  359. set_config()
  360. # 设置??别名
  361. if cmd == "alias":
  362. print ("alias erniecli='erniecli.py'")
  363. print ("alias ??='erniecli.py command'")
  364. print ("alias ??='erniecli.py command'")
  365. print ("alias ?='erniecli.py command'")
  366. print ("alias ?='erniecli.py command'")
  367. # 显示版本信息
  368. if cmd == "version":
  369. # 紫色显示
  370. print("\033[1;95m终端助理 Version 0.1\n\033[0m")
  371. # 用绿色字体显示“基于文心一言的对话式命令行助理”
  372. print("\033[1;32m基于大语言模型的对话式终端助理\n可使用百度千帆文心大模型ERNIE-3.5-8K或其他OpenAI接口的大语言模型\n让命令行插上AI的翅膀🪽\033[0m")
  373. sys.exit(0)
  374. # 如果cmd为command,调用ask函数
  375. if cmd == "command":
  376. get_config()
  377. # 获取第二个参数
  378. # 如果第二个参数为空,则输出错误,用红色字体显示
  379. if len(sys.argv) < 3:
  380. print("\033[1;31m请输入你的意图\033[0m")
  381. sys.exit(0)
  382. # 获取后面的所有参数,并拼接成字符串
  383. question = ' '.join(sys.argv[2:])
  384. # question = sys.argv[2]
  385. # 如果question为空,则输出错误,用红色字体显示
  386. if question == "":
  387. print("\033[1;31m请输入你的意图\033[0m")
  388. sys.exit(0)
  389. # 调用ask函数,并输出结果
  390. #使用绿色字体
  391. cli = ""
  392. while True:
  393. cli = issueModel(question)
  394. if cli == "":
  395. question = input("\033[A\033[2K\033[1;33mAI没有找到可执行的命令[\x1B[4m\033[37mA\x1B[0m\033[1;33mbort/您的需求] \033[0m")
  396. if question == "A" or question == "a" or question == "":
  397. print('已取消')
  398. sys.exit(0)
  399. else:
  400. continue
  401. print('\033[F\033[K',end = "\033[1;32m❯ \033[0m")
  402. print('\033[1;32m{0}\033[0m'.format(cli))
  403. question = input('\033[1;32m? \033[0m\033[1;90m是否执行 ⬆ ? [\x1B[4m\033[37mC\x1B[0m\033[1;90monfirm/\x1B[4m\033[37ma\x1B[0m\033[1;90mbort/您的需求] \033[0m')
  404. if question == 'C' or question == 'c' or question == '':
  405. sys.stdout.write('\033[A\r')
  406. sys.stdout.flush()
  407. sys.stdout.write('\033[K')
  408. sys.stdout.flush()
  409. # 执行命令,并输出结果
  410. # print('')
  411. cmd = replaceAlias(cli)
  412. subprocess.run(cli, shell=True)
  413. sys.exit(0)
  414. elif question == 'A' or question == 'a':
  415. print('已取消')
  416. sys.exit(0)
  417. else:
  418. continue