From 6274963b496468226eb511242f7d3474bed2ce8e Mon Sep 17 00:00:00 2001
From: nsde
Date: Mon, 26 Dec 2022 14:12:49 +0100
Subject: [PATCH] Error detection etc.

---
 .gitignore             |  2 ++
 README.md              | 24 +++++++++++++++++++++++-
 requirements.txt       |  4 ++++
 run.py                 | 20 ++++++++++++++++++++
 violet/__init__.py     |  4 ++++
 violet/error-model.txt | 27 +++++++++++++++++++++++++++
 violet/gpt.py          | 39 +++++++++++++++++++++++++++++++++++++++
 violet/helpers.py      |  3 +++
 violet/main.py         | 34 ++++++++++++++++++++++++++++++++++
 violet/model.txt       | 42 ++++++++++++++++++++++++++++++++++++++++++
 violet/sandbox.py      | 27 +++++++++++++++++++++++++++
 voice.py               | 21 +++++++++++++++++++++
 12 files changed, 246 insertions(+), 1 deletion(-)
 create mode 100644 requirements.txt
 create mode 100644 run.py
 create mode 100644 violet/__init__.py
 create mode 100644 violet/error-model.txt
 create mode 100644 violet/gpt.py
 create mode 100644 violet/helpers.py
 create mode 100644 violet/main.py
 create mode 100644 violet/model.txt
 create mode 100644 violet/sandbox.py
 create mode 100644 voice.py

diff --git a/.gitignore b/.gitignore
index b6e4761..855b8d2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,5 @@
+_temp.py
+
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]
diff --git a/README.md b/README.md
index d875e37..a3307b2 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,24 @@
-# violet
+# ***V i o l e t G P T*** 🤖
 A next-generation chat bot based on OpenAI, which can access your files, open the webbrowser and more!
+
+## Features
+- Accurate mathematical calculations
+- File system access
+- Voice support (SpeechToText + TextToSpeech)
+
+### Planned
+- API access (save your API tokens for various services in a `.env` file, and the bot will be able to look up e.g. the weather.)
+- Web UI (the sandbox has a much higher priority, though! No web UI will be worked on until there is a proper sandbox.)
+
+## Warning
+The sandbox is still under development, which means that, in theory, the bot can do anything.
+
+Never, ever run the bot with administrator permissions. You should always make sure the sandbox can't delete any important files!
+Additionally, never give others access to the sandbox features! Malicious actors could use this for remote code execution.
+
+## Support
+✅ Debian-based Linux distros (such as Mint 20.2)
+
+✅ Windows 10 (and above)
+
+✅ MacOS (probably - testers needed!)
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..3f4e1ba
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,4 @@
+openai
+colorama
+python-dotenv
+pyttsx3
\ No newline at end of file
diff --git a/run.py b/run.py
new file mode 100644
index 0000000..f34fd20
--- /dev/null
+++ b/run.py
@@ -0,0 +1,20 @@
+import sys
+import colorama
+
+import violet
+
+colorama.init(autoreset=True)
+
+if len(sys.argv) > 1:
+    prompt = ' '.join(sys.argv[1:])
+    print(violet.respond(prompt)['colored'])
+
+else:
+    while True:
+        try:
+            prompt = input(colorama.Fore.BLUE)
+        except KeyboardInterrupt:
+            sys.exit(0)
+
+        print(colorama.Fore.RESET, end='')
+        print(violet.respond(prompt)['colored'])
diff --git a/violet/__init__.py b/violet/__init__.py
new file mode 100644
index 0000000..2afa198
--- /dev/null
+++ b/violet/__init__.py
@@ -0,0 +1,4 @@
+from .gpt import *
+from .main import *
+from .helpers import *
+from .sandbox import *
diff --git a/violet/error-model.txt b/violet/error-model.txt
new file mode 100644
index 0000000..34f939e
--- /dev/null
+++ b/violet/error-model.txt
@@ -0,0 +1,27 @@
+The following is a conversation between a Human and an AI which explains Python errors.
+The AI gives instructions on how the issue could be solved, if necessary.
+
+Human: ```
+Traceback (most recent call last):
+  File "sandbox/_temp.py", line 3, in <module>
+    subprocess.call(["nautilus"])
+  File "/usr/lib/python3.8/subprocess.py", line 340, in call
+    with Popen(*popenargs, **kwargs) as p:
+  File "/usr/lib/python3.8/subprocess.py", line 858, in __init__
+    self._execute_child(args, executable, preexec_fn, close_fds,
+  File "/usr/lib/python3.8/subprocess.py", line 1704, in _execute_child
+    raise child_exception_type(errno_num, err_msg, err_filename)
+FileNotFoundError: [Errno 2] No such file or directory: 'nautilus'
+```
+AI: Sorry, the process "nautilus" could not be found. Please try another command.
+Human: ```
+Traceback (most recent call last):
+  File "run.py", line 20, in <module>
+    print(violet.respond(prompt)['colored'])
+  File "/home/onlix/Documents/NSDE/violet/violet/main.py", line 18, in respond
+    response = sandbox.run(code)
+  File "/home/onlix/Documents/NSDE/violet/violet/sandbox.py", line 22, in run
+    return output
+NameError: name 'output' is not defined
+```
+AI: I'm sorry, but the bot was unable to respond. Please try asking a different question.
\ No newline at end of file
diff --git a/violet/gpt.py b/violet/gpt.py
new file mode 100644
index 0000000..413f1fb
--- /dev/null
+++ b/violet/gpt.py
@@ -0,0 +1,39 @@
+import os
+import openai
+
+from dotenv import load_dotenv
+
+load_dotenv()
+
+openai.api_key = os.getenv('OPENAI_API_KEY')
+error_model = open('violet/error-model.txt').read()
+
+def generate(history: str, prompt: str) -> str:
+    completions = openai.Completion.create(
+        model="text-davinci-003",
+        prompt=f'{history}\nHuman: {prompt}\nAI: ',
+        temperature=0.9,
+        max_tokens=300,
+        top_p=1,
+        frequency_penalty=0.0,
+        presence_penalty=0.6,
+        stop=[" Human:", " AI:"]
+    )
+
+    message = completions.choices[0].text
+    return message[1:]
+
+def explain_error(prompt: str) -> str:
+    completions = openai.Completion.create(
+        model="text-davinci-003",
+        prompt=f'{error_model}\nHuman: {prompt}\nAI: ',
+        temperature=0.9,
+        max_tokens=300,
+        top_p=1,
+        frequency_penalty=0.0,
+        presence_penalty=0.6,
+        stop=[" Human:", " AI:"]
+    )
+
+    message = completions.choices[0].text
+    return message[1:]
diff --git a/violet/helpers.py b/violet/helpers.py
new file mode 100644
index 0000000..890bc67
--- /dev/null
+++ b/violet/helpers.py
@@ -0,0 +1,3 @@
+import platform
+
+opsys = f"{platform.system().replace('Darwin', 'MacOS')} {platform.machine()}"
diff --git a/violet/main.py b/violet/main.py
new file mode 100644
index 0000000..0c47ecf
--- /dev/null
+++ b/violet/main.py
@@ -0,0 +1,34 @@
+import colorama
+
+from . import gpt
+from . import helpers
+from . import sandbox
+
+CODE_START = '```\n'
+CODE_END = '\n```'
+
+history = open('violet/model.txt').read().replace('#OS#', helpers.opsys)
+
+def respond(prompt: str) -> dict:
+    global history
+    response = gpt.generate(history=history, prompt=prompt)
+
+    if CODE_START in response and CODE_END in response:
+        code = response.split(CODE_START)[1].split(CODE_END)[0]
+        result = sandbox.run(code)
+        response = result['message']
+        colored = (colorama.Fore.GREEN if result['status'] == 'success' else colorama.Fore.RED) + response
+    else:
+        colored = colorama.Fore.YELLOW + response
+
+    history += \
+f"""
+Human: {prompt}
+AI: {response}
+"""
+
+    return {
+        'plain': response,
+        'colored': colored
+    }
+
diff --git a/violet/model.txt b/violet/model.txt
new file mode 100644
index 0000000..7d2d565
--- /dev/null
+++ b/violet/model.txt
@@ -0,0 +1,42 @@
+The following is a conversation with an AI assistant.
+The assistant is helpful, creative, clever, and smart.
+For some specific questions, such as the time, mathematical equations and more, the assistant writes Python code, which is then run and its output displayed.
+The code is run on #OS#, so all code blocks are written to work on #OS#.
+
+Human: Hello, who are you?
+AI: I am an AI created by OpenAI. How can I help you today?
+Human: What's the current time?
+AI: ```
+import time
+
+current_time = time.localtime()
+current_time = time.strftime("%H:%M:%S", current_time)
+
+print(f"The current time is {current_time}.")
+```
+Human: Thanks. Please create a file called "todo.txt" and save "Read a book" in it.
+AI: ```
+with open("todo.txt", "w") as f:
+    f.write("Read a book")
+print("Done, the file has been written.")
+```
+Human: What is the capital of France?
+AI: Paris.
+Human: Please read my todo.txt.
+AI: ```
+with open("todo.txt", "r", encoding="utf8") as f:
+    print("The content of todo.txt is: " + f.read())
+```
+Human: Thanks!
+AI: You're welcome.
+Human: Now, please open YouTube.
+AI: ```
+import webbrowser
+
+webbrowser.open("https://youtube.com")
+print("Done.")
+```
+Human: What's 3 to the power of 4 plus 6 multiplied by 5?
+AI: ```
+print(f"{3**4+6*5}")
+```
\ No newline at end of file
diff --git a/violet/sandbox.py b/violet/sandbox.py
new file mode 100644
index 0000000..dc883ef
--- /dev/null
+++ b/violet/sandbox.py
@@ -0,0 +1,27 @@
+import colorama
+import subprocess
+
+from . import gpt
+
+def run(code: str) -> dict:
+    print(colorama.Style.DIM + 'Launching sandbox...')
+    with open('sandbox/_temp.py', 'w') as f:
+        f.write(code)
+
+    # Run the generated code in a subprocess and capture stdout and stderr separately.
+    process = subprocess.Popen(['python', 'sandbox/_temp.py'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    stdout, stderr = process.communicate()
+
+    output = stdout.decode('utf8').strip()
+    errors = stderr.decode('utf8').strip()
+
+    if errors:
+        return {
+            'status': 'error',
+            'message': gpt.explain_error(errors)
+        }
+
+    return {
+        'status': 'success',
+        'message': output
+    }
diff --git a/voice.py b/voice.py
new file mode 100644
index 0000000..85b08be
--- /dev/null
+++ b/voice.py
@@ -0,0 +1,21 @@
+import sys
+import colorama
+import pyttsx3
+
+import violet
+
+colorama.init(autoreset=True)
+engine = pyttsx3.init()
+
+while True:
+    try:
+        prompt = input(colorama.Fore.BLUE)
+
+    except KeyboardInterrupt:
+        sys.exit(0)
+
+    answer = violet.respond(prompt)
+    print(answer['colored'])
+
+    engine.say(answer['plain'])
+    engine.runAndWait()
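
For anyone trying the patch locally, a minimal sketch of calling the new `violet.respond()` entry point directly (outside the `run.py` / `voice.py` loops) could look like the snippet below. It assumes the script is started from the repository root so the relative paths `violet/model.txt`, `violet/error-model.txt`, and `sandbox/_temp.py` resolve, that a `sandbox/` directory already exists, and that `OPENAI_API_KEY` is set in `.env` or the environment; none of this is part of the patch itself.

```python
# Hypothetical smoke test for the violet package added in this patch.
# Run from the repository root with OPENAI_API_KEY available via .env
# or the environment, and with a sandbox/ directory present.
import violet

# respond() returns a dict with a plain-text answer and a colorama-colored variant.
answer = violet.respond("What's the current time?")

print(answer['plain'])    # suitable for logging or text-to-speech
print(answer['colored'])  # same text, prefixed with a colorama color code
```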