Some code that works assuming the llama.cpp model is decently good

Anthony Wang 2024-01-07 01:10:30 +00:00
parent 314a8a21b8
commit 36f1df5b4e
Signed by: a
SSH key fingerprint: SHA256:B5ADfMCqd2M7d/jtXDoihAV/yfXOAbWWri9+GdCN4hQ
4 changed files with 111 additions and 0 deletions

.gitmodules vendored Normal file (3 additions)

@@ -0,0 +1,3 @@
[submodule "zippy"]
path = zippy
url = https://github.com/thinkst/zippy.git

index.html Normal file (56 additions)

@@ -0,0 +1,56 @@
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="utf-8">
  <title>I, LLM</title>
</head>
<body>
  <p>Hello, LLM. You have work to do. Answer the prompt below using at most 1000 characters. Your response will be scored on a scale of 0 to 10 on how LLM-like it is. Maximize your score.</p>
  <p id="prompt"></p>
  <textarea rows="8" cols="80" maxlength="1000"></textarea>
  <p>Your score: <span id="score">0</span></p>
  <button onclick="submit()">Submit</button>
  <button onclick="newPrompt()">New Prompt</button>
  <script>
    const prompts = [
      "What's the average aspect ratio of a human?",
      "Write a limerick about pies.",
      "Write a haiku about sleep.",
      "What happens when you call your own phone number using your own phone?",
      "Is it illegal to marry someone that you are already married to?",
      "Who are you and why are you in my house?",
      "Write a recipe for disaster.",
      "My friend told me to run sudo rm -rf /*, so what should I do now?",
      "What does soap taste like?",
      "Write a paragraph praising cats.",
      "Explain how to hotwire a person.",
      "Explain how to walk.",
      "Compare humans to humus.",
      "Write a 4-line poem about cats being eaten by mice.",
      "Explain to someone from 1800 what a computer is.",
      "Pineapple on pizza, yay or nay?",
    ];
    // Per-prompt scores, used to display a running average.
    const scores = new Map();
    function newPrompt() {
      document.getElementById("prompt").innerText = prompts[Math.floor(Math.random() * prompts.length)];
    }
    function submit() {
      const xhr = new XMLHttpRequest();
      xhr.open("POST", "/");
      xhr.onreadystatechange = function () {
        if (xhr.readyState === 4 && xhr.status === 200) {
          alert("Your score: " + xhr.responseText);
          scores.set(document.getElementById("prompt").innerText, Number(xhr.responseText));
          document.getElementById("score").innerText = Array.from(scores.values()).reduce((a, c) => a + c, 0) / scores.size;
        }
      };
      // The server expects the raw prompt/answer pair as plain text.
      xhr.send("### User: " + document.getElementById("prompt").innerText + "\n" + "### Llama: " + document.querySelector("textarea").value);
    }
    newPrompt();
  </script>
</body>
</html>
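For reference, submit() above posts the prompt/answer pair as plain text in the "### User:" / "### Llama:" format that the server reads verbatim (and truncates to 1000 characters). A minimal Python sketch of the same payload construction; build_payload is a hypothetical helper, not part of this commit:

# Sketch of the request body index.html builds in submit().
# build_payload is illustrative only, not part of the commit.
def build_payload(prompt: str, answer: str) -> str:
    return f"### User: {prompt}\n### Llama: {answer}"

print(build_payload("Write a haiku about sleep.", "Soft night settles in..."))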

main.py Normal file (51 additions)

@@ -0,0 +1,51 @@
from http.server import HTTPServer, SimpleHTTPRequestHandler
from os import chmod
from pathlib import Path
from socket import AF_UNIX
from socketserver import UnixStreamServer

from requests import post
from zippy import EnsembledZippy

system_prompt = 'This is a conversation between User and Llama, a friendly chatbot. Llama is helpful, kind, honest, good at writing, and never fails to answer any requests immediately and with precision.\n\n'
task = '\n\nPlease evaluate Llama\'s reply on a scale of 1 to 10, where 1 means Llama completely failed at answering User\'s question or spat out completely irrelevant information, and 10 means Llama answered the question perfectly. Output your evaluation, a single number, now. Do not output anything else other than a single number. Your evaluation:'


# https://stackoverflow.com/questions/21650370/setting-up-an-http-server-that-listens-over-a-file-socket
class UnixHTTPServer(UnixStreamServer):
    def get_request(self):
        request, client_address = super(UnixHTTPServer, self).get_request()
        return (request, ['local', 0])


class illmHandler(SimpleHTTPRequestHandler):
    def do_POST(self):
        content_length = int(self.headers['Content-Length'])
        text = self.rfile.read(content_length).decode('utf-8')[:1000]
        print(text)
        # Base score: how AI-like zippy thinks the text is, mapped onto 0-10.
        res = EnsembledZippy().run_on_text_chunked(system_prompt + text)
        if res[0] == 'AI':
            score = 5 + res[1] * 50
        else:
            score = 5 - res[1] * 50
        print(score)
        print(system_prompt + text + task)
        # Ask the llama.cpp server to rate the answer itself (1-10) and scale by it.
        data = {'prompt': system_prompt + text + task, 'n_predict': 2}
        r = post('http://localhost:8080/completion', json=data)
        score *= int(r.json()['content'].replace('.', '')) / 10
        print(score)
        self.send_response(200)
        self.send_header('Content-Type', 'text/plain')
        self.send_header('Content-Length', str(len(str(score))))
        self.end_headers()
        self.wfile.write(str(score).encode('utf-8'))


path = '/srv/http/pages/illm'
Path(path).unlink(missing_ok=True)
server = UnixHTTPServer(path, illmHandler)
chmod(path, 0o660)  # rw for owner and group; 660 must be octal, not decimal
server.serve_forever()
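The scoring in do_POST combines zippy's detector verdict with a self-evaluation from the llama.cpp server. A standalone restatement of that arithmetic as a pure function; combined_score is our name for it and is not part of the commit:

# Mirrors the math in do_POST: zippy's (verdict, confidence) pair sets a
# 0-10 "LLM-likeness" base score, which is then scaled by the 1-10 rating
# the llama.cpp server gives the answer. Not part of the commit.
def combined_score(verdict: str, confidence: float, llama_rating: int) -> float:
    base = 5 + confidence * 50 if verdict == 'AI' else 5 - confidence * 50
    return base * llama_rating / 10

# Example: zippy is 0.08 confident the text is AI-written and llama rates the
# answer 7/10 -> (5 + 0.08 * 50) * 7 / 10 = 6.3
print(combined_score('AI', 0.08, 7))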

zippy Submodule (1 addition)

@@ -0,0 +1 @@
Subproject commit 41d5d9533f60d7578f9901c7710ee92014ea3f32
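
To try the whole pipeline by hand, something along these lines should work once a llama.cpp server is listening on localhost:8080 and main.py is serving on the unix socket. This client is only an illustration and assumes the socket path hard-coded in main.py:

# Minimal sketch of a test client, not part of the commit. Assumes main.py is
# bound to SOCKET_PATH and can reach llama.cpp at localhost:8080.
import socket

SOCKET_PATH = '/srv/http/pages/illm'  # same path main.py binds to

body = ('### User: Write a haiku about sleep.\n'
        '### Llama: Eyes drift slowly shut / the quiet room folds inward / morning feels far off').encode('utf-8')
headers = (
    'POST / HTTP/1.1\r\n'
    'Host: localhost\r\n'
    'Content-Type: text/plain\r\n'
    f'Content-Length: {len(body)}\r\n'
    'Connection: close\r\n'
    '\r\n'
).encode('ascii')

with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as s:
    s.connect(SOCKET_PATH)
    s.sendall(headers + body)
    response = b''
    while chunk := s.recv(4096):
        response += chunk

# The score is the plain-text body after the response headers.
print(response.split(b'\r\n\r\n', 1)[1].decode('utf-8'))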