consistent semicolons

Author: Henri Vasserman
Date:   2023-07-20 00:58:16 +03:00
Parent: 890d1b8446
Commit: 43694ca867
Signature: GPG key ID 2995FC0F58B1A986 (no known key found for this signature in database)


@@ -108,9 +108,9 @@
   <script type="module">
     import {
       html, h, signal, effect, computed, render, useSignal, useEffect, useRef
-    } from '/index.js';
-    import { llama } from '/completion.js';
+    } from '/index.js'
+    import { llama } from '/completion.js'
     const session = signal({
       prompt: "This is a conversation between user and llama, a friendly chatbot. respond in simple markdown.",
@@ -140,35 +140,35 @@
     const transcriptUpdate = (transcript) => {
       session.value = {
         ...session.value,
-        transcript
+        transcript,
       }
     }
     // simple template replace
     const template = (str, extraSettings) => {
-      let settings = session.value;
+      let settings = session.value
       if (extraSettings) {
-        settings = { ...settings, ...extraSettings };
+        settings = { ...settings, ...extraSettings }
       }
-      return String(str).replaceAll(/\{\{(.*?)\}\}/g, (_, key) => template(settings[key]));
+      return String(str).replaceAll(/\{\{(.*?)\}\}/g, (_, key) => template(settings[key]))
     }
     // send message to server
     const chat = async (msg) => {
       if (controller.value) {
-        console.log('already running...');
-        return;
+        console.log('already running...')
+        return
       }
-      controller.value = new AbortController();
+      controller.value = new AbortController()
       transcriptUpdate([...session.value.transcript, ["{{user}}", msg]])
       const prompt = template(session.value.template, {
         message: msg,
         history: session.value.transcript.flatMap(([name, message]) => template(session.value.historyTemplate, {name, message})).join("\n"),
-      });
-      let currentMessage = '';
+      })
+      let currentMessage = ''
       const history = session.value.transcript
       const llamaParams = {
@@ -177,8 +177,8 @@
       }
       for await (const chunk of llama(prompt, llamaParams, { controller: controller.value })) {
-        const data = chunk.data;
-        currentMessage += data.content;
+        const data = chunk.data
+        currentMessage += data.content
         // remove leading whitespace
         currentMessage = currentMessage.replace(/^\s+/, "")
@@ -186,42 +186,42 @@
         transcriptUpdate([...history, ["{{char}}", currentMessage]])
         if (data.stop) {
-          console.log("Completion finished: '", currentMessage, "', summary: ", data);
+          console.log("Completion finished: '", currentMessage, "', summary: ", data)
         }
         if (data.timings) {
-          llamaStats.value = data.timings;
+          llamaStats.value = data.timings
         }
       }
-      controller.value = null;
+      controller.value = null
     }
     function MessageInput() {
       const message = useSignal("")
       const stop = (e) => {
-        e.preventDefault();
+        e.preventDefault()
         if (controller.value) {
-          controller.value.abort();
-          controller.value = null;
+          controller.value.abort()
+          controller.value = null
        }
      }
      const reset = (e) => {
-        stop(e);
-        transcriptUpdate([]);
+        stop(e)
+        transcriptUpdate([])
      }
      const submit = (e) => {
-        stop(e);
-        chat(message.value);
-        message.value = "";
+        stop(e)
+        chat(message.value)
+        message.value = ""
      }
      const enterSubmits = (event) => {
        if (event.which === 13 && !event.shiftKey) {
-          submit(event);
+          submit(event)
        }
      }
@@ -240,7 +240,7 @@
    }
    const ChatLog = (props) => {
-      const messages = session.value.transcript;
+      const messages = session.value.transcript
      const container = useRef(null)
      useEffect(() => {
@@ -252,13 +252,13 @@
      const chatLine = ([user, msg]) => {
        return html`<p key=${msg}><strong>${template(user)}:</strong> <${Markdownish} text=${template(msg)} /></p>`
-      };
+      }
      return html`
        <section id="chat" ref=${container}>
          ${messages.flatMap(chatLine)}
-        </section>`;
-    };
+        </section>`
+    }
    const ConfigForm = (props) => {
      const updateSession = (el) => session.value = { ...session.value, [el.target.name]: el.target.value }
@@ -331,9 +331,9 @@
        .replace(/_(.*?)_/g, '<em>$1</em>')
        .replace(/```.*?\n([\s\S]*?)```/g, '<pre><code>$1</code></pre>')
        .replace(/`(.*?)`/g, '<code>$1</code>')
-        .replace(/\n/gim, '<br />');
-      return html`<span dangerouslySetInnerHTML=${{ __html: md }} />`;
-    };
+        .replace(/\n/gim, '<br />')
+      return html`<span dangerouslySetInnerHTML=${{ __html: md }} />`
+    }
    const ModelGenerationInfo = (params) => {
      if (!llamaStats.value) {
@@ -367,10 +367,10 @@
            <p>Powered by <a href="https://github.com/ggerganov/llama.cpp">llama.cpp</a> and <a href="https://ggml.ai">ggml.ai</a>.</p>
          </footer>
        </div>
-      `;
+      `
    }
-    render(h(App), document.body);
+    render(h(App), document.body)
  </script>
 </head>
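
For context on the template lines touched above: the UI expands {{key}} placeholders (such as {{user}} and {{char}}) by looking each key up in the session settings and expanding the result recursively. Below is a minimal standalone sketch of that behaviour, using an illustrative settings object and helper name (expand) in place of the real session state and template() helper:

// Illustrative stand-in for session.value; the real keys live in the UI's session signal.
const settings = {
  user: 'User',
  char: 'llama',
  message: 'Hello!',
  template: '{{char}} answers {{user}}: {{message}}'
}

// Same shape as the template() helper in the diff: each {{key}} is looked up
// in the settings (merged with any extra values) and expanded recursively.
const expand = (str, extra) => {
  const ctx = { ...settings, ...extra }
  return String(str).replaceAll(/\{\{(.*?)\}\}/g, (_, key) => expand(ctx[key]))
}

console.log(expand(settings.template))
// -> "llama answers User: Hello!"
console.log(expand(settings.template, { message: 'How are you?' }))
// -> "llama answers User: How are you?"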