webui : add ?m=... and ?q=... params (#12148)

* webui : add ?m=... and ?q=... params

* also clear prefilledMessage variable

* better approach

* fix comment

* test: bump timeout on GITHUB_ACTION
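
As a quick usage sketch of the new parameters (the host and port below are only assumptions; use whatever address llama-server is actually listening on):

// Hypothetical example URLs for the new query parameters.
const base = 'http://localhost:8080/'; // assumed llama-server address

// "?m=..." only prefills the chat input box:
const prefillOnly = `${base}?m=${encodeURIComponent('Explain the KV cache')}`;

// "?q=..." prefills the input and sends it immediately:
const prefillAndSend = `${base}?q=${encodeURIComponent('Explain the KV cache')}`;

console.log(prefillOnly);
console.log(prefillAndSend);
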
commit 7b69003af7 (parent ece9745bb8)
Author: Xuan-Son Nguyen (committed by GitHub)
Date:   2025-03-03 11:42:45 +01:00
4 changed files with 48 additions and 3 deletions

Binary file not shown.


@@ -26,7 +26,10 @@ from re import RegexFlag
 import wget
-DEFAULT_HTTP_TIMEOUT = 12 if "LLAMA_SANITIZE" not in os.environ else 30
+DEFAULT_HTTP_TIMEOUT = 12
+
+if "LLAMA_SANITIZE" in os.environ or "GITHUB_ACTION" in os.environ:
+    DEFAULT_HTTP_TIMEOUT = 30
 class ServerResponse:


@@ -2,7 +2,7 @@ import { useEffect, useMemo, useRef, useState } from 'react';
 import { CallbackGeneratedChunk, useAppContext } from '../utils/app.context';
 import ChatMessage from './ChatMessage';
 import { CanvasType, Message, PendingMessage } from '../utils/types';
-import { classNames, throttle } from '../utils/misc';
+import { classNames, cleanCurrentUrl, throttle } from '../utils/misc';
 import CanvasPyInterpreter from './CanvasPyInterpreter';
 import StorageUtils from '../utils/storage';
 import { useVSCodeContext } from '../utils/llama-vscode';
@@ -18,6 +18,24 @@ export interface MessageDisplay {
   isPending?: boolean;
 }
 
+/**
+ * If the current URL contains "?m=...", prefill the message input with the value.
+ * If the current URL contains "?q=...", prefill and SEND the message.
+ */
+const prefilledMsg = {
+  content() {
+    const url = new URL(window.location.href);
+    return url.searchParams.get('m') ?? url.searchParams.get('q') ?? '';
+  },
+  shouldSend() {
+    const url = new URL(window.location.href);
+    return url.searchParams.has('q');
+  },
+  clear() {
+    cleanCurrentUrl(['m', 'q']);
+  },
+};
+
 function getListMessageDisplay(
   msgs: Readonly<Message[]>,
   leafNodeId: Message['id']
@@ -81,7 +99,7 @@ export default function ChatScreen() {
     canvasData,
     replaceMessageAndGenerate,
   } = useAppContext();
-  const [inputMsg, setInputMsg] = useState('');
+  const [inputMsg, setInputMsg] = useState(prefilledMsg.content());
   const inputRef = useRef<HTMLTextAreaElement>(null);
 
   const { extraContext, clearExtraContext } = useVSCodeContext(
@@ -172,6 +190,22 @@
 
   const hasCanvas = !!canvasData;
 
+  useEffect(() => {
+    if (prefilledMsg.shouldSend()) {
+      // send the prefilled message if needed
+      sendNewMessage();
+    } else {
+      // otherwise, focus on the input and move the cursor to the end
+      if (inputRef.current) {
+        inputRef.current.focus();
+        inputRef.current.selectionStart = inputRef.current.value.length;
+      }
+    }
+    prefilledMsg.clear();
+    // no need to keep track of sendNewMessage
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+  }, [inputRef]);
+
   // due to some timing issues of StorageUtils.appendMsg(), we need to make sure the pendingMsg is not duplicated upon rendering (i.e. appears once in the saved conversation and once in the pendingMsg)
   const pendingMsgDisplay: MessageDisplay[] =
     pendingMsg && messages.at(-1)?.msg.id !== pendingMsg.id
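
Taken together, the prefilledMsg helper and the mount-time useEffect above seed the chat input from the URL once and then scrub the parameters. Below is a minimal sketch of that same flow outside React; `handlePrefilledMessage` and the `sendNewMessage` parameter are hypothetical stand-ins, not the component's actual code:

// Illustrative sketch of the mount-time flow added in this commit.
function handlePrefilledMessage(sendNewMessage: (msg: string) => void): void {
  const url = new URL(window.location.href);
  const msg = url.searchParams.get('m') ?? url.searchParams.get('q') ?? '';
  const shouldSend = url.searchParams.has('q');

  if (msg && shouldSend) {
    // "?q=..." -> send immediately
    sendNewMessage(msg);
  }
  // "?m=..." -> the component instead places `msg` into the textarea and focuses it

  // Strip the params so a later page refresh does not prefill or re-send the message
  url.searchParams.delete('m');
  url.searchParams.delete('q');
  window.history.replaceState({}, '', url.toString());
}

Clearing the URL via history.replaceState is what keeps a refresh from re-sending the "?q=" prompt; it rewrites the address bar without triggering a navigation.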


@@ -118,3 +118,11 @@ export const throttle = <T extends unknown[]>(
     }, delay);
   };
 };
+
+export const cleanCurrentUrl = (removeQueryParams: string[]) => {
+  const url = new URL(window.location.href);
+  removeQueryParams.forEach((param) => {
+    url.searchParams.delete(param);
+  });
+  window.history.replaceState({}, '', url.toString());
+};
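
For reference, this is roughly what the new helper does to the address bar (the starting URL is only an example):

// Usage sketch for the cleanCurrentUrl helper added above.
// Suppose the page was opened as:  http://localhost:8080/?q=hello&other=1
cleanCurrentUrl(['m', 'q']);
// history.replaceState rewrites the visible URL to:  http://localhost:8080/?other=1
// No navigation or reload happens; only the address bar and the current history entry change.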