Mirror of https://github.com/Jermolene/TiddlyWiki5, synced 2025-02-02 12:19:11 +00:00
Fix Llamafile compatibility

parent 2faba2e820
commit 3b07607d1b
@@ -7,13 +7,13 @@ tags: $:/tags/Global
 
 <!--
 Action procedure to retrieve an LLM completion, given the following parameters:
-payload - JSON payload to be posted to the LLM
+conversationTitle - Title of the tiddler containing the conversation
 resultTitlePrefix - Prefix of the tiddler to be used for saving the result. If the tiddler already exists then a number will be added repeatedly until the resulting title is unique
 resultTags - Tags to be applied to the result tiddler
 statusTitle - Optional title of a tiddler to which the status of the request will be bound: "pending", "complete", "error"
 completionServer - Optional URL of server
 -->
-\procedure get-llm-completion(payload,resultTitlePrefix,resultTags,statusTitle,completionServer)
+\procedure get-llm-completion(conversationTitle,resultTitlePrefix,resultTags,statusTitle,completionServer)
 <!--
 Callback for the HTTP response from the LLM
 -->
@@ -42,19 +42,21 @@ completionServer - Optional URL of server
 <$let
 completionServer={{{ [<completionServer>!is[blank]else<default-llm-completion-server>] }}}
 >
-<$action-log message="get-llm-completion"/>
-<$action-log/>
-<$action-sendmessage
-$message="tm-http-request"
-url={{{ [<completionServer>get[url]addsuffix[/v1/chat/completions]] }}}
-body=<<payload>>
-header-content-type="application/json"
-bearer-auth-token-from-store="openai-secret-key"
-method="POST"
-oncompletion=<<get-llm-completion-callback>>
-bind-status=<<statusTitle>>
-var-resultTitlePrefix=<<resultTitlePrefix>>
-var-resultTags=<<resultTags>>
-/>
+<$wikify name="json" text={{{ [<completionServer>get[text]] }}}>
+<$action-log message="get-llm-completion"/>
+<$action-log/>
+<$action-sendmessage
+$message="tm-http-request"
+url={{{ [<completionServer>get[url]addsuffix[/v1/chat/completions]] }}}
+body=<<json>>
+header-content-type="application/json"
+bearer-auth-token-from-store="openai-secret-key"
+method="POST"
+oncompletion=<<get-llm-completion-callback>>
+bind-status=<<statusTitle>>
+var-resultTitlePrefix=<<resultTitlePrefix>>
+var-resultTags=<<resultTags>>
+/>
+</$wikify>
 </$let>
 \end get-llm-completion
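
With this change, get-llm-completion no longer receives a ready-made JSON payload. It takes the title of the conversation tiddler and wikifies the text field of the chosen completion server tiddler to build the request body, so each server tiddler carries its own payload template (see the two server tiddlers below). A minimal sketch of a call under the new signature; the conversation, result, and status titles are made-up examples, and only the server title is defined by the plugin:

<!-- Illustrative call: "MyConversation" and the result/status titles are hypothetical -->
<$transclude
  $variable="get-llm-completion"
  conversationTitle="MyConversation"
  resultTitlePrefix="MyConversation Response"
  resultTags="MyConversation"
  statusTitle="$:/temp/llm-status"
  completionServer="$:/plugins/tiddlywiki/ai-tools/servers/local-llamafile"
/>

Tagging the result with the conversation title matters because the payload templates collect messages with tag<conversationTitle>.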
@@ -6,7 +6,7 @@ tags: $:/tags/AI/PageMenu
 $basetitle="AI Conversation"
 tags="$:/tags/AI/Conversation"
 system-prompt="You are a helpful assistant."
-current-response-text="Please describe this picture"
+current-response-text="Please list the 10 most important things that happened in South East Asia in the Twentieth Century"
 >
 <$action-navigate $to=<<createTiddler-title>>/>
 </$action-createtiddler>
@@ -2,3 +2,23 @@ title: $:/plugins/tiddlywiki/ai-tools/servers/local-llamafile
 tags: $:/tags/AI/CompletionServer
 url: http://127.0.0.1:8080
 caption: Locally running Llamafile server
+
+\rules only filteredtranscludeinline transcludeinline macrodef macrocallinline html conditional commentblock commentinline
+{
+"model": "gpt-4o",
+"messages": [
+{
+"role": "system",
+"content": "<$text text={{{ [<conversationTitle>get[system-prompt]jsonstringify[]] }}}/>"
+}
+<!-- Loop through the tiddlers tagged with this one to pick up all the messages in the conversation -->
+<$list filter="[all[shadows+tiddlers]tag<conversationTitle>!is[draft]sort[created]]">
+,
+{
+<!-- We use JSON stringify to escape the characters that can't be used directly in JSON -->
+"role": "<$text text={{{ [<currentTiddler>get[role]jsonstringify[]] }}}/>",
+"content": "<$text text={{{ [<currentTiddler>get[text]jsonstringify[]] }}}/>"
+}
+</$list>
+]
+}
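
The server's text field above is a wikitext template that renders to JSON. Assuming the default conversation created by the "AI Conversation" menu item (system prompt "You are a helpful assistant." plus one message whose role field is "user"), the rendered body would look roughly like this:

{
  "model": "gpt-4o",
  "messages": [
    {
      "role": "system",
      "content": "You are a helpful assistant."
    },
    {
      "role": "user",
      "content": "Please list the 10 most important things that happened in South East Asia in the Twentieth Century"
    }
  ]
}

Note that each message's "content" stays a plain JSON string here; avoiding the array-of-parts content format used by the OpenAI template below is presumably the Llamafile compatibility fix that gives this commit its name.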
@@ -2,3 +2,38 @@ title: $:/plugins/tiddlywiki/ai-tools/servers/openai
 tags: $:/tags/AI/CompletionServer
 url: https://api.openai.com
 caption: OpenAI Service
+
+\rules only filteredtranscludeinline transcludeinline macrodef macrocallinline html conditional commentblock commentinline
+{
+"model": "gpt-4o",
+"messages": [
+{
+"role": "system",
+"content": "<$text text={{{ [<conversationTitle>get[system-prompt]jsonstringify[]] }}}/>"
+}
+<!-- Loop through the tiddlers tagged with this one to pick up all the messages in the conversation -->
+<$list filter="[all[shadows+tiddlers]tag<conversationTitle>!is[draft]sort[created]]">
+,
+{
+<!-- We use JSON stringify to escape the characters that can't be used directly in JSON -->
+"role": "<$text text={{{ [<currentTiddler>get[role]jsonstringify[]] }}}/>",
+"content": [
+{
+"type": "text",
+"text": "<$text text={{{ [<currentTiddler>get[text]jsonstringify[]] }}}/>"
+}
+<%if [<currentTiddler>get[image]else[]!match[]] %>
+,
+{
+"type": "image_url",
+"image_url": {
+"url": "<$text text={{{ [[data:]] [<currentTiddler>get[image]get[type]] [[;base64,]] [<currentTiddler>get[image]get[text]jsonstringify[]] +[join[]] }}}/>"
+}
+}
+<%endif%>
+]
+
+}
+</$list>
+]
+}
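
The OpenAI template differs in wrapping each message's content in an array of typed parts, so that an optional image field on a message tiddler can be posted alongside the text as a data: URI (the template joins "data:", the image tiddler's type, ";base64," and its text). A rough sketch of one rendered message, assuming an attached PNG image tiddler; the base64 data is a truncated placeholder:

{
  "role": "user",
  "content": [
    {
      "type": "text",
      "text": "Please describe this picture"
    },
    {
      "type": "image_url",
      "image_url": {
        "url": "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAA..."
      }
    }
  ]
}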
@@ -65,46 +65,6 @@ Procedure to display a message from an AI conversation. Current tiddler is the c
 </$qualify>
 \end ai-message
 
-<!--
-Procedure that is wikified to generate the JSON payload for the LLM
--->
-\procedure payload()
-\rules only filteredtranscludeinline transcludeinline macrodef macrocallinline html conditional commentblock commentinline
-{
-"model": "gpt-4o",
-"messages": [
-{
-"role": "system",
-"content": "<$text text={{{ [<currentTiddler>get[system-prompt]jsonstringify[]] }}}/>"
-}
-<!-- Loop through the tiddlers tagged with this one to pick up all the messages in the conversation -->
-<$list filter="[all[shadows+tiddlers]tag<currentTiddler>!is[draft]sort[created]]">
-,
-{
-<!-- We use JSON stringify to escape the characters that can't be used directly in JSON -->
-"role": "<$text text={{{ [<currentTiddler>get[role]jsonstringify[]] }}}/>",
-"content": [
-{
-"type": "text",
-"text": "<$text text={{{ [<currentTiddler>get[text]jsonstringify[]] }}}/>"
-}
-<%if [<currentTiddler>get[image]else[]!match[]] %>
-,
-{
-"type": "image_url",
-"image_url": {
-"url": "<$text text={{{ [[data:]] [<currentTiddler>get[image]get[type]] [[;base64,]] [<currentTiddler>get[image]get[text]jsonstringify[]] +[join[]] }}}/>"
-}
-}
-<%endif%>
-]
-
-}
-</$list>
-]
-}
-\end payload
-
 <!--
 Action procedure to get the next response from the LLM
 -->
@@ -123,16 +83,14 @@ Action procedure to get the next response from the LLM
 >
 <$action-deletefield $tiddler=<<currentTiddler>> $field="current-response-text"/>
 <$action-deletefield $tiddler=<<currentTiddler>> $field="current-response-image"/>
-<$wikify name="json" text=<<payload>>>
-<$transclude
-$variable="get-llm-completion"
-payload=<<json>>
-completionServer={{!!completion-server}}
-resultTitlePrefix=<<resultTitlePrefix>>
-resultTags=<<resultTags>>
-statusTitle=<<statusTitle>>
-/>
-</$wikify>
+<$transclude
+$variable="get-llm-completion"
+conversationTitle=<<currentTiddler>>
+completionServer={{!!completion-server}}
+resultTitlePrefix=<<resultTitlePrefix>>
+resultTags=<<resultTags>>
+statusTitle=<<statusTitle>>
+/>
 </$action-createtiddler>
 </$let>
 \end action-get-response
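
A final usage note: bind-status in get-llm-completion ties the tiddler named by statusTitle to the lifecycle of the HTTP request, setting its text to "pending", "complete" or "error" as documented above, which makes a progress indicator straightforward. A minimal sketch, assuming the caller passed the hypothetical title $:/temp/llm-status as statusTitle:

<!-- "$:/temp/llm-status" is a made-up status tiddler title -->
<%if [[$:/temp/llm-status]get[text]match[pending]] %>
Waiting for the LLM response...
<%endif%>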