1
0
mirror of https://github.com/Jermolene/TiddlyWiki5 synced 2025-08-08 14:55:17 +00:00

feat: allow choosing a model, and use authTokenStoreKey for different servers

This commit is contained in:
lin onetwo 2025-03-01 01:09:13 +08:00
parent 303b711ef0
commit 9b20e4a798
8 changed files with 44 additions and 9 deletions

View File

@ -12,10 +12,12 @@ resultTitlePrefix - Prefix of the tiddler to be used for saving the result. If t
resultTags - Tags to be applied to the result tiddler
ai-tools-status-title - Optional title of a tiddler to which the status of the request will be bound: "pending", "complete", "error"
completionServer - Optional URL of server
completionModel - Optional model to use
-->
\procedure ai-tools-get-llm-completion(conversationTitle,resultTitlePrefix,resultTags,ai-tools-status-title,completionServer)
\procedure ai-tools-get-llm-completion(conversationTitle,resultTitlePrefix,resultTags,ai-tools-status-title,completionServer,completionModel,authTokenStoreKey)
<$let
completionServer={{{ [<completionServer>!is[blank]else<ai-tools-default-llm-completion-server>] }}}
completionModel={{{ [<completionModel>!is[blank]else<ai-tools-default-llm-completion-model>] }}}
>
<$importvariables filter="[<completionServer>]">
<$wikify name="json" text=<<json-prompt>>>
@ -26,7 +28,7 @@ completionServer - Optional URL of server
url={{{ [<completionServer>get[url]] }}}
body=<<json>>
header-content-type="application/json"
bearer-auth-token-from-store="openai-secret-key"
bearer-auth-token-from-store=<<authTokenStoreKey>>
method="POST"
oncompletion=<<completion-callback>>
bind-status=<<ai-tools-status-title>>
@ -136,6 +138,8 @@ Action procedure to get the next response from the LLM
$variable="ai-tools-get-llm-completion"
conversationTitle=<<currentTiddler>>
completionServer={{!!completion-server}}
completionModel={{!!completion-model}}
authTokenStoreKey={{{[{!!completion-server}get[auth-token-store-key]]}}}
resultTitlePrefix=<<resultTitlePrefix>>
resultTags=<<resultTags>>
ai-tools-status-title=<<ai-tools-status-title>>
@ -152,6 +156,11 @@ Action procedure to get the next response from the LLM
<option value=<<currentTiddler>>><$view field='caption'/></option>
</$list>
</$select>
Model: <$select tiddler=<<currentTiddler>> field="completion-model" default=<<ai-tools-default-llm-completion-model>>>
<$list filter="[{!!completion-server}get[models]enlist-input[]]">
<option value=<<currentTiddler>>><<currentTiddler>></option>
</$list>
</$select>
<div class="ai-conversation">
<$transclude

View File

@ -2,6 +2,6 @@
"title": "$:/plugins/tiddlywiki/ai-tools",
"name": "AI Tools",
"description": "AI Tools for TiddlyWiki",
"list": "readme docs utilities settings tree",
"list": "readme docs settings utilities tree",
"stability": "STABILITY_1_EXPERIMENTAL"
}

View File

@ -1,7 +1,9 @@
title: $:/plugins/tiddlywiki/ai-tools/servers/openai
tags: $:/tags/AI/CompletionServer
url: https://api.openai.com/v1/chat/completions
auth-token-store-key: openai-secret-key
caption: OpenAI Service
models: gpt-4o gpt-4.5-preview gpt-4o-mini o1 o1-mini o3-mini
<!--
Wikified JSON text to be sent to server
@ -9,7 +11,7 @@ Wikified JSON text to be sent to server
\procedure json-prompt()
\rules only filteredtranscludeinline transcludeinline macrodef macrocallinline html conditional commentblock commentinline
{
"model": "gpt-4o",
"model": <<completionModel>>,
"messages": [
{
"role": "system",

View File

@ -0,0 +1,8 @@
title: $:/plugins/tiddlywiki/ai-tools/servers/siliconflow
tags: $:/tags/AI/CompletionServer
url: https://api.siliconflow.cn/v1/chat/completions
caption: SiliconFlow
auth-token-store-key: siliconflow-secret-key
models: deepseek-ai/DeepSeek-V3 deepseek-ai/DeepSeek-R1
\import $:/plugins/tiddlywiki/ai-tools/servers/openai

View File

@ -1,6 +1,7 @@
title: $:/plugins/tiddlywiki/ai-tools/settings/llamafile
tags: $:/tags/AI/Setting
tags: $:/tags/AI/ServerSetting
caption: Llamafile
models:
!! Llamafile Setup

View File

@ -1,5 +1,5 @@
title: $:/plugins/tiddlywiki/ai-tools/settings/openai
tags: $:/tags/AI/Setting
tags: $:/tags/AI/ServerSetting
caption: OpenAI
!! ~OpenAI API key
@ -12,4 +12,4 @@ This plugin runs entirely in the browser, with no backend server component. A co
# Visit https://platform.openai.com/api-keys to create a new secret API key
# Copy and paste the value into the box below
~OpenAI Secret API Key: <$password name="openai-secret-key"/>
~OpenAI Secret API Key: <$password name={{$:/plugins/tiddlywiki/ai-tools/servers/openai!!auth-token-store-key}}/>

View File

@ -1,5 +1,8 @@
title: $:/plugins/tiddlywiki/ai-tools/settings
tags: $:/tags/ControlPanel/SettingsTab
caption: AI Tools
These settings let you customise the behaviour of the "AI Tools" plugin.
<<tabs "[all[shadows]tag[$:/tags/AI/Setting]]">>
!! Completion Servers
<<tabs "[all[shadows]tag[$:/tags/AI/ServerSetting]]">>

View File

@ -0,0 +1,12 @@
title: $:/plugins/tiddlywiki/ai-tools/settings/siliconflow
tags: $:/tags/AI/ServerSetting
caption: SiliconFlow
!! ~SiliconFlow API key
# Register for an account at https://siliconflow.com/ or https://siliconflow.cn/
#* Newly registered accounts can claim a small amount of credit, no payment info is required
# Visit https://cloud.siliconflow.com/account/ak or https://cloud.siliconflow.cn/account/ak to create a new secret API key
# Copy and paste the value into the box below
~SiliconFlow Secret API Key: <$password name={{$:/plugins/tiddlywiki/ai-tools/servers/siliconflow!!auth-token-store-key}}/>