1
0
mirror of https://github.com/Jermolene/TiddlyWiki5 synced 2026-05-05 13:11:36 +00:00

AI tools: add more servers (#8966)

* feat: add tree

* refactor: make tools and settings extensible

* fix: shining color on dark mode

* feat: allow choose model, and use authTokenStoreKey for different server

* feat: allow extends openai api

* fix: Input tag 'error' found using 'role' does not match any of the expected tags: 'system', 'user', 'assistant', 'tool'"

* fix: shining result color on dark mode

* Delete import-chatgpt.tid

* feat: add deepseek

* feat: Assign a server and model on tiddler if user lazy or forget to pick one.

* feat: allow config default server and model
This commit is contained in:
lin onetwo
2025-06-09 16:43:36 +08:00
committed by GitHub
parent ce9209f00e
commit 53edb20da5
17 changed files with 154 additions and 60 deletions

View File

@@ -14,16 +14,9 @@ See the ''settings'' tab for set up instructions.
#* If using ~OpenAI it is possible to attach a single image to a prompt
# Click "Send" and wait for the output of the LLM
!! Import ~ChatGPT Conversation Archives
!! Use utilities
# [[Follow the instructions|https://help.openai.com/en/articles/7260999-how-do-i-export-my-chatgpt-history-and-data]] to request an export of your ~ChatGPT data
# You will receive a link to download your data as a ZIP file
# Download and unzip the file
# Locate the file `conversations.json` within the archive and import it into your TiddlyWiki
# Visit the ''tools'' tab and locate your `conversations.json` tiddler
# Click the associated ''import'' button
# See the imported conversations listed in the ''tools'' tab
# The imported tiddler `conversations.json` is no longer required and can be deleted
For example, to import ~ChatGPT conversation archives, follow the instructions in the [[utilities tab|$:/plugins/tiddlywiki/ai-tools/utilities]].
!! Conversation Format

View File

@@ -2,7 +2,10 @@ title: $:/plugins/tiddlywiki/ai-tools/globals
tags: $:/tags/Global
\function ai-tools-default-llm-completion-server()
[all[shadows+tiddlers]tag[$:/tags/AI/CompletionServer]sort[caption]first[]]
[[$:/plugins/tiddlywiki/ai-tools/configs/default-server]get[completion-server]]
\end
\function ai-tools-default-llm-completion-model()
[[$:/plugins/tiddlywiki/ai-tools/configs/default-server]get[completion-model]]
\end
<!--
@@ -12,12 +15,14 @@ resultTitlePrefix - Prefix of the tiddler to be used for saving the result. If t
resultTags - Tags to be applied to the result tiddler
ai-tools-status-title - Optional title of a tiddler to which the status of the request will be bound: "pending", "complete", "error"
completionServer - Optional URL of server
completionModel - Optional model to use
-->
\procedure ai-tools-get-llm-completion(conversationTitle,resultTitlePrefix,resultTags,ai-tools-status-title,completionServer)
\procedure ai-tools-get-llm-completion(conversationTitle,resultTitlePrefix,resultTags,ai-tools-status-title,completionServer,completionModel,authTokenStoreKey)
<$let
completionServer={{{ [<completionServer>!is[blank]else<ai-tools-default-llm-completion-server>] }}}
completionModel={{{ [<completionModel>!is[blank]else<ai-tools-default-llm-completion-model>] }}}
>
<$importvariables filter="[<completionServer>]">
<$importvariables filter="[<completionServer>get[extends]] [<completionServer>]">
<$wikify name="json" text=<<json-prompt>>>
<$action-log message="ai-tools-get-llm-completion"/>
<$action-log/>
@@ -26,7 +31,7 @@ completionServer - Optional URL of server
url={{{ [<completionServer>get[url]] }}}
body=<<json>>
header-content-type="application/json"
bearer-auth-token-from-store="openai-secret-key"
bearer-auth-token-from-store=<<authTokenStoreKey>>
method="POST"
oncompletion=<<completion-callback>>
bind-status=<<ai-tools-status-title>>
@@ -114,9 +119,10 @@ Procedure to display a message from an AI conversation. Current tiddler is the c
\end
<!--
Action procedure to get the next response from the LLM
Action procedure to get the next response from the LLM on a chat tiddler.
-->
\procedure ai-tools-action-get-response()
<!-- Get the response -->
<$let
resultTitlePrefix={{{ [<currentTiddler>addsuffix[ - Prompt]] }}}
resultTags={{{ [<currentTiddler>format:titlelist[]] }}}
@@ -136,6 +142,8 @@ Action procedure to get the next response from the LLM
$variable="ai-tools-get-llm-completion"
conversationTitle=<<currentTiddler>>
completionServer={{!!completion-server}}
completionModel={{!!completion-model}}
authTokenStoreKey={{{[{!!completion-server}get[auth-token-store-key]]}}}
resultTitlePrefix=<<resultTitlePrefix>>
resultTags=<<resultTags>>
ai-tools-status-title=<<ai-tools-status-title>>
@@ -152,6 +160,16 @@ Action procedure to get the next response from the LLM
<option value=<<currentTiddler>>><$view field='caption'/></option>
</$list>
</$select>
Model: <$select tiddler=<<currentTiddler>> field="completion-model" default=<<ai-tools-default-llm-completion-model>>>
<$list filter="[<ai-tools-default-llm-completion-server>get[models]enlist-input[]]">
<option value=<<currentTiddler>>><<currentTiddler>></option>
</$list>
</$select>
<$list filter="[<ai-tools-default-llm-completion-server>get[settings]]">
<$button to=<<currentTiddler>> class="tc-btn-invisible">
{{$:/core/images/options-button}}
</$button>
</$list>
<div class="ai-conversation">
<$transclude

View File

@@ -1,6 +0,0 @@
title: $:/plugins/tiddlywiki/ai-tools/page-menu/import-chatgpt
tags: $:/tags/AI/PageMenu
<$button actions=<<ai-tools-import-conversations>> class="tc-btn-invisible">
{{$:/core/images/input-button}} Import Conversations from ~ChatGPT
</$button>

View File

@@ -2,6 +2,6 @@
"title": "$:/plugins/tiddlywiki/ai-tools",
"name": "AI Tools",
"description": "AI Tools for TiddlyWiki",
"list": "readme docs tools settings",
"list": "readme docs settings utilities tree",
"stability": "STABILITY_1_EXPERIMENTAL"
}

View File

@@ -0,0 +1,8 @@
title: $:/plugins/tiddlywiki/ai-tools/servers/deepseek
tags: $:/tags/AI/CompletionServer
url: https://api.deepseek.com/chat/completions
caption: DeepSeek
auth-token-store-key: deepseek-secret-key
models: deepseek-chat deepseek-reasoner
extends: $:/plugins/tiddlywiki/ai-tools/servers/openai
settings: $:/plugins/tiddlywiki/ai-tools/settings/deepseek

View File

@@ -2,6 +2,7 @@ title: $:/plugins/tiddlywiki/ai-tools/servers/llamafile-llava
tags: $:/tags/AI/CompletionServer
url: http://127.0.0.1:8080/completion
caption: Local Llamafile server running LLaVA models
settings: $:/plugins/tiddlywiki/ai-tools/settings/llamafile
<!--
Wikified JSON text to be sent to server

View File

@@ -1,7 +1,10 @@
title: $:/plugins/tiddlywiki/ai-tools/servers/openai
tags: $:/tags/AI/CompletionServer
url: https://api.openai.com/v1/chat/completions
auth-token-store-key: openai-secret-key
caption: OpenAI Service
models: gpt-4o gpt-4.5-preview gpt-4o-mini o1 o1-mini o3-mini
settings: $:/plugins/tiddlywiki/ai-tools/settings/openai
<!--
Wikified JSON text to be sent to server
@@ -9,14 +12,17 @@ Wikified JSON text to be sent to server
\procedure json-prompt()
\rules only filteredtranscludeinline transcludeinline macrodef macrocallinline html conditional commentblock commentinline
{
"model": "gpt-4o",
"model": "<<completionModel>>",
"messages": [
{
"role": "system",
"content": "<$text text={{{ [<conversationTitle>get[system-prompt]jsonstringify[]] }}}/>"
}
<!-- Loop through the tiddlers tagged with this one to pick up all the messages in the conversation -->
<$list filter="[all[shadows+tiddlers]tag<conversationTitle>!is[draft]sort[created]]">
<!--
Loop through the tiddlers tagged with this one to pick up all the messages in the conversation.
Exclude 'error' to limit role in 'system', 'user', 'assistant', 'tool'.
-->
<$list filter="[all[shadows+tiddlers]tag<conversationTitle>!is[draft]!role[error]sort[created]]">
,
{
<!-- We use JSON stringify to escape the characters that can't be used directly in JSON -->

View File

@@ -0,0 +1,8 @@
title: $:/plugins/tiddlywiki/ai-tools/servers/siliconflow
tags: $:/tags/AI/CompletionServer
url: https://api.siliconflow.cn/v1/chat/completions
caption: SiliconFlow
auth-token-store-key: siliconflow-secret-key
models: deepseek-ai/DeepSeek-V3 deepseek-ai/DeepSeek-R1
extends: $:/plugins/tiddlywiki/ai-tools/servers/openai
settings: $:/plugins/tiddlywiki/ai-tools/settings/siliconflow

View File

@@ -0,0 +1,12 @@
title: $:/plugins/tiddlywiki/ai-tools/settings/deepseek
tags: $:/tags/AI/ServerSetting
caption: DeepSeek
!! ~DeepSeek API key
# Register for an account at https://deepseek.com/
# Complete the KYC and charge your account with a minimum of ¥1 on https://platform.deepseek.com/top_up
# Visit https://platform.deepseek.com/api_keys to create a new secret API key
# Copy and paste the value into the box below
~DeepSeek Secret API Key: <$password name={{$:/plugins/tiddlywiki/ai-tools/servers/deepseek!!auth-token-store-key}}/>

View File

@@ -0,0 +1,12 @@
title: $:/plugins/tiddlywiki/ai-tools/settings/llamafile
tags: $:/tags/AI/ServerSetting
caption: Llamafile
models:
!! Llamafile Setup
[[Llamafile|https://github.com/Mozilla-Ocho/llamafile]] lets you download and run LLMs as a single file. See the [[announcement blog post|https://hacks.mozilla.org/2023/11/introducing-llamafile/]] for background.
# Download and run Llamafile as [[described in the QuickStart guide|https://github.com/Mozilla-Ocho/llamafile?tab=readme-ov-file#quickstart]]
# Visit http://127.0.0.1:8080 in a browser and verify that you can see the Llamafile interface. You can also try it out here
# Return to AI Tools and start a conversation, specifying Llamafile as the server

View File

@@ -1,6 +1,6 @@
title: $:/plugins/tiddlywiki/ai-tools/settings
! AI Tools Settings
title: $:/plugins/tiddlywiki/ai-tools/settings/openai
tags: $:/tags/AI/ServerSetting
caption: OpenAI
!! ~OpenAI API key
@@ -12,12 +12,4 @@ This plugin runs entirely in the browser, with no backend server component. A co
# Visit https://platform.openai.com/api-keys to create a new secret API key
# Copy and paste the value into the box below
~OpenAI Secret API Key: <$password name="openai-secret-key"/>
!! Llamafile Setup
[[Llamafile|https://github.com/Mozilla-Ocho/llamafile]] lets you download and run LLMs as a single file. See the [[announcment blog post|https://hacks.mozilla.org/2023/11/introducing-llamafile/]] for background.
# Download and run Llamafile as [[described in the QuickStart guide|https://github.com/Mozilla-Ocho/llamafile?tab=readme-ov-file#quickstart]]
# Visit http://127.0.0.1:8080 in a browser and verify that you can see the Llamafile interface. You can also try it out here
# Return to AI Tools and start a conversation, specifying Llamafile as the server
~OpenAI Secret API Key: <$password name={{$:/plugins/tiddlywiki/ai-tools/servers/openai!!auth-token-store-key}}/>

View File

@@ -0,0 +1,23 @@
title: $:/plugins/tiddlywiki/ai-tools/settings
tags: $:/tags/ControlPanel/SettingsTab
caption: AI Tools
These settings let you customise the behaviour of the "AI Tools" plugin.
!! Completion Servers
Default Server: <$select tiddler="$:/plugins/tiddlywiki/ai-tools/configs/default-server" field="completion-server">
<$list filter="[all[shadows+tiddlers]tag[$:/tags/AI/CompletionServer]sort[caption]]">
<option value=<<currentTiddler>>><$view field='caption'/></option>
</$list>
</$select>
Default Model: <$select tiddler="$:/plugins/tiddlywiki/ai-tools/configs/default-server" field="completion-model">
<$list filter="[{$:/plugins/tiddlywiki/ai-tools/configs/default-server!!completion-server}get[models]enlist-input[]]">
<option value=<<currentTiddler>>><<currentTiddler>></option>
</$list>
</$select>
<$transclude
$variable="tabs"
tabsList="[all[shadows+tiddlers]tag[$:/tags/AI/ServerSetting]]"
default={{{[{$:/plugins/tiddlywiki/ai-tools/configs/default-server!!completion-server}get[settings]]}}}
/>

View File

@@ -0,0 +1,12 @@
title: $:/plugins/tiddlywiki/ai-tools/settings/siliconflow
tags: $:/tags/AI/ServerSetting
caption: SiliconFlow
!! ~SiliconFlow API key
# Register for an account at https://siliconflow.com/ or https://siliconflow.cn/
#* Newly registered accounts can claim a small amount of credit, no payment info is required
# Visit https://cloud.siliconflow.com/account/ak or https://cloud.siliconflow.cn/account/ak to create a new secret API key
# Copy and paste the value into the box below
~SiliconFlow Secret API Key: <$password name={{$:/plugins/tiddlywiki/ai-tools/servers/siliconflow!!auth-token-store-key}}/>

View File

@@ -4,25 +4,25 @@ tags: [[$:/tags/Stylesheet]]
\rules only filteredtranscludeinline transcludeinline macrodef macrocallinline
.ai-conversation {
background: #f0eeff;
background: <<colour background>>;
border-radius: 2em;
padding: 1em 1em;
display: flex;
flex-direction: column;
gap: 1em;
box-shadow: 2px 2px 5px rgba(0,0,0,0.2);
box-shadow: 2px 2px 5px <<colour muted-foreground>>;
}
.ai-conversation .ai-tools-message {
box-shadow: 2px 2px 5px rgba(0,0,0,0.2);
box-shadow: 2px 2px 5px <<colour muted-foreground>>;
border-radius: 1em;
display: flex;
flex-direction: column;
}
.ai-conversation .ai-tools-message .ai-tools-message-toolbar {
background: rgba(1,1,1,0.35);
color: white;
background: <<colour sidebar-foreground-shadow>>;
color: <<colour foreground>>;
padding: 0.25em 1em 0.25em 1em;
border-top-left-radius: 1em;
border-top-right-radius: 1em;
@@ -35,8 +35,8 @@ tags: [[$:/tags/Stylesheet]]
}
.ai-conversation .ai-tools-message .ai-tools-message-toolbar .ai-tools-message-toolbar-button {
background: rgba(255,255,255,0.35);
color: #333333;
background: <<colour sidebar-foreground-shadow>>;
color: <<colour foreground>>;
cursor: pointer;
display: inline-block;
outline: 0;
@@ -54,8 +54,8 @@ tags: [[$:/tags/Stylesheet]]
}
.ai-conversation .ai-tools-message .ai-tools-message-toolbar .ai-tools-message-toolbar-button:hover {
color: #ffffff;
background: rgba(255,255,255,0.55);
color: <<colour foreground>>;
background: <<colour sidebar-foreground-shadow>>;
}
@@ -65,27 +65,27 @@ tags: [[$:/tags/Stylesheet]]
.ai-conversation .ai-tools-message.ai-tools-message-role-system {
width: 60%;
background: #4c4c80;
color: white;
background: <<colour footnote-target-background>>;
color: <<colour foreground>>;
}
.ai-conversation .ai-tools-message.ai-tools-message-role-user {
width: 60%;
margin-left: auto;
background: #ffcde0;
background: <<colour code-background>>;
}
.ai-conversation .ai-tools-message.ai-tools-message-role-assistant {
background: #dfd;
background: <<colour tiddler-editor-background>>;
}
.ai-conversation .ai-tools-message.ai-tools-message-role-error {
background: #fdd;
background: <<colour notification-background>>;
}
.ai-conversation .ai-user-prompt {
padding: 1em;
background: #ffcde0;
background: <<colour code-background>>;
border-radius: 1em;
box-shadow: inset 3px 4px 2px rgba(0, 0, 0, 0.1);
}
@@ -101,8 +101,8 @@ tags: [[$:/tags/Stylesheet]]
}
.ai-conversation .ai-user-prompt-text textarea {
color: #000;
background: #fff;
color: <<colour foreground>>;
background: <<colour background>>;
}
.ai-conversation .ai-user-prompt button.ai-user-prompt-send {
@@ -110,7 +110,7 @@ tags: [[$:/tags/Stylesheet]]
background-image: linear-gradient(-180deg, #e0c3ce, #963057);
border-radius: 1em;
box-shadow: rgba(0, 0, 0, 0.1) 0 2px 4px;
color: #FFFFFF;
color: <<colour foreground>>;
cursor: pointer;
display: inline-block;
outline: 0;
@@ -143,8 +143,8 @@ tags: [[$:/tags/Stylesheet]]
}
.ai-conversation .ai-user-prompt .ai-user-prompt-image button {
color: #000;
fill: #000;
color: <<colour foreground>>;
fill: <<colour foreground>>;
}
.ai-conversation .ai-user-prompt-type {

View File

@@ -0,0 +1,3 @@
title: $:/plugins/tiddlywiki/ai-tools/tree
<<tree prefix:"$:/plugins/tiddlywiki/ai-tools/">>

View File

@@ -1,12 +1,21 @@
title: $:/plugins/tiddlywiki/ai-tools/tools
! Import ~ChatGPT Export Archive
title: $:/plugins/tiddlywiki/ai-tools/utilities/openai
tags: $:/tags/AI/Utility
caption: Import ChatGPT Export Archive
These instructions allow you to import the conversations from a ~ChatGPT export archive.
# [[Follow the instructions|https://help.openai.com/en/articles/7260999-how-do-i-export-my-chatgpt-history-and-data]] to request an export of your ~ChatGPT data
# You will receive a link to download your data as a ZIP file
# Download and unzip the file
# Locate the file `conversations.json` within the archive and import it into your TiddlyWiki
# Visit the ''tools'' tab and locate your `conversations.json` tiddler
# Click the associated ''import'' button
# See the imported conversations listed in the ''tools'' tab
# The imported tiddler `conversations.json` is no longer required and can be deleted
!! 1- Request your archive
Visit the ~ChatGPT site to request your achive. You will be sent an email with a link to a ZIP file. Download the file and locate the file `conversations.json` within it.
Visit the ~ChatGPT site to request your archive. You will be sent an email with a link to a ZIP file. Download the file and locate the file `conversations.json` within it.
!! 2 - Import `conversations.json` as a tiddler

View File

@@ -0,0 +1,3 @@
title: $:/plugins/tiddlywiki/ai-tools/utilities
<<tabs "[all[shadows+tiddlers]tag[$:/tags/AI/Utility]]">>