1
0
mirror of https://github.com/Jermolene/TiddlyWiki5 synced 2026-01-22 19:04:38 +00:00

Compare commits

...

39 Commits

Author SHA1 Message Date
lin onetwo
53edb20da5 AI tools: add more servers (#8966)
* feat: add tree

* refactor: make tools and settings extensible

* fix: shining color on dark mode

* feat: allow choose model, and use authTokenStoreKey for different server

* feat: allow extends openai api

* fix: Input tag 'error' found using 'role' does not match any of the expected tags: 'system', 'user', 'assistant', 'tool'"

* fix: shining result color on dark mode

* Delete import-chatgpt.tid

* feat: add deepseek

* feat: Assign a server and model on tiddler if user lazy or forget to pick one.

* feat: allow config default server and model
2025-06-09 09:43:36 +01:00
Jeremy Ruston
ce9209f00e Merge branch 'master' into feat-ai-tools 2025-03-06 17:42:02 +00:00
Jeremy Ruston
a6f7c05203 Merge branch 'master' into feat-ai-tools 2025-03-02 17:16:59 +00:00
Jeremy Ruston
2ae1e5aa09 Merge branch 'tiddlywiki-com' 2025-02-27 11:01:28 +00:00
Leilei332
378b9b3627 Allow building external core version of main site (#8953) 2025-02-27 11:00:58 +00:00
Jeremy Ruston
ceddef7b51 Merge branch 'master' into feat-ai-tools 2024-10-10 15:11:20 +01:00
Jeremy Ruston
db692738fe Allow content type of user input to be specified
Useful to use text/plain for code
2024-10-10 14:54:12 +01:00
Jeremy Ruston
8efec8920d User messages should be wikified
Makes it possible to include other tiddlers in a prompt
2024-10-10 13:22:59 +01:00
Jeremy Ruston
904155043a Merge branch 'master' into feat-ai-tools 2024-10-09 21:51:36 +01:00
Jeremy Ruston
97c9456f21 Merge branch 'master' into feat-ai-tools 2024-09-30 16:43:36 +01:00
Jeremy Ruston
a1a6b6fa25 Merge branch 'master' into feat-ai-tools 2024-08-30 14:13:58 +01:00
Jeremy Ruston
422df10841 Fix ChatGPT import 2024-08-05 16:33:07 +01:00
Jeremy Ruston
ea595dfe2f Refactor procedures and functions to be global for reuse 2024-07-28 13:07:24 +01:00
Jeremy Ruston
58f96e779a Add documentation for the conversation format 2024-07-28 12:52:43 +01:00
Jeremy Ruston
a32a1a3802 Merge branch 'master' into feat-ai-tools 2024-07-28 12:39:43 +01:00
Jeremy Ruston
28d262e371 Merge branch 'master' into feat-ai-tools 2024-07-27 17:16:43 +01:00
Jeremy Ruston
cd58622c06 Clarify that the Llamafile prompt is for Llava models 2024-07-24 22:28:30 +01:00
Jeremy Ruston
d39a3d61e3 Merge branch 'master' into feat-ai-tools 2024-07-24 22:05:24 +01:00
Jeremy Ruston
80fdaae6de Llamafile use native /completion API endpoint
So that we can do image analysis
2024-07-21 21:08:42 +01:00
Jeremy Ruston
4a79af9eea Revise default system prompt 2024-07-21 19:34:25 +01:00
Jeremy Ruston
370ff3057e Refactor completion servers so that they handle their own response 2024-07-21 16:51:19 +01:00
Jeremy Ruston
fb641d340c Fix an annoying little bug that prevents importvariables being used inside action-createtiddler in action strings
The root cause was that action-createtiddler widget was calling refreshChildren() with no argument.

A secondary factor was that importvariables widget was not defensive in handling a missing changedTiddlers parameter
2024-07-21 16:50:55 +01:00
Jeremy Ruston
3bdd449b3e Don't hardcode the API route 2024-07-21 16:11:46 +01:00
Jeremy Ruston
0037935af5 Revert "Give conversation tiddlers a dummy text field"
This reverts commit 0e5955397b.
2024-07-21 15:48:33 +01:00
Jeremy Ruston
dbb7e1c300 Merge branch 'master' into feat-ai-tools 2024-07-21 15:48:14 +01:00
Jeremy Ruston
0e5955397b Give conversation tiddlers a dummy text field
To avoid triggering a docs template
2024-07-19 14:51:18 +01:00
Jeremy Ruston
638bd78059 Formatting typo 2024-07-18 09:52:24 +01:00
Jeremy Ruston
d56958331b Docs tweaks 2024-07-18 09:49:33 +01:00
Jeremy Ruston
a2cff69fee Update docs 2024-07-18 09:24:37 +01:00
Jeremy Ruston
837374b5ab Don't try to parse plugins as conversations 2024-07-18 08:39:03 +01:00
Jeremy Ruston
d6f3058e9d Merge branch 'master' into feat-ai-tools 2024-07-18 08:34:21 +01:00
Jeremy Ruston
95f3e224b0 Merge branch 'master' into feat-ai-tools 2024-07-16 20:01:24 +01:00
Jeremy Ruston
a1782b1e4e Palette fixes 2024-07-16 19:41:44 +01:00
Jeremy Ruston
a921034561 Improved spinner colours 2024-07-14 21:27:52 +01:00
Jeremy Ruston
d652f820b8 Basic support for importing ChatGPT archives 2024-07-14 21:16:11 +01:00
Jeremy Ruston
e00c761088 Tweak default prompt 2024-07-11 15:02:30 +01:00
Jeremy Ruston
cb9deaa9b5 Update docs 2024-07-11 09:56:41 +01:00
Jeremy Ruston
3b07607d1b Fix Llamafile compatibility 2024-07-11 09:43:23 +01:00
Jeremy Ruston
2faba2e820 Initial Commit 2024-07-09 08:59:49 +01:00
32 changed files with 1079 additions and 20 deletions

View File

@@ -105,6 +105,7 @@ node $TW5_BUILD_TIDDLYWIKI \
fi
# /index.html Main site
# /external-(version).html External core version of main site
# /favicon.ico Favicon for main site
# /static.html Static rendering of default tiddlers
# /alltiddlers.html Static rendering of all tiddlers
@@ -117,7 +118,7 @@ node $TW5_BUILD_TIDDLYWIKI \
--version \
--load $TW5_BUILD_OUTPUT/build.tid \
--output $TW5_BUILD_OUTPUT \
--build favicon static index \
--build favicon static index external-js \
|| exit 1
# /empty.html Empty

View File

@@ -104,7 +104,7 @@ CreateTiddlerWidget.prototype.invokeAction = function(triggeringWidget,event) {
}
this.setVariable("createTiddler-title",title);
this.setVariable("createTiddler-draftTitle",draftTitle);
this.refreshChildren();
this.refreshChildren([]);
return true; // Action was invoked
};

View File

@@ -107,6 +107,7 @@ ImportVariablesWidget.prototype.execute = function(tiddlerList) {
Selectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering
*/
ImportVariablesWidget.prototype.refresh = function(changedTiddlers) {
changedTiddlers = changedTiddlers || {};
// Recompute our attributes and the filter list
var changedAttributes = this.computeAttributes(),
tiddlerList = this.wiki.filterTiddlers(this.getAttribute("filter"),this);

View File

@@ -2,11 +2,4 @@ created: 20131127215321439
modified: 20140912135951542
title: $:/DefaultTiddlers
[[TiddlyWiki Pre-release]]
HelloThere
[[Quick Start]]
[[Find Out More]]
[[TiddlyWiki on the Web]]
[[Testimonials and Reviews]]
GettingStarted
Community
$:/plugins/tiddlywiki/ai-tools

View File

@@ -17,7 +17,9 @@
"tiddlywiki/jszip",
"tiddlywiki/confetti",
"tiddlywiki/dynannotate",
"tiddlywiki/tour"
"tiddlywiki/tour",
"tiddlywiki/markdown",
"tiddlywiki/ai-tools"
],
"themes": [
"tiddlywiki/vanilla",

View File

@@ -3,10 +3,4 @@ modified: 20140912135951542
title: $:/DefaultTiddlers
type: text/vnd.tiddlywiki
HelloThere
[[Quick Start]]
[[Find Out More]]
[[TiddlyWiki on the Web]]
[[Testimonials and Reviews]]
GettingStarted
Community
$:/plugins/tiddlywiki/ai-tools

View File

@@ -3,4 +3,4 @@ modified: 20131211131023829
title: $:/SiteTitle
type: text/vnd.tiddlywiki
TiddlyWiki @@font-size:small; v<<version>>@@
TiddlyWiki AI Tools Plugin

View File

@@ -7,7 +7,9 @@
"tiddlywiki/menubar",
"tiddlywiki/confetti",
"tiddlywiki/dynannotate",
"tiddlywiki/tour"
"tiddlywiki/tour",
"tiddlywiki/qrcode",
"tiddlywiki/ai-tools"
],
"themes": [
"tiddlywiki/vanilla",

View File

@@ -0,0 +1,46 @@
title: $:/plugins/tiddlywiki/ai-tools/docs
!! Setting Up
See the ''settings'' tab for set up instructions.
!! Live AI Conversations in ~TiddlyWiki
# Click the {{||$:/plugins/tiddlywiki/ai-tools/page-menu}} icon in the sidebar to open a new conversation
# Choose the server from the dropdown:
#* ''Locally running Llamafile server''
#* ''~OpenAI Service'' (requires API key to be specified in ''settings'')
# Type a prompt for the LLM in the text box
#* If using ~OpenAI it is possible to attach a single image to a prompt
# Click "Send" and wait for the output of the LLM
!! Utilities
For example, to import ~ChatGPT conversation archives, follow the instructions in the [[utilities tab|$:/plugins/tiddlywiki/ai-tools/utilities]].
!! Conversation Format
This plugin defines a simple schema for representing conversations with an LLM.
In a nutshell, tiddlers tagged <<tag $:/tags/AI/Conversation>> define conversations. The individual messages are tiddlers that are tagged with the title of the conversation tiddler.
Currently, the ordering of the messages is determined by the value of their "created" field. The ordering defined by the tag mechanism is ignored. It is intended to change this behaviour so that the ordering of messages is defined by the tag mechanism.
The fields with defined meanings for conversation tiddlers are:
|!Field |!Description |
|''system-prompt'' |Defines the system prompt for the conversation |
|''tags'' |Must include <<tag $:/tags/AI/Conversation>> |
|''text'' |Optional description or notes displayed at the top of the conversation |
|''current-response-image'' |Optional title of an image tiddler to be attached to the current user response |
|''current-response-text'' |Text of the current user response before it is sent |
The fields with defined meanings for message tiddlers are:
|!Field |!Description |
|''created'' |Creation date of the message (currently used for ordering) |
|''image'' |Optional image associated with this message |
|''role'' |Possible values include ''user'' and ''assistant'' |
|''tags'' |Must include the title of the parent conversation |
|''type'' |Typically ''text/markdown'' |

View File

@@ -0,0 +1,255 @@
title: $:/plugins/tiddlywiki/ai-tools/globals
tags: $:/tags/Global
\function ai-tools-default-llm-completion-server()
[[$:/plugins/tiddlywiki/ai-tools/configs/default-server]get[completion-server]]
\end
\function ai-tools-default-llm-completion-model()
[[$:/plugins/tiddlywiki/ai-tools/configs/default-server]get[completion-model]]
\end
<!--
Action procedure to retrieve an LLM completion, given the following parameters:
conversationTitle - Title of the tiddler containing the conversation
resultTitlePrefix - Prefix of the tiddler to be used for saving the result. If the tiddler already exists then a number will be added repeatedly until the resulting title is unique
resultTags - Tags to be applied to the result tiddler
ai-tools-status-title - Optional title of a tiddler to which the status of the request will be bound: "pending", "complete", "error"
completionServer - Optional URL of server
completionModel - Optional model to use
-->
\procedure ai-tools-get-llm-completion(conversationTitle,resultTitlePrefix,resultTags,ai-tools-status-title,completionServer,completionModel,authTokenStoreKey)
<$let
completionServer={{{ [<completionServer>!is[blank]else<ai-tools-default-llm-completion-server>] }}}
completionModel={{{ [<completionModel>!is[blank]else<ai-tools-default-llm-completion-model>] }}}
>
<$importvariables filter="[<completionServer>get[extends]] [<completionServer>]">
<$wikify name="json" text=<<json-prompt>>>
<$action-log message="ai-tools-get-llm-completion"/>
<$action-log/>
<$action-sendmessage
$message="tm-http-request"
url={{{ [<completionServer>get[url]] }}}
body=<<json>>
header-content-type="application/json"
bearer-auth-token-from-store=<<authTokenStoreKey>>
method="POST"
oncompletion=<<completion-callback>>
bind-status=<<ai-tools-status-title>>
var-resultTitlePrefix=<<resultTitlePrefix>>
var-resultTags=<<resultTags>>
/>
</$wikify>
</$importvariables>
</$let>
\end ai-tools-get-llm-completion
<!--
-->
\function ai-tools-status-title()
[<currentTiddler>addprefix[$:/temp/ai-tools/status/]]
\end ai-tools-status-title
<!--
Procedure to display a message from an AI conversation. Current tiddler is the conversation tiddler
-->
\procedure ai-tools-message(tiddler,field,role,makeLink:"yes")
<$qualify
name="state"
title={{{ [[$:/state/ai-tools-message-state/]addsuffix<tiddler>] }}}
>
<$let
editStateTiddler={{{ [<state>addsuffix[-edit-state]] }}}
editState={{{ [<editStateTiddler>get[text]else[view]] }}}
>
<div class={{{ ai-tools-message [<role>addprefix[ai-tools-message-role-]] +[join[ ]] }}}>
<div class="ai-tools-message-toolbar">
<div class="ai-tools-message-toolbar-left">
<$genesis $type={{{ [<makeLink>match[yes]then[$link]else[span]] }}} to=<<tiddler>>>
<$text text=<<role>>/>
</$genesis>
</div>
<div class="ai-tools-message-toolbar-left">
<%if [<editState>!match[edit]] %>
<$button class="ai-tools-message-toolbar-button">
<$action-setfield $tiddler=<<editStateTiddler>> text="edit"/>
edit
</$button>
<%endif%>
<%if [<editState>!match[view]] %>
<$button class="ai-tools-message-toolbar-button">
<$action-setfield $tiddler=<<editStateTiddler>> text="view"/>
view
</$button>
<%endif%>
<$button class="ai-tools-message-toolbar-button">
<$action-sendmessage $message="tm-copy-to-clipboard" $param={{{ [<tiddler>get<field>else[]] }}}/>
copy
</$button>
<$button class="ai-tools-message-toolbar-button">
<$action-deletetiddler $tiddler=<<tiddler>>/>
delete
</$button>
</div>
</div>
<div class="ai-tools-message-body">
<%if [<editState>match[view]] %>
<$transclude $tiddler=<<tiddler>> $field=<<field>> $mode="block"/>
<%else%>
<$edit-text tiddler=<<tiddler>> field=<<field>> tag="textarea" class="tc-edit-texteditor"/>
<%endif%>
<%if [<tiddler>get[image]else[]!match[]] %>
<$image source={{{ [<tiddler>get[image]] }}}/>
<%endif%>
</div>
</div>
</$let>
</$qualify>
\end ai-tools-message
\procedure ai-tools-get-message()
\whitespace trim
<$wikify
name="messageText"
text={{!!text}}
type={{!!type}}
output="text"
>
<$text text={{{ [<messageText>jsonstringify[]] }}}/>
</$wikify>
\end
<!--
Action procedure to get the next response from the LLM on a chat tiddler.
-->
\procedure ai-tools-action-get-response()
<!-- Get the response -->
<$let
resultTitlePrefix={{{ [<currentTiddler>addsuffix[ - Prompt]] }}}
resultTags={{{ [<currentTiddler>format:titlelist[]] }}}
>
<$action-createtiddler
$basetitle=<<resultTitlePrefix>>
tags=<<resultTags>>
role="user"
text={{!!current-response-text}}
type={{!!current-response-type}}
image={{!!current-response-image}}
>
<$action-deletefield $tiddler=<<currentTiddler>> $field="current-response-text"/>
<$action-setfield $tiddler=<<currentTiddler>> $field="current-response-type" $value="text/vnd.tiddlywiki"/>
<$action-deletefield $tiddler=<<currentTiddler>> $field="current-response-image"/>
<$transclude
$variable="ai-tools-get-llm-completion"
conversationTitle=<<currentTiddler>>
completionServer={{!!completion-server}}
completionModel={{!!completion-model}}
authTokenStoreKey={{{[{!!completion-server}get[auth-token-store-key]]}}}
resultTitlePrefix=<<resultTitlePrefix>>
resultTags=<<resultTags>>
ai-tools-status-title=<<ai-tools-status-title>>
/>
</$action-createtiddler>
</$let>
\end ai-tools-action-get-response
\procedure ai-tools-conversation(conversationTitle)
<$let currentTiddler=<<conversationTitle>>>
Server: <$select tiddler=<<currentTiddler>> field="completion-server" default=<<ai-tools-default-llm-completion-server>>>
<$list filter="[all[shadows+tiddlers]tag[$:/tags/AI/CompletionServer]sort[caption]]">
<option value=<<currentTiddler>>><$view field='caption'/></option>
</$list>
</$select>
Model: <$select tiddler=<<currentTiddler>> field="completion-model" default=<<ai-tools-default-llm-completion-model>>>
<$list filter="[<ai-tools-default-llm-completion-server>get[models]enlist-input[]]">
<option value=<<currentTiddler>>><<currentTiddler>></option>
</$list>
</$select>
<$list filter="[<ai-tools-default-llm-completion-server>get[settings]]">
<$button to=<<currentTiddler>> class="tc-btn-invisible">
{{$:/core/images/options-button}}
</$button>
</$list>
<div class="ai-conversation">
<$transclude
$variable="ai-tools-message"
tiddler=<<currentTiddler>>
field="system-prompt"
role="system"
makeLink="no"
/>
<$list filter="[all[shadows+tiddlers]tag<currentTiddler>!is[draft]sort[created]]" variable="message" storyview="pop">
<$transclude
$variable="ai-tools-message"
tiddler=<<message>>
field="text"
role={{{ [<message>get[role]] }}}
/>
</$list>
<%if [<ai-tools-status-title>get[text]else[complete]match[pending]] %>
<div class="ai-request-status">
<div class="ai-request-spinner"></div>
</div>
<%endif%>
<div class="ai-user-prompt">
<div class="ai-user-prompt-text">
<$edit-text tiddler=<<currentTiddler>> field="current-response-text" tag="textarea" class="tc-edit-texteditor"/>
<$button
class="ai-user-prompt-send"
actions=<<ai-tools-action-get-response>>
disabled={{{ [<ai-tools-status-title>get[text]else[complete]match[pending]then[yes]] [<currentTiddler>get[current-response-text]else[]match[]then[yes]] ~[[no]] }}}
>
Send
</$button>
</div>
<div class="ai-user-prompt-image">
<div class="tc-drop-down-wrapper">
<$let state=<<qualify "$:/state/ai-user-prompt-image-dropdown-state/">>>
<$button popup=<<state>> class="tc-btn-invisible tc-btn-dropdown">Choose an image {{$:/core/images/down-arrow}}</$button>
<$link to={{!!current-response-image}}>
<$text text={{!!current-response-image}}/>
</$link>
<$reveal state=<<state>> type="popup" position="belowleft" text="" default="" class="tc-popup-keep">
<div class="tc-drop-down" style="text-align:center;">
<$transclude
$variable="image-picker"
filter="[all[shadows+tiddlers]is[image]is[binary]!has[_canonical_uri]] -[type[application/pdf]] +[!has[draft.of]sort[title]]"
actions="""
<$action-setfield
$tiddler=<<currentTiddler>>
current-response-image=<<imageTitle>>
/>
<$action-deletetiddler $tiddler=<<state>>/>
"""
/>
</div>
</$reveal>
</$let>
<$image source={{!!current-response-image}}/>
</div>
</div>
</div>
<div class="ai-user-prompt-type">
<$edit-text tiddler=<<currentTiddler>> field="current-response-type" tag="input" class="tc-edit-texteditor"/>
Note that your text will be wikified before being sent to the LLM. Use &#x7B;&#x7B;transclusion&#x7D;&#x7D; to include other tiddlers in your messages
</div>
</div>
</$let>
\end ai-tools-conversation
\procedure ai-tools-new-conversation()
<$action-createtiddler
$basetitle="AI Conversation"
tags="$:/tags/AI/Conversation"
system-prompt="Transcript of a never ending dialog, where the User interacts with an Assistant. The Assistant is helpful, kind, honest, good at writing, and never fails to answer the User's requests immediately and with precision."
current-response-text="Please list the 10 most important mechanical inventions of the Twentieth Century"
current-response-type="text/vnd.tiddlywiki"
>
<$action-navigate $to=<<createTiddler-title>>/>
</$action-createtiddler>
\end ai-tools-new-conversation
\procedure ai-tools-import-conversations()
<$action-navigate $to="$:/plugins/tiddlywiki/ai-tools/tools"/>
\end ai-tools-import-conversations

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,87 @@
/*\
title: $:/plugins/tiddlywiki/ai-tools/modules/conversations-archive-importer.js
type: application/javascript
module-type: library

Conversations archive importer

\*/
(function(){

/*jslint node: true, browser: true */
/*global $tw: false */
"use strict";

/*
Importer for ChatGPT-style conversation archive JSON: an array of
conversation objects, each carrying a "mapping" of message objects
*/
function ConversationsArchiveImporter() {
}

/*
Import the conversations found in the JSON tiddler `conversationsTitle`,
creating one conversation tiddler plus one tiddler per non-null message.
widget: widget providing access to the wiki store
conversationsTitle: title of the tiddler containing the archive JSON
*/
ConversationsArchiveImporter.prototype.import = function(widget,conversationsTitle) {
	var jsonConversations = widget.wiki.getTiddlerData(conversationsTitle,[]),
		tiddlers = [];
	$tw.utils.each(jsonConversations,function(jsonConversation) {
		var conversationTitle = (jsonConversation.title || "Untitled") + " (" + jsonConversation.conversation_id + ")",
			conversationCreated = convertDate(jsonConversation.create_time),
			conversationModified = convertDate(jsonConversation.update_time);
		tiddlers.push({
			title: conversationTitle,
			tags: $tw.utils.stringifyList(["$:/tags/AI/Conversation"]),
			created: conversationCreated,
			modified: conversationModified
		});
		var messageIndex = 1;
		$tw.utils.each(jsonConversation.mapping,function(jsonMessage,messageId) {
			// Skip entries where "message" is null
			if(jsonMessage.message) {
				var messageFields = {
					// Number messages from 1 (previously `messageIndex + 1` made the first message " 2")
					title: conversationTitle + " " + messageIndex,
					// Fall back to the conversation dates when a message has no timestamps
					created: convertDate(jsonMessage.message.create_time) || conversationCreated,
					modified: convertDate(jsonMessage.message.update_time) || conversationModified,
					tags: $tw.utils.stringifyList([conversationTitle]),
					role: jsonMessage.message.author.role,
					"message-type": jsonMessage.message.content.content_type
				};
				switch(jsonMessage.message.content.content_type) {
					case "text":
						// Ordinary messages arrive as an array of markdown parts
						messageFields.text = jsonMessage.message.content.parts.join("");
						messageFields.type = "text/markdown";
						break;
					case "code":
					case "execution_output":
					case "system_error":
						// These content types share a plain-text payload shape
						messageFields.text = jsonMessage.message.content.text;
						messageFields.type = "text/plain";
						break;
					default:
						// Preserve unrecognised content types verbatim for inspection
						messageFields.text = JSON.stringify(jsonMessage.message,null,4);
						messageFields.type = "text/plain";
						break;
				}
				tiddlers.push(messageFields);
				messageIndex += 1;
			}
		});
	});
	// Create the tiddlers
	widget.wiki.addTiddlers(tiddlers);
};

/*
Convert a Unix timestamp in seconds to a TiddlyWiki date string.
Returns null for missing timestamps so that `||` fallbacks work — the
previous version returned a truthy "NaN…" string for undefined input,
which defeated the fallback to the conversation-level dates
*/
function convertDate(unixTimestamp) {
	if(!unixTimestamp && unixTimestamp !== 0) {
		return null;
	}
	return $tw.utils.stringifyDate(new Date(unixTimestamp * 1000));
}

exports.ConversationsArchiveImporter = ConversationsArchiveImporter;

})();

View File

@@ -0,0 +1,30 @@
/*\
title: $:/plugins/tiddlywiki/ai-tools/modules/startup.js
type: application/javascript
module-type: startup
Setup the root widget event handlers
\*/
(function(){
/*jslint node: true, browser: true */
/*global $tw: false */
"use strict";
// Export name and synchronous status
exports.name = "ai-tools";
exports.platforms = ["browser"];
exports.after = ["startup"];
exports.synchronous = true;
// Install the root widget event handlers
exports.startup = function() {
var ConversationsArchiveImporter = require("$:/plugins/tiddlywiki/ai-tools/modules/conversations-archive-importer.js").ConversationsArchiveImporter;
$tw.conversationsArchiveImporter = new ConversationsArchiveImporter();
$tw.rootWidget.addEventListener("tm-import-conversations",function(event) {
$tw.conversationsArchiveImporter.import(event.widget,event.param);
});
};
})();

View File

@@ -0,0 +1,21 @@
title: $:/plugins/tiddlywiki/ai-tools/page-menu
tags: $:/tags/PageControls
caption: {{$:/plugins/tiddlywiki/ai-tools/icon}} AI Tools
description: Tools for interactive AI services
\whitespace trim
<$button popup=<<qualify "$:/state/popup/ai-tools/page-menu">> tooltip="Tools for interactive AI services" aria-label="AI Tools" class=<<tv-config-toolbar-class>> selectedClass="tc-selected">
<%if [<tv-config-toolbar-icons>match[yes]] %>
{{$:/plugins/tiddlywiki/ai-tools/icon}}
<%endif%>
<%if [<tv-config-toolbar-text>match[yes]] %>
<span class="tc-btn-text"><$text text="AI Tools"/></span>
<%endif%>
</$button>
<$reveal state=<<qualify "$:/state/popup/ai-tools/page-menu">> type="popup" position="belowleft" animate="yes">
<div class="tc-drop-down">
<$list filter="[all[shadows+tiddlers]tag[$:/tags/AI/PageMenu]!has[draft.of]]" variable="listItem">
<$transclude tiddler=<<listItem>>/>
</$list>
</div>
</$reveal>

View File

@@ -0,0 +1,6 @@
title: $:/plugins/tiddlywiki/ai-tools/page-menu/new-conversation
tags: $:/tags/AI/PageMenu
<$button actions=<<ai-tools-new-conversation>> class="tc-btn-invisible">
{{$:/core/images/new-button}} New Conversation
</$button>

View File

@@ -0,0 +1,7 @@
{
"title": "$:/plugins/tiddlywiki/ai-tools",
"name": "AI Tools",
"description": "AI Tools for TiddlyWiki",
"list": "readme docs settings utilities tree",
"stability": "STABILITY_1_EXPERIMENTAL"
}

View File

@@ -0,0 +1,12 @@
title: $:/plugins/tiddlywiki/ai-tools/readme
!! AI Tools for TiddlyWiki
This plugin adds integrated LLM conversations to the TiddlyWiki platform.
The plugin allows live conversations within TiddlyWiki and also importing ~ChatGPT conversation archives.
The plugin supports two options for the LLM server:
* ''Locally running Llamafile server'' - Llamafile is an open source project that lets you distribute and run LLMs as a single file. The files are large, typically 4+ gigabytes, but offer reasonable performance on modern hardware, and total privacy
* ''~OpenAI Service'' - ~OpenAI is a commercial service that offers paid APIs for accessing some of the most sophisticated LLMs that are available. ~OpenAI requires tokens to be purchased for API usage (this is entirely separate from ~ChatGPT subscriptions)

View File

@@ -0,0 +1,8 @@
title: $:/plugins/tiddlywiki/ai-tools/servers/deepseek
tags: $:/tags/AI/CompletionServer
url: https://api.deepseek.com/chat/completions
caption: DeepSeek
auth-token-store-key: deepseek-secret-key
models: deepseek-chat deepseek-reasoner
extends: $:/plugins/tiddlywiki/ai-tools/servers/openai
settings: $:/plugins/tiddlywiki/ai-tools/settings/deepseek

View File

@@ -0,0 +1,86 @@
title: $:/plugins/tiddlywiki/ai-tools/servers/llamafile-llava
tags: $:/tags/AI/CompletionServer
url: http://127.0.0.1:8080/completion
caption: Local Llamafile server running LLaVA models
settings: $:/plugins/tiddlywiki/ai-tools/settings/llamafile
<!--
Wikified JSON text to be sent to server
-->
\procedure json-prompt()
\rules only filteredtranscludeinline transcludeinline macrodef macrocallinline html conditional commentblock commentinline
{
"image_data": [
<$list filter="[all[shadows+tiddlers]tag<conversationTitle>!is[draft]sort[created]]" counter="counter">
<%if [<currentTiddler>has[image]] %>
<%if [<counter-first>!match[yes]] %>,<%endif%>
{
"id": <$text text=<<counter>>/>,
"data": "<$text text={{{ [<currentTiddler>get[image]get[text]jsonstringify[]] +[join[]] }}}/>"
}
<%endif%>
</$list>
],
"prompt": "<<json-prompt-text>>",
"cache_prompt": false,
"frequency_penalty": 0,
"grammar": "",
"mirostat_eta": 0.1,
"mirostat_tau": 5,
"mirostat": 0,
"n_predict": 400,
"n_probs": 0,
"presence_penalty": 0,
"repeat_last_n": 256,
"repeat_penalty": 1.18,
"slot_id": -1,
"stop": ["</s>", "Llama:", "User:"],
"stream" : false,
"temperature": 0.7,
"tfs_z": 1,
"top_k": 40,
"top_p": 0.5,
"typical_p": 1
}
\end json-prompt
\procedure json-prompt-text()
\whitespace trim
\rules only filteredtranscludeinline transcludeinline macrodef macrocallinline html conditional commentblock commentinline
<$text text={{{ [<conversationTitle>get[system-prompt]jsonstringify[]] }}}/>
\n
<$list filter="[all[shadows+tiddlers]tag<conversationTitle>!is[draft]sort[created]]" counter="counter">
\n
<$text text={{{ [<currentTiddler>get[role]else[user]match[user]then[User:]else[Llama:]] }}}/>
<%if [<currentTiddler>has[image]] %>
[img-<$text text=<<counter>>/>]
<%endif%>
<<ai-tools-get-message>>
</$list>
\nLlama:
\end json-prompt-text
<!--
Callback for the HTTP response from the LLM
-->
\procedure completion-callback()
<%if [<status>compare:number:gteq[200]compare:number:lteq[299]] %>
<!-- Success -->
<$action-createtiddler
$basetitle=<<resultTitlePrefix>>
tags=<<resultTags>>
type="text/markdown"
role="assistant"
text={{{ [<data>jsonget[content]] }}}
/>
<%else%>
<!-- Error -->
<$action-createtiddler
$basetitle=<<resultTitlePrefix>>
tags=<<resultTags>>
type="text/markdown"
role="error"
text={{{ [[Error:]] [<statusText>] [<data>jsonget[error],[message]] +[join[]] }}}
/>
<%endif%>
\end completion-callback

View File

@@ -0,0 +1,75 @@
title: $:/plugins/tiddlywiki/ai-tools/servers/openai
tags: $:/tags/AI/CompletionServer
url: https://api.openai.com/v1/chat/completions
auth-token-store-key: openai-secret-key
caption: OpenAI Service
models: gpt-4o gpt-4.5-preview gpt-4o-mini o1 o1-mini o3-mini
settings: $:/plugins/tiddlywiki/ai-tools/settings/openai
<!--
Wikified JSON text to be sent to server
-->
\procedure json-prompt()
\rules only filteredtranscludeinline transcludeinline macrodef macrocallinline html conditional commentblock commentinline
{
"model": "<<completionModel>>",
"messages": [
{
"role": "system",
"content": "<$text text={{{ [<conversationTitle>get[system-prompt]jsonstringify[]] }}}/>"
}
<!--
Loop through the tiddlers tagged with this one to pick up all the messages in the conversation.
Exclude messages with the 'error' role, so that only the roles 'system', 'user', 'assistant' and 'tool' are sent to the API.
-->
<$list filter="[all[shadows+tiddlers]tag<conversationTitle>!is[draft]!role[error]sort[created]]">
,
{
<!-- We use JSON stringify to escape the characters that can't be used directly in JSON -->
"role": "<$text text={{{ [<currentTiddler>get[role]jsonstringify[]] }}}/>",
"content": [
{
"type": "text",
"text": "<<ai-tools-get-message>>"
}
<%if [<currentTiddler>get[image]else[]!match[]] %>
,
{
"type": "image_url",
"image_url": {
"url": "<$text text={{{ [[data:]] [<currentTiddler>get[image]get[type]] [[;base64,]] [<currentTiddler>get[image]get[text]jsonstringify[]] +[join[]] }}}/>"
}
}
<%endif%>
]
}
</$list>
]
}
\end json-prompt
<!--
Callback for the HTTP response from the LLM
-->
\procedure completion-callback()
<%if [<status>compare:number:gteq[200]compare:number:lteq[299]] %>
<!-- Success -->
<$action-createtiddler
$basetitle=<<resultTitlePrefix>>
tags=<<resultTags>>
type="text/markdown"
role={{{ [<data>jsonget[choices],[0],[message],[role]] }}}
text={{{ [<data>jsonget[choices],[0],[message],[content]] }}}
/>
<%else%>
<!-- Error -->
<$action-createtiddler
$basetitle=<<resultTitlePrefix>>
tags=<<resultTags>>
type="text/markdown"
role="error"
text={{{ [[Error:]] [<statusText>] [<data>jsonget[error],[message]] +[join[]] }}}
/>
<%endif%>
\end completion-callback

View File

@@ -0,0 +1,8 @@
title: $:/plugins/tiddlywiki/ai-tools/servers/siliconflow
tags: $:/tags/AI/CompletionServer
url: https://api.siliconflow.cn/v1/chat/completions
caption: SiliconFlow
auth-token-store-key: siliconflow-secret-key
models: deepseek-ai/DeepSeek-V3 deepseek-ai/DeepSeek-R1
extends: $:/plugins/tiddlywiki/ai-tools/servers/openai
settings: $:/plugins/tiddlywiki/ai-tools/settings/siliconflow

View File

@@ -0,0 +1,12 @@
title: $:/plugins/tiddlywiki/ai-tools/settings/deepseek
tags: $:/tags/AI/ServerSetting
caption: DeepSeek
!! ~DeepSeek API key
# Register for an account at https://deepseek.com/
# Complete the KYC and charge your account with a minimum of ¥1 on https://platform.deepseek.com/top_up
# Visit https://platform.deepseek.com/api_keys to create a new secret API key
# Copy and paste the value into the box below
~DeepSeek Secret API Key: <$password name={{$:/plugins/tiddlywiki/ai-tools/servers/deepseek!!auth-token-store-key}}/>

View File

@@ -0,0 +1,12 @@
title: $:/plugins/tiddlywiki/ai-tools/settings/llamafile
tags: $:/tags/AI/ServerSetting
caption: Llamafile
models:
!! Llamafile Setup
[[Llamafile|https://github.com/Mozilla-Ocho/llamafile]] lets you download and run LLMs as a single file. See the [[announcement blog post|https://hacks.mozilla.org/2023/11/introducing-llamafile/]] for background.
# Download and run Llamafile as [[described in the QuickStart guide|https://github.com/Mozilla-Ocho/llamafile?tab=readme-ov-file#quickstart]]
# Visit http://127.0.0.1:8080 in a browser and verify that you can see the Llamafile interface. You can also try it out here
# Return to AI Tools and start a conversation, specifying Llamafile as the server

View File

@@ -0,0 +1,15 @@
title: $:/plugins/tiddlywiki/ai-tools/settings/openai
tags: $:/tags/AI/ServerSetting
caption: OpenAI
!! ~OpenAI API key
This plugin runs entirely in the browser, with no backend server component. A consequence of this design is that the API keys required to access external services must be obtained by the end user. These keys are stored in the browser and so only need to be set up once.
# Register for an account at https://platform.openai.com/
#* Newly registered accounts can claim a small amount of credit, thereafter payment is needed
#* Note that ~OpenAI run completely different payment systems for ~ChatGPT and the API platform. Even if you are already a subscriber to ~ChatGPT you will still need to pay for API usage after the initial free service
# Visit https://platform.openai.com/api-keys to create a new secret API key
# Copy and paste the value into the box below
~OpenAI Secret API Key: <$password name={{$:/plugins/tiddlywiki/ai-tools/servers/openai!!auth-token-store-key}}/>

View File

@@ -0,0 +1,23 @@
title: $:/plugins/tiddlywiki/ai-tools/settings
tags: $:/tags/ControlPanel/SettingsTab
caption: AI Tools
These settings let you customise the behaviour of the "AI Tools" plugin.
!! Completion Servers
Default Server: <$select tiddler="$:/plugins/tiddlywiki/ai-tools/configs/default-server" field="completion-server">
<$list filter="[all[shadows+tiddlers]tag[$:/tags/AI/CompletionServer]sort[caption]]">
<option value=<<currentTiddler>>><$view field='caption'/></option>
</$list>
</$select>
Default Model: <$select tiddler="$:/plugins/tiddlywiki/ai-tools/configs/default-server" field="completion-model">
<$list filter="[{$:/plugins/tiddlywiki/ai-tools/configs/default-server!!completion-server}get[models]enlist-input[]]">
<option value=<<currentTiddler>>><<currentTiddler>></option>
</$list>
</$select>
<$transclude
$variable="tabs"
tabsList="[all[shadows+tiddlers]tag[$:/tags/AI/ServerSetting]]"
default={{{[{$:/plugins/tiddlywiki/ai-tools/configs/default-server!!completion-server}get[settings]]}}}
/>

View File

@@ -0,0 +1,12 @@
title: $:/plugins/tiddlywiki/ai-tools/settings/siliconflow
tags: $:/tags/AI/ServerSetting
caption: SiliconFlow
!! ~SiliconFlow API key
# Register for an account at https://siliconflow.com/ or https://siliconflow.cn/
#* Newly registered accounts can claim a small amount of credit, no payment info is required
# Visit https://cloud.siliconflow.com/account/ak or https://cloud.siliconflow.cn/account/ak to create a new secret API key
# Copy and paste the value into the box below
~SiliconFlow Secret API Key: <$password name={{$:/plugins/tiddlywiki/ai-tools/servers/siliconflow!!auth-token-store-key}}/>

View File

@@ -0,0 +1,230 @@
title: $:/plugins/tiddlywiki/ai-tools/styles
tags: [[$:/tags/Stylesheet]]
\rules only filteredtranscludeinline transcludeinline macrodef macrocallinline
.ai-conversation {
background: <<colour background>>;
border-radius: 2em;
padding: 1em 1em;
display: flex;
flex-direction: column;
gap: 1em;
box-shadow: 2px 2px 5px <<colour muted-foreground>>;
}
.ai-conversation .ai-tools-message {
box-shadow: 2px 2px 5px <<colour muted-foreground>>;
border-radius: 1em;
display: flex;
flex-direction: column;
}
.ai-conversation .ai-tools-message .ai-tools-message-toolbar {
background: <<colour sidebar-foreground-shadow>>;
color: <<colour foreground>>;
padding: 0.25em 1em 0.25em 1em;
border-top-left-radius: 1em;
border-top-right-radius: 1em;
display: flex;
justify-content: space-between;
}
.ai-conversation .ai-tools-message .ai-tools-message-toolbar .tc-tiddlylink {
color: inherit;
}
.ai-conversation .ai-tools-message .ai-tools-message-toolbar .ai-tools-message-toolbar-button {
background: <<colour sidebar-foreground-shadow>>;
color: <<colour foreground>>;
cursor: pointer;
display: inline-block;
outline: 0;
overflow: hidden;
pointer-events: auto;
position: relative;
text-align: center;
touch-action: manipulation;
user-select: none;
-webkit-user-select: none;
vertical-align: top;
white-space: nowrap;
border: 0;
border-radius: 4px;
}
.ai-conversation .ai-tools-message .ai-tools-message-toolbar .ai-tools-message-toolbar-button:hover {
color: <<colour foreground>>;
background: <<colour sidebar-foreground-shadow>>;
}
.ai-conversation .ai-tools-message .ai-tools-message-body {
padding: 0 1em 0 1em
}
.ai-conversation .ai-tools-message.ai-tools-message-role-system {
width: 60%;
background: <<colour footnote-target-background>>;
color: <<colour foreground>>;
}
.ai-conversation .ai-tools-message.ai-tools-message-role-user {
width: 60%;
margin-left: auto;
background: <<colour code-background>>;
}
.ai-conversation .ai-tools-message.ai-tools-message-role-assistant {
background: <<colour tiddler-editor-background>>;
}
.ai-conversation .ai-tools-message.ai-tools-message-role-error {
background: <<colour notification-background>>;
}
.ai-conversation .ai-user-prompt {
padding: 1em;
background: <<colour code-background>>;
border-radius: 1em;
box-shadow: inset 3px 4px 2px rgba(0, 0, 0, 0.1);
}
.ai-conversation .ai-user-prompt button svg.tc-image-button {
fill: #000;
}
.ai-conversation .ai-user-prompt-text {
display: flex;
align-items: flex-start;
gap: 1em;
}
.ai-conversation .ai-user-prompt-text textarea {
color: <<colour foreground>>;
background: <<colour background>>;
}
.ai-conversation .ai-user-prompt button.ai-user-prompt-send {
background-color: initial;
background-image: linear-gradient(-180deg, #e0c3ce, #963057);
border-radius: 1em;
box-shadow: rgba(0, 0, 0, 0.1) 0 2px 4px;
color: <<colour foreground>>;
cursor: pointer;
display: inline-block;
outline: 0;
overflow: hidden;
padding: 0 20px;
pointer-events: auto;
position: relative;
text-align: center;
touch-action: manipulation;
user-select: none;
-webkit-user-select: none;
vertical-align: top;
white-space: nowrap;
border: 0;
transition: box-shadow .2s;
line-height: 2;
}
.ai-conversation .ai-user-prompt button.ai-user-prompt-send:hover:not(:disabled) {
box-shadow: rgb(255 62 135 / 64%) 0 3px 8px;
}
.ai-conversation .ai-user-prompt button.ai-user-prompt-send:disabled {
background: #ddd;
color: #444;
}
.ai-conversation .ai-user-prompt textarea {
margin: 0;
}
.ai-conversation .ai-user-prompt .ai-user-prompt-image button {
color: <<colour foreground>>;
fill: <<colour foreground>>;
}
.ai-conversation .ai-user-prompt-type {
font-size: 0.8em;
line-height: 1.5;
}
.ai-request-spinner {
animation: ai-request-spinner-animation-rotate 1s infinite;
height: 50px;
width: 50px;
margin-left: auto;
margin-right: auto;
}
.ai-request-spinner:before,
.ai-request-spinner:after {
border-radius: 50%;
content: "";
display: block;
height: 20px;
width: 20px;
}
.ai-request-spinner:before {
animation: ai-request-spinner-animation-ball1 1s infinite;
background-color: #9c9ab0;
box-shadow: 30px 0 0 #fefdff;
margin-bottom: 10px;
}
.ai-request-spinner:after {
animation: ai-request-spinner-animation-ball2 1s infinite;
background-color: #fefdff;
box-shadow: 30px 0 0 #9c9ab0;
}
@keyframes ai-request-spinner-animation-rotate {
0% { transform: rotate(0deg) scale(0.8) }
50% { transform: rotate(360deg) scale(1.2) }
100% { transform: rotate(720deg) scale(0.8) }
}
@keyframes ai-request-spinner-animation-ball1 {
0% {
box-shadow: 30px 0 0 #fefdff;
}
50% {
box-shadow: 0 0 0 #fefdff;
margin-bottom: 0;
transform: translate(15px, 15px);
}
100% {
box-shadow: 30px 0 0 #fefdff;
margin-bottom: 10px;
}
}
@keyframes ai-request-spinner-animation-ball2 {
0% {
box-shadow: 30px 0 0 #9c9ab0;
}
50% {
box-shadow: 0 0 0 #9c9ab0;
margin-top: -20px;
transform: translate(15px, 15px);
}
100% {
box-shadow: 30px 0 0 #9c9ab0;
margin-top: 0;
}
}
.tc-ai-tools-dropzone {
background: yellow;
text-align: center;
width: 100%;
height: 4em;
}
.tc-ai-tools-dropzone.tc-dragover {
background: red;
}

View File

@@ -0,0 +1,3 @@
title: $:/plugins/tiddlywiki/ai-tools/tree
<<tree prefix:"$:/plugins/tiddlywiki/ai-tools/">>

View File

@@ -0,0 +1,51 @@
title: $:/plugins/tiddlywiki/ai-tools/utilities/openai
tags: $:/tags/AI/Utility
caption: Import ChatGPT Export Archive
These instructions allow you to import the conversations from a ~ChatGPT export archive.
# [[Follow the instructions|https://help.openai.com/en/articles/7260999-how-do-i-export-my-chatgpt-history-and-data]] to request an export of your ~ChatGPT data
# You will receive a link to download your data as a ZIP file
# Download and unzip the file
# Locate the file `conversations.json` within the archive and import it into your TiddlyWiki
# Visit the ''tools'' tab and locate your `conversations.json` tiddler
# Click the associated ''import'' button
# See the imported conversations listed in the ''tools'' tab
# The imported tiddler `conversations.json` is no longer required and can be deleted
!! 1 - Request your archive
Visit the ~ChatGPT site to request your archive. You will be sent an email with a link to a ZIP file. Download the file and locate the file `conversations.json` within it.
!! 2 - Import `conversations.json` as a tiddler
It is not possible to use the normal import process to import the `conversations.json` file because TiddlyWiki will erroneously recognise it as a JSON file of tiddlers.
Instead, drag the `conversations.json` file to the dropzone below. Then click the "Import" button to complete the import:
<$dropzone deserializer="text/plain" autoOpenOnImport="yes" filesOnly="yes" class="tc-ai-tools-dropzone">
Drop your file here
</$dropzone>
!! 3 - Import the conversations within it
Any tiddlers containing ~ChatGPT exported `conversations.json` files will be shown here for import.
<$list filter="[all[tiddlers+shadows]type[application/json]!has[plugin-type]sort[title]]" template="$:/plugins/tiddlywiki/ai-tools/view-templates/imported-conversations-json"/>
!! 4 - Review Loaded Conversations
<ul>
<$list filter="[all[tiddlers+shadows]tag[$:/tags/AI/Conversation]sort[title]]">
<$list-empty>
No conversations found
</$list-empty>
<$list-template>
<li>
<$link>
<$text text=<<currentTiddler>>/>
</$link>
</li>
</$list-template>
</$list>
</ul>

View File

@@ -0,0 +1,3 @@
title: $:/plugins/tiddlywiki/ai-tools/utilities
<<tabs "[all[shadows+tiddlers]tag[$:/tags/AI/Utility]]">>

View File

@@ -0,0 +1,14 @@
title: $:/plugins/tiddlywiki/ai-tools/view-templates/conversation
tags: $:/tags/ViewTemplate
list-after: $:/core/ui/ViewTemplate/body
<%if [<currentTiddler>tag[$:/tags/AI/Conversation]] %>
<$transclude
$variable="ai-tools-conversation"
$mode="block"
conversationTitle=<<currentTiddler>>
/>
<%endif%>

View File

@@ -0,0 +1,45 @@
title: $:/plugins/tiddlywiki/ai-tools/view-templates/imported-conversations-json
tags: $:/tags/ViewTemplate
list-before: $:/core/ui/ViewTemplate/body
\whitespace trim
\procedure importer()
<p>
<div>
<$link>
<$text text=`$(currentTiddler)$ appears to be a ChatGPT export containing $(numberOfConversations)$ conversations`/>
</$link>
</div>
<div>
<$button>
<$action-sendmessage $message="tm-import-conversations" $param=<<currentTiddler>>/>
{{$:/core/images/input-button}} Import
</$button>
</div>
</p>
\end importer
<%if [<currentTiddler>type[application/json]] %>
<$let json={{{ [<currentTiddler>get[text]] }}} >
<%if [<json>jsontype[]match[array]] %>
<$let
numberOfConversations={{{ [<json>jsonindexes[]count[]] }}}
json={{{ [<json>jsonextract[0]] }}}
>
<%if [<json>jsontype[]match[object]] %>
<%if
[<json>jsontype[title]match[string]]
:and[<json>jsontype[create_time]match[number]]
:and[<json>jsontype[update_time]match[number]]
:and[<json>jsontype[mapping]match[object]]
:and[<json>jsontype[id]match[string]]
%>
<<importer>>
<%endif%>
<%endif%>
</$let>
<%endif%>
</$let>
<%endif%>