1
0
mirror of https://github.com/Jermolene/TiddlyWiki5 synced 2025-04-06 10:46:57 +00:00

Merge ce9209f00e6328e45f98d54124c0caefbad3331e into 961e74f73d230d0028efb586db07699120eac888

This commit is contained in:
Jeremy Ruston 2025-03-23 22:28:00 +08:00 committed by GitHub
commit baa8cd1a63
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
25 changed files with 985 additions and 20 deletions

View File

@ -105,6 +105,7 @@ node $TW5_BUILD_TIDDLYWIKI \
fi
# /index.html Main site
# /external-(version).html External core version of main site
# /favicon.ico Favicon for main site
# /static.html Static rendering of default tiddlers
# /alltiddlers.html Static rendering of all tiddlers
@ -117,7 +118,7 @@ node $TW5_BUILD_TIDDLYWIKI \
--version \
--load $TW5_BUILD_OUTPUT/build.tid \
--output $TW5_BUILD_OUTPUT \
--build favicon static index \
--build favicon static index external-js \
|| exit 1
# /empty.html Empty

View File

@ -101,7 +101,7 @@ CreateTiddlerWidget.prototype.invokeAction = function(triggeringWidget,event) {
}
this.setVariable("createTiddler-title",title);
this.setVariable("createTiddler-draftTitle",draftTitle);
this.refreshChildren();
this.refreshChildren([]);
return true; // Action was invoked
};

View File

@ -104,6 +104,7 @@ ImportVariablesWidget.prototype.execute = function(tiddlerList) {
Selectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering
*/
ImportVariablesWidget.prototype.refresh = function(changedTiddlers) {
changedTiddlers = changedTiddlers || {};
// Recompute our attributes and the filter list
var changedAttributes = this.computeAttributes(),
tiddlerList = this.wiki.filterTiddlers(this.getAttribute("filter"),this);

View File

@ -2,11 +2,4 @@ created: 20131127215321439
modified: 20140912135951542
title: $:/DefaultTiddlers
[[TiddlyWiki Pre-release]]
HelloThere
[[Quick Start]]
[[Find Out More]]
[[TiddlyWiki on the Web]]
[[Testimonials and Reviews]]
GettingStarted
Community
$:/plugins/tiddlywiki/ai-tools

View File

@ -17,7 +17,9 @@
"tiddlywiki/jszip",
"tiddlywiki/confetti",
"tiddlywiki/dynannotate",
"tiddlywiki/tour"
"tiddlywiki/tour",
"tiddlywiki/markdown",
"tiddlywiki/ai-tools"
],
"themes": [
"tiddlywiki/vanilla",

View File

@ -3,10 +3,4 @@ modified: 20140912135951542
title: $:/DefaultTiddlers
type: text/vnd.tiddlywiki
HelloThere
[[Quick Start]]
[[Find Out More]]
[[TiddlyWiki on the Web]]
[[Testimonials and Reviews]]
GettingStarted
Community
$:/plugins/tiddlywiki/ai-tools

View File

@ -3,4 +3,4 @@ modified: 20131211131023829
title: $:/SiteTitle
type: text/vnd.tiddlywiki
TiddlyWiki @@font-size:small; v<<version>>@@
TiddlyWiki AI Tools Plugin

View File

@ -7,7 +7,9 @@
"tiddlywiki/menubar",
"tiddlywiki/confetti",
"tiddlywiki/dynannotate",
"tiddlywiki/tour"
"tiddlywiki/tour",
"tiddlywiki/qrcode",
"tiddlywiki/ai-tools"
],
"themes": [
"tiddlywiki/vanilla",

View File

@ -0,0 +1,53 @@
title: $:/plugins/tiddlywiki/ai-tools/docs
!! Setting Up
See the ''settings'' tab for set up instructions.
!! Live AI Conversations in ~TiddlyWiki
# Click the {{||$:/plugins/tiddlywiki/ai-tools/page-menu}} icon in the sidebar to open a new conversation
# Choose the server from the dropdown:
#* ''Locally running Llamafile server''
#* ''~OpenAI Service'' (requires API key to be specified in ''settings'')
# Type a prompt for the LLM in the text box
#* If using ~OpenAI it is possible to attach a single image to a prompt
# Click "Send" and wait for the output of the LLM
!! Import ~ChatGPT Conversation Archives
# [[Follow the instructions|https://help.openai.com/en/articles/7260999-how-do-i-export-my-chatgpt-history-and-data]] to request an export of your ~ChatGPT data
# You will receive a link to download your data as a ZIP file
# Download and unzip the file
# Locate the file `conversations.json` within the archive and import it into your TiddlyWiki
# Visit the ''tools'' tab and locate your `conversations.json` tiddler
# Click the associated ''import'' button
# See the imported conversations listed in the ''tools'' tab
# The imported tiddler `conversations.json` is no longer required and can be deleted
!! Conversation Format
This plugin defines a simple schema for representing conversations with an LLM.
In a nutshell, tiddlers tagged <<tag $:/tags/AI/Conversation>> define conversations. The individual messages are tiddlers that are tagged with the title of the conversation tiddler.
Currently, the ordering of the messages is determined by the value of their "created" field. The ordering defined by the tag mechanism is ignored. It is intended to change this behaviour so that the ordering of messages is defined by the tag mechanism.
The fields with defined meanings for conversation tiddlers are:
|!Field |!Description |
|''system-prompt'' |Defines the system prompt for the conversation |
|''tags'' |Must include <<tag $:/tags/AI/Conversation>> |
|''text'' |Optional description or notes displayed at the top of the conversation |
|''current-response-image'' |Optional title of an image tiddler to be attached to the current user response |
|''current-response-text'' |Text of the current user response before it is sent |
The fields with defined meanings for message tiddlers are:
|!Field |!Description |
|''created'' |Creation date of the message (currently used for ordering) |
|''image'' |Optional image associated with this message |
|''role'' |Possible values include ''user'' and ''assistant'' |
|''tags'' |Must include the title of the parent conversation |
|''type'' |Typically ''text/markdown'' |

View File

@ -0,0 +1,237 @@
title: $:/plugins/tiddlywiki/ai-tools/globals
tags: $:/tags/Global
\function ai-tools-default-llm-completion-server()
[all[shadows+tiddlers]tag[$:/tags/AI/CompletionServer]sort[caption]first[]]
\end
<!--
Action procedure to retrieve an LLM completion, given the following parameters:
conversationTitle - Title of the tiddler containing the conversation
resultTitlePrefix - Prefix of the tiddler to be used for saving the result. If the tiddler already exists then a number will be added repeatedly until the resulting title is unique
resultTags - Tags to be applied to the result tiddler
ai-tools-status-title - Optional title of a tiddler to which the status of the request will be bound: "pending", "complete", "error"
completionServer - Optional URL of server
-->
\procedure ai-tools-get-llm-completion(conversationTitle,resultTitlePrefix,resultTags,ai-tools-status-title,completionServer)
<$let
completionServer={{{ [<completionServer>!is[blank]else<ai-tools-default-llm-completion-server>] }}}
>
<$importvariables filter="[<completionServer>]">
<$wikify name="json" text=<<json-prompt>>>
<$action-log message="ai-tools-get-llm-completion"/>
<$action-log/>
<$action-sendmessage
$message="tm-http-request"
url={{{ [<completionServer>get[url]] }}}
body=<<json>>
header-content-type="application/json"
bearer-auth-token-from-store="openai-secret-key"
method="POST"
oncompletion=<<completion-callback>>
bind-status=<<ai-tools-status-title>>
var-resultTitlePrefix=<<resultTitlePrefix>>
var-resultTags=<<resultTags>>
/>
</$wikify>
</$importvariables>
</$let>
\end ai-tools-get-llm-completion
<!--
-->
\function ai-tools-status-title()
[<currentTiddler>addprefix[$:/temp/ai-tools/status/]]
\end ai-tools-status-title
<!--
Procedure to display a message from an AI conversation. Current tiddler is the conversation tiddler
-->
\procedure ai-tools-message(tiddler,field,role,makeLink:"yes")
<$qualify
name="state"
title={{{ [[$:/state/ai-tools-message-state/]addsuffix<tiddler>] }}}
>
<$let
editStateTiddler={{{ [<state>addsuffix[-edit-state]] }}}
editState={{{ [<editStateTiddler>get[text]else[view]] }}}
>
<div class={{{ ai-tools-message [<role>addprefix[ai-tools-message-role-]] +[join[ ]] }}}>
<div class="ai-tools-message-toolbar">
<div class="ai-tools-message-toolbar-left">
<$genesis $type={{{ [<makeLink>match[yes]then[$link]else[span]] }}} to=<<tiddler>>>
<$text text=<<role>>/>
</$genesis>
</div>
<div class="ai-tools-message-toolbar-left">
<%if [<editState>!match[edit]] %>
<$button class="ai-tools-message-toolbar-button">
<$action-setfield $tiddler=<<editStateTiddler>> text="edit"/>
edit
</$button>
<%endif%>
<%if [<editState>!match[view]] %>
<$button class="ai-tools-message-toolbar-button">
<$action-setfield $tiddler=<<editStateTiddler>> text="view"/>
view
</$button>
<%endif%>
<$button class="ai-tools-message-toolbar-button">
<$action-sendmessage $message="tm-copy-to-clipboard" $param={{{ [<tiddler>get<field>else[]] }}}/>
copy
</$button>
<$button class="ai-tools-message-toolbar-button">
<$action-deletetiddler $tiddler=<<tiddler>>/>
delete
</$button>
</div>
</div>
<div class="ai-tools-message-body">
<%if [<editState>match[view]] %>
<$transclude $tiddler=<<tiddler>> $field=<<field>> $mode="block"/>
<%else%>
<$edit-text tiddler=<<tiddler>> field=<<field>> tag="textarea" class="tc-edit-texteditor"/>
<%endif%>
<%if [<tiddler>get[image]else[]!match[]] %>
<$image source={{{ [<tiddler>get[image]] }}}/>
<%endif%>
</div>
</div>
</$let>
</$qualify>
\end ai-tools-message
\procedure ai-tools-get-message()
\whitespace trim
<$wikify
name="messageText"
text={{!!text}}
type={{!!type}}
output="text"
>
<$text text={{{ [<messageText>jsonstringify[]] }}}/>
</$wikify>
\end
<!--
Action procedure to get the next response from the LLM
-->
\procedure ai-tools-action-get-response()
<$let
resultTitlePrefix={{{ [<currentTiddler>addsuffix[ - Prompt]] }}}
resultTags={{{ [<currentTiddler>format:titlelist[]] }}}
>
<$action-createtiddler
$basetitle=<<resultTitlePrefix>>
tags=<<resultTags>>
role="user"
text={{!!current-response-text}}
type={{!!current-response-type}}
image={{!!current-response-image}}
>
<$action-deletefield $tiddler=<<currentTiddler>> $field="current-response-text"/>
<$action-setfield $tiddler=<<currentTiddler>> $field="current-response-type" $value="text/vnd.tiddlywiki"/>
<$action-deletefield $tiddler=<<currentTiddler>> $field="current-response-image"/>
<$transclude
$variable="ai-tools-get-llm-completion"
conversationTitle=<<currentTiddler>>
completionServer={{!!completion-server}}
resultTitlePrefix=<<resultTitlePrefix>>
resultTags=<<resultTags>>
ai-tools-status-title=<<ai-tools-status-title>>
/>
</$action-createtiddler>
</$let>
\end ai-tools-action-get-response
\procedure ai-tools-conversation(conversationTitle)
<$let currentTiddler=<<conversationTitle>>>
Server: <$select tiddler=<<currentTiddler>> field="completion-server" default=<<ai-tools-default-llm-completion-server>>>
<$list filter="[all[shadows+tiddlers]tag[$:/tags/AI/CompletionServer]sort[caption]]">
<option value=<<currentTiddler>>><$view field='caption'/></option>
</$list>
</$select>
<div class="ai-conversation">
<$transclude
$variable="ai-tools-message"
tiddler=<<currentTiddler>>
field="system-prompt"
role="system"
makeLink="no"
/>
<$list filter="[all[shadows+tiddlers]tag<currentTiddler>!is[draft]sort[created]]" variable="message" storyview="pop">
<$transclude
$variable="ai-tools-message"
tiddler=<<message>>
field="text"
role={{{ [<message>get[role]] }}}
/>
</$list>
<%if [<ai-tools-status-title>get[text]else[complete]match[pending]] %>
<div class="ai-request-status">
<div class="ai-request-spinner"></div>
</div>
<%endif%>
<div class="ai-user-prompt">
<div class="ai-user-prompt-text">
<$edit-text tiddler=<<currentTiddler>> field="current-response-text" tag="textarea" class="tc-edit-texteditor"/>
<$button
class="ai-user-prompt-send"
actions=<<ai-tools-action-get-response>>
disabled={{{ [<ai-tools-status-title>get[text]else[complete]match[pending]then[yes]] [<currentTiddler>get[current-response-text]else[]match[]then[yes]] ~[[no]] }}}
>
Send
</$button>
</div>
<div class="ai-user-prompt-image">
<div class="tc-drop-down-wrapper">
<$let state=<<qualify "$:/state/ai-user-prompt-image-dropdown-state/">>>
<$button popup=<<state>> class="tc-btn-invisible tc-btn-dropdown">Choose an image {{$:/core/images/down-arrow}}</$button>
<$link to={{!!current-response-image}}>
<$text text={{!!current-response-image}}/>
</$link>
<$reveal state=<<state>> type="popup" position="belowleft" text="" default="" class="tc-popup-keep">
<div class="tc-drop-down" style="text-align:center;">
<$transclude
$variable="image-picker"
filter="[all[shadows+tiddlers]is[image]is[binary]!has[_canonical_uri]] -[type[application/pdf]] +[!has[draft.of]sort[title]]"
actions="""
<$action-setfield
$tiddler=<<currentTiddler>>
current-response-image=<<imageTitle>>
/>
<$action-deletetiddler $tiddler=<<state>>/>
"""
/>
</div>
</$reveal>
</$let>
<$image source={{!!current-response-image}}/>
</div>
</div>
</div>
<div class="ai-user-prompt-type">
<$edit-text tiddler=<<currentTiddler>> field="current-response-type" tag="input" class="tc-edit-texteditor"/>
Note that your text will be wikified before being sent to the LLM. Use &#x7B;&#x7B;transclusion&#x7D;&#x7D; to include other tiddlers in your messages
</div>
</div>
</$let>
\end ai-tools-conversation
\procedure ai-tools-new-conversation()
<$action-createtiddler
$basetitle="AI Conversation"
tags="$:/tags/AI/Conversation"
system-prompt="Transcript of a never ending dialog, where the User interacts with an Assistant. The Assistant is helpful, kind, honest, good at writing, and never fails to answer the User's requests immediately and with precision."
current-response-text="Please list the 10 most important mechanical inventions of the Twentieth Century"
current-response-type="text/vnd.tiddlywiki"
>
<$action-navigate $to=<<createTiddler-title>>/>
</$action-createtiddler>
\end ai-tools-new-conversation
\procedure ai-tools-import-conversations()
<$action-navigate $to="$:/plugins/tiddlywiki/ai-tools/tools"/>
\end ai-tools-import-conversations

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,87 @@
/*\
title: $:/plugins/tiddlywiki/ai-tools/modules/conversations-archive-importer.js
type: application/javascript
module-type: library

Conversations archive importer

\*/
(function(){

/*jslint node: true, browser: true */
/*global $tw: false */
"use strict";

/*
Importer for ChatGPT "conversations.json" export archives
*/
function ConversationsArchiveImporter() {
}

/*
Import the conversations found in a JSON tiddler.
widget: widget providing access to the wiki
conversationsTitle: title of the tiddler containing the raw `conversations.json` data
Creates one tiddler per conversation (tagged $:/tags/AI/Conversation) and one tiddler
per message (tagged with the title of its parent conversation)
*/
ConversationsArchiveImporter.prototype.import = function(widget,conversationsTitle) {
	var jsonConversations = widget.wiki.getTiddlerData(conversationsTitle,[]),
		tiddlers = [];
	$tw.utils.each(jsonConversations,function(jsonConversation) {
		var conversationTitle = (jsonConversation.title || "Untitled") + " (" + jsonConversation.conversation_id + ")",
			conversationCreated = convertDate(jsonConversation.create_time),
			conversationModified = convertDate(jsonConversation.update_time);
		tiddlers.push({
			title: conversationTitle,
			tags: $tw.utils.stringifyList(["$:/tags/AI/Conversation"]),
			created: conversationCreated,
			modified: conversationModified
		});
		var messageIndex = 1;
		$tw.utils.each(jsonConversation.mapping,function(jsonMessage,messageId) {
			// Skip entries where "message" is null
			if(jsonMessage.message) {
				var content = jsonMessage.message.content,
					messageFields = {
						// Use the running index directly so that numbering starts at 1
						title: conversationTitle + " " + messageIndex,
						// Fall back to the conversation dates when a message has no timestamps
						created: convertDate(jsonMessage.message.create_time) || conversationCreated,
						modified: convertDate(jsonMessage.message.update_time) || conversationModified,
						tags: $tw.utils.stringifyList([conversationTitle]),
						role: jsonMessage.message.author.role,
						"message-type": content.content_type
					};
				switch(content.content_type) {
					case "code":
					case "execution_output":
					case "system_error":
						messageFields.text = content.text;
						messageFields.type = "text/plain";
						break;
					case "text":
						messageFields.text = content.parts.join("");
						messageFields.type = "text/markdown";
						break;
					default:
						// Unknown content types are preserved verbatim for later inspection
						messageFields.text = JSON.stringify(jsonMessage.message,null,4);
						messageFields.type = "text/plain";
						break;
				}
				tiddlers.push(messageFields);
				messageIndex += 1;
			}
		});
	});
	// Create the tiddlers
	widget.wiki.addTiddlers(tiddlers);
};

/*
Convert a Unix timestamp (in seconds) to a TiddlyWiki date string.
Returns undefined when the timestamp is missing so that callers can apply a fallback
*/
function convertDate(unixTimestamp) {
	if(unixTimestamp === undefined || unixTimestamp === null) {
		return undefined;
	}
	return $tw.utils.stringifyDate(new Date(unixTimestamp * 1000));
}

exports.ConversationsArchiveImporter = ConversationsArchiveImporter;

})();

View File

@ -0,0 +1,30 @@
/*\
title: $:/plugins/tiddlywiki/ai-tools/modules/startup.js
type: application/javascript
module-type: startup
Setup the root widget event handlers
\*/
(function(){
/*jslint node: true, browser: true */
/*global $tw: false */
"use strict";
// Export name and synchronous status
exports.name = "ai-tools";
exports.platforms = ["browser"];
exports.after = ["startup"];
exports.synchronous = true;
// Install the root widget event handlers
exports.startup = function() {
var ConversationsArchiveImporter = require("$:/plugins/tiddlywiki/ai-tools/modules/conversations-archive-importer.js").ConversationsArchiveImporter;
$tw.conversationsArchiveImporter = new ConversationsArchiveImporter();
$tw.rootWidget.addEventListener("tm-import-conversations",function(event) {
$tw.conversationsArchiveImporter.import(event.widget,event.param);
});
};
})();

View File

@ -0,0 +1,21 @@
title: $:/plugins/tiddlywiki/ai-tools/page-menu
tags: $:/tags/PageControls
caption: {{$:/plugins/tiddlywiki/ai-tools/icon}} AI Tools
description: Tools for interactive AI services
\whitespace trim
<$button popup=<<qualify "$:/state/popup/ai-tools/page-menu">> tooltip="Tools for interactive AI services" aria-label="AI Tools" class=<<tv-config-toolbar-class>> selectedClass="tc-selected">
<%if [<tv-config-toolbar-icons>match[yes]] %>
{{$:/plugins/tiddlywiki/ai-tools/icon}}
<%endif%>
<%if [<tv-config-toolbar-text>match[yes]] %>
<span class="tc-btn-text"><$text text="AI Tools"/></span>
<%endif%>
</$button>
<$reveal state=<<qualify "$:/state/popup/ai-tools/page-menu">> type="popup" position="belowleft" animate="yes">
<div class="tc-drop-down">
<$list filter="[all[shadows+tiddlers]tag[$:/tags/AI/PageMenu]!has[draft.of]]" variable="listItem">
<$transclude tiddler=<<listItem>>/>
</$list>
</div>
</$reveal>

View File

@ -0,0 +1,6 @@
title: $:/plugins/tiddlywiki/ai-tools/page-menu/import-chatgpt
tags: $:/tags/AI/PageMenu
<$button actions=<<ai-tools-import-conversations>> class="tc-btn-invisible">
{{$:/core/images/input-button}} Import Conversations from ~ChatGPT
</$button>

View File

@ -0,0 +1,6 @@
title: $:/plugins/tiddlywiki/ai-tools/page-menu/new-conversation
tags: $:/tags/AI/PageMenu
<$button actions=<<ai-tools-new-conversation>> class="tc-btn-invisible">
{{$:/core/images/new-button}} New Conversation
</$button>

View File

@ -0,0 +1,7 @@
{
"title": "$:/plugins/tiddlywiki/ai-tools",
"name": "AI Tools",
"description": "AI Tools for TiddlyWiki",
"list": "readme docs tools settings",
"stability": "STABILITY_1_EXPERIMENTAL"
}

View File

@ -0,0 +1,12 @@
title: $:/plugins/tiddlywiki/ai-tools/readme
!! AI Tools for TiddlyWiki
This plugin adds integrated LLM conversations to the TiddlyWiki platform.
The plugin allows live conversations within TiddlyWiki and also importing ~ChatGPT conversation archives.
The plugin supports two options for the LLM server:
* ''Locally running Llamafile server'' - Llamafile is an open source project that lets you distribute and run LLMs as a single file. The files are large, typically 4+ gigabytes but offer reasonable performance on modern hardware, and total privacy
* ''~OpenAI Service'' - ~OpenAI is a commercial service that offers paid APIs for accessing some of the most sophisticated LLMs that are available. ~OpenAI requires tokens to be purchased for API usage (this is entirely separate from ~ChatGPT subscriptions)

View File

@ -0,0 +1,85 @@
title: $:/plugins/tiddlywiki/ai-tools/servers/llamafile-llava
tags: $:/tags/AI/CompletionServer
url: http://127.0.0.1:8080/completion
caption: Local Llamafile server running LLaVA models
<!--
Wikified JSON text to be sent to server
-->
\procedure json-prompt()
\rules only filteredtranscludeinline transcludeinline macrodef macrocallinline html conditional commentblock commentinline
{
"image_data": [
<$list filter="[all[shadows+tiddlers]tag<conversationTitle>!is[draft]sort[created]]" counter="counter">
<%if [<currentTiddler>has[image]] %>
<%if [<counter-first>!match[yes]] %>,<%endif%>
{
"id": <$text text=<<counter>>/>,
"data": "<$text text={{{ [<currentTiddler>get[image]get[text]jsonstringify[]] +[join[]] }}}/>"
}
<%endif%>
</$list>
],
"prompt": "<<json-prompt-text>>",
"cache_prompt": false,
"frequency_penalty": 0,
"grammar": "",
"mirostat_eta": 0.1,
"mirostat_tau": 5,
"mirostat": 0,
"n_predict": 400,
"n_probs": 0,
"presence_penalty": 0,
"repeat_last_n": 256,
"repeat_penalty": 1.18,
"slot_id": -1,
"stop": ["</s>", "Llama:", "User:"],
"stream" : false,
"temperature": 0.7,
"tfs_z": 1,
"top_k": 40,
"top_p": 0.5,
"typical_p": 1
}
\end json-prompt
\procedure json-prompt-text()
\whitespace trim
\rules only filteredtranscludeinline transcludeinline macrodef macrocallinline html conditional commentblock commentinline
<$text text={{{ [<conversationTitle>get[system-prompt]jsonstringify[]] }}}/>
\n
<$list filter="[all[shadows+tiddlers]tag<conversationTitle>!is[draft]sort[created]]" counter="counter">
\n
<$text text={{{ [<currentTiddler>get[role]else[user]match[user]then[User:]else[Llama:]] }}}/>
<%if [<currentTiddler>has[image]] %>
[img-<$text text=<<counter>>/>]
<%endif%>
<<ai-tools-get-message>>
</$list>
\nLlama:
\end json-prompt-text
<!--
Callback for the HTTP response from the LLM
-->
\procedure completion-callback()
<%if [<status>compare:number:gteq[200]compare:number:lteq[299]] %>
<!-- Success -->
<$action-createtiddler
$basetitle=<<resultTitlePrefix>>
tags=<<resultTags>>
type="text/markdown"
role="assistant"
text={{{ [<data>jsonget[content]] }}}
/>
<%else%>
<!-- Error -->
<$action-createtiddler
$basetitle=<<resultTitlePrefix>>
tags=<<resultTags>>
type="text/markdown"
role="error"
text={{{ [[Error:]] [<statusText>] [<data>jsonget[error],[message]] +[join[]] }}}
/>
<%endif%>
\end completion-callback

View File

@ -0,0 +1,69 @@
title: $:/plugins/tiddlywiki/ai-tools/servers/openai
tags: $:/tags/AI/CompletionServer
url: https://api.openai.com/v1/chat/completions
caption: OpenAI Service
<!--
Wikified JSON text to be sent to server
-->
\procedure json-prompt()
\rules only filteredtranscludeinline transcludeinline macrodef macrocallinline html conditional commentblock commentinline
{
"model": "gpt-4o",
"messages": [
{
"role": "system",
"content": "<$text text={{{ [<conversationTitle>get[system-prompt]jsonstringify[]] }}}/>"
}
<!-- Loop through the tiddlers tagged with this one to pick up all the messages in the conversation -->
<$list filter="[all[shadows+tiddlers]tag<conversationTitle>!is[draft]sort[created]]">
,
{
<!-- We use JSON stringify to escape the characters that can't be used directly in JSON -->
"role": "<$text text={{{ [<currentTiddler>get[role]jsonstringify[]] }}}/>",
"content": [
{
"type": "text",
"text": "<<ai-tools-get-message>>"
}
<%if [<currentTiddler>get[image]else[]!match[]] %>
,
{
"type": "image_url",
"image_url": {
"url": "<$text text={{{ [[data:]] [<currentTiddler>get[image]get[type]] [[;base64,]] [<currentTiddler>get[image]get[text]jsonstringify[]] +[join[]] }}}/>"
}
}
<%endif%>
]
}
</$list>
]
}
\end json-prompt
<!--
Callback for the HTTP response from the LLM
-->
\procedure completion-callback()
<%if [<status>compare:number:gteq[200]compare:number:lteq[299]] %>
<!-- Success -->
<$action-createtiddler
$basetitle=<<resultTitlePrefix>>
tags=<<resultTags>>
type="text/markdown"
role={{{ [<data>jsonget[choices],[0],[message],[role]] }}}
text={{{ [<data>jsonget[choices],[0],[message],[content]] }}}
/>
<%else%>
<!-- Error -->
<$action-createtiddler
$basetitle=<<resultTitlePrefix>>
tags=<<resultTags>>
type="text/markdown"
role="error"
text={{{ [[Error:]] [<statusText>] [<data>jsonget[error],[message]] +[join[]] }}}
/>
<%endif%>
\end completion-callback

View File

@ -0,0 +1,23 @@
title: $:/plugins/tiddlywiki/ai-tools/settings
! AI Tools Settings
!! ~OpenAI API key
This plugin runs entirely in the browser, with no backend server component. A consequence of this design is that the API keys required to access external services must be obtained by the end user. These keys are stored in the browser and so only need to be set up once.
# Register for an account at https://platform.openai.com/
#* Newly registered accounts can claim a small amount of credit, thereafter payment is needed
#* Note that ~OpenAI run completely different payment systems for ~ChatGPT and the API platform. Even if you are already a subscriber to ~ChatGPT you will still need to pay for API usage after the initial free service
# Visit https://platform.openai.com/api-keys to create a new secret API key
# Copy and paste the value into the box below
~OpenAI Secret API Key: <$password name="openai-secret-key"/>
!! Llamafile Setup
[[Llamafile|https://github.com/Mozilla-Ocho/llamafile]] lets you download and run LLMs as a single file. See the [[announcement blog post|https://hacks.mozilla.org/2023/11/introducing-llamafile/]] for background.
# Download and run Llamafile as [[described in the QuickStart guide|https://github.com/Mozilla-Ocho/llamafile?tab=readme-ov-file#quickstart]]
# Visit http://127.0.0.1:8080 in a browser and verify that you can see the Llamafile interface. You can also try it out here
# Return to AI Tools and start a conversation, specifying Llamafile as the server

View File

@ -0,0 +1,230 @@
title: $:/plugins/tiddlywiki/ai-tools/styles
tags: [[$:/tags/Stylesheet]]
\rules only filteredtranscludeinline transcludeinline macrodef macrocallinline
.ai-conversation {
background: #f0eeff;
border-radius: 2em;
padding: 1em 1em;
display: flex;
flex-direction: column;
gap: 1em;
box-shadow: 2px 2px 5px rgba(0,0,0,0.2);
}
.ai-conversation .ai-tools-message {
box-shadow: 2px 2px 5px rgba(0,0,0,0.2);
border-radius: 1em;
display: flex;
flex-direction: column;
}
.ai-conversation .ai-tools-message .ai-tools-message-toolbar {
background: rgba(1,1,1,0.35);
color: white;
padding: 0.25em 1em 0.25em 1em;
border-top-left-radius: 1em;
border-top-right-radius: 1em;
display: flex;
justify-content: space-between;
}
.ai-conversation .ai-tools-message .ai-tools-message-toolbar .tc-tiddlylink {
color: inherit;
}
.ai-conversation .ai-tools-message .ai-tools-message-toolbar .ai-tools-message-toolbar-button {
background: rgba(255,255,255,0.35);
color: #333333;
cursor: pointer;
display: inline-block;
outline: 0;
overflow: hidden;
pointer-events: auto;
position: relative;
text-align: center;
touch-action: manipulation;
user-select: none;
-webkit-user-select: none;
vertical-align: top;
white-space: nowrap;
border: 0;
border-radius: 4px;
}
.ai-conversation .ai-tools-message .ai-tools-message-toolbar .ai-tools-message-toolbar-button:hover {
color: #ffffff;
background: rgba(255,255,255,0.55);
}
.ai-conversation .ai-tools-message .ai-tools-message-body {
padding: 0 1em 0 1em
}
.ai-conversation .ai-tools-message.ai-tools-message-role-system {
width: 60%;
background: #4c4c80;
color: white;
}
.ai-conversation .ai-tools-message.ai-tools-message-role-user {
width: 60%;
margin-left: auto;
background: #ffcde0;
}
.ai-conversation .ai-tools-message.ai-tools-message-role-assistant {
background: #dfd;
}
.ai-conversation .ai-tools-message.ai-tools-message-role-error {
background: #fdd;
}
.ai-conversation .ai-user-prompt {
padding: 1em;
background: #ffcde0;
border-radius: 1em;
box-shadow: inset 3px 4px 2px rgba(0, 0, 0, 0.1);
}
.ai-conversation .ai-user-prompt button svg.tc-image-button {
fill: #000;
}
.ai-conversation .ai-user-prompt-text {
display: flex;
align-items: flex-start;
gap: 1em;
}
.ai-conversation .ai-user-prompt-text textarea {
color: #000;
background: #fff;
}
.ai-conversation .ai-user-prompt button.ai-user-prompt-send {
background-color: initial;
background-image: linear-gradient(-180deg, #e0c3ce, #963057);
border-radius: 1em;
box-shadow: rgba(0, 0, 0, 0.1) 0 2px 4px;
color: #FFFFFF;
cursor: pointer;
display: inline-block;
outline: 0;
overflow: hidden;
padding: 0 20px;
pointer-events: auto;
position: relative;
text-align: center;
touch-action: manipulation;
user-select: none;
-webkit-user-select: none;
vertical-align: top;
white-space: nowrap;
border: 0;
transition: box-shadow .2s;
line-height: 2;
}
.ai-conversation .ai-user-prompt button.ai-user-prompt-send:hover:not(:disabled) {
box-shadow: rgb(255 62 135 / 64%) 0 3px 8px;
}
.ai-conversation .ai-user-prompt button.ai-user-prompt-send:disabled {
background: #ddd;
color: #444;
}
.ai-conversation .ai-user-prompt textarea {
margin: 0;
}
.ai-conversation .ai-user-prompt .ai-user-prompt-image button {
color: #000;
fill: #000;
}
.ai-conversation .ai-user-prompt-type {
font-size: 0.8em;
line-height: 1.5;
}
.ai-request-spinner {
animation: ai-request-spinner-animation-rotate 1s infinite;
height: 50px;
width: 50px;
margin-left: auto;
margin-right: auto;
}
.ai-request-spinner:before,
.ai-request-spinner:after {
border-radius: 50%;
content: "";
display: block;
height: 20px;
width: 20px;
}
.ai-request-spinner:before {
animation: ai-request-spinner-animation-ball1 1s infinite;
background-color: #9c9ab0;
box-shadow: 30px 0 0 #fefdff;
margin-bottom: 10px;
}
.ai-request-spinner:after {
animation: ai-request-spinner-animation-ball2 1s infinite;
background-color: #fefdff;
box-shadow: 30px 0 0 #9c9ab0;
}
@keyframes ai-request-spinner-animation-rotate {
0% { transform: rotate(0deg) scale(0.8) }
50% { transform: rotate(360deg) scale(1.2) }
100% { transform: rotate(720deg) scale(0.8) }
}
@keyframes ai-request-spinner-animation-ball1 {
0% {
box-shadow: 30px 0 0 #fefdff;
}
50% {
box-shadow: 0 0 0 #fefdff;
margin-bottom: 0;
transform: translate(15px, 15px);
}
100% {
box-shadow: 30px 0 0 #fefdff;
margin-bottom: 10px;
}
}
@keyframes ai-request-spinner-animation-ball2 {
0% {
box-shadow: 30px 0 0 #9c9ab0;
}
50% {
box-shadow: 0 0 0 #9c9ab0;
margin-top: -20px;
transform: translate(15px, 15px);
}
100% {
box-shadow: 30px 0 0 #9c9ab0;
margin-top: 0;
}
}
.tc-ai-tools-dropzone {
background: yellow;
text-align: center;
width: 100%;
height: 4em;
}
.tc-ai-tools-dropzone.tc-dragover {
background: red;
}

View File

@ -0,0 +1,42 @@
title: $:/plugins/tiddlywiki/ai-tools/tools
! Import ~ChatGPT Export Archive
These instructions allow you to import the conversations from a ~ChatGPT export archive.
!! 1 - Request your archive
Visit the ~ChatGPT site to request your archive. You will be sent an email with a link to a ZIP file. Download the file and locate the file `conversations.json` within it.
!! 2 - Import `conversations.json` as a tiddler
It is not possible to use the normal import process to import the `conversations.json` file because TiddlyWiki will erroneously recognise it as a JSON file of tiddlers.
Instead, drag the `conversations.json` file to the dropzone below. Then click the "Import" button to complete the import:
<$dropzone deserializer="text/plain" autoOpenOnImport="yes" filesOnly="yes" class="tc-ai-tools-dropzone">
Drop your file here
</$dropzone>
!! 3 - Import the conversations within it
Any tiddlers containing ~ChatGPT exported `conversations.json` files will be shown here for import.
<$list filter="[all[tiddlers+shadows]type[application/json]!has[plugin-type]sort[title]]" template="$:/plugins/tiddlywiki/ai-tools/view-templates/imported-conversations-json"/>
!! 4 - Review Loaded Conversations
<ul>
<$list filter="[all[tiddlers+shadows]tag[$:/tags/AI/Conversation]sort[title]]">
<$list-empty>
No conversations found
</$list-empty>
<$list-template>
<li>
<$link>
<$text text=<<currentTiddler>>/>
</$link>
</li>
</$list-template>
</$list>
</ul>

View File

@ -0,0 +1,14 @@
title: $:/plugins/tiddlywiki/ai-tools/view-templates/conversation
tags: $:/tags/ViewTemplate
list-after: $:/core/ui/ViewTemplate/body
<%if [<currentTiddler>tag[$:/tags/AI/Conversation]] %>
<$transclude
$variable="ai-tools-conversation"
$mode="block"
conversationTitle=<<currentTiddler>>
/>
<%endif%>

View File

@ -0,0 +1,45 @@
title: $:/plugins/tiddlywiki/ai-tools/view-templates/imported-conversations-json
tags: $:/tags/ViewTemplate
list-before: $:/core/ui/ViewTemplate/body
\whitespace trim
\procedure importer()
<p>
<div>
<$link>
<$text text=`$(currentTiddler)$ appears to be a ChatGPT export containing $(numberOfConversations)$ conversations`/>
</$link>
</div>
<div>
<$button>
<$action-sendmessage $message="tm-import-conversations" $param=<<currentTiddler>>/>
{{$:/core/images/input-button}} Import
</$button>
</div>
</p>
\end importer
<%if [<currentTiddler>type[application/json]] %>
<$let json={{{ [<currentTiddler>get[text]] }}} >
<%if [<json>jsontype[]match[array]] %>
<$let
numberOfConversations={{{ [<json>jsonindexes[]count[]] }}}
json={{{ [<json>jsonextract[0]] }}}
>
<%if [<json>jsontype[]match[object]] %>
<%if
[<json>jsontype[title]match[string]]
:and[<json>jsontype[create_time]match[number]]
:and[<json>jsontype[update_time]match[number]]
:and[<json>jsontype[mapping]match[object]]
:and[<json>jsontype[id]match[string]]
%>
<<importer>>
<%endif%>
<%endif%>
</$let>
<%endif%>
</$let>
<%endif%>