WIP: Add a multipart/form-data convenience function

This is the start of adding support for large attachments.

We have a new endpoint for POSTing tiddler data. The idea is that it will take any kind of data and figure out how to extract tiddlers from the upload and save them in the nominated bag.

The next step is to move the attachment files into a special folder and reference them from the database so that we can construct _canonical_uris for them.
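For example, an upload against the new route could be driven like this. This is a minimal sketch rather than part of the commit: it assumes Node 18+ (for the global fetch, FormData and Blob), a server listening on 127.0.0.1:8080 with no path prefix, and a recipe named "myrecipe".

// Hedged example of posting multipart/form-data to the new endpoint; the URL, recipe name and field name are illustrative assumptions
const form = new FormData();
form.append("file",new Blob(["Hello, multipart world"],{type: "text/plain"}),"hello.txt");
fetch("http://127.0.0.1:8080/wiki/myrecipe/recipes/myrecipe/tiddlers",{
	method: "POST",
	headers: {
		// The server's CSRF check for writes expects this header unless CSRF checking is disabled
		"x-requested-with": "TiddlyWiki"
	},
	body: form // fetch supplies the multipart/form-data Content-Type header, including the boundary
}).then(response => response.text()).then(text => console.log(text));

With the route handler included in this commit, the uploaded file is currently written to the server's working directory under its original filename.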
Jeremy Ruston 2024-02-02 15:42:02 +00:00
parent 262a730534
commit 6675358e85
2 changed files with 168 additions and 0 deletions


@@ -179,6 +179,102 @@ function sendResponse(request,response,statusCode,headers,data,encoding) {
	response.end(data,encoding);
}

/*
Streams the parts of an incoming multipart/form-data request. Options include:
cbPartStart(headers,name,filename) - invoked when a new part (form field or file) starts being received
cbPartChunk(chunk) - invoked when a chunk of part data is received
cbPartEnd() - invoked when a part finishes being received
cbFinished(err) - invoked when all the form data has been processed, or when an error occurs
*/
function streamMultipartData(request,options) {
	// Check that the Content-Type is multipart/form-data
	const contentType = request.headers['content-type'];
	if(!contentType || !contentType.startsWith("multipart/form-data")) {
		return options.cbFinished("Expected multipart/form-data content type");
	}
	// Extract the boundary string from the Content-Type header
	const boundaryMatch = contentType.match(/boundary=(.+)$/);
	if(!boundaryMatch) {
		return options.cbFinished("Missing boundary in multipart/form-data");
	}
	const boundary = boundaryMatch[1];
	const boundaryBuffer = Buffer.from("--" + boundary);
	// Initialise the accumulation buffer and the flag recording whether we are inside a part
	let buffer = Buffer.alloc(0);
	let processingPart = false;
	// Process incoming chunks
	request.on("data", (chunk) => {
		// Accumulate the incoming data
		buffer = Buffer.concat([buffer, chunk]);
		// Loop through any parts within the current buffer
		while (true) {
			if(!processingPart) {
				// If we're not processing a part then we try to find a boundary marker
				const boundaryIndex = buffer.indexOf(boundaryBuffer);
				if(boundaryIndex === -1) {
					// Haven't reached the boundary marker yet, so we should wait for more data
					break;
				}
				// Look for the blank line that marks the end of the part headers
				const endOfHeaders = buffer.indexOf("\r\n\r\n",boundaryIndex + boundaryBuffer.length);
				if(endOfHeaders === -1) {
					// Haven't reached the end of the headers, so we should wait for more data
					break;
				}
				// Extract and parse the headers
				const headersPart = Uint8Array.prototype.slice.call(buffer,boundaryIndex + boundaryBuffer.length,endOfHeaders).toString();
				const currentHeaders = {};
				headersPart.split("\r\n").forEach(headerLine => {
					// Split on the first colon so that header values containing colons survive intact
					const colonPos = headerLine.indexOf(": ");
					if(colonPos !== -1) {
						currentHeaders[headerLine.substring(0,colonPos).toLowerCase()] = headerLine.substring(colonPos + 2);
					}
				});
				// Parse the content disposition header to get the field name and filename
				const contentDisposition = {
					name: null,
					filename: null
				};
				if(currentHeaders["content-disposition"]) {
					// Split the content-disposition header into semicolon-delimited parts
					const parts = currentHeaders["content-disposition"].split(";").map(part => part.trim());
					// Iterate over each part to extract name and filename if they exist
					parts.forEach(part => {
						if(part.startsWith("name=")) {
							// Remove 'name="' and the trailing quote
							contentDisposition.name = part.substring(6,part.length - 1);
						} else if(part.startsWith("filename=")) {
							// Remove 'filename="' and the trailing quote
							contentDisposition.filename = part.substring(10,part.length - 1);
						}
					});
				}
				processingPart = true;
				options.cbPartStart(currentHeaders,contentDisposition.name,contentDisposition.filename);
				// Slice the buffer to the start of the part body
				buffer = Uint8Array.prototype.slice.call(buffer,endOfHeaders + 4);
			} else {
				const boundaryIndex = buffer.indexOf(boundaryBuffer);
				if(boundaryIndex >= 0) {
					// Return the part up to the boundary, excluding the CRLF that precedes the boundary marker
					options.cbPartChunk(Uint8Array.prototype.slice.call(buffer,0,Math.max(boundaryIndex - 2,0)));
					options.cbPartEnd();
					processingPart = false;
					buffer = Uint8Array.prototype.slice.call(buffer,boundaryIndex);
				} else {
					// No boundary marker yet. Return the data received so far, but hold back enough
					// bytes to cover a boundary marker that might be split across incoming chunks
					const safeLength = buffer.length - boundaryBuffer.length - 2;
					if(safeLength > 0) {
						options.cbPartChunk(Uint8Array.prototype.slice.call(buffer,0,safeLength));
						buffer = Uint8Array.prototype.slice.call(buffer,safeLength);
					}
					// Wait for more data
					break;
				}
			}
		}
	});
	// All done
	request.on("end", () => {
		options.cbFinished(null);
	});
}

Server.prototype.defaultVariables = {
	port: "8080",
	host: "127.0.0.1",
@@ -271,6 +367,7 @@ Server.prototype.requestHandler = function(request,response,options) {
	state.queryParameters = querystring.parse(state.urlInfo.query);
	state.pathPrefix = options.pathPrefix || this.get("path-prefix") || "";
	state.sendResponse = sendResponse.bind(self,request,response);
	state.streamMultipartData = streamMultipartData.bind(self,request);
	// Get the principals authorized to access this resource
	state.authorizationType = options.authorizationType || this.methodMappings[request.method] || "readers";
	// Check for the CSRF header if this is a write


@@ -0,0 +1,71 @@
/*\
title: $:/plugins/tiddlywiki/multiwikiserver/route-post-recipe-tiddlers.js
type: application/javascript
module-type: route

POST /wiki/:recipe_name/recipes/:recipe_name/tiddlers

NOTE: URLs currently include the recipe name twice. This is temporary, to minimise the changes to the TiddlyWeb plugin

\*/
(function() {

/*jslint node: true, browser: true */
/*global $tw: false */
"use strict";

exports.method = "POST";

exports.path = /^\/wiki\/([^\/]+)\/recipes\/([^\/]+)\/tiddlers$/;

exports.bodyFormat = "stream";

exports.handler = function(request,response,state) {
	const fs = require("fs");
	// Get the parameters
	var recipe_name = $tw.utils.decodeURIComponentSafe(state.params[0]),
		recipe_name_2 = $tw.utils.decodeURIComponentSafe(state.params[1]);
	console.log(`Got to here ${recipe_name} and ${recipe_name_2}`);
	// Require the recipe names to match
	if(recipe_name !== recipe_name_2) {
		return state.sendResponse(400,{"Content-Type": "text/plain"},"Bad Request: recipe names do not match");
	}
	// Process the incoming data. Note that at this stage the parts are only written to disk or
	// logged; saving them as tiddlers in the nominated bag is still to come
	let fileStream = null;
	let fieldValue = "";
	state.streamMultipartData({
		cbPartStart: function(headers,name,filename) {
			console.log(`Received file ${name} and ${filename} with ${JSON.stringify(headers)}`);
			if(filename) {
				// File parts are streamed to disk. The client-supplied filename is used as-is for now;
				// moving attachments into a dedicated folder is the next step noted in the commit message
				fileStream = fs.createWriteStream(filename);
			} else {
				// Non-file fields are accumulated as a string
				fieldValue = "";
			}
		},
		cbPartChunk: function(chunk) {
			if(fileStream) {
				fileStream.write(chunk);
			} else {
				fieldValue = fieldValue + chunk;
			}
		},
		cbPartEnd: function() {
			if(fileStream) {
				fileStream.end();
				fileStream = null;
			} else {
				console.log("Data was " + fieldValue);
				fieldValue = "";
			}
		},
		cbFinished: function(err) {
			if(err) {
				state.sendResponse(400,{"Content-Type": "text/plain"},"Bad Request: " + err);
			} else {
				state.sendResponse(200,{"Content-Type": "text/plain"},"Multipart data processed");
			}
		}
	});
};

}());
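
For reference, the raw request body that streamMultipartData walks through (and that the handler above receives part by part) looks roughly like the sketch below. The "X-BOUNDARY" value is a made-up example; the real boundary is read from the Content-Type header of the request.

// Illustrative sketch of the multipart/form-data wire format parsed by streamMultipartData
const examplePayload = [
	"--X-BOUNDARY", // boundary marker ("--" + boundary)
	"Content-Disposition: form-data; name=\"file\"; filename=\"hello.txt\"", // part headers
	"Content-Type: text/plain",
	"", // the blank line ("\r\n\r\n") marks the end of the part headers
	"Hello, multipart world", // part body, delivered via cbPartChunk
	"--X-BOUNDARY--", // final boundary marker terminates the form data
	""
].join("\r\n");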