I want to achieve the functionality where I can upload a certain part of a file to Google Drive. I am able to do this by splitting the original read stream into multiple write streams and then uploading them, but this technique isn't acceptable since the write streams will live on the server (which I don't want).
I've tried to overcome this by sending the original read stream in the body of the upload request but I can't find a way to stop the upload when a certain condition is met. My upload function is as follows:
// Uploads `readStream` to Google Drive as `fileName` via the Drive v3 API,
// and renders the success page once the last chunk's upload has completed.
// `auth` and `google` are the authorized client and googleapis module;
// `res` is the Express response used only when `lastChunk` is truthy.
var upload = (auth, google, fileName, readStream, res, lastChunk) => {
  console.log(`uploading ${fileName}`);
  const drive = google.drive({ version: 'v3', auth });

  // Request parameters: file metadata plus the stream as the media body.
  const createParams = {
    resource: { 'name': fileName },
    media: {
      mimeType: 'application/octet-stream',
      body: readStream
    },
    fields: 'id',
    headers: { 'uploadType': 'multipart' }
  };

  // Per-request options: progress reporting for each chunk received.
  const createOptions = {
    onUploadProgress: function (e) {
      if (e.bytesRead > 786432) {
        // want to stop uploading
        //readStream.close() <- this doesn't stop the upload
      }
      console.log(`Uploaded ${fileName}:`, e.bytesRead.toString());
    }
  };

  drive.files.create(createParams, createOptions, function (err, file) {
    if (err) {
      console.error(err);
      return;
    }
    if (lastChunk)
      res.render('upload-success.hbs');
    console.log('File Id: ', file.id);
  });
};
Is there a way I can stop uploading and have the uploaded part intact on Google Drive? I've tried multiple actions on the stream including closing and pausing, but none seem to stop the upload. Something I would like to add: if I've already read and uploaded X bytes of the stream, then the next upload should contain the correct remaining bytes, i.e. the bytes from X onwards.
Based on this blog, you can add a handler for a request part body chunk received.
Something like this:
// Writes one received body chunk to `fileDescriptor`, throttling the
// request while the write is in flight so chunks cannot interleave.
// When `isLast` is set, the file is closed and `closePromise` fulfilled.
function write_chunk(request, fileDescriptor, chunk, isLast, closePromise) {
  // Stop accepting request data until this chunk has been written.
  request.pause();
  sys.debug("Writing chunk");
  posix.write(fileDescriptor, chunk).addCallback(function() {
    sys.debug("Wrote chunk");
    // Chunk is on disk; safe to receive more data.
    request.resume();
    if (!isLast) return;
    // Final chunk: close the file, then signal completion to the caller.
    sys.debug("Closing file");
    posix.close(fileDescriptor).addCallback(function() {
      sys.debug("Closed file");
      closePromise.emitSuccess();
    });
  });
}
// Handles a multipart file-upload request, streaming each body chunk to
// disk via write_chunk() instead of buffering the whole file in memory.
// `closePromise` fires once the final chunk has been written and the
// file closed.
function upload_file(req, res) {
  // Request body is binary
  req.setBodyEncoding("binary");
  // Parse the request as a multipart stream
  var stream = new multipart.Stream(req);
  // Promise used to emit an event when the output file is closed
  var closePromise = new events.Promise();
  // Handler for each part (one per uploaded file/field) in the request
  stream.addListener("part", function(part) {
    sys.debug("Received part, name = " + part.name + ", filename = " + part.filename);
    var openPromise = null;
    // Handler for each body chunk of the current part
    part.addListener("body", function(chunk) {
      // Calculate upload progress
      var progress = (stream.bytesReceived / stream.bytesTotal * 100).toFixed(2);
      var mb = (stream.bytesTotal / 1024 / 1024).toFixed(1);
      sys.debug("Uploading " + mb + "mb (" + progress + "%)");
      // Open/create the destination file lazily, on the first chunk only.
      // 0o600: owner read/write (legacy `0600` octal is a syntax error in
      // strict-mode/module code; `0o600` is the same value).
      if (openPromise == null) {
        sys.debug("Opening file");
        openPromise = posix.open("./uploads/" + part.filename, process.O_CREAT | process.O_WRONLY, 0o600);
      }
      // Runs after the file is open; if already open it runs immediately.
      openPromise.addCallback(function(fileDescriptor) {
        // Write the chunk; the last chunk also closes the file.
        write_chunk(req, fileDescriptor, chunk,
          (stream.bytesReceived == stream.bytesTotal), closePromise);
      });
    });
  });
} // <- this closing brace was missing in the original snippet
All I/O calls are asynchronous, so the write method does not execute immediately. Therefore, use the addCallback
method of events.Promise
to sequence the method calls and to receive completion notifications. The blog notes that, to achieve robust functioning, request.pause()
and request.resume()
are needed to avoid file corruption.