The AWS Rekognition Javascript API states that for rekognition.compareFaces(params,...)
method, the SourceImage
and TargetImage
can take Bytes
or S3Object
. I want to use the Bytes
which can be
"Bytes — (Buffer, Typed Array, Blob, String)"
Blob of image bytes up to 5 MBs.
When I pass the Base64
encoded string of the images, the JS SDK re-encodes it again (i.e. it ends up double-encoded). Hence the server responds with an error saying
{"__type":"InvalidImageFormatException","Message":"Invalid image
encoding"}
Did anyone manage to use the compareFaces JS SDK API using base64 encoded images (not S3Object
)? or any JavaScript examples using Bytes
param would help.
The technique from this AWS Rekognition JS SDK Invalid image encoding error thread worked.
Convert the base64 image encoding to a ArrayBuffer:
/**
 * Decode a base64 string into an ArrayBuffer of raw image bytes.
 *
 * @param {string} base64Image Base64 payload WITHOUT the `data:...;base64,` prefix.
 * @returns {ArrayBuffer} The decoded bytes, suitable for Rekognition's `Bytes` param.
 */
function getBinary(base64Image) {
  // atob() yields a "binary string": one UTF-16 char per byte (0-255).
  var binaryImg = atob(base64Image);
  // Map each char code straight into a typed array instead of a manual loop.
  return Uint8Array.from(binaryImg, function(c) {
    return c.charCodeAt(0);
  }).buffer;
}
Pass into rekognition as Bytes
parameter:
// Grab the canvas as a JPEG data URL, strip the `data:...;base64,` prefix,
// decode to binary, and assemble the Rekognition request.
const data = canvas.toDataURL('image/jpeg');
const base64Image = data.replace(/^data:image\/(png|jpeg|jpg);base64,/, '');
const imageBytes = getBinary(base64Image);
const rekognitionRequest = {
  CollectionId: collectionId,
  Image: {
    Bytes: imageBytes
  }
};
Based on the answer supplied by @Sean, I wanted to add another way to get the bytes from a URL request using axios and passed to rekognition.detectLabels()
-- or other various detection methods for Amazon Rekognition.
I went ahead create a promise for fs.readFile
that should work with the async/await structure. Then some regex to determine if you need a URL fetch or file read as a fallback.
I've also added a check for Gray
and World Of Warcraft
for the labels. Not sure if anyone else experiences that but lorempixel seems to throw those labels every once in a while. I've seen them show on an all black image before as well.
/* jshint esversion: 6, node:true, devel: true, undef: true, unused: true */
// aws-sdk for Rekognition, axios for URL fetches, fs/path for local files.
const AWS = require('aws-sdk'),
axios = require('axios'),
fs = require('fs'),
path = require('path');
// Get credentials from environmental variables.
const {S3_ACCESS_KEY, S3_SECRET_ACCESS_KEY, S3_REGION} = process.env;
// Set AWS credentials.
AWS.config.update({
accessKeyId: S3_ACCESS_KEY,
secretAccessKey: S3_SECRET_ACCESS_KEY,
region: S3_REGION
});
// Rekognition client pinned to the 2016-06-27 API version.
const rekognition = new AWS.Rekognition({
apiVersion: '2016-06-27'
});
// Kick off the demo run (fire-and-forget async call).
startDetection();
// ----------------
/**
 * Run detection over a local test image and several remote sample images,
 * logging each result. Finishes with a sanity check on the last result.
 */
async function startDetection() {
  const targets = [
    path.join(__dirname, 'test.jpg'),
    'https://upload.wikimedia.org/wikipedia/commons/9/96/Bill_Nye%2C_Barack_Obama_and_Neil_deGrasse_Tyson_selfie_2014.jpg',
    'http://placekitten.com/g/200/300',
    'https://loremflickr.com/g/320/240/text',
    'http://lorempixel.com/400/200/sports/'
  ];
  let found = {};
  // Detect each image in turn and log what came back.
  for (const target of targets) {
    found = await detectFromPath(target);
    console.log(found);
  }
  // Sometimes 'Gray' and 'World Of Warcraft' are the only labels...
  if (found && found.labels.length === 2 && found.labels.some(i => i.Name === 'Gray') && found.labels.some(i => i.Name === 'World Of Warcraft')) {
    console.log('⚠️', '\n\tMaybe this is a bad image...`Gray` and `World Of Warcraft`???\n');
  }
}
// ----------------
/**
 * Detect labels, text, faces, celebrities, and moderation labels for an
 * image given either as a URL or as a local file path.
 *
 * @param {string} path URL or filepath on your local machine.
 * @param {Number} maxLabels Max labels to return (defaults inside detectLabelsFromBytes).
 * @param {Number} minConfidence Minimum confidence for labels/moderation.
 * @param {array} attributes Face attributes: 'ALL' or 'DEFAULT'.
 * @returns {Promise<Object>} `{path, faces, labels, text, celebs, moderation}` —
 *   arrays are empty when the image could not be loaded or a call failed.
 */
async function detectFromPath(path, maxLabels, minConfidence, attributes) {
  // Convert path to binary Buffer data (URL fetch vs. local file read).
  const bytes = /^https?:\/\//.test(path) ?
    await getBase64BufferFromURL(path) :
    await getBase64BufferFromFile(path);
  // Invalid data — bail out with an empty result set.
  if (!bytes)
    return {
      path,
      faces: [],
      labels: [],
      text: [],
      celebs: [],
      moderation: []
    };
  // The five detection calls are independent of each other, so run them
  // in parallel instead of awaiting them one at a time.
  let [labels, text, faces, celebs, moderation] = await Promise.all([
    detectLabelsFromBytes(bytes, maxLabels, minConfidence),
    detectTextFromBytes(bytes),
    detectFacesFromBytes(bytes, attributes),
    recognizeCelebritiesFromBytes(bytes),
    detectModerationLabelsFromBytes(bytes, minConfidence)
  ]);
  // Normalize each raw response down to the values we care about
  // (failed calls resolved to undefined via their .catch handlers).
  labels = labels && labels.Labels ? labels.Labels : [];
  faces = faces && faces.FaceDetails ? faces.FaceDetails : [];
  text = text && text.TextDetections ? text.TextDetections.map(i => i.DetectedText) : [];
  celebs = celebs && celebs.CelebrityFaces ? celebs.CelebrityFaces.map(i => ({
    Name: i.Name,
    MatchConfidence: i.MatchConfidence
  })) : [];
  moderation = moderation && moderation.ModerationLabels ? moderation.ModerationLabels.map(i => ({
    Name: i.Name,
    Confidence: i.Confidence
  })) : [];
  // Return collection.
  return {
    path,
    faces,
    labels,
    text,
    celebs,
    moderation
  };
}
/**
 * Read a local file and resolve with its raw bytes as a Buffer.
 * https://nodejs.org/api/fs.html#fs_fs_readfile_path_options_callback
 *
 * Reading without an encoding already yields a Buffer, so the old
 * base64 round-trip (and the deprecated `new Buffer(...)`) is unnecessary.
 *
 * @param {string} filename
 * @returns {Promise<Buffer|undefined>} File bytes, or undefined on error
 *   (errors are logged, matching the other helpers' best-effort style).
 */
function getBase64BufferFromFile(filename) {
  return (new Promise(function(resolve, reject) {
    fs.readFile(filename, (err, data) => {
      if (err) return reject(err);
      resolve(data); // `data` is already a Buffer when no encoding is given.
    });
  })).catch(error => {
    console.log('[ERROR]', error);
  });
}
/**
 * Fetch an image over HTTP(S) and resolve with its raw bytes as a Buffer.
 * https://github.com/axios/axios
 *
 * @param {string} url
 * @returns {Promise<Buffer|undefined>} Image bytes, or undefined on error
 *   (errors are logged, matching the other helpers' best-effort style).
 */
function getBase64BufferFromURL(url) {
  return axios
    .get(url, {
      // Ask axios for the raw bytes; the default would decode to a string.
      responseType: 'arraybuffer'
    })
    // response.data is an ArrayBuffer of raw bytes, NOT base64 text. The old
    // `new Buffer(response.data, 'base64')` was doubly wrong: `new Buffer` is
    // deprecated, and for ArrayBuffer input the second argument is a
    // byteOffset, so 'base64' was silently ignored.
    .then(response => Buffer.from(response.data))
    .catch(error => {
      console.log('[ERROR]', error);
    });
}
/**
 * Detect labels in an image supplied as raw bytes.
 * https://docs.aws.amazon.com/rekognition/latest/dg/labels.html
 * https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Rekognition.html#detectLabels-property
 *
 * @param {Buffer} bytes
 * @param {Number} maxLabels Maximum labels to return (default 1000).
 * @param {Number} minConfidence Minimum label confidence (default 50.0).
 * @returns {Promise<Object|undefined>} DetectLabels response, or undefined on error.
 */
function detectLabelsFromBytes(bytes, maxLabels = 1000, minConfidence = 50.0) {
  const params = {
    Image: {
      Bytes: bytes
    },
    MaxLabels: maxLabels,
    MinConfidence: minConfidence
  };
  return rekognition
    .detectLabels(params)
    .promise()
    .catch((error) => {
      console.error('[ERROR]', error);
    });
}
/**
 * Detect text in an image supplied as raw bytes.
 * https://docs.aws.amazon.com/rekognition/latest/dg/text-detection.html
 * https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Rekognition.html#detectText-property
 *
 * @param {Buffer} bytes
 * @returns {Promise<Object|undefined>} DetectText response, or undefined on error.
 */
function detectTextFromBytes(bytes) {
  const params = {
    Image: {
      Bytes: bytes
    }
  };
  return rekognition
    .detectText(params)
    .promise()
    .catch((error) => {
      console.error('[ERROR]', error);
    });
}
/**
 * Recognize celebrities in an image supplied as raw bytes.
 * https://docs.aws.amazon.com/rekognition/latest/dg/celebrities.html
 * https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Rekognition.html#recognizeCelebrities-property
 *
 * @param {Buffer} bytes
 * @returns {Promise<Object|undefined>} RecognizeCelebrities response, or undefined on error.
 */
function recognizeCelebritiesFromBytes(bytes) {
  const params = {
    Image: {
      Bytes: bytes
    }
  };
  return rekognition
    .recognizeCelebrities(params)
    .promise()
    .catch((error) => {
      console.error('[ERROR]', error);
    });
}
/**
 * Detect moderation (unsafe-content) labels in an image supplied as raw bytes.
 * https://docs.aws.amazon.com/rekognition/latest/dg/moderation.html
 * https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Rekognition.html#detectModerationLabels-property
 *
 * @param {Buffer} bytes
 * @param {Number} minConfidence Minimum label confidence (default 60.0).
 * @returns {Promise<Object|undefined>} DetectModerationLabels response, or undefined on error.
 */
function detectModerationLabelsFromBytes(bytes, minConfidence = 60.0) {
  const params = {
    Image: {
      Bytes: bytes
    },
    MinConfidence: minConfidence
  };
  return rekognition
    .detectModerationLabels(params)
    .promise()
    .catch((error) => {
      console.error('[ERROR]', error);
    });
}
/**
 * Detect faces in an image supplied as raw bytes.
 * https://docs.aws.amazon.com/rekognition/latest/dg/faces.html
 * https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Rekognition.html#detectFaces-property
 *
 * @param {Buffer} bytes
 * @param {array} attributes Attributes can be "ALL" or "DEFAULT". "DEFAULT" includes:
 *   BoundingBox, Confidence, Landmarks, Pose, and Quality. Defaults to ['ALL'].
 * @returns {Promise<Object|undefined>} DetectFaces response, or undefined on error.
 */
function detectFacesFromBytes(bytes, attributes = ['ALL']) {
  const params = {
    Image: {
      Bytes: bytes
    },
    Attributes: attributes
  };
  return rekognition
    .detectFaces(params)
    .promise()
    .catch((error) => {
      console.error('[ERROR]', error);
    });
}
/**
 * Compare the largest face in the source image against faces in the target image.
 * https://docs.aws.amazon.com/rekognition/latest/dg/API_CompareFaces.html
 * https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Rekognition.html#compareFaces-property
 *
 * @param {Buffer} sourceBytes
 * @param {Buffer} targetBytes
 * @param {Number} similarityThreshold Minimum similarity to include a match (default 0.0).
 * @returns {Promise<Object|undefined>} CompareFaces response, or undefined on error.
 */
function compareFaces(sourceBytes, targetBytes, similarityThreshold) {
  return rekognition
    // BUG FIX: this previously called detectModerationLabels, which does not
    // accept SourceImage/TargetImage/SimilarityThreshold at all.
    .compareFaces({
      SourceImage: {
        Bytes: sourceBytes
      },
      TargetImage: {
        Bytes: targetBytes
      },
      SimilarityThreshold: typeof similarityThreshold !== 'undefined' ? similarityThreshold : 0.0
    })
    .promise()
    .catch(error => {
      console.error('[ERROR]', error);
    });
}
Resources:
- https://github.com/axios/axios
- https://docs.aws.amazon.com/rekognition/latest/dg/labels.html
- https://docs.aws.amazon.com/rekognition/latest/dg/text-detection.html
- https://docs.aws.amazon.com/rekognition/latest/dg/celebrities.html
- https://docs.aws.amazon.com/rekognition/latest/dg/moderation.html
- https://docs.aws.amazon.com/rekognition/latest/dg/faces.html
- https://docs.aws.amazon.com/rekognition/latest/dg/API_CompareFaces.html
- https://docs.aws.amazon.com/rekognition/latest/dg/image-bytes-javascript.html
AWS JavaScript SDK Reference:
- detectLabels
- detectText
- recognizeCelebrities
- detectModerationLabels
- detectFaces
- compareFaces
Reference:
- Download an image using Axios and convert it to base64
- Upload a binary file to S3 using AWS SDK for Node.js
- AWS Rekognition JS SDK Invalid image encoding error
- Pipe a stream to s3.upload()
- untarring files to S3 fails, not sure why
- Using Promises with fs.readFile in a loop
- How do I return the response from an asynchronous call?
- NodeJS UnhandledPromiseRejectionWarning
- How do I check whether an array contains a string in TypeScript?
- Do you need to use path.join in node.js?
I was running into a similar issue when reading in a file in Node as a byte array buffer and sending it to Rekognition.
I solved it by instead reading in the base64 representation, then turning it into a buffer like this:
const aws = require('aws-sdk');
const fs = require('fs');
const rekognition = new aws.Rekognition({
  apiVersion: '2016-06-27'
});
// pull base64 representation of image from file system (or somewhere else)
fs.readFile('./test.jpg', 'base64', (err, data) => {
  // surface read failures instead of silently passing undefined along
  if (err) throw err;
  // create a new buffer out of the string passed to us by fs.readFile()
  // (Buffer.from replaces the deprecated `new Buffer(...)` constructor)
  const buffer = Buffer.from(data, 'base64');
  // now that we have things in the right type, send it to rekognition
  rekognition.detectLabels({
    Image: {
      Bytes: buffer
    }
  }).promise()
    .then((res) => {
      // print out the labels that rekognition sent back
      console.log(res);
    })
    // don't leave the rejection unhandled (UnhandledPromiseRejectionWarning)
    .catch((err) => console.error(err));
});
This might also be relevant to people getting the: Expected params.Image.Bytes to be a string, Buffer, Stream, Blob, or typed array object
message.
I had the same issue and I'm going to tell you how I solved it.
The image formats Amazon Rekognition supports are JPEG and PNG.
This means that if your input image is encoded in any other format, such as WebP, you will always get that same error.
After converting images that were not JPEG- or PNG-encoded into JPEG, the problem was solved.
I hope this helps you solve the problem!
It seems that converting the string to a buffer works more consistently, but documentation on it is very hard to find.
For Node, you can use this to build the params from the string (make sure you strip the leading `data:...;base64,` prefix, up to and including the comma):
// Buffer.from() replaces the deprecated `new Buffer(...)` constructor.
var params = {
  CollectionId: collectionId,
  Image: {
    Bytes: Buffer.from(imageBytes, 'base64')
  }
};
In plain JavaScript, you can convert the base64 string and pass the resulting ArrayBuffer using this code:
/**
 * Decode a base64 string into an ArrayBuffer of raw image bytes.
 *
 * @param {string} base64Image Base64 payload WITHOUT the `data:...;base64,` prefix.
 * @returns {ArrayBuffer} The decoded bytes.
 */
function getBinary(base64Image) {
  // Decode straight to bytes. The previous version round-tripped through
  // .toString() (UTF-8 by default), which corrupts every byte >= 0x80.
  var buf = Buffer.from(base64Image, 'base64');
  // Slice out exactly this buffer's bytes — Node Buffers may be views into
  // a larger shared pool, so returning buf.buffer directly would be wrong.
  return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
}