My project was created with the GAE Plugin for Eclipse (without Maven), and I'm going to post my code, which consists of:
home.jsp
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<title>Upload Test</title>
</head>
<body>
<form action="/upload" method="post" name="putFile" id="putFile"
enctype="multipart/form-data">
<input type="file" name="myFile" id="fileName">
<input type="submit" value="Upload">
</form>
</body>
</html>
UploadServlet.java:
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.nio.channels.Channels;
import java.util.Enumeration;
import java.util.logging.Logger;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.fileupload.FileItemIterator;
import org.apache.commons.fileupload.FileItemStream;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import com.google.appengine.tools.cloudstorage.GcsFileOptions;
import com.google.appengine.tools.cloudstorage.GcsFilename;
import com.google.appengine.tools.cloudstorage.GcsOutputChannel;
import com.google.appengine.tools.cloudstorage.GcsService;
import com.google.appengine.tools.cloudstorage.GcsServiceFactory;
import com.google.appengine.tools.cloudstorage.RetryParams;
public class UploadServlet extends HttpServlet {

    private static final Logger log = Logger.getLogger(UploadServlet.class.getName());

    private final GcsService gcsService = GcsServiceFactory.createGcsService(new RetryParams.Builder()
            .initialRetryDelayMillis(10)
            .retryMaxAttempts(10)
            .totalRetryPeriodMillis(15000)
            .build());

    private String bucketName = "myBucketNameOnGoogleCloudStorage";

    /** Used below to determine the size of chunks to read in. Should be > 1kb and < 10MB */
    private static final int BUFFER_SIZE = 2 * 1024 * 1024;

    @SuppressWarnings("unchecked")
    @Override
    public void doPost(HttpServletRequest req, HttpServletResponse res)
            throws ServletException, IOException {
        String sctype = null, sfieldname, sname = null;
        ServletFileUpload upload;
        FileItemIterator iterator;
        FileItemStream item;
        InputStream stream = null;
        try {
            upload = new ServletFileUpload();
            res.setContentType("text/plain");
            iterator = upload.getItemIterator(req);
            while (iterator.hasNext()) {
                item = iterator.next();
                stream = item.openStream();
                if (item.isFormField()) {
                    log.warning("Got a form field: " + item.getFieldName());
                } else {
                    log.warning("Got an uploaded file: " + item.getFieldName() +
                            ", name = " + item.getName());
                    sfieldname = item.getFieldName();
                    sname = item.getName();
                    sctype = item.getContentType();
                    GcsFilename gcsfileName = new GcsFilename(bucketName, sname);
                    GcsFileOptions options = new GcsFileOptions.Builder()
                            .acl("public-read").mimeType(sctype).build();
                    GcsOutputChannel outputChannel =
                            gcsService.createOrReplace(gcsfileName, options);
                    copy(stream, Channels.newOutputStream(outputChannel));
                    res.sendRedirect("/");
                }
            }
        } catch (Exception ex) {
            throw new ServletException(ex);
        }
    }

    private void copy(InputStream input, OutputStream output) throws IOException {
        try {
            byte[] buffer = new byte[BUFFER_SIZE];
            int bytesRead = input.read(buffer);
            while (bytesRead != -1) {
                output.write(buffer, 0, bytesRead);
                bytesRead = input.read(buffer);
            }
        } finally {
            input.close();
            output.close();
        }
    }
}
I also tried setting the maximum upload size with upload.setSizeMax(-1); and changing BUFFER_SIZE from 2*1024*1024 to 200*1024*1024, but the issue still occurs. To be more specific, when the upload reaches 100% I receive this message on the web page:
Error: Request Entity Too Large. Your client issued a request that was too large.
How can I fix that using Java and the Google Cloud Storage Client Library for Java? (I don't want to drastically change the project by switching to another programming language.)
Could you please help me find a solution? Thank you so much!
The App Engine request size limit is 32 MB. That's why your uploads fail when you send a file larger than 32 MB. Check out the Quotas and Limits section.
You have two options for uploading files larger than 32 MB:
Blobstore API. You can specify a GCS bucket instead of using Blobstore storage space. To do that, use the createUploadUrl(java.lang.String successPath, UploadOptions uploadOptions) method of BlobstoreService (a minimal sketch follows this list). Here's a sample app: https://github.com/crhym3/java-blobstore-gcs-sample
Signed URLs feature of GCS
Or you could just use Google Drive and store only doc IDs in the datastore :)
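Here is a minimal sketch of the Blobstore option. The /upload-handler path, the myFile field name and the bucket name are just illustrative and would need matching servlet mappings in web.xml; each servlet goes in its own source file.

// UploadUrlServlet.java -- serves a form that posts directly to the Blobstore upload URL
import java.io.IOException;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.google.appengine.api.blobstore.BlobKey;
import com.google.appengine.api.blobstore.BlobstoreService;
import com.google.appengine.api.blobstore.BlobstoreServiceFactory;
import com.google.appengine.api.blobstore.UploadOptions;

public class UploadUrlServlet extends HttpServlet {

    private final BlobstoreService blobstore = BlobstoreServiceFactory.getBlobstoreService();

    @Override
    public void doGet(HttpServletRequest req, HttpServletResponse res) throws IOException {
        // The uploaded bytes go straight to the GCS bucket, bypassing the 32 MB
        // request limit; App Engine calls /upload-handler once the upload finishes.
        UploadOptions options = UploadOptions.Builder
                .withGoogleStorageBucketName("myBucketNameOnGoogleCloudStorage");
        String uploadUrl = blobstore.createUploadUrl("/upload-handler", options);

        res.setContentType("text/html");
        res.getWriter().println(
                "<form action=\"" + uploadUrl + "\" method=\"post\" enctype=\"multipart/form-data\">"
                + "<input type=\"file\" name=\"myFile\">"
                + "<input type=\"submit\" value=\"Upload\">"
                + "</form>");
    }
}

// UploadHandlerServlet.java -- called by App Engine after the upload completes
public class UploadHandlerServlet extends HttpServlet {

    private final BlobstoreService blobstore = BlobstoreServiceFactory.getBlobstoreService();

    @Override
    public void doPost(HttpServletRequest req, HttpServletResponse res) throws IOException {
        // The file is already stored in GCS at this point; we only get its BlobKey.
        Map<String, List<BlobKey>> blobs = blobstore.getUploads(req);
        BlobKey key = blobs.get("myFile").get(0);
        // The BlobKey can be persisted and the file served later with blobstore.serve(key, res).
        res.sendRedirect("/");
    }
}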
I suggest you take a look at this great example: http://docs.oracle.com/javaee/6/tutorial/doc/glraq.html. A good idea would be to monitor the data stream to the server, for instance with a progress listener as sketched below.
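Commons FileUpload (already used in the question) supports this via a ProgressListener registered on the ServletFileUpload before the items are read. A minimal sketch; the once-per-megabyte throttling and the log messages are just illustrative:

import java.util.logging.Logger;
import org.apache.commons.fileupload.ProgressListener;
import org.apache.commons.fileupload.servlet.ServletFileUpload;

public class UploadProgress {

    private static final Logger log = Logger.getLogger(UploadProgress.class.getName());

    /** Attaches a listener that logs upload progress roughly once per megabyte. */
    public static void attachProgressListener(ServletFileUpload upload) {
        upload.setProgressListener(new ProgressListener() {
            private long lastMegaBytes = -1;

            @Override
            public void update(long bytesRead, long contentLength, int itemIndex) {
                long megaBytes = bytesRead / (1024 * 1024);
                if (megaBytes == lastMegaBytes) {
                    return; // avoid flooding the log
                }
                lastMegaBytes = megaBytes;
                if (contentLength == -1) {
                    log.info("Item " + itemIndex + ": " + bytesRead + " bytes read so far");
                } else {
                    log.info("Item " + itemIndex + ": " + bytesRead
                            + " of " + contentLength + " bytes read");
                }
            }
        });
    }
}

In the question's doPost this could be called right after new ServletFileUpload() and before getItemIterator(req).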
Hope it helps