We've a requirement to upload 100MB document in to Sharepoint. I've found an article where they are uploading using chunks with startUpload, continueUpload and finishUpload methods. I've tried using so, but it's taking 60 mins to upload the document. Is it the right code?
public Map<String, String> uploadFile(String folder, String fileName,
byte[] binary, boolean overwrite,boolean toaddInsideFolder,String localFileURL) throws LoginException, IOException, URISyntaxException {
String url=URIUtil.encodePath("/Files/Add(url='" + fileName + "', overwrite=" + overwrite + ")");
if(toaddInsideFolder)
url = restServiceURL.toString() + "/GetFolderByServerRelativeUrl('" + folder + "')"+url;
else
url = restServiceURL.toString() + "/Folders/GetByUrl('" + folder + "')"+url;
HttpPost httppost = new HttpPost(url);
//To receive xml response add below header to http get request
httppost.addHeader("Accept", "application/json; odata=verbose");
httppost.addHeader("Cookie", getSecurityToken().getAuthCookiesToken());
HttpResponse response = httpclient.execute(httppost);
String statusLine = response.getStatusLine().toString();
String jsonresponse = EntityUtils.toString(response.getEntity());
JSONObject json = new JSONObject(jsonresponse);
String gUid =json.getJSONObject("d").getString("UniqueId");
String endpointUrlS = restServiceURL.toString() + "/GetFileByServerRelativeUrl('"+ folder +"/"+fileName+"')/savebinarystream";
HttpPost httppos = new HttpPost(url);
//To receive xml response add below header to http get request
httppos.addHeader("Accept", "application/json; odata=verbose");
httppos.addHeader("Cookie", getSecurityToken().getAuthCookiesToken());
HttpResponse response1 = httpclient.execute(httppos);
File file = new File(localFileURL);int fileSize=(int) file.length();
final int chunkSize = 50 * 200 * 200;
byte[] buffer = new byte[(int) fileSize <= chunkSize ? (int) fileSize : chunkSize];
long count = 0;
if (fileSize % chunkSize == 0)
count = fileSize / chunkSize;
else
count = (fileSize / chunkSize) + 1;
// try-with-resources to ensure closing stream
try (FileInputStream fis = new FileInputStream(file);
BufferedInputStream bis = new BufferedInputStream(fis)) {
int bytesAmount = 0;
ByteArrayOutputStream baos = new ByteArrayOutputStream();
int i = 0;
String startUploadUrl = "";
int k = 0;
while ((bytesAmount = bis.read(buffer)) > 0) {
baos.write(buffer, 0, bytesAmount);
byte partialData[] = baos.toByteArray();
if (i == 0) {
startUploadUrl = restServiceURL.toString() + "/GetFileByServerRelativeUrl('"+ folder +"/"+fileName+"')/StartUpload(uploadId=guid'"+gUid+"')";
executeMultiPartRequest(startUploadUrl, partialData);
System.out.println("first worked");
// StartUpload call
} else if (i == count-1) {
String finishUploadUrl = restServiceURL.toString() + "/GetFileByServerRelativeUrl('"+ folder +"/"+fileName+"')/FinishUpload(uploadId=guid'"+gUid+"',fileOffset="+i+")";
executeMultiPartRequest(finishUploadUrl, partialData);
System.out.println("FinishUpload worked");
// FinishUpload call
} else {
String continueUploadUrl = restServiceURL.toString() + "/GetFileByServerRelativeUrl('"+ folder +"/"+fileName+"')/ContinueUpload(uploadId=guid'"+gUid+"',fileOffset="+i+")";
executeMultiPartRequest(continueUploadUrl, partialData);
System.out.println("continue worked");
}
i++;
}
}
Map<String, String> result = new HashMap<String, String>();
return result;
}
public void executeMultiPartRequest(String urlString, byte[] fileByteArray) throws IOException, LoginException, URISyntaxException {
HttpPost httppost = new HttpPost(urlString);
httppost.addHeader("Accept", "application/json; odata=verbose");httppost.addHeader("Cookie", getSecurityToken().getAuthCookiesToken());
HttpEntity entity = new ByteArrayEntity(fileByteArray);
httppost.setEntity(entity);
HttpResponse response = httpclient.execute(httppost);
}
httpclient.execute(…)is synchronous, right? So the chunk uploads will happen sequentially, not in parallel.