How to upload assets to Pics.io
Google Drive
- Get a URL and token for the file upload: POST /images/buildGDUploadLink (append /{assetId} to upload a new revision of an existing asset).
- Upload the file to that URL with an
Authorization: Bearer ${token}
header set on the request.
- In the upload response object, replace id with storageId (it becomes part of the assetData needed to create an asset) and send a POST request to /images (both assetData and additionalFields are required). The response is the created asset.
// Request a one-time upload URL and token from Pics.io.
// Pass assetId only when uploading a new revision of an existing asset.
function getGDUploadUrl(token, fileName, fileSize, mimeType, storageCollectionId, assetId) {
  let url = 'https://api.pics.io/images/buildGDUploadLink';
  if (assetId) {
    url += `/${assetId}`;
  }
  // Google Drive file metadata: name, target folder and MIME type
  const metadata = {
    title: fileName,
    parents: storageCollectionId ? [{ id: storageCollectionId }] : undefined,
    mimeType
  };
  return fetch(url, {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${token}`,
      'Content-Type': 'application/json'
    },
    body: JSON.stringify({ metadata, contentType: mimeType, contentLength: fileSize })
  }).then(r => r.json());
}
// PUT the file to the upload URL. Resolves with the parsed response body
// (the Google Drive file resource), which the caller reads id,
// headRevisionId and imageMediaMetadata from.
function sendFile(url, tokenForUpload, file, contentType, contentLength, onProgress) {
  return new Promise((resolve, reject) => {
    const xhr = new XMLHttpRequest();
    xhr.upload.onprogress = ({ loaded, total }) => onProgress(loaded, total, xhr);
    // Resolve with the response body, not the load event
    xhr.onload = () => resolve(JSON.parse(xhr.responseText));
    xhr.onabort = reject;
    xhr.onerror = reject;
    xhr.open('PUT', url, true);
    xhr.setRequestHeader('Authorization', `Bearer ${tokenForUpload}`);
    xhr.setRequestHeader('X-Upload-Content-Type', contentType);
    xhr.setRequestHeader('X-Upload-Content-Length', contentLength);
    xhr.send(file);
  });
}
async function uploadFile(file, collectionId, storageCollectionId, token, id) {
  const fileName = file.name;
  const fileSize = file.size;
  const mimeType = file.type;
  // Step 1: get the upload URL and a one-time upload token
  const res = await getGDUploadUrl(token, fileName, fileSize, mimeType, storageCollectionId, id);
  const { url: uploadUrl, token: tokenForUpload } = res;
  // Step 2: PUT the file; the response is the Google Drive file resource
  const uploadResponse = await sendFile(uploadUrl, tokenForUpload, file, mimeType, fileSize, (loaded, total) => {
    console.info(`Loaded ${loaded} of ${total}`);
  });
  // Step 3: build assetData from the upload response; storageId replaces the
  // Google Drive file id
  let assetData = {
    ...uploadResponse,
    uploadId: Date.now(),
    storageId: uploadResponse.id,
    tags: [{ _id: collectionId }]
  };
  if (id) {
    // For a new revision, send only the revision-specific fields; the
    // Drive metadata comes from the upload response
    const { imageMediaMetadata, headRevisionId, revisionId } = uploadResponse;
    const { width, height, rotation } = imageMediaMetadata || {};
    assetData = {
      revisionId: headRevisionId || revisionId,
      fileSize: `${fileSize}`,
      width,
      height,
      rotation
    };
  }
  delete assetData.id;
  const additionalFields = {
    comment: '',
    title: '',
    description: '',
    keywordsIds: [],
    assigneeIds: [],
    flag: null,
    color: null,
    rating: null,
    selectedCustomFields: []
  };
  // Create a new asset, or a new revision when an asset id was passed
  let url = 'https://api.pics.io/images';
  if (id) {
    url = `https://api.pics.io/images/${id}/revisions`;
  }
  return fetch(url, {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${token}`,
      'Content-Type': 'application/json'
    },
    body: JSON.stringify({ assetData, additionalFields })
  }).then(r => r.json());
}
export default uploadFile;
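A minimal usage sketch for the Google Drive variant, assuming a browser page with a file input and that uploadFile has been imported; every id and the token below are hypothetical placeholders:
// Hypothetical usage: all ids and the token are placeholders.
const input = document.querySelector('input[type="file"]');
input.addEventListener('change', async () => {
  const file = input.files[0];
  const asset = await uploadFile(
    file,
    'PICSIO_COLLECTION_ID', // Pics.io collection _id (placeholder)
    'GD_FOLDER_ID',         // Google Drive folder id backing the collection (placeholder)
    'YOUR_API_TOKEN'        // Pics.io API token (placeholder)
  );
  console.log('Created asset:', asset);
});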
S3 Storage
Uploads to S3 storage use multipart upload:
- Get presigned part URLs via /images/buildS3UploadLink (append /{assetId} for a new revision). The response contains urls, uploadId, storageId, revisionId and chunkNums.
- Split the file into chunkNums chunks, PUT each chunk to its presigned URL, and collect the ETag response header of every part.
- Complete the upload by sending the collected parts to /images/completeS3Multipart.
// Request presigned multipart-upload URLs from Pics.io.
// Pass assetId only when uploading a new revision; lightboardId is optional
// and targets a lightboard instead of a collection.
function getS3UploadUrl(token, fileName, fileSize, mimeType, collectionId, assetId, lightboardId) {
  let url = 'https://api.pics.io/images/buildS3UploadLink';
  if (assetId) {
    url += `/${assetId}`;
  }
  return fetch(url, {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${token}`,
      'Content-Type': 'application/json'
    },
    body: JSON.stringify({ fileName, fileSize, mimeType, collectionId, lightboardId })
  }).then(r => r.json());
}
// Tell Pics.io that every part has been uploaded so the file can be assembled.
function completeMultipart(token, parts, uploadId, storageId) {
  return fetch('https://api.pics.io/images/completeS3Multipart', {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${token}`,
      'Content-Type': 'application/json'
    },
    body: JSON.stringify({ parts, uploadId, storageId })
  }).then(r => r.json());
}
// PUT one chunk to its presigned URL and resolve with the part's ETag.
// Reading the ETag header cross-origin requires the bucket's CORS
// configuration to expose it (ExposeHeader: ETag).
function sendFile(url, file, storageId, revisionId, partNum, onProgress) {
  return new Promise((resolve, reject) => {
    const xhr = new XMLHttpRequest();
    xhr.upload.onprogress = ({ loaded, total }) => onProgress(loaded, total, xhr);
    xhr.onload = () => {
      const etag = xhr.getResponseHeader('etag');
      resolve({
        storageId,
        etag,
        partNum,
        headRevisionId: revisionId,
        fileSize: file.size.toString()
      });
    };
    xhr.onabort = reject;
    xhr.onerror = reject;
    xhr.open('PUT', url, true);
    xhr.setRequestHeader('content-disposition', 'attachment');
    xhr.send(file);
  });
}
async function uploadFile(file, collectionId, token, id) {
  const fileName = file.name;
  const fileSize = file.size;
  const mimeType = file.type;
  // Step 1: get presigned part URLs and the multipart-upload metadata
  const res = await getS3UploadUrl(token, fileName, fileSize, mimeType, collectionId, id);
  const { urls: urlsData, uploadId, storageId, revisionId, chunkNums } = res;
  // Split the file into chunkNums chunks; the last chunk takes the remainder
  const chunkSize = Math.ceil(file.size / chunkNums);
  const fileBlobChunks = [...Array(chunkNums).keys()].map(num => {
    const start = num * chunkSize;
    const end = (num + 1) * chunkSize;
    return num < chunkNums - 1 ? file.slice(start, end, file.type) : file.slice(start, undefined, file.type);
  });
  // Step 2: upload each chunk to its presigned URL and collect the ETags
  let initialPercentage = 0;
  const parts = [];
  for (const [index, urlObject] of urlsData.entries()) {
    if (index > 0) initialPercentage += 100 / chunkNums;
    const chunk = fileBlobChunks[urlObject.partNum - 1];
    const result = await sendFile(urlObject.url, chunk, storageId, revisionId, urlObject.partNum, (loaded, total) => {
      const percentage = initialPercentage + (loaded / total) * (100 / chunkNums);
      console.info(percentage);
    });
    parts.push({
      ETag: result.etag,
      PartNumber: result.partNum
    });
  }
  // Step 3: complete the multipart upload
  return completeMultipart(token, parts, uploadId, storageId);
}
export default uploadFile;
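A minimal usage sketch for the S3 variant, again with hypothetical placeholder values:
// Hypothetical usage: the collection id and token are placeholders.
const input = document.querySelector('input[type="file"]');
input.addEventListener('change', async () => {
  const file = input.files[0];
  const result = await uploadFile(
    file,
    'PICSIO_COLLECTION_ID', // Pics.io collection _id (placeholder)
    'YOUR_API_TOKEN'        // Pics.io API token (placeholder)
  );
  console.log('Multipart upload completed:', result);
});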