Finally, I made it work.
I split the big file into chunks with JavaScript code in my browser; each chunk gets its own IpfsHttpClient session, which makes it possible to upload or download files larger than 100 MB.
Even though I had enough physical bandwidth, the more sessions I created, the slower each individual session became — although not exactly 1/n.
I think this is because JavaScript runs this work on a single thread, so issuing many add or get calls does not guarantee more speed.
This is part of the sample code I wrote. It's a bit messy because I only intended it for testing.
// Select an IPFS HTTP client backend from the #ipfs_client <select>.
// Fills the global `ipfs` array with one client per chunk so that
// sendIpfs()/getIpfs() can always index `ipfs[i]`, whichever gateway
// was chosen.
chooseClient = function () {
  const selectedClient = $('#ipfs_client').val();
  console.log(selectedClient);
  if (selectedClient === 'infura') {
    for (let i = 0; i < numOfChunks; i++) {
      ipfs[i] = window.IpfsHttpClient({
        host: 'ipfs.infura.io',
        port: 5001,
        protocol: 'https'
      });
    }
    $('#title').html('IPFS Client TEST (Infura)');
  } else if (selectedClient === 'private_gateway') {
    // BUG FIX: the original assigned a single client to `ipfs` here, but
    // sendIpfs()/getIpfs() index `ipfs[i]` per chunk — populate the array
    // exactly like the infura branch so both gateways behave the same.
    for (let i = 0; i < numOfChunks; i++) {
      ipfs[i] = window.IpfsHttpClient({
        host: 'devipfs.nemodax.com',
        port: 5001,
        protocol: 'https'
      });
    }
    $('#title').html('IPFS Client TEST (devipfs Private Gateway)');
  }
};
/**
 * Upload the pre-chunked buffers in the global `uploadBuf` as `numOfChunks`
 * parallel IPFS adds, then upload a comma-separated "chunk map" of the
 * resulting directory hashes as a small index object; the index hash is the
 * handle for the whole file.
 *
 * Relies on globals: numOfChunks, uploadBuf, fullLength, ipfs[], Ipfs, $.
 */
async function sendIpfs() {
  const uploadStartTime = new Date();
  console.log("start upload (api): ", uploadStartTime);
  // Extract the bare filename from the <input type="file"> fake path
  // ("C:\fakepath\name.ext" -> "name.ext").
  const path = $("#file").val().split("\\")[2];
  // FIX: these were implicit globals in the original; keep them function-local.
  const results = new Array(numOfChunks);
  const hashBuffer = new Array(numOfChunks);
  let uploadedChunkCnt = 0;
  for (let i = 0; i < numOfChunks; i++) {
    // FIX: must be block-scoped (`const`, not `var`) — with `var`, every
    // .then() callback saw the timestamp of the LAST loop iteration, so all
    // per-chunk timing figures were wrong.
    const chunkStartTime = new Date();
    console.log("start upload chunk[", i, "](api): ", chunkStartTime);
    ipfs[i].add([{
      path: path + '.nemo.' + i,
      content: uploadBuf[i],
    }], {
      recursive: true,
      wrapWithDirectory: true
    }).then((res) => {
      // Recover the chunk index from the ".nemo.<i>" suffix of the uploaded
      // path, since completions arrive out of order.
      const token = res[0].path.split('.');
      const chunkNum = token[token.length - 1];
      results[chunkNum] = res;
      console.log(results[chunkNum]);
      const chunkEndTime = new Date();
      const chunkInterval = chunkEndTime - chunkStartTime; // ms
      const chunkSize = uploadBuf[chunkNum].length; // byte
      const speed = chunkSize / chunkInterval; // byte / ms == kb/s
      console.log('interval upload chunk[', chunkNum, '](api): ', chunkInterval / 1000, 'size:', chunkSize / 1000, 'kb', speed, 'kb/s');
      // res[1] is the wrapping directory entry (wrapWithDirectory: true);
      // its hash is what the chunk map references.
      hashBuffer[chunkNum] = results[chunkNum][1].hash;
      console.log(hashBuffer[chunkNum]);
      uploadedChunkCnt++;
      if (uploadedChunkCnt === numOfChunks) {
        // All chunks uploaded: publish the index ("hash0,hash1,...,").
        let chunkMap = "";
        for (let j = 0; j < numOfChunks; j++) {
          chunkMap = chunkMap + hashBuffer[j] + ',';
        }
        ipfs[0].add(Ipfs.Buffer.from(chunkMap)).then((indexRes) => {
          const indexHash = indexRes[0].hash;
          const uploadFileSize = fullLength; // byte
          const uploadEndTime = new Date();
          const uploadInterval = uploadEndTime - uploadStartTime; // ms
          const uploadSpeed = uploadFileSize / uploadInterval; // byte / ms == kb/s
          console.log('* upload hash:', indexHash);
          console.log('* chunkMap:', chunkMap);
          console.log('* upload path:', path);
          console.log('* upload size:', formatSizeUnits(uploadFileSize));
          console.log('* upload speed:', uploadSpeed, 'kb/s');
          console.log('* upload interval:', uploadInterval / 1000, 'sec');
        }).catch((err) => console.error('chunk-map upload failed:', err));
      }
    }).catch((err) => console.error('chunk upload failed:', err)); // FIX: no floating promises
  }
}
/**
 * Download a chunked file: fetch the chunk-map index by CID, fetch every
 * chunk in parallel, reassemble them in order, and offer the result as a
 * Blob download link.
 *
 * Relies on globals: numOfChunks, downloadBuf, ipfs[], Ipfs, $, formatSizeUnits.
 */
async function getIpfs() {
  const validCID = document.getElementById('downloadHash').value;
  $('#get_ipfs').parent().append("<br> <span> it's been downloading, please wait... </span>");
  const downloadStartTime = new Date();
  console.log("start download(api): ", downloadStartTime);
  // The index object is a comma-separated list of per-chunk directory hashes.
  const res = await ipfs[0].cat(validCID);
  const chunkMapArr = res.toString().split(',');
  let receivedChunkCnt = 0;
  const fileObject = new Array(numOfChunks);
  console.log("chunkMapArr", chunkMapArr);
  for (let i = 0; i < numOfChunks; i++) {
    // FIX: block-scoped (`const`, not `var`) — with `var` every .then()
    // callback measured against the LAST iteration's timestamp.
    const chunkStartTime = new Date();
    console.log("start download chunk[", i, "](api): ", chunkStartTime);
    ipfs[i].get(chunkMapArr[i]).then((files) => {
      console.log(files);
      files.forEach((file) => {
        // Directory entries have no content; only process the file entry.
        if (typeof file.content !== 'undefined') {
          console.log(file.path);
          const fileName = file.path.split('/')[1];
          const token = fileName.split('.');
          // FIX: `chunkNum` was an implicit global shared by all concurrently
          // resolving callbacks — a data race; make it local.
          const chunkNum = token[token.length - 1];
          fileObject[chunkNum] = file;
          const chunkEndTime = new Date();
          const chunkInterval = chunkEndTime - chunkStartTime; // ms
          const chunkSize = file.content.length; // byte
          const speed = chunkSize / chunkInterval; // byte / ms == kb/s
          console.log('interval download chunk[', chunkNum, '](api): ', chunkInterval / 1000, 'size:', chunkSize / 1000, 'kb', speed, 'kb/s');
          receivedChunkCnt++;
          if (receivedChunkCnt === numOfChunks) {
            // Last chunk arrived: concatenate in index order and build a Blob.
            let downloadFileSize = 0;
            for (let j = 0; j < numOfChunks; j++) {
              downloadFileSize += fileObject[j].content.length;
              downloadBuf[j] = fileObject[j].content;
            }
            const mergedContent = Ipfs.Buffer.concat(downloadBuf, downloadFileSize);
            const blob = new Blob(
              [mergedContent], {
                type: 'video/mp4'
              }
            );
            const blobUrl = URL.createObjectURL(blob);
            $('#get_ipfs').parent().append("<a>다운받기</a>");
            $('#get_ipfs ~ a:last')[0].setAttribute('href', blobUrl);
            $('#get_ipfs ~ a:last')[0].innerHTML = '다운받기';
            // FIX: slice(0, length - 7) only stripped ".nemo.N" for a
            // single-digit chunk index; strip the suffix by position so
            // files with >= 10 chunks get the correct download name.
            $('#get_ipfs ~ a:last')[0].download = fileName.slice(0, fileName.lastIndexOf('.nemo.'));
            const downloadEndTime = new Date();
            const downloadInterval = downloadEndTime - downloadStartTime; // ms
            const downloadSpeed = downloadFileSize / downloadInterval; // byte / ms == kb/s
            console.log('* download hash:', validCID);
            console.log('* chunkMap:', chunkMapArr);
            console.log('* path:', fileObject[0].path);
            console.log('* download size:', formatSizeUnits(downloadFileSize));
            console.log('* download speed:', downloadSpeed, 'kb/s');
            console.log('* download interval:', downloadInterval / 1000, 'sec');
          }
        }
      });
    }).catch((err) => console.error('chunk download failed:', err)); // FIX: no floating promises
  }
}