'Error: Payload Too Large' occurs when I try to add some files to the Infura IPFS service

Hello guys,

I tried to add some files via the Infura IPFS service.
I wrote js code with js-ipfs and I made it finally :stuck_out_tongue:
but it was only for small size mp4 files. when I was going to do for little more big files, I got this error message from infura.

request.js:79 POST https://ipfs.infura.io:5001/api/v0/add?stream-channels=true 413
u._onFinish @ request.js:79
(anonymous) @ request.js:24
a.emit @ events.js:139
C @ _stream_writable.js:620
I @ _stream_writable.js:466
m.run @ browser.js:169
d @ browser.js:133
setTimeout (async)
u @ browser.js:45
o.nextTick @ browser.js:158
(anonymous) @ _stream_readable.js:886
k.resume @ _stream_readable.js:876
k.on @ _stream_readable.js:794
k.pipe @ _stream_readable.js:658
(anonymous) @ send-files-stream.js:148
(anonymous) @ add.js:34
(anonymous) @ index.js:41
(anonymous) @ index.js:36
(anonymous) @ add.js:53
sendIpfs @ (index):95
onclick @ (index):33
send-request.js:22 Uncaught (in promise) Error: Payload Too Large

    at send-request.js:22
    at t.exports.<anonymous> (send-request.js:64)
    at t.exports.a.emit (events.js:134)
    at t.exports.u._connect (request.js:102)
    at request.js:89

I think this message occurs when the files I try to upload are bigger than 100MB.
any idea for this?

<script src="https://unpkg.com/ipfs/dist/index.min.js"></script>
<script src="https://unpkg.com/ipfs-http-client/dist/index.min.js"></script>

<script type="text/javascript">
...
  var buf;  // file contents as an Ipfs.Buffer; filled in by loadFile()'s FileReader.onload
  const reader = new FileReader(); // NOTE(review): appears unused — loadFile() creates its own FileReader, shadowing this one; confirm before removing

  /**
   * Reads the file chosen in the #file input into the module-level `buf`
   * as an Ipfs.Buffer. Asynchronous: `buf` is only valid after the
   * FileReader's onload has fired.
   * @param {Event} event - change event from the file input (unused here).
   */
  var loadFile = function(event) {
    console.log('upload::');

    const fileInput = document.getElementById("file");
    // Guard: clicking "clear" (or cancelling the dialog) leaves no file.
    if (!fileInput.files.length) {
      console.log('no file selected');
      return;
    }

    const reader = new FileReader();
    reader.onload = function() {
      console.log('onload::');
      // Buffer.from is the non-deprecated construction form (and matches
      // the Ipfs.Buffer.from(...) call used later in this code).
      buf = Ipfs.Buffer.from(reader.result);
    };

    reader.readAsArrayBuffer(fileInput.files[0]); // Read provided file
  };

  /**
   * Uploads the previously-loaded `buf` to IPFS via the `ipfs` client and
   * logs the timing plus the resulting hash.
   * @returns {Promise<string|undefined>} the IPFS hash of the added content,
   *   or undefined when no file has been loaded yet.
   */
  async function sendToIpfs() {
    // Guard: loadFile()'s FileReader is asynchronous; if the user clicks
    // "send" before onload has fired, `buf` is still undefined and the
    // original code crashed on buf.length below.
    if (!buf) {
      console.log('no file loaded yet');
      return;
    }

    var start = new Date();
    console.log("start time: ", start)
    const results = await ipfs.add(buf);
    console.log(results);
    var end = new Date();
    console.log('interval: ', (end - start) / 1000, 'size:', buf.length / 1000, 'kb')

    const hash = results[0].hash;
    console.log(hash);
    return hash;
  }
...
</script>

External Image

Thanks for reaching out, currently uploads are limited to 100mb, we will be releasing some big changes to the IPFS API in the next months and will revisit this limit in those releases.

Thank you for your reply

I need to handle files larger than 1GB. Is there any way to transfer a file in split parts? Are there any references?

Not aware of any way to take a >1GB video file and splice it up in <100mb hashes to then stitch back together unfortunately.

Finally I made it work.

I split the big file into chunks with JS code in my browser, and created one IpfsHttpClient session per chunk; this makes it possible to upload or download files larger than 100MB.

Even though I had enough physical bandwidth, the more sessions I created, the slower each individual session became (although not exactly 1/n).
I think this is because JavaScript runs on a single thread, so issuing many add or get calls in parallel does not guarantee more speed.

This is the part of sample code I wrote. it’s a bit messy, because I just intended to test.

  /**
   * Builds the per-chunk IPFS http clients according to the #ipfs_client
   * dropdown, and updates the page title.
   *
   * sendIpfs()/getIpfs() always index the clients as ipfs[i], one per
   * chunk, so BOTH branches must populate an array.
   */
  chooseClient = function() {
    var selectedClient = $('#ipfs_client').val();
    console.log(selectedClient);

    if (selectedClient == 'infura') {
      // One http-client per chunk, so each chunk uploads on its own session.
      for (var i = 0; i < numOfChunks; i++) {
        ipfs[i] = window.IpfsHttpClient({
          host: 'ipfs.infura.io',
          port: 5001,
          protocol: 'https'
        });
      }
      $('#title').html('IPFS Client TEST (Infura)');
    } else if (selectedClient == 'private_gateway') {
      // BUG FIX: the original assigned a single client object here
      // (ipfs = IpfsHttpClient(...)), but sendIpfs()/getIpfs() index
      // ipfs[i] — the private gateway therefore needs the same
      // one-client-per-chunk array as the Infura branch.
      for (var i = 0; i < numOfChunks; i++) {
        ipfs[i] = window.IpfsHttpClient({
          host: 'devipfs.nemodax.com',
          port: 5001,
          protocol: 'https'
        });
      }
      $('#title').html('IPFS Client TEST (devipfs Private Gateway)');
    }
  }


/**
 * Uploads the file as numOfChunks chunks in parallel, one IPFS client per
 * chunk. When every chunk has been added, a comma-separated "chunk map"
 * of the chunk directory hashes is itself added to IPFS; its hash is the
 * index the user passes to getIpfs() to reassemble the file.
 *
 * Relies on file-level state: numOfChunks, uploadBuf[], ipfs[], fullLength.
 */
async function sendIpfs() {

    var uploadStartTime = new Date();
    console.log("start upload (api): ", uploadStartTime)

    // Last path component of the file input's value ("C:\fakepath\name.ext"
    // in most browsers). .pop() is robust regardless of how many path
    // separators the browser reports; the original hard-coded index [2].
    var path = $("#file").val().split("\\").pop()

    results = new Array(numOfChunks)
    hashBuffer = new Array(numOfChunks)
    var uploadedChunkCnt = 0;

    for (let i = 0; i < numOfChunks; i++) {
      // BUG FIX: these were `var`, which is function-scoped — every
      // .then() callback below saw the LAST iteration's chunkStartTime,
      // so all per-chunk timings were computed against the wrong start.
      // `let`/`const` give each iteration its own binding.
      const chunkStartTime = new Date();
      console.log("start upload chunk[", i, "](api): ", chunkStartTime)

      ipfs[i].add([{
        path: path + '.nemo.' + i,           // chunk-number suffix lets us
        content: uploadBuf[i],               // reorder out-of-order replies
      }], {
        recursive: true,
        wrapWithDirectory: true
      }).then((res) => {
        // Recover the chunk number from the '.nemo.<n>' path suffix,
        // because replies may arrive in any order.
        var token = res[0].path.split('.')
        var chunkNum = token[token.length - 1]
        results[chunkNum] = res;
        console.log(results[chunkNum]);

        var chunkInterval = new Date() - chunkStartTime // ms
        var chunkSize = uploadBuf[chunkNum].length // byte
        var speed = chunkSize / chunkInterval // byte/ms == kb/s
        console.log('interval upload chunk[', chunkNum, '](api): ', chunkInterval / 1000, 'size:', chunkSize / 1000, 'kb', speed, 'kb/s')

        // res[1] is the wrapping directory entry (wrapWithDirectory: true),
        // whose hash preserves the original chunk file name.
        hashBuffer[chunkNum] = results[chunkNum][1].hash;
        console.log(hashBuffer[chunkNum]);

        uploadedChunkCnt++;
        if (uploadedChunkCnt == numOfChunks) {
          // All chunks are in: publish the comma-separated chunk map as
          // the index object.
          var chunkMap = ""
          for (var j = 0; j < numOfChunks; j++) {
            chunkMap = chunkMap + hashBuffer[j] + ','
          }
          ipfs[0].add(Ipfs.Buffer.from(chunkMap)).then((res) => {
            indexHash = res[0].hash
            var uploadFileSize = fullLength // byte
            var uploadInterval = new Date() - uploadStartTime // ms
            var uploadSpeed = uploadFileSize / uploadInterval // byte/ms == kb/s
            console.log('* upload hash:', indexHash)
            console.log('* chunkMap:', chunkMap);
            console.log('* upload path:', path)
            console.log('* upload size:', formatSizeUnits(uploadFileSize))
            console.log('* upload speed:', uploadSpeed, 'kb/s')
            console.log('* upload interval:', uploadInterval / 1000, 'sec')
          });
        }
      })
    }
}


/**
 * Downloads a chunked file: cats the index object (the comma-separated
 * chunk map written by sendIpfs), fetches every chunk in parallel with
 * one client each, reassembles them in chunk order into a Blob, and
 * appends a download link to the page.
 *
 * Relies on file-level state: numOfChunks, ipfs[], downloadBuf[],
 * formatSizeUnits().
 */
async function getIpfs() {

    const validCID = document.getElementById('downloadHash').value

    $('#get_ipfs').parent().append("<br> <span> it's been downloading, please wait... </span>");

    var downloadStartTime = new Date();
    console.log("start download(api): ", downloadStartTime)

    // The index object is "<hash0>,<hash1>,...,<hashN-1>," — the trailing
    // comma produces one empty tail entry, harmless because the loop
    // below stops at numOfChunks.
    var res = await ipfs[0].cat(validCID)
    var chunkMapArr = res.toString().split(',');
    var receivedChunkCnt = 0;
    var fileObject = new Array(numOfChunks)

    console.log("chunkMapArr", chunkMapArr)

    for (let i = 0; i < numOfChunks; i++) {
      // BUG FIX: was `var` — function-scoped, so every .then() callback
      // read the LAST iteration's chunkStartTime, corrupting the
      // per-chunk timing logs. let/const bind per iteration.
      const chunkStartTime = new Date();
      console.log("start download chunk[", i, "](api): ", chunkStartTime)

      ipfs[i].get(chunkMapArr[i]).then((files) => {
        console.log(files)
        files.forEach((file) => {
          // get() also yields the wrapping directory, which has no
          // content — skip it and only process the chunk file itself.
          if (typeof(file.content) != 'undefined') {
            console.log(file.path)
            var fileName = file.path.split('/')[1]
            var token = fileName.split('.')
            // BUG FIX: chunkNum/mergedContent leaked as implicit globals.
            var chunkNum = token[token.length - 1]
            fileObject[chunkNum] = file

            var chunkInterval = new Date() - chunkStartTime // ms
            var chunkSize = file.content.length // byte
            var speed = chunkSize / chunkInterval // byte/ms == kb/s
            console.log('interval download chunk[', chunkNum, '](api): ', chunkInterval / 1000, 'size:', chunkSize / 1000, 'kb', speed, 'kb/s')

            receivedChunkCnt++;
            if (receivedChunkCnt == numOfChunks) {
              // Every chunk arrived: stitch them back together in order.
              var downloadFileSize = 0
              for (var j = 0; j < numOfChunks; j++) {
                downloadFileSize += fileObject[j].content.length
                downloadBuf[j] = fileObject[j].content
              }
              var mergedContent = Ipfs.Buffer.concat(downloadBuf, downloadFileSize)

              var blob = new Blob(
                [mergedContent], {
                  type: 'video/mp4'
                }
              );
              var blobUrl = URL.createObjectURL(blob);

              $('#get_ipfs').parent().append("<a>다운받기</a>");

              $('#get_ipfs ~ a:last')[0].setAttribute('href', blobUrl);
              $('#get_ipfs ~ a:last')[0].innerHTML = '다운받기';
              // Strip the '.nemo.<n>' suffix (7 chars) to restore the
              // original file name for the download attribute.
              $('#get_ipfs ~ a:last')[0].download = fileName.slice(0, fileName.length - 7)

              var downloadInterval = new Date() - downloadStartTime // ms
              var downloadSpeed = downloadFileSize / downloadInterval // byte/ms == kb/s
              console.log('* download hash:', validCID)
              console.log('* chunkMap:', chunkMapArr);
              console.log('* path:', fileObject[0].path)
              console.log('* download size:', formatSizeUnits(downloadFileSize))
              console.log('* download speed:', downloadSpeed, 'kb/s')
              console.log('* download interval:', downloadInterval / 1000, 'sec')
            }
          }
        })
      })
    }
}
1 Like

Very cool, thank you for sharing!