上传插件:不包含上传的前端实现,只提供后端接口等,由其他模块接入该插件完成上传,支持上传进度、断点续传等功能。

  1. 'use strict';
  2. Object.defineProperty(exports, "__esModule", {
  3. value: true
  4. });
  5. exports.default = getFilemd5sum;
  6. var _sparkMd = require('spark-md5');
  7. var _sparkMd2 = _interopRequireDefault(_sparkMd);
  8. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
  9. function getFilemd5sum(ofile) {
  10. return new Promise(function (resolve, reject) {
  11. var file = ofile;
  12. var tmp_md5 = void 0;
  13. var blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
  14. // file = this.files[0],
  15. chunkSize = 8097152,
  16. // Read in chunks of 2MB
  17. chunks = Math.ceil(file.size / chunkSize),
  18. currentChunk = 0,
  19. spark = new _sparkMd2.default.ArrayBuffer(),
  20. fileReader = new FileReader();
  21. function loadNext() {
  22. var start = currentChunk * chunkSize,
  23. end = start + chunkSize >= file.size ? file.size : start + chunkSize;
  24. fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
  25. }
  26. fileReader.onload = function (e) {
  27. spark.append(e.target.result); // Append array buffer
  28. currentChunk++;
  29. if (currentChunk < chunks) {
  30. loadNext();
  31. } else {
  32. tmp_md5 = spark.end();
  33. resolve(tmp_md5);
  34. }
  35. };
  36. fileReader.onerror = function () {
  37. reject('error');
  38. };
  39. loadNext();
  40. });
  41. }