上传插件,不包含上传的前端实现,只提供后端接口等,其他地方接入插件上传。包括上传进度、断点续传等

utils.js 1.5KB

  1. import SparkMD5 from 'spark-md5'
  2. export function getFilemd5sum(ofile) {
  3. return new Promise((resolve, reject) => {
  4. let file = ofile;
  5. let tmp_md5;
  6. let blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
  7. // file = this.files[0],
  8. chunkSize = 8097152, // Read in chunks of 2MB
  9. chunks = Math.ceil(file.size / chunkSize),
  10. currentChunk = 0,
  11. spark = new SparkMD5.ArrayBuffer(),
  12. fileReader = new FileReader();
  13. fileReader.onload = function (e) {
  14. spark.append(e.target.result); // Append array buffer
  15. currentChunk++;
  16. if (currentChunk < chunks) {
  17. loadNext();
  18. } else {
  19. tmp_md5 = spark.end();
  20. resolve(tmp_md5)
  21. }
  22. };
  23. fileReader.onerror = function () {
  24. reject('error');
  25. };
  26. loadNext = () => {
  27. var start = currentChunk * chunkSize,
  28. end = ((start + chunkSize) >= file.size) ? file.size : start + chunkSize;
  29. fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
  30. }
  31. loadNext();
  32. })
  33. }
  34. export function getBase64(file) {
  35. return new Promise((resolve, reject) => {
  36. var reader = new FileReader();
  37. reader.readAsDataURL(file);
  38. reader.onload = function (e) {
  39. resolve(e.target.result);
  40. }
  41. reader.onerror = function() {
  42. reject('error')
  43. }
  44. });
  45. }