
Fix large file upload bug

liubo 2 years ago
parent
commit
063cd7f935

+ 15 - 4
zd-modules/zd-base/src/main/java/com/zd/base/files/bigupload/service/impl/UploadServiceImpl.java

@@ -18,6 +18,7 @@ import java.io.*;
 import java.net.UnknownHostException;
 import java.rmi.ServerException;
 import java.util.*;
+import java.util.concurrent.TimeUnit;
 
 
 /**
@@ -100,11 +101,19 @@ public class UploadServiceImpl implements IUploadService {
     @Override
     public String mergeChunk(String identifier, String fileName, Integer totalChunks) throws IOException {
         String suffix = fileName.substring(fileName.lastIndexOf("."));
+        String key = FILE_PREFIX + identifier;
         if(null==suffix){
+            redisTemplate.opsForHash().delete(key);
             throw new RuntimeException("文件格式有误");
         }
-        fileName= UUID.randomUUID().toString()+suffix;
-        return mergeChunks(identifier, fileName, totalChunks);
+        String fileUrl = (String) redisTemplate.opsForHash().get(key, "fileUrl");
+        if(fileUrl == null){
+            fileName= UUID.randomUUID().toString()+suffix;
+            fileUrl = mergeChunks(identifier, fileName, totalChunks);
+            redisTemplate.opsForHash().put(key, "fileUrl", fileUrl);
+        }
+
+        return fileUrl;
     }
 
     /**
@@ -202,6 +211,7 @@ public class UploadServiceImpl implements IUploadService {
      */
     private synchronized long saveToRedis(FileChunkDTO chunkDTO) {
         Set<Integer> uploaded = (Set<Integer>) redisTemplate.opsForHash().get(FILE_PREFIX + chunkDTO.getIdentifier(), "uploaded");
+        String key = FILE_PREFIX + chunkDTO.getIdentifier();
         if (uploaded == null) {
             uploaded = new HashSet<>(Arrays.asList(chunkDTO.getChunkNumber()));
             HashMap<String, Object> objectObjectHashMap = new HashMap<>();
@@ -209,10 +219,11 @@ public class UploadServiceImpl implements IUploadService {
             objectObjectHashMap.put("totalChunks", chunkDTO.getTotalChunks());
             objectObjectHashMap.put("totalSize", chunkDTO.getTotalSize());
             objectObjectHashMap.put("path", chunkDTO.getFilename());
-            redisTemplate.opsForHash().putAll(FILE_PREFIX + chunkDTO.getIdentifier(), objectObjectHashMap);
+            redisTemplate.opsForHash().putAll(key, objectObjectHashMap);
+            redisTemplate.expire(key, 5, TimeUnit.DAYS);
         } else {
             uploaded.add(chunkDTO.getChunkNumber());
-            redisTemplate.opsForHash().put(FILE_PREFIX + chunkDTO.getIdentifier(), "uploaded", uploaded);
+            redisTemplate.opsForHash().put(key, "uploaded", uploaded);
         }
         return uploaded.size();
     }
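
For reference, below is a minimal standalone sketch (not the project's code) of the pattern this commit applies: the merged file URL is cached in the same Redis hash that tracks the uploaded chunks, so a repeated merge request for the same identifier returns the cached URL instead of generating a new UUID file name and merging the chunks a second time, and the hash is given a 5-day TTL so abandoned upload state eventually expires. The FILE_PREFIX value, the class shell, and the RedisTemplate wiring are assumptions for illustration; mergeChunks here is only a placeholder for the real chunk-concatenation logic in UploadServiceImpl.

import java.io.IOException;
import java.util.UUID;
import java.util.concurrent.TimeUnit;

import org.springframework.data.redis.core.RedisTemplate;

public class IdempotentMergeSketch {

    // Assumed key prefix; the real service defines its own FILE_PREFIX constant.
    private static final String FILE_PREFIX = "file:";

    private final RedisTemplate<String, Object> redisTemplate;

    public IdempotentMergeSketch(RedisTemplate<String, Object> redisTemplate) {
        this.redisTemplate = redisTemplate;
    }

    public String mergeOnce(String identifier, String fileName, int totalChunks) throws IOException {
        String key = FILE_PREFIX + identifier;

        // If this identifier was already merged, return the cached URL instead of merging again.
        String fileUrl = (String) redisTemplate.opsForHash().get(key, "fileUrl");
        if (fileUrl != null) {
            return fileUrl;
        }

        // First merge: store the file under a random name, cache the resulting URL,
        // and let the whole upload-state hash expire so stale entries are cleaned up.
        String suffix = fileName.substring(fileName.lastIndexOf('.'));
        String storedName = UUID.randomUUID() + suffix;
        fileUrl = mergeChunks(identifier, storedName, totalChunks);
        redisTemplate.opsForHash().put(key, "fileUrl", fileUrl);
        redisTemplate.expire(key, 5, TimeUnit.DAYS);
        return fileUrl;
    }

    // Stand-in for UploadServiceImpl#mergeChunks, which concatenates the uploaded chunk files.
    private String mergeChunks(String identifier, String fileName, int totalChunks) throws IOException {
        return "/upload/" + fileName;
    }
}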