V2ex看到的一道Java编程练习题

原文

原文:求解一道 Java 练习题,请喝一杯星巴克 o(╥﹏╥)o - V2EX

尝试编写了一个,包含文件随机切割以及合并


import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.SeekableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
import java.util.zip.ZipEntry;
import java.util.zip.ZipException;
import java.util.zip.ZipFile;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;

public class Main {
	
	public static final String chunkFilePrefix = "p";
	
	public static final String logFileName = "splitlog.txt";
	
	public static final int EOF = -1;
	
	public static final int defaultBufferSize = 1024 << 2;
	
	
	public static void main(String[] args) throws IOException {
		
		// 切割文件,返回zip
		Path zipFile = split(Paths.get("C:\\Users\\KevinBlandy\\Desktop\\test\\springboot.png"), 10);
		
		// 解压zip,以及合并文件块儿
		merge(zipFile, zipFile.resolveSibling("springboot-merge.png"));
	}
	
	/**
	 * 随机切割文件
	 * @param file		文件
	 * @param chunk		分块数量
	 * @return			包含了文件块以及切割日志的zip文件
	 * @throws IOException
	 */
	public static Path split (Path file, int chunk) throws IOException {
		if(Files.notExists(file)){
			throw new IllegalArgumentException(file.toString() + " 不存在");
		}
		
		// 文件大小
		long fileSize = Files.size(file);
		
		// 空文件
		if (fileSize == 0) {
			return null;
		}
		
		// 每个分块文件的最大大小 = (总大小 / 总个数)* 2
		long chunkMaxSize = (fileSize / chunk) * 2;
		
		// 切割的日志
		Path logFile = file.resolveSibling(logFileName);
		
		// 初始化分块文件
		Path[] chunks = new Path[chunk];
		for (int i = 0; i < chunk; i ++) {
			Path chunkFile = file.resolveSibling(chunkFilePrefix + (i + 1));
			if (Files.notExists(chunkFile)){
				Files.createFile(chunkFile);
			}
			chunks[i] = chunkFile;
		}
		
		// 随机源
		Random random = ThreadLocalRandom.current();
		
		long position = 0;
		// 随机切割
		try(FileChannel fileChannel  = FileChannel.open(file); 
				BufferedWriter logFileWriter = Files.newBufferedWriter(logFile, StandardOpenOption.CREATE)){
			while (true) {
				// 随机选一个分片文件,写入随机字节数据
				
				int ramdonChunk = random.nextInt(chunk);
				
				try(FileChannel chunkFileChannel = FileChannel.open(chunks[ramdonChunk], StandardOpenOption.CREATE, StandardOpenOption.APPEND)){
					long write = fileChannel.transferTo(position, random.nextLong(1, chunkMaxSize + 1), chunkFileChannel);
					if (write == 0) {
						break;
					}
					position += write;
					
					// 写入日志
					logFileWriter.write(String.format("%d\t%d", ramdonChunk + 1, write));
					logFileWriter.newLine();
					logFileWriter.flush();
				}
			}
		}
		
		// 写入到zip文件
		Path zip = file.resolveSibling("files.zip");
		try(ZipOutputStream zipOutputStream = new ZipOutputStream(Files.newOutputStream(zip, StandardOpenOption.CREATE))){
			// 写入分块文件
			for (Path chunkFile : chunks) {
				zipOutputStream.putNextEntry(new ZipEntry(chunkFile.getFileName().toString()));
				Files.copy(chunkFile, zipOutputStream);
			}
			// 写入日志文件
			zipOutputStream.putNextEntry(new ZipEntry(logFile.getFileName().toString()));
			Files.copy(logFile, zipOutputStream);
		}
		
		// 删除分块文件
		for (Path chunkFile : chunks) {
			Files.delete(chunkFile);
		}
		// 删除日志文件
		Files.delete(logFile);
		
		return zip;
	}
	
	/**
	 * 合并文件
	 * @param zipFile		压缩文件
	 * @param targetFile	合并后的目标文件
	 * @throws IOException 
	 * @throws ZipException 
	 */
	public static void merge (Path zip, Path targetFile) throws ZipException, IOException {
		
		// 文件所在目录
		Path dir = zip.getParent();
		
		// 解压所有文件到目录
		try (ZipFile zipFile = new ZipFile(zip.toFile())) {
			try (ZipInputStream zipInputStream = new ZipInputStream(Files.newInputStream(zip))) {
				ZipEntry zipEntry = null;
				while ((zipEntry = zipInputStream.getNextEntry()) != null) {
					String entryName = zipEntry.getName();
					Path entryFile = dir.resolve(entryName);
					if (zipEntry.isDirectory() && !Files.isDirectory(entryFile)) {
						Files.createDirectories(entryFile);
					} else {
						try (InputStream zipEntryInputStream = zipFile.getInputStream(zipEntry)) {
							Files.copy(zipEntryInputStream, entryFile);
						}
					}
				}
			}
		}
		
		// 文件块流
		Map<String, FileChannel> chunkFiles = new HashMap<>();
		
		// 创建目标文件,也是还原后的图片文件
		try(SeekableByteChannel targetFileChannel = Files.newByteChannel(targetFile, StandardOpenOption.CREATE, StandardOpenOption.WRITE); 
				// 读取日志文件
				BufferedReader logReader = Files.newBufferedReader(dir.resolve(logFileName))){
			String line = null;
			while ((line = logReader.readLine()) != null) {
				// 读取每一行
				String[] item = line.split("\t");
				// 块文件
				String fileNumber = item[0];
				// 长度
				long size = Long.parseLong(item[1]);
				
				// 文件流
				FileChannel chunkFileChannel = chunkFiles.computeIfAbsent(fileNumber, key -> {
					try {
						return FileChannel.open(dir.resolve(chunkFilePrefix + fileNumber));
					} catch (IOException e) {
						throw new RuntimeException(e);
					}
				});
				
				// 复制到目标文件
				copyN(chunkFileChannel, targetFileChannel,  size);
			}
		}
		
		// 关闭所有流
		chunkFiles.values().stream().forEach(t -> {
			try {
				t.close();
			} catch (IOException e) {
				throw new RuntimeException(e);
			}
		});
	}
	
	
	// 从src 复制n个字节到dest
	public static int copyN (ReadableByteChannel src, WritableByteChannel dest, long n) throws IOException {
		
		if (n < 0) {
			throw new IllegalArgumentException();
		}

		ByteBuffer buffer = ByteBuffer.allocate((int) Math.min(n, defaultBufferSize));

		int readCount = 0;

		while (readCount < n) {
			
			if ((n - readCount) < buffer.capacity()) {
				buffer.limit((int) (n - readCount));
			}
			
			int count = src.read(buffer);
			if (count == EOF) {
				break;
			}
			
			readCount += count;
			
			buffer.flip();
			
			while (buffer.hasRemaining()) {
				dest.write(buffer);	
			}
			
			buffer.clear();
		}
		
		return readCount;
	}
}

本地试了一下,可以正常的随机切割文件以及合并。暂时没发现啥问题还。