Refactoring

serega6531
2020-04-15 21:11:51 +03:00
parent 070f6b7673
commit 7dd4c6f468

HttpChunksProcessor.java

@@ -1,6 +1,6 @@
 package ru.serega6531.packmate.service.optimization;
 
-import lombok.AllArgsConstructor;
+import lombok.RequiredArgsConstructor;
 import lombok.SneakyThrows;
 import lombok.extern.slf4j.Slf4j;
 import ru.serega6531.packmate.model.Packet;
@@ -14,18 +14,20 @@ import java.util.Arrays;
 import java.util.List;
 
 @Slf4j
-@AllArgsConstructor
+@RequiredArgsConstructor
 public class HttpChunksProcessor {
 
     private final List<Packet> packets;
 
-    public void processChunkedEncoding() {
-        boolean chunkStarted = false;
-        int start = -1;
-        List<Packet> chunk = new ArrayList<>();
+    private int position;
+    private boolean chunkStarted = false;
+    private final List<Packet> chunkPackets = new ArrayList<>();
 
-        for (int i = 0; i < packets.size(); i++) {
-            Packet packet = packets.get(i);
+    public void processChunkedEncoding() {
+        int start = -1;
+
+        for (position = 0; position < packets.size(); position++) {
+            Packet packet = packets.get(position);
 
             if (!packet.isIncoming()) {
                 String content = packet.getContentString();
@@ -37,97 +39,100 @@ public class HttpChunksProcessor {
                     boolean chunked = headers.contains("Transfer-Encoding: chunked\r\n");
 
                     if (chunked) {
                         chunkStarted = true;
-                        start = i;
-                        chunk.add(packet);
+                        start = position;
+                        chunkPackets.add(packet);
 
-                        if (checkCompleteChunk(chunk, start)) {
-                            chunkStarted = false;
-                            chunk.clear();
-                            i = start + 1;
-                        }
+                        checkCompleteChunk(chunkPackets, start);
                     } else {
                         chunkStarted = false;
-                        chunk.clear();
+                        chunkPackets.clear();
                     }
                 } else if (chunkStarted) {
-                    chunk.add(packet);
+                    chunkPackets.add(packet);
 
-                    if (checkCompleteChunk(chunk, start)) {
-                        chunkStarted = false;
-                        chunk.clear();
-                        i = start + 1;
-                    }
+                    checkCompleteChunk(chunkPackets, start);
                 }
             }
         }
     }
 
-    /**
-     * @return true if the chunk is complete
-     */
-    @SneakyThrows
-    private boolean checkCompleteChunk(List<Packet> packets, int start) {
+    private void checkCompleteChunk(List<Packet> packets, int start) {
        boolean end = packets.get(packets.size() - 1).getContentString().endsWith("\r\n0\r\n\r\n");
 
        if (end) {
-            //noinspection OptionalGetWithoutIsPresent
-            final byte[] content = PacketUtils.mergePackets(packets).get();
-
-            ByteArrayOutputStream output = new ByteArrayOutputStream(content.length);
-
-            final int contentStart = Bytes.indexOf(content, "\r\n\r\n".getBytes()) + 4;
-            output.write(content, 0, contentStart);
-
-            ByteBuffer buf = ByteBuffer.wrap(Arrays.copyOfRange(content, contentStart, content.length));
-
-            while (true) {
-                final String found = readChunkSize(buf);
-                if (found != null) {
-                    final int chunkSize = Integer.parseInt(found, 16);
-
-                    if (chunkSize == 0) { // end of the chunk stream
-                        Packet result = Packet.builder()
-                                .incoming(false)
-                                .timestamp(packets.get(0).getTimestamp())
-                                .ungzipped(false)
-                                .webSocketParsed(false)
-                                .content(output.toByteArray())
-                                .build();
-
-                        this.packets.removeAll(packets);
-                        this.packets.add(start, result);
-
-                        return true;
-                    }
-
-                    if (chunkSize > buf.remaining()) {
-                        log.warn("Failed to merge chunks, chunk size too big: {} + {} > {}",
-                                buf.position(), chunkSize, buf.capacity());
-                        return true; // reset the list, but don't replace the packets
-                    }
-
-                    byte[] chunk = new byte[chunkSize];
-                    buf.get(chunk);
-                    output.write(chunk);
-
-                    if (buf.remaining() < 2) {
-                        log.warn("Failed to merge chunks, chunk doesn't end with \\r\\n");
-                        return true; // reset the list, but don't replace the packets
-                    }
-
-                    int c1 = buf.get();
-                    int c2 = buf.get();
-                    if(c1 != '\r' || c2 != '\n') {
-                        log.warn("Failed to merge chunks, chunk trailer is not equal to \\r\\n");
-                        return true; // reset the list, but don't replace the packets
-                    }
-                } else {
-                    log.warn("Failed to merge chunks, next chunk size not found");
-                    return true; // reset the list, but don't replace the packets
-                }
-            }
-        }
-
-        return false;
+            processChunk(packets, start);
+        }
+    }
+
+    @SneakyThrows
+    private void processChunk(List<Packet> packets, int start) {
+        //noinspection OptionalGetWithoutIsPresent
+        final byte[] content = PacketUtils.mergePackets(packets).get();
+
+        ByteArrayOutputStream output = new ByteArrayOutputStream(content.length);
+
+        final int contentStart = Bytes.indexOf(content, "\r\n\r\n".getBytes()) + 4;
+        output.write(content, 0, contentStart);
+
+        ByteBuffer buf = ByteBuffer.wrap(Arrays.copyOfRange(content, contentStart, content.length));
+
+        while (true) {
+            final String found = readChunkSize(buf);
+            if (found != null) {
+                final int chunkSize = Integer.parseInt(found, 16);
+
+                if (chunkSize == 0) { // end of the chunk stream
+                    Packet result = Packet.builder()
+                            .incoming(false)
+                            .timestamp(packets.get(0).getTimestamp())
+                            .ungzipped(false)
+                            .webSocketParsed(false)
+                            .content(output.toByteArray())
+                            .build();
+
+                    this.packets.removeAll(packets);
+                    this.packets.add(start, result);
+                    resetChunk();
+                    position = start + 1;
+
+                    return;
+                }
+
+                if (chunkSize > buf.remaining()) {
+                    log.warn("Failed to merge chunks, chunk size too big: {} + {} > {}",
+                            buf.position(), chunkSize, buf.capacity());
+                    resetChunk();
+                    return;
+                }
+
+                byte[] chunk = new byte[chunkSize];
+                buf.get(chunk);
+                output.write(chunk);
+
+                if (buf.remaining() < 2) {
+                    log.warn("Failed to merge chunks, chunk doesn't end with \\r\\n");
+                    resetChunk();
+                    return;
+                }
+
+                int c1 = buf.get();
+                int c2 = buf.get();
+                if(c1 != '\r' || c2 != '\n') {
+                    log.warn("Failed to merge chunks, chunk trailer is not equal to \\r\\n");
+                    resetChunk();
+                    return;
+                }
+            } else {
+                log.warn("Failed to merge chunks, next chunk size not found");
+                resetChunk();
+                return;
+            }
+        }
+    }
+
+    private void resetChunk() {
+        chunkStarted = false;
+        chunkPackets.clear();
    }
 
    private String readChunkSize(ByteBuffer buf) {
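
Note (not part of the commit): the data that processChunkedEncoding() and processChunk() reassemble is HTTP/1.1 chunked transfer encoding, where each chunk is prefixed by its size in hexadecimal and the body ends with a zero-size chunk. Below is a minimal, hypothetical sketch of that wire format; the class name ChunkedBodyExample and the sample payload are invented for illustration, while the trailing "\r\n0\r\n\r\n" sequence is exactly the terminator that checkCompleteChunk waits for.

// Hypothetical illustration, not project code: shows the wire format that
// HttpChunksProcessor merges back into a single Packet.
public class ChunkedBodyExample {
    public static void main(String[] args) {
        String headers = "HTTP/1.1 200 OK\r\n"
                + "Transfer-Encoding: chunked\r\n"
                + "\r\n";              // blank line separates headers from body
        String body = "4\r\n"          // chunk size in hex: 4 bytes follow
                + "Wiki\r\n"
                + "5\r\n"              // next chunk: 5 bytes
                + "pedia\r\n"
                + "0\r\n\r\n";         // zero-size chunk ends the stream
        // After de-chunking, the headers are kept as-is and the merged body is "Wikipedia".
        System.out.println(headers + body);
    }
}
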