Work on parsing Chunked transfer encoding

This commit is contained in:
serega6531
2020-04-14 02:07:29 +03:00
parent 8157a2aa51
commit ba5ec0e86d
3 changed files with 141 additions and 10 deletions
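
For reference, the wire format this commit parses (HTTP/1.1 chunked transfer coding, RFC 7230): each chunk is a hexadecimal size line terminated by \r\n, followed by that many bytes of payload and another \r\n; a zero-size chunk and a final empty line terminate the body. A minimal illustrative response (header and payload values made up):

HTTP/1.1 200 OK
Transfer-Encoding: chunked

4\r\n
Wiki\r\n
5\r\n
pedia\r\n
0\r\n
\r\n

Reassembled, the body reads "Wikipedia".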


@@ -16,6 +16,8 @@ public class CtfService {
private String name;
private boolean processChunkedEncoding;
private boolean ungzipHttp;
private boolean urldecodeHttpRequests;


@@ -57,7 +57,7 @@ public class FilePcapWorker extends AbstractPcapWorker {
final Packet packet = pcap.getNextPacketEx();
gotPacket(packet);
} catch (PcapNativeException e) {
log.error("Pcap read", e);
log.error("Pcap read error: {}", e.getMessage());
Thread.sleep(100);
} catch (EOFException e) {
stop();


@@ -14,8 +14,10 @@ import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.zip.GZIPInputStream;
import java.util.zip.ZipException;
@@ -32,6 +34,10 @@ public class StreamOptimizer {
* Call this to run the optimizations on the provided list of packets.
*/
public List<Packet> optimizeStream() {
if (service.isProcessChunkedEncoding()) {
processChunkedEncoding();
}
if (service.isUngzipHttp()) {
unpackGzip();
}
@@ -92,10 +98,7 @@ public class StreamOptimizer {
final boolean webSocketParsed = cut.stream().anyMatch(Packet::isWebSocketParsed);
boolean incoming = cut.get(0).isIncoming();
//noinspection OptionalGetWithoutIsPresent
final byte[] content = cut.stream()
.map(Packet::getContent)
.reduce(ArrayUtils::addAll)
.get();
final byte[] content = mergePackets(cut).get();
packets.removeAll(cut);
packets.add(start, Packet.builder()
@@ -135,6 +138,129 @@ public class StreamOptimizer {
}
}
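/**
 * Scan outgoing packets for HTTP responses with a Transfer-Encoding: chunked header,
 * accumulate the packets of each such response and merge them into a single packet
 * once the terminating zero-size chunk has been seen.
 */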
private void processChunkedEncoding() {
boolean chunkStarted = false;
int start = -1;
List<Packet> chunk = new ArrayList<>();
for (int i = 0; i < packets.size(); i++) {
Packet packet = packets.get(i);
if (!packet.isIncoming()) {
String content = packet.getContentString();
boolean http = content.startsWith("HTTP/");
int contentPos = content.indexOf("\r\n\r\n");
if (http && contentPos != -1) { // start of the body
String headers = content.substring(0, contentPos + 2); // include the first \r\n
boolean chunked = headers.contains("Transfer-Encoding: chunked\r\n");
if (chunked) {
chunkStarted = true;
start = i;
chunk.add(packet);
if (checkCompleteChunk(chunk, start)) {
chunkStarted = false;
chunk.clear();
}
} else {
chunkStarted = false;
chunk.clear();
}
} else if (chunkStarted) {
chunk.add(packet);
if (checkCompleteChunk(chunk, start)) {
chunkStarted = false;
chunk.clear();
}
}
}
}
}
/**
* @return true if the chunk is complete
*/
private boolean checkCompleteChunk(List<Packet> chunk, int start) {
boolean end = chunk.get(chunk.size() - 1).getContentString().endsWith("\r\n0\r\n\r\n");
if (end) {
//noinspection OptionalGetWithoutIsPresent
final byte[] content = mergePackets(chunk).get();
ByteArrayOutputStream output = new ByteArrayOutputStream(content.length);
final int contentStart = Bytes.indexOf(content, "\r\n\r\n".getBytes()) + 4;
output.write(content, 0, contentStart);
final byte[] body = Arrays.copyOfRange(content, contentStart, content.length);
int currentPos = 0;
while (true) {
final String found = readChunkSize(body, currentPos);
if (found != null) {
final int chunkSize = Integer.parseInt(found, 16);
if (chunkSize == 0) { // end of the chunk stream
output.write('\r');
output.write('\n');
Packet result = Packet.builder()
.incoming(false)
.timestamp(chunk.get(0).getTimestamp())
.ungzipped(false)
.webSocketParsed(false)
.content(output.toByteArray())
.build();
packets.removeAll(chunk);
packets.add(start, result);
return true;
}
currentPos += found.length() + 2;
if (currentPos + chunkSize >= body.length) {
log.warn("Failed to merge chunks, chunk size too big: {} + {} > {}", currentPos, chunkSize, body.length);
return true; // reset the chunk list but do not replace the packets
}
output.write(body, currentPos, chunkSize);
currentPos += chunkSize;
if (currentPos + 2 >= body.length || body[currentPos] != '\r' || body[currentPos + 1] != '\n') {
log.warn("Failed to merge chunks, chunk doesn't end with \\r\\n");
return true; // reset the chunk list but do not replace the packets
}
currentPos += 2;
} else {
log.warn("Failed to merge chunks, next chunk size not found");
return true; // reset the chunk list but do not replace the packets
}
}
}
return false;
}
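/**
 * Collect hexadecimal digit characters (0-9, a-f) starting at {@code start} and return them
 * as a string once a \r\n terminator is reached, or null if no terminator is found.
 */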
private String readChunkSize(byte[] content, int start) {
StringBuilder sb = new StringBuilder();
for (int i = start; i < content.length - 1; i++) {
byte b = content[i];
if ((b >= '0' && b <= '9') || (b >= 'a' && b <= 'f')) {
sb.append((char) b);
} else if (b == '\r' && content[i + 1] == '\n') {
return sb.toString();
}
}
return null;
}
/**
* Try to unpack GZIP from outgoing HTTP packets. <br>
* The GZIP stream starts at the detected HTTP packet with the Content-Encoding: gzip header
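
For comparison, a minimal standalone sketch of the same reassembly idea, assuming the input already starts at the first chunk-size line; it is not part of this commit, and the names (DechunkSketch, dechunk, indexOfCrLf) are illustrative only:

import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;

public class DechunkSketch {

    public static void main(String[] args) {
        byte[] body = "4\r\nWiki\r\n5\r\npedia\r\n0\r\n\r\n".getBytes(StandardCharsets.US_ASCII);
        System.out.println(new String(dechunk(body), StandardCharsets.US_ASCII)); // prints "Wikipedia"
    }

    // Walk the chunked body: read a hex size line, copy that many payload bytes,
    // skip the trailing \r\n, stop at the zero-size chunk.
    static byte[] dechunk(byte[] body) {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        int pos = 0;
        while (true) {
            int lineEnd = indexOfCrLf(body, pos);
            if (lineEnd == -1) {
                throw new IllegalArgumentException("Chunk size line is not terminated with \\r\\n");
            }
            int size = Integer.parseInt(new String(body, pos, lineEnd - pos, StandardCharsets.US_ASCII), 16);
            pos = lineEnd + 2;            // skip the \r\n after the size line
            if (size == 0) {
                return out.toByteArray(); // zero-size chunk terminates the stream
            }
            out.write(body, pos, size);   // copy the chunk payload
            pos += size + 2;              // skip the payload and its trailing \r\n
        }
    }

    static int indexOfCrLf(byte[] data, int from) {
        for (int i = from; i < data.length - 1; i++) {
            if (data[i] == '\r' && data[i + 1] == '\n') {
                return i;
            }
        }
        return -1;
    }
}

The sketch drops the HTTP headers entirely, whereas checkCompleteChunk above preserves them in the merged packet.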
@@ -206,10 +332,7 @@ public class StreamOptimizer {
private Packet decompressGzipPackets(List<Packet> cut) {
//noinspection OptionalGetWithoutIsPresent
final byte[] content = cut.stream()
.map(Packet::getContent)
.reduce(ArrayUtils::addAll)
.get();
final byte[] content = mergePackets(cut).get();
final int gzipStart = Bytes.indexOf(content, GZIP_HEADER);
byte[] httpHeader = Arrays.copyOfRange(content, 0, gzipStart);
@@ -252,4 +375,10 @@ public class StreamOptimizer {
packets = parser.getParsedPackets();
}
private Optional<byte[]> mergePackets(List<Packet> cut) {
return cut.stream()
.map(Packet::getContent)
.reduce(ArrayUtils::addAll);
}
}
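
A possible illustration of what the new mergePackets helper relies on, not part of this commit: Stream.reduce with ArrayUtils::addAll concatenates the byte arrays in encounter order and yields Optional.empty() for an empty list, which is why the call sites suppress OptionalGetWithoutIsPresent and call get() on the assumption that the list is never empty.

import java.util.List;
import java.util.Optional;
import org.apache.commons.lang3.ArrayUtils;

public class MergeSketch {
    public static void main(String[] args) {
        List<byte[]> parts = List.of(new byte[]{1, 2}, new byte[]{3});
        // Pairwise concatenation in list order; an empty list would give Optional.empty().
        Optional<byte[]> merged = parts.stream().reduce(ArrayUtils::addAll);
        System.out.println(merged.map(m -> m.length).orElse(0)); // prints 3
    }
}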