Skip to content

Commit

Permalink
*v1.0.2* fix Old Chunk false positives in overworld
Browse files Browse the repository at this point in the history
**1.0.2**
***NewerNewChunks Updates:***
- Fixed the false positives in the Overworld (old chunks in the new) that were occurring from structure generation. They are extremely rare now at the tradeoff of now also getting extremely rare new chunks in the old (it's fine, trust me) 
- The above was done by implementing an algorithm for analyzing the percent of chunk sections that appear to be new in chunks that aren't immediately defined as new.
- In testing I realized I didn't actually fix the spawn chunk region always showing up as New Chunks in the last update.
  • Loading branch information
etianl authored Jul 9, 2024
1 parent 7587bda commit 63dfa4b
Show file tree
Hide file tree
Showing 4 changed files with 153 additions and 36 deletions.
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -108,9 +108,9 @@ This will return the lowest block placed with AutoMountain until AutoLavacast is
- The **ByteExploit** option enabled by default detects new chunks by scanning the order of chunk section palettes, and also by checking the capacity of the writer index of chunks.
- The **ByteExploit** does not work in Minecraft servers where their version is less than 1.18. For those servers, disable **ByteExploit** and enable Liquid flow and BlockExploit.
- The **ByteExploit** does not work in flat worlds that are entirely void.
- In the End dimension there are very rare false old chunks in the newchunks (not enough to notice mostly).
- In the End and Overworld dimension there are very rare false old chunks in the newchunks (not enough to notice mostly).
- Chunks appear to be defined as new until the person who generated them has unrendered them.
- In the nether the chunks that stay loaded due to the spawn chunk region always show up as new for some reason.
- The chunks that stay loaded due to the spawn chunk region always show up as new for some reason.

*These next things are to be used if **ByteExploit** doesn't work for you:*
- the **Pre 1.17 OldChunk Detector** detects chunks in the overworld that do not contain copper ore above a certain Y level. This should be used when the .world command returns "This chunk is pre 1.17 generation!" when run at spawn.
Expand Down
2 changes: 1 addition & 1 deletion gradle.properties
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ yarn_mappings=1.21+build.2
loader_version=0.15.11

# Mod Properties
mod_version=1.0.1-1.21
mod_version=1.0.2-1.21
maven_group=pwn.noobs
archives_base_name=1trouser-streak

Expand Down
181 changes: 149 additions & 32 deletions src/main/java/pwn/noobs/trouserstreak/modules/NewerNewChunks.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
package pwn.noobs.trouserstreak.modules;

import io.netty.buffer.Unpooled;
import meteordevelopment.meteorclient.events.game.GameLeftEvent;
import meteordevelopment.meteorclient.events.game.OpenScreenEvent;
import meteordevelopment.meteorclient.events.packets.PacketEvent;
Expand All @@ -16,6 +17,7 @@
import meteordevelopment.meteorclient.utils.render.color.SettingColor;
import meteordevelopment.orbit.EventHandler;
import net.minecraft.block.Block;
import net.minecraft.block.BlockState;
import net.minecraft.block.Blocks;
import net.minecraft.client.gui.screen.DisconnectedScreen;
import net.minecraft.client.gui.screen.DownloadingTerrainScreen;
Expand All @@ -24,9 +26,13 @@
import net.minecraft.network.PacketByteBuf;
import net.minecraft.network.packet.c2s.play.AcknowledgeChunksC2SPacket;
import net.minecraft.network.packet.s2c.play.*;
import net.minecraft.registry.Registry;
import net.minecraft.registry.RegistryKeys;
import net.minecraft.util.Identifier;
import net.minecraft.util.WorldSavePath;
import net.minecraft.util.math.*;
import net.minecraft.world.World;
import net.minecraft.world.biome.Biome;
import net.minecraft.world.chunk.WorldChunk;
import pwn.noobs.trouserstreak.Trouser;

Expand Down Expand Up @@ -440,7 +446,7 @@ private void onPreTick(TickEvent.Pre event) {
oldchunksfound++;
}
} catch (IOException e) {
e.printStackTrace();
//e.printStackTrace();
}
try {
List<String> allLines = Files.readAllLines(Paths.get("TrouserStreak/NewChunks/"+serverip+"/"+world+"/NewChunkData.txt"));
Expand All @@ -449,7 +455,7 @@ private void onPreTick(TickEvent.Pre event) {
newchunksfound++;
}
} catch (IOException e) {
e.printStackTrace();
//e.printStackTrace();
}
try {
List<String> allLines = Files.readAllLines(Paths.get("TrouserStreak/NewChunks/"+serverip+"/"+world+"/BlockExploitChunkData.txt"));
Expand All @@ -458,7 +464,7 @@ private void onPreTick(TickEvent.Pre event) {
tickexploitchunksfound++;
}
} catch (IOException e) {
e.printStackTrace();
//e.printStackTrace();
}
}
}
Expand Down Expand Up @@ -576,7 +582,7 @@ else if (!(event.packet instanceof AcknowledgeChunksC2SPacket) && event.packet i
}
}
catch (Exception e){
e.printStackTrace();
//e.printStackTrace();
}
}
if (!packet.getState().getFluidState().isEmpty() && !packet.getState().getFluidState().isStill() && liquidexploit.get()) {
Expand Down Expand Up @@ -607,28 +613,35 @@ else if (event.packet instanceof ChunkDataS2CPacket && mc.world != null) {
int widx = buf.writerIndex();
if ((mc.world.getRegistryKey() == World.OVERWORLD || mc.world.getRegistryKey() == World.NETHER || mc.world.getRegistryKey() == World.END) && byteexploit.get()) {
boolean isNewChunk = false;
if (mc.world.getRegistryKey() == World.NETHER) {
if (buf.readableBytes() < 3) return; // Ensure we have at least 3 bytes (short + byte)

if (mc.world.getRegistryKey() == World.END){
//if (!newChunkWidxValues.contains(widx)) System.out.println("widx: " + widx);
buf.readShort();
int blockBitsPerEntry = buf.readUnsignedByte();

if (blockBitsPerEntry >= 4 && blockBitsPerEntry <= 8) {
int blockPaletteLength = buf.readVarInt();
//System.out.println("Block palette length: " + blockPaletteLength);
int blockPaletteEntry = buf.readVarInt();
if (blockPaletteEntry == 0) isNewChunk = true;
//System.out.println("Block palette entry " + i + ": " + blockPaletteEntry);
}
} else if (mc.world.getRegistryKey() == World.END){
if (newChunkWidxValues.contains(widx)) isNewChunk = true;

if (buf.readableBytes() < 1) return; // Ensure we have at least 3 bytes (short + byte)
if (buf.readableBytes() < 3) return; // Ensure we have at least 3 bytes (short + byte)

buf.readShort(); // Skip block count
int blockBitsPerEntry = buf.readUnsignedByte(); //Continue reading all the data to consume it

// Block palette
int blockBitsPerEntry = buf.readUnsignedByte();
if (blockBitsPerEntry >= 4 && blockBitsPerEntry <= 8) {
// Indirect palette
if (blockBitsPerEntry >= 4 && blockBitsPerEntry <= 8) { // Indirect palette
int blockPaletteLength = buf.readVarInt();
//System.out.println("Block palette length: " + blockPaletteLength);
for (int i = 0; i < blockPaletteLength; i++) {
int blockPaletteEntry = buf.readVarInt();
//System.out.println("Block palette entry " + i + ": " + blockPaletteEntry);
}

// Skip block data array
int blockDataArrayLength = buf.readVarInt();
int bytesToSkip = blockDataArrayLength * 8; // Each entry is a long (8 bytes)
if (buf.readableBytes() >= bytesToSkip) {
Expand All @@ -646,37 +659,141 @@ else if (event.packet instanceof ChunkDataS2CPacket && mc.world != null) {
return;
}

// Biome palette
int biomeBitsPerEntry = buf.readUnsignedByte();
if (biomeBitsPerEntry >= 0 && biomeBitsPerEntry <= 3) {
// Indirect palette

if (biomeBitsPerEntry >= 1 && biomeBitsPerEntry <= 3) { // Indirect palette
int biomePaletteLength = buf.readVarInt();
//System.out.println("Biome palette length: " + biomePaletteLength);

int biomePaletteEntry = buf.readVarInt();
if (biomePaletteEntry != 0) isNewChunk = true;
//System.out.println("Biome palette entry " + i + ": " + biomePaletteEntry);
} else {
//System.out.println("Invalid biome bits per entry: " + biomeBitsPerEntry);
return;
}
} else if (mc.world.getRegistryKey() == World.OVERWORLD) {
PacketByteBuf bufferCopy = new PacketByteBuf(Unpooled.copiedBuffer(buf.nioBuffer())); //copy the packetByteBuf for later use
if (buf.readableBytes() < 3) return; // Ensure we have at least 3 bytes (short + byte)

} else {
if (buf.readableBytes() < 1) return; // Ensure we have at least 3 bytes (short + byte)

buf.readShort();

buf.readShort(); // Skip block count

// Block palette
int blockBitsPerEntry = buf.readUnsignedByte();
if (blockBitsPerEntry >= 4 && blockBitsPerEntry <= 8) {
// Indirect palette
int blockPaletteLength = buf.readVarInt();
//System.out.println("Block palette length: " + blockPaletteLength);
int blockPaletteEntry = buf.readVarInt();
if (blockPaletteEntry == 0) isNewChunk = true;
//System.out.println("Block palette entry " + i + ": " + blockPaletteEntry);
}
if (isNewChunk==false) { //If the chunk isn't immediately new, then process it further to really determine if it's new
if (bufferCopy.readableBytes() < 3) return;
int loops = 0;
int newChunkQuantifier = 0;
try {
while (bufferCopy.readableBytes() > 0) {
bufferCopy.readShort();
int blockBitsPerEntry2 = bufferCopy.readUnsignedByte();
if (blockBitsPerEntry2 == 0) {
int blockPaletteEntry = bufferCopy.readVarInt();
//BlockState blockState = Block.STATE_IDS.get(blockPaletteEntry);
//System.out.println(chunk.getPos()+ " || Single Block palette entry: " + blockPaletteEntry + " (" + blockState + ")");
}
else if (blockBitsPerEntry2 >= 4 && blockBitsPerEntry2 <= 8) {
int blockPaletteLength = bufferCopy.readVarInt();
//System.out.println("Block palette length: " + blockPaletteLength);
int isNewSection = 0;
for (int i = 0; i < blockPaletteLength; i++) {
int blockPaletteEntry = bufferCopy.readVarInt();
if (i == 0 && blockPaletteEntry == 0) isNewSection++;
if (i == 1 && (blockPaletteEntry == 80 || blockPaletteEntry == 1 || blockPaletteEntry == 9 || blockPaletteEntry == 5781)) isNewSection++;
if (i == 2 && (blockPaletteEntry == 5781 || blockPaletteEntry == 10 || blockPaletteEntry == 22318)) isNewSection++;
//BlockState blockState = Block.STATE_IDS.get(blockPaletteEntry);
//System.out.println(chunk.getPos()+ " || Block palette entry " + i + ": " + blockPaletteEntry + " (" + blockState + ")");
}
if (isNewSection>=2)newChunkQuantifier++;
loops++;

int blockDataArrayLength = bufferCopy.readVarInt();
int bytesToSkip = blockDataArrayLength * 8;
if (bufferCopy.readableBytes() >= bytesToSkip) {
bufferCopy.skipBytes(bytesToSkip);
} else {
//System.out.println("Not enough data for block array, skipping remaining: " + bufferCopy.readableBytes());
bufferCopy.skipBytes(bufferCopy.readableBytes());
break;
}
}

if (bufferCopy.readableBytes() < 1) {
//System.out.println("No biome data available");
break;
}

int biomeBitsPerEntry = bufferCopy.readUnsignedByte();
if (biomeBitsPerEntry == 0) {
int biomePaletteEntry = bufferCopy.readVarInt();
/*Registry<Biome> biomeRegistry = mc.world.getRegistryManager().get(RegistryKeys.BIOME);
Biome biome = biomeRegistry.get(biomePaletteEntry);
if (biome != null) {
Identifier biomeId = biomeRegistry.getId(biome);
//System.out.println(chunk.getPos() + " || Single Biome: " + biomeId + biomePaletteEntry);
} else {
//System.out.println(chunk.getPos() + " || Unknown Biome palette entry: " + biomePaletteEntry);
}*/
}
else if (biomeBitsPerEntry >= 1 && biomeBitsPerEntry <= 3) {
int biomePaletteLength = bufferCopy.readVarInt();
//System.out.println("Biome palette length: " + biomePaletteLength);
for (int i = 0; i < biomePaletteLength; i++) {
if (bufferCopy.readableBytes() < 1) {
//System.out.println("Incomplete biome palette data");
break;
}
int biomePaletteEntry = bufferCopy.readVarInt();
/*Registry<Biome> biomeRegistry = mc.world.getRegistryManager().get(RegistryKeys.BIOME);
Biome biome = biomeRegistry.get(biomePaletteEntry);
if (biome != null) {
Identifier biomeId = biomeRegistry.getId(biome);
//System.out.println(chunk.getPos()+ " || Biome palette entry " + i + ": " + biomeId + biomePaletteEntry);
} else {
//System.out.println(chunk.getPos() + " || Unknown Biome palette entry: " + biomePaletteEntry);
}*/
}

if (bufferCopy.readableBytes() >= 1) {
int biomeDataArrayLength = bufferCopy.readVarInt();
int biomeBytesToSkip = biomeDataArrayLength * 8;
if (bufferCopy.readableBytes() >= biomeBytesToSkip) {
bufferCopy.skipBytes(biomeBytesToSkip);
} else {
//System.out.println("Not enough data for biome array, skipping remaining: " + bufferCopy.readableBytes());
bufferCopy.skipBytes(bufferCopy.readableBytes());
}
} else {
//System.out.println("Not enough data for biome array length");
}
} else {
//System.out.println("Invalid biome bits per entry: " + biomeBitsPerEntry);
break;
}

}
//System.out.println("newChunkQuantifier: " + newChunkQuantifier + ", loops: " + loops);
double percentage = ((newChunkQuantifier/((double)loops-1))*100);
//System.out.println("Percentage: " + percentage);
if (percentage >=40) {
isNewChunk = true;
}
} catch (Exception e){
//e.printStackTrace();
//System.out.println("newChunkQuantifier: " + newChunkQuantifier + ", loops: " + loops);
double percentage = ((newChunkQuantifier/((double)loops-1))*100);
//System.out.println("Percentage: " + percentage);
if (percentage >=40) {
isNewChunk = true;
}
}
}
}

if (isNewChunk == false) {
Expand All @@ -688,8 +805,8 @@ else if (event.packet instanceof ChunkDataS2CPacket && mc.world != null) {
}
}
} catch (Exception e) {
e.printStackTrace();
}//>0 works for flat overworld
//e.printStackTrace();
}
} else if (isNewChunk == true) {
try {
if (!tickexploitChunks.contains(oldpos) && !oldChunks.contains(oldpos) && !newChunks.contains(oldpos)) {
Expand All @@ -699,7 +816,7 @@ else if (event.packet instanceof ChunkDataS2CPacket && mc.world != null) {
}
}
} catch (Exception e) {
e.printStackTrace();
//e.printStackTrace();
}
}
}
Expand Down Expand Up @@ -772,7 +889,7 @@ private void loadData() {
}
}
} catch (IOException e) {
e.printStackTrace();
//e.printStackTrace();
}
try {
List<String> allLines = Files.readAllLines(Paths.get("TrouserStreak/NewChunks/"+serverip+"/"+world+"/NewChunkData.txt"));
Expand All @@ -792,7 +909,7 @@ private void loadData() {
}
}
} catch (IOException e) {
e.printStackTrace();
//e.printStackTrace();
}
try {
List<String> allLines = Files.readAllLines(Paths.get("TrouserStreak/NewChunks/"+serverip+"/"+world+"/BlockExploitChunkData.txt"));
Expand All @@ -812,7 +929,7 @@ private void loadData() {
}
}
} catch (IOException e) {
e.printStackTrace();
//e.printStackTrace();
}
}
private void saveNewChunkData(ChunkPos chunkpos) {
Expand All @@ -823,7 +940,7 @@ private void saveNewChunkData(ChunkPos chunkpos) {
writer.write("\r\n"); // write new line
writer.close();
} catch (IOException e) {
e.printStackTrace();
//e.printStackTrace();
}
}
private void saveOldChunkData(ChunkPos chunkpos) {
Expand All @@ -834,7 +951,7 @@ private void saveOldChunkData(ChunkPos chunkpos) {
writer.write("\r\n"); // write new line
writer.close();
} catch (IOException e) {
e.printStackTrace();
//e.printStackTrace();
}
}
private void saveBlockExploitChunkData(ChunkPos chunkpos) {
Expand All @@ -845,7 +962,7 @@ private void saveBlockExploitChunkData(ChunkPos chunkpos) {
writer.write("\r\n"); // write new line
writer.close();
} catch (IOException e) {
e.printStackTrace();
//e.printStackTrace();
}
}
}
2 changes: 1 addition & 1 deletion src/main/resources/fabric.mod.json
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
{
"schemaVersion": 1,
"id": "streak-addon",
"version": "1.0.1",
"version": "1.0.2",
"name": "TrouserStreak",
"description": "Trouser-Streak is a compilation of modules, updated to the latest version and optimized for maximum grief. I did not make all of these.",
"authors": [
Expand Down

0 comments on commit 63dfa4b

Please sign in to comment.