GP-5944 cleanup GFilesystem file attribute handling

dev747368 2025-08-21 16:03:40 +00:00
parent 7b36c1649f
commit 3b0ee82a5f
14 changed files with 243 additions and 220 deletions

View file

@@ -259,7 +259,7 @@ public interface GFileSystem extends Closeable, Iterable<GFile>, ExtensionPoint
 	 *
 	 * @return An {@link Iterable} over this {@link GFileSystem}'s {@link GFile files}.
 	 */
-	public default Iterable<GFile> files() {
+	default Iterable<GFile> files() {
 		return () -> new GFileSystemIterator(this);
 	}
@@ -271,7 +271,7 @@ public interface GFileSystem extends Closeable, Iterable<GFile>, ExtensionPoint
 	 * @throws UncheckedIOException if {@code dir} is not a directory
 	 * @return An {@link Iterable} over this {@link GFileSystem}'s {@link GFile files}.
 	 */
-	public default Iterable<GFile> files(GFile dir) throws UncheckedIOException {
+	default Iterable<GFile> files(GFile dir) throws UncheckedIOException {
 		return () -> new GFileSystemIterator(Objects.requireNonNullElse(dir, getRootDir()));
 	}
@@ -281,7 +281,7 @@ public interface GFileSystem extends Closeable, Iterable<GFile>, ExtensionPoint
 	 * @param fileFilter A filter to apply to the {@link GFile files} iterated over
 	 * @return An {@link Iterable} over this {@link GFileSystem}'s {@link GFile files}.
 	 */
-	public default Iterable<GFile> files(Predicate<GFile> fileFilter) {
+	default Iterable<GFile> files(Predicate<GFile> fileFilter) {
 		return () -> new GFileSystemIterator(getRootDir(), fileFilter);
 	}
@@ -294,14 +294,14 @@ public interface GFileSystem extends Closeable, Iterable<GFile>, ExtensionPoint
 	 * @throws UncheckedIOException if {@code dir} is not a directory
 	 * @return An {@link Iterable} over this {@link GFileSystem}'s {@link GFile files}.
 	 */
-	public default Iterable<GFile> files(GFile dir, Predicate<GFile> fileFilter)
+	default Iterable<GFile> files(GFile dir, Predicate<GFile> fileFilter)
 			throws UncheckedIOException {
 		return () -> new GFileSystemIterator(Objects.requireNonNullElse(dir, getRootDir()),
 			fileFilter);
 	}
 
 	@Override
-	public default Iterator<GFile> iterator() {
+	default Iterator<GFile> iterator() {
 		return new GFileSystemIterator(this);
 	}
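For reference, the files(...) overloads above are meant to back an enhanced for loop over a file system's contents; a minimal usage sketch, assuming an already-opened GFileSystem and a hypothetical filter:

    // Sketch only: walk every non-directory file of an opened GFileSystem.
    static void printFilePaths(GFileSystem fs) {
        for (GFile f : fs.files(file -> !file.isDirectory())) {
            System.out.println(f.getPath());
        }
    }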

View file

@@ -33,7 +33,7 @@ import ghidra.formats.gfilesystem.FSRL;
 public enum FileAttributeType {
 	FSRL_ATTR("FSRL", GENERAL_INFO, FSRL.class),
 	NAME_ATTR("Name", GENERAL_INFO, String.class),
-	PATH_ATTR("Path", GENERAL_INFO, String.class),
+	PATH_ATTR("Path", GENERAL_INFO, String.class), // the directory containing the file
 	FILE_TYPE_ATTR("File type", GENERAL_INFO, FileType.class),
 	PROJECT_FILE_ATTR("Project file", GENERAL_INFO, String.class),

View file

@@ -115,7 +115,7 @@ public class GetInfoFSBFileHandler implements FSBFileHandler {
 			fattrs.add(NAME_ATTR, file.getName());
 		}
 		if (!fattrs.contains(PATH_ATTR)) {
-			fattrs.add(PATH_ATTR, FilenameUtils.getFullPath(file.getPath()));
+			fattrs.add(PATH_ATTR, FilenameUtils.getFullPathNoEndSeparator(file.getPath()));
 		}
 		if (!fattrs.contains(FSRL_ATTR)) {
 			fattrs.add(FSRL_ATTR, file.getFSRL());
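The substance of this change is the trailing separator: per Commons IO, getFullPath keeps it while getFullPathNoEndSeparator drops it, so the Path attribute now reads as a plain directory path. Roughly:

    // Commons IO FilenameUtils behavior (illustration):
    FilenameUtils.getFullPath("/archive/dir/file.txt");               // "/archive/dir/"
    FilenameUtils.getFullPathNoEndSeparator("/archive/dir/file.txt"); // "/archive/dir"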

View file

@@ -23,6 +23,8 @@ import java.nio.charset.StandardCharsets;
 import java.util.BitSet;
 import java.util.Date;
 
+import org.apache.commons.io.FilenameUtils;
+
 import ghidra.app.util.bin.*;
 import ghidra.formats.gfilesystem.*;
 import ghidra.formats.gfilesystem.annotations.FileSystemInfo;
@@ -176,10 +178,31 @@ public class Ext4FileSystem extends AbstractFileSystem<Ext4File> {
 	public FileAttributes getFileAttributes(GFile file, TaskMonitor monitor) {
 		FileAttributes result = new FileAttributes();
+		if (fsIndex.getRootDir().equals(file)) {
+			result.add(NAME_ATTR, file.getName());
+			result.add(PATH_ATTR, FilenameUtils.getFullPathNoEndSeparator(file.getPath()));
+			String volStr = superBlock.getVolumeName();
+			if (!volStr.isEmpty()) {
+				result.add("Volume", volStr);
+			}
+			String lastMountedAt = superBlock.getLastMountedString();
+			if (!lastMountedAt.isEmpty()) {
+				result.add("Last Mounted At", lastMountedAt);
+			}
+			result.add("UUID", uuid);
+			result.add(MODIFIED_DATE_ATTR, "Superblock last mod",
+				new Date((long) superBlock.getS_mtime() * 1000));
+			result.add(MODIFIED_DATE_ATTR, "Superblock last write",
+				new Date((long) superBlock.getS_wtime() * 1000));
+			result.add(CREATE_DATE_ATTR, new Date((long) superBlock.getS_mkfs_time() * 1000));
+			return result;
+		}
 		Ext4File ext4File = fsIndex.getMetadata(file);
 		if (ext4File != null) {
 			Ext4Inode inode = ext4File.getInode();
 			result.add(NAME_ATTR, ext4File.getName());
+			result.add(PATH_ATTR, FilenameUtils.getFullPathNoEndSeparator(file.getPath()));
 			result.add(SIZE_ATTR, inode.getSize());
 			result.add(FILE_TYPE_ATTR, inodeToFileType(inode));
 			if (inode.isSymLink()) {
@@ -192,7 +215,7 @@ public class Ext4FileSystem extends AbstractFileSystem<Ext4File> {
 				}
 				result.add(SYMLINK_DEST_ATTR, symLinkDest);
 			}
-			result.add(MODIFIED_DATE_ATTR, new Date(inode.getI_mtime() * 1000));
+			result.add(MODIFIED_DATE_ATTR, new Date((long) inode.getI_mtime() * 1000));
 			result.add(UNIX_ACL_ATTR, (long) (inode.getI_mode() & 0xFFF));
 			result.add(USER_ID_ATTR, Short.toUnsignedLong(inode.getI_uid()));
 			result.add(GROUP_ID_ATTR, Short.toUnsignedLong(inode.getI_gid()));
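The added (long) cast is the functional part of this hunk: without it the seconds value is multiplied by 1000 in int arithmetic and wraps before being widened for the Date constructor. A small illustration with an example timestamp:

    int mtimeSeconds = 1_700_000_000;         // example value, seconds since the epoch (Nov 2023)
    long wrong = mtimeSeconds * 1000;         // multiplied as int first: wraps to a negative value
    long right = (long) mtimeSeconds * 1000;  // widened to long first: 1_700_000_000_000, a valid millis value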

View file

@@ -359,17 +359,27 @@ public class Ext4SuperBlock implements StructConverter {
 	}
 
 	public String getVolumeName() {
-		int i = 0;
-		while (i < s_volume_name.length && s_volume_name[i] != '\0') {
-			i++;
-		}
-		return new String(s_volume_name, 0, i, Ext4FileSystem.EXT4_DEFAULT_CHARSET);
+		return getSBString(s_volume_name);
 	}
 
 	public byte[] getS_last_mounted() {
 		return s_last_mounted;
 	}
 
+	public String getLastMountedString() {
+		return getSBString(s_last_mounted);
+	}
+
+	private String getSBString(byte[] bytes) {
+		try {
+			BinaryReader br = new BinaryReader(new ByteArrayProvider(bytes), true);
+			return br.readNextString(bytes.length, Ext4FileSystem.EXT4_DEFAULT_CHARSET, 1);
+		}
+		catch (IOException e) {
+			return "";
+		}
+	}
+
 	public int getS_algorithm_usage_bitmap() {
 		return s_algorithm_usage_bitmap;
 	}
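The new getSBString() helper centralizes what getVolumeName() used to do by hand: decode a fixed-size, NUL-padded superblock field. Conceptually it is equivalent to the following plain-Java sketch (illustrative, not the Ghidra implementation):

    // Decode a fixed-size, possibly NUL-terminated byte field into a String.
    static String fixedField(byte[] bytes, java.nio.charset.Charset cs) {
        int len = 0;
        while (len < bytes.length && bytes[len] != 0) {
            len++;                            // stop at the first NUL byte
        }
        return new String(bytes, 0, len, cs); // decode only the used portion
    }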

View file

@@ -28,16 +28,16 @@ public class SquashDirectoryTableEntry {
 	private final short inodeNumberOffset;
 
 	// Stores the basic inode type (i.e. if it's an "extended file" inode, it will be a "basic file" here)
-	private final short inodeType;
+	private final int inodeType;
 
 	// The number of bytes that will represent the name of this sub-entry
-	private final short nameSize;
+	private final int nameSize;
 
 	// The result of the addition of the base inode and the offset
 	private final int inodeNumber;
 
 	// Upon creation, this is just the name of this sub-entry, but will be expanded to the full path
-	private String path;
+	private final String path;
 
 	/**
 	 * Represents an entry in the directory table
@@ -51,12 +51,12 @@ public class SquashDirectoryTableEntry {
 		addressOffset = reader.readNextUnsignedShort();
 		inodeNumberOffset = reader.readNextShort(); // NOTE: Signed
-		inodeType = reader.readNextShort();
-		nameSize = reader.readNextShort();
+		inodeType = reader.readNextUnsignedShort();
+		nameSize = reader.readNextUnsignedShort();
 
 		// The stored filename doesn't include the terminating null byte
 		// Note: Though technically 16 bits, Linux caps name size at 256 chars
-		path = reader.readNextAsciiString(nameSize + 1);
+		path = reader.readNextUtf8String(nameSize + 1);
 
 		// Find the inode number using the base in the table entry header and the offset
 		inodeNumber = (int) (baseInode + inodeNumberOffset);
@@ -67,7 +67,7 @@ public class SquashDirectoryTableEntry {
 		return addressOffset;
 	}
 
-	public short getInodeType() {
+	public int getInodeType() {
 		return inodeType;
 	}
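Reading the 16-bit fields with readNextUnsignedShort() and widening them to int avoids sign extension when the high bit is set, which is why the field and getter types change here. The underlying difference in plain Java:

    short raw = (short) 0xFFF0;                // 16-bit field with the high bit set
    int asSigned = raw;                        // sign-extends to -16
    int asUnsigned = Short.toUnsignedInt(raw); // 65520, what an unsigned read yields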

View file

@@ -66,8 +66,8 @@ public class SquashExtendedDirectoryInode extends SquashBasicDirectoryInode {
 	private void skipDirectoryListing(BinaryReader reader) throws IOException {
 		long index = reader.readNextUnsignedInt();
 		long start = reader.readNextUnsignedInt();
-		int nameSize = reader.readNextInt();
-		String name = reader.readNextAsciiString(nameSize + 1);
+		int nameSize = reader.readNextUnsignedIntExact();
+		String name = reader.readNextUtf8String(nameSize + 1);
 	}
 
 	long getXattrIndex() {

View file

@@ -20,13 +20,17 @@ import static ghidra.formats.gfilesystem.fileinfo.FileAttributeType.*;
 
 import java.io.IOException;
 import java.io.OutputStream;
 import java.util.Arrays;
-import java.util.Date;
+import java.util.List;
 
+import org.apache.commons.io.FilenameUtils;
+
 import ghidra.app.util.bin.BinaryReader;
 import ghidra.app.util.bin.ByteProvider;
 import ghidra.formats.gfilesystem.*;
 import ghidra.formats.gfilesystem.annotations.FileSystemInfo;
 import ghidra.formats.gfilesystem.fileinfo.FileAttributes;
+import ghidra.formats.gfilesystem.fileinfo.FileType;
+import ghidra.util.Msg;
 import ghidra.util.exception.CancelledException;
 import ghidra.util.task.TaskMonitor;
@@ -37,19 +41,20 @@ public class SquashFileSystem extends AbstractFileSystem<SquashedFile> {
 	private BinaryReader reader;
 	private SquashSuperBlock superBlock;
 
-	public SquashFileSystem(FSRLRoot fsFSRL, ByteProvider provider, FileSystemService fsService) {
+	public SquashFileSystem(FSRLRoot fsFSRL, FileSystemService fsService) {
 		super(fsFSRL, fsService);
 		fsIndex = new FileSystemIndexHelper<>(this, fsFSRL);
+	}
+
+	public void mount(ByteProvider provider, TaskMonitor monitor)
+			throws IOException, CancelledException {
+		monitor.setMessage("Opening " + SquashFileSystem.class.getSimpleName() + "...");
 
 		this.provider = provider;
 
 		// BinaryReader representing the entire archive
 		// Squash versions after 3.0 (2006) should be little endian
 		reader = new BinaryReader(provider, true /* LE */);
-	}
-
-	public void mount(TaskMonitor monitor) throws IOException, CancelledException {
-		monitor.setMessage("Opening " + SquashFileSystem.class.getSimpleName() + "...");
 
 		// Get the super block information for how to process the archive
 		superBlock = new SquashSuperBlock(reader);
@@ -72,8 +77,7 @@ public class SquashFileSystem extends AbstractFileSystem<SquashedFile> {
 		directoryTable.assignInodes(inodes, monitor);
 
 		// Give file structure to Ghidra to present to the user
-		SquashUtils.buildDirectoryStructure(fragmentTable, directoryTable, inodes, fsIndex,
-			monitor);
+		buildDirectoryStructure(fragmentTable, directoryTable, inodes, monitor);
 	}
 
 	@Override
@@ -265,45 +269,44 @@ public class SquashFileSystem extends AbstractFileSystem<SquashedFile> {
 		FileAttributes result = new FileAttributes();
 
+		// Add general attributes
+		result.add(NAME_ATTR, file.getName());
+		result.add(FSRL_ATTR, file.getFSRL());
+		result.add(PATH_ATTR, FilenameUtils.getFullPathNoEndSeparator(file.getPath()));
+
 		SquashedFile squashedFile = fsIndex.getMetadata(file);
+		Object squashInfo = fsIndex.getRootDir().equals(file) ? superBlock
+				: squashedFile != null ? squashedFile.getInode() : null;
 
-		if (squashedFile != null) {
-			SquashInode inode = squashedFile.getInode();
-
-			// Add additional attributes to the root directory
-			if (fsIndex.getRootDir().equals(file)) {
+		switch (squashInfo) {
+			case SquashSuperBlock sb: // superBlock also avail as member var
 				result.add("Compression used", superBlock.getCompressionTypeString());
 				result.add("Block size", superBlock.getBlockSize());
 				result.add("Inode count", superBlock.getInodeCount());
 				result.add("Fragment count", superBlock.getTotalFragments());
 				result.add("SquashFS version", superBlock.getVersionString());
-				result.add(MODIFIED_DATE_ATTR, new Date(superBlock.getModTime()));
-			}
-			else {
-				result.add(MODIFIED_DATE_ATTR, new Date(inode.getModTime()));
-			}
-
-			// Add general attributes
-			result.add(NAME_ATTR, squashedFile.getName());
-			result.add(FSRL_ATTR, file.getFSRL());
-
-			// Add file-related attributes
-			if (inode.isFile()) {
-				SquashBasicFileInode fileInode = (SquashBasicFileInode) inode;
+				result.add(MODIFIED_DATE_ATTR, superBlock.getModTimeAsDate());
+				break;
+			case SquashBasicFileInode fileInode:
 				result.add(SIZE_ATTR, squashedFile.getUncompressedSize());
 				result.add(COMPRESSED_SIZE_ATTR, fileInode.getCompressedFileSize());
+				result.add(FILE_TYPE_ATTR, fileInode.isDir() ? FileType.DIRECTORY : FileType.FILE);
+				result.add(MODIFIED_DATE_ATTR, fileInode.getModTimeAsDate());
+				result.add(UNIX_ACL_ATTR, (long) fileInode.getPermissions());
+				break;
+			case SquashBasicDirectoryInode dirInode:
+				result.add(FILE_TYPE_ATTR, FileType.DIRECTORY);
+				result.add(MODIFIED_DATE_ATTR, dirInode.getModTimeAsDate());
+				result.add(UNIX_ACL_ATTR, (long) dirInode.getPermissions());
+				break;
+			case SquashSymlinkInode symlinkInode:
+				result.add(SYMLINK_DEST_ATTR, symlinkInode.getPath());
+				result.add(FILE_TYPE_ATTR, FileType.SYMBOLIC_LINK);
+				result.add(MODIFIED_DATE_ATTR, symlinkInode.getModTimeAsDate());
+				result.add(UNIX_ACL_ATTR, (long) symlinkInode.getPermissions());
+				break;
+			default:
 		}
-			else if (inode.isSymLink()) {
-				SquashSymlinkInode symLinkInode = (SquashSymlinkInode) inode;
-				result.add(SYMLINK_DEST_ATTR, symLinkInode.getPath());
-			}
-		}
 
 		return result;
 	}
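The rewritten getFileAttributes() leans on Java pattern matching for switch: the selector is an Object (the superblock for the root directory, otherwise the file's inode) and each case label is a type pattern. A minimal standalone sketch of the feature, using the arrow/expression form rather than the labeled-statement form used above:

    // Sketch of pattern matching for switch (Java 21), unrelated to the SquashFS types.
    static String describe(Object info) {
        return switch (info) {
            case Integer i -> "int value " + i;
            case String s -> "string of length " + s.length();
            case null, default -> "something else";
        };
    }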
@@ -314,6 +317,110 @@ public class SquashFileSystem extends AbstractFileSystem<SquashedFile> {
 		if (provider != null) {
 			provider.close();
 			provider = null;
+			reader = null;
 		}
 	}
+
+	private void buildDirectoryStructure(SquashFragmentTable fragTable,
+			SquashDirectoryTable dirTable, SquashInodeTable inodes, TaskMonitor monitor)
+			throws CancelledException, IOException {
+
+		SquashInode[] inodeArray = inodes.getInodes();
+		SquashInode rootInode = inodes.getRootInode();
+
+		// Make sure the root inode is a directory
+		if (rootInode != null && rootInode.isDir()) {
+
+			// Treat root inode as a directory inode
+			SquashBasicDirectoryInode dirInode = (SquashBasicDirectoryInode) rootInode;
+
+			// For each header associated with the root inode, process all entries
+			List<SquashDirectoryTableHeader> headers = dirTable.getHeaders(dirInode);
+			if (headers.size() == 0) {
+				throw new IOException("Unable to find headers for the root directory");
+			}
+			for (SquashDirectoryTableHeader header : headers) {
+
+				// For all files/directories immediately under the root
+				List<SquashDirectoryTableEntry> entries = header.getEntries();
+				for (SquashDirectoryTableEntry entry : entries) {
+
+					// Recurse down the directory tree, storing directories and files
+					assignPathsRecursively(fragTable, dirTable, entry, inodeArray,
+						fsIndex.getRootDir(), monitor);
+				}
+			}
+		}
+		else {
+			// If root is NOT a directory, stop processing
+			throw new IOException("Root inode was not a directory!");
+		}
+	}
+
+	private void assignPathsRecursively(SquashFragmentTable fragTable,
+			SquashDirectoryTable dirTable, SquashDirectoryTableEntry entry, SquashInode[] inodes,
+			GFile parentDir, TaskMonitor monitor) throws CancelledException, IOException {
+
+		// Check if the user cancelled the load
+		monitor.checkCancelled();
+
+		// Validate the inode number of the current entry
+		if (entry == null || entry.getInodeNumber() < 1 || entry.getInodeNumber() > inodes.length) {
+			throw new IOException(
+				"Entry found with invalid inode number: " + entry.getInodeNumber());
+		}
+
+		// Get the inode for the current entry
+		SquashInode inode = inodes[entry.getInodeNumber()];
+
+		// If the inode is a directory, recurse downward. Otherwise, just store the file
+		if (inode.isDir()) {
+
+			// Treat as directory inode
+			SquashBasicDirectoryInode dirInode = (SquashBasicDirectoryInode) inode;
+
+			// Create and store a "file" representing the current directory
+			SquashedFile squashedDirFile = new SquashedFile(dirInode, null);
+			GFile dirGFile = fsIndex.storeFileWithParent(entry.getFileName(), parentDir,
+				inode.getNumber(), true, -1, squashedDirFile);
+
+			// Get the directory headers for the current inode and process each entry within them
+			List<SquashDirectoryTableHeader> headers = dirTable.getHeaders(dirInode);
+			for (SquashDirectoryTableHeader header : headers) {
+
+				// For each sub-directory, recurse downward and add each file/directory encountered
+				List<SquashDirectoryTableEntry> entries = header.getEntries();
+				for (SquashDirectoryTableEntry currentEntry : entries) {
+					assignPathsRecursively(fragTable, dirTable, currentEntry, inodes, dirGFile,
+						monitor);
+				}
+			}
+		}
+		else if (inode.isFile()) {
+
+			// Treat as file inode
+			SquashBasicFileInode fileInode = (SquashBasicFileInode) inode;
+			SquashFragment fragment = fragTable.getFragment(fileInode.getFragmentIndex());
+
+			// Store the current file
+			fsIndex.storeFileWithParent(entry.getFileName(), parentDir, fileInode.getNumber(),
+				false, fileInode.getFileSize(), new SquashedFile(fileInode, fragment));
+		}
+		else if (inode.isSymLink()) {
+
+			// Treat as symbolic link inode
+			SquashSymlinkInode symLinkInode = (SquashSymlinkInode) inode;
+			fsIndex.storeSymlinkWithParent(entry.getFileName(), parentDir, symLinkInode.getNumber(),
+				symLinkInode.getPath(), 0, new SquashedFile(symLinkInode, null));
+		}
+		else {
+			Msg.info(SquashUtils.class,
+				"Inode #" + inode.getNumber() + " is not a file or directory. Skipping...");
+		}
+	}
 }

View file

@@ -34,9 +34,9 @@ public class SquashFileSystemFactory
 			FileSystemService fsService, TaskMonitor monitor)
 			throws IOException, CancelledException {
-		SquashFileSystem fs = new SquashFileSystem(targetFSRL, byteProvider, fsService);
+		SquashFileSystem fs = new SquashFileSystem(targetFSRL, fsService);
 
 		try {
-			fs.mount(monitor);
+			fs.mount(byteProvider, monitor);
 			return fs;
 		}
 		catch (IOException e) {

View file

@@ -16,6 +16,7 @@
 package ghidra.file.formats.squashfs;
 
 import java.io.IOException;
+import java.util.Date;
 
 import ghidra.app.util.bin.BinaryReader;
@@ -82,6 +83,10 @@ public class SquashInode {
 		return modTime;
 	}
 
+	public Date getModTimeAsDate() {
+		return new Date(modTime * 1000);
+	}
+
 	public int getNumber() {
 		return inodeNumber;
 	}

View file

@@ -17,6 +17,7 @@
 package ghidra.file.formats.squashfs;
 
 import java.io.IOException;
+import java.util.Date;
 
 import ghidra.app.util.bin.BinaryReader;
 import ghidra.util.Msg;
@@ -31,13 +32,13 @@ public class SquashSuperBlock {
 	private final int magic;
 
 	// The number of inodes in the archive
-	private final long inodeCount;
+	private final int inodeCount;
 
 	// Unix timestamp of the last time the archive was modified (not counting leap seconds)
 	private final long modTime;
 
 	// The size of a data block in bytes (must be a power of 2 between 4KB and 1 MiB)
-	private final long blockSize;
+	private final int blockSize;
 
 	// The number of entries in the fragment table
 	private final long totalFragments;
@@ -120,10 +121,10 @@ public class SquashSuperBlock {
 	SquashSuperBlock(BinaryReader reader) throws IOException {
 
 		// Fetch the 32 bit integer fields
-		magic = reader.readNextUnsignedIntExact();
-		inodeCount = reader.readNextUnsignedInt();
+		magic = reader.readNextInt();
+		inodeCount = reader.readNextUnsignedIntExact();
 		modTime = reader.readNextUnsignedInt();
-		blockSize = reader.readNextUnsignedInt();
+		blockSize = reader.readNextUnsignedIntExact();
 		totalFragments = reader.readNextUnsignedInt();
 
 		// Fetch the 16 bit short fields
@@ -148,11 +149,11 @@ public class SquashSuperBlock {
 		checkCompatibility();
 	}
 
-	public long getMagicBytes() {
+	public int getMagic() {
 		return magic;
 	}
 
-	public long getInodeCount() {
+	public int getInodeCount() {
 		return inodeCount;
 	}
@@ -160,6 +161,10 @@ public class SquashSuperBlock {
 		return modTime;
 	}
 
+	public Date getModTimeAsDate() {
+		return new Date(modTime * 1000);
+	}
+
 	public long getBlockSize() {
 		return blockSize;
 	}
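The constructor swaps readNextUnsignedInt() (which returns a long) for readNextUnsignedIntExact() on the fields that narrow to int, presumably so out-of-range values are rejected rather than silently truncated. A rough sketch of that distinction in plain Java, not Ghidra's implementation (assumes java.nio.ByteBuffer and java.io.IOException imports):

    // Illustration: unsigned 32-bit reads, as a long vs. as a range-checked int.
    static long readUnsignedInt(ByteBuffer bb) {
        return Integer.toUnsignedLong(bb.getInt());      // 0 .. 4294967295 needs a long
    }

    static int readUnsignedIntExact(ByteBuffer bb) throws IOException {
        long v = Integer.toUnsignedLong(bb.getInt());
        if (v > Integer.MAX_VALUE) {
            throw new IOException("Unsigned value does not fit in an int: " + v);
        }
        return (int) v;                                  // safe to store in an int field
    }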

View file

@@ -35,8 +35,8 @@ public class SquashSymlinkInode extends SquashInode {
 		super(reader, superBlock);
 
 		linkCount = reader.readNextUnsignedInt();
-		int targetSize = reader.readNextInt();
-		targetPath = reader.readNextAsciiString(targetSize);
+		int targetSize = reader.readNextUnsignedIntExact();
+		targetPath = reader.readNextUtf8String(targetSize);
 
 		if (isExtended) {
 			xattrIndex = reader.readNextUnsignedInt();

View file

@@ -17,7 +17,7 @@ package ghidra.file.formats.squashfs;
 
 import java.io.IOException;
 import java.io.InputStream;
-import java.util.List;
+import java.util.Arrays;
 
 import org.apache.commons.compress.compressors.deflate.DeflateCompressorInputStream;
 import org.apache.commons.compress.compressors.lz4.BlockLZ4CompressorInputStream;
@@ -25,10 +25,6 @@ import org.apache.commons.compress.compressors.xz.XZCompressorInputStream;
 import org.tukaani.xz.LZMAInputStream;
 
 import ghidra.app.util.bin.*;
-import ghidra.file.formats.gzip.GZipConstants;
-import ghidra.formats.gfilesystem.FileSystemIndexHelper;
-import ghidra.formats.gfilesystem.GFile;
-import ghidra.util.Msg;
 import ghidra.util.exception.CancelledException;
 import ghidra.util.task.TaskMonitor;
@@ -40,9 +36,8 @@ public class SquashUtils {
 	 * @return Whether or not the bytes match the SquashFS magic
 	 */
 	public static boolean isSquashFS(byte[] bytes) {
-		return bytes.length >= GZipConstants.MAGIC_BYTES.length &&
-			bytes[0] == SquashConstants.MAGIC[0] && bytes[1] == SquashConstants.MAGIC[1] &&
-			bytes[2] == SquashConstants.MAGIC[2] && bytes[3] == SquashConstants.MAGIC[3];
+		return bytes.length >= SquashConstants.MAGIC.length && Arrays.equals(SquashConstants.MAGIC,
+			0, SquashConstants.MAGIC.length, bytes, 0, SquashConstants.MAGIC.length);
 	}
 
 	/**
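The rewritten check compares the whole magic with the ranged Arrays.equals overload (Java 9+) and uses the SquashFS constant for the length check instead of one borrowed from GZipConstants. A standalone sketch of the same idiom; the magic value below is assumed to be the little-endian SquashFS signature and is shown only for illustration:

    // Assumed example magic ("hsqs", little-endian "sqsh"); real code uses SquashConstants.MAGIC.
    private static final byte[] MAGIC = { 'h', 's', 'q', 's' };

    static boolean startsWithMagic(byte[] bytes) {
        return bytes.length >= MAGIC.length &&
            Arrays.equals(MAGIC, 0, MAGIC.length, bytes, 0, MAGIC.length);  // java.util.Arrays
    }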
@@ -157,128 +152,4 @@ public class SquashUtils {
 		}
 	}
 
-	/**
-	 * Assemble the directory structure of the archive
-	 * @param fragTable The processed fragment table of the archive
-	 * @param dirTable The processed directory table of the archive
-	 * @param inodes The processed inode table of the archive
-	 * @param fsih An index helper
-	 * @param monitor Monitor to allow the user to cancel the load
-	 * @throws CancelledException Archive load was cancelled
-	 * @throws IOException Root inode was not a directory
-	 */
-	public static void buildDirectoryStructure(SquashFragmentTable fragTable,
-			SquashDirectoryTable dirTable, SquashInodeTable inodes,
-			FileSystemIndexHelper<SquashedFile> fsih, TaskMonitor monitor)
-			throws CancelledException, IOException {
-
-		SquashInode[] inodeArray = inodes.getInodes();
-		SquashInode rootInode = inodes.getRootInode();
-
-		// Make sure the root inode is a directory
-		if (rootInode != null && rootInode.isDir()) {
-
-			// Treat root inode as a directory inode
-			SquashBasicDirectoryInode dirInode = (SquashBasicDirectoryInode) rootInode;
-
-			// For each header associated with the root inode, process all entries
-			List<SquashDirectoryTableHeader> headers = dirTable.getHeaders(dirInode);
-			if (headers.size() == 0) {
-				throw new IOException("Unable to find headers for the root directory");
-			}
-			for (SquashDirectoryTableHeader header : headers) {
-
-				// For all files/directories immediately under the root
-				List<SquashDirectoryTableEntry> entries = header.getEntries();
-				for (SquashDirectoryTableEntry entry : entries) {
-
-					// Recurse down the directory tree, storing directories and files
-					assignPathsRecursively(fragTable, dirTable, entry, inodeArray,
-						fsih.getRootDir(), fsih, monitor);
-				}
-			}
-		}
-		else {
-			// If root is NOT a directory, stop processing
-			throw new IOException("Root inode was not a directory!");
-		}
-	}
-
-	/**
-	 * Recursively assign paths to each of the inodes
-	 * @param dirTable The processed directory table of the archive
-	 * @param entry The directory table entry currently being processed
-	 * @param inodes An array of inodes within the archive
-	 * @param parentDir The parent of the current entry
-	 * @param fsih An index helper
-	 * @param monitor Monitor to allow the user to cancel the load
-	 * @throws CancelledException Archive load was cancelled
-	 * @throws IOException Entry found with an invalid inode number
-	 */
-	private static void assignPathsRecursively(SquashFragmentTable fragTable,
-			SquashDirectoryTable dirTable, SquashDirectoryTableEntry entry, SquashInode[] inodes,
-			GFile parentDir, FileSystemIndexHelper<SquashedFile> fsih, TaskMonitor monitor)
-			throws CancelledException, IOException {
-
-		// Check if the user cancelled the load
-		monitor.checkCancelled();
-
-		// Validate the inode number of the current entry
-		if (entry == null || entry.getInodeNumber() < 1 || entry.getInodeNumber() > inodes.length) {
-			throw new IOException(
-				"Entry found with invalid inode number: " + entry.getInodeNumber());
-		}
-
-		// Get the inode for the current entry
-		SquashInode inode = inodes[entry.getInodeNumber()];
-
-		// If the inode is a directory, recurse downward. Otherwise, just store the file
-		if (inode.isDir()) {
-
-			// Treat as directory inode
-			SquashBasicDirectoryInode dirInode = (SquashBasicDirectoryInode) inode;
-
-			// Create and store a "file" representing the current directory
-			SquashedFile squashedDirFile = new SquashedFile(dirInode, null);
-			GFile dirGFile = fsih.storeFileWithParent(entry.getFileName(), parentDir,
-				inode.getNumber(), true, -1, squashedDirFile);
-
-			// Get the directory headers for the current inode and process each entry within them
-			List<SquashDirectoryTableHeader> headers = dirTable.getHeaders(dirInode);
-			for (SquashDirectoryTableHeader header : headers) {
-
-				// For each sub-directory, recurse downward and add each file/directory encountered
-				List<SquashDirectoryTableEntry> entries = header.getEntries();
-				for (SquashDirectoryTableEntry currentEntry : entries) {
-					assignPathsRecursively(fragTable, dirTable, currentEntry, inodes, dirGFile,
-						fsih, monitor);
-				}
-			}
-		}
-		else if (inode.isFile()) {
-
-			// Treat as file inode
-			SquashBasicFileInode fileInode = (SquashBasicFileInode) inode;
-			SquashFragment fragment = fragTable.getFragment(fileInode.getFragmentIndex());
-
-			// Store the current file
-			fsih.storeFileWithParent(entry.getFileName(), parentDir, fileInode.getNumber(), false,
-				fileInode.getFileSize(), new SquashedFile(fileInode, fragment));
-		}
-		else if (inode.isSymLink()) {
-
-			// Treat as symbolic link inode
-			SquashSymlinkInode symLinkInode = (SquashSymlinkInode) inode;
-			fsih.storeSymlinkWithParent(entry.getFileName(), parentDir, symLinkInode.getNumber(),
-				symLinkInode.getPath(), 0, new SquashedFile(symLinkInode, null));
-		}
-		else {
-			Msg.info(SquashUtils.class,
-				"Inode #" + inode.getNumber() + " is not a file or directory. Skipping...");
-		}
-	}
 }

View file

@@ -114,6 +114,8 @@ public class TarFileSystem extends AbstractFileSystem<TarMetadata> {
 		TarArchiveEntry blob = tmd.tarArchiveEntry;
 		return FileAttributes.of(
 			FileAttribute.create(NAME_ATTR, FilenameUtils.getName(blob.getName())),
+			FileAttribute.create(PATH_ATTR,
+				FilenameUtils.getFullPathNoEndSeparator(blob.getName())),
 			FileAttribute.create(SIZE_ATTR, blob.getSize()),
 			FileAttribute.create(MODIFIED_DATE_ATTR, blob.getLastModifiedDate()),
 			FileAttribute.create(FILE_TYPE_ATTR, tarToFileType(blob)),