Merge remote-tracking branch 'origin/Ghidra_10.1'

Ryan Kurtz
2021-12-03 19:10:24 -05:00
21 changed files with 705 additions and 533 deletions

View File

@@ -333,7 +333,13 @@ abstract class AbstractDwarfEHDecoder implements DwarfEHDecoder {
switch (appMode) {
case DW_EH_PE_absptr:
// just pass this through
// if the program has been re-based, need to add in the image base difference.
// but only if there are no relocations at this location
if (prog.getRelocationTable().getRelocation(addr) == null) {
long programBaseAddressFixup = context.getOriginalImageBaseOffset();
val = val + programBaseAddressFixup;
}
break;
case DW_EH_PE_aligned:
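The fixup above only applies the image-base delta when the loader has not already placed a relocation at the location; a relocation would have produced the final address already. A minimal, self-contained sketch of that rule (hypothetical names, plain Java, not the Ghidra API; the real delta comes from DwarfDecodeContext.getOriginalImageBaseOffset(), shown in the next file):

// Illustrative only: rebasing a DW_EH_PE_absptr value by the image-base delta,
// skipped when a relocation already fixed the location up.
final class AbsPtrRebaseSketch {
    static long rebaseAbsolutePointer(long encodedValue, long currentImageBase,
            long originalImageBase, boolean hasRelocationAtLocation) {
        if (hasRelocationAtLocation) {
            return encodedValue; // loader relocation already produced the final address
        }
        return encodedValue + (currentImageBase - originalImageBase);
    }

    public static void main(String[] args) {
        // linked at 0x100000, loaded (rebased) at 0x400000
        System.out.printf("0x%x%n",
            rebaseAbsolutePointer(0x101230L, 0x400000L, 0x100000L, false)); // 0x401230
    }
}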

View File

@@ -15,6 +15,7 @@
*/
package ghidra.app.plugin.exceptionhandlers.gcc;
import ghidra.app.util.opinion.ElfLoader;
import ghidra.program.model.address.Address;
import ghidra.program.model.listing.Function;
import ghidra.program.model.listing.Program;
@@ -34,6 +35,7 @@ public class DwarfDecodeContext {
private Object decodedValue;
private int encodedLength;
private MemBuffer buffer;
private long originalImageBaseOffset; // difference between the actual load image base and the image base used in the original DWARF
/**
* Constructs a Dwarf decode context.
@@ -96,6 +98,8 @@ public class DwarfDecodeContext {
this.ehBlock = ehBlock;
this.functionEntryPoint = entryPoint;
Long oib = ElfLoader.getElfOriginalImageBase(program);
this.originalImageBaseOffset = program.getImageBase().getOffset() - oib.longValue();
}
/**
@@ -180,4 +184,12 @@ public class DwarfDecodeContext {
public Address getFunctionEntryPoint() {
return functionEntryPoint;
}
/**
* Gets the difference between the program's current image base and the original DWARF image base
* @return the offset that, when added to the original DWARF image base, yields the current image base
*/
public long getOriginalImageBaseOffset() {
return originalImageBaseOffset;
}
}

View File

@@ -22,6 +22,7 @@ import java.util.stream.Collectors;
import ghidra.app.cmd.comments.AppendCommentCmd;
import ghidra.app.cmd.label.SetLabelPrimaryCmd;
import ghidra.app.util.bin.format.dwarf4.*;
import ghidra.app.util.bin.format.dwarf4.attribs.DWARFNumericAttribute;
import ghidra.app.util.bin.format.dwarf4.encoding.*;
import ghidra.app.util.bin.format.dwarf4.expression.*;
import ghidra.program.database.data.DataTypeUtilities;
@@ -176,7 +177,11 @@ public class DWARFFunctionImporter {
return true;
}
if (diea.getLowPC(-1) == 0) {
// fetch the low_pc attribute directly instead of calling diea.getLowPc() to avoid
// any fixups applied by lower level code
DWARFNumericAttribute attr =
diea.getAttribute(DWARFAttribute.DW_AT_low_pc, DWARFNumericAttribute.class);
if (attr != null && attr.getUnsignedValue() == 0) {
return true;
}

View File

@@ -426,12 +426,35 @@ public class DyldCacheHeader implements StructConverter {
}
/**
* Gets the {@link List} of {@link DyldCacheImageInfo}s. Requires header to have been parsed.
* Generates a {@link List} of {@link DyldCacheImage}s that are mapped in by this
* {@link DyldCacheHeader}. Requires header to have been parsed.
* <p>
* NOTE: A "split" DYLD Cache header may declare an image, but that image may get loaded at an
* address defined by the memory map of a different split header. This method will only return
* the images that are mapped by "this" header's memory map.
*
* @return The {@link List} of {@link DyldCacheImageInfo}s
*
* @return A {@link List} of {@link DyldCacheImage}s mapped by this {@link DyldCacheHeader}
*/
public List<DyldCacheImageInfo> getImageInfos() {
return imageInfoList;
public List<DyldCacheImage> getMappedImages() {
List<DyldCacheImage> images = new ArrayList<>();
if (imageInfoList.size() > 0) {
// The old, simple way
images.addAll(imageInfoList);
}
else {
// The new, split file way. A split file will have an entry for every image, but
// not every image will be mapped.
for (DyldCacheImageTextInfo imageTextInfo : imageTextInfoList) {
for (DyldCacheMappingInfo mappingInfo : mappingInfoList) {
if (mappingInfo.contains(imageTextInfo.getAddress())) {
images.add(imageTextInfo);
break;
}
}
}
}
return images;
}
/**
@@ -691,8 +714,8 @@ public class DyldCacheHeader implements StructConverter {
monitor.setMessage("Marking up DYLD header...");
monitor.initialize(1);
try {
DataUtilities.createData(program, program.getImageBase(), toDataType(), -1, false,
DataUtilities.ClearDataMode.CHECK_FOR_SPACE);
DataUtilities.createData(program, space.getAddress(getBaseAddress()), toDataType(), -1,
false, DataUtilities.ClearDataMode.CHECK_FOR_SPACE);
monitor.incrementProgress(1);
}
catch (CodeUnitInsertionException | DuplicateNameException | IOException e) {

View File

@@ -0,0 +1,36 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.app.util.bin.format.macho.dyld;
/**
* A convenience interface for getting the address and path of a DYLD Cache image
*/
public interface DyldCacheImage {
/**
* Gets the address of the start of the image
*
* @return The address of the start of the image
*/
public long getAddress();
/**
* Gets the path of the image
*
* @return The path of the image
*/
public String getPath();
}

View File

@@ -29,7 +29,7 @@ import ghidra.util.exception.DuplicateNameException;
* @see <a href="https://opensource.apple.com/source/dyld/dyld-852.2/dyld3/shared-cache/dyld_cache_format.h.auto.html">dyld3/shared-cache/dyld_cache_format.h</a>
*/
@SuppressWarnings("unused")
public class DyldCacheImageInfo implements StructConverter {
public class DyldCacheImageInfo implements DyldCacheImage, StructConverter {
private long address;
private long modTime;
@@ -55,20 +55,12 @@ public class DyldCacheImageInfo implements StructConverter {
path = reader.readAsciiString(pathFileOffset);
}
/**
* Gets the address the start of the image.
*
* @return The address of the start of the image
*/
@Override
public long getAddress() {
return address;
}
/**
* Gets the path of the image.
*
* @return The path of the image
*/
@Override
public String getPath() {
return path;
}

View File

@@ -29,7 +29,7 @@ import ghidra.util.exception.DuplicateNameException;
* @see <a href="https://opensource.apple.com/source/dyld/dyld-852.2/dyld3/shared-cache/dyld_cache_format.h.auto.html">dyld3/shared-cache/dyld_cache_format.h</a>
*/
@SuppressWarnings("unused")
public class DyldCacheImageTextInfo implements StructConverter {
public class DyldCacheImageTextInfo implements DyldCacheImage, StructConverter {
private byte[] uuid;
private long loadAddress;
@@ -52,12 +52,13 @@ public class DyldCacheImageTextInfo implements StructConverter {
path = reader.readAsciiString(pathOffset);
}
@Override
public long getAddress() {
return loadAddress;
}
/**
* Gets the path of the image text.
*
* @return The path of the image text.
*/
@Override
public String getPath() {
return path;
}

View File

@@ -106,6 +106,17 @@ public class DyldCacheMappingInfo implements StructConverter {
return (initProt & SegmentConstants.PROTECTION_X) != 0;
}
/**
* Returns true if the mapping contains the given address
*
* @param addr The address to check
* @return True if the mapping contains the given address; otherwise, false
*/
public boolean contains(long addr) {
return Long.compareUnsigned(addr, address) >= 0 &&
Long.compareUnsigned(addr, address + size) < 0;
}
@Override
public DataType toDataType() throws DuplicateNameException, IOException {
StructureDataType struct = new StructureDataType("dyld_cache_mapping_info", 0);
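The new contains() check deliberately uses Long.compareUnsigned so mapping ranges are treated as unsigned 64-bit intervals. A small stand-alone illustration of why that matters (illustrative values only, not addresses from a real cache):

// Signed comparison mishandles ranges that cross the signed 64-bit boundary.
public final class UnsignedContainsSketch {
    static boolean contains(long addr, long base, long size) {
        return Long.compareUnsigned(addr, base) >= 0 &&
            Long.compareUnsigned(addr, base + size) < 0;
    }

    public static void main(String[] args) {
        long base = 0x7fffffffff000000L; // range ends past Long.MAX_VALUE
        long size = 0x2000000L;
        long addr = 0x8000000000100000L; // negative when viewed as a signed long
        System.out.println(contains(addr, base, size));          // true
        System.out.println(addr >= base && addr < base + size);  // false with signed math
    }
}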

View File

@@ -25,9 +25,11 @@ import ghidra.app.util.opinion.Loader;
import ghidra.framework.model.DomainObject;
import ghidra.program.database.mem.AddressSourceInfo;
import ghidra.program.database.mem.FileBytes;
import ghidra.program.model.address.Address;
import ghidra.program.model.address.AddressSetView;
import ghidra.program.model.listing.Program;
import ghidra.program.model.mem.Memory;
import ghidra.program.model.mem.MemoryBlockSourceInfo;
import ghidra.program.model.reloc.Relocation;
import ghidra.util.Conv;
import ghidra.util.HelpLocation;
@@ -75,44 +77,45 @@ public abstract class AbstractLoaderExporter extends Exporter {
return false;
}
List<FileBytes> fileBytes = memory.getAllFileBytes();
if (fileBytes.isEmpty()) {
log.appendMsg("Exporting a program with no file source bytes is not supported");
return false;
}
if (fileBytes.size() > 1) {
log.appendMsg("Exporting a program with more than 1 file source is not supported");
return false;
}
// Write source program's file bytes to a temp file
File tempFile = File.createTempFile("ghidra_export_", null);
try (OutputStream out = new FileOutputStream(tempFile, false)) {
FileBytes[] fileBytes = memory.getAllFileBytes()
.stream()
.filter(fb -> program.getExecutablePath().endsWith(fb.getFilename()))
.toArray(FileBytes[]::new);
for (FileBytes bytes : fileBytes) {
FileUtilities.copyStreamToStream(new FileBytesInputStream(bytes), out, monitor);
}
FileUtilities.copyStreamToStream(new FileBytesInputStream(fileBytes.get(0)), out,
monitor);
}
// Undo relocations in the temp file
// NOTE: not all relocations are file-backed
String error = null;
// NOTE: not all relocations are file-backed, and some are only partially file-backed
try (RandomAccessFile fout = new RandomAccessFile(tempFile, "rw")) {
Iterable<Relocation> relocs = () -> program.getRelocationTable().getRelocations();
for (Relocation reloc : relocs) {
AddressSourceInfo info = memory.getAddressSourceInfo(reloc.getAddress());
if (info == null) {
Address addr = reloc.getAddress();
AddressSourceInfo addrSourceInfo = memory.getAddressSourceInfo(addr);
if (addrSourceInfo == null) {
continue;
}
long offset = info.getFileOffset();
byte[] bytes = reloc.getBytes();
long offset = addrSourceInfo.getFileOffset();
if (offset >= 0) {
if (offset + bytes.length > fout.length()) {
error = "Relocation at " + reloc.getAddress() + " exceeds file length";
break;
}
MemoryBlockSourceInfo memSourceInfo = addrSourceInfo.getMemoryBlockSourceInfo();
byte[] bytes = reloc.getBytes();
int len = Math.min(bytes.length,
(int) memSourceInfo.getMaxAddress().subtract(addr) + 1);
fout.seek(offset);
fout.write(bytes);
fout.write(bytes, 0, len);
}
}
}
// If errors occurred, log them and delete the malformed temp file
if (error != null) {
log.appendMsg(error);
catch (Exception e) {
if (!tempFile.delete()) {
log.appendMsg("Failed to delete malformed file: " + tempFile);
}
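The rewritten loop clamps the restored relocation bytes to the remaining file-backed length of the containing block, so a partially file-backed relocation cannot write past it. A rough, self-contained sketch of just that clamping step (hypothetical names; the real limit comes from MemoryBlockSourceInfo.getMaxAddress() as above):

import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.file.Files;
import java.nio.file.Path;

public final class ClampedRestoreSketch {
    // Write back original bytes at a file offset, clamped to what is still file-backed.
    static void restore(RandomAccessFile f, long fileOffset, byte[] originalBytes,
            long remainingFileBacked) throws IOException {
        int len = (int) Math.min(originalBytes.length, remainingFileBacked);
        f.seek(fileOffset);
        f.write(originalBytes, 0, len);
    }

    public static void main(String[] args) throws IOException {
        Path tmp = Files.createTempFile("clamp_demo_", ".bin");
        Files.write(tmp, new byte[16]);
        try (RandomAccessFile f = new RandomAccessFile(tmp.toFile(), "rw")) {
            // 8 original bytes, but only 4 remain file-backed at offset 12
            restore(f, 12, new byte[] { 1, 2, 3, 4, 5, 6, 7, 8 }, 4);
            System.out.println(f.length()); // still 16: nothing written past the block
        }
        Files.delete(tmp);
    }
}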

View File

@@ -18,9 +18,7 @@ package ghidra.app.util.opinion;
import java.io.IOException;
import java.util.*;
import ghidra.app.util.MemoryBlockUtils;
import ghidra.app.util.Option;
import ghidra.app.util.OptionUtils;
import ghidra.app.util.*;
import ghidra.app.util.bin.BinaryReader;
import ghidra.app.util.bin.ByteProvider;
import ghidra.app.util.bin.format.macho.dyld.DyldArchitecture;
@@ -51,11 +49,19 @@ public class DyldCacheLoader extends AbstractLibrarySupportLoader {
static final boolean CREATE_DYLIB_SECTIONS_OPTION_DEFAULT = false;
/** Loader option to add relocation entries for each fixed chain pointer */
static final String ADD_RELOCATION_ENTRIES_OPTION_NAME = "Add relocation entries for fixed chain pointers";
static final String ADD_RELOCATION_ENTRIES_OPTION_NAME =
"Add relocation entries for fixed chain pointers";
/** Default value for loader option add relocation entries */
static final boolean ADD_RELOCATION_ENTRIES_OPTION_DEFAULT = false;
/** Loader option to combine split DYLD Cache files (.1, .2, .symbol, etc) into one program */
static final String COMBINE_SPLIT_FILES_OPTION_NAME =
"Auto import and combine split DYLD Cache files";
/** Default value for loader option combine split files */
static final boolean COMBINE_SPLIT_FILES_OPTION_DEFAULT = true;
@Override
public Collection<LoadSpec> findSupportedLoadSpecs(ByteProvider provider) throws IOException {
List<LoadSpec> loadSpecs = new ArrayList<>();
@@ -92,7 +98,8 @@ public class DyldCacheLoader extends AbstractLibrarySupportLoader {
DyldCacheProgramBuilder.buildProgram(program, provider,
MemoryBlockUtils.createFileBytes(program, provider, monitor),
shouldProcessSymbols(options), shouldCreateDylibSections(options),
shouldAddRelocationEntries(options), log, monitor);
shouldAddRelocationEntries(options), shouldCombineSplitFiles(options), log,
monitor);
}
catch (CancelledException e) {
return;
@@ -113,25 +120,35 @@ public class DyldCacheLoader extends AbstractLibrarySupportLoader {
list.add(
new Option(CREATE_DYLIB_SECTIONS_OPTION_NAME, CREATE_DYLIB_SECTIONS_OPTION_DEFAULT,
Boolean.class, Loader.COMMAND_LINE_ARG_PREFIX + "-createDylibSections"));
list.add(
new Option(ADD_RELOCATION_ENTRIES_OPTION_NAME, ADD_RELOCATION_ENTRIES_OPTION_DEFAULT,
Boolean.class, Loader.COMMAND_LINE_ARG_PREFIX + "-addRelocationEntries"));
list.add(new Option(ADD_RELOCATION_ENTRIES_OPTION_NAME,
ADD_RELOCATION_ENTRIES_OPTION_DEFAULT, Boolean.class,
Loader.COMMAND_LINE_ARG_PREFIX + "-addRelocationEntries"));
list.add(new Option(COMBINE_SPLIT_FILES_OPTION_NAME, COMBINE_SPLIT_FILES_OPTION_DEFAULT,
Boolean.class, Loader.COMMAND_LINE_ARG_PREFIX + "-combineSplitFiles"));
}
return list;
}
private boolean shouldProcessSymbols(List<Option> options) {
return OptionUtils.getOption(PROCESS_SYMBOLS_OPTION_NAME, options, PROCESS_SYMBOLS_OPTION_DEFAULT);
return OptionUtils.getOption(PROCESS_SYMBOLS_OPTION_NAME, options,
PROCESS_SYMBOLS_OPTION_DEFAULT);
}
private boolean shouldCreateDylibSections(List<Option> options) {
return OptionUtils.getOption(CREATE_DYLIB_SECTIONS_OPTION_NAME, options, CREATE_DYLIB_SECTIONS_OPTION_DEFAULT);
return OptionUtils.getOption(CREATE_DYLIB_SECTIONS_OPTION_NAME, options,
CREATE_DYLIB_SECTIONS_OPTION_DEFAULT);
}
private boolean shouldAddRelocationEntries(List<Option> options) {
return OptionUtils.getOption(ADD_RELOCATION_ENTRIES_OPTION_NAME, options, ADD_RELOCATION_ENTRIES_OPTION_DEFAULT);
return OptionUtils.getOption(ADD_RELOCATION_ENTRIES_OPTION_NAME, options,
ADD_RELOCATION_ENTRIES_OPTION_DEFAULT);
}
private boolean shouldCombineSplitFiles(List<Option> options) {
return OptionUtils.getOption(COMBINE_SPLIT_FILES_OPTION_NAME, options,
COMBINE_SPLIT_FILES_OPTION_DEFAULT);
}
@Override
public String getName() {
return DYLD_CACHE_NAME;

View File

@@ -20,7 +20,6 @@ import java.io.IOException;
import java.util.*;
import ghidra.app.util.MemoryBlockUtils;
import ghidra.app.util.bin.BinaryReader;
import ghidra.app.util.bin.ByteProvider;
import ghidra.app.util.bin.format.macho.MachException;
import ghidra.app.util.bin.format.macho.MachHeader;
@@ -28,6 +27,7 @@ import ghidra.app.util.bin.format.macho.commands.NList;
import ghidra.app.util.bin.format.macho.dyld.*;
import ghidra.app.util.importer.MessageLog;
import ghidra.app.util.importer.MessageLogContinuesFactory;
import ghidra.app.util.opinion.DyldCacheUtils.SplitDyldCache;
import ghidra.program.database.mem.FileBytes;
import ghidra.program.model.address.Address;
import ghidra.program.model.address.AddressSpace;
@@ -44,31 +44,36 @@ import ghidra.util.task.TaskMonitor;
*/
public class DyldCacheProgramBuilder extends MachoProgramBuilder {
protected DyldCacheHeader dyldCacheHeader;
private boolean shouldProcessSymbols;
private boolean shouldCreateDylibSections;
private boolean shouldAddRelocationEntries;
private boolean shouldCombineSplitFiles;
/**
* Creates a new {@link DyldCacheProgramBuilder} based on the given information.
*
* @param program The {@link Program} to build up
* @param provider The {@link ByteProvider} that contains the DYLD Cache bytes
* @param fileBytes Where the Mach-O's bytes came from
* @param fileBytes Where the DYLD Cache's bytes came from
* @param shouldProcessSymbols True if symbols should be processed; otherwise, false
* @param shouldCreateDylibSections True if memory blocks should be created for DYLIB sections;
* otherwise, false
* @param shouldAddRelocationEntries True to create a relocation entry for each fixed up pointer in pointer chain
* @param shouldAddRelocationEntries True to create a relocation entry for each fixed up pointer
* in pointer chain
* @param shouldCombineSplitFiles True if split DYLD Cache files should be automatically
* imported and combined into 1 program; otherwise, false
* @param log The log
* @param monitor A cancelable task monitor
*/
protected DyldCacheProgramBuilder(Program program, ByteProvider provider, FileBytes fileBytes,
boolean shouldProcessSymbols, boolean shouldCreateDylibSections,
boolean shouldAddRelocationEntries, MessageLog log, TaskMonitor monitor) {
boolean shouldAddRelocationEntries, boolean shouldCombineSplitFiles, MessageLog log,
TaskMonitor monitor) {
super(program, provider, fileBytes, log, monitor);
this.shouldProcessSymbols = shouldProcessSymbols;
this.shouldCreateDylibSections = shouldCreateDylibSections;
this.shouldAddRelocationEntries = shouldAddRelocationEntries;
this.shouldCombineSplitFiles = shouldCombineSplitFiles;
}
/**
@@ -80,44 +85,62 @@ public class DyldCacheProgramBuilder extends MachoProgramBuilder {
* @param shouldProcessSymbols True if symbols should be processed; otherwise, false
* @param shouldCreateDylibSections True if memory blocks should be created for DYLIB sections;
* otherwise, false
* @param addRelocationEntries True to create a relocation entry for each fixed up pointer in pointer chain
* @param addRelocationEntries True to create a relocation entry for each fixed up pointer in
* pointer chain; otherwise, false
* @param shouldCombineSplitFiles True if split DYLD Cache files should be automatically
* imported and combined into 1 program; otherwise, false
* @param log The log
* @param monitor A cancelable task monitor
* @throws Exception if a problem occurs
*/
public static void buildProgram(Program program, ByteProvider provider, FileBytes fileBytes,
boolean shouldProcessSymbols, boolean shouldCreateDylibSections,
boolean addRelocationEntries, MessageLog log, TaskMonitor monitor) throws Exception {
DyldCacheProgramBuilder dyldCacheProgramBuilder =
new DyldCacheProgramBuilder(program, provider, fileBytes, shouldProcessSymbols,
shouldCreateDylibSections, addRelocationEntries, log, monitor);
boolean addRelocationEntries, boolean shouldCombineSplitFiles, MessageLog log,
TaskMonitor monitor) throws Exception {
DyldCacheProgramBuilder dyldCacheProgramBuilder = new DyldCacheProgramBuilder(program,
provider, fileBytes, shouldProcessSymbols, shouldCreateDylibSections,
addRelocationEntries, shouldCombineSplitFiles, log, monitor);
dyldCacheProgramBuilder.build();
}
@Override
protected void build() throws Exception {
monitor.setMessage("Parsing DYLD Cache header ...");
monitor.initialize(1);
dyldCacheHeader = new DyldCacheHeader(new BinaryReader(provider, true));
dyldCacheHeader.parseFromFile(shouldProcessSymbols, log, monitor);
monitor.incrementProgress(1);
try (SplitDyldCache splitDyldCache = new SplitDyldCache(provider, shouldProcessSymbols,
shouldCombineSplitFiles, log, monitor)) {
setDyldCacheImageBase();
processDyldCacheMemoryBlocks();
fixPageChains();
markupHeaders();
markupBranchIslands();
createSymbols();
processDylibs();
// Set image base
setDyldCacheImageBase(splitDyldCache.getDyldCacheHeader(0));
// Setup memory
for (int i = 0; i < splitDyldCache.size(); i++) {
DyldCacheHeader header = splitDyldCache.getDyldCacheHeader(i);
ByteProvider bp = splitDyldCache.getProvider(i);
processDyldCacheMemoryBlocks(header, bp);
}
// Perform additional DYLD processing
for (int i = 0; i < splitDyldCache.size(); i++) {
DyldCacheHeader header = splitDyldCache.getDyldCacheHeader(i);
ByteProvider bp = splitDyldCache.getProvider(i);
fixPageChains(header);
markupHeaders(header);
markupBranchIslands(header, bp);
createSymbols(header);
processDylibs(header, bp);
}
}
}
/**
* Sets the program's image base.
*
* @param dyldCacheHeader The "base" DYLD Cache header
* @throws Exception if there was a problem setting the program's image base
*/
private void setDyldCacheImageBase() throws Exception {
private void setDyldCacheImageBase(DyldCacheHeader dyldCacheHeader) throws Exception {
monitor.setMessage("Setting image base...");
monitor.initialize(1);
program.setImageBase(space.getAddress(dyldCacheHeader.getBaseAddress()), true);
@@ -127,20 +150,24 @@ public class DyldCacheProgramBuilder extends MachoProgramBuilder {
/**
* Processes the DYLD Cache's memory mappings and creates memory blocks for them.
*
* @param dyldCacheHeader The {@link DyldCacheHeader}
* @param bp The corresponding {@link ByteProvider}
* @throws Exception if there was a problem creating the memory blocks
*/
private void processDyldCacheMemoryBlocks() throws Exception {
private void processDyldCacheMemoryBlocks(DyldCacheHeader dyldCacheHeader, ByteProvider bp)
throws Exception {
List<DyldCacheMappingInfo> mappingInfos = dyldCacheHeader.getMappingInfos();
monitor.setMessage("Processing DYLD mapped memory blocks...");
monitor.initialize(mappingInfos.size());
FileBytes fb = MemoryBlockUtils.createFileBytes(program, bp, monitor);
long endOfMappedOffset = 0;
for (DyldCacheMappingInfo mappingInfo : mappingInfos) {
long offset = mappingInfo.getFileOffset();
long size = mappingInfo.getSize();
MemoryBlockUtils.createInitializedBlock(program, false, "DYLD",
space.getAddress(mappingInfo.getAddress()), fileBytes, offset, size, "", "",
space.getAddress(mappingInfo.getAddress()), fb, offset, size, "", "",
mappingInfo.isRead(), mappingInfo.isWrite(), mappingInfo.isExecute(), log);
if (offset + size > endOfMappedOffset) {
endOfMappedOffset = offset + size;
}
@@ -148,21 +175,22 @@ public class DyldCacheProgramBuilder extends MachoProgramBuilder {
monitor.incrementProgress(1);
}
if (endOfMappedOffset < provider.length()) {
if (endOfMappedOffset < bp.length()) {
monitor.setMessage("Processing DYLD unmapped memory block...");
MemoryBlockUtils.createInitializedBlock(program, true, "FILE",
AddressSpace.OTHER_SPACE.getAddress(endOfMappedOffset), fileBytes,
endOfMappedOffset, provider.length() - endOfMappedOffset,
"Useful bytes that don't get mapped into memory", "", false, false, false, log);
AddressSpace.OTHER_SPACE.getAddress(endOfMappedOffset), fb, endOfMappedOffset,
bp.length() - endOfMappedOffset, "Useful bytes that don't get mapped into memory",
"", false, false, false, log);
}
}
/**
* Marks up the DYLD Cache headers.
*
* @param dyldCacheHeader The {@link DyldCacheHeader}
* @throws Exception if there was a problem marking up the headers
*/
private void markupHeaders() throws Exception {
private void markupHeaders(DyldCacheHeader dyldCacheHeader) throws Exception {
monitor.setMessage("Marking up DYLD headers...");
monitor.initialize(1);
dyldCacheHeader.parseFromMemory(program, space, log, monitor);
@@ -173,15 +201,18 @@ public class DyldCacheProgramBuilder extends MachoProgramBuilder {
/**
* Marks up the DYLD Cache branch islands.
*
* @param dyldCacheHeader The {@link DyldCacheHeader}
* @param bp The corresponding {@link ByteProvider}
* @throws Exception if there was a problem marking up the branch islands.
*/
private void markupBranchIslands() throws Exception {
private void markupBranchIslands(DyldCacheHeader dyldCacheHeader, ByteProvider bp)
throws Exception {
monitor.setMessage("Marking up DYLD branch islands...");
monitor.initialize(dyldCacheHeader.getBranchPoolAddresses().size());
for (Long addr : dyldCacheHeader.getBranchPoolAddresses()) {
try {
MachHeader header =
MachHeader.createMachHeader(MessageLogContinuesFactory.create(log), provider,
MachHeader.createMachHeader(MessageLogContinuesFactory.create(log), bp,
addr - dyldCacheHeader.getBaseAddress());
header.parse();
super.markupHeaders(header, space.getAddress(addr));
@@ -197,9 +228,10 @@ public class DyldCacheProgramBuilder extends MachoProgramBuilder {
/**
* Creates the DYLD Cache symbols.
*
* @param dyldCacheHeader The {@link DyldCacheHeader}
* @throws Exception if there was a problem creating the symbols
*/
private void createSymbols() throws Exception {
private void createSymbols(DyldCacheHeader dyldCacheHeader) throws Exception {
DyldCacheLocalSymbolsInfo localSymbolsInfo = dyldCacheHeader.getLocalSymbolsInfo();
if (localSymbolsInfo != null) {
monitor.setMessage("Processing DYLD symbols...");
@@ -225,10 +257,12 @@ public class DyldCacheProgramBuilder extends MachoProgramBuilder {
/**
* Fixes any chained pointers within each of the data pages.
*
* @param dyldCacheHeader The {@link DyldCacheHeader}
* @throws MemoryAccessException if there was a problem reading/writing memory.
* @throws CancelledException if user cancels
*/
private void fixPageChains() throws MemoryAccessException, CancelledException {
private void fixPageChains(DyldCacheHeader dyldCacheHeader)
throws MemoryAccessException, CancelledException {
// locate slide Info
List<DyldCacheSlideInfoCommon> slideInfos = dyldCacheHeader.getSlideInfos();
for (DyldCacheSlideInfoCommon info : slideInfos) {
@@ -243,19 +277,22 @@ public class DyldCacheProgramBuilder extends MachoProgramBuilder {
* Processes the DYLD Cache's DYLIB files. This will mark up the DYLIB files, add them to the
* program tree, and make memory blocks for them.
*
* @param dyldCacheHeader The {@link DyldCacheHeader}
* @param bp The corresponding {@link ByteProvider}
* @throws Exception if there was a problem processing the DYLIB files
*/
private void processDylibs() throws Exception {
private void processDylibs(DyldCacheHeader dyldCacheHeader, ByteProvider bp) throws Exception {
// Create an "info" object for each DyldCache DYLIB, which will make processing them
// easier
monitor.setMessage("Parsing DYLIB's...");
monitor.initialize(dyldCacheHeader.getImageInfos().size());
TreeSet<DyldCacheMachoInfo> infoSet =
new TreeSet<>((a, b) -> a.headerAddr.compareTo(b.headerAddr));
for (DyldCacheImageInfo dyldCacheImageInfo : dyldCacheHeader.getImageInfos()) {
infoSet.add(new DyldCacheMachoInfo(provider,
dyldCacheImageInfo.getAddress() - dyldCacheHeader.getBaseAddress(),
space.getAddress(dyldCacheImageInfo.getAddress()), dyldCacheImageInfo.getPath()));
List<DyldCacheImage> mappedImages = dyldCacheHeader.getMappedImages();
monitor.initialize(mappedImages.size());
for (DyldCacheImage mappedImage : mappedImages) {
infoSet.add(new DyldCacheMachoInfo(bp,
mappedImage.getAddress() - dyldCacheHeader.getBaseAddress(),
space.getAddress(mappedImage.getAddress()), mappedImage.getPath()));
monitor.checkCanceled();
monitor.incrementProgress(1);
}
@@ -278,7 +315,7 @@ public class DyldCacheProgramBuilder extends MachoProgramBuilder {
do {
DyldCacheMachoInfo next = iter.hasNext() ? iter.next() : null;
try {
curr.addToProgramTree(next);
curr.addToProgramTree(dyldCacheHeader, next);
}
catch (DuplicateNameException exc) {
log.appendException(exc);
@@ -357,11 +394,13 @@ public class DyldCacheProgramBuilder extends MachoProgramBuilder {
/**
* Adds an entry to the program tree for this Mach-O
*
* @param dyldCacheHeader The DYLD Cache header
* @param next The Mach-O that comes directly after this one. Could be null if this
* is the last one.
* @throws Exception If there was a problem adding this Mach-O to the program tree
*/
public void addToProgramTree(DyldCacheMachoInfo next) throws Exception {
public void addToProgramTree(DyldCacheHeader dyldCacheHeader, DyldCacheMachoInfo next)
throws Exception {
ProgramFragment fragment = listing.getDefaultRootModule().createFragment(path);
if (next != null) {
fragment.move(headerAddr, next.headerAddr.subtract(1));

View File

@@ -15,13 +15,19 @@
*/
package ghidra.app.util.opinion;
import java.io.IOException;
import java.io.*;
import java.nio.file.AccessMode;
import java.util.*;
import ghidra.app.util.bin.ByteProvider;
import ghidra.app.util.bin.*;
import ghidra.app.util.bin.format.macho.dyld.DyldArchitecture;
import ghidra.app.util.bin.format.macho.dyld.DyldCacheHeader;
import ghidra.app.util.importer.MessageLog;
import ghidra.program.model.address.Address;
import ghidra.program.model.listing.Program;
import ghidra.program.model.mem.MemoryAccessException;
import ghidra.util.exception.CancelledException;
import ghidra.util.task.TaskMonitor;
/**
* Utility methods for working with Mach-O DYLD shared cache binaries.
@@ -89,4 +95,124 @@ public class DyldCacheUtils {
return false;
}
/**
* Class to store a "split" DYLD Cache, which is split across several files (base file, .1, .2,
* .symbols, etc).
*/
public static class SplitDyldCache implements Closeable {
List<ByteProvider> providers = new ArrayList<>();
List<DyldCacheHeader> headers = new ArrayList<>();
/**
* Creates a new {@link SplitDyldCache}
*
* @param baseProvider The {@link ByteProvider} of the "base" DYLD Cache file
* @param shouldProcessSymbols True if symbols should be processed; otherwise, false
* @param shouldCombineSplitFiles True if split DYLD Cache files should be automatically combined into one DYLD Cache; false if only the base file should be processed
* @param log The log
* @param monitor A cancelable task monitor
* @throws IOException If there was an IO-related issue with processing the split DYLD Cache
* @throws CancelledException If the user canceled the operation
*/
public SplitDyldCache(ByteProvider baseProvider, boolean shouldProcessSymbols,
boolean shouldCombineSplitFiles, MessageLog log, TaskMonitor monitor)
throws IOException, CancelledException {
// Setup "base" DYLD Cache
monitor.setMessage("Parsing " + baseProvider.getName() + " headers...");
providers.add(baseProvider);
DyldCacheHeader baseHeader = new DyldCacheHeader(new BinaryReader(baseProvider, true));
baseHeader.parseFromFile(shouldProcessSymbols, log, monitor);
headers.add(baseHeader);
// Setup additional "split" DYLD Caches (if applicable)
for (File splitFile : getSplitDyldCacheFiles(baseProvider, shouldCombineSplitFiles)) {
monitor.setMessage("Parsing " + splitFile.getName() + " headers...");
ByteProvider provider = new FileByteProvider(splitFile, null, AccessMode.READ);
if (!DyldCacheUtils.isDyldCache(provider)) {
continue;
}
providers.add(provider);
DyldCacheHeader splitHeader = new DyldCacheHeader(new BinaryReader(provider, true));
splitHeader.parseFromFile(shouldProcessSymbols, log, monitor);
headers.add(splitHeader);
log.appendMsg("Including split DYLD: " + splitFile.getName());
}
}
/**
* Gets the i'th {@link ByteProvider} in the split DYLD Cache
*
* @param i The index of the {@link ByteProvider} to get
* @return The i'th {@link ByteProvider} in the split DYLD Cache
*/
public ByteProvider getProvider(int i) {
return providers.get(i);
}
/**
* Gets the i'th {@link DyldCacheHeader} in the split DYLD Cache
*
* @param i The index of the {@link DyldCacheHeader} to get
* @return The i'th {@link DyldCacheHeader} in the split DYLD Cache
*/
public DyldCacheHeader getDyldCacheHeader(int i) {
return headers.get(i);
}
/**
* Gets the number of split DYLD Cache files
*
* @return The number of split DYLD Cache files
*/
public int size() {
return providers.size();
}
@Override
public void close() throws IOException {
// Assume someone else is responsible for closing the base provider that was passed
// in at construction
for (int i = 1; i < providers.size(); i++) {
providers.get(i).close();
}
}
/**
* Gets a {@link List} of extra split DYLD Cache files to load, sorted by name (base
* DYLD Cache file not included)
*
* @param baseProvider The base {@link ByteProvider} that contains the DYLD Cache bytes
* @param shouldCombineSplitFiles True if split DYLD Cache files should be automatically
* combined into one DYLD Cache; false if only the base file should be processed
* @return A {@link List} of extra split DYLD Cache files to load, sorted by name (base
* DYLD Cache file not included).
*/
private List<File> getSplitDyldCacheFiles(ByteProvider baseProvider,
boolean shouldCombineSplitFiles) {
File file = baseProvider.getFile();
if (file != null && shouldCombineSplitFiles) {
String baseName = file.getName();
File[] splitFiles = file.getParentFile().listFiles(f -> {
if (!f.getName().startsWith(baseName)) {
return false;
}
if (f.getName().equals(baseName)) {
return false;
}
if (f.getName().toLowerCase().endsWith(".map")) {
return false;
}
return true;
});
if (splitFiles != null) {
List<File> list = Arrays.asList(splitFiles);
Collections.sort(list);
return list;
}
}
return Collections.emptyList();
}
}
}
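A hedged usage sketch of the new SplitDyldCache helper, using only the constructor and accessors introduced above (assumes Ghidra is on the classpath and that args[0] points at the base cache file; error handling trimmed):

import java.io.File;
import java.io.IOException;
import java.nio.file.AccessMode;

import ghidra.app.util.bin.ByteProvider;
import ghidra.app.util.bin.FileByteProvider;
import ghidra.app.util.importer.MessageLog;
import ghidra.app.util.opinion.DyldCacheUtils.SplitDyldCache;
import ghidra.util.exception.CancelledException;
import ghidra.util.task.TaskMonitor;

public class SplitDyldCacheDemo {
    public static void main(String[] args) throws IOException, CancelledException {
        File base = new File(args[0]); // base DYLD Cache file; .1, .2, .symbols found next to it
        MessageLog log = new MessageLog();
        try (ByteProvider bp = new FileByteProvider(base, null, AccessMode.READ);
                SplitDyldCache split =
                    new SplitDyldCache(bp, false, true, log, TaskMonitor.DUMMY)) {
            for (int i = 0; i < split.size(); i++) {
                System.out.println(split.getProvider(i).getName() + ": " +
                    split.getDyldCacheHeader(i).getMappedImages().size() + " mapped images");
            }
        }
    }
}

SplitDyldCache.close() only closes the extra providers it opened, so the base provider is closed by the caller's try-with-resources, matching the note in its close() above.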

View File

@@ -27,7 +27,7 @@
import java.util.List;
import classrecovery.RecoveredClassUtils;
import classrecovery.RecoveredClassHelper;
import ghidra.app.script.GhidraScript;
import ghidra.program.model.data.FunctionDefinition;
import ghidra.program.model.data.Structure;
@@ -44,17 +44,17 @@ public class ApplyClassFunctionDefinitionUpdatesScript extends GhidraScript {
return;
}
RecoveredClassUtils classUtils = new RecoveredClassUtils(currentProgram, currentLocation,
RecoveredClassHelper classHelper = new RecoveredClassHelper(currentProgram, currentLocation,
state.getTool(), this, false, false, false, false, monitor);
Namespace classNamespace = classUtils.getClassNamespace(currentAddress);
Namespace classNamespace = classHelper.getClassNamespace(currentAddress);
if (classNamespace == null) {
println(
"Either cannot retrieve class namespace or cursor is not in a member of a class namepace");
return;
}
List<Symbol> classVftableSymbols = classUtils.getClassVftableSymbols(classNamespace);
List<Symbol> classVftableSymbols = classHelper.getClassVftableSymbols(classNamespace);
if (classVftableSymbols.isEmpty()) {
println("There are no vftables in this class");
return;
@@ -64,7 +64,7 @@ public class ApplyClassFunctionDefinitionUpdatesScript extends GhidraScript {
"Applying differing function definitions for class " + classNamespace.getName(true));
List<Object> changedItems =
classUtils.applyNewFunctionDefinitions(classNamespace, classVftableSymbols);
classHelper.applyNewFunctionDefinitions(classNamespace, classVftableSymbols);
if (changedItems.isEmpty()) {
println("No differences found for class " + classNamespace.getName(true) +
@@ -72,10 +72,10 @@ public class ApplyClassFunctionDefinitionUpdatesScript extends GhidraScript {
return;
}
List<Structure> structuresOnList = classUtils.getStructuresOnList(changedItems);
List<Structure> structuresOnList = classHelper.getStructuresOnList(changedItems);
List<FunctionDefinition> functionDefinitionsOnList =
classUtils.getFunctionDefinitionsOnList(changedItems);
List<Function> functionsOnList = classUtils.getFunctionsOnList(changedItems);
classHelper.getFunctionDefinitionsOnList(changedItems);
List<Function> functionsOnList = classHelper.getFunctionsOnList(changedItems);
println();
println("Updated structures:");

View File

@@ -27,7 +27,7 @@
import java.util.List;
import classrecovery.RecoveredClassUtils;
import classrecovery.RecoveredClassHelper;
import ghidra.app.script.GhidraScript;
import ghidra.program.model.data.FunctionDefinition;
import ghidra.program.model.data.Structure;
@@ -44,17 +44,17 @@ public class ApplyClassFunctionSignatureUpdatesScript extends GhidraScript {
return;
}
RecoveredClassUtils classUtils = new RecoveredClassUtils(currentProgram, currentLocation,
RecoveredClassHelper classHelper = new RecoveredClassHelper(currentProgram, currentLocation,
state.getTool(), this, false, false, false, false, monitor);
Namespace classNamespace = classUtils.getClassNamespace(currentAddress);
Namespace classNamespace = classHelper.getClassNamespace(currentAddress);
if (classNamespace == null) {
println(
"Either cannot retrieve class namespace or cursor is not in a member of a class namepace");
return;
}
List<Symbol> classVftableSymbols = classUtils.getClassVftableSymbols(classNamespace);
List<Symbol> classVftableSymbols = classHelper.getClassVftableSymbols(classNamespace);
if (classVftableSymbols.isEmpty()) {
println("There are no vftables in this class");
return;
@@ -64,7 +64,7 @@ public class ApplyClassFunctionSignatureUpdatesScript extends GhidraScript {
classNamespace.getName(true));
List<Object> changedItems =
classUtils.applyNewFunctionSignatures(classNamespace, classVftableSymbols);
classHelper.applyNewFunctionSignatures(classNamespace, classVftableSymbols);
if (changedItems.isEmpty()) {
println("No differences found for class " + classNamespace.getName(true) +
@@ -72,10 +72,10 @@ public class ApplyClassFunctionSignatureUpdatesScript extends GhidraScript {
return;
}
List<Structure> structuresOnList = classUtils.getStructuresOnList(changedItems);
List<Structure> structuresOnList = classHelper.getStructuresOnList(changedItems);
List<FunctionDefinition> functionDefinitionsOnList =
classUtils.getFunctionDefinitionsOnList(changedItems);
List<Function> functionsOnList = classUtils.getFunctionsOnList(changedItems);
classHelper.getFunctionDefinitionsOnList(changedItems);
List<Function> functionsOnList = classHelper.getFunctionsOnList(changedItems);
println();
println("Updated structures:");

View File

@@ -78,7 +78,8 @@ class EditStructureUtils {
// component lengths are equal if it gets here
// if containing is not undefined then return false because the components would be
// incompatible types
if (!containingComDt.getName().startsWith("undefined")) {
if (!Undefined.isUndefined(containingComDt)) {
return false;
}
@@ -109,7 +110,7 @@ class EditStructureUtils {
return false;
}
DataType dataType = component.getDataType();
if (dataType.getName().equals("undefined") && dataType.getLength() == 1) {
if (dataType == DataType.DEFAULT) {
continue;
}
return false;
@@ -180,17 +181,6 @@ class EditStructureUtils {
return false;
}
/**
* Method to determine if data type is an undefined data type of any size
* @param dataType the given data type
* @return true if given data type is undefined of any size, false otherwise
*/
static boolean isUndefined(DataType dataType) {
if (dataType.getName().contains("undefined")) {
return true;
}
return false;
}
/**
* Method to determine if there are at least the given length of undefined (any size) components
@@ -241,6 +231,8 @@ class EditStructureUtils {
* at the given offset, don't replace. If there is undefined data there then replace
* it with the data type. If the structure is empty, insert the data type at the given offset.
* If the structure is not big enough and not empty, grow it so there is room to replace.
* See {@link #canAdd(Structure, int, int, TaskMonitor)} for ensuring operation will be
* successful.
* @param structure the given structure
* @param offset the offset to add a field
* @param dataType the data type to add to the field at the given offset
@@ -253,76 +245,74 @@ class EditStructureUtils {
static Structure addDataTypeToStructure(Structure structure, int offset,
DataType dataType, String fieldName, TaskMonitor monitor)
throws CancelledException, IllegalArgumentException {
int dataTypeLength = dataType.getLength();
int endOfDataTypeInStruct = offset + dataTypeLength;
int roomForData = structure.getLength() - endOfDataTypeInStruct;
// FIXME: This will not work for structures where packing is enabled - not sure how to handle
// if structure isn't defined insert
if (structure.isNotYetDefined()) {
structure.insertAtOffset(offset, dataType, dataTypeLength, fieldName, null);
return structure;
if (structure.isPackingEnabled()) {
throw new IllegalArgumentException(
"Packed structures are not supported by this method");
}
// if not enough room, grow the structure
if (roomForData < 0) {
structure.growStructure(0 - roomForData);
if (structure.isZeroLength() || offset >= structure.getLength()) {
structure.insertAtOffset(offset, dataType, -1, fieldName, null);
}
// else replace only if data already there are enough undefined data types at
// that offset to fit the new data type
if (hasEnoughUndefined1sAtOffset(structure, offset, dataTypeLength, monitor)) {
structure.replaceAtOffset(offset, dataType, dataTypeLength, fieldName, null);
else {
structure.replaceAtOffset(offset, dataType, -1, fieldName, null);
}
return structure;
}
/**
* Method to determine if the given structure has room at the given offset to have a component of the given length added to it
* Method to determine if the given structure has room at the given offset to have a component
* of the given length added to it. This is only valid for non-packed structures.
* @param structureDataType the given structure
* @param offset the offset to check for available room
* @param lengthToAdd the length of bytes wanted to add at the offset
* @param monitor task monitor
* @return true if the given structure has room at the given offset to have a component of the given length added to it
* @return true if the given structure has room at the given offset to have a component of the
* given length added to it or if the offset is beyond the end of the structure so that the
* structure can be grown
* @throws CancelledException if cancelled
* @throws IllegalArgumentException if a packed structure is passed in
*/
static boolean canAdd(Structure structureDataType, int offset, int lengthToAdd,
TaskMonitor monitor)
throws CancelledException {
TaskMonitor monitor) throws CancelledException {
// not big enough so return true so it can be grown
DataTypeComponent component = structureDataType.getComponentAt(offset);
if (structureDataType.isPackingEnabled()) {
throw new IllegalArgumentException(
"Packed structures are not supported by this method");
}
DataTypeComponent component = structureDataType.getComponentContaining(offset);
// structure not big enough to contain the offset so return true so it can be grown
if (component == null) {
return true;
}
// no matter what size, if the data type at the offset is defined, return false
// so it is not replaced
if (!component.getDataType().getName().equals("undefined")) {
// if the offset is in the middle of an internal component then return false
if (component.getOffset() != offset) {
return false;
}
// if structure isn't big enough but what is there is all undefined
// return true to grow it
int structLen = structureDataType.getLength();
int spaceAvailable = structLen - (offset + lengthToAdd);
if (spaceAvailable < 0) {
int overflow = 0 - spaceAvailable;
return hasEnoughUndefined1sAtOffset(structureDataType, offset, structLen - overflow,
monitor);
// no matter what size, if the data type at the offset is defined, return false
// so it is not replaced
if (component.getDataType() != DataType.DEFAULT) {
return false;
}
// if structure is big enough and there is room at the offset return true
return hasEnoughUndefined1sAtOffset(structureDataType, offset, lengthToAdd, monitor);
if (lengthToAdd > 1) {
DataTypeComponent nextDefinedComponent =
structureDataType.getDefinedComponentAtOrAfterOffset(offset + 1);
if (nextDefinedComponent == null) {
return true;
}
int available = nextDefinedComponent.getOffset() - offset;
return available >= lengthToAdd;
}
return true;
}
/**
* Method to retrieve the number of undefined size 1 components in the given structure before the given offset
* @param structure the given structure
@@ -372,8 +362,7 @@ class EditStructureUtils {
while (index < structure.getLength()) {
monitor.checkCanceled();
DataTypeComponent component = structure.getComponentAt(index);
if (component.getDataType().getName().equals("undefined") &&
component.getLength() == 1) {
if (component.getDataType() == DataType.DEFAULT) {
index++;
numUndefineds++;
}
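The rewritten helpers lean on the non-packed Structure API: undefined filler components are DataType.DEFAULT, getDefinedComponentAtOrAfterOffset()/getComponentContaining() locate room, and replaceAtOffset() overwrites it. A small sketch of that pattern outside EditStructureUtils (assumes Ghidra is on the classpath; not the script's actual helper):

import ghidra.program.model.data.*;

public class StructureEditSketch {
    public static void main(String[] args) {
        Structure struct = new StructureDataType("demo", 8); // 8 bytes of undefined filler
        DataType dword = DWordDataType.dataType;

        // Room check in the spirit of canAdd(): is the next defined component far enough away?
        DataTypeComponent next = struct.getDefinedComponentAtOrAfterOffset(4);
        boolean canAdd = next == null || next.getOffset() - 4 >= dword.getLength();

        if (canAdd) {
            struct.replaceAtOffset(4, dword, -1, "field_4", null);
        }
        System.out.println(struct.getComponentContaining(4).getDataType().getName()); // dword
    }
}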

View File

@@ -31,7 +31,7 @@ import ghidra.util.Msg;
import ghidra.util.exception.*;
import ghidra.util.task.TaskMonitor;
public class RTTIClassRecoverer extends RecoveredClassUtils {
public class RTTIClassRecoverer extends RecoveredClassHelper {
boolean programHasRTTIApplied = false;

View File

@@ -2938,6 +2938,15 @@ public class RTTIGccClassRecoverer extends RTTIClassRecoverer {
}
//NEW:
classStructureDataType =
addClassVftables(classStructureDataType, recoveredClass, vfPointerDataTypes);
//NEW: unused at this point until something figures out how to create them and where to
// put them
classStructureDataType =
addVbtableToClassStructure(recoveredClass, classStructureDataType, true);
if (classStructureDataType.getNumComponents() == classStructureDataType
.getNumDefinedComponents()) {
classStructureDataType.setPackingEnabled(true);
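The check above turns packing on only once getNumComponents() equals getNumDefinedComponents(), i.e. no undefined filler remains in the non-packed layout. A brief stand-alone illustration (assumes Ghidra is on the classpath):

import ghidra.program.model.data.*;

public class PackingCheckSketch {
    public static void main(String[] args) {
        Structure s = new StructureDataType("cls", 8); // non-packed, 8 undefined bytes
        s.replaceAtOffset(0, DWordDataType.dataType, -1, "vftablePtr", null);
        // 4 undefined filler bytes remain, so the counts differ and packing stays off
        System.out.println(s.getNumComponents() + " vs " + s.getNumDefinedComponents());
        s.replaceAtOffset(4, DWordDataType.dataType, -1, "data", null);
        if (s.getNumComponents() == s.getNumDefinedComponents()) {
            s.setPackingEnabled(true); // safe: every byte is now a defined component
        }
    }
}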

View File

@@ -2278,7 +2278,10 @@ public class RTTIWindowsClassRecoverer extends RTTIClassRecoverer {
// if cannot recover the base class array return the existing or computed one instead
// so user will at least have some information like correct size and some members
if (baseClassArrayData == null) {
classStructureDataType = createDefaultStructure(classStructure, classStructureDataType);
classStructureDataType.replaceWith(classStructure);
classStructureDataType = (Structure) dataTypeManager.addDataType(classStructureDataType,
DataTypeConflictHandler.DEFAULT_HANDLER);
return classStructureDataType;
}
@@ -2485,183 +2488,14 @@ public class RTTIWindowsClassRecoverer extends RTTIClassRecoverer {
return classStructureDataType;
}
private Structure addClassVftables(Structure classStructureDataType,
RecoveredClass recoveredClass, Map<Address, DataType> vfPointerDataTypes)
throws CancelledException, IllegalArgumentException {
if (vfPointerDataTypes == null) {
return classStructureDataType;
}
if (!isClassOffsetToVftableMapComplete(recoveredClass)) {
Msg.debug(this,
"class vftable offset map for " + recoveredClass.getName() + " is not complete");
}
// iterate over the set of offsets to vftables for the class and if nothing
// is already at the offset, add the vftables
Map<Integer, Address> classOffsetToVftableMap = recoveredClass.getClassOffsetToVftableMap();
Set<Integer> classVftableOffsets = classOffsetToVftableMap.keySet();
if (classVftableOffsets.isEmpty()) {
return classStructureDataType;
}
for (Integer offset : classVftableOffsets) {
monitor.checkCanceled();
Address vftableAddress = classOffsetToVftableMap.get(offset);
int vftableOffset = offset.intValue();
DataType classVftablePointer = vfPointerDataTypes.get(vftableAddress);
boolean addedVftablePointer = false;
while (!addedVftablePointer) {
monitor.checkCanceled();
// if enough empty bytes - add class vftable pointer
if (EditStructureUtils.canAdd(classStructureDataType, vftableOffset,
classVftablePointer.getLength(), monitor)) {
classStructureDataType =
EditStructureUtils.addDataTypeToStructure(classStructureDataType,
vftableOffset,
classVftablePointer, CLASS_VTABLE_PTR_FIELD_EXT, monitor);
addedVftablePointer = true;
continue;
}
// if already has a base class vftable pointer replace with main class vftablePtr
// get the item at that location
//NOTE: this returns the component containing that offset so need to get the
// offset of the start of the component
//TODO: maybe updated to getComponentContaining
DataTypeComponent currentComponent =
classStructureDataType.getComponentAt(vftableOffset);
int componentOffset = currentComponent.getOffset();
if (currentComponent.getFieldName().endsWith(CLASS_VTABLE_PTR_FIELD_EXT)) {
classStructureDataType.replaceAtOffset(vftableOffset, classVftablePointer,
classVftablePointer.getLength(), CLASS_VTABLE_PTR_FIELD_EXT, "");
addedVftablePointer = true;
continue;
}
// ToDO: check size first
if (!(currentComponent.getDataType() instanceof Structure)) {
// Msg.debug(this,
// "Overwriting non-empty, non-vftable, non-struct at offset " +
// offset.intValue() + " of " + classStructureDataType.getName());
classStructureDataType.replaceAtOffset(vftableOffset, classVftablePointer,
classVftablePointer.getLength(), CLASS_VTABLE_PTR_FIELD_EXT, "");
addedVftablePointer = true;
continue;
}
// if there is a structure at the offset, split it into pieces then
// loop again to try to place vftable over either empty bytes or base vftableptr
if (currentComponent.getDataType() instanceof Structure) {
DataType currentDT = currentComponent.getDataType();
Structure internalStruct = (Structure) currentDT;
int sizeBefore = internalStruct.getLength();
internalStruct = splitStructure(internalStruct);
int sizeAfter = internalStruct.getLength();
if (sizeBefore != sizeAfter) {
Msg.debug(this,
"Splitting internal Struct " + internalStruct.getName() + " in " +
classStructureDataType.getName() + " at offset " +
offset.intValue());
}
DataTypeComponent[] components = internalStruct.getComponents();
for (DataTypeComponent component : components) {
int innerOffset = component.getOffset();
int replaceOffset = component.getOffset() + componentOffset;
if (classStructureDataType.getLength() <= replaceOffset) {
Msg.debug(this,
classStructureDataType.getName() + " trying to place component " +
component.getFieldName() + " at offset " +
component.getOffset());
}
// add indiv components of internal structure to the outer structure
classStructureDataType.replaceAtOffset(componentOffset + innerOffset,
component.getDataType(), component.getLength(),
component.getFieldName(), "");
}
}
}
}
return classStructureDataType;
}
private Structure splitStructure(Structure structure)
throws CancelledException, IllegalArgumentException {
structure.setPackingEnabled(false);
if (structure.isNotYetDefined() || isEmptyDefaultSizeStructure(structure)) {
DataType undefinedDataType = new Undefined4DataType();
if (defaultPointerSize == 8) {
undefinedDataType = new Undefined8DataType();
}
structure.replaceAtOffset(0, undefinedDataType, undefinedDataType.getLength(),
structure.getName() + "_expanded", "");
}
else {
DataTypeComponent[] interalStructComponents = structure.getComponents();
// add indiv components of internal structure to the outer structure
for (DataTypeComponent component : interalStructComponents) {
structure.replaceAtOffset(component.getOffset(), component.getDataType(),
component.getLength(), component.getFieldName(), "");
}
}
if (structure.getNumComponents() == structure.getNumDefinedComponents()) {
structure.setPackingEnabled(true);
}
return structure;
}
private boolean isEmptyDefaultSizeStructure(Structure structure) throws CancelledException {
if (structure.getLength() != defaultPointerSize) {
return false;
}
int numUndefined1s =
EditStructureUtils.getNumberOfUndefinedsStartingAtOffset(structure, 0, monitor);
if (structure.getLength() == numUndefined1s) {
return true;
}
return false;
}
/**
* Method to return the offset of the given class's single virtual parent
* @param recoveredClass the given class
* @return the offset of the single virtual parent or null if there is not a single virtual parent
* or if there is no mapping in the offset map for that parent
* @throws CancelledException if cancelled
* @throws AddressOutOfBoundsException
* @throws MemoryAccessException
* @throws AddressOutOfBoundsException if trying to access an address that does not exist in program
* @throws MemoryAccessException if trying to access memory that can't be accessed
*/
public Integer getSingleVirtualParentOffset(RecoveredClass recoveredClass)
throws CancelledException, MemoryAccessException, AddressOutOfBoundsException {
@@ -2782,41 +2616,6 @@ public class RTTIWindowsClassRecoverer extends RTTIClassRecoverer {
return baseClass;
}
/**
* Method to add a pointer to the class vbtable to the given class's class structure
* @param recoveredClass the given class
* @param classStructureDataType the given class's class structure data type
* @return the updated class structure
* @throws CancelledException if cancelled
*/
private Structure addVbtableToClassStructure(RecoveredClass recoveredClass,
Structure classStructureDataType, boolean overwrite) throws CancelledException {
Structure vbtableStructure = recoveredClass.getVbtableStructure();
if (vbtableStructure != null) {
int vbtableOffset = recoveredClass.getVbtableOffset();
DataType vbaseStructPointer = dataTypeManager.getPointer(vbtableStructure);
int dataLength = vbaseStructPointer.getLength();
if (EditStructureUtils.canAdd(classStructureDataType, vbtableOffset, dataLength,
monitor)) {
classStructureDataType =
EditStructureUtils.addDataTypeToStructure(classStructureDataType,
vbtableOffset, vbaseStructPointer, "vbtablePtr", monitor);
}
else if (overwrite) {
classStructureDataType.replaceAtOffset(vbtableOffset, vbaseStructPointer,
vbaseStructPointer.getLength(), "vbtablePtr", "");
}
}
return classStructureDataType;
}
/**
* Method to apply the given class's vbtable structure
* @param recoveredClass the given RecoveredClass object which, if applicable, contains the address and structure to apply

View File

@@ -47,17 +47,19 @@ import ghidra.util.datastruct.ListAccumulator;
import ghidra.util.exception.*;
import ghidra.util.task.TaskMonitor;
public class RecoveredClassUtils {
public class RecoveredClassHelper {
public static final String DTM_CLASS_DATA_FOLDER_NAME = "ClassDataTypes";
private static final String CLASS_DATA_STRUCT_NAME = "_data";
private static final String DEFAULT_VFUNCTION_PREFIX = "vfunction";
private static final String VFUNCTION_COMMENT = "virtual function #";
private static final String CLASS_VFUNCTION_STRUCT_NAME = "_vftable";
private static final String CLASS_VTABLE_PTR_FIELD_EXT = "vftablePtr";
public static final String VFTABLE_LABEL = "vftable";
private static final String VBASE_DESTRUCTOR_LABEL = "vbase_destructor";
private static final String VBTABLE_LABEL = "vbtable";
private static final String VBTABLE_PTR = "vbtablePtr";
private static final String CLONE_LABEL = "clone";
private static final String DELETING_DESTRUCTOR_LABEL = "deleting_destructor";
@@ -145,7 +147,7 @@ public class RecoveredClassUtils {
boolean nameVfunctions;
boolean replaceClassStructures;
public RecoveredClassUtils(Program program, ProgramLocation location, PluginTool tool,
public RecoveredClassHelper(Program program, ProgramLocation location, PluginTool tool,
FlatProgramAPI api, boolean createBookmarks, boolean useShortTemplates,
boolean nameVunctions, boolean replaceClassStructures, TaskMonitor monitor)
throws Exception {
@@ -3521,7 +3523,6 @@ public class RecoveredClassUtils {
}
/**
<<<<<<< HEAD
* Method to replace the program's current class structure, only if an empty placeholder structure,
* with the one generated by this script
* @param function a class method with current class structure applied
@@ -5520,34 +5521,6 @@ public class RecoveredClassUtils {
return structureDataType;
}
/**
* Method to add the given structure component to the given structure at the given offset
* @param structureDataType the structure to add to
* @param structureToAdd the structure to add
* @param startOffset the starting offset where to add
* @return the updated structure
* @throws CancelledException if cancelled
*/
public Structure addIndividualComponentsToStructure(Structure structureDataType,
Structure structureToAdd, int startOffset) throws CancelledException {
DataTypeComponent[] definedComponents = structureToAdd.getDefinedComponents();
for (int ii = 0; ii < definedComponents.length; ii++) {
monitor.checkCanceled();
DataTypeComponent dataTypeComponent = structureToAdd.getComponent(ii);
int dataComponentOffset = dataTypeComponent.getOffset();
String fieldname = dataTypeComponent.getFieldName();
structureDataType = EditStructureUtils.addDataTypeToStructure(structureDataType,
startOffset + dataComponentOffset, dataTypeComponent.getDataType(), fieldname,
monitor);
}
return structureDataType;
}
/**
* Method to add alignment to the given length based on the default program address size
@@ -7000,7 +6973,7 @@ public class RecoveredClassUtils {
}
DataType dataType = definedComponent.getDataType();
if (dataType.getName().equals("undefined") && dataType.getLength() == 1) {
if (dataType == DataType.DEFAULT) {
dataType = new Undefined1DataType();
}
@@ -7030,30 +7003,6 @@ public class RecoveredClassUtils {
return classDataStructure;
}
/**
* Method to use the computed or existing class structure contents for the main class structure.
* This is called when there is not enough information to create a full structure.
* @param computedClassStructure the structure computed using pcode store information or using pdb information
* @param classStructureDataType the structure that is getting created in the data type manager
* @return the default class structure for this class
* @throws CancelledException if cancelled
*/
public Structure createDefaultStructure(Structure computedClassStructure,
Structure classStructureDataType) throws CancelledException {
DataTypeComponent[] definedComponents = computedClassStructure.getDefinedComponents();
for (DataTypeComponent component : definedComponents) {
monitor.checkCanceled();
classStructureDataType = EditStructureUtils.addDataTypeToStructure(
classStructureDataType, component.getOffset(), component.getDataType(),
component.getFieldName(), monitor);
}
classStructureDataType = (Structure) dataTypeManager.addDataType(classStructureDataType,
DataTypeConflictHandler.DEFAULT_HANDLER);
return classStructureDataType;
}
/**
* Method to find the purecall function.
@@ -8204,4 +8153,166 @@ public class RecoveredClassUtils {
return category;
}
/**
* Method to add class vftable pointers to the given class structure
* @param classStructureDataType the class structure data type
* @param recoveredClass the given recovered class
* @param vfPointerDataTypes the map of vftable addresses to vftable pointer data types; pass
* null to indicate there are no known vftables for the given class
* @return the modified structure with the vftable pointers added or an unchanged structure if
* the vftable map is null or if the given class's offset to vftable map is empty
* @throws CancelledException if cancelled
* @throws IllegalArgumentException if there are issues modifying the structure
*/
protected Structure addClassVftables(Structure classStructureDataType,
RecoveredClass recoveredClass, Map<Address, DataType> vfPointerDataTypes)
throws CancelledException, IllegalArgumentException {
Map<Integer, Address> classOffsetToVftableMap = recoveredClass.getClassOffsetToVftableMap();
Set<Integer> classVftableOffsets = classOffsetToVftableMap.keySet();
if (vfPointerDataTypes == null || classVftableOffsets.isEmpty()) {
return classStructureDataType;
}
// If the offset-to-vftable map is incomplete, the class structure will contain incomplete
// information, so emit a debug message to flag the issue
if (!isClassOffsetToVftableMapComplete(recoveredClass)) {
Msg.debug(this,
"class vftable offset map for " + recoveredClass.getName() + " is not complete");
}
// iterate over the set of offsets to vftables and either add to undefined area or overwrite
// the parent class structures with the class vftable pointer then replace the rest of the
// parent structure with its internal components
for (Integer offset : classVftableOffsets) {
monitor.checkCanceled();
Address vftableAddress = classOffsetToVftableMap.get(offset);
int vftableOffset = offset.intValue();
DataType classVftablePointer = vfPointerDataTypes.get(vftableAddress);
boolean addedVftablePointer = false;
// loop until the vftable pointer is added
// if component at offset is not a structure, replace with vftablePtr
// if component at offset is a structure, replace with components then loop
// until finally component is not a structure and can be replaced
while (!addedVftablePointer) {
monitor.checkCanceled();
// if enough empty bytes - add class vftable pointer
if (EditStructureUtils.canAdd(classStructureDataType, vftableOffset,
classVftablePointer.getLength(), monitor)) {
classStructureDataType =
EditStructureUtils.addDataTypeToStructure(classStructureDataType,
vftableOffset,
classVftablePointer, CLASS_VTABLE_PTR_FIELD_EXT, monitor);
addedVftablePointer = true;
continue;
}
// if already has a base class vftable pointer replace with main class vftablePtr
// get the item at that location
// use getComponentContaining so that an offset falling in the middle of a component
// resolves to the start of the containing component
DataTypeComponent currentComponent =
classStructureDataType.getComponentContaining(vftableOffset);
int componentOffset = currentComponent.getOffset();
if (currentComponent.getFieldName().endsWith(CLASS_VTABLE_PTR_FIELD_EXT)) {
classStructureDataType.replaceAtOffset(vftableOffset, classVftablePointer,
classVftablePointer.getLength(), CLASS_VTABLE_PTR_FIELD_EXT, "");
addedVftablePointer = true;
continue;
}
// if the current component isn't a structure just replace it with the
// vftable pointer
if (!(currentComponent.getDataType() instanceof Structure)) {
classStructureDataType.replaceAtOffset(vftableOffset, classVftablePointer,
classVftablePointer.getLength(), CLASS_VTABLE_PTR_FIELD_EXT, "");
addedVftablePointer = true;
continue;
}
// if there is a structure at the offset, replace it with its pieces then
// loop again to try to place vftable pointer over either empty bytes or
// base vftableptr
DataType currentDT = currentComponent.getDataType();
Structure internalStruct = (Structure) currentDT;
DataTypeComponent[] components = internalStruct.getComponents();
// if there is an empty structure at the offset, clear it, which replaces it with an
// undefined data type of size 1
if (components.length == 0) {
classStructureDataType.clearAtOffset(componentOffset);
continue;
}
// if non-empty, replace the structure with its components
for (DataTypeComponent component : components) {
int innerOffset = component.getOffset();
int replaceOffset = component.getOffset() + componentOffset;
if (classStructureDataType.getLength() <= replaceOffset) {
Msg.debug(this,
classStructureDataType.getName() + " trying to place component " +
component.getFieldName() + " at offset " + component.getOffset());
}
// add the individual components of the internal structure to the outer structure
classStructureDataType.replaceAtOffset(componentOffset + innerOffset,
component.getDataType(), component.getLength(), component.getFieldName(),
"");
}
}
}
return classStructureDataType;
}
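A hedged sketch of how a caller might assemble the vftable-pointer map this method consumes (getVftableAddresses() and getVftableStructure() are assumed accessors for illustration; dataTypeManager.getPointer() is the same call used for the vbtable pointer below):
// Hypothetical illustration; assumes execution inside RecoveredClassHelper
// (needs java.util.*, ghidra.program.model.address.Address, ghidra.program.model.data.*)
Map<Address, DataType> vfPointerDataTypes = new HashMap<>();
for (Address vftableAddress : recoveredClass.getVftableAddresses()) {	// assumed accessor
	Structure vftableStructure = getVftableStructure(vftableAddress);	// assumed helper
	vfPointerDataTypes.put(vftableAddress, dataTypeManager.getPointer(vftableStructure));
}
classStructureDataType =
	addClassVftables(classStructureDataType, recoveredClass, vfPointerDataTypes);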
/**
* Method to add a pointer to the class vbtable to the given class's class structure
* @param recoveredClass the given class
* @param classStructureDataType the given class's class structure data type
* @param overwrite if true, overwrite an existing item at the offset with the vbtable pointer; if false, leave it unchanged
* @return the updated class structure
* @throws CancelledException if cancelled
*/
protected Structure addVbtableToClassStructure(RecoveredClass recoveredClass,
Structure classStructureDataType, boolean overwrite) throws CancelledException {
Structure vbtableStructure = recoveredClass.getVbtableStructure();
if (vbtableStructure != null) {
int vbtableOffset = recoveredClass.getVbtableOffset();
DataType vbaseStructPointer = dataTypeManager.getPointer(vbtableStructure);
int dataLength = vbaseStructPointer.getLength();
if (EditStructureUtils.canAdd(classStructureDataType, vbtableOffset, dataLength,
monitor)) {
classStructureDataType =
EditStructureUtils.addDataTypeToStructure(classStructureDataType,
vbtableOffset, vbaseStructPointer, VBTABLE_PTR, monitor);
}
else if (overwrite) {
classStructureDataType.replaceAtOffset(vbtableOffset, vbaseStructPointer,
vbaseStructPointer.getLength(), VBTABLE_PTR, "");
}
}
return classStructureDataType;
}
}

View File

@@ -23,6 +23,9 @@ import generic.continues.RethrowContinuesFactory;
import ghidra.app.util.bin.*;
import ghidra.app.util.bin.format.macho.*;
import ghidra.app.util.bin.format.macho.commands.*;
import ghidra.app.util.bin.format.macho.dyld.DyldCacheHeader;
import ghidra.app.util.bin.format.macho.dyld.DyldCacheMappingInfo;
import ghidra.app.util.opinion.DyldCacheUtils.SplitDyldCache;
import ghidra.formats.gfilesystem.FSRL;
import ghidra.util.*;
import ghidra.util.exception.NotFoundException;
@@ -38,26 +41,29 @@ public class DyldCacheDylibExtractor {
* DYLIB's header will be altered to account for its segment bytes being packed down.
*
* @param dylibOffset The offset of the DYLIB in its subcache's provider
* @param provider The DYLD
* @param fsrl {@link FSRL} to assign to the resulting ByteProvider
* @param splitDyldCache The {@link SplitDyldCache}
* @param index The DYLIB's {@link SplitDyldCache} index
* @param fsrl {@link FSRL} to assign to the resulting {@link ByteProvider}
* @param monitor {@link TaskMonitor}
* @return {@link ByteProvider} containing the bytes of the dylib
* @return {@link ByteProvider} containing the bytes of the DYLIB
* @throws IOException If there was an IO-related issue with extracting the DYLIB
* @throws MachException If there was an error parsing the DYLIB headers
*/
public static ByteProvider extractDylib(long dylibOffset, ByteProvider provider, FSRL fsrl,
TaskMonitor monitor) throws IOException, MachException {
public static ByteProvider extractDylib(long dylibOffset, SplitDyldCache splitDyldCache,
int index, FSRL fsrl, TaskMonitor monitor) throws IOException, MachException {
// Make sure Mach-O header is valid
MachHeader header = MachHeader.createMachHeader(RethrowContinuesFactory.INSTANCE, provider,
dylibOffset, false);
header.parse();
MachHeader dylibHeader = MachHeader.createMachHeader(RethrowContinuesFactory.INSTANCE,
splitDyldCache.getProvider(index), dylibOffset, false);
dylibHeader.parse();
// Pack the DYLIB
PackedDylib packedDylib = new PackedDylib(header, dylibOffset, provider);
PackedDylib packedDylib = new PackedDylib(dylibHeader, dylibOffset, splitDyldCache, index);
// TODO: Fixup pointer chains
// Fixup indices, offsets, etc in the packed DYLIB's header
for (LoadCommand cmd : header.getLoadCommands()) {
for (LoadCommand cmd : dylibHeader.getLoadCommands()) {
if (monitor.isCancelled()) {
break;
}
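For reference, a sketch of how a caller drives the new signature, mirroring the DyldCacheFileSystem.getByteProvider() change further below (mappedImageAddress and index are illustrative; provider, log, fsrl, and monitor are assumed to be in scope):
// Illustrative only
SplitDyldCache splitDyldCache = new SplitDyldCache(provider, false, true, log, monitor);
int index = 0;	// subcache whose memory map contains the DYLIB's Mach-O header (assumed)
DyldCacheHeader subHeader = splitDyldCache.getDyldCacheHeader(index);
long dylibOffset = mappedImageAddress - subHeader.getBaseAddress();
ByteProvider dylibProvider =
	DyldCacheDylibExtractor.extractDylib(dylibOffset, splitDyldCache, index, fsrl, monitor);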
@@ -203,17 +209,18 @@ public class DyldCacheDylibExtractor {
/**
* Creates a new {@link PackedDylib} object
*
* @param header The DYLD's DYLIB's Mach-O header
* @param dylibHeader The DYLD's DYLIB's Mach-O header
* @param dylibOffset The offset of the DYLIB in its subcache's provider
* @param provider The DYLD's bytes
* @param splitDyldCache The {@link SplitDyldCache}
* @param index The DYLIB's {@link SplitDyldCache} index
* @throws IOException If there was an IO-related error
*/
public PackedDylib(MachHeader header, long dylibOffset, ByteProvider provider)
throws IOException {
reader = new BinaryReader(provider, true);
public PackedDylib(MachHeader dylibHeader, long dylibOffset, SplitDyldCache splitDyldCache,
int index) throws IOException {
reader = new BinaryReader(splitDyldCache.getProvider(index), true);
packedStarts = new HashMap<>();
int size = 0;
for (SegmentCommand segment : header.getAllSegments()) {
for (SegmentCommand segment : dylibHeader.getAllSegments()) {
packedStarts.put(segment, size);
size += segment.getFileSize();
@@ -224,14 +231,15 @@ public class DyldCacheDylibExtractor {
}
}
packed = new byte[size];
for (SegmentCommand segment : header.getAllSegments()) {
for (SegmentCommand segment : dylibHeader.getAllSegments()) {
long segmentSize = segment.getFileSize();
if (segment.getFileOffset() + segmentSize > provider.length()) {
segmentSize = provider.length() - segment.getFileOffset();
ByteProvider segmentProvider = getSegmentProvider(segment, splitDyldCache);
if (segment.getFileOffset() + segmentSize > segmentProvider.length()) {
segmentSize = segmentProvider.length() - segment.getFileOffset();
Msg.warn(this, segment.getSegmentName() +
" segment extends beyond end of file. Truncating...");
}
byte[] bytes = provider.readBytes(segment.getFileOffset(), segmentSize);
byte[] bytes = segmentProvider.readBytes(segment.getFileOffset(), segmentSize);
System.arraycopy(bytes, 0, packed, packedStarts.get(segment), bytes.length);
}
}
@@ -283,6 +291,28 @@ public class DyldCacheDylibExtractor {
Long.toHexString(fileOffset));
}
/**
* Gets the {@link ByteProvider} that contains the given {@link SegmentCommand segment}
*
* @param segment The {@link SegmentCommand segment}
* @param splitDyldCache The {@link SplitDyldCache}
* @return The {@link ByteProvider} that contains the given {@link SegmentCommand segment}
* @throws IOException If a {@link ByteProvider} could not be found
*/
private ByteProvider getSegmentProvider(SegmentCommand segment,
SplitDyldCache splitDyldCache) throws IOException {
for (int i = 0; i < splitDyldCache.size(); i++) {
DyldCacheHeader header = splitDyldCache.getDyldCacheHeader(i);
for (DyldCacheMappingInfo mappingInfo : header.getMappingInfos()) {
if (mappingInfo.contains(segment.getVMaddress())) {
return splitDyldCache.getProvider(i);
}
}
}
throw new IOException(
"Failed to find provider for segment: " + segment.getSegmentName());
}
/**
* Converts the given value to a byte array
*

View File

@@ -18,13 +18,13 @@ package ghidra.file.formats.ios.dyldcache;
import java.io.IOException;
import java.util.*;
import ghidra.app.util.bin.BinaryReader;
import ghidra.app.util.bin.ByteProvider;
import ghidra.app.util.bin.format.macho.MachException;
import ghidra.app.util.bin.format.macho.dyld.DyldCacheHeader;
import ghidra.app.util.bin.format.macho.dyld.DyldCacheImageInfo;
import ghidra.app.util.bin.format.macho.dyld.DyldCacheImage;
import ghidra.app.util.importer.MessageLog;
import ghidra.app.util.opinion.DyldCacheUtils;
import ghidra.app.util.opinion.DyldCacheUtils.SplitDyldCache;
import ghidra.formats.gfilesystem.*;
import ghidra.formats.gfilesystem.annotations.FileSystemInfo;
import ghidra.formats.gfilesystem.factory.GFileSystemBaseFactory;
@@ -35,8 +35,9 @@ import ghidra.util.task.TaskMonitor;
@FileSystemInfo(type = "dyldcachev1", description = "iOS DYLD Cache Version 1", factory = GFileSystemBaseFactory.class)
public class DyldCacheFileSystem extends GFileSystemBase {
private DyldCacheHeader header;
private Map<GFile, DyldCacheImageInfo> map = new HashMap<>();
private SplitDyldCache splitDyldCache;
private Map<GFile, Long> addrMap = new HashMap<>();
private Map<GFile, Integer> indexMap = new HashMap<>();
public DyldCacheFileSystem(String fileSystemName, ByteProvider provider) {
super(fileSystemName, provider);
@@ -44,20 +45,24 @@ public class DyldCacheFileSystem extends GFileSystemBase {
@Override
public void close() throws IOException {
map.clear();
addrMap.clear();
indexMap.clear();
splitDyldCache.close();
super.close();
}
@Override
public ByteProvider getByteProvider(GFile file, TaskMonitor monitor) throws IOException {
DyldCacheImageInfo data = map.get(file);
if (data == null) {
Long addr = addrMap.get(file);
if (addr == null) {
return null;
}
long machHeaderStartIndexInProvider = data.getAddress() - header.getBaseAddress();
int index = indexMap.get(file);
long machHeaderStartIndexInProvider =
addr - splitDyldCache.getDyldCacheHeader(index).getBaseAddress();
try {
return DyldCacheDylibExtractor.extractDylib(machHeaderStartIndexInProvider, provider,
file.getFSRL(), monitor);
return DyldCacheDylibExtractor.extractDylib(machHeaderStartIndexInProvider,
splitDyldCache, index, file.getFSRL(), monitor);
}
catch (MachException e) {
throw new IOException("Invalid Mach-O header detected at 0x" +
@@ -65,49 +70,11 @@ public class DyldCacheFileSystem extends GFileSystemBase {
}
}
/*
// TODO: support GFileSystemProgramProvider interface?
// Below is commented out implementation of getProgram(), that was present as a comment
// in the previous code, but formatted here so it can be read.
// This needs to be researched and the junit test needs to adjusted to test this.
@Override
public Program getProgram(GFile file, LanguageService languageService, TaskMonitor monitor,
Object consumer) throws Exception {
DyldArchitecture architecture = header.getArchitecture();
LanguageCompilerSpecPair lcs = architecture.getLanguageCompilerSpecPair(languageService);
DyldCacheData dyldCacheData = map.get(file);
long machHeaderStartIndexInProvider =
dyldCacheData.getLibraryOffset() - header.getBaseAddress();
ByteProvider wrapper =
new ByteProviderWrapper(provider, machHeaderStartIndexInProvider, file.getLength());
MachHeader machHeader =
MachHeader.createMachHeader(RethrowContinuesFactory.INSTANCE, wrapper);
Program program =
new ProgramDB(file.getName(), lcs.getLanguage(), lcs.getCompilerSpec(), consumer);
int id = program.startTransaction(getName());
boolean success = false;
try {
MachoLoader loader = new MachoLoader();
loader.load(machHeader, program, new MessageLog(), monitor);
program.setExecutableFormat(MachoLoader.MACH_O_NAME);
program.setExecutablePath(file.getAbsolutePath());
success = true;
}
finally {
program.endTransaction(id, success);
if (!success) {
program.release(consumer);
}
}
return program;
}
*/
@Override
public List<GFile> getListing(GFile directory) throws IOException {
if (directory == null || directory.equals(root)) {
List<GFile> roots = new ArrayList<>();
for (GFile file : map.keySet()) {
for (GFile file : addrMap.keySet()) {
if (file.getParentFile() == root || file.getParentFile().equals(root)) {
roots.add(file);
}
@@ -115,7 +82,7 @@ public class DyldCacheFileSystem extends GFileSystemBase {
return roots;
}
List<GFile> tmp = new ArrayList<>();
for (GFile file : map.keySet()) {
for (GFile file : addrMap.keySet()) {
if (file.getParentFile() == null) {
continue;
}
@@ -133,41 +100,37 @@ public class DyldCacheFileSystem extends GFileSystemBase {
@Override
public void open(TaskMonitor monitor) throws IOException, CryptoException, CancelledException {
MessageLog log = new MessageLog();
monitor.setMessage("Opening DYLD cache...");
BinaryReader reader = new BinaryReader(provider, true);
header = new DyldCacheHeader(reader);
header.parseFromFile(false, new MessageLog(), monitor);
List<DyldCacheImageInfo> dataList = header.getImageInfos();
monitor.initialize(dataList.size());
for (DyldCacheImageInfo data : dataList) {
if (monitor.isCancelled()) {
break;
splitDyldCache = new SplitDyldCache(provider, false, true, log, monitor);
for (int i = 0; i < splitDyldCache.size(); i++) {
DyldCacheHeader header = splitDyldCache.getDyldCacheHeader(i);
monitor.setMessage("Find files...");
List<DyldCacheImage> mappedImages = header.getMappedImages();
monitor.initialize(mappedImages.size());
for (DyldCacheImage mappedImage : mappedImages) {
GFileImpl file =
GFileImpl.fromPathString(this, root, mappedImage.getPath(), null, false, -1);
storeFile(file, mappedImage.getAddress(), i);
monitor.checkCanceled();
monitor.incrementProgress(1);
}
monitor.incrementProgress(1);
GFileImpl file = GFileImpl.fromPathString(this, root, data.getPath(), null, false, -1);
storeFile(file, data);
}
}
private void storeFile(GFile file, DyldCacheImageInfo data) {
private void storeFile(GFile file, Long addr, Integer index) {
if (file == null) {
return;
}
if (file.equals(root)) {
return;
}
if (!map.containsKey(file) || map.get(file) == null) {
map.put(file, data);
if (!addrMap.containsKey(file) || addrMap.get(file) == null) {
addrMap.put(file, addr);
indexMap.put(file, index);
}
GFile parentFile = file.getParentFile();
storeFile(parentFile, null);
storeFile(parentFile, null, null);
}
}