mirror of
https://github.com/NationalSecurityAgency/ghidra.git
synced 2025-10-05 19:42:36 +02:00
Merge remote-tracking branch 'origin/Ghidra_9.2'
This commit is contained in:
commit
9374a67b94
10 changed files with 170 additions and 91 deletions
|
@ -121,6 +121,8 @@
|
||||||
<blockquote><p><u>Bugs</u></p>
|
<blockquote><p><u>Bugs</u></p>
|
||||||
<ul>
|
<ul>
|
||||||
<li><I>Analysis</I>. Function start patterns found at 0x0, function signatures applied from the Data Type Manager at 0x0, and DWARF debug symbols applied at 0x0 will no longer cause stack traces. In addition, DWARF symbols with zero length address range no longer stack trace. (GT-2817, Issue #386, #1560)</li>
|
<li><I>Analysis</I>. Function start patterns found at 0x0, function signatures applied from the Data Type Manager at 0x0, and DWARF debug symbols applied at 0x0 will no longer cause stack traces. In addition, DWARF symbols with zero length address range no longer stack trace. (GT-2817, Issue #386, #1560)</li>
|
||||||
|
<li><I>Analysis</I>. Constant propagation will treat an OR with zero (0) as a simple copy. (GT-3548, Issue #1531)</li>
|
||||||
|
<li><I>Analysis</I>. Corrected <B>Create Structure from Selection</B>, which failed to use proper data organization during the construction process. This could result in improperly sized components such as pointers and primitive types. (GT-3587)</li>
|
||||||
<li><I>Analysis</I>. Fixed an issue where stored context is initializing the set of registers constantly. (GP-25)</li>
|
<li><I>Analysis</I>. Fixed an issue where stored context is initializing the set of registers constantly. (GP-25)</li>
|
||||||
<li><I>Analysis</I>. Fixed an RTTI Analyzer regression when analyzing RTTI0 structures with no RTTI4 references to them. (GP-62, Issue #2153)</li>
|
<li><I>Analysis</I>. Fixed an RTTI Analyzer regression when analyzing RTTI0 structures with no RTTI4 references to them. (GP-62, Issue #2153)</li>
|
||||||
<li><I>Analysis</I>. Fixed an issue where the RTTI analyzer was not filling out RTTI3 structures in some cases. (GP-111)</li>
|
<li><I>Analysis</I>. Fixed an issue where the RTTI analyzer was not filling out RTTI3 structures in some cases. (GP-111)</li>
|
||||||
|
@ -135,6 +137,7 @@
|
||||||
<li><I>Data Types</I>. Updated the DataTypeParser to handle data type names containing templates. (GT-3493, Issue #1417)</li>
|
<li><I>Data Types</I>. Updated the DataTypeParser to handle data type names containing templates. (GT-3493, Issue #1417)</li>
|
||||||
<li><I>Data Types</I>. Corrected pointer data type <code>isEquivalent()</code> method to properly check the equivalence of the base data type. The old implementation could cause a pointer to be replaced by a conflicting pointer with the same name whose base datatype is not equivalent. This change has a negative performance impact associated with it and can cause additional conflict datatypes due to the rigid datatype relationships. (GT-3557)</li>
|
<li><I>Data Types</I>. Corrected pointer data type <code>isEquivalent()</code> method to properly check the equivalence of the base data type. The old implementation could cause a pointer to be replaced by a conflicting pointer with the same name whose base datatype is not equivalent. This change has a negative performance impact associated with it and can cause additional conflict datatypes due to the rigid datatype relationships. (GT-3557)</li>
|
||||||
<li><I>Data Types</I>. Improved composite conflict resolution performance and corrected composite merge issues when composite bitfields and/or flexible arrays are present. (GT-3571)</li>
|
<li><I>Data Types</I>. Improved composite conflict resolution performance and corrected composite merge issues when composite bitfields and/or flexible arrays are present. (GT-3571)</li>
|
||||||
|
<li><I>Data Types</I>. Fixed bug in SymbolPathParser naive parse method that caused a less-than-adequate fall-back parse when angle bracket immediately followed the namespace delimiter. (GT-3620)</li>
|
||||||
<li><I>Data Types</I>. Corrected size of <code><B>long</B></code> for AARCH64 per LP64 standard. (GP-175)</li>
|
<li><I>Data Types</I>. Corrected size of <code><B>long</B></code> for AARCH64 per LP64 standard. (GP-175)</li>
|
||||||
<li><I>Decompiler</I>. Fixed bug causing the Decompiler to miss symbol references when they are stored to the heap. (GT-3267)</li>
|
<li><I>Decompiler</I>. Fixed bug causing the Decompiler to miss symbol references when they are stored to the heap. (GT-3267)</li>
|
||||||
<li><I>Decompiler</I>. Fixed bug in the Decompiler that caused <code>Deleting op with descendants</code> exception. (GT-3506)</li>
|
<li><I>Decompiler</I>. Fixed bug in the Decompiler that caused <code>Deleting op with descendants</code> exception. (GT-3506)</li>
|
||||||
|
@ -152,7 +155,7 @@
|
||||||
<li><I>Decompiler</I>. Addressed various situations where the Decompiler unexpectedly removes active instructions as dead code after renaming or retyping a stack location. If the location was really an array element or structure field, renaming forced the Decompiler to treat the location as a distinct variable. Subsequently, the Decompiler thought that indirect references based before the location could not alias any following stack locations, which could then by considered dead. As of the 9.2 release, the Decompiler's renaming action no longer switches an annotation to <code>forcing</code> if it wasn't already. A retyping action, although it is <code>forcing</code>, won't trigger alias blocking for atomic data-types (this is configurable). (GP-248, Issue #524, #873)</li>
|
<li><I>Decompiler</I>. Addressed various situations where the Decompiler unexpectedly removes active instructions as dead code after renaming or retyping a stack location. If the location was really an array element or structure field, renaming forced the Decompiler to treat the location as a distinct variable. Subsequently, the Decompiler thought that indirect references based before the location could not alias any following stack locations, which could then by considered dead. As of the 9.2 release, the Decompiler's renaming action no longer switches an annotation to <code>forcing</code> if it wasn't already. A retyping action, although it is <code>forcing</code>, won't trigger alias blocking for atomic data-types (this is configurable). (GP-248, Issue #524, #873)</li>
|
||||||
<li><I>Decompiler</I>. Fixed decompiler memory issues reported by a community security researcher. (GP-267)</li>
|
<li><I>Decompiler</I>. Fixed decompiler memory issues reported by a community security researcher. (GP-267)</li>
|
||||||
<li><I>Decompiler</I>. Fix for Decompiler error: <code>Pcode: XML comms: Missing symref attribute in <high> tag</code>. (GP-352, Issue #2360)</li>
|
<li><I>Decompiler</I>. Fix for Decompiler error: <code>Pcode: XML comms: Missing symref attribute in <high> tag</code>. (GP-352, Issue #2360)</li>
|
||||||
<li><I>Decompiler</I>. Fixed bug preventing the decompiler from seeing Equates attached to "compare" instructions. (GP-369, Issue #2386)</li>
|
<li><I>Decompiler</I>. Fixed bug preventing the Decompiler from seeing Equates attached to <B>compare</B> instructions. (GP-369, Issue #2386)</li>
|
||||||
<li><I>Demangler</I>. Fixed the GnuDemangler to parse the full namespace for <code>operator</code> symbols. (GT-3474, Issue #1441, #1448)</li>
|
<li><I>Demangler</I>. Fixed the GnuDemangler to parse the full namespace for <code>operator</code> symbols. (GT-3474, Issue #1441, #1448)</li>
|
||||||
<li><I>Demangler</I>. Fixed numerous GNU Demangler parsing issues. Most notable is the added support for C++ Lambda functions. (GT-3545, Issue #1457, #1569)</li>
|
<li><I>Demangler</I>. Fixed numerous GNU Demangler parsing issues. Most notable is the added support for C++ Lambda functions. (GT-3545, Issue #1457, #1569)</li>
|
||||||
<li><I>Demangler</I>. Updated the GNU Demangler to correctly parse and apply C++ strings using the <code>unnamed type</code> syntax. (GT-3645)</li>
|
<li><I>Demangler</I>. Updated the GNU Demangler to correctly parse and apply C++ strings using the <code>unnamed type</code> syntax. (GT-3645)</li>
|
||||||
|
|
|
@ -26,7 +26,7 @@
|
||||||
Capabilities include disassembly, assembly, decompilation, graphing, and scripting, along with
|
Capabilities include disassembly, assembly, decompilation, graphing, and scripting, along with
|
||||||
hundreds of other features. Ghidra supports a wide variety of processor instruction sets and
|
hundreds of other features. Ghidra supports a wide variety of processor instruction sets and
|
||||||
executable formats and can be run in both user-interactive and automated modes. Users may also
|
executable formats and can be run in both user-interactive and automated modes. Users may also
|
||||||
develop their own Ghidra plug-in component and scripts using the exposed API. In addition there are
|
develop their own Ghidra plug-in components and/or scripts using the exposed API. In addition there are
|
||||||
numerous ways to extend Ghidra such as new processors, loaders/exporters, automated analyzers,
|
numerous ways to extend Ghidra such as new processors, loaders/exporters, automated analyzers,
|
||||||
and new visualizations.
|
and new visualizations.
|
||||||
</P>
|
</P>
|
||||||
|
@ -38,7 +38,7 @@
|
||||||
code and generating deep insights for NSA analysts who seek a better understanding of potential
|
code and generating deep insights for NSA analysts who seek a better understanding of potential
|
||||||
vulnerabilities in networks and systems.
|
vulnerabilities in networks and systems.
|
||||||
</P>
|
</P>
|
||||||
|
|
||||||
<BR />
|
<BR />
|
||||||
<H1> What's New in Ghidra 9.2</H1>
|
<H1> What's New in Ghidra 9.2</H1>
|
||||||
|
|
||||||
|
@ -68,21 +68,21 @@
|
||||||
been regenerated, and new ones for VS 2017/2019 have been added.</P>
|
been regenerated, and new ones for VS 2017/2019 have been added.</P>
|
||||||
|
|
||||||
<P>Minor Note: SLA Files: Ghidra-compiled .sla files are not always backwards compatible due to changes in the underlying .sla
|
<P>Minor Note: SLA Files: Ghidra-compiled .sla files are not always backwards compatible due to changes in the underlying .sla
|
||||||
specification. In the pre-built ghidra, all .sla files are re-built from scratch. However if you have local processor modules,
|
specification. In the prebuilt Ghidra, all .sla files are rebuilt from scratch. However if you have local processor modules,
|
||||||
or are building ghidra from scratch, you may need to do a clean build. Any processor modules with changes are normally recompiled
|
or are building Ghidra from scratch, you may need to do a clean build. Any processor modules with changes are normally recompiled
|
||||||
at Ghidra startup so this situation is rare.</P>
|
at Ghidra startup so this situation is rare.</P>
|
||||||
|
|
||||||
<P>Minor Note: AARCH64 Long: The size of a <b>long</b> on the AARCH64 has been changed from 4-bytes to 8-bytes in the data organization within the
|
<P>Minor Note: AARCH64 Long: The size of a <b>long</b> on the AARCH64 has been changed from 4-bytes to 8-bytes in the data organization within the
|
||||||
compiler specification. This change could have ramifications in existing AARCH64 programs using a <b>long</b> within data structures or
|
compiler specification. This change could have ramifications in existing AARCH64 programs using a <b>long</b> within data structures or
|
||||||
custom storage of function parameters (dynamic storage should not be an issue). An included script <i><b>FixupCompositeDataTypesScript</b></i>
|
custom storage of function parameters (dynamic storage should not be an issue). An included script <i><b>FixupCompositeDataTypesScript</b></i>
|
||||||
can be run on programs, only with <i>exclusive checkout</i> in Multi-User, where the datatype sizes for <b>long</b> has changed. This general script can be used
|
can be run on programs, only with <i>exclusive checkout</i> in Multi-User, where the datatype sizes for <b>long</b> has changed. This general script can be used
|
||||||
whenever a program's base datatypes have changed in the compiler specification, which should be rare occurence.</P>
|
whenever a program's base datatypes have changed in the compiler specification, which should be a rare occurrence.</P>
|
||||||
|
|
||||||
|
|
||||||
<H2>Open Source Based Graphing</H2>
|
<H2>Open Source Based Graphing</H2>
|
||||||
<P>Ghidra has been integrated with an open source graph visualization package, called JUNGGRAPHT, to display interactive
|
<P>Ghidra has been integrated with an open source graph visualization package, called JUNGGRAPHT, to display interactive
|
||||||
block graphs, call graphs, AST control flow graphs, as well as a general API to create graphs within plugins and scripts.
|
block graphs, call graphs, AST control flow graphs, as well as a general API to create graphs within plug-ins and scripts.
|
||||||
Prior to initial public release, graphing had been provided by a legacy graphing package which was un-releasable publicly due to
|
Prior to initial public release, graphing had been provided by a legacy graphing package which was unreleasable publicly due to
|
||||||
licensing issues.</P>
|
licensing issues.</P>
|
||||||
|
|
||||||
<P>Graphs are displayed in a new tabbed graph window. Current location and selection of vertices are kept in sync with other
|
<P>Graphs are displayed in a new tabbed graph window. Current location and selection of vertices are kept in sync with other
|
||||||
|
@ -96,12 +96,12 @@
|
||||||
the creator of the graph before display. As in everything, the Ghidra team is interested in any feedback you might provide
|
the creator of the graph before display. As in everything, the Ghidra team is interested in any feedback you might provide
|
||||||
on this new capability.</P>
|
on this new capability.</P>
|
||||||
|
|
||||||
<H2>JAVA based Univeral PDB Reader/Analzyer/Loader</H2>
|
<H2>JAVA based Universal PDB Reader/Analyzer/Loader</H2>
|
||||||
<P>Added a new platform-independent PDB Reader/Analyzer/Loader that has the ability to process
|
<P>Added a new platform-independent PDB Reader/Analyzer/Loader that has the ability to process
|
||||||
raw PDB files and apply extracted information to a program. Written in Java, PDBs can be utilized on any supported
|
raw PDB files and apply extracted information to a program. Written in Java, PDBs can be utilized on any supported
|
||||||
platform, not just on Windows as in prior Ghidra versions. PDBs can be applied during analysis
|
platform, not just on Windows as in prior Ghidra versions. PDBs can be applied during analysis
|
||||||
or by loading and applying the PDB before analysis. Information from PDBs can be force-loaded into a program
|
or by loading and applying the PDB before analysis. Information from PDBs can be force-loaded into a program
|
||||||
with a mismatched PDB signature, which is very useful for extracting data types to be used with the
|
with a mismatched PDB signature, which is very useful for extracting datatypes to be used with the
|
||||||
program from a PDB related to that program. Loading the PDB utilizes a new underlying Universal
|
program from a PDB related to that program. Loading the PDB utilizes a new underlying Universal
|
||||||
Reader API.</P>
|
Reader API.</P>
|
||||||
<P>The PDB Reader and Analyzer capabilities are an evolutionary development and are expected to be
|
<P>The PDB Reader and Analyzer capabilities are an evolutionary development and are expected to be
|
||||||
|
@ -109,7 +109,7 @@
|
||||||
and fixing bugs. If the new PDB Analyzer causes issues, you can turn it off and use the original PDB Analyzer.</P>
|
and fixing bugs. If the new PDB Analyzer causes issues, you can turn it off and use the original PDB Analyzer.</P>
|
||||||
|
|
||||||
<H2>Dynamic Modules: OSGI model for scripting</H2>
|
<H2>Dynamic Modules: OSGI model for scripting</H2>
|
||||||
<P>A change to scripting brings a powerful form of dynamic extensibilty to Ghidra scripting, where Java source code is (re)compiled, loaded, and
|
<P>A change to scripting brings a powerful form of dynamic extensibility to Ghidra scripting, where Java source code is (re)compiled, loaded, and
|
||||||
run without exiting Ghidra. When a script grows large or requires external dependencies, it might be worth the effort to split
|
run without exiting Ghidra. When a script grows large or requires external dependencies, it might be worth the effort to split
|
||||||
up code into modules. To support modularity while preserving the dynamic nature of scripts, Ghidra uses OSGi. The new feature
|
up code into modules. To support modularity while preserving the dynamic nature of scripts, Ghidra uses OSGi. The new feature
|
||||||
provides better script change detection, external jar dependencies, script lifecycle management, and modularity.</P>
|
provides better script change detection, external jar dependencies, script lifecycle management, and modularity.</P>
|
||||||
|
@ -129,9 +129,9 @@
|
||||||
</P>
|
</P>
|
||||||
</ul>
|
</ul>
|
||||||
<P>The decompiler GUI as also been enhanced with the addition of multiple highlights of varying color, called secondary highlights. In addition,
|
<P>The decompiler GUI has also been enhanced with the addition of multiple highlights of varying color, called secondary highlights. In addition,
|
||||||
the Decompiler's Auto Create/Fill Structure commands incorporate data-type information from function prototypes
|
the Decompiler's Auto Create/Fill Structure commands incorporate datatype information from function prototypes
|
||||||
and will override undefined or more general data-types with discovered data-types that are more specific.</P>
|
and will override undefined or more general datatypes with discovered datatypes that are more specific.</P>
|
||||||
<P>There is re-writen more comprehensive Decompiler documentation too!</P>
|
<P>There is rewritten more comprehensive Decompiler documentation too!</P>
|
||||||
|
|
||||||
<H2>Performance Improvements</H2>
|
<H2>Performance Improvements</H2>
|
||||||
<P>There have been major performance improvements in both analysis and the display or filtering of information within GUI components.
|
<P>There have been major performance improvements in both analysis and the display or filtering of information within GUI components.
|
||||||
|
@ -140,12 +140,12 @@
|
||||||
or binaries that take a large amount of time to process. If you can find an example binary that is easily obtainable that reproduces
|
or binaries that take a large amount of time to process. If you can find an example binary that is easily obtainable that reproduces
|
||||||
the issue, the root cause can be identified and hopefully improved. There are some continued sore performance areas we are still working
|
the issue, the root cause can be identified and hopefully improved. There are some continued sore performance areas we are still working on,
|
||||||
such as the non-returning function analyzer. We hope you will find the binary analysis speed and interactivity much improved.</P>
|
such as the non-returning function analyzer. We hope you will find the binary analysis speed and interactivity much improved.</P>
|
||||||
<P>Some specific areas of improvement are binaries with rich data type information, RTTI information, exception records, large number
|
<P>Some specific areas of improvement are binaries with rich datatype information, RTTI information, exception records, large number
|
||||||
of bytes, large number of defined symbols, and many symbols at a single address.</P>
|
of bytes, large number of defined symbols, and many symbols at a single address.</P>
|
||||||
|
|
||||||
<H2>Function Identification Improvements</H2>
|
<H2>Function Identification Improvements</H2>
|
||||||
|
|
||||||
<P>Function Identification databases have been re-created from scratch, including new information for Visual Studio 2017 and 2019 libraries.
|
<P>Function Identification databases have been recreated from scratch, including new information for Visual Studio 2017 and 2019 libraries.
|
||||||
The databases have been cleaned and should overall result in more matches with fewer mis-matched or multiple matches for identified functions.
|
The databases have been cleaned and should overall result in more matches with fewer mis-matched or multiple matches for identified functions.
|
||||||
In addition the FID libraries had to be rebuilt from scratch due to errors or differences in instruction set decode (especially in the 64-bit X86)
|
In addition the FID libraries had to be rebuilt from scratch due to errors or differences in instruction set decode (especially in the 64-bit X86)
|
||||||
with prior versions of Ghidra. The FID is sensitive to the actual instruction bytes, the mnemonic, register, and number of operands.</P>
|
with prior versions of Ghidra. The FID is sensitive to the actual instruction bytes, the mnemonic, register, and number of operands.</P>
|
||||||
|
@ -155,7 +155,7 @@
|
||||||
For normal clean non-heavily optimized, non-malware or obfuscated binaries, these options should cause few issues.</P>
|
For normal clean non-heavily optimized, non-malware or obfuscated binaries, these options should cause few issues.</P>
|
||||||
|
|
||||||
<H2>Symbol Demangling</H2>
|
<H2>Symbol Demangling</H2>
|
||||||
<P>Both GNU and Microsoft symbol de-mangling has been greatly improved resulting in fewer unmangled symbols with better function signature recovery.</P>
|
<P>Both GNU and Microsoft symbol demangling has been greatly improved resulting in fewer unmangled symbols with better function signature recovery.</P>
|
||||||
|
|
||||||
<H2>Processor Models</H2>
|
<H2>Processor Models</H2>
|
||||||
<P>Several new processor specifications have been added, from very old processors to more recent: CP1600, M6809, M8C, RISC-V, V850.</P>
|
<P>Several new processor specifications have been added, from very old processors to more recent: CP1600, M6809, M8C, RISC-V, V850.</P>
|
||||||
|
@ -178,8 +178,8 @@
|
||||||
you use with the Ghidra runtime, from the one you are using with the entire Ghidra source code base imported.
|
you use with the Ghidra runtime, from the one you are using with the entire Ghidra source code base imported.
|
||||||
To find out more read the <i>GhidraSleighEditor_README.html</i>.</P>
|
To find out more read the <i>GhidraSleighEditor_README.html</i>.</P>
|
||||||
|
|
||||||
<P>The External Disassembler is a plugin useful when developing or trouble-shooting sleigh processor specifications. It is part of
|
<P>The External Disassembler is a plug-in useful when developing or trouble-shooting sleigh processor specifications. It is part of
|
||||||
the Xtra SleighDevTools project. The plugin integrates with an external disassembler such as binutils, and provides a code browser
|
the Xtra SleighDevTools project. The plug-in integrates with an external disassembler such as binutils, and provides a code browser
|
||||||
field that displays the disassembly from an external disassembler, such as bintutils, at each instruction or undefined byte in the listing.
|
field that displays the disassembly from an external disassembler, such as binutils, at each instruction or undefined byte in the listing.
|
||||||
The only external disassembler integration provided is binutils, however it is possible to add support for additional external disassemblers.
|
The only external disassembler integration provided is binutils, however it is possible to add support for additional external disassemblers.
|
||||||
Previously the External Disassembler had trouble with instruction sets which have an alternate mode set of instruction
|
Previously the External Disassembler had trouble with instruction sets which have an alternate mode set of instruction
|
||||||
|
@ -187,7 +187,7 @@
|
||||||
to choose the correct alternate encoding set. This also works well with several scripts that also aide in processor development such as
|
to choose the correct alternate encoding set. This also works well with several scripts that also aid in processor development such as
|
||||||
the <i>CompareSleighExternal</i> script.</P>
|
the <i>CompareSleighExternal</i> script.</P>
|
||||||
|
|
||||||
<P>A new pCode operation POPCOUNT is supported in sleigh processor specifications. POPCOUNT was mainly added to deal with instructions
|
<P>A new p-code operation POPCOUNT is supported in sleigh processor specifications. POPCOUNT was mainly added to deal with instructions
|
||||||
that needed to compute the parity of an operation.
|
that needed to compute the parity of an operation.
|
||||||
In addition, the Sleigh compiler error messages have been reworked to be more comprehensible, consistent in format layout, and to provide
|
In addition, the Sleigh compiler error messages have been reworked to be more comprehensible, consistent in format layout, and to provide
|
||||||
correct line numbers as close to the error as possible. In addition, several cases have been caught during compilation that previously would
|
correct line numbers as close to the error as possible. In addition, several cases have been caught during compilation that previously would
|
||||||
|
@ -199,35 +199,34 @@
|
||||||
|
|
||||||
<H2>Bug Fixes and Enhancements</H2>
|
<H2>Bug Fixes and Enhancements</H2>
|
||||||
<P> Numerous other bug fixes and improvements are fully listed in the <a href="ChangeHistory.html">ChangeHistory</a> file.</P>
|
<P> Numerous other bug fixes and improvements are fully listed in the <a href="ChangeHistory.html">ChangeHistory</a> file.</P>
|
||||||
|
|
||||||
<BR />
|
|
||||||
<BR />
|
<BR />
|
||||||
<H1> What's New in Ghidra 9.1</H1>
|
<H1> What's New in Ghidra 9.1</H1>
|
||||||
|
|
||||||
<H2> <a id="finePrint91"/>The not so fine print: Please Read!</H2>
|
<H2> <a id="finePrint91"/>The not so fine print: Please Read!</H2>
|
||||||
|
|
||||||
<P>Minor Note: Ghidra compiled .sla files are not backwards compatible due to the newly added OTHER space for syscalls
|
<P>Minor Note: Ghidra compiled .sla files are not backwards compatible due to the newly added OTHER space for syscalls
|
||||||
support. In the pre-built ghidra all .sla files are re-built from scratch. However if you have local processor modules,
|
support. In the prebuilt Ghidra all .sla files are rebuilt from scratch. However if you have local processor modules,
|
||||||
or are building ghidra from scratch, you may need to do a clean build. You will get an error if an old .sla file is loaded
|
or are building Ghidra from scratch, you may need to do a clean build. You will get an error if an old .sla file is loaded
|
||||||
without recompilation of the .slaspec file. Any processor modules with changes are normally recompiled at Ghidra startup
|
without recompilation of the .slaspec file. Any processor modules with changes are normally recompiled at Ghidra startup
|
||||||
so this situation is rare.</P>
|
so this situation is rare.</P>
|
||||||
|
|
||||||
<H2>Data Improvements</H2>
|
<H2>Data Improvements</H2>
|
||||||
|
|
||||||
<P>Bitfields within structures are now supported as a Ghidra data type. Bitfield definitions
|
<P>Bitfields within structures are now supported as a Ghidra datatype. Bitfield definitions
|
||||||
can come from PDB, DWARF, parsed header files, and can also be created within the structure
|
can come from PDB, DWARF, parsed header files, and can also be created within the structure
|
||||||
editor. All Data type archives delivered with Ghidra have been re-parsed to capture bitfield
|
editor. All Datatype archives delivered with Ghidra have been reparsed to capture bitfield
|
||||||
information. In addition, compiler bitfield allocation schemes have been carefully implemented.
|
information. In addition, compiler bitfield allocation schemes have been carefully implemented.
|
||||||
Full support for bitfield references within the decompiler is planned for a future
|
Full support for bitfield references within the decompiler is planned for a future
|
||||||
release.</P>
|
release.</P>
|
||||||
|
|
||||||
<P>In support of creating bitfields within structures, a new bitfield editor within the
|
<P>In support of creating bitfields within structures, a new bitfield editor within the
|
||||||
structure editor has been added. The Bitfield Editor includes a visual depiction of the
|
structure editor has been added. The Bitfield Editor includes a visual depiction of the
|
||||||
data type byte layout and the associated bits. The BitField Editor simplifies the creation
|
datatype byte layout and the associated bits. The BitField Editor simplifies the creation
|
||||||
of bitfields within a structure.</P>
|
of bitfields within a structure.</P>
|
||||||
|
|
||||||
<H2>System Calls</H2>
|
<H2>System Calls</H2>
|
||||||
<P>Ghidra now supports overriding indirect calls, CALLOTHER pcode ops, and conditional jumps via new overriding references.
|
<P>Ghidra now supports overriding indirect calls, CALLOTHER p-code ops, and conditional jumps via new overriding references.
|
||||||
These references can be used to achieve correct decompilation of syscall-like instructions. A new script,
|
These references can be used to achieve correct decompilation of syscall-like instructions. A new script,
|
||||||
ResolveX86orX64LinuxSyscallsScript, has been provided as part of this initial implementation.
|
ResolveX86orX64LinuxSyscallsScript, has been provided as part of this initial implementation.
|
||||||
Future releases will automatically identify and apply system calls for other operating systems and versions.</P>
|
Future releases will automatically identify and apply system calls for other operating systems and versions.</P>
|
||||||
|
@ -240,11 +239,11 @@
|
||||||
<H2>Processor Specification</H2>
|
<H2>Processor Specification</H2>
|
||||||
|
|
||||||
<P>A new set of tools designed to make processor specifications easier to create, modify, and validate
|
<P>A new set of tools designed to make processor specifications easier to create, modify, and validate
|
||||||
have been added. The tools consist of a context sensitive Sleigh file editor, a pcode validation
|
have been added. The tools consist of a context sensitive Sleigh file editor, a p-code validation
|
||||||
framework, an external disassembler field, and several scripts to make development easier.
|
framework, an external disassembler field, and several scripts to make development easier.
|
||||||
The Sleigh editor is a plugin for Eclipse and provides modern editor features such as syntax coloring,
|
The Sleigh editor is a plug-in for Eclipse and provides modern editor features such as syntax coloring,
|
||||||
hover, navigation, code formatting, validation, reference finding, and error
|
hover, navigation, code formatting, validation, reference finding, and error
|
||||||
navigation. The test suite emulates the pcode to automatically
|
navigation. The test suite emulates the p-code to automatically
|
||||||
validate the instructions most commonly used by the compiler for that processor.</P>
|
validate the instructions most commonly used by the compiler for that processor.</P>
|
||||||
|
|
||||||
<H2>iOS DYLD and Macho Format</H2>
|
<H2>iOS DYLD and Macho Format</H2>
|
||||||
|
@ -262,7 +261,7 @@
|
||||||
connect to a 9.1 server.</P>
|
connect to a 9.1 server.</P>
|
||||||
|
|
||||||
<P>The Ghidra server has two additional authentication methods, Active Directory using
|
<P>The Ghidra server has two additional authentication methods, Active Directory using
|
||||||
Kerberos and Plugable Authentication Modules (PAM) using JAAS. To utilize these new
|
Kerberos and Pluggable Authentication Modules (PAM) using JAAS. To utilize these new
|
||||||
methods you must configure the server.conf file and use either -a1 for windows authentication
|
methods you must configure the server.conf file and use either -a1 for windows authentication
|
||||||
or -a4 along with -jaas. The JAAS mode will require setup of an additional configuration file (jaas.conf).</P>
|
or -a4 along with -jaas. The JAAS mode will require setup of an additional configuration file (jaas.conf).</P>
|
||||||
|
|
||||||
|
@ -292,9 +291,9 @@
|
||||||
primarily affects functions using floating point Neon instructions.</P>
|
primarily affects functions using floating point Neon instructions.</P>
|
||||||
|
|
||||||
<P>Renaming a parameter in the decompiler will no longer commit the
|
<P>Renaming a parameter in the decompiler will no longer commit the
|
||||||
data types of all parameters, allowing data
|
datatypes of all parameters, allowing datatypes
|
||||||
types to continue to "float" without getting locked into a potentially
|
to continue to "float" without getting locked into a potentially
|
||||||
incorrect initial data type. In addition, the cumbersome warning dialog
|
incorrect initial datatype. In addition, the cumbersome warning dialog
|
||||||
for renaming and retyping has been removed, improving your RE workflow.</P>
|
for renaming and retyping has been removed, improving your RE workflow.</P>
|
||||||
|
|
||||||
<H2>Languages</H2>
|
<H2>Languages</H2>
|
||||||
|
@ -302,7 +301,7 @@
|
||||||
HCS12X/XGATE, HCS08, and user-contributed specifications for MCS-48,
|
HCS12X/XGATE, HCS08, and user-contributed specifications for MCS-48,
|
||||||
SuperH1/2a, and Tricore.</P>
|
SuperH1/2a, and Tricore.</P>
|
||||||
|
|
||||||
<P>The 16-bit x86 processor specification has been re-worked to include
|
<P>The 16-bit x86 processor specification has been reworked to include
|
||||||
protected mode addressing, which the NE loader now uses by default. Handling of
|
protected mode addressing, which the NE loader now uses by default. Handling of
|
||||||
segmented or paged memory has been updated to use a newer scheme, hiding its
|
segmented or paged memory has been updated to use a newer scheme, hiding its
|
||||||
complications from decompilation results. The implementation handles the HCS12X paging scheme as well.</P>
|
complications from decompilation results. The implementation handles the HCS12X paging scheme as well.</P>
|
||||||
|
@ -323,7 +322,7 @@
|
||||||
<P>In case you missed it, in March 2019, a public version of Ghidra was released for the first time. Soon after,
|
<P>In case you missed it, in March 2019, a public version of Ghidra was released for the first time. Soon after,
|
||||||
the full buildable source was made available as an open source project on the NSA GitHub page. The response from the Ghidra
|
the full buildable source was made available as an open source project on the NSA GitHub page. The response from the Ghidra
|
||||||
Open Source community has been overwhelmingly positive. We welcome contributions from GitHub including bug fixes,
|
Open Source community has been overwhelmingly positive. We welcome contributions from GitHub including bug fixes,
|
||||||
requests, scripts, processor modules, and plugins. </P>
|
requests, scripts, processor modules, and plug-ins. </P>
|
||||||
|
|
||||||
<H2> Bug Fixes and Enhancements</H2>
|
<H2> Bug Fixes and Enhancements</H2>
|
||||||
<P> Bug fixes and improvements for 9.0.x are listed in the
|
<P> Bug fixes and improvements for 9.0.x are listed in the
|
||||||
|
|
|
@ -69,20 +69,26 @@ abstract class CompositeDB extends DataTypeDB implements Composite {
|
||||||
protected abstract void initialize();
|
protected abstract void initialize();
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get the preferred length for a new component. Constraining length of fixed-length datatype
|
* Get the preferred length for a new component. For Unions and internally
|
||||||
* may not be sustainable in response to datatype size changes over time.
|
* aligned structures the preferred component length for a fixed-length dataType
|
||||||
|
* will be the length of that dataType. Otherwise the length returned will be no
|
||||||
|
* larger than the specified length.
|
||||||
|
*
|
||||||
* @param dataType new component datatype
|
* @param dataType new component datatype
|
||||||
* @param length specified length required for Dynamic types such as string
|
* @param length constrained length or -1 to force use of dataType size.
|
||||||
* which must have a positive length specified.
|
* Dynamic types such as string must have a positive length
|
||||||
|
* specified.
|
||||||
* @return preferred component length
|
* @return preferred component length
|
||||||
*/
|
*/
|
||||||
protected int getPreferredComponentLength(DataType dataType, int length) {
|
protected int getPreferredComponentLength(DataType dataType, int length) {
|
||||||
if (length > 0 && (dataType instanceof Composite) &&
|
if ((isInternallyAligned() || (this instanceof Union)) && !(dataType instanceof Dynamic)) {
|
||||||
((Composite) dataType).isNotYetDefined()) {
|
length = -1; // force use of datatype size
|
||||||
return length;
|
|
||||||
}
|
}
|
||||||
int dtLength = dataType.getLength();
|
int dtLength = dataType.getLength();
|
||||||
if (dtLength > 0) {
|
if (length <= 0) {
|
||||||
|
length = dtLength;
|
||||||
|
}
|
||||||
|
else if (dtLength > 0 && dtLength < length) {
|
||||||
length = dtLength;
|
length = dtLength;
|
||||||
}
|
}
|
||||||
if (length <= 0) {
|
if (length <= 0) {
|
||||||
|
|
|
@ -345,17 +345,20 @@ class DataTypeComponentDB implements InternalDataTypeComponent {
|
||||||
DataType myParent = getParent();
|
DataType myParent = getParent();
|
||||||
boolean aligned =
|
boolean aligned =
|
||||||
(myParent instanceof Composite) ? ((Composite) myParent).isInternallyAligned() : false;
|
(myParent instanceof Composite) ? ((Composite) myParent).isInternallyAligned() : false;
|
||||||
// Components don't need to have matching offset when they are aligned, only matching ordinal.
|
// Components don't need to have matching offset when they are aligned
|
||||||
// NOTE: use getOffset() and getOrdinal() methods since returned values will differ from
|
// NOTE: use getOffset() method since returned values will differ from
|
||||||
// stored values for flexible array component
|
// stored values for flexible array component
|
||||||
if ((!aligned && (getOffset() != dtc.getOffset())) ||
|
if ((!aligned && (getOffset() != dtc.getOffset())) ||
|
||||||
// Components don't need to have matching length when they are aligned. Is this correct?
|
|
||||||
(!aligned && (getLength() != dtc.getLength())) || getOrdinal() != dtc.getOrdinal() ||
|
|
||||||
!SystemUtilities.isEqual(getFieldName(), dtc.getFieldName()) ||
|
!SystemUtilities.isEqual(getFieldName(), dtc.getFieldName()) ||
|
||||||
!SystemUtilities.isEqual(getComment(), dtc.getComment())) {
|
!SystemUtilities.isEqual(getComment(), dtc.getComment())) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Component lengths need only be checked for dynamic types
|
||||||
|
if (getLength() != dtc.getLength() && (myDt instanceof Dynamic)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
return DataTypeUtilities.isSameOrEquivalentDataType(myDt, otherDt);
|
return DataTypeUtilities.isSameOrEquivalentDataType(myDt, otherDt);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -684,8 +684,9 @@ class StructureDB extends CompositeDB implements Structure {
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Create copy of structure for target dtm (source archive information is discarded). WARNING!
|
* Create copy of structure for target dtm (source archive information is discarded).
|
||||||
* copying unaligned structures which contain bitfields can produce invalid results when
|
* <p>
|
||||||
|
* WARNING! copying unaligned structures which contain bitfields can produce invalid results when
|
||||||
* switching endianess due to the differences in packing order.
|
* switching endianess due to the differences in packing order.
|
||||||
*
|
*
|
||||||
* @param dtm target data type manager
|
* @param dtm target data type manager
|
||||||
|
@ -1281,7 +1282,22 @@ class StructureDB extends CompositeDB implements Structure {
|
||||||
|
|
||||||
DataType dt = resolvedDts[i]; // ancestry check already performed by caller
|
DataType dt = resolvedDts[i]; // ancestry check already performed by caller
|
||||||
|
|
||||||
int length = getPreferredComponentLength(dt, dtc.getLength());
|
int length = dt.getLength();
|
||||||
|
if (length <= 0 || dtc.isBitFieldComponent()) {
|
||||||
|
length = dtc.getLength();
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
// do not exceed available space
|
||||||
|
int maxOffset;
|
||||||
|
int nextIndex = i + 1;
|
||||||
|
if (nextIndex < otherComponents.length) {
|
||||||
|
maxOffset = otherComponents[nextIndex].getOffset();
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
maxOffset = struct.getLength();
|
||||||
|
}
|
||||||
|
length = Math.min(length, maxOffset - dtc.getOffset());
|
||||||
|
}
|
||||||
|
|
||||||
Record rec = componentAdapter.createRecord(dataMgr.getResolvedID(dt), key, length,
|
Record rec = componentAdapter.createRecord(dataMgr.getResolvedID(dt), key, length,
|
||||||
dtc.getOrdinal(), dtc.getOffset(), dtc.getFieldName(), dtc.getComment());
|
dtc.getOrdinal(), dtc.getOffset(), dtc.getFieldName(), dtc.getComment());
|
||||||
|
@ -1358,6 +1374,9 @@ class StructureDB extends CompositeDB implements Structure {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void dataTypeSizeChanged(DataType dt) {
|
public void dataTypeSizeChanged(DataType dt) {
|
||||||
|
if (dt instanceof BitFieldDataType) {
|
||||||
|
return; // unsupported
|
||||||
|
}
|
||||||
lock.acquire();
|
lock.acquire();
|
||||||
try {
|
try {
|
||||||
checkDeleted();
|
checkDeleted();
|
||||||
|
@ -1374,7 +1393,10 @@ class StructureDB extends CompositeDB implements Structure {
|
||||||
// assume no impact to bitfields since base types
|
// assume no impact to bitfields since base types
|
||||||
// should not change size
|
// should not change size
|
||||||
int dtcLen = dtc.getLength();
|
int dtcLen = dtc.getLength();
|
||||||
int length = getPreferredComponentLength(dt, dtcLen);
|
int length = dt.getLength();
|
||||||
|
if (length <= 0) {
|
||||||
|
length = dtcLen;
|
||||||
|
}
|
||||||
if (length < dtcLen) {
|
if (length < dtcLen) {
|
||||||
dtc.setLength(length, true);
|
dtc.setLength(length, true);
|
||||||
shiftOffsets(i + 1, dtcLen - length, 0);
|
shiftOffsets(i + 1, dtcLen - length, 0);
|
||||||
|
@ -1427,7 +1449,10 @@ class StructureDB extends CompositeDB implements Structure {
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
int dtcLen = dtc.getLength();
|
int dtcLen = dtc.getLength();
|
||||||
int length = getPreferredComponentLength(dt, dtcLen);
|
int length = dt.getLength();
|
||||||
|
if (length <= 0) {
|
||||||
|
length = dtcLen;
|
||||||
|
}
|
||||||
if (dtcLen != length) {
|
if (dtcLen != length) {
|
||||||
if (length < dtcLen) {
|
if (length < dtcLen) {
|
||||||
dtc.setLength(length, true);
|
dtc.setLength(length, true);
|
||||||
|
@ -1514,14 +1539,18 @@ class StructureDB extends CompositeDB implements Structure {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
int myNumComps = getNumComponents();
|
int myNumComps = components.size();
|
||||||
int otherNumComps = struct.getNumComponents();
|
int otherNumComps = struct.getNumDefinedComponents();
|
||||||
if (myNumComps != otherNumComps) {
|
if (myNumComps != otherNumComps) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
DataTypeComponent[] otherDefinedComponents = struct.getDefinedComponents();
|
||||||
|
if (otherDefinedComponents.length != myNumComps) { // safety check
|
||||||
|
return false;
|
||||||
|
}
|
||||||
for (int i = 0; i < myNumComps; i++) {
|
for (int i = 0; i < myNumComps; i++) {
|
||||||
DataTypeComponent myDtc = getComponent(i);
|
DataTypeComponent myDtc = components.get(i);
|
||||||
DataTypeComponent otherDtc = struct.getComponent(i);
|
DataTypeComponent otherDtc = otherDefinedComponents[i];
|
||||||
if (!myDtc.isEquivalent(otherDtc)) {
|
if (!myDtc.isEquivalent(otherDtc)) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
|
@ -418,14 +418,19 @@ class UnionDB extends CompositeDB implements Union {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void dataTypeSizeChanged(DataType dt) {
|
public void dataTypeSizeChanged(DataType dt) {
|
||||||
|
if (dt instanceof BitFieldDataType) {
|
||||||
|
return; // unsupported
|
||||||
|
}
|
||||||
lock.acquire();
|
lock.acquire();
|
||||||
try {
|
try {
|
||||||
checkDeleted();
|
checkDeleted();
|
||||||
boolean changed = false;
|
boolean changed = false;
|
||||||
for (DataTypeComponentDB dtc : components) {
|
for (DataTypeComponentDB dtc : components) {
|
||||||
int length = dtc.getLength();
|
|
||||||
if (dtc.getDataType() == dt) {
|
if (dtc.getDataType() == dt) {
|
||||||
length = getPreferredComponentLength(dt, length);
|
int length = dt.getLength();
|
||||||
|
if (length <= 0) {
|
||||||
|
length = dtc.getLength();
|
||||||
|
}
|
||||||
dtc.setLength(length, true);
|
dtc.setLength(length, true);
|
||||||
changed = true;
|
changed = true;
|
||||||
}
|
}
|
||||||
|
@ -448,7 +453,10 @@ class UnionDB extends CompositeDB implements Union {
|
||||||
dt = adjustBitField(dt); // in case base type changed
|
dt = adjustBitField(dt); // in case base type changed
|
||||||
}
|
}
|
||||||
int dtcLen = dtc.getLength();
|
int dtcLen = dtc.getLength();
|
||||||
int length = getPreferredComponentLength(dt, dtcLen);
|
int length = dt.getLength();
|
||||||
|
if (length <= 0) {
|
||||||
|
length = dtcLen;
|
||||||
|
}
|
||||||
if (length != dtcLen) {
|
if (length != dtcLen) {
|
||||||
dtc.setLength(length, true);
|
dtc.setLength(length, true);
|
||||||
changed = true;
|
changed = true;
|
||||||
|
|
|
@ -160,7 +160,9 @@ public interface DataTypeComponent {
|
||||||
* Returns true if the given dataTypeComponent is equivalent to this dataTypeComponent.
|
* Returns true if the given dataTypeComponent is equivalent to this dataTypeComponent.
|
||||||
* A dataTypeComponent is "equivalent" if the other component has a data type
|
* A dataTypeComponent is "equivalent" if the other component has a data type
|
||||||
* that is equivalent to this component's data type. The dataTypeComponents must
|
* that is equivalent to this component's data type. The dataTypeComponents must
|
||||||
* also have the same offset, length, ordinal, field name, and comment.
|
* also have the same offset, field name, and comment. The length is only checked
|
||||||
|
* for components which are dyanmic and whose size must be specified when creating
|
||||||
|
* a component.
|
||||||
* @param dtc the dataTypeComponent being tested for equivalence.
|
* @param dtc the dataTypeComponent being tested for equivalence.
|
||||||
* @return true if the given dataTypeComponent is equivalent to this dataTypeComponent.
|
* @return true if the given dataTypeComponent is equivalent to this dataTypeComponent.
|
||||||
*/
|
*/
|
||||||
|
|
|
@ -329,17 +329,20 @@ public class DataTypeComponentImpl implements InternalDataTypeComponent, Seriali
|
||||||
DataType myParent = getParent();
|
DataType myParent = getParent();
|
||||||
boolean aligned =
|
boolean aligned =
|
||||||
(myParent instanceof Composite) ? ((Composite) myParent).isInternallyAligned() : false;
|
(myParent instanceof Composite) ? ((Composite) myParent).isInternallyAligned() : false;
|
||||||
// Components don't need to have matching offset when they are aligned, only matching ordinal.
|
// Components don't need to have matching offset when they are aligned
|
||||||
|
// NOTE: use getOffset() method since returned values will differ from
|
||||||
|
// stored values for flexible array component
|
||||||
if ((!aligned && (getOffset() != dtc.getOffset())) ||
|
if ((!aligned && (getOffset() != dtc.getOffset())) ||
|
||||||
// Components don't need to have matching length when they are aligned. Is this correct?
|
|
||||||
// NOTE: use getOffset() and getOrdinal() methods since returned values will differ from
|
|
||||||
// stored values for flexible array component
|
|
||||||
(!aligned && (getLength() != dtc.getLength())) || getOrdinal() != dtc.getOrdinal() ||
|
|
||||||
!SystemUtilities.isEqual(getFieldName(), dtc.getFieldName()) ||
|
!SystemUtilities.isEqual(getFieldName(), dtc.getFieldName()) ||
|
||||||
!SystemUtilities.isEqual(getComment(), dtc.getComment())) {
|
!SystemUtilities.isEqual(getComment(), dtc.getComment())) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Component lengths need only be checked for dynamic types
|
||||||
|
if (getLength() != dtc.getLength() && (myDt instanceof Dynamic)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
return DataTypeUtilities.isSameOrEquivalentDataType(myDt, otherDt);
|
return DataTypeUtilities.isSameOrEquivalentDataType(myDt, otherDt);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -797,15 +797,18 @@ public class StructureDataType extends CompositeDataTypeImpl implements Structur
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
int myNumComps = getNumComponents();
|
int myNumComps = components.size();
|
||||||
int otherNumComps = struct.getNumComponents();
|
int otherNumComps = struct.getNumDefinedComponents();
|
||||||
if (myNumComps != otherNumComps) {
|
if (myNumComps != otherNumComps) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
DataTypeComponent[] otherDefinedComponents = struct.getDefinedComponents();
|
||||||
|
if (otherDefinedComponents.length != myNumComps) { // safety check
|
||||||
|
return false;
|
||||||
|
}
|
||||||
for (int i = 0; i < myNumComps; i++) {
|
for (int i = 0; i < myNumComps; i++) {
|
||||||
DataTypeComponent myDtc = getComponent(i);
|
DataTypeComponent myDtc = components.get(i);
|
||||||
DataTypeComponent otherDtc = struct.getComponent(i);
|
DataTypeComponent otherDtc = otherDefinedComponents[i];
|
||||||
|
|
||||||
if (!myDtc.isEquivalent(otherDtc)) {
|
if (!myDtc.isEquivalent(otherDtc)) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
@ -815,6 +818,9 @@ public class StructureDataType extends CompositeDataTypeImpl implements Structur
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void dataTypeSizeChanged(DataType dt) {
|
public void dataTypeSizeChanged(DataType dt) {
|
||||||
|
if (dt instanceof BitFieldDataType) {
|
||||||
|
return; // unsupported
|
||||||
|
}
|
||||||
if (isInternallyAligned()) {
|
if (isInternallyAligned()) {
|
||||||
adjustInternalAlignment();
|
adjustInternalAlignment();
|
||||||
return;
|
return;
|
||||||
|
@ -823,21 +829,23 @@ public class StructureDataType extends CompositeDataTypeImpl implements Structur
|
||||||
int n = components.size();
|
int n = components.size();
|
||||||
for (int i = 0; i < n; i++) {
|
for (int i = 0; i < n; i++) {
|
||||||
DataTypeComponentImpl dtc = components.get(i);
|
DataTypeComponentImpl dtc = components.get(i);
|
||||||
int nextIndex = i + 1;
|
|
||||||
if (dtc.getDataType() == dt) {
|
if (dtc.getDataType() == dt) {
|
||||||
// assume no impact to bitfields since base types
|
// assume no impact to bitfields since base types
|
||||||
// should not change size
|
// should not change size
|
||||||
int dtLen = dt.getLength();
|
|
||||||
int dtcLen = dtc.getLength();
|
int dtcLen = dtc.getLength();
|
||||||
if (dtLen < dtcLen) {
|
int length = dt.getLength();
|
||||||
dtc.setLength(dtLen);
|
if (length <= 0) {
|
||||||
shiftOffsets(nextIndex, dtcLen - dtLen, 0);
|
length = dtcLen;
|
||||||
|
}
|
||||||
|
if (length < dtcLen) {
|
||||||
|
dtc.setLength(length);
|
||||||
|
shiftOffsets(i + 1, dtcLen - length, 0);
|
||||||
didChange = true;
|
didChange = true;
|
||||||
}
|
}
|
||||||
else if (dtLen > dtcLen) {
|
else if (length > dtcLen) {
|
||||||
int consumed = consumeBytesAfter(i, dtLen - dtcLen);
|
int consumed = consumeBytesAfter(i, length - dtcLen);
|
||||||
if (consumed > 0) {
|
if (consumed > 0) {
|
||||||
shiftOffsets(nextIndex, 0 - consumed, 0);
|
shiftOffsets(i + 1, 0 - consumed, 0);
|
||||||
didChange = true;
|
didChange = true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -890,8 +898,9 @@ public class StructureDataType extends CompositeDataTypeImpl implements Structur
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Create copy of structure for target dtm (source archive information is discarded). WARNING!
|
* Create copy of structure for target dtm (source archive information is discarded).
|
||||||
* copying unaligned structures which contain bitfields can produce invalid results when
|
* <p>
|
||||||
|
* WARNING! copying unaligned structures which contain bitfields can produce invalid results when
|
||||||
* switching endianess due to the differences in packing order.
|
* switching endianess due to the differences in packing order.
|
||||||
*
|
*
|
||||||
* @param dtm target data type manager
|
* @param dtm target data type manager
|
||||||
|
@ -991,8 +1000,7 @@ public class StructureDataType extends CompositeDataTypeImpl implements Structur
|
||||||
private void doReplaceWithAligned(Structure struct) {
|
private void doReplaceWithAligned(Structure struct) {
|
||||||
// assumes components is clear and that alignment characteristics have been set
|
// assumes components is clear and that alignment characteristics have been set
|
||||||
DataTypeComponent[] otherComponents = struct.getDefinedComponents();
|
DataTypeComponent[] otherComponents = struct.getDefinedComponents();
|
||||||
for (int i = 0; i < otherComponents.length; i++) {
|
for (DataTypeComponent dtc : otherComponents) {
|
||||||
DataTypeComponent dtc = otherComponents[i];
|
|
||||||
DataType dt = dtc.getDataType();
|
DataType dt = dtc.getDataType();
|
||||||
int length = (dt instanceof Dynamic) ? dtc.getLength() : -1;
|
int length = (dt instanceof Dynamic) ? dtc.getLength() : -1;
|
||||||
add(dt, length, dtc.getFieldName(), dtc.getComment());
|
add(dt, length, dtc.getFieldName(), dtc.getComment());
|
||||||
|
@ -1011,11 +1019,25 @@ public class StructureDataType extends CompositeDataTypeImpl implements Structur
|
||||||
DataTypeComponent[] otherComponents = struct.getDefinedComponents();
|
DataTypeComponent[] otherComponents = struct.getDefinedComponents();
|
||||||
for (int i = 0; i < otherComponents.length; i++) {
|
for (int i = 0; i < otherComponents.length; i++) {
|
||||||
DataTypeComponent dtc = otherComponents[i];
|
DataTypeComponent dtc = otherComponents[i];
|
||||||
|
|
||||||
DataType dt = dtc.getDataType().clone(dataMgr);
|
DataType dt = dtc.getDataType().clone(dataMgr);
|
||||||
checkAncestry(dt);
|
checkAncestry(dt);
|
||||||
|
|
||||||
int length = getPreferredComponentLength(dt, dtc.getLength());
|
int length = dt.getLength();
|
||||||
|
if (length <= 0 || dtc.isBitFieldComponent()) {
|
||||||
|
length = dtc.getLength();
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
// do not exceed available space
|
||||||
|
int maxOffset;
|
||||||
|
int nextIndex = i + 1;
|
||||||
|
if (nextIndex < otherComponents.length) {
|
||||||
|
maxOffset = otherComponents[nextIndex].getOffset();
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
maxOffset = struct.getLength();
|
||||||
|
}
|
||||||
|
length = Math.min(length, maxOffset - dtc.getOffset());
|
||||||
|
}
|
||||||
|
|
||||||
components.add(new DataTypeComponentImpl(dt, this, length, dtc.getOrdinal(),
|
components.add(new DataTypeComponentImpl(dt, this, length, dtc.getOrdinal(),
|
||||||
dtc.getOffset(), dtc.getFieldName(), dtc.getComment()));
|
dtc.getOffset(), dtc.getFieldName(), dtc.getComment()));
|
||||||
|
@ -1355,8 +1377,7 @@ public class StructureDataType extends CompositeDataTypeImpl implements Structur
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void deleteAll() {
|
public void deleteAll() {
|
||||||
for (int i = 0; i < components.size(); i++) {
|
for (DataTypeComponentImpl dtc : components) {
|
||||||
DataTypeComponent dtc = components.get(i);
|
|
||||||
dtc.getDataType().removeParent(this);
|
dtc.getDataType().removeParent(this);
|
||||||
}
|
}
|
||||||
components.clear();
|
components.clear();
|
||||||
|
|
|
@ -372,11 +372,16 @@ public class UnionDataType extends CompositeDataTypeImpl implements Union {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void dataTypeSizeChanged(DataType dt) {
|
public void dataTypeSizeChanged(DataType dt) {
|
||||||
|
if (dt instanceof BitFieldDataType) {
|
||||||
|
return; // unsupported
|
||||||
|
}
|
||||||
boolean changed = false;
|
boolean changed = false;
|
||||||
for (DataTypeComponentImpl dtc : components) {
|
for (DataTypeComponentImpl dtc : components) {
|
||||||
int length = dtc.getLength();
|
|
||||||
if (dtc.getDataType() == dt) {
|
if (dtc.getDataType() == dt) {
|
||||||
length = getPreferredComponentLength(dt, length);
|
int length = dt.getLength();
|
||||||
|
if (length <= 0) {
|
||||||
|
length = dtc.getLength();
|
||||||
|
}
|
||||||
dtc.setLength(length);
|
dtc.setLength(length);
|
||||||
changed = true;
|
changed = true;
|
||||||
}
|
}
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue