Candidate release of source code.

This commit is contained in:
Dan 2019-03-26 13:45:32 -04:00
parent db81e6b3b0
commit 79d8f164f8
12449 changed files with 2800756 additions and 16 deletions

View file

@ -0,0 +1,151 @@
/*
This gradle file is a 'script plugin'. That is, a script that provides features
meant to be used by other build files via an 'apply' call like so:
apply from: "$rootProject.projectDir/gradleScripts/buildHelp.gradle"
This call will pull in the tasks in this file. Clients can register dependencies
on other help modules by using the 'helpPath' configuration, like so:
helpPath project(path: ":Base", configuration: 'helpPath')
This example will put into the 'helpPath' configuration the project path of
'Base', using the value of its 'helpPath' configuration. This brings us to the next
point--'helpPath' gets updated when the jar file is built to include the path of
the generated jar file. This allows future clients of a given help module to get
the jar file path of that help module's output.
*/
configurations {
	// The Help Build System takes optional paths to resolve dependencies. Build files
	// that use this 'script plugin' may put project paths into this variable.
	// Consumers depend on it via: helpPath project(path: ":Base", configuration: 'helpPath')
	helpPath
}
artifacts {
	// The helpPath is updated to include the jar file output of the help build,
	// so that downstream help modules can locate this module's built help jar.
	helpPath jar
}
sourceSets {
	// Separate source set used only to put the JavaHelp indexer on a classpath
	// (see the 'helpIndexCompile' dependency and the indexHelp task).
	helpIndex {
		java {
		}
	}
	main {
		resources {
			srcDir 'src/main/help'   // hand-written help sources
			srcDir 'build/help/main' // generated help output (search index, built help)
		}
	}
}
dependencies {
	// JavaHelp library providing com.sun.java.help.search.Indexer used by indexHelp
	helpIndexCompile "javax.help:javahelp:2.0.05"
}
// Task for calling the java help indexer, which creates a searchable index of the
// help contents. The index is written under build/help/main so it is picked up as
// a 'main' resource and packaged into the module's jar.
task indexHelp(type: JavaExec) {

	group "private"
	description "indexes the help files for this module. [gradleScripts/buildHelp.gradle]"

	File helpRootDir = file('src/main/help/help')
	File outputFile = file("build/help/main/help/${project.name}_JavaHelpSearch")

	dependsOn configurations.helpPath

	inputs.dir helpRootDir
	outputs.dir outputFile

	classpath = sourceSets.helpIndex.runtimeClasspath
	main = 'com.sun.java.help.search.Indexer'

	// tell the indexer where to send its output
	args '-db', outputFile.absolutePath

	// The indexer has a config file parameter. The only thing we use in the config file
	// is a root directory path that should be stripped off all the help references to
	// make them relative instead of absolute
	File configFile = file('build/helpconfig')

	// gather up all the help files into a file collection
	FileTree helpFiles = fileTree('src/main/help') {
		include '**/*.htm'
		include '**/*.html'
	}

	// pass the config file we created as an argument to the indexer
	args '-c', "$configFile"

	doFirst {
		if (helpFiles.isEmpty()) {
			// must have help to index
			throw new GradleException("No help files found")
		}

		// create the config file when the task runs and not during configuration.
		configFile.parentFile.mkdirs();
		configFile.write "IndexRemove ${helpRootDir.absolutePath}" + File.separator + "\n"

		// for each help file that was found, add it as an argument to the indexer
		helpFiles.each { File file ->
			args "${file.absolutePath}"
		}
	}
}
// Task for building Ghidra help files
// - depends on the output from the help indexer
buildHelp(type: JavaExec, dependsOn: indexHelp) {

	group rootProject.GHIDRA_GROUP
	description "	Builds the help for this module. [gradleScripts/buildHelp.gradle]\n"

	File helpRootDir = file('src/main/help/help')       // help sources to compile
	File outputDir = file('build/help/main/help')       // where GHelpBuilder writes results

	inputs.dir helpRootDir
	outputs.dir outputDir

	// this modules runtime classpath (contains jhall.jar)
	classpath = project(':Help').sourceSets.main.runtimeClasspath

	main = 'help.GHelpBuilder'

	args '-n', "${project.name}"               // use the module's name for the help file name
	args '-o', "${outputDir.absolutePath}"     // set the output directory arg
	// args '-debug'                           // print debug info

	doFirst {
		// resolve dependency help paths at execution time; each becomes a '-hp' argument
		configurations.helpPath.each {
			args "-hp"
			args "${it.absolutePath}"
		}

		// The help dir to process. This needs to be the last argument to the process,
		// thus, this is why it is inside of this block
		args "${helpRootDir.absolutePath}"
	}
}
// include the help into the module's jar
jar {
	from "build/help/main"  // include the generated help index files
	from "src/main/help"    // include the help source files
}

// build the help whenever this module's jar file is built
jar.dependsOn 'buildHelp'

View file

@ -0,0 +1,22 @@
##VERSION: 2.0
buildHelp.gradle||GHIDRA||||END|
developerScripts.gradle||GHIDRA||||END|
distribution.gradle||GHIDRA||||END|
eclipseFilters.gradle||GHIDRA||||END|
eclipseLauncher.gradle||GHIDRA||||END|
ghidraScripts.gradle||GHIDRA||||END|
ip.gradle||GHIDRA||||END|
jacoco.excludes.src.txt||GHIDRA||||END|
jacoco.gradle||GHIDRA||||END|
loadApplicationProperties.gradle||GHIDRA||||END|
nativeBuildProperties.gradle||GHIDRA||||END|
old.wrapper.gradle||GHIDRA||||END|
prepDev.gradle||GHIDRA||||END|
processorUtils.gradle||GHIDRA||||END|
settingsUtil.gradle||GHIDRA||||END|
setupJacoco.gradle||GHIDRA||||END|
setupJava.gradle||GHIDRA||||END|
svg.gradle||GHIDRA||||END|
test.gradle||GHIDRA||||END|
testUtils.gradle||GHIDRA||||END|
usage.gradle||GHIDRA||||END|

View file

@ -0,0 +1,7 @@
// Registers 'developer_scripts' as a java source directory in its own
// 'scripts' source set so the IDE/build picks up developer script sources.
sourceSets {
	scripts {
		java {
			srcDir 'developer_scripts'
		}
	}
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,134 @@
// Eclipse project configuration for the synthetic '___root' project.
// Each resourceFilter EXCLUDES matching resources from the Eclipse project view
// (they remain on disk and in the Gradle build).
// Matcher argument format: '1.0-<property>-matches-<caseSensitive>-<isRegex>-<pattern>'.
eclipse {
	project {
		name = '___root'
		// hide the .gradle cache directory
		resourceFilter {
			appliesTo = 'FILES_AND_FOLDERS'
			type = 'EXCLUDE_ALL'
			matcher {
				id = 'org.eclipse.ui.ide.multiFilter'
				arguments = '1.0-projectRelativePath-matches-true-false-.gradle'
			}
		}
		resourceFilter {
			appliesTo = 'FOLDERS'
			type = 'EXCLUDE_ALL'
			matcher {
				id = 'org.eclipse.ui.ide.multiFilter'
				arguments = '1.0-name-matches-true-false-GhidraTest'
			}
		}
		// hide top-level module category folders (each has its own Eclipse project)
		resourceFilter {
			appliesTo = 'FOLDERS'
			type = 'EXCLUDE_ALL'
			matcher {
				id = 'org.eclipse.ui.ide.multiFilter'
				arguments = '1.0-name-matches-false-false-Features'
			}
		}
		resourceFilter {
			appliesTo = 'FOLDERS'
			type = 'EXCLUDE_ALL'
			matcher {
				id = 'org.eclipse.ui.ide.multiFilter'
				arguments = '1.0-name-matches-false-false-Framework'
			}
		}
		resourceFilter {
			appliesTo = 'FOLDERS'
			type = 'EXCLUDE_ALL'
			matcher {
				id = 'org.eclipse.ui.ide.multiFilter'
				arguments = '1.0-name-matches-false-false-Processors'
			}
		}
		resourceFilter {
			appliesTo = 'FOLDERS'
			type = 'EXCLUDE_ALL'
			matcher {
				id = 'org.eclipse.ui.ide.multiFilter'
				arguments = '1.0-name-matches-false-false-Test'
			}
		}
		resourceFilter {
			appliesTo = 'FOLDERS'
			type = 'EXCLUDE_ALL'
			matcher {
				id = 'org.eclipse.ui.ide.multiFilter'
				arguments = '1.0-name-matches-false-false-ProcessorTest'
			}
		}
		resourceFilter {
			appliesTo = 'FOLDERS'
			type = 'EXCLUDE_ALL'
			matcher {
				id = 'org.eclipse.ui.ide.multiFilter'
				arguments = '1.0-name-matches-false-false-Configurations'
			}
		}
		resourceFilter {
			appliesTo = 'FOLDERS'
			type = 'EXCLUDE_ALL'
			matcher {
				id = 'org.eclipse.ui.ide.multiFilter'
				arguments = '1.0-name-matches-false-false-Extensions'
			}
		}
		resourceFilter {
			appliesTo = 'FOLDERS'
			type = 'EXCLUDE_ALL'
			matcher {
				id = 'org.eclipse.ui.ide.multiFilter'
				arguments = '1.0-name-matches-false-false-RuntimeScripts'
			}
		}
		// hide build output and auxiliary project folders
		resourceFilter {
			appliesTo = 'FILES_AND_FOLDERS'
			type = 'EXCLUDE_ALL'
			matcher {
				id = 'org.eclipse.ui.ide.multiFilter'
				arguments = '1.0-name-matches-true-false-GhidraBuild'
			}
		}
		resourceFilter {
			appliesTo = 'FOLDERS'
			type = 'EXCLUDE_ALL'
			matcher {
				id = 'org.eclipse.ui.ide.multiFilter'
				arguments = '1.0-name-matches-false-false-Dev'
			}
		}
		// NOTE: this filter uses isRegex=true, unlike the others
		resourceFilter {
			appliesTo = 'FILES_AND_FOLDERS'
			type = 'EXCLUDE_ALL'
			matcher {
				id = 'org.eclipse.ui.ide.multiFilter'
				arguments = '1.0-name-matches-false-true-CabExtract'
			}
		}
		resourceFilter {
			appliesTo = 'FILES_AND_FOLDERS'
			type = 'EXCLUDE_ALL'
			matcher {
				id = 'org.eclipse.ui.ide.multiFilter'
				arguments = '1.0-name-matches-false-false-GhidraDocs'
			}
		}
		resourceFilter {
			appliesTo = 'FILES_AND_FOLDERS'
			type = 'EXCLUDE_ALL'
			matcher {
				id = 'org.eclipse.ui.ide.multiFilter'
				arguments = '1.0-name-matches-false-false-DMG'
			}
		}
		resourceFilter {
			appliesTo = 'FILES_AND_FOLDERS'
			type = 'EXCLUDE_ALL'
			matcher {
				id = 'org.eclipse.ui.ide.multiFilter'
				arguments = '1.0-name-matches-false-false-DemanglerGnu'
			}
		}
	}
}

View file

@ -0,0 +1,149 @@
import org.gradle.api.*;
import org.gradle.artifacts.*;
import org.gradle.process.JavaExecSpec;
import org.gradle.api.file.*;
import org.gradle.api.tasks.*;
import org.gradle.api.internal.file.UnionFileCollection;
import groovy.xml.MarkupBuilder;
/**
 * Task type that, instead of executing a Java process, writes an Eclipse
 * .launch file describing the equivalent launch configuration (main class,
 * classpath, JVM and program arguments).
 *
 * The walk* methods rely on Groovy runtime multiple-dispatch: the overload
 * chosen depends on the RUNTIME type of the collection/dependency, so the
 * set of overloads forms a recursive visitor over Gradle's file-collection
 * and dependency graph.
 */
public class WriteEclipseLauncher extends JavaExec {
	// Destination .launch file
	@OutputFile
	File dest;

	// Whether to list this launcher in Eclipse's "Run" favorites menu
	@Input
	boolean isRunFave = false;

	// Whether to list this launcher in Eclipse's "Debug" favorites menu
	@Input
	boolean isDbgFave = false;

	// When true, omit the explicit classpath and let Eclipse use the project default
	@Input
	boolean useEclipseDefaultClasspath = false

	// Returns the sole element of a collection; asserts there is exactly one.
	public <E> E one(Collection<E> col) {
		assert col.size() == 1;
		return col.iterator().next();
	}

	// Recurse into each member of a union of file collections.
	void walkFileCollection(UnionFileCollection col, Set gathered) {
		col.sources.each {
			walkFileCollection(it, gathered);
		}
	}

	// Recurse into each source of a configurable file collection.
	void walkFileCollection(ConfigurableFileCollection col, Set gathered) {
		col.from.each {
			walkFileCollection(it, gathered);
		}
	}

	// Leaf case: a source set's output is gathered directly.
	void walkFileCollection(SourceSetOutput col, Set gathered) {
		gathered.add(col);
	}

	// A Configuration contributes via its declared dependencies.
	void walkFileCollection(Configuration col, Set gathered) {
		col.allDependencies.each {
			walkDependency(it, gathered)
		}
	}

	// Leaf case: external (Maven-style) module dependency.
	void walkDependency(ExternalModuleDependency dep, Set gathered) {
		gathered.add(dep)
	}

	// Project dependency: gather it, then recurse into the target project's
	// configuration (the dependency's target configuration, or 'default').
	void walkDependency(ProjectDependency dep, Set gathered) {
		gathered.add(dep)
		Project project = dep.dependencyProject
		String confName = dep.targetConfiguration ?: 'default'
		Configuration configuration = project.configurations."$confName"
		walkFileCollection(configuration, gathered)
	}

	// Renders a runtimeClasspathEntry XML fragment for an external archive,
	// located by matching the dependency name against the resolved classpath.
	String makeEntryXml(ExternalModuleDependency dep, FileCollection cp) {
		def writer = new StringWriter();
		def xml = new MarkupBuilder(writer);
		def file = cp.find { it.name.contains(dep.name) }
		xml.mkp.xmlDeclaration(version: '1.0', encoding: 'UTF-8', standalone: 'no')
		xml.runtimeClasspathEntry(
			externalArchive: file,
			path: 5,
			// TODO: Figure out source jar
			type: 2
		);
		return writer
	}

	// Renders a runtimeClasspathEntry XML fragment referencing an Eclipse project.
	String makeEntryXml(Project proj) {
		def writer = new StringWriter();
		def xml = new MarkupBuilder(writer);
		xml.mkp.xmlDeclaration(version: '1.0', encoding: 'UTF-8', standalone: 'no')
		xml.runtimeClasspathEntry(
			path: 5,
			projectName: proj.eclipse.project.name,
			type: 1
		)
		return writer
	}

	// Project dependency entry delegates to the project form above.
	String makeEntryXml(ProjectDependency dep, FileCollection cp) {
		return makeEntryXml(dep.dependencyProject);
	}

	// SourceSetOutput entry: resolve the producing task and use its project.
	// NOTE(review): assumes exactly one task builds the output — 'one' asserts this.
	String makeEntryXml(SourceSetOutput out, FileCollection cp) {
		Task task = one(out.buildDependencies.getDependencies(null))
		return makeEntryXml(task.project)
	}

	// Conventional location of a launcher file for the given name.
	public File forName(String name) {
		return project.file(".launch/${name}.launch")
	}

	// JVM args minus any '-cp <path>' pair: Eclipse supplies the classpath itself.
	List<String> getJvmArgumentsForEclipse() {
		List<String> all = allJvmArgs;
		int index = all.indexOf('-cp');
		if (index == -1) {
			return all;
		}
		all.remove(index);  // remove '-cp'
		all.remove(index);  // remove the path that followed it
		return all;
	}

	@TaskAction
	void exec() { // Override exec. Instead write launcher
		dest.parentFile.mkdirs();
		def launcher = new MarkupBuilder(new FileWriter(dest));
		Set gathered = new LinkedHashSet();
		if (!useEclipseDefaultClasspath) {
			walkFileCollection(classpath, gathered);
		}
		launcher.mkp.xmlDeclaration(version: '1.0', encoding: 'UTF-8', standalone: 'no')
		launcher.launchConfiguration(type: 'org.eclipse.jdt.launching.localJavaApplication') {
			listAttribute(key: 'org.eclipse.debug.ui.favoriteGroups') {
				if (isRunFave) {
					listEntry(value: 'org.eclipse.debug.ui.launchGroup.run');
				}
				if (isDbgFave) {
					listEntry(value: 'org.eclipse.debug.ui.launchGroup.debug');
				}
			}
			if (!useEclipseDefaultClasspath) {
				listAttribute(key: 'org.eclipse.jdt.launching.CLASSPATH') {
					gathered.each {
						listEntry(value: makeEntryXml(it, classpath))
					}
				}
			}
			booleanAttribute(key: 'org.eclipse.jdt.launching.DEFAULT_CLASSPATH', value: useEclipseDefaultClasspath);
			stringAttribute(key: 'org.eclipse.jdt.launching.MAIN_TYPE', value: main);
			// TODO: Proper escaping of program and JVM arguments.
			stringAttribute(key: 'org.eclipse.jdt.launching.PROGRAM_ARGUMENTS', value: args.join(' '));
			stringAttribute(key: 'org.eclipse.jdt.launching.PROJECT_ATTR', value: project.eclipse.project.name);
			stringAttribute(key: 'org.eclipse.jdt.launching.VM_ARGUMENTS', value: jvmArgumentsForEclipse.join(' '));
		}
	}
}
// Expose the task class to every project's build script as an extra property,
// so build files can declare tasks of type WriteEclipseLauncher.
allprojects {
	ext.WriteEclipseLauncher = WriteEclipseLauncher
}

View file

@ -0,0 +1,7 @@
// Registers 'ghidra_scripts' as a java source directory in its own
// 'scripts' source set so Ghidra script sources are compiled/indexed.
sourceSets {
	scripts {
		java {
			srcDir 'ghidra_scripts'
		}
	}
}

320
gradleScripts/ip.gradle Normal file
View file

@ -0,0 +1,320 @@
/*********************************************************************************
* ip.gradle
*
* This file defines the gradle tasks for generating the LICENSE.txt in each module
* which lists all the 3rd party files in this module and their licenses.
*
* The task also verifies that the license for each of the 3rd party files is allowed
* based on what license files exist in the root/licenses directory.
*
* It also reads the Module.manifest file where license information is recorded for
* jar dependencies and build artifacts. These are added to the LICENSE.txt file. Also,
* the jar dependencies (as defined in the build.gradle file) are examined to make
* sure they are defined in the Module.manifest file.
*********************************************************************************/
/*********************************************************************************
 * Defines the main ip task for each subproject
 *********************************************************************************/
subprojects {
	// Projects may opt out of IP verification by setting either flag in their build.gradle.
	ext.isPureEclipseProject = false;
	ext.excludeFromBuild = false;

	task ip {
		doLast {
			if (isPureEclipseProject || excludeFromBuild) {
				// If the build.gradle file exists only to establish an
				// eclipse project or is a support project, don't verify IP.
				return;
			}

			// scans all the files in the module, reads ip from header, verifies ip, and creates mapping
			def ipToFileMap = getIpForModule(project)

			// reads the ip info from the Module.manifest file and verifies each ip
			def moduleManifestIpMap = getModuleManifestIp(project)

			// gets the external libs from gradle and verifies they are accounted for in the Module.manifest file
			checkExternalLibsInMap(moduleManifestIpMap, project)

			// adds the ip info from the Module.manifest file to the map generated from scanning the module files.
			addModuleManifestIp(ipToFileMap, moduleManifestIpMap)

			// writes the LICENSE.txt file for the module
			writeLicenseInfo(project, ipToFileMap)
		}
	}
}
/*********************************************************************************
 * Adds the ip information from the Module.manifest file into the ipToFileMap
 *********************************************************************************/
def addModuleManifestIp(Map<String, List<String>> ipToFileMap, Map<String, String> moduleManifestIpMap) {
	// Merge each (path -> ip) entry into the ip -> [paths] mapping.
	for (path in moduleManifestIpMap.keySet()) {
		String ip = moduleManifestIpMap.get(path)
		addToMap(ipToFileMap, ip, path)
	}
}
/*********************************************************************************
 * Reads the ip info in the Module.manifest file and creates a mapping of path to ip
 *
 * Recognized line format: "MODULE FILE LICENSE: <path> <ip text...>"
 * (the ip text may contain spaces; everything after the first space is the ip).
 * Asserts each ip is in the allowed set for the project.
 *********************************************************************************/
def Map<String, String> getModuleManifestIp(Project project) {
	Map<String, String> map = new HashMap<String, String>();
	File moduleManifest = new File(project.projectDir, "Module.manifest")
	if (!moduleManifest.exists()) {
		// no manifest means no declared jar/artifact licenses for this module
		return map
	}

	def allowedIP = getAllowedIP(project)

	def lines = moduleManifest.readLines();
	String key = "MODULE FILE LICENSE:"
	for(line in lines) {
		if (line.startsWith(key)) {
			String s = line.substring(key.length()).trim()
			int index = s.indexOf(' ')
			String path = s.substring(0, index).trim()
			String ip = s.substring(index+1).trim()
			// allowed set uses '_' in place of spaces (derived from license file names)
			def ipString = ip.replace(' ','_')
			assert allowedIP.contains(ipString) : "Encountered Non-allowed IP: "+ip+ " for Module.manifest entry: "+path
			map.put(path, ip)
		}
	}
	return map
}
/**********************************************************************************
 * Gets the gradle dependences and makes sure each external lib is accounted for in
 * the map from the Module.manifest file
 *
 * Asserts (fails the build) if any external library lacks a "lib/<name>" license
 * entry in the Module.manifest.
 *********************************************************************************/
def checkExternalLibsInMap(Map<String, String> map, Project project) {
	List<String> libs = getExternalDependencies(project)

	libs.each { lib ->
		String libName = new File(lib).getName()	// get just the filename without the path
		String relativePath = "lib/"+libName;
		assert map.containsKey(relativePath) : "No License specified for external library: "+relativePath+ " in module "+project.projectDir
	}
}
/*********************************************************************************
 * Examines all the files in the module, reads their ip from the header, verifies
 * that the ip is allowed, and adds an entry to a mapping of the ip to a list of
 * files with that ip
 *
 * Modules without a certification.manifest are skipped (empty map returned).
 * Every scanned file MUST yield an ip, or the build fails via assert.
 *********************************************************************************/
def Map<String, List<String>> getIpForModule(Project p) {
	Map<String, List<String>> map = new HashMap<String, List<String>>()

	File certificationFile = new File(p.projectDir, "certification.manifest")
	if (!certificationFile.exists()) {
		return map;
	}

	def allowedIP = getAllowedIP(p)

	// everything in the module except build outputs, IDE metadata, and
	// files whose ip is tracked elsewhere (manifests, build.gradle, etc.)
	FileTree tree = p.fileTree(".") {
		exclude "bin/**";
		exclude "**/build/**";
		exclude "certification.manifest"
		exclude "certification.local.manifest"
		exclude ".project"
		exclude ".classpath"
		exclude "Module.manifest"
		exclude "build.gradle"
		exclude "**/Misc/Tips.htm"
		exclude "**/*.sla"
		exclude "**/.gradle/**"
		exclude "**/.settings/**"
	}

	tree.each { file ->
		String ip = getIp(p.projectDir, file)
		assert ip != null : "No IP found for "+file.path+ " in module: "+p.projectDir
		// allowed set uses '_' in place of spaces (derived from license file names)
		String ipString = ip.replace(' ','_')
		assert allowedIP.contains(ipString) : "Found non-allowed IP: "+ip+" for file "+file.path+" in module: "+p.projectDir
		addToMap(map, ip, getRelativePath(p.projectDir, file))
	}
	return map;
}
/*********************************************************************************
 * Returns the relative path of a file in the module
 * (the file path with the project directory prefix and its separator removed).
 *********************************************************************************/
def String getRelativePath(File projectDir, File file) {
	// +1 also drops the path separator that follows the project directory
	int prefixLength = projectDir.getPath().length() + 1
	return file.getPath().substring(prefixLength)
}
/*********************************************************************************
 * adds a path and its ip to the mapping of ip to list of files
 * (creates the list for an ip on first use)
 *********************************************************************************/
def addToMap(Map<String, List<String>> map, String ip, String path) {
	List<String> list = map.computeIfAbsent(ip) { new ArrayList<String>() }
	list.add(path)
}
/*********************************************************************************
 * checks if a file supports a C style header based on its extension.
 *********************************************************************************/
def isSourceFile(File file) {
	// extensions of file types whose headers can carry an "IP:" line
	def headerExtensions = [
		".java", ".c", ".groovy", ".cpp", ".cc", ".h",
		".y", ".l", ".hh", ".css", ".jj"
	]
	String filename = file.getName().toLowerCase()
	return headerExtensions.any { ext -> filename.endsWith(ext) }
}
/*********************************************************************************
 * Gets the ip for a file in the module from its header (or certification.manifest)
 *********************************************************************************/
def getIp(File projectDir, File file) {
	// source files carry their ip in a header comment;
	// everything else is looked up in the certification.manifest
	return isSourceFile(file)
			? getIpForSourceFile(file)
			: getIpForNonSourceFile(projectDir, file)
}
/*********************************************************************************
 * Gets the ip from a file that has a certification header
 *
 * Scans line-by-line for a header line of the form " * IP: <name>" and returns
 * the trimmed text after the marker; returns null if no such line exists.
 *********************************************************************************/
def getIpForSourceFile(File file) {
	String ip =null
	String line;
	file.withReader { reader ->
		while((line = reader.readLine()) != null) {
			if (line.startsWith(" * IP:")) {
				// NOTE(review): substring(7) assumes a space follows "IP:" — confirm header format
				ip = line.substring(7).trim();
				break;
			}
		}
	}
	return ip;
}
/*********************************************************************************
 * Gets the ip for a file that does not have a header, but has an entry in the
 * certification.manifest
 *
 * Manifest lines are '|'-delimited with the relative path in field 0 and the ip
 * in field 2. If multiple lines match, the LAST matching entry wins (no early
 * exit). Returns null when no entry matches.
 *********************************************************************************/
def getIpForNonSourceFile(File projectDir, File file) {
	String ip = null
	File manifest = new File(projectDir, "certification.manifest");
	def lines = manifest.readLines()
	lines.each {line ->
		line = line.trim();
		def parts = line.split("\\|");
		// normalize separators so Windows paths compare against manifest's '/' paths
		if (parts.length > 2 && file.toString().replace(File.separator, "/").endsWith(parts[0])) {
			ip = parts[2];
		}
	}
	return ip;
}
/*********************************************************************************
 * Writes the license information to the LICENSE.txt file for the module
 * (build/LICENSE.txt). Does nothing when the module has no ip entries.
 *********************************************************************************/
def writeLicenseInfo(Project project, Map<String, List<String>> map) {
	if (map.isEmpty()) {
		return;
	}
	File buildDir = new File(project.projectDir, "build")
	buildDir.mkdir();
	File licenseFile = new File(buildDir,"LICENSE.txt");

	def buf = new StringBuffer();

	addLicenseProlog(project, buf)
	map.keySet().each { ip ->
		// each ip section lists the files covered by that license
		reportLicenseFiles(buf, ip, map.get(ip))
	}
	licenseFile.text = buf.toString()
}
/*********************************************************************************
 * Writes the files for a single ip
 * (internal categories GHIDRA and LICENSE are omitted from the report)
 *********************************************************************************/
def reportLicenseFiles(StringBuffer buf, String ip, List<String> filepaths) {
	if (ip.equals("GHIDRA") || ip.equals("LICENSE")) {
		return;
	}
	buf.append(ip+":\n\n")
	for (String path : filepaths) {
		buf.append("\t").append(path).append("\n")
	}
	buf.append("\n")
}
/*********************************************************************************
 * Generates the text for the prolog (non-changing) part of the LICENSE.txt file
 *
 * Modules under a GPL directory get the GPL 3 prolog; all other modules get the
 * Apache 2.0 prolog.
 *********************************************************************************/
def addLicenseProlog(Project project, StringBuffer buf) {
	if (project.projectDir.toString().contains(File.separator + "GPL" + File.separator)) {
		buf.append("The program in this module is released under the GPL 3 license. \n")
		buf.append("The files used to create this program include both public domain\n")
		buf.append("files created by the Ghidra team and 3rd party files with \n")
		buf.append("the GPL 3 or GPL 3 compatible license. ")
		buf.append("The license files for each of license used can be found in the\n")
		buf.append("<installation root>/GPL/licenses.\n\n")
		buf.append("\nThe 3rd party files in this module are as follows:\n\n\n")
	}
	else {
		buf.append("Ghidra software is released under the Apache 2.0 license. In addition, \n")
		buf.append("there are numerous 3rd party components that each have their \n")
		buf.append("own license. The license file for each of these licenses can be found\n")
		buf.append("in the licenses directory in the installation root directory.\n")
		buf.append("\nThe 3rd party files in this module are as follows:\n\n\n")
	}
}
/*********************************************************************************
 * Examines the <root>/licenses directory to discover what licenses are allowed
 *
 * GPL modules get a fixed allowed set; other modules derive the set from the
 * license files found under each repo sibling's 'licenses' directory. The
 * internal categories GHIDRA, LICENSE, and Copyright_Distribution_Permitted
 * are always allowed.
 *********************************************************************************/
def Set<String> getAllowedIP(Project p) {
	Set<String> set = new HashSet<String>()
	// normalize separators so the GPL check works on Windows too
	def projectPath = p.projectDir.path.replace(File.separator, "/");
	if (projectPath.contains("/GPL/")) {
		set.add("GPL_3")
		set.add("GPL_3_Linking_Permitted")
		set.add("GPL_2_With_Classpath_Exception")
		set.add("Public_Domain")
		set.add("LGPL_3.0")
		set.add("LGPL_2.1")
	}
	else {
		// scan <repo sibling>/licenses/* across all repos next to the root project
		File root = p.rootProject.file("..")
		root.listFiles().each { f ->
			File licenseDir = new File(f, "licenses")
			File[] files = licenseDir.listFiles()
			files.each { file ->
				set.add(getIpName(file.getName()))
			}
		}
	}
	set.add("GHIDRA")
	set.add("LICENSE")
	set.add("Copyright_Distribution_Permitted")
	return set;
}
/*********************************************************************************
 * converts a file name to an ip name that can be compared to info from headers.
 * Strips trailing extensions, checked in order: .txt, then .htm, then .html
 * (so "x.htm.txt" is reduced all the way to "x", matching the original checks).
 *********************************************************************************/
def String getIpName(String filename) {
	for (ext in [".txt", ".htm", ".html"]) {
		if (filename.endsWith(ext)) {
			filename = filename.substring(0, filename.length() - ext.length())
		}
	}
	return filename
}

View file

@ -0,0 +1,126 @@
// Don't profile tests
**/*Test*
generic/test/**
// Ignore exception classes, as usually do not contain logic
**/*Exception*
// Utility classes not used by Ghidra
**/certify/**
**/tracker/**
**/review/**
extract/**
generic/profile/**
ghidra/app/help/**
ghidra/cpp/**
ghidra/feature/fid/debug/**
ghidra/util/profile/**
**/stl/**
ghidra/pcodeCPort/**
ghidra/program/database/data/DataTypeArchiveTransformer*
ghidra/sleigh/grammar/**
ghidra/util/GhidraJarBuilder*
generic/jar/**
ghidra/util/JavaSourceFile*
// this should probably be repackaged as 'help/build'
help/**
help/screenshot/**
help/validator/**
util/DebugThreadDumper**
JsonDoclet*
// Auto-generated code
ghidra/app/util/cparser/C/**
ghidra/app/util/cparser/CPP/**
ghidra/app/util/cparser/cplusplus/**
// Classes not used during testing
db/GhidraDBBufferFileAnalyzer*
db/DbViewer*
ghidra/DatabaseBenchMarks*
ghidra/GhidraLauncher*
ghidra/launch/**
LaunchSupport*
ghidra/GhidraThreadGroup*
ghidra/HelpAdapter*
ghidra/ClassSearcherStatusReportingTaskMonitor*
ghidra/app/plugin/debug/**
generic/platform/OSXAdapter*
// Old/deprecated APIs
ghidra/app/program/database/oldfunction/**
ghidra/feature/vt/api/stringable/deprecated/**
**/BookmarkDBAdapterV0/**
**/BookmarkDBAdapterV1/**
**/BookmarkTypeDBAdapterNoTable/**
**/OldBookmark/**
// Interface/constant classes
ghidra/app/plugin/GenericPluginCategoryNames*
// Language code - currently untested
ghidra/app/plugin/processors/generic/**
ghidra/app/util/disassemble/**
generic/lsh/vector/**
ghidra/pcode/**
ghidra/program/emulation/**
ghidra/program/model/pcode/**
ghidra/util/state/**
// (we currently do not test analyzers)
ghidra/app/plugin/core/analysis/**
ghidra/javaclass/**
ghidra/util/state/analysis/**
// Hard to test headlessly
ghidra/app/plugin/core/printing/**
// File formats -- these should be tested!!!!
ghidra/file/formats/**
ghidra/file/jad/**
ghidra/app/cmd/formats/**
mobiledevices/**
// Items we should probably figure out how to test
# ghidra/util/bean/dnd/**
# ghidra/app/plugin/core/renoir/**
# ghidra/app/util/demangler/gnu/**
# ghidra/util/demangler/**
# ghidra/server/**
# ghidra/remote/security/**
// Packages that use reflection, which can be broken by Jacoco
# ghidra/python/**
# ghidra/app/util/bin/**
// Contribs
ghidra/app/plugin/prototype/**
ghidra/idapro/**
ghidra/machinelearning/**
DelphiAnalyzer*
SortedInstructionMerger*
DecodeBitMasks*
ollydbg/**
dbg/**
// this is only used by dbg
ghidra/io/connection/**
// Old stuff??
# org/crosswire/**
# ghidra/comm/**

208
gradleScripts/jacoco.gradle Normal file
View file

@ -0,0 +1,208 @@
// Used for jacocoBranchReport task. Cmd line param to specify branch origin. Defaults to master.
def jacoco_origin = project.hasProperty('jacoco.origin') ? project.getProperty('jacoco.origin') : "master"
import groovy.io.FileType;
// All jacoco tasks are registered only when coverage is enabled for this build.
if (project.jacocoEnabled) {

	def String jacocoRootExecPath = "$buildDir/jacoco/jacocoMerge.exec"
	def numFoundExecutionFiles = 0 // number of jacoco data files found in subprojects

	delete new File(jacocoRootExecPath)	// If the merged exec file (output from jacocoMerge) exists,
						// jacocoReport & jacocoBranchReport tasks are skipped and
						// the report is not generated.
						// So always delete the merged file before the determination
						// to skip a task is made.

	// class-exclusion patterns parsed from gradleScripts/jacoco.excludes.src.txt
	List excludesList = generateExcludesList()
	/*********************************************************************************
	 * Task to merge multiple jacoco execution data files into one.
	 *********************************************************************************/
	task jacocoMerge(type: JacocoMerge) {
		description = 'Task to merge multiple jacoco execution data files into one.'
		destinationFile = new File(jacocoRootExecPath)

		// Make this collection of execution data files empty during the configuration phase.
		// There may be new exec files generated during the execution phase
		// (ex: gradle test jacocoReport). So gather up these files in the execution phase
		// via doFirst below.
		executionData = project.files([])

		// Before Task runs, update executionData by searching for files in each subproject.
		doFirst {
			logger.debug("jacocoMerge: Searching in " + subprojects.size() + " subproject(s)")
			subprojects.each { p ->
				logger.debug("jacocoMerge: Searching $p.name subproject in directory: $p.buildDir/jacoco/")
				File jacocoExecDir = new File("$p.buildDir/jacoco/")
				if (jacocoExecDir.exists()) {
					jacocoExecDir.eachFileRecurse (FileType.FILES) { file ->
						// count is also read by the report tasks to decide whether to write output
						numFoundExecutionFiles++
						logger.debug("jacocoMerge: Adding $p.name: $file")
						executionData file
					}
				}
			}
			println "jacocoMerge: Added $numFoundExecutionFiles execution data files to $destinationFile"
		}
	}
	/*********************************************************************************
	 * Task to create a jacoco report based on changes from current branch and origin.
	 * Default origin is 'master'. Specify -Pjacoco.origin=value to change the value of origin.
	 *
	 * NOTE(review): the git commands below run at CONFIGURATION time (on every
	 * gradle invocation, not just when this task executes) and require /bin/bash,
	 * so this task is not usable on hosts without bash/git — confirm intended.
	 *********************************************************************************/
	task jacocoBranchReport(type: JacocoReport, group: 'Coverage reports') {
		description = 'Generates a Jacoco report based on changes from current branch and origin.'
		dependsOn ":jacocoMerge"
		executionData new File(jacocoRootExecPath)

		// Get current branch name
		String[] cmd = ["/bin/bash", "-c", "git rev-parse --abbrev-ref HEAD"]
		ProcessBuilder builder = new ProcessBuilder();
		builder.command(cmd);
		Process process = builder.start();
		def branchName = process.in.text
		process.waitFor();
		branchName = branchName.trim()
		logger.debug("jacocoBranchReport: Current branchName is $branchName")

		// Find commit in origin before branching. See: https://stackoverflow.com/q/1527234
		cmd = ["/bin/bash", "-c", "diff -u <(git rev-list --first-parent $branchName) <(git rev-list --first-parent $jacoco_origin) | sed -ne 's/^ //p' | head -1"]
		builder = new ProcessBuilder();
		builder.command(cmd);
		process = builder.start();
		def lastRevision = process.in.text
		process.waitFor();
		lastRevision = lastRevision.trim()
		logger.debug("jacocoBranchReport: last revision before branching from $jacoco_origin is $lastRevision")

		// Find the files that were changed in the branch.
		builder = new ProcessBuilder();
		cmd = ["/bin/bash", "-c", "git diff --name-only $lastRevision"]
		builder.command(cmd);
		process = builder.start();
		def filesChanged = process.in.text
		process.waitFor();
		logger.debug("jacocoBranchReport: files changed are:" + filesChanged)

		List filesToInclude = new ArrayList<String>()
		filesChanged.split().each{ fileName ->
			// Filter out files not in src/main/java and create an inclusion pattern.
			if(fileName.endsWith(".java") && fileName.contains("/src/main/java/")) {
				String fqName = fileName.split("/src/main/java/")[1]
				fqName = fqName.replace(".java", ".class")
				filesToInclude.add(fqName)
			}
		}

		sourceDirectories = files(subprojects.sourceSets.main.allSource.srcDirs)
		classDirectories = files(subprojects.sourceSets.main.output)

		logger.debug("jacocoBranchReport: Files to include: " + filesToInclude)

		// Only include these src/main/java files in the report
		if (filesToInclude.size() > 0) {
			classDirectories = files(classDirectories.files.collect {
				fileTree(dir: it,
					include: filesToInclude.toArray(new String[filesToInclude.size()]))
			})
		}

		// Turn on html reports, 'doFirst' may disable this later on.
		reports {
			html.enabled = true
			xml.enabled = false
		}

		// Output info before execution.
		doFirst {
			println "jacocoBranchReport: Found $filesToInclude.size Java files to filter on branch '$branchName' and revision $lastRevision from origin '$jacoco_origin'"
			println "jacocoBranchReport: Number of jacoco execution data files found from jacocoMerge: $numFoundExecutionFiles"

			// Turn off reports if no files to report or no jacoco data files found. Otherwise the jacoco task will create empty report.
			if (filesToInclude.size() == 0 || numFoundExecutionFiles == 0) {
				reports {
					html.enabled = false
					xml.enabled = false
				}
				println "jacocoBranchReport: Empty filter or no jacoco execution data found. Not writing report."
			} else {
				println "jacocoBranchReport: Writing report to file://$reports.html.destination/index.html"
			}
		}
	}
/*********************************************************************************
* Task to generate an aggregate jacoco report from all subprojects
*********************************************************************************/
task jacocoReport(type: JacocoReport, group: 'Coverage reports') {
    description = 'Generates an aggregate Jacoco report from all subprojects'
    // The merged execution data file is produced by the root 'jacocoMerge' task.
    dependsOn ":jacocoMerge"
    executionData new File(jacocoRootExecPath)
    sourceDirectories = files(subprojects.sourceSets.main.allSource.srcDirs)
    classDirectories = files(subprojects.sourceSets.main.output)
    // Re-filter the class directories through the excludes list (generated from
    // gradleScripts/jacoco.excludes.src.txt) before the report is written.
    classDirectories = files(classDirectories.files.collect {
        fileTree(dir: it, exclude: excludesList)
    })
    reports {
        html.enabled = true
        xml.enabled = false
        html.destination = new File(project.ext.reportDir + "/jacocoReport")
    }
    // At execution time, suppress the report entirely when jacocoMerge found no
    // execution data; otherwise jacoco would write an empty report.
    doFirst {
        if (numFoundExecutionFiles == 0) {
            println "jacocoReport: No execution data files found."
            println "jacocoReport: No report written to $reports.html.destination.absolutePath."
            reports {
                html.enabled = false
                xml.enabled = false
            }
        } else {
            println "jacocoReport: Writing report to $reports.html.destination.absolutePath"
        }
    }
}
}
/*********************************************************************************
* Generate the Jacoco excludes list from file (this will strip out comments and
* whitespace).
*
* This uses 'gradleScripts/jacoco.excludes.src.txt' to generate list of
* class exclusions for the 'jacocoReport' task.
*
*********************************************************************************/
// Reads gradleScripts/jacoco.excludes.src.txt and returns the non-comment,
// non-blank lines as the jacoco class-exclusion patterns. The List is coerced
// to String[] by the declared return type.
def String[] generateExcludesList() {
    File inputFile = new File(rootProject.projectDir, "gradleScripts/jacoco.excludes.src.txt")
    // findAll already produces a new list, so the original trailing no-arg
    // collect() was a redundant copy and has been removed.
    def lines = inputFile.readLines().findAll { line ->
        !shouldIgnoreLine(line)
    }
    println "Returning ${lines.size()} exclusion line(s) for jacocoReport."
    return lines
}
/* An ignorable line is one that is only whitespace or that starts with a comment marker */
def shouldIgnoreLine(line) {
    return line.startsWith('#') || line.startsWith("//") || line.trim().isEmpty()
}

View file

@ -0,0 +1,29 @@
/*****************************************************************************************
*
* Reads the Ghidra/application.properties file and sets properties for the version,
 * release name, and distro prefix (ghidra_<version>)
*
*****************************************************************************************/
// Load the shared application.properties file and publish the values of interest
// as project-wide 'ext' properties so every build script can reference them.
def ghidraProps = new Properties()
file("Ghidra/application.properties").withReader { reader ->
    ghidraProps.load(reader)
    // The application version drives both the release version and the distro prefix.
    version = ghidraProps.getProperty('application.version')
    project.ext.RELEASE_VERSION = version
    project.ext.RELEASE_NAME = ghidraProps.getProperty('application.release.name')
    project.ext.DISTRO_PREFIX = "ghidra_${version}"
    project.ext.JAVA_COMPILER = ghidraProps.getProperty('application.java.compiler')
    // Build dates may or may not be already present in the application.properties file.
    // If they are not present, we will set the dates so Gradle can use them, and we will
    // indicate that the build dates need to be injected into the build's final
    // application.properties file when it is copied.
    project.ext.BUILD_DATE = ghidraProps.getProperty('application.build.date')
    project.ext.BUILD_DATE_SHORT = ghidraProps.getProperty('application.build.date.short')
    project.ext.BUILD_DATES_NEEDED = false
    if (BUILD_DATE == null) {
        // getCurrentDateTimeLong()/getCurrentDate() are defined elsewhere in the build.
        project.ext.BUILD_DATE = getCurrentDateTimeLong()
        project.ext.BUILD_DATE_SHORT = getCurrentDate()
        project.ext.BUILD_DATES_NEEDED = true
    }
}

View file

@ -0,0 +1,5 @@
// The same settings are needed by the GPL native build, so rather than duplicating
// the logic here we simply apply the shared script from the GPL directory.
apply from: "${rootProject.projectDir.path}/GPL/nativeBuildProperties.gradle"

View file

@ -0,0 +1,12 @@
// Configures the Gradle wrapper to use a distribution zip stored in the local
// binary repository rather than downloading one from the network.
task wrapper(type: Wrapper) {
    gradleVersion = '2.6'
    // getBinLoc is defined elsewhere; it resolves a path inside the binary repo.
    def zipLoc = getBinLoc("Ghidra/contrib/gradle/gradle-${gradleVersion}-all.zip").toPath()
    def jarLoc = getBinLoc('Ghidra/contrib/gradle/wrapper').toPath()
    // Relative URL so the wrapper works wherever the repo is checked out.
    distributionUrl = jarLoc.relativize(zipLoc).toFile()
    distributionBase = Wrapper.PathBase.PROJECT
    distributionPath = ".gradle/wrapper/dists"
    archiveBase = Wrapper.PathBase.PROJECT
    archivePath = ".gradle/wrapper/dists"
    jarFile = jarLoc.resolve("gradle-wrapper-${gradleVersion}.jar").toFile()
}

View file

@ -0,0 +1,44 @@
/******************************************************************************************
* PrepDev - task to prepare a development environment for Ghidra. It needs to be run
* whenever the Ghidra git repos are first cloned or after a 'clean'. It also
* needs to be run after a change to the sleigh ANTLR grammar files.
*
* 1) Creates the help directories in the build directory. These directories are
* included in the eclipse project files and must exist for eclipse to be able to compile
* Ghidra
*
* 2) Builds the sleigh ANTLR code and compiles it into the 'classes' directory. Also builds
* a src-zip of the generated java code. NOTE: this is accomplished by adding the
* following dependency in the processorUtils.gradle file: "prepDev.dependsOn(sleighCompile)"
* We can't do that dependency here because not all projects have a sleighCompile task.
*
*******************************************************************************************/
// Each subproject gets a prepDev task; see the comment block above for its role
// in readying a fresh clone for eclipse development.
subprojects {
    task prepDev {
        group rootProject.GHIDRA_GROUP
        description " Prepares a fresh clone of Ghidra for developing in eclipse. [gradleScripts/prepDev.gradle]\n"
        // build all help
        dependsOn { tasks.findAll { task -> task.name.equals('buildHelp') } }
        // make sure the antlr code in the softwareModeling module gets built
        dependsOn { project(":SoftwareModeling").compileJava }
        // the GhidraLauncher depends on this file to build the classpath in dev mode
        dependsOn { generateLibraryDependencyMapping }
    }
}
/******************************************************************************************
* TASK generateLibraryDependencyMapping
*
* Summary: Creates a file that lists the libraries used by each module.
******************************************************************************************/
// Writes the module -> libraries mapping file used by GhidraLauncher in dev mode.
task generateLibraryDependencyMapping {
    doFirst{
        // Delegates to the generateLibraryDependencyMapping() method (defined in
        // another script). NOTE(review): the method shares this task's name —
        // confirm the call resolves to the method and not to the task.
        generateLibraryDependencyMapping()
    }
}

View file

@ -0,0 +1,116 @@
/*****************************************************************************************
*
 * Create a configuration so that a dependency can be declared on the software modeling
 * project, which is where the sleigh compiler java code lives. This will be used to
* form the classpath of the sleighCompile task that follows.
*
*****************************************************************************************/
// The sleighConfig configuration carries the SoftwareModeling project (home of the
// sleigh compiler) so it can be placed on the sleighCompile task's classpath.
configurations {
    sleighConfig
}
dependencies {
    sleighConfig project(':SoftwareModeling')
}
/*****************************************************************************************
*
* Task to compile language files using the sleigh compiler.
*
*****************************************************************************************/
task sleighCompile (type: JavaExec) {
    group = rootProject.GHIDRA_GROUP
    description " Compiles all the sleigh languages. [processorUtils.gradle]\n"
    // define standard parameters for JavaExec
    classpath configurations.sleighConfig
    main = 'ghidra.pcodeCPort.slgh_compile.SleighCompile'
    // '-a' asks the compiler to process all language specs it finds.
    args '-a'
    // Delay adding the directory argument until the first part of the execution phase, so
    // that any extra args added by a project override will be added to the arg list before
    // this argument.
    doFirst {
        args './data/languages'
    }
    // Large processor specs need a generous heap.
    jvmArgs '-Xmx2048M'
}
// The task that copies the common files to the distribution folder must depend on
// this sleigh task before executing.
// (assembleCommon is defined on the root project by another build script.)
rootProject.assembleCommon.dependsOn(sleighCompile)
// For all tasks of type:Test (i.e., integrationTest, cunitTest, etc.), add a task dependency to
// sleighCompile. The sleighCompile task inputs and outputs are defined such that the *.slaspec
// files will only be compiled once, in other words, the up-to-date checks work ok in the
// sleighCompile task. To learn more, visit:
// https://docs.gradle.org/current/userguide/more_about_tasks.html#sec:up_to_date_checks
// This task dependency is needed because many tests rely on the language
// modules as seen in the use of ghidra.test.ToyProgramBuilder.
// The tasks of type:Test do not know about sleighCompile during their configuration phase, so the
// dependency must be done in this gradle file.
// For every non-support subproject, make all Test-type tasks depend on sleighCompile
// so the languages are built before tests run (see comment block above).
// NOTE: 'each' replaces the original 'findAll' — we iterate purely for side
// effects and the filtered result was being discarded.
rootProject.subprojects.each { subproject ->
    if (!isSupportModule(subproject)) {
        subproject.tasks.withType(Test).all { testTask ->
            testTask.dependsOn(sleighCompile)
        }
    }
}
/*****************************************************************************************
*
 * Task to clean out the compiled language files (*.sla)
*
*****************************************************************************************/
// Deletes every generated .sla file under data/languages.
task cleanSleigh {
    group rootProject.GHIDRA_GROUP
    // Fixed typos in the description: "compiled" (was "compile") and the correct
    // script name "processorUtils.gradle" (was "processUtils.gradle").
    description "Removes all the compiled sleigh language files (*.sla). [gradleScripts/processorUtils.gradle]\n"
    doLast {
        // Let Gradle's delete handle the whole tree in one call instead of
        // iterating the files manually.
        delete fileTree(dir: "data/languages", include: "*.sla")
    }
}
/****************************************************************************************
*
 * Set up inputs and outputs for the sleighCompile task so that languages only get built
* when the inputs change
*
* sleigh compile outputs to same directory as input. All files except .sla are input
*
******************************************************************************************/
// Everything under data/languages except the generated .sla files is input.
def taskInputs = fileTree(dir: 'data/languages', exclude: '**/*.sla')
// The generated .sla files (written alongside the inputs) are the outputs.
def taskOutputs = fileTree(dir: 'data/languages', include: '**/*.sla')
// define the sleigh compile inputs and outputs so that gradle can check if they need building
sleighCompile.inputs.files (taskInputs)
sleighCompile.outputs.files (taskOutputs)
// Generates an Eclipse launcher that runs the sleigh compiler for this project.
// WriteEclipseLauncher is a custom task type defined elsewhere in the build;
// forName() presumably derives the launcher file name — TODO confirm.
task eclipseSleighLauncher(type: WriteEclipseLauncher) {
    dest = forName("Sleigh $project.name")
    isRunFave = true
    isDbgFave = false
    classpath = configurations.sleighConfig
    main 'ghidra.pcodeCPort.slgh_compile.SleighCompile'
    args '-a'
    // Delay adding the directory argument until the first part of the execution phase, so
    // that any extra args added by a project override will be added to the arg list before
    // this argument.
    doFirst {
        args './data/languages'
    }
    jvmArgs '-Xmx2048M'
}
// A project is a support module when it declares a truthy 'isSupportProject'
// property; a missing property counts as false.
def isSupportModule(Project p) {
    def flag = p.findProperty("isSupportProject")
    return flag ? flag : false
}

View file

@ -0,0 +1,50 @@
/**************************************************************************************
* Method to add a single project to this gradle build.
*
* Param name: The name of the project.
* Param path: The path relative to the root project directory
* Param mustExist: True if the project directory must exist for the project to be included
* (ex: devtools exists pre-extraction but not post-extraction but still
* must be created for gradle to compile)
*
*
* Example: if name is 'Utility' and path is "Ghidra/Framework', then this is equal to
*
* include 'Utility'
* project(":Utility").projectDir = new File(rootProject.projectDir, "Ghidra/Framework/Utility")
*
* NOTE: if the project name is in the excludeProjects set, then that project will be skipped.
*
**************************************************************************************/
// Adds a single project to the build; see the comment block above for parameters.
ext.includeProject = { name, path, mustExist ->
    includeProjectNamed(name, name, path, mustExist);
}
// Variant that allows the project name to differ from its directory name.
// The project is included when its directory exists, or unconditionally when
// mustExist is set (the directory may be created later, e.g. devtools).
ext.includeProjectNamed = { name, dirName, path, mustExist ->
    File projectDir = new File(rootProject.projectDir, "$path/$dirName")
    if (projectDir.exists() || mustExist) {
        include name;
        project(":$name").projectDir = projectDir;
    }
}
/**************************************************************************************
* Method to add all projects in a single directory to this gradle build. It looks
 * for all the directories (one level down only) under the given path that contain a build.gradle file. Then
* for each of those, it call includeProject() to include that project.
*
* Param path: The path relative to the root project directory
*
* Example: if path is 'Ghidra/Framework', it will create projects for Utility, Generic, DB, etc.
*
**************************************************************************************/
// Includes every directory directly under 'path' that contains a build.gradle.
ext.includeProjects = { path ->
    // NOTE(review): the tree is rooted at "../" + path, presumably because this
    // closure runs from a settings context one level below the repo root — confirm.
    FileTree fileTree = fileTree("../" + path) {
        include '*/build.gradle'
    }
    fileTree.each { gradleBuildFile ->
        // The project name is the directory that holds the build.gradle file.
        String projectName = gradleBuildFile.parentFile.name
        includeProject(projectName, path, true);
    }
}

View file

@ -0,0 +1,39 @@
/*********************************************************************************
* Configuration for jacoco tasks.
*********************************************************************************/
// 'jacocoEnabled' will enable jacocoMerge, jacocoBranchReport and jacocoReport if these tasks are
// specified on the cmd line.
// Applying jacoco plugin will create coverage files for each java Test task. This extra analysis
// slows down the overall Test task, so only enable jacoco when specified on the cmd line.
project.ext.jacocoEnabled = (project.gradle.startParameter.taskNames.contains('jacocoMerge') ||
    project.gradle.startParameter.taskNames.contains('jacocoBranchReport') ||
    project.gradle.startParameter.taskNames.contains('jacocoReport'))
// Apply jacoco plugin to root and subprojects. This will create coverage files for each java Test task.
if (jacocoEnabled) {
    allprojects {
        apply plugin:'jacoco'
        // Pin the jacoco ant/agent versions used for instrumentation and reporting.
        dependencies {
            jacocoAnt 'org.jacoco:org.jacoco.ant:0.8.2'
            jacocoAgent 'org.jacoco:org.jacoco.agent:0.8.2'
        }
    }
}
subprojects {
    // Clean any jacoco files that may have been left behind previously.
    clean {
        doFirst{
            logger.debug("Deleting subproject jacoco execution data directory: $buildDir/jacoco/")
            file("$buildDir/jacoco/").deleteDir() // delete jacoco executionData files in individual subprojects
            logger.debug("Deleting root project jacoco execution data directory: $rootProject.buildDir/jacoco/")
            file("$rootProject.buildDir/jacoco/").deleteDir() // delete jacocoMerge task output
            logger.debug("Deleting jacoco report directory: $rootProject.buildDir/reports/jacoco/")
            file("$rootProject.buildDir/reports/jacoco/").deleteDir() // delete jacocoReport, jacocoBranchReport output
        }
    }
}

View file

@ -0,0 +1,137 @@
import org.gradle.plugins.ide.eclipse.model.Container;
import org.gradle.plugins.ide.eclipse.model.Library;
/*********************************************************************************
* Subproject configuration
* - all subs will have access to these properties.
*********************************************************************************/
// Common java configuration applied to every subproject: compiler flags, jar
// manifest, source set layout, test configurations/dependencies, and eclipse
// classpath adjustments.
subprojects {
    apply plugin: 'java'
    // Suppress lint noise and internal-API warnings; fork keeps compiler state isolated.
    compileJava {
        options.compilerArgs << '-Xlint:none'
        options.compilerArgs << '-XDignore.symbol.file'
        options.fork = true
        options.warnings = false
    }
    compileTestJava {
        options.compilerArgs << '-Xlint:none'
        options.compilerArgs << '-XDignore.symbol.file'
        options.fork = true
        options.warnings = false
    }
    // Target the java version declared in Ghidra/application.properties.
    plugins.withId('java') {
        sourceCompatibility = "${rootProject.JAVA_COMPILER}"
        targetCompatibility = "${rootProject.JAVA_COMPILER}"
    }
    jar {
        manifest {
            attributes (
                "Specification-Title": "${project.name}",
                "Specification-Version": "${rootProject.RELEASE_VERSION}",
                "Specification-Vendor": "Ghidra"
            )
        }
    }
    // Source layout: unit tests under src/test, slow integration tests under
    // src/test.slow, screenshot generators under src/screen, and processor
    // tests under src/test.processors.
    sourceSets {
        main {
            java {
                srcDir 'src/main/java'
            }
            resources {
                srcDir 'src/main/resources'
            }
        }
        test {
            java {
                srcDir 'src/test/java'
            }
            resources {
                srcDir 'src/test/resources'
            }
        }
        integrationTest {
            java {
                srcDirs = ['src/test.slow/java'] // overwrite srcDir with new path
                compileClasspath += main.output + test.output
                runtimeClasspath += main.output + test.output
            }
            resources {
                srcDirs = ['src/test.slow/resources']
            }
        }
        screenShots {
            java {
                srcDir 'src/screen/java'
                compileClasspath += main.output
                runtimeClasspath += main.output
            }
        }
        cunitTest {
            java {
                srcDir 'src/test.processors/java'
                compileClasspath += main.output
                runtimeClasspath += main.output
            }
        }
        resources {
            srcDir 'src/test.processors/resources'
        }
    }
    // Let the extra test source sets inherit the regular test dependencies.
    configurations {
        integrationTestCompile.extendsFrom testCompile
        integrationTestRuntime.extendsFrom testRuntime, integrationTestCompile
        cunitTestCompile.extendsFrom compile
    }
    /*
        Provide test dependencies here so each build file does not have to.
    */
    dependencies {
        integrationTestCompile "org.hamcrest:hamcrest-all:1.3"
        integrationTestCompile "org.jmockit:jmockit:1.44"
        testCompile "org.hamcrest:hamcrest-all:1.3"
        testCompile "org.jmockit:jmockit:1.44"
        testCompile "junit:junit:4.12"
    }
    // For Java 9, we must explicitly export references to the internal classes we are using.
    // We export them to all "unnamed" modules, which are modules that don't define themselves
    // as a new Java 9 style module. Ghidra is currently using unnamed modules everywhere.
    ext.addExports = { List<String> exports ->
        tasks.withType(JavaCompile) {
            exports.each {
                options.compilerArgs.addAll(['--add-exports', it])
            }
        }
        // Mirror the exports into the eclipse classpath so the IDE build matches.
        eclipse.classpath.file.whenMerged { classpath ->
            classpath.entries.each { ent ->
                if (ent instanceof Container && ent.path.contains('JRE_CONTAINER')) {
                    ent.entryAttributes.put('module', true);
                    ent.entryAttributes.put('add-exports', exports.join(':'));
                }
            }
        }
    }
    afterEvaluate {
        // Drop non-jar library entries from the eclipse classpath and export the rest.
        eclipse.classpath.file.whenMerged { classpath ->
            classpath.entries.removeAll { ent ->
                ent instanceof Library && !ent.path.endsWith('.jar')
            }
            classpath.entries.findAll { entry -> entry.kind == 'lib' }*.exported = true
        }
    }
}

70
gradleScripts/svg.gradle Normal file
View file

@ -0,0 +1,70 @@
/*********************************************************************************
* SVG
*
* Summary: Uses the Batik library to create PNG files from SVG's.
*
* This task will place all generated pngs in the 'build' folder for the project;
* which will eventually be placed in the 'resources' folder when prepDev is
* run.
*
* Command Line Format: "java -cp <classpath> <batik main class> -scriptSecurityOff -m <inputDir> -d <outputDir>"
*
 * TODO: Should we have this task place the pngs in the resources folder
 *       directly instead of waiting for prepDev to do it?
*
*********************************************************************************/
// Uses the Batik rasterizer to convert each subproject's .svg files to .png.
task rasterizeSvg(type: JavaExec) {
    group rootProject.GHIDRA_GROUP
    // Fixed script reference in the description: this task is defined in
    // svg.gradle, not distribution.gradle.
    description " Converts .svg files to .png files. [gradleScripts/svg.gradle]\n"
    subprojects { p ->
        // Set up some vars for the Batik command line call.
        // NOTE(review): INPUT_DIR of "image/png" is passed to Batik's -m (input
        // directory) switch even though the inputs are .svg files — confirm intent.
        def INPUT_DIR = "image/png"
        def OUTPUT_DIR = p.projectDir.toString() + "/build/batik/png/main/images"
        def MAIN_CLASS = "org.apache.batik.apps.rasterizer.Main"
        // The Batik lib requires a few dependencies to be added to the classpath; we could have
        // added these in the individual projects which use this task (eg: to the 'compile'
        // configuration) but since this is the only task which requires them, it seemed
        // appropriate to just add them here.
        def BIN_REPO = rootProject.file(BIN_REPO_PATH).toString()
        classpath = files ( BIN_REPO + "/ExternalLibraries/libsforBuild/batik-all-1.7.jar",
            BIN_REPO + "/ExternalLibraries/libsforBuild/xml-apis-ext.jar")
        // Now build a list of all SVG files in the project. We have to do the isEmpty() check
        // afterwards since Batik will choke if we don't pass it a valid input list.
        // NOTE(review): pattern '**src/**/*.svg' looks like it was meant to be
        // '**/src/**/*.svg' — confirm before changing.
        FileTree tree = fileTree(p.projectDir.toString()) {
            include "**src/**/*.svg"
        }
        if (tree.isEmpty()) {
            return
        }
        // This is strictly for formatting the file list properly for placing in the command
        // line string.
        def files = []
        tree.each { File file ->
            files.push(file.toString())
        }
        main = MAIN_CLASS
        // Set up the entire arg list, minus the input files.
        def argsList = ["-scriptSecurityOff",
            "-m",
            INPUT_DIR,
            "-d",
            OUTPUT_DIR
        ]
        // Now add the input files to the end of the argument list.
        files.each { file ->
            argsList.push(file)
        }
        args = argsList
    }
}

833
gradleScripts/test.gradle Normal file
View file

@ -0,0 +1,833 @@
/*********************************************************************************
* test.gradle
*
* Contains tasks for running unit/integration tests and generating associated
* reports.
*
* Testing tasks: unitTestReport
* integrationTestReport
*
*
*********************************************************************************/
/*********************************************************************************
* Define some vars. Note that the first few vars must be defined. If they
* aren't set by a calling script, we default them.
*
*********************************************************************************/
// Shared helper functions for test reporting live in testUtils.gradle.
apply from: "gradleScripts/testUtils.gradle"
// The jmockit agent jar is resolved through its own configuration so it can be
// handed to the test JVM as a -javaagent argument (see initTestJVM).
configurations {
    jmockitAgent
}
dependencies {
    jmockitAgent('org.jmockit:jmockit:1.44') {
        // jsr305 is not needed for the agent at runtime.
        exclude group: 'com.google.code.findbugs', module: 'jsr305'
    }
}
// Default the test output root when the caller did not supply one via -P.
if (!project.hasProperty('rootTestDir')) {
    project.ext.rootTestDir = "build"
}
// Machine name selects machine-specific properties and labels reports.
if (!project.hasProperty('machineName')) {
    try {
        project.ext.machineName = InetAddress.localHost.hostName
    }
    catch (UnknownHostException e) {
        project.ext.machineName = "STANDALONE"
    }
}
// Default heap bounds for the forked test JVMs; overridable via -Pxms/-Pxmx.
if (!project.hasProperty('xms')) {
    project.ext.xms = "512M"
}
if (!project.hasProperty('xmx')) {
    project.ext.xmx = "2048M"
}
loadApplicationProperties()
loadMachineSpecificProperties()
// srcTreeName defaults to the application version loaded just above.
if (!project.hasProperty('srcTreeName')) {
    project.ext.srcTreeName = System.properties.get("application.version")
}
project.ext.testRootDirName = "JunitTest_" + srcTreeName
project.ext.cunitTestRootDirName = "CunitTest_" + srcTreeName
project.ext.shareDir = System.properties.get('share.dir')
if (project.ext.shareDir != null) {
    // add src tree name to machine specified share path
    project.ext.testShareDir = shareDir + "/" + testRootDirName
    project.ext.cunitTestShareDir = shareDir + "/" + cunitTestRootDirName
}
else {
    // No share configured: keep results under the local build directory.
    project.ext.testShareDir = "$rootTestDir" + "/JUnit"
    project.ext.cunitTestShareDir = "$rootTestDir" + "/CUnit"
}
// Marker strings that test output is scanned for to detect data-upgrade activity.
project.ext.upgradeProgramErrorMessage = "WARNING! Attempting data upgrade for "
project.ext.upgradeTimeErrorMessage = "Upgrade Time (ms): "
project.ext.logPropertiesUrl = getLogFileUrl()
// Specify final report directory via cmd line using "-PreportDir=<location>". This is useful for
// the 'parallelCombinedTestReport' task.
project.ext.reportDir = project.hasProperty('reportDir') ? project.getProperty('reportDir') + "/reports"
    : testShareDir + "/reports"
project.ext.reportArchivesDir = testShareDir + "/reportsArchive"
project.ext.archivedReportDir = reportArchivesDir + "/reports"
project.ext.testOutputDir = file(rootTestDir).getAbsolutePath() + "/output"
project.ext.userHome = System.properties.get("user.home")
// 'parallelMode' will change configs to allow concurrent test execution.
// Enable this mode if 'parallelCombinedTestReport' invoked in the command line.
project.ext.parallelMode = project.gradle.startParameter.taskNames.contains('parallelCombinedTestReport')
// 'parallelCombinedTestReport' task will parse a JUnit test report for test duration.
// Specify the location of the report via cmd line using "-PtestTimeParserInputDir=<location>".
// Otherwise, the default is to look for the latest test report in <reportDir>/../reportsArchive/reports_<date>
project.ext.testTimeParserInputDir = project.hasProperty('testTimeParserInputDir') ?
    project.getProperty('testTimeParserInputDir') + "/reports/classes" : getLastArchivedReport("$reportArchivesDir") + "/classes"
// A port of 0 will allow the kernel to give a free port number in the set of "User"
// or "Registered" Ports (usually 1024 - 49151). This will prevent port collisions among
// concurrent JUnit tests.
project.ext.debugPort = parallelMode ? 0 : generateRandomDebugPort()
/*********************************************************************************
* Create specified absolute directory if it does not exist
*********************************************************************************/
// Creates the given absolute directory (including parents) and fails the build
// if it cannot be created or is not writable by this process.
def createDir(dirPath) {
    println "Creating directory: " + dirPath
    def dir = new File(dirPath)
    dir.mkdirs()
    if (!dir.exists()) {
        throw new RuntimeException("Failed to create required directory: " + dirPath)
    }
    if (!dir.canWrite()) {
        throw new RuntimeException("Process does not have write permission for directory: " + dirPath)
    }
}
/*********************************************************************************
* Finds the log4j properties file and returns its path.
*********************************************************************************/
// Walks the working directory looking for the log4j test configuration and
// returns its location as a "file:" URL (null if not found).
def getLogFileUrl() {
    String rootDir = System.properties.get("user.dir")
    String foundFile
    new File(rootDir).eachFileRecurse(groovy.io.FileType.FILES) {
        if (it.path.endsWith('src/main/resources/generic.log4jtest.xml')) {
            // A 'return' here would only exit this closure, not the method (the
            // original code had such a dead return); we simply record the match
            // and let the walk continue — if several match, the last one wins.
            foundFile = "file:" + it.path.toString()
        }
    }
    return foundFile
}
/*********************************************************************************
* Creates a new debug port randomly.
*********************************************************************************/
// Picks a pseudo-random JVM debug port in the range 18300-18555 (safely above
// the reserved ports and well below 65535) by folding a random int down to a
// single byte and offsetting it.
def generateRandomDebugPort() {
    Random rng = new Random()
    def folded = rng.nextInt()
    // XOR successive 8-bit shifts to mix all bytes into the low byte.
    3.times {
        folded ^= (folded >> 8)
    }
    folded &= 0xff
    return 18300 + folded
}
/*********************************************************************************
* Loads application specific property file that contains info we need.
 * Properties will be immediately added to the global System.properties file so we
* can readily access them from just one place.
*********************************************************************************/
// Loads Ghidra/application.properties (if present) and copies every entry into
// System.properties so all scripts can read them from one place.
def loadApplicationProperties() {
    Properties props = new Properties()
    File appProperties = new File("Ghidra/application.properties");
    if (!appProperties.exists()) {
        return;
    }
    // withInputStream closes the stream when the closure completes; the original
    // 'new FileInputStream(...)' was never closed and leaked a file handle.
    appProperties.withInputStream { stream ->
        props.load(stream)
    }
    props.each {k, v ->
        System.setProperty(k, v)
    }
}
/*********************************************************************************
* Record and Print test task start time
*********************************************************************************/
// Records the test task's start time in project.ext (read later by endTestTimer)
// and announces it on the console.
def startTestTimer(Task task) {
    project.ext.testStartTime = new Date()
    println ":" + task.project.name + ":" + task.name + " started: " + testStartTime;
}
/*********************************************************************************
* Print test task end time and elapsed time
*********************************************************************************/
// Prints the test task's end time and its elapsed time (h:mm:ss) relative to the
// start time recorded by startTestTimer.
def endTestTimer(Task task) {
    Date endTime = new Date();
    println ":" + task.project.name + ":" + task.name + " ended: " + endTime;
    long elapsedMS = endTime.getTime() - testStartTime.getTime();
    long msPerMin = 60 * 1000;
    long msPerHour = 60 * msPerMin;
    // Use intdiv() for explicit integer division: Groovy's '/' on integral types
    // yields a BigDecimal, and the original only worked because assignment to
    // 'long' happened to truncate it.
    long hrs = elapsedMS.intdiv(msPerHour);
    long mins = (elapsedMS - (hrs * msPerHour)).intdiv(msPerMin);
    long secs = (elapsedMS - (hrs * msPerHour) - (mins * msPerMin)).intdiv(1000);
    println ":" + task.project.name + ":" + task.name + " elapsed time: " +
        String.format("%d:%02d:%02d", hrs, mins, secs);
}
/*********************************************************************************
* Loads any machine specific property file that contains info we need.
* Only those properties with our machine name prefix will be immediately
* added to the global System.properties file so we can readily access them from
* just one place (machine name prefix will be omitted).
*********************************************************************************/
// Loads the machine-specific properties file named by -PtestPropertiesPath and
// promotes entries prefixed with this machine's name into System.properties
// (prefix stripped). Local runs without the property are unaffected.
def loadMachineSpecificProperties() {
    if (project.hasProperty('testPropertiesPath')) {
        Properties props = new Properties()
        def testPropertiesPath = project.getProperty('testPropertiesPath')
        File testProperties = new File(testPropertiesPath);
        if (!testProperties.exists()) {
            return;
        }
        println "loadMachineSpecificProperties: Using machine specific properties file '$testProperties'"
        // withInputStream closes the stream when the closure completes; the
        // original FileInputStream was never closed and leaked a file handle.
        testProperties.withInputStream { stream ->
            props.load(stream)
        }
        // Note: Only load those properties that contain our machine name (set above). This means
        // that local test runs will not use these values. Also, if the test machine name
        // changes, then so too will have to change the properties file.
        props.each { k, v ->
            if (k.startsWith(machineName)) {
                // strip off <machine-name>_ prefix from property key
                def key = k.substring(machineName.length()+1)
                println "loadMachineSpecificProperties: Setting system property $key:$v"
                System.setProperty(key, v)
            }
        }
    }
}
/*********************************************************************************
* Archive previously generated test report
*********************************************************************************/
// Moves the previous test report directory into the archive area and rebuilds
// the archive index. containsIndexFile, renameReportArchive and
// generateArchiveIndex are helpers from testUtils.gradle.
task archiveTestReport {
    File reports = file(reportDir)
    File archivedReports = file(archivedReportDir)
    File reportArchives = file(reportArchivesDir)
    // Nothing to do when there is no previous report to archive.
    onlyIf {
        containsIndexFile(reports)
    }
    doLast {
        reportArchives.mkdirs()
        delete archivedReports
        reports.renameTo(archivedReports)
        renameReportArchive(archivedReports) // renames archived reports directory
        generateArchiveIndex(reportArchives)
    }
}
/*********************************************************************************
* Remove remnants of previous tests.
*********************************************************************************/
// Wipes both the temp test area and the report output left by previous runs.
task deleteTestTempAndReportDirs() {
    doFirst {
        [rootTestDir, reportDir].each { dirName ->
            delete file(dirName).getAbsolutePath()
        }
    }
}
/*********************************************************************************
* Initialize test task
*********************************************************************************/
// Applies the common JVM configuration to a test task: working/report dirs,
// heap bounds, jmockit agent, Ghidra test system properties, and debug settings.
// NOTE(review): the rootDirName parameter is not used inside this method — confirm intent.
def initTestJVM(Task task, String rootDirName) {
    def testTempDir = file(rootTestDir).getAbsolutePath()
    def testReportDir = file(reportDir).getAbsolutePath()
    task.doFirst {
        println "Test Machine Name: " + machineName
        println "Root Test Dir: " + rootTestDir
        println "Test Output Dir: " + testOutputDir
        println "Test Temp Dir: " + testTempDir
        println "Test Report Dir: " + testReportDir
        println "Java Debug Port: " + debugPort
        createDir(testTempDir)
        createDir(testOutputDir)
    }
    // If false, testing will halt when an error is found.
    task.ignoreFailures true
    // If false, then tests are re-run every time, even if no code has changed.
    task.outputs.upToDateWhen {false}
    // Must set this to see System.out print statements.
    task.testLogging.showStandardStreams = true
    // Min/Max heap size. These are passed in.
    task.minHeapSize xms
    task.maxHeapSize xmx
    // for jmockit; needs the javaagent option
    // -javaagent:/path/to/jmockit.jar
    // The jvmArgs are added at execution time so the agent jar is resolved lazily.
    task.doFirst {
        def jmockitPath = configurations.jmockitAgent.singleFile
        task.jvmArgs '-DupgradeProgramErrorMessage=' + upgradeProgramErrorMessage,
            '-DupgradeTimeErrorMessage=' + upgradeTimeErrorMessage,
            '-Dlog4j.configuration=' + logPropertiesUrl,
            '-Dghidra.test.property.batch.mode=true',
            '-Dghidra.test.property.parallel.mode=' + parallelMode,
            '-Dghidra.test.property.output.dir=' + testOutputDir,
            '-Dghidra.test.property.report.dir=' + testReportDir,
            '-DSystemUtilities.isTesting=true',
            '-Dmachine.name=' + machineName,
            '-Djava.io.tmpdir=' + testTempDir,
            '-Duser.data.dir=' + userHome + '/.ghidra/.ghidra-' + srcTreeName + '-Test',
            '-Dcpu.core.override=8',
            '-XX:ParallelGCThreads=8',
            '-XX:+UseParallelGC',
            '-Djava.awt.headless=false',
            // Don't run this long winded analysis when testing (see DecompilerFunctionAnalyzer)
            '-DDecompilerFunctionAnalyzer.enabled=false',
            '-Djava.util.Arrays.useLegacyMergeSort=true',
            '-Djdk.attach.allowAttachSelf',
            '-javaagent:' + jmockitPath,
            '-DLock.DEBUG=true',
            '-Xdebug',
            '-Xnoagent',
            '-Djava.compiler=NONE',
            '-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=' + debugPort
    }
}
/*********************************************************************************
* Create this TestReport task so that individual test tasks may later assign themselves
* (using 'reportOn'). These tasks run tests concurrently.
*
* Invoking this task via command line enables 'parallelMode'.
*
* Example:
* ./gradlew parallelCombinedTestReport -PtestTimeParserInputDir=<path/to/report/classes> -PreportDir=<path/to/reports>
*
*********************************************************************************/
task parallelCombinedTestReport(type: TestReport) { t ->
	group "test"
	// Aggregated HTML report destination; the generated parallel test tasks
	// register themselves on this task via 'reportOn' in createTestTask().
	destinationDir = file("$reportDir")
	logger.debug("parallelCombinedTestReport: Using destinationDir = $reportDir")
	// Always start from a clean temp/report area before any tests run.
	dependsOn ":deleteTestTempAndReportDirs"
	mustRunAfter ":deleteTestTempAndReportDirs"
	doFirst {
		startTestTimer(t)
	}
	doLast {
		endTestTimer(t)
	}
}
/*********************************************************************************
* GRADLE TEST PROPERTIES
*
* This is where we add the test task to each subproject, and set generic test
* properties. These will apply to both unit and integration tests.
*********************************************************************************/
subprojects {
	test { t ->
		// Launch a fresh JVM for every test class for isolation.
		forkEvery 1
		initTestJVM(t, testRootDirName)
		// WARNING! WATCH OUT !!
		// WARNING! Since a single shared JVM instance is used, the first
		// test and its ApplicationConfiguration will be used to initialize
		// the class searching environment. This can have a negative impact
		// on test results due to the inconsistent Application environment
		// which may exist when all tests are run versus a single test.
		// Based on this limitation we should only use the Integration base
		// test classes within integrationTest regions (e.g., test.slow).
		doFirst {
			startTestTimer(t)
		}
		doLast {
			endTestTimer(t)
		}
	}
}
/*********************************************************************************
* UNIT TEST REPORT
*
* Summary: Runs all unit tests and generates a single report.
*
*********************************************************************************/
task unitTestReport(type: TestReport) { t ->
	group "test"
	destinationDir = file("$reportDir/unitTests")
	// Aggregate every subproject's 'test' task results into a single report.
	reportOn subprojects.test
	// Never considered up to date; regenerate the report on every invocation.
	outputs.upToDateWhen {false}
}
/*********************************************************************************
* PREPARE FOR TESTS
*
* Summary: Setup for testing.
*********************************************************************************/
// Unpacks the FunctionID databases that some tests require before any test runs.
task prepareForTests {
	dependsOn { project(":FunctionID").unpackFidDatabases }
}
/*********************************************************************************
* INTEGRATION TEST
*
* Summary: Applies a task to each subproject that will run all integration
* tests for that project.
*
*********************************************************************************/
subprojects { sub ->
	task integrationTest (type: Test) { t ->
		group "test"
		dependsOn ':prepareForTests'
		// Run against the 'integrationTest' sourceSet (src/test.slow).
		testClassesDirs = files sub.sourceSets.integrationTest.output.classesDirs
		classpath = sub.sourceSets.integrationTest.runtimeClasspath
		// Do not include suite classes; they trigger the tests in the suite to get run twice
		// (once by normal jUnit and again when the suite runs).
		excludes = ['**/*Suite*']
		// Enable if you want to force Gradle to launch a new JVM for each test.
		forkEvery 1
		initTestJVM(t, testRootDirName)
		doFirst {
			startTestTimer(t)
		}
		doLast {
			endTestTimer(t)
		}
	}
}
/*********************************************************************************
* Create a task of type: Test for a subproject.
* Each task will run a maximum of 'numMaxParallelForks' tests at a time.
* The task will include tests from classesList[classesListPosition, classesListPosition+numMaxParallelForks]
* and inherit excludes from its non-parallel counterpart.
*
* @param subproject
* @param testType - Either 'test' or 'integrationTest'. The sourceSets defined in setupJava.gradle
* @param taskNameCounter - Number to append to task name.
* @param classesList - List of classes for this subproject.testType. Sorted by duration.
* @param classesListPosition - Start position in 'classesList'
* @param numMaxParallelForks - Number of concurrent tests to run.
*********************************************************************************/
def createTestTask(Project subproject, String testType, int taskNameCounter, ArrayList classesList,
	int classesListPosition, int numMaxParallelForks) {
	subproject.task(testType+"_$taskNameCounter", type: Test) { t ->
		group "test"
		testClassesDirs = files subproject.sourceSets["$testType"].output.classesDirs
		classpath = subproject.sourceSets["$testType"].runtimeClasspath
		maxParallelForks = numMaxParallelForks
		initTestJVM(t, testRootDirName)
		// Every generated task feeds the single aggregated parallel report.
		rootProject.tasks['parallelCombinedTestReport'].reportOn t
		// include classes in task up to the maxParallelForks value
		// (mutates only the local copy of classesListPosition)
		for (int i = 0; (i < maxParallelForks && classesListPosition < classesList.size); i++) {
			// Convert my.package.MyClass to my/package/MyClass.class
			include classesList[classesListPosition].replace(".","/") + ".class"
			classesListPosition++
		}
		// Inherit excludes from subproject build.gradle.
		// These excludes will override any includes.
		excludes = subproject.tasks["$testType"].excludes
		// Display granularity of 0 will log method-level events
		// as "Task > Worker > org.SomeClass > org.someMethod".
		testLogging {
			displayGranularity 0
			events "started", "passed", "skipped", "failed"
		}
		doFirst {
			startTestTimer(t)
		}
		doLast {
			endTestTimer(t)
		}
		// Current task mustRunAfter previous task (test_1 before test_2, etc.)
		if (taskNameCounter > 1) {
			def prevTaskName = "$testType" + "_" + (taskNameCounter -1)
			mustRunAfter prevTaskName
		}
		logger.info("createTestTask: Created $subproject.name" + ":$testType" + "_$taskNameCounter"
			+ "\n\tincludes = \n" + t.getIncludes() + "\n\tinheriting excludes (overrides any 'includes') = \n"
			+ t.excludes)
	} // end task
}
/*********************************************************************************
* Split each subproject's integrationTest ("src/test.slow") and test ("src/test")
* task into multiple tasks, based on duration parsed from a previous JUnit report.
* Each task will run tests concurrently.
*
* Each task will run a maximum of the 'maxParallelForks' value at a time.
*********************************************************************************/
configure(subprojects.findAll {parallelMode == true}) { subproject ->
	afterEvaluate {
		// "subprojects { afterEvaluate { evaluate()"
		// forces evaluation of subproject configuration. Needed for inheriting excludes
		// from non-parallel counterpart.
		// NOTE(review): Project.evaluate() is Gradle-internal API -- TODO confirm a
		// supported alternative (e.g. evaluationDependsOn) before a Gradle upgrade.
		subproject.evaluate()
		if (!shouldSkipTestTaskCreation(subproject)) {
			logger.info("parallelCombinedTestReport: Creating 'test' tasks for " + subproject.name + " subproject.")
			ArrayList classesList = getTestsForSubProject(subproject.sourceSets.test.java)
			int classesListPosition = 0 // current position in classesList
			int taskNameCounter = 1 // task suffix
			int numMaxParallelForks = 40 // unit tests are fast; 40 seems to be a reasonable number
			while (classesListPosition < classesList.size()) {
				createTestTask(subproject, "test", taskNameCounter, classesList, classesListPosition, numMaxParallelForks)
				classesListPosition+=numMaxParallelForks
				taskNameCounter+=1; // "test_1", "test_2, etc.
			}
		}
		if (!shouldSkipIntegrationTestTaskCreation(subproject)) {
			logger.info("parallelCombinedTestReport: Creating 'integrationTest' tasks for " + subproject.name + " subproject.")
			ArrayList classesList = getTestsForSubProject(subproject.sourceSets.integrationTest.java)
			int classesListPosition = 0 // current position in classesList
			int taskNameCounter = 1 // task suffix
			// Through trial-and-error we found that 40 is too many
			// concurrent integration tests (ghidratest server has 40 CPUs).
			// 20 seems like a good balance of throughput vs resource usage for ghidratest server.
			int numMaxParallelForks = 20
			while (classesListPosition < classesList.size()) {
				createTestTask(subproject, "integrationTest", taskNameCounter, classesList, classesListPosition, numMaxParallelForks)
				classesListPosition+=numMaxParallelForks
				taskNameCounter+=1; // "integrationTest_1", "integrationTest_2, etc.
			}
		}
	} // end afterEvaluate
}// end subprojects
/*********************************************************************************
* NON-BASE INTEGRATION TEST REPORT
*
* Summary: Runs integration tests for all modules except "Base"
* and generates a single report.
*
*********************************************************************************/
task integrationTestReportNoBase(type: TestReport) { t ->
	group "test"
	destinationDir = file("$reportDir/integrationTests")
	// Report on every subproject's integrationTest except the "Base" module.
	subprojects.each {
		if (!it.name.equals("Base")) { // excludes Base module tests
			reportOn it.integrationTest
		}
	}
}
/*********************************************************************************
* INTEGRATION TEST REPORT BASE ONLY
*
* Summary: Runs the integration tests for just the "Base" module and generates a
* report.
*
*********************************************************************************/
task integrationTestReportBaseOnly(type: TestReport) { t ->
	group "test"
	destinationDir = file("$reportDir/integrationTests")
	subprojects.each {
		if (it.name.equals("Base")) { // includes only the Base module tests
			reportOn it.integrationTest
		}
	}
}
/*********************************************************************************
* COMBINED TEST REPORT
*
* Summary: Runs all integration and unit tests, and creates a single report.
*
*********************************************************************************/
task combinedTestReport(type: TestReport) { t ->
	group "test"
	destinationDir = file("$reportDir")
	dependsOn ":deleteTestTempAndReportDirs"
	mustRunAfter ":deleteTestTempAndReportDirs"
	// Register every testable subproject's unit and integration test results.
	// afterEvaluate is required so each subproject's properties (consulted by
	// isTestable) are set before the check runs.
	subprojects { project ->
		afterEvaluate{
			if (isTestable(project)) {
				reportOn project.test
				reportOn project.integrationTest
			}
		}
	}
}
/*********************************************************************************
* ALL TESTS
*
* Summary: Applies a task to each subproject that will run all unit tests and all
* integration tests for that project.
*
*********************************************************************************/
subprojects { sub ->
	// Convenience task: runs both the unit and integration tests of a subproject.
	task allTests {
		dependsOn 'integrationTest'
		dependsOn 'test'
	}
}
/*********************************************************************************
* CUNIT TEST
*
* Summary: Applies a task to each "Processor Test" subproject that will run all
* CUNIT tests for that project.
*
*********************************************************************************/
subprojects { sub ->
	task cunitTest (type: Test) { t ->
		group "cunit"
		dependsOn ':prepareForTests'
		// Run against the 'cunitTest' sourceSet of the subproject.
		testClassesDirs = files sub.sourceSets.cunitTest.output.classesDirs
		classpath = sub.sourceSets.cunitTest.runtimeClasspath
		// Enable if you want to force Gradle to launch a new JVM for each test.
		forkEvery 1
		initTestJVM(t, cunitTestRootDirName)
		doFirst {
			startTestTimer(t)
		}
		doLast {
			endTestTimer(t)
		}
	}
}
/*********************************************************************************
* CUNIT TEST REPORT
*
* Summary: Runs all CUNIT tests
*
*********************************************************************************/
task cunitTestReport(type: TestReport) { t ->
	group "cunit"
	// Aggregate every subproject's cunitTest results into the CUNIT share dir.
	destinationDir = file("$cunitTestShareDir" + "/reports")
	reportOn subprojects.cunitTest
}
/*********************************************************************************
* CUNIT TEST REPORT w/ CUNIT MATRIX REPORT
*
* Summary: Runs all CUNIT tests and consolidate JUnit and Matrix report in
* results directory.
*
*********************************************************************************/
// Copies the matrix output produced under testOutputDir next to the JUnit
// report so both live in the same results directory.
task cunitConsolidatedTestReport(type: Copy) {
	group "cunit"
	dependsOn ':cunitTestReport'
	into (cunitTestShareDir + "/reports")
	from (testOutputDir + "/test-output") {
		// Raw XML results and the cache are not part of the consolidated report.
		exclude '**/*.xml', 'cache/**'
	}
}
/*********************************************************************************
* Rename archived report directory to reflect lastModified date
*********************************************************************************/
/*********************************************************************************
 * Rename archived report directory to reflect lastModified date
 *
 * The directory's last-modified timestamp is formatted as e.g. "_Mar_26_2019"
 * and appended to the directory name; getUniqueName() resolves collisions.
 * Uses java.time instead of the legacy Calendar/SimpleDateFormat APIs.
 *********************************************************************************/
def renameReportArchive(dir) {
	// Convert the epoch-millis timestamp to a zoned date in the system zone.
	def modified = java.time.Instant.ofEpochMilli(dir.lastModified())
		.atZone(java.time.ZoneId.systemDefault())
	String suffix = modified.format(java.time.format.DateTimeFormatter.ofPattern("_MMM_d_yyyy"))
	File newFile = getUniqueName(new File(dir.getParent(), dir.getName() + suffix))
	// File.renameTo reports failure via its return value; surface it instead of
	// silently claiming success.
	if (dir.renameTo(newFile)) {
		println "Archived reports: " + newFile.getCanonicalPath()
	}
	else {
		println "WARNING: failed to archive reports; could not rename " +
			dir.getCanonicalPath() + " to " + newFile.getCanonicalPath()
	}
}
/*********************************************************************************
* Attempt to generate unique directory/file name to avoid duplication
*********************************************************************************/
/*********************************************************************************
 * Attempt to generate unique directory/file name to avoid duplication
 *********************************************************************************/
def getUniqueName(file) {
	// Fast path: the requested name is already free.
	if (!file.exists()) {
		return file
	}
	// Probe "<name>_2" through "<name>_21" and take the first unused candidate.
	def candidate = (2..21).collect { suffix ->
		new File(file.getParentFile(), file.getName() + "_" + suffix)
	}.find { !it.exists() }
	// All probes taken: fall back to the original (duplicate) name.
	return candidate ?: file
}
/*********************************************************************************
* Generate report archive index.html file within the archivesDir directory
*********************************************************************************/
/*
 * Writes an index.html into 'archivesDir' linking to each archived report
 * directory, newest first.  SIDE EFFECT: archive directories whose lastModified
 * time is older than 30 days are DELETED while the index is being generated.
 */
def generateArchiveIndex(archivesDir) {
	File file = new File( archivesDir, "index.html" );
	println "Generating Archived Reports Index: " + file.getCanonicalPath()
	PrintWriter p = new PrintWriter(file);
	try {
		p.println("<HTML>");
		p.println("<HEAD>");
		printStyleSheet(p);
		p.println("</HEAD>");
		p.println("<BODY>");
		p.println("<CENTER>");
		p.println("<H2>Past Test Reports</H2>");
		p.println(" <TABLE BORDER=1 ALIGN=CENTER WIDTH=\"90%\">");
		// get a dir listing of all the files from the report dir and
		// create a hyperlink for the filename
		File[] reportDirFiles = archivesDir.listFiles();
		// Sort newest-first; ties broken by name so ordering is deterministic.
		Arrays.sort(reportDirFiles, new Comparator<File>() {
			public int compare(File file1, File file2) {
				if ( file1 == file2 ) {
					return 0;
				}
				long timestamp1 = file1.lastModified();
				long timestamp2 = file2.lastModified();
				if (timestamp1 == timestamp2) {
					return file1.getName().compareTo(file2.getName());
				}
				return (timestamp1 < timestamp2) ? 1 : -1;
			}
		});
		// archived reports older than one month will be removed
		long now = System.currentTimeMillis();
		println "Current time: " + now
		long oldestTime = now - (1000L * 60 * 60 * 24 * 30);
		println "Delete date: " + oldestTime
		for (File f : reportDirFiles) {
			// Only directories are archives; skip stray files (e.g. this index).
			if (!f.isDirectory()) {
				continue;
			}
			println "File: " + f.toString()
			println "\tlast modified: " + f.lastModified()
			if (f.lastModified() < oldestTime) {
				println "\t\tFile older than the oldest time--deleting!..."
				f.deleteDir(); // delete old archive results
			}
			else {
				println "\t\tFile is a spring chicken--creating link..."
				createLinkForDir(p, f);
			}
		}
		p.println(" </TABLE>");
		p.println("</CENTER>");
		p.println("</BODY>");
		p.println("</HTML>");
	}
	finally {
		p.close();
	}
}
/*********************************************************************************
* Add HTML code to PrintWriter p which provides link to specified file which
* is assumed to reside within the same directory as the index.html file being written
*********************************************************************************/
/*********************************************************************************
 * Add HTML code to PrintWriter p which provides link to specified file which
 * is assumed to reside within the same directory as the index.html file being written
 *********************************************************************************/
def createLinkForDir(p, file) {
	// skip files - dirs only
	if (!file.isDirectory()) {
		return
	}
	String columnPadding = " "
	String dirName = file.getName()
	// Link only when the directory has its own index.html to point at;
	// otherwise emit the bare directory name.
	String label = containsIndexFile(file)
		? "<a href=\"./" + dirName + "/index.html\">" + dirName + "</a>"
		: dirName
	p.println(columnPadding + "<TR><TD>")
	p.println(columnPadding + label)
	p.println(columnPadding + "</TD></TR>")
}
/*********************************************************************************
* Returns true if the specified directory File contains an index.html file
*********************************************************************************/
/*********************************************************************************
 * Returns true if the specified directory File contains an index.html file
 *********************************************************************************/
def containsIndexFile(dir) {
	// A directory is linkable when it holds its own index.html entry.
	return new File(dir, "index.html").exists()
}
/*********************************************************************************
* Add HTML code to PrintWriter p which provides the STYLE tag
*********************************************************************************/
/*********************************************************************************
 * Add HTML code to PrintWriter p which provides the STYLE tag
 *********************************************************************************/
def printStyleSheet(p) {
	// Emit an (empty) CSS block; rules may be filled in between the comment markers.
	["<style>", "<!--", "", "-->", "</style>"].each { line ->
		p.println(line)
	}
}

View file

@ -0,0 +1,327 @@
import java.util.regex.*;
import groovy.io.FileType;
ext.testReport = null; // lazily-populated map of <fully qualified test class name, duration in ms> parsed from a prior JUnit HTML report; see getTestReport()
/*
* Checks if html test report for an individual test class has a valid name.
*/
/*
 * Checks if html test report for an individual test class has a valid name.
 * Accepts per-class reports ("...Test.html") and rejects suite reports.
 */
boolean hasValidTestReportClassName(String name) {
	boolean isPerClassReport = name.endsWith("Test.html")
	boolean isSuiteReport = name.contains("Suite")
	return isPerClassReport && !isSuiteReport
}
/*
* Returns duration for a test class report.
*/
long getDurationFromTestReportClass(String fileContents, String fileName) {
	/* The duration for the entire test class appears in the test report as (multiline):
	 * <div class="infoBox" id="duration">
	 * <div class="counter">0s</div>
	 * The duration value may appear in the format of: 1m2s, 1m2.3s, 3.4s
	 */
	Pattern p = Pattern.compile("(?<=id=\"duration\">[\r\n]<div\\sclass=\"counter\">)(.*?)(?=</div)",
		Pattern.MULTILINE);
	Matcher m = p.matcher(fileContents);
	// Fail the build loudly (assert) if the report format ever changes.
	assert m.find() == true
	String duration = m.group()
	assert duration != null && duration.trim().length() > 0
	long durationInMillis
	// Parse out the duration
	if (duration.contains("m") && duration.contains("s")) { // has minute and seconds
		int minutes = Integer.parseInt(duration.substring(0, duration.indexOf("m")))
		double seconds = Double.parseDouble(duration.substring(duration.indexOf("m") + 1
			, duration.length()-1))
		// NOTE(review): Groovy coerces the double expression to 'long' here,
		// dropping any sub-millisecond fraction -- presumably acceptable for
		// scheduling purposes; confirm if exact values ever matter.
		durationInMillis = (minutes * 60 * 1000) + (seconds * 1000)
	} else if (!duration.contains("m") && duration.contains("s")) { // has only seconds
		double seconds = Double.parseDouble(duration.substring(0, duration.length()-1))
		durationInMillis = (seconds * 1000)
	} else { // unknown format
		assert false : "getDurationFromTestReportClass: Unknown duration format in $fileName. 'duration' value is $duration"
	}
	logger.debug("getDurationFromTestReportClass: durationInMillis = '"+ durationInMillis
		+"' parsed from duration = '" + duration + "' in $fileName")
	return durationInMillis
}
/*
* Creates <fully qualified classname, duration> from JUnit test report
*/
/*
 * Lazily builds and caches (on project.testReport) a map of
 * <fully qualified test class name, duration in ms> parsed from the JUnit HTML
 * report rooted at 'testTimeParserInputDir'.  Parsing happens at most once per
 * gradle configuration phase; subsequent calls return the cached map.
 */
def HashMap<String, Long> getTestReport() {
	// populate testReport only once per gradle configuration phase
	if (project.testReport == null) {
		logger.debug("getTestReport: Populating 'testReport' using '$testTimeParserInputDir'")
		assert (testTimeParserInputDir != null && testTimeParserInputDir.contains("classes")) :
			"""getTestReport: The value of 'testTimeParserInputDir' does not exist.
			Specify this value via cmd line -PtestTimeParserInputDir=<value>"""
		File classesReportDir = new File(testTimeParserInputDir)
		assert classesReportDir.exists() : "getTestReport: The path '$testTimeParserInputDir' does not exist on the file system"
		testReport = new HashMap<String, Long>();
		int excludedHtmlFiles = 0 // counter
		int totalHtmlFiles = 0
		String excludedHtmlFileNames = "" // for log.info summary message
		classesReportDir.eachFileRecurse (FileType.FILES) { file ->
			totalHtmlFiles++
			// Only read html file for a Test and not a test Suite
			if(hasValidTestReportClassName(file.name)) {
				String fileContents = file.text
				/* The fully qualified class name appears in the test report as:
				 * <h1>Class ghidra.app.plugin.assembler.sleigh.BuilderTest</h1>
				 */
				String fqNameFromTestReport = fileContents.find("(?<=<h1>Class\\s).*?(?=</h1>)")
				long durationInMillis = getDurationFromTestReportClass(fileContents, file.name)
				testReport.put(fqNameFromTestReport, durationInMillis)
				logger.debug("getTestReport: Added to testReport: class name = '"
					+ fqNameFromTestReport + "' and durationInMillis = '"+ durationInMillis
					+"' from " + file.name)
			} else {
				logger.debug("getTestReport: Excluding " + file.name + " from test report parsing.")
				excludedHtmlFileNames += file.name + ", "
				excludedHtmlFiles++
			}
		}
		// Sanity checks: something was parsed, and every file was accounted for.
		assert totalHtmlFiles != 0 : "getTestReport: Did not parse any valid html files in $testTimeParserInputDir. Directory might be empty"
		assert totalHtmlFiles == (testReport.size() + excludedHtmlFiles) : "Not all html files processed."
		logger.info("getTestReport:\n" +
			"\tIncluded " + testReport.size() + " and excluded " + excludedHtmlFiles
			+ " html files out of " + totalHtmlFiles + " in Junit test report.\n"
			+ "\tExcluded html file names are: " + excludedHtmlFileNames + "\n"
			+ "\tParsed test report located at " + testTimeParserInputDir)
	}
	return project.testReport
}
/*
* Checks if Java test class has a valid name.
*/
/*
 * Checks if Java test class has a valid name: non-null, ends in "Test.java",
 * and is neither an abstract base class nor a suite.
 */
boolean hasValidTestClassName(String name) {
	if (name == null || !name.endsWith("Test.java")) {
		return false
	}
	return !name.contains("Abstract") && !name.contains("Suite")
}
/*
* Checks if Java test class is excluded via 'org.junit.experimental.categories.Category'
*/
/*
 * Checks if Java test class is excluded via 'org.junit.experimental.categories.Category'
 */
boolean hasCategoryExcludes(String fileContents) {
	// Either @Category annotation below excludes the class from parallel runs.
	def excludedCategoryPatterns = [
		"@Category\\(PortSensitiveCategory.class\\)", // evaluated as regex
		"@Category\\(NightlyCategory.class\\)"
	]
	return excludedCategoryPatterns.any { fileContents.find(it) }
}
/*
* Returns a fully qualified class name from a java class.
*/
/*
 * Returns a fully qualified class name from a java class.
 */
String constructFullyQualifiedClassName(String fileContents, String fileName) {
	// Pull the package name out of the "package x.y.z;" declaration.
	String packageName = fileContents.find("(?<=package\\s).*?(?=;)")
	logger.debug("constructFullyQualifiedClassName: Found '" + packageName + "' in " + fileName)
	assert packageName != null : "constructFullyQualifiedClassName: Null packageName found in $fileName"
	assert !packageName.startsWith("package")
	assert !packageName.endsWith(";")
	// "MyTest.java" -> "x.y.z.MyTest"
	String simpleName = fileName.replace(".java","")
	return "${packageName}.${simpleName}"
}
/* Creates a list of test classes, sorted by duration, for a subproject.
* First parses JUnit test report located at 'testTimeParserInputDir' for <fully qualified class name, duration in milliseconds> .
* Then traverses a test sourceSet for a subproject for a test to include and assigns a duration value.
* Returns a sorted list of test classes for the sourceSet parameter.
*/
def ArrayList getTestsForSubProject(SourceDirectorySet sourceDirectorySet) {
	assert (getTestReport() != null && getTestReport().size() > 0) : "getTestsForSubProject: Problem parsing test report located at: " + testTimeParserInputDir
	def testsForSubProject = new LinkedHashMap<>();
	int includedClassFilesNotInTestReport = 0 // class in sourceSet but not in test report, 'bumped' to first task
	int includedClassFilesInTestReport = 0 // class in sourceSet and in test report
	int excludedClassFilesBadName = 0 // excluded class in sourceSet with invalid name
	int excludedClassFilesCategory = 0 // excluded class in sourceSet with @Category annotation
	int excludedClassAllTestsIgnored = 0 // excluded class in sourceSet with test report duration of 0
	logger.debug("getTestsForSubProject: Found " + sourceDirectorySet.files.size()
		+ " file(s) in source set to process.")
	for (File file : sourceDirectorySet.getFiles()) {
		logger.debug("getTestsForSubProject: Found file in sourceSet = " + file.name)
		// Must have a valid class name
		if(!hasValidTestClassName(file.name)) {
			logger.debug("getTestsForSubProject: Excluding file '" + file.name + "' based on name.")
			excludedClassFilesBadName++
			continue
		}
		String fileContents = file.text
		// Must not have a Category annotation
		if (hasCategoryExcludes(fileContents)) {
			logger.debug("getTestsForSubProject: Found category exclude for '"
				+ file.name + "'. Excluding this class from running.")
			excludedClassFilesCategory++
			continue
		}
		String fqName = constructFullyQualifiedClassName( fileContents, file.name)
		// Lookup the test duration
		if (getTestReport().containsKey(fqName)) {
			long duration = getTestReport().get(fqName)
			// Some classes from test report have duration value of 0. Exclude these from running.
			if (duration > 0) {
				testsForSubProject.put(fqName, duration)
				logger.debug("getTestsForSubProject: Adding '" + fqName + "'")
				includedClassFilesInTestReport++
			} else {
				logger.debug("getTestsForSubProject: Excluding '" + fqName
					+ "' because duration from test report is " + duration
					+ "ms. Probably because all test methods are @Ignore'd." )
				excludedClassAllTestsIgnored++
			}
		} else {
			logger.debug("getTestsForSubProject: Found test class not in test report."
				+ " Bumping to front of tasks '" + fqName + "'")
			testsForSubProject.put(fqName, 3600000) // cheap way to bump to front of (eventually) sorted list
			includedClassFilesNotInTestReport++
		}
	}
	// Sort by duration (descending, so long-running tests are scheduled first)
	def sorted = testsForSubProject.sort { a, b -> b.value <=> a.value }
	logger.info ("getTestsForSubProject:\n"
		+ "\tIncluding " + includedClassFilesInTestReport + " test classes for this sourceSet because they are in the test report.\n"
		+ "\tIncluding/bumping " + includedClassFilesNotInTestReport + " not in test report.\n"
		+ "\tExcluding "+ excludedClassFilesBadName +" based on name not ending in 'Test' or contains 'Abstract' or 'Suite', " + excludedClassFilesCategory
		+ " based on '@Category, " + excludedClassAllTestsIgnored + " because duration = 0ms.\n"
		+ "\tReturning sorted list of size "+ sorted.size() + " out of " + sourceDirectorySet.files.size()
		+ " total files found in sourceSet.")
	// Sanity check: every file in the sourceSet was either included or excluded.
	int filesProcessed = includedClassFilesNotInTestReport + includedClassFilesInTestReport +
		excludedClassFilesBadName + excludedClassFilesCategory + excludedClassAllTestsIgnored
	assert sourceDirectorySet.files.size() == filesProcessed : "getTestsForSubProject did not process every file in sourceSet"
	return new ArrayList(sorted.keySet())
}
/*********************************************************************************
* Determines if test task creation should be skipped for parallelCombinedTestReport task.
*********************************************************************************/
/*
 * Returns true when no parallel 'test' tasks should be generated for the given
 * subproject: not in parallel mode, no test sources, not testable, or the
 * project opted out via 'excludeFromParallelTests'.
 */
def boolean shouldSkipTestTaskCreation(Project subproject) {
	// Assume we skip; only a project passing every check below gets tasks.
	boolean skip = true
	if (!parallelMode) {
		logger.debug("shouldSkipTestTaskCreation: Skip task creation for $subproject.name. Not in parallel mode.")
	}
	else if (subproject.sourceSets.test.java.files.isEmpty()) {
		logger.debug("shouldSkipTestTaskCreation: Skip task creation for $subproject.name. No test sources.")
	}
	else if (!isTestable(subproject)) {
		logger.debug("shouldSkipTestTaskCreation: Skip task creation for $subproject.name. isTestable == false")
	}
	else if (subproject.findProperty("excludeFromParallelTests") ?: false) {
		logger.debug("shouldSkipTestTaskCreation: Skip task creation for $subproject.name."
			+ " 'excludeFromParallelTests' found.")
	}
	else {
		skip = false
	}
	return skip
}
/*********************************************************************************
* Determines if integrationTest task creation should be skipped for parallelCombinedTestReport task.
*********************************************************************************/
/*
 * Returns true when no parallel 'integrationTest' tasks should be generated
 * for the given subproject: not in parallel mode, no integrationTest sources,
 * not testable, or opted out via 'excludeFromParallelIntegrationTests'.
 */
def boolean shouldSkipIntegrationTestTaskCreation(Project subproject) {
	// Assume we skip; only a project passing every check below gets tasks.
	boolean skip = true
	if (!parallelMode) {
		logger.debug("shouldSkipIntegrationTestTaskCreation: Skip task creation for $subproject.name."
			+ " Not in parallel mode.")
	}
	else if (subproject.sourceSets.integrationTest.java.files.isEmpty()) {
		logger.debug("shouldSkipIntegrationTestTaskCreation: Skip task creation for $subproject.name."
			+ " No integrationTest sources.")
	}
	else if (!isTestable(subproject)) {
		logger.debug("shouldSkipIntegrationTestTaskCreation: Skip task creation for $subproject.name."
			+ " isTestable == false")
	}
	else if (subproject.findProperty("excludeFromParallelIntegrationTests") ?: false) {
		logger.debug("shouldSkipIntegrationTestTaskCreation: Skip task creation for $subproject.name."
			+ "'excludeFromParallelIntegrationTests' found.")
	}
	else {
		skip = false
	}
	return skip
}
/*********************************************************************************
* Gets the path to the last archived test report. This is used by the
* 'parallelCombinedTestReport' task when no -PtestTimeParserInputDir is supplied
* via cmd line.
*********************************************************************************/
def String getLastArchivedReport(String reportArchivesPath) {
	// skip configuration for this property if not in parallelMode
	if (!parallelMode) {
		logger.debug("getLastArchivedReport: not in 'parallelMode'. Skipping.")
		return ""
	}
	File reportArchiveDir = new File(reportArchivesPath);
	logger.info("getLastArchivedReport: searching for test report to parse in " + reportArchivesPath)
	assert (reportArchiveDir.exists()) :
		"""Tried to parse test report durations from archive location ' $reportArchiveDir '
		because no -PtestTimeParserInputDir=<path/to/report> supplied via cmd line"""
	// filter for report archive directories (named "reports_*" by convention).
	File[] files = reportArchiveDir.listFiles(new FilenameFilter() {
		public boolean accept(File dir, String name) {
			return name.startsWith("reports_");
		}
	});
	assert (files != null && files.size() > 0) :
		"""Could not find test report archives in '$reportArchiveDir'.
		because no -PtestTimeParserInputDir=<path/to/report> supplied via cmd line"""
	logger.debug("getLastArchivedReport: found " + files.size() + " archived report directories in '"
		+ reportArchiveDir.getPath() + "'.")
	// Sort by lastModified date. The last modified directory will be first.
	files = files.sort{-it.lastModified()}
	// Return the most recently modified archive.
	logger.debug("getLastArchivedReport: selecting report archive to parse: " + files[0].getAbsolutePath())
	return files[0].getAbsolutePath()
}
/*********************************************************************************
* Returns true if subproject is not a support module.
* These modules are commonly excluded in type:TestReport tasks.
*********************************************************************************/
/*********************************************************************************
 * Returns true if subproject is not a support module.
 * These modules are commonly excluded in type:TestReport tasks.
 *********************************************************************************/
def isTestable(Project p) {
	// Support modules opt out by declaring 'isSupportProject = true'.
	def isSupport = p.findProperty("isSupportProject") ?: false
	return !isSupport
}
// Export these utility methods on project.ext so other build scripts that
// apply this file can invoke them.
ext {
	getTestsForSubProject = this.&getTestsForSubProject // export this utility method to project
	shouldSkipTestTaskCreation = this.&shouldSkipTestTaskCreation
	shouldSkipIntegrationTestTaskCreation = this.&shouldSkipIntegrationTestTaskCreation
	getLastArchivedReport = this.&getLastArchivedReport
	isTestable = this.&isTestable
}

View file

@ -0,0 +1,41 @@
// Running 'gradle' with no task arguments prints the usage text below.
defaultTasks ':usage'
// Prints a summary of the primary Ghidra gradle tasks.
// Fixed typo in the printed text: "can can" -> "can".
task usage {
	doLast {
		println """
	*******************************************************************************************
	GHIDRA GRADLE
	Note: Most tasks can be used to affect all modules or can
	be run against a specific module. For example:
		from root project, "gradle buildHelp" builds help for all modules.
		from root project, "gradle :Base:buildHelp" builds help for the "Base" module
		from the Base project dir, "gradle buildHelp" builds help for the "Base" module
	Primary gradle tasks for Ghidra
		usage          Displays this text [usage.gradle]
		prepDev        Prepares development environment [prepDev.gradle]
		buildHelp      Builds help [buildHelp.gradle]
		buildGhidra    Builds installation for local platform. [distribution.gradle]
		compileSleigh  Builds the sleigh languages [processorUtils.gradle]
	Task rules:
		buildNative_[platform] builds the native executables and libraries for the
			specified platform (win64, linux64, osx64, etc.) [nativeBuildProperties.gradle]
		prebuildNative_[platform] builds the native executables and libraries for the
			specified platform and copies results to bin repo. [nativeBuildProperties.gradle]
	"""
	}
}
// Hook appended to Gradle's built-in 'tasks' task.
// NOTE(review): the body is intentionally empty; the old 'usage.execute()' call
// was commented out (Task.execute() is not part of Gradle's public API).
// Consider removing this hook entirely if no extra output is wanted.
tasks['tasks'].doLast {
	// usage.execute()
}