Initial commit.
- qa-tools public release, which includes:
  - trace-based coverage tool
  - quality metrics measurement and tracking setup
  - associated in-source documentation
Signed-off-by: Basil Eljuse <basil.eljuse@arm.com>
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..247d293
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+**/*.so
+**/*.o
+.gitreview
diff --git a/coverage-tool/coverage-plugin/Makefile b/coverage-tool/coverage-plugin/Makefile
new file mode 100644
index 0000000..3a2a18f
--- /dev/null
+++ b/coverage-tool/coverage-plugin/Makefile
@@ -0,0 +1,30 @@
+##############################################################################
+# Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+##############################################################################
+
+CPPFLAGS = -I${PVLIB_HOME}/include/fmruntime
+CXXFLAGS = -fpic -Wall -Werror -g
+LDFLAGS =
+CMAKE_CXX_FLAGS = -std=c++11 -O3
+
+ifeq (${CROSS_COMPILE_32BIT},1)
+CXXFLAGS += -m32
+LDFLAGS += -m32
+endif
+
+PLUGIN_NAME = coverage_trace
+
+PLUGIN_LIB = ${PLUGIN_NAME}.so
+PLUGIN_OBJECTS = ${PLUGIN_NAME}.o plugin_utils.o
+
+${PLUGIN_LIB}: ${PLUGIN_OBJECTS}
+ ${CXX} -shared -o $@ ${LDFLAGS} $^
+
+.cc.o:
+ ${CXX} -c -o $@ ${CXXFLAGS} ${CMAKE_CXX_FLAGS} ${CPPFLAGS} $^
+
+.PHONY: clean
+clean:
+ rm -f ${PLUGIN_OBJECTS} ${PLUGIN_LIB}
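+
+# Build sketch (illustrative paths): PVLIB_HOME is assumed to point at an Arm
+# Fast Models installation that provides the fmruntime headers, e.g.
+#   PVLIB_HOME=/opt/arm/fastmodels make
+# Set CROSS_COMPILE_32BIT=1 when the model hosting the plugin is a 32-bit
+# binary.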
diff --git a/coverage-tool/coverage-plugin/coverage_trace.cc b/coverage-tool/coverage-plugin/coverage_trace.cc
new file mode 100644
index 0000000..4dc72ee
--- /dev/null
+++ b/coverage-tool/coverage-plugin/coverage_trace.cc
@@ -0,0 +1,349 @@
+/*!
+##############################################################################
+# Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+##############################################################################
+*/
+// Implements the MTI trace plugin interface used to collect trace source
+// data from an Arm FVP.
+
+#include "MTI/PluginInterface.h"
+#include "MTI/PluginFactory.h"
+#include "MTI/PluginInstance.h"
+#include "MTI/ModelTraceInterface.h"
+
+#include "plugin_utils.h"
+#include "trace_sources.h"
+
+#include <errno.h>
+#include <string>
+#include <algorithm>
+#include <cstdio>
+#include <sstream>
+#include <vector>
+#include <map>
+#include <typeinfo>
+#include <typeindex>
+#include <utility>
+
+#ifdef SG_MODEL_BUILD
+ #include "builddata.h"
+ #define PLUGIN_VERSION FULL_VERSION_STRING
+#else
+ #define PLUGIN_VERSION "unreleased"
+#endif
+
+using namespace eslapi;
+using namespace MTI;
+using namespace std;
+
+// Implements the plugin interface for trace coverage
+class CoverageTrace :public PluginInstance
+{
+public:
+ virtual CAInterface * ObtainInterface(if_name_t ifName,
+ if_rev_t minRev,
+ if_rev_t * actualRev);
+
+ CoverageTrace(const char *instance_name, const char *trace_file_prefix);
+ ~CoverageTrace();
+
+ /** This is to associate a plugin with a simulation instance. Exactly one
+ * simulation must be registered.
+ * */
+ virtual eslapi::CADIReturn_t RegisterSimulation(eslapi::CAInterface
+ *simulation);
+
+ // This is called before the plugin .dll/.so is unloaded and should allow
+ // the plugin to do its cleanup.
+ virtual void Release();
+
+ virtual const char *GetName() const;
+
+private:
+ std::string instance_name;
+
+ bool Error(const char *);
+
+ vector<TraceComponentContext*> trace_components;
+ std::string trace_file_prefix;
+};
+
+CAInterface *CoverageTrace::ObtainInterface(if_name_t ifName,
+ if_rev_t minRev,
+ if_rev_t * actualRev)
+{
+ printf("CoverageTrace::ObtainInterface\n");
+ // If someone is asking for the matching interface
+ if((strcmp(ifName,IFNAME()) == 0) &&
+ // and the revision of this interface implementation is
+ (minRev <= IFREVISION()))
+ // at least what is being asked for
+ {
+ if (actualRev) // Make sure this is not a NULL pointer
+ *actualRev = IFREVISION();
+ return this;
+ }
+
+ if((strcmp(ifName, CAInterface::IFNAME()) == 0) &&
+ minRev <= CAInterface::IFREVISION())
+ {
+ if (actualRev != NULL)
+ *actualRev = CAInterface::IFREVISION();
+ return this;// Dynamic_cast<TracePluginInterface *>(this);
+ }
+ return NULL;
+}
+
+
+CoverageTrace::CoverageTrace(const char *instance_name_,
+ const char *trace_file_prefix_) :
+ instance_name(instance_name_),
+ trace_file_prefix(trace_file_prefix_)
+{
+ printf("CoverageTrace::CoverageTrace\n");
+}
+
+CoverageTrace::~CoverageTrace()
+{
+ printf("CoverageTrace::~CoverageTrace\n");
+}
+
+bool
+CoverageTrace::Error(const char *msg)
+{
+ fprintf(stderr, "%s\n", msg);
+ return false;
+}
+
+// Method that registers for simulation trace events. In this case it
+// registers for trace sources named 'INST'.
+CADIReturn_t
+CoverageTrace::RegisterSimulation(CAInterface *ca_interface)
+{
+ printf("CoverageTrace::RegisterSimulation\n");
+ if (!ca_interface) {
+ Error("Received CAInterface NULL pointer.");
+ return CADI_STATUS_IllegalArgument;
+ }
+ std::stringstream ss;
+
+ SystemTraceInterface *sys_if =
+ ca_interface->ObtainPointer<SystemTraceInterface>();
+ if (sys_if == 0) {
+ Error("Got a NULL SystemTraceInterface.");
+ return CADI_STATUS_GeneralError;
+ }
+
+ for(SystemTraceInterface::TraceComponentIndex tci=0;
+ tci < sys_if->GetNumOfTraceComponents(); ++tci) {
+ const char* tpath = sys_if->GetComponentTracePath(tci);
+ CAInterface *caif = sys_if->GetComponentTrace(tci);
+ ComponentTraceInterface *cti =
+ caif->ObtainPointer<ComponentTraceInterface>();
+ if (cti == 0) {
+ Error("Could not get TraceInterface for component.");
+ continue;
+ }
+
+ if (cti->GetTraceSource("INST") != 0) {
+ TraceComponentContext *trace_component = new
+ TraceComponentContext(tpath);
+
+ // To register a new trace source the arguments are the
+ // name of the trace source followed by a vector of
+ // pairs of (field name,field type).
+ InstructionTraceContext *inst_cont = new InstructionTraceContext(
+ "INST",
+ { {"PC", u32},
+ {"SIZE", u32}}
+ );
+ inst_cont->nb_insts = 0;
+ inst_cont->CreateEvent(&cti, inst_cont->Callback);
+ trace_component->AddTraceSource(inst_cont);
+ trace_components.push_back(trace_component);
+ }
+ }
+
+ return CADI_STATUS_OK;
+}
+
+// This is called before the plugin .dll/.so is unloaded and should allow the
+// plugin to do its cleanup.
+void
+CoverageTrace::Release()
+{
+ printf("CoverageTrace::Release\n");
+ // We can dump our data now
+ int error = 0;
+ char* fname;
+ int ret;
+ std::vector<TraceComponentContext*>::iterator tcc;
+ for (tcc = trace_components.begin(); tcc < trace_components.end(); ++tcc) {
+ TraceComponentContext *tcont = *tcc;
+ // Print some overall stats
+ InstructionTraceContext* rtc = (InstructionTraceContext*)
+ tcont->trace_sources["INST"];
+ printf("Trace path: %s\n", tcont->trace_path.c_str());
+
+ // Construct a trace file name
+ int status = asprintf(&fname, "%s-%s.log",
+ this->trace_file_prefix.c_str(),
+ tcont->trace_path.c_str());
+ if (status == -1) // asprintf returns -1 on failure
+ {
+ printf("Error in asprintf: %d\n", status);
+ printf("Error description is : %s\n", strerror(errno));
+ }
+
+ // Open it
+ FILE* fp = fopen(fname, "w");
+ if (fp == NULL) {
+ fprintf(stderr, "Can't open file %s for writing.\n", fname);
+ error = 1;
+ break;
+ }
+
+ InstStatMap::iterator map_it;
+ // Dump the detailed stats
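+ // Each record is "<pc> <times executed> <size>", e.g. "00001a2c 12 4"
+ // (illustrative values); this is the format read back by
+ // intermediate_layer.py.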
+ for (map_it = rtc->stats.begin(); map_it != rtc->stats.end();
+ ++map_it) {
+ fprintf(fp, "%08x %lu %lu\n", map_it->first, map_it->second.cnt,
+ map_it->second.size);
+ }
+
+ // Close the file
+ ret = fclose(fp);
+ if (ret != 0) {
+ fprintf(stderr, "Failed to close %s: %s.", fname, strerror(errno));
+ error = 1;
+ break;
+ }
+
+ free(fname);
+ }
+ if (error != 0)
+ delete this;
+}
+
+const char *
+CoverageTrace::GetName() const
+{
+ printf("CoverageTrace::GetName\n");
+ return instance_name.c_str();
+}
+
+// Class used to return a static object CAInterface. CAInterface provides a
+// basis for a software model built around 'components' and 'interfaces'.
+// A component provides concrete implementations of one or more interfaces.
+// Interfaces are identified by a string name (of type if_name_t), and an
+// integer revision (type if_rev_t). A higher revision number indicates a newer
+// revision of the same interface.
+class ThePluginFactory :public PluginFactory
+{
+public:
+ virtual CAInterface *ObtainInterface(if_name_t ifName,
+ if_rev_t minRev,
+ if_rev_t * actualRev);
+
+ virtual uint32_t GetNumberOfParameters();
+
+ virtual eslapi::CADIReturn_t
+ GetParameterInfos(eslapi::CADIParameterInfo_t *parameter_info_list);
+
+ virtual CAInterface *Instantiate(const char *instance_name,
+ uint32_t number_of_parameters,
+ eslapi::CADIParameterValue_t *parameter_values);
+
+ virtual void Release();
+
+ virtual const char *GetType() const { return "CoverageTrace"; }
+ virtual const char *GetVersion() const { return PLUGIN_VERSION; }
+};
+
+// Allows a client to obtain a reference to any of the interfaces that the
+// component implements. The client specifies the id and revision of the
+// interface that it wants to request. The component can return NULL if it
+// doesn't implement that interface, or only implements a lower revision.
+// The client in this case is the Arm FVP model.
+CAInterface *ThePluginFactory::ObtainInterface(if_name_t ifName,
+ if_rev_t minRev,
+ if_rev_t * actualRev)
+{
+ printf("ThePluginFactory::ObtainInterface\n");
+ // If someone is asking for the matching interface
+ if((strcmp(ifName,IFNAME()) == 0) &&
+ // and the revision of this interface implementation is
+ (minRev <= IFREVISION()))
+ // at least what is being asked for
+ {
+ if (actualRev) // Make sure this is not a NULL pointer
+ *actualRev = IFREVISION();
+ return static_cast<ThePluginFactory *>(this);
+ }
+
+ if((strcmp(ifName, CAInterface::IFNAME()) == 0) &&
+ minRev <= CAInterface::IFREVISION())
+ {
+ if (actualRev) // Make sure this is not a NULL pointer
+ *actualRev = CAInterface::IFREVISION();
+ return static_cast<CAInterface *>(this);
+ }
+ return NULL;
+}
+
+uint32_t ThePluginFactory::GetNumberOfParameters()
+{
+ printf("ThePluginFactory::GetNumberOfParameters\n");
+ return 1;
+}
+
+eslapi::CADIReturn_t
+ThePluginFactory::GetParameterInfos(
+eslapi::CADIParameterInfo_t *parameter_info_list)
+{
+ printf("ThePluginFactory::GetParameterInfos\n");
+ *parameter_info_list = CADIParameterInfo_t(
+ 0, "trace-file-prefix", CADI_PARAM_STRING,
+ "Prefix of the trace files.", 0, 0, 0, 0, "covtrace"
+ );
+ return CADI_STATUS_OK;
+}
+
+// Method that creates a new instance of the trace plugin
+CAInterface *ThePluginFactory::Instantiate(const char *instance_name,
+ uint32_t param_nb,
+ eslapi::CADIParameterValue_t *values)
+{
+ printf("ThePluginFactory::Instantiate\n");
+ const char *trace_file_prefix = 0;
+ printf("CoverageTrace: number of params: %d\n", param_nb);
+ for (uint32_t i = 0; i < param_nb; ++i) {
+ if (values[i].parameterID == 0) {
+ trace_file_prefix = values[i].stringValue;
+ } else {
+ printf("\tCoverageTrace: got unexpected param %d\n",
+ values[i].parameterID);
+ }
+ }
+ return new CoverageTrace(instance_name, trace_file_prefix);
+}
+
+void ThePluginFactory::Release()
+{
+ printf("ThePluginFactory::Release\n");
+}
+
+static ThePluginFactory factory_instance;
+
+// Entry point for the instantiation of the plugin.
+// Returns a pointer to a static object that provides the interface for the
+// plugin.
+CAInterface *GetCAInterface()
+{
+ printf("********->GetCAInterface\n");
+ return &factory_instance;
+}
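+
+// Illustrative use (assumed FVP command line; the exact options and the
+// TRACE parameter namespace depend on the model and plugin name):
+//   <fvp-binary> --plugin ./coverage_trace.so \
+//       -C TRACE.coverage_trace.trace-file-prefix=covtrace ...
+// One "<prefix>-<component trace path>.log" file is written per component
+// exposing an INST trace source.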
+
+// End of file coverage_trace.cc
diff --git a/coverage-tool/coverage-plugin/plugin_utils.cc b/coverage-tool/coverage-plugin/plugin_utils.cc
new file mode 100644
index 0000000..8eb3024
--- /dev/null
+++ b/coverage-tool/coverage-plugin/plugin_utils.cc
@@ -0,0 +1,65 @@
+/*!
+##############################################################################
+# Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+##############################################################################
+*/
+
+#include "plugin_utils.h"
+
+// Get a named trace source, create an event class from the named subset of
+// event fields, register the event class and look up the field indexes, and
+// register a user-provided MTI callback with the trace source.
+// Writes to error_ss and returns false if anything fails.
+bool RegisterCallbackForComponent(const MTI::ComponentTraceInterface *mti,
+ const char *trace_source,
+ ValueBind_t *value_bind, void *this_ptr,
+ MTI::CallbackT callback,
+ MTI::EventClass **ptr_event_class,
+ std::stringstream &error_ss)
+{
+ const MTI::TraceSource *source = mti->GetTraceSource(trace_source);
+ if (!source) {
+ error_ss << "Could not find " << trace_source << " source";
+ return false;
+ }
+
+ MTI::FieldMask mask = 0;
+ const MTI::EventFieldType *eft;
+
+ for(unsigned i=0; value_bind[i].name != 0; i++) {
+ if ((eft = source->GetField( value_bind[i].name )) != 0) {
+ mask |= 1 << eft->GetIndex();
+ } else {
+ error_ss << "No field " << value_bind[i].name <<
+ " found in " << trace_source << " trace source";
+ return false;
+ }
+ }
+
+ MTI::EventClass *event_class = source->CreateEventClass(mask);
+ if (!event_class) {
+ error_ss << "Unable to register event class for " <<
+ trace_source << " trace source.";
+ return false;
+ }
+ for(unsigned i=0; value_bind[i].name != 0; i++)
+ {
+ MTI::ValueIndex idx = event_class->GetValueIndex(value_bind[i].name);
+ if (idx != -1) {
+ *(value_bind[i].index) = idx;
+ } else {
+ error_ss << "Unable to GetValueIndex for " << trace_source
+ << "." << value_bind[i].name << ".";
+ return false;
+ }
+ }
+ if (callback &&
+ event_class->RegisterCallback(callback, this_ptr) != MTI::MTI_OK) {
+ error_ss << "RegisterCallback failed for " << trace_source;
+ return false;
+ }
+ *ptr_event_class = event_class;
+ return true;
+}
diff --git a/coverage-tool/coverage-plugin/plugin_utils.h b/coverage-tool/coverage-plugin/plugin_utils.h
new file mode 100644
index 0000000..546e5fc
--- /dev/null
+++ b/coverage-tool/coverage-plugin/plugin_utils.h
@@ -0,0 +1,46 @@
+/*!
+##############################################################################
+# Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+##############################################################################
+*/
+#ifndef _COVERAGE_TOOL_COVERAGE_PLUGIN_PLUGIN_UTILS_H_
+#define _COVERAGE_TOOL_COVERAGE_PLUGIN_PLUGIN_UTILS_H_
+
+#include <sstream>
+#include <map>
+#include <vector>
+#include <string>
+#include "MTI/ModelTraceInterface.h"
+using namespace eslapi;
+using namespace MTI;
+using namespace std;
+
+typedef struct {
+ const char *name;
+ MTI::ValueIndex *index;
+} ValueBind_t;
+
+// Declare an MTI callback method and define a static thunk method to call
+// into this from C code.
+#define CALLBACK_DECL_AND_THUNK(class_name, name) \
+ static void name##Thunk(void * user_data, const MTI::EventClass *event_class, const MTI::EventRecord *record) \
+ { \
+ reinterpret_cast<class_name *>(user_data)->name(event_class, record); \
+ } \
+ void name(const MTI::EventClass *event_class, const MTI::EventRecord *record)
+
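+// Illustrative use in a hypothetical trace context class: the macro expands
+// to a static OnInstThunk() suitable as the MTI C callback plus the member
+// declaration itself.
+//
+//   class MyTraceContext {
+//   public:
+//       CALLBACK_DECL_AND_THUNK(MyTraceContext, OnInst);
+//   };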
+
+// Get a named trace source, create an event class from the named subset of
+// event fields, register the event class and look up the field indexes, and
+// register a user-provided MTI callback with the trace source.
+// Writes to error_ss and returns false if anything fails.
+bool RegisterCallbackForComponent(const MTI::ComponentTraceInterface *mti,
+ const char *trace_source,
+ ValueBind_t *value_bind, void *this_ptr,
+ MTI::CallbackT callback,
+ MTI::EventClass **ptr_event_class,
+ std::stringstream &error_ss);
+
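+// Illustrative binding table (field names taken from the INST source used in
+// coverage_trace.cc); the array passed in must end with a {0, 0} sentinel:
+//
+//   MTI::ValueIndex pc_idx, size_idx;
+//   ValueBind_t binds[] = { {"PC", &pc_idx}, {"SIZE", &size_idx}, {0, 0} };
+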
+#endif // _COVERAGE_TOOL_COVERAGE_PLUGIN_PLUGIN_UTILS_H_
diff --git a/coverage-tool/coverage-plugin/trace_sources.h b/coverage-tool/coverage-plugin/trace_sources.h
new file mode 100644
index 0000000..57f6462
--- /dev/null
+++ b/coverage-tool/coverage-plugin/trace_sources.h
@@ -0,0 +1,229 @@
+/*!
+##############################################################################
+# Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+##############################################################################
+*/
+
+#ifndef _COVERAGE_TOOL_COVERAGE_PLUGIN_TRACE_SOURCES_H_
+#define _COVERAGE_TOOL_COVERAGE_PLUGIN_TRACE_SOURCES_H_
+
+#include <map>
+#include <vector>
+#include <string>
+#include <algorithm>
+#include "MTI/ModelTraceInterface.h"
+
+using namespace MTI;
+using namespace std;
+
+struct InstStat {
+ uint64_t cnt;
+ uint64_t size;
+};
+
+typedef std::map<uint32_t, InstStat> InstStatMap;
+
+//Defining types for fields
+enum enum_types {u32, boolT};
+typedef enum_types ValueTypes;
+
+/*
+ * Structure used to save field data
+ */
+struct TFields{
+ ValueTypes t;
+ MTI::ValueIndex index;
+ void *value;
+};
+// Map of fields => Key -> Field name
+typedef map<string, TFields> TraceFieldsMap;
+
+/*
+ * Structure used to pass field data between trace contexts
+ */
+struct TParams {
+ void *value;
+ ValueTypes t;
+};
+// Map of fields => Key -> Field name
+typedef map<string, TParams> ParamsMap;
+
+/*
+ * Generic function to output errors
+ */
+bool Error(const char *msg)
+{
+ fprintf(stderr, "%s\n", msg);
+ return false;
+}
+
+/*
+ * Base class for Trace Source contexts
+ *
+ */
+class TraceSourceContext {
+ public:
+ string name; //Trace source name
+ TraceFieldsMap fields; //Fields to be used for the event
+ MTI::EventClass *event_class; //Event object to register callback
+ ParamsMap params; //List of parameters from another trace source
+
+/*
+ * Constructor that converts/stores the pairs of <field name, field type>
+ * in the 'fields' member.
+*/
+TraceSourceContext(const char* tname,
+ vector<pair<string, ValueTypes>> fields_def) {
+ name = tname;
+ string key;
+ // Referenced by field name => field type
+ for (size_t i=0; i < fields_def.size(); ++ i) {
+ key = fields_def[i].first;
+ fields[key].t = fields_def[i].second;
+ }
+}
+
+/*
+ * Generic Callback that can be used by derived objects. It fills the
+ * 'value' member in the 'fields' structure with a void* to the value
+ * retrieved from the component.
+*/
+template <class T>
+static T *TraceCallback(void* user_data,
+ const MTI::EventClass *event_class,
+ const MTI::EventRecord *record) {
+ T *tc = static_cast<T*>(user_data);
+ // Filled by Component
+ TraceFieldsMap::iterator it;
+ for (it = tc->fields.begin(); it != tc->fields.end(); ++it) {
+ // Based in the type creates an object with initial
+ // value retrieved from the component using the index
+ // for that field.
+ switch (it->second.t) {
+ case u32: it->second.value = new uint32_t(
+ record->Get<uint32_t>(event_class, it->second.index));
+ break;
+ case boolT: it->second.value = new bool(
+ record->GetBool(event_class, it->second.index));
+ break;
+ }
+ }
+ return tc;
+}
+
+/*
+ * Generic method to copy the fields from this trace source to the params
+ * member in other trace source. Optionally a list of field names can be
+ * passed to filter the list of field names copied.
+ * The params member is a Map of with the Field Id (name) as the key.
+*/
+void PassFieldstoParams(TraceSourceContext *target,
+ vector<string> field_names={}) {
+ TraceFieldsMap::iterator it;
+ for (it = fields.begin(); it != fields.end(); ++it) {
+ bool found = std::find(field_names.begin(), field_names.end(),
+ it->first) != field_names.end();
+ if ((!field_names.empty()) && (!found))
+ continue;
+ target->params[it->first].t = it->second.t;
+ switch (it->second.t) {
+ case u32:
+ target->params[it->first].value =
+ new uint32_t(*((uint32_t*)it->second.value));
+ break;
+ case boolT:
+ target->params[it->first].value =
+ new bool(*((bool*)it->second.value));
+ break;
+ }
+ }
+}
+/*
+ * Method that creates an event object in the trace source based in the
+ * fields given in the constructor. It then registers the given callback
+ * to this event.
+*/
+MTI::EventClass *CreateEvent(ComponentTraceInterface **ptr_cti,
+ MTI::CallbackT callback) {
+
+ ComponentTraceInterface *cti = *ptr_cti;
+ std::stringstream ss;
+ ComponentTraceInterface *mti = 0;
+
+ if (cti->GetTraceSource(name.c_str()) != 0) {
+ TraceSource* ts = cti->GetTraceSource(name.c_str());
+ printf("Trace source attached: %s\n", ts->GetName());
+
+ size_t map_size = fields.size();
+ ValueBind_t *values_array = new ValueBind_t[map_size + 1];
+ TraceFieldsMap::iterator it;
+ int i = 0;
+ for (it = fields.begin(); it != fields.end(); ++it) {
+ values_array[i]= ((ValueBind_t) { it->first.c_str(),
+ &it->second.index });
+ ++i;
+ };
+ values_array[map_size] = {0, 0}; //sentinel
+
+ mti = static_cast<ModelTraceInterface *>(cti);
+ if (!RegisterCallbackForComponent(mti, name.c_str(), values_array,
+ this, callback, &event_class, ss)) {
+ Error(ss.str().c_str());
+ return 0;
+ }
+ return event_class;
+ }
+ return 0;
+}
+};
+
+/*
+ * Class and types used to handle trace sources belonging to a
+ * component.
+*/
+typedef map<string, TraceSourceContext*> MapTraceSourcesType;
+class TraceComponentContext {
+ public:
+ string trace_path;
+ MapTraceSourcesType trace_sources;
+
+TraceComponentContext(string tpath) {
+ trace_path = tpath;
+}
+
+void AddTraceSource(TraceSourceContext *ts) {
+ trace_sources[ts->name] = ts;
+}
+};
+
+/*
+ * Class used to instantiate a Instruction trace source
+*/
+class InstructionTraceContext: public TraceSourceContext {
+ public:
+ using TraceSourceContext::TraceSourceContext;
+ InstStatMap stats;
+ uint64_t nb_insts;
+
+ static void Callback(void* user_data,
+ const MTI::EventClass *event_class,
+ const MTI::EventRecord *record) {
+ InstructionTraceContext* itc = static_cast<InstructionTraceContext*>
+ (user_data);
+ itc->nb_insts++; // Number of instructions
+ // Filled by Component
+ uint32_t pc = record->GetAs<uint32_t>(event_class,
+ itc->fields["PC"].index);
+ uint32_t size = record->Get<uint32_t>(event_class,
+ itc->fields["SIZE"].index);
+ // Save PC stats. If not already present in the map, a counter with
+ // value 0 will be created before incrementing.
+ InstStat& is = itc->stats[pc];
+ is.cnt++;
+ is.size = size;
+ };
+};
+
+#endif // _COVERAGE_TOOL_COVERAGE_PLUGIN_TRACE_SOURCES_H_
diff --git a/coverage-tool/coverage-reporting/branch_coverage.sh b/coverage-tool/coverage-reporting/branch_coverage.sh
new file mode 100755
index 0000000..3dc88f3
--- /dev/null
+++ b/coverage-tool/coverage-reporting/branch_coverage.sh
@@ -0,0 +1,140 @@
+#!/usr/bin/env bash
+
+##############################################################################
+# Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+##############################################################################
+
+#==============================================================================
+# FILE: branch_coverage.sh
+#
+# DESCRIPTION: Generates the intermediate layer json file and then the
+# code coverage HTML reports using the LCOV open-source tool
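+#
+# The --config json is the same file consumed by intermediate_layer.py and
+# clone_sources.py. A rough, illustrative shape (showing only the keys these
+# scripts read):
+#
+#   {
+#     "configuration": { "remove_workspace": true },
+#     "parameters": {
+#       "workspace": "/path/used/at/build/time",
+#       "output_file": "output_file.json",
+#       "sources": [ ... ]
+#     },
+#     "elfs": [ { "name": "/path/to/image.elf",
+#                 "traces": [ "covtrace-*.log" ] } ]
+#   }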
+#==============================================================================
+
+set +x
+set -e
+
+ERROR_FILE=coverage_error.log
+
+###############################################################################
+# Prints error message to STDERR and log file.
+# Globals:
+# ERROR_FILE
+# Arguments:
+# None
+# Outputs:
+# Writes error to STDERR and log file with a timestamp
+###############################################################################
+err() {
+ echo "[$(date +'%Y-%m-%dT%H:%M:%S%z')]: $*" | tee -a ${ERROR_FILE} 1>&2
+}
+
+touch ${ERROR_FILE}
+if ! [ -x "$(command -v lcov)" ]; then
+ err 'Error: lcov is not installed. Install it with: "sudo apt install lcov".'
+ exit 1
+fi
+
+###############################################################################
+# Prints script usage.
+# Arguments:
+# None
+# Outputs:
+# Writes usage to stdout
+###############################################################################
+usage()
+{
+ # print the usage information
+ printf "Usage: $(basename $0) [options]\n"
+ printf "\t params:\n"
+ printf "\t --config Configuration json file. Required.\n"
+ printf "\t --workspace Local workspace folder where source codes reside. \
+ Required.\n"
+ printf "\t --json-path Intermediate json file name. Optional, defaults to \
+ 'output_file.json'\n"
+ printf "\t --outdir Report folder. Optional, defaults to 'out'\n"
+ printf "\t -h|--help Display usage\n"
+ printf "Example of usage:\n"
+ printf "./branch_coverage.sh --config config_file.json \
+ --workspace /server_side/source/ --outdir html_report\n"
+ exit 1
+}
+
+# default values
+JSON_PATH=output_file.json
+OUTDIR=out
+
+###############################################################################
+# Parse arguments.
+# Globals:
+# CONFIG_JSON
+# LOCAL_WORKSPACE
+# JSON_PATH
+# OUTDIR
+# Arguments:
+# Command line arguments
+# Outputs:
+# Writes usage to stdout
+###############################################################################
+parse_arguments()
+{
+ while [ $# -gt 1 ]
+ do
+ key="$1"
+ case $key in
+ --config)
+ CONFIG_JSON="$2"
+ shift
+ ;;
+ --workspace)
+ LOCAL_WORKSPACE="$2"
+ shift
+ ;;
+ --json-path)
+ JSON_PATH="$2"
+ shift
+ ;;
+ --outdir)
+ OUTDIR="$2"
+ shift
+ ;;
+ -h|--help)
+ usage
+ ;;
+ *)
+ printf "Unknown argument $key\n"
+ usage
+ ;;
+ esac
+ shift
+ done
+}
+
+
+parse_arguments "$@"
+
+if [ -z "$LOCAL_WORKSPACE" ] || [ -z "$CONFIG_JSON" ]; then
+ usage
+fi
+
+if [ ! -d "$LOCAL_WORKSPACE" ]; then
+ err "$LOCAL_WORKSPACE doesn't exist\n"
+ exit 1
+fi
+
+if [ ! -f "$CONFIG_JSON" ]; then
+ err "$CONFIG_JSON doesn't exist\n"
+ exit 1
+fi
+
+clear
+echo "Generating intermediate layer file '$JSON_PATH'..."
+python3 intermediate_layer.py --config-json "$CONFIG_JSON" --local-workspace $LOCAL_WORKSPACE
+echo "Converting intermediate layer file to info file..."
+python3 generate_info_file.py --workspace $LOCAL_WORKSPACE --json $JSON_PATH
+echo "Generating LCOV report at '$OUTDIR'..."
+genhtml --branch-coverage coverage.info --output-directory $OUTDIR
+mv coverage.info $OUTDIR/coverage.info
+mv error_log.txt $OUTDIR/error_log.txt
diff --git a/coverage-tool/coverage-reporting/clone_sources.py b/coverage-tool/coverage-reporting/clone_sources.py
new file mode 100644
index 0000000..fb1807d
--- /dev/null
+++ b/coverage-tool/coverage-reporting/clone_sources.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python3
+###############################################################################
+# Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+###############################################################################
+
+###############################################################################
+# FILE: clone_sources.py
+#
+# DESCRIPTION: Clone the source files for code coverage
+###############################################################################
+
+import os
+import subprocess
+import json
+import time
+from random import random
+
+
+def call_cmd(cmd, print_cmd=False):
+ """
+ Function that executes an os command and returns its output
+
+ :param cmd: OS command as string
+ :param print_cmd: Optional argument to print the command in stdout
+ :return: The string output of the os command
+ """
+ if print_cmd:
+ print("+" + cmd)
+ out = subprocess.check_output(cmd, shell=True)
+ return out
+
+
+def skip_source(output_dir, source, handler=None):
+ """
+ Function that handles overwriting source files
+
+ :param output_dir: Folder where to put the source files and folders
+ :param source: Dictionary with the information for the source
+ :param handler: Optional function called when the location already exists
+ :return: True if cloning the given source must be skipped, False otherwise
+ """
+ location = os.path.join(output_dir, source['LOCATION'])
+ # Check if exists and have files
+ if os.path.isdir(location):
+ if not os.listdir(location):
+ if handler is not None:
+ return handler(source, "Directory exists and is empty")
+ else:
+ # By default send a warning and overwrite it
+ print(("WARNING!: Directory {} already exists and is "
+ "empty. Overwriting it...'").format(location))
+ os.rmdir(location)
+ return False
+ commit_id = call_cmd(("cd {} && git log -1 2>/dev/null | "
+ "grep commit | awk '{{print $2}}'").format(
+ location), print_cmd=True).strip()
+ if source['type'] == "git":
+ if commit_id == "":
+ # is not a git
+ if handler is not None:
+ return handler(source, "Directory exists and is not git")
+ else:
+ print(("WARNING!: Directory {} already exists and is not a"
+ " git repo: '{}'").format(location, source['URL']))
+ elif commit_id != source["COMMIT"].strip():
+ # there are mismatching commit id's
+ if handler is not None:
+ return handler(source, "Mismatch in gits")
+ else:
+ print(("WARNING!: Mismatch in git repo {}\nExpected {}, "
+ "Cloned {}").format(source['URL'], source['COMMIT'],
+ commit_id))
+ elif source['type'] == "http":
+ if handler is not None:
+ return handler(source,
+ "WARNING!: Directory already exists")
+ else:
+ print("WARNING!: Directory {} already exists".format(
+ location))
+ return True
+ return False
+
+
+class CloneSources(object):
+ """Class used to clone the source code needed to produce code coverage
+ reports.
+ """
+ def __init__(self, json_file):
+ self.json_file = json_file
+ self.json_data = None
+ self.load_json()
+
+ def load_json(self):
+ with open(self.json_file, "r") as json_file:
+ self.json_data = json.load(json_file)
+
+ def clone_repo(self, output_dir, overwrite_handler=None):
+ """
+ Clones or reproduces a folder with source code based on the
+ configuration in the json file
+
+ :param output_dir: Where to put the source files
+ :param overwrite_handler: Optional function to handle overwrites
+ """
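+ # An illustrative "sources" entry, using only the keys read below:
+ # { "type": "git", "URL": "https://...", "COMMIT": "<sha>",
+ # "REFSPEC": "", "LOCATION": "trusted-firmware-a" }
+ # "http" entries use URL, LOCATION and COMPRESSION instead.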
+ if self.json_data is None:
+ self.load_json()
+ sources = []
+ try:
+ if 'parameters' in self.json_data:
+ sources = self.json_data['parameters']['sources']
+ elif 'configuration' in self.json_data:
+ sources = self.json_data['configuration']['sources']
+ else:
+ raise Exception("No correct format for json sources!")
+ except Exception as ex:
+ raise Exception(ex)
+
+ for source in sources:
+ if skip_source(output_dir, source, overwrite_handler):
+ continue
+ if source['type'] == "git":
+ git = source
+ url = git["URL"]
+ commit_id = git["COMMIT"]
+ output_loc = os.path.join(output_dir, git["LOCATION"])
+ cmd = "git clone {} {}".format(url, output_loc)
+ output = call_cmd(cmd)
+ if git['REFSPEC']:
+ call_cmd("cd {};git fetch -q origin {}".format(
+ output_loc, git['REFSPEC']))
+ if commit_id:
+ call_cmd("cd {};git checkout -q {}".format(
+ output_loc, commit_id))
+ else:
+ call_cmd("cd {};git checkout -q FETCH_HEAD".format(
+ output_loc))
+ elif source['type'] == 'http':
+ site = source
+ output_loc = os.path.join(output_dir, site["LOCATION"])
+ tmp_folder = os.path.join(output_dir,
+ "_tmp_{}_{}".format(time.time(),
+ random()))
+ call_cmd("mkdir -p {}".format(tmp_folder))
+ call_cmd("wget -q {} -P {}".format(
+ site['URL'], tmp_folder))
+ call_cmd("mkdir -p {}".format(output_loc))
+ if site['COMPRESSION'] == "xz":
+ call_cmd("cd {};tar -xzf $(basename {}) -C {}".format(
+ tmp_folder, site['URL'], output_loc))
+ call_cmd("rm -rf {}".format(tmp_folder))
diff --git a/coverage-tool/coverage-reporting/generate_info_file.py b/coverage-tool/coverage-reporting/generate_info_file.py
new file mode 100755
index 0000000..0c0f39a
--- /dev/null
+++ b/coverage-tool/coverage-reporting/generate_info_file.py
@@ -0,0 +1,410 @@
+#!/usr/bin/env python3
+##############################################################################
+# Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+##############################################################################
+
+import os
+import sys
+import json
+import re
+import argparse
+
+
+def function_coverage(function_tuples, info_file):
+ """
+ Parses function coverage information from the intermediate json file
+ and writes it to the info file
+
+ :param function_tuples: List of tuples with function name
+ and its data as pairs.
+ :param info_file: File handler for writing the coverage info
+ """
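+ # Records written (lcov info format): FN:<line>,<name>, FNDA:<hits>,<name>,
+ # FNF:<functions found> and FNH:<functions hit>.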
+ total_func = 0
+ covered_func = 0
+ function_names = []
+ function_cov = []
+ for func_name, func_data in function_tuples:
+ function_names.append(
+ 'FN:{},{}\n'.format(
+ func_data["line_number"],
+ func_name))
+ total_func += 1
+ if func_data["covered"]:
+ covered_func += 1
+ function_cov.append('FNDA:1,{}\n'.format(func_name))
+ else:
+ function_cov.append('FNDA:0,{}\n'.format(func_name))
+ info_file.write("\n".join(function_names))
+ info_file.write("\n".join(function_cov))
+ info_file.write('FNF:{}\n'.format(total_func))
+ info_file.write('FNH:{}\n'.format(covered_func))
+
+
+def line_coverage(lines_dict, info_file):
+ """
+ Parses line coverage information from the intermediate json file
+ and writes it to the info file
+
+ :param lines_dict: Dictionary of lines with line number as key
+ and its data as value
+ :param info_file: File handler for writing the coverage info
+ """
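+ # Records written (lcov info format): DA:<line>,<1|0>, LF:<lines found>
+ # and LH:<lines hit>.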
+ total_lines = 0
+ covered_lines = 0
+ for line in lines_dict:
+ total_lines += 1
+ if lines_dict[line]['covered']:
+ covered_lines += 1
+ info_file.write('DA:' + line + ',1\n')
+ else:
+ info_file.write('DA:' + line + ',0\n')
+ info_file.write('LF:' + str(total_lines) + '\n')
+ info_file.write('LH:' + str(covered_lines) + '\n')
+
+
+def sanity_check(branch_line, lines_dict, abs_path_file):
+ """
+ Check if the 'branch_line' line of the C source corresponds to actual
+ branching instructions in the assembly code. Also, check if that
+ line is covered. If it's not covered, this branching statement can
+ be omitted from the report.
+ Returns False if the line is not present in the coverage data; logs a
+ warning to the error log if no branching instruction is found for it.
+
+ :param branch_line: Source code line with the branch instruction
+ :param lines_dict: Dictionary of lines with line number as key
+ and its data as value
+ :param abs_path_file: File name of the source file
+ """
+ if str(branch_line) not in lines_dict:
+ return False
+ found_branching = False
+ for i in lines_dict[str(branch_line)]['elf_index']:
+ for j in lines_dict[str(branch_line)]['elf_index'][i]:
+ string = lines_dict[str(branch_line)]['elf_index'][i][j][0]
+ # these cover all the possible branching instructions
+ if ('\tb' in string or
+ '\tcbnz' in string or
+ '\tcbz' in string or
+ '\ttbnz' in string or
+ '\ttbz' in string):
+ # '\tbl' in string or # already covered by '\tb'
+ # '\tblr' in string or # already covered by '\tb'
+ # '\tbr' in string or # already covered by '\tb'
+ found_branching = True
+ if not found_branching:
+ error_log.write(
+ '\nSomething possibly wrong:\n\tFile ' +
+ abs_path_file +
+ ', line ' +
+ str(branch_line) +
+ '\n\tshould be a branching statement but couldn\'t ' +
+ 'find correspondence in assembly code')
+ return True
+
+
+def manage_if_branching(branch_line, lines_dict, info_file, abs_path_file):
+ """
+ Takes care of branch coverage. branch_line is the source code line
+ in which the 'if' statement is located; the function produces branch
+ coverage info based on the C source code and json file content.
+
+ :param branch_line: Source code line with the 'if' instruction
+ :param lines_dict: Dictionary of lines with line number as key
+ and its data as value
+ :param info_file: File handler for writing the coverage info
+ :param abs_path_file: File name of the source file
+ """
+ total_branch_local = 0
+ covered_branch_local = 0
+
+ if not sanity_check(branch_line, lines_dict, abs_path_file):
+ return(total_branch_local, covered_branch_local)
+ total_branch_local += 2
+ current_line = branch_line # used to read lines one by one
+ # check for multiline if-condition and update current_line accordingly
+ parenthesis_count = 0
+ while True:
+ end_of_condition = False
+ for char in lines[current_line]:
+ if char == ')':
+ parenthesis_count -= 1
+ if parenthesis_count == 0:
+ end_of_condition = True
+ elif char == '(':
+ parenthesis_count += 1
+ if end_of_condition:
+ break
+ current_line += 1
+ # first branch
+ # simple case: 'if' statements with no braces
+ if '{' not in lines[current_line] and '{' not in lines[current_line + 1]:
+
+ if (str(current_line + 1) in lines_dict and
+ lines_dict[str(current_line + 1)]['covered']):
+ info_file.write('BRDA:' + str(branch_line) + ',0,' + '0,' + '1\n')
+ covered_branch_local += 1
+ else:
+ info_file.write('BRDA:' + str(branch_line) + ',0,' + '0,' + '0\n')
+ current_line += 1
+
+ # more complex case: '{' after the 'if' statement
+ else:
+ if '{' in lines[current_line]:
+ current_line += 1
+ else:
+ current_line += 2
+
+ # we need to check whether at least one line in the block is covered
+ found_covered_line = False
+
+ # this is a simpler version of a stack used to check when a code block
+ # ends; at the moment it just checks for '{' and '}' and doesn't take
+ # into account the presence of commented braces
+ brace_counter = 1
+ while True:
+ end_of_block = False
+ for char in lines[current_line]:
+ if char == '}':
+ brace_counter -= 1
+ if brace_counter == 0:
+ end_of_block = True
+ elif char == '{':
+ brace_counter += 1
+ if end_of_block:
+ break
+ if (str(current_line) in lines_dict and
+ lines_dict[str(current_line)]['covered']):
+ found_covered_line = True
+
+ current_line += 1
+
+ if found_covered_line:
+ info_file.write('BRDA:' + str(branch_line) + ',0,' + '0,' + '1\n')
+ covered_branch_local += 1
+ else:
+ info_file.write('BRDA:' + str(branch_line) + ',0,' + '0,' + '0\n')
+
+ # second branch (if present). If not present, second branch is covered by
+ # default
+ current_line -= 1
+ candidate_else_line = current_line
+ while 'else' not in lines[current_line] and candidate_else_line + \
+ 2 >= current_line:
+ current_line += 1
+ if current_line == len(lines):
+ break
+
+ # no 'else': branch covered by default
+ if current_line == candidate_else_line + 3:
+ info_file.write('BRDA:' + str(branch_line) + ',0,' + '1,' + '1\n')
+ covered_branch_local += 1
+ return(total_branch_local, covered_branch_local)
+
+ # 'else' found: check if opening braces are present
+ if '{' not in lines[current_line - 1] and '{' not in lines[current_line]:
+ if str(current_line + 1) in lines_dict:
+ if lines_dict[str(current_line + 1)]['covered']:
+ info_file.write(
+ 'BRDA:' +
+ str(branch_line) +
+ ',0,' +
+ '1,' +
+ '1\n')
+ covered_branch_local += 1
+ else:
+ info_file.write(
+ 'BRDA:' +
+ str(branch_line) +
+ ',0,' +
+ '1,' +
+ '0\n')
+ else:
+ info_file.write('BRDA:' + str(branch_line) + ',0,' + '1,' + '0\n')
+
+ else:
+ if '{' in lines[current_line]:
+ current_line += 1
+ else:
+ current_line += 2
+ found_covered_line = False
+ while '}' not in lines[current_line]:
+ if (str(current_line) in lines_dict and
+ lines_dict[str(current_line)]['covered']):
+ found_covered_line = True
+ break
+ current_line += 1
+ if found_covered_line:
+ info_file.write('BRDA:' + str(branch_line) + ',0,' + '1,' + '1\n')
+ covered_branch_local += 1
+ else:
+ info_file.write('BRDA:' + str(branch_line) + ',0,' + '1,' + '0\n')
+
+ return(total_branch_local, covered_branch_local)
+
+
+def manage_switch_branching(switch_line, lines_dict, info_file, abs_path_file):
+ """
+ Takes care of branch coverage. switch_line is the source code line
+ in which the 'switch' statement is located; the function produces branch
+ coverage info based on the C source code and json file content.
+
+ :param switch_line: Source code line with the 'switch' instruction
+ :param lines_dict: Dictionary of lines with line number as key
+ and its data as value
+ :param info_file: File handler for writing the coverage info
+ :param abs_path_file: File name of the source file
+ """
+
+ total_branch_local = 0
+ covered_branch_local = 0
+
+ if not sanity_check(switch_line, lines_dict, abs_path_file):
+ return(total_branch_local, covered_branch_local)
+
+ current_line = switch_line # used to read lines one by one
+ branch_counter = 0 # used to count the number of switch branches
+ brace_counter = 0
+
+ # parse the switch-case line by line, checking if every 'case' is covered
+ # the switch-case ends with a '}'
+ while True:
+ if '{' in lines[current_line]:
+ brace_counter += 1
+ if '}' in lines[current_line]:
+ brace_counter -= 1
+ if brace_counter == 0:
+ return(total_branch_local, covered_branch_local)
+ if 'case' in lines[current_line] or 'default' in lines[current_line]:
+ covered = False
+ total_branch_local += 1
+ inner_brace = 0
+ current_line += 1
+ while (('case' not in lines[current_line]
+ and 'default' not in lines[current_line]) or
+ inner_brace > 0):
+ if (str(current_line) in lines_dict and
+ lines_dict[str(current_line)]['covered']):
+ covered = True
+ if '{' in lines[current_line]:
+ inner_brace += 1
+ brace_counter += 1
+ if '}' in lines[current_line]:
+ inner_brace -= 1
+ brace_counter -= 1
+ if brace_counter == 0:
+ break
+ current_line += 1
+ if covered:
+ info_file.write(
+ 'BRDA:' +
+ str(switch_line) +
+ ',0,' +
+ str(branch_counter) +
+ ',1\n')
+ covered_branch_local += 1
+ else:
+ info_file.write(
+ 'BRDA:' +
+ str(switch_line) +
+ ',0,' +
+ str(branch_counter) +
+ ',0\n')
+ if brace_counter == 0:
+ return(total_branch_local, covered_branch_local)
+ branch_counter += 1
+ else:
+ current_line += 1
+
+ return(total_branch_local, covered_branch_local)
+
+
+def branch_coverage(abs_path_file, info_file, lines_dict):
+ """
+ Produces branch coverage information, using the functions
+ 'manage_if_branching' and 'manage_switch_branching'
+
+ :param abs_path_file: File name of the source file
+ :param info_file: File handler for writing the coverage info
+ :param lines_dict: Dictionary of lines with line number as key
+ and its data as value
+ """
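+ # Records written (lcov info format): BRDA:<line>,<block>,<branch>,<taken>,
+ # BRF:<branches found> and BRH:<branches hit>.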
+ total_branch = 0
+ covered_branch = 0
+
+ # branch coverage: if statements
+ branching_lines = []
+
+ # regex: find all the lines starting with 'if' or 'else if'
+ # (possibly preceded by whitespaces/tabs)
+ pattern = re.compile(r"^\s+if|^\s+} else if|^\s+else if")
+ for i, line in enumerate(open(abs_path_file)):
+ for match in re.finditer(pattern, line):
+ branching_lines.append(i + 1)
+ while branching_lines:
+ t = manage_if_branching(branching_lines.pop(0), lines_dict,
+ info_file, abs_path_file)
+ total_branch += t[0]
+ covered_branch += t[1]
+
+ # branch coverage: switch statements
+ switch_lines = []
+
+ # regex: find all the lines starting with 'switch'
+ # (possibly preceded by whitespaces/tabs)
+ pattern = re.compile(r"^\s+switch")
+ for i, line in enumerate(open(abs_path_file)):
+ for match in re.finditer(pattern, line):
+ switch_lines.append(i + 1)
+ while switch_lines:
+ t = manage_switch_branching(switch_lines.pop(0), lines_dict,
+ info_file, abs_path_file)
+ total_branch += t[0]
+ covered_branch += t[1]
+
+ info_file.write('BRF:' + str(total_branch) + '\n')
+ info_file.write('BRH:' + str(covered_branch) + '\n')
+
+
+parser = argparse.ArgumentParser(
+ description="Script to convert intermediate json file to LCOV info file")
+parser.add_argument('--workspace', metavar='PATH',
+ help='Folder with source files structure',
+ required=True)
+parser.add_argument('--json', metavar='PATH',
+ help='Intermediate json file name',
+ required=True)
+parser.add_argument('--info', metavar='PATH',
+ help='Output info file name',
+ default="coverage.info")
+args = parser.parse_args()
+with open(args.json) as json_file:
+ json_data = json.load(json_file)
+info_file = open(args.info, "w+")
+error_log = open("error_log.txt", "w+")
+file_list = json_data['source_files'].keys()
+
+for relative_path in file_list:
+ abs_path_file = os.path.join(args.workspace, relative_path)
+ if not os.path.exists(abs_path_file):
+ continue
+ source = open(abs_path_file)
+ lines = source.readlines()
+ info_file.write('TN:\n')
+ info_file.write('SF:' + os.path.abspath(abs_path_file) + '\n')
+ lines = [-1] + lines # shifting the lines indexes to the right
+ function_coverage(
+ json_data['source_files'][relative_path]['functions'].items(),
+ info_file)
+ branch_coverage(abs_path_file, info_file,
+ json_data['source_files'][relative_path]['lines'])
+ line_coverage(json_data['source_files'][relative_path]['lines'],
+ info_file)
+ info_file.write('end_of_record\n\n')
+ source.close()
+
+json_file.close()
+info_file.close()
+error_log.close()
diff --git a/coverage-tool/coverage-reporting/intermediate_layer.py b/coverage-tool/coverage-reporting/intermediate_layer.py
new file mode 100644
index 0000000..794c7a4
--- /dev/null
+++ b/coverage-tool/coverage-reporting/intermediate_layer.py
@@ -0,0 +1,647 @@
+#!/usr/bin/env python3
+###############################################################################
+# Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+###############################################################################
+
+###############################################################################
+# FILE: intermediate_layer.py
+#
+# DESCRIPTION: Creates an intermediate json file with information provided
+# by the configuration json file, dwarf signatures and trace
+# files.
+#
+###############################################################################
+
+import os
+import re
+import glob
+import argparse
+import subprocess
+import json
+from argparse import RawTextHelpFormatter
+import logging
+import time
+
+__version__ = "6.0"
+
+# Static map that defines the elf file source type in the intermediate json
+ELF_MAP = {
+ "bl1": 0,
+ "bl2": 1,
+ "bl31": 2,
+ "bl32": 3,
+ "scp_ram": 10,
+ "scp_rom": 11,
+ "mcp_rom": 12,
+ "mcp_ram": 13,
+ "custom_offset": 100
+}
+
+
+def os_command(command, show_command=False):
+ """
+ Function that executes an os command; raises an exception on failure
+
+ :param command: OS command as string
+ :param show_command: Optional argument to print the command in stdout
+ :return: The string output of the os command
+ """
+ out = ""
+ try:
+ if show_command:
+ print("OS command: {}".format(command))
+ out = subprocess.check_output(
+ command, stderr=subprocess.STDOUT, shell=True)
+ except subprocess.CalledProcessError as ex:
+ raise Exception(
+ "Exception running command '{}': {}({})".format(
+ command, ex.output, ex.returncode))
+ return out.decode("utf8")
+
+
+def load_stats_from_traces(trace_globs):
+ """
+ Function to process and consolidate statistics from trace files
+
+ :param trace_globs: List of trace file patterns
+ :return: Dictionary with stats from trace files i.e.
+ {mem address in decimal}=(times executed, inst size)
+ """
+ stats = {}
+ stat_size = {}
+
+ # Make a list of unique trace files
+ trace_files = []
+ for tg in trace_globs:
+ trace_files.extend(glob.glob(tg))
+ trace_files = set(trace_files)
+
+ if not trace_files:
+ raise Exception("No trace files found for '{}'".format(trace_globs))
+ # Load stats from the trace files
+ for trace_file in trace_files:
+ try:
+ with open(trace_file, 'r') as f:
+ for line in f:
+ data = line.split()
+ address = int(data[0], 16)
+ stat = int(data[1])
+ size = int(data[2])
+ stat_size[address] = size
+ if address in stats:
+ stats[address] += stat
+ else:
+ stats[address] = stat
+ except Exception as ex:
+ logger.error("@Loading stats from trace files:{}".format(ex))
+ # Merge the two dicts
+ for address in stats:
+ stats[address] = (stats[address], stat_size[address])
+ return stats
+
+
+def get_code_sections_for_binary(elf_name):
+ """
+ Function to return the memory address ranges of the code sections
+ in the elf file
+
+ :param elf_name: Elf binary file name
+ :return: List of code sections tuples, i.e. (section type, initial
+ address, end address)
+ """
+ command = """%s -h %s | grep -B 1 CODE | grep -v CODE \
+ | awk '{print $2" "$4" "$3}'""" % (OBJDUMP, elf_name)
+ text_out = os_command(command)
+ sections = text_out.split('\n')
+ sections.pop()
+ secs = []
+ for sec in sections:
+ try:
+ d = sec.split()
+ secs.append((d[0], int(d[1], 16), int(d[2], 16)))
+ except Exception as ex:
+ logger.error(
+ "@Returning memory address code sections: {}".format(ex))
+ return secs
+
+
+def get_executable_ranges_for_binary(elf_name):
+ """
+ Get executable code ranges from an elf file
+
+ :param elf_name: Elf binary file name
+ :return: List of tuples for ranges i.e. (range start, range end)
+ """
+ # Parse all $x / $d symbols
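+ # ($x/$a/$t are ELF mapping symbols marking A64/A32/T32 code, $d marks
+ # data; the ranges between them tell executable from non-executable bytes)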
+ symbol_table = []
+ command = r"""%s -s %s | awk '/\$[xatd]/ {print $2" "$8}'""" % (
+ READELF, elf_name)
+ text_out = os_command(command)
+ lines = text_out.split('\n')
+ lines.pop()
+ for line in lines:
+ try:
+ data = line.split()
+ address = int(data[0], 16)
+ _type = 'X' if data[1] in ['$x', '$t', '$a'] else 'D'
+ except Exception as ex:
+ logger.error("@Getting executable ranges: {}".format(ex))
+ symbol_table.append((address, _type))
+
+ # Add markers for end of code sections
+ sections = get_code_sections_for_binary(elf_name)
+ for sec in sections:
+ symbol_table.append((sec[1] + sec[2], 'S'))
+
+ # Sort by address
+ symbol_table = sorted(symbol_table, key=lambda tup: tup[0])
+
+ # Create ranges (list of START/END tuples)
+ ranges = []
+ range_start = symbol_table[0][0]
+ rtype = symbol_table[0][1]
+ for sym in symbol_table:
+ if sym[1] != rtype:
+ if rtype == 'X':
+ # Subtract one because the first address of the
+ # next range belongs to the next range.
+ ranges.append((range_start, sym[0] - 1))
+ range_start = sym[0]
+ rtype = sym[1]
+ return ranges
+
+
+def list_of_functions_for_binary(elf_name):
+ """
+ Get a dictionary of the functions in the elf file
+
+ :param elf_name: Elf binary file name
+ :return: A dictionary, keyed by function name, with start address, end
+ address and whether dwarf sources were found for each function
+ """
+ _functions = {}
+ command = "%s -t %s | awk 'NR>4' | sed /^$/d" % (OBJDUMP, elf_name)
+ symbols_output = os_command(command)
+ rex = r'([0-9a-fA-F]+) (.{7}) ([^ ]+)[ \t]([0-9a-fA-F]+) (.*)'
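+ # Matches objdump -t symbol table lines, illustratively (hypothetical
+ # symbol name):
+ # 0000000000000800 g     F .text  0000000000000054 bl1_main
+ # i.e. address, 7-char flags field, section, size, symbol name.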
+ symbols = symbols_output.split('\n')[:-1]
+ for sym in symbols:
+ try:
+ symbol_details = re.findall(rex, sym)
+ symbol_details = symbol_details[0]
+ if 'F' not in symbol_details[1]:
+ continue
+ function_name = symbol_details[4]
+ # We don't want the .hidden for hidden functions
+ if function_name.startswith('.hidden '):
+ function_name = function_name[len('.hidden '):]
+ if function_name not in _functions:
+ _functions[function_name] = {'start': symbol_details[0],
+ 'end': symbol_details[3],
+ 'sources': False}
+ else:
+ logger.warning("'{}' duplicated in '{}'".format(
+ function_name,
+ elf_name))
+ except Exception as ex:
+ logger.error("@Listing functions at file {}: {}".format(
+ elf_name,
+ ex))
+ return _functions
+
+
+def apply_functions_exclude(elf_config, functions):
+ """
+ Remove excluded functions from the list of functions
+
+ :param elf_config: Config for elf binary file
+ :param functions: Array of functions in the binary elf file
+ :return: Tuple with included and excluded functions
+ """
+ if 'exclude_functions' not in elf_config:
+ return functions, []
+ incl = {}
+ excl = {}
+ for fname in functions:
+ exclude = False
+ for rex in elf_config['exclude_functions']:
+ if re.match(rex, fname):
+ exclude = True
+ excl[fname] = functions[fname]
+ break
+ if not exclude:
+ incl[fname] = functions[fname]
+ return incl, excl
+
+
+def remove_workspace(path, workspace):
+ """
+ Get the relative path to a given workspace
+
+ :param path: Path relative to the workspace to be returned
+ :param workspace: Base workspace path to strip from 'path'
+ """
+ ret = path if workspace is None else os.path.relpath(path, workspace)
+ # print("{} => {}".format(path, ret))
+ return ret
+
+
+def get_function_line_numbers(source_file):
+ """
+ Using ctags get all the function names with their line numbers
+ within the source_file
+
+ :return: Dictionary with function name as key and line number as value
+ """
+ function_lines = os_command(
+ "ctags -x --c-kinds=f {}".format(source_file)).split("\n")
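+ # "ctags -x" emits one cross-reference line per tag, roughly
+ # "<name> <kind> <line> <file> <source text>", which is what the column
+ # checks below rely on.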
+ fln = {}
+ try:
+ for line in function_lines:
+ cols = line.split()
+ if len(cols) < 3:
+ continue
+ if cols[1] == "function":
+ fln[cols[0]] = int(cols[2])
+ elif cols[1] == "label" and cols[0] == "func":
+ fln[cols[-1]] = int(cols[2])
+ except BaseException:
+ logger.warning("Warning: Can't get all function line numbers from %s" %
+ source_file)
+ return fln
+
+
+class FunctionLineNumbers(object):
+
+ def __init__(self, workspace):
+ self.filenames = {}
+ self.workspace = workspace
+
+ def get_line_number(self, filename, function_name):
+ if not FUNCTION_LINES_ENABLED:
+ return 0
+ if filename not in self.filenames:
+ newp = os.path.join(self.workspace, filename)
+ self.filenames[filename] = get_function_line_numbers(newp)
+ return 0 if function_name not in self.filenames[filename] else \
+ self.filenames[filename][function_name]
+
+
+class PostProcessCC(object):
+ """Class used to process the trace data along with the dwarf
+ signature files to produce an intermediate layer in json with
+ code coverage in assembly and c source code.
+ """
+
+ def __init__(self, _config, local_workspace):
+ self._data = {}
+ self.config = _config
+ self.local_workspace = local_workspace
+ self.elfs = self.config['elfs']
+ # Dictionary with stats from trace files {address}=(times executed,
+ # inst size)
+ self.traces_stats = {}
+ # Dictionary of unique assembly line memory address against source
+ # file location
+ # {assembly address} = (opcode, source file location, line number in
+ # the source file, times executed)
+ self.asm_lines = {}
+ # Dictionary of {source file location}=>{'lines': {'covered':Boolean,
+ # 'elf_index'; {elf index}=>{assembly address}=>(opcode,
+ # times executed),
+ # 'functions': {function name}=>is covered(boolean)}
+ self.source_files_coverage = {}
+ self.functions = []
+ # Unique set of elf list of files
+ self.elf_map = {}
+ # For elf custom mappings
+ self.elf_custom = None
+
+ def process(self):
+ """
+ Public method to process the trace files and dwarf signatures
+ using the information contained in the json configuration file.
+ This method writes the intermediate json file output linking
+ the trace data and c source and assembly code.
+ """
+ self.source_files_coverage = {}
+ self.asm_lines = {}
+ # Initialize for unknown elf files
+ self.elf_custom = ELF_MAP["custom_offset"]
+ sources_config = {}
+ print("Generating intermediate json layer '{}'...".format(
+ self.config['parameters']['output_file']))
+ for elf in self.elfs:
+ # Gather information
+ elf_name = elf['name']
+ os_command("ls {}".format(elf_name))
+ # Trace data
+ self.traces_stats = load_stats_from_traces(elf['traces'])
+ prefix = self.config['parameters']['workspace'] \
+ if self.config['configuration']['remove_workspace'] else \
+ None
+ functions_list = list_of_functions_for_binary(elf_name)
+ (functions_list, excluded_functions) = apply_functions_exclude(
+ elf, functions_list)
+ # Produce code coverage
+ self.dump_sources(elf_name, functions_list, prefix)
+ sources_config = self.config['parameters']['sources']
+ # Now check code coverage in the functions with no dwarf signature
+ # (sources)
+ nf = {f: functions_list[f] for f in
+ functions_list if not
+ functions_list[f]["sources"]}
+ self.process_fn_no_sources(nf)
+ # Write to the intermediate json file
+ data = {"source_files": self.source_files_coverage,
+ "configuration": {
+ "sources": sources_config,
+ "metadata": "" if 'metadata' not in
+ self.config['parameters'] else
+ self.config['parameters']['metadata'],
+ "elf_map": self.elf_map
+ }
+ }
+ json_data = json.dumps(data, indent=4, sort_keys=True)
+ with open(self.config['parameters']['output_file'], "w") as f:
+ f.write(json_data)
+
+ def dump_sources(self, elf_filename, function_list, prefix=None):
+ """
+ Process an elf file i.e. match the source and asm lines against trace
+ files (coverage).
+
+ :param elf_filename: Elf binary file name
+ :param function_list: Dictionary of functions in the elf file, keyed by
+ function name
+ :param prefix: Optional path name to be removed at the start of source
+ file locations
+ """
+ command = "%s -Sl %s" % (OBJDUMP, elf_filename)
+ dump = os_command(command)
+ dump += "\n" # For pattern matching the last \n
+ elf_name = os.path.splitext(os.path.basename(elf_filename))[0]
+ # Object that handles the function line numbers in
+ # their filename
+ function_line_numbers = FunctionLineNumbers(self.local_workspace)
+ # To map the elf filename against an index
+ if elf_name not in self.elf_map:
+ if elf_name in ELF_MAP:
+ self.elf_map[elf_name] = ELF_MAP[elf_name]
+ else:
+ self.elf_map[elf_name] = self.elf_custom
+ self.elf_custom += 1
+ elf_index = self.elf_map[elf_name]
+ # The function groups have 2 elements:
+ # Function's block name, Function's block code
+ function_groups = re.findall(
+ r"(?s)[0-9a-fA-F]+ <([a-zA-Z0-9_]+)>:\n(.+?)(?:\r*\n\n|\n$)",
+ dump, re.DOTALL | re.MULTILINE)
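+ # Each function block starts with a header such as (illustrative):
+ # 0000000000000800 <bl1_main>:
+ # followed by the interleaved source and disassembly lines of objdump -Sl.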
+ # Pointer to files dictionary
+ source_files = self.source_files_coverage
+ for function_group in function_groups:
+ if len(function_group) != 2:
+ continue
+ block_function_name, block_code = function_group
+ block_code += "\n"
+ # Find if the function has C source code filename
+ function_signature_group = re.findall(
+ r"(?s){}\(\):\n(/.+?):[0-9]+.*(?:\r*\n\n|\n$)".format(
+ block_function_name), block_code, re.DOTALL | re.MULTILINE)
+ if not function_signature_group:
+ continue # Function does not have dwarf signature (sources)
+ function_list[block_function_name]["sources"] = True
+ block_function_source_file = remove_workspace(
+ function_signature_group[0], prefix)
+ fn_line_number = function_line_numbers.get_line_number(
+ block_function_source_file, block_function_name)
+ if block_function_source_file not in source_files:
+ source_files[block_function_source_file] = {"functions": {},
+ "lines": {}}
+ source_files[block_function_source_file]["functions"][
+ block_function_name] = {"covered": False,
+ "line_number": fn_line_number}
+ # Now let's check the block code
+ # The source code groups have 5 elements:
+ # Function for the statements (optional), Source file for the asm
+ # statements,
+ # line number for the asm statements, asm statements, lookahead
+ # (ignored)
+ source_code_groups = re.findall(SOURCE_PATTERN, block_code,
+ re.DOTALL | re.MULTILINE)
+ is_function_block_covered = False
+ # When not present the last function name applies
+ statements_function_name = block_function_name
+ for source_code_group in source_code_groups:
+ if len(source_code_group) != 5:
+ continue
+ fn_name, source_file, ln, asm_code, _ = source_code_group
+ if not fn_name:
+ # The statement belongs to the most recent function
+ fn_name = statements_function_name
+ else:
+ # Usually in the first iteration fn_name is not empty and
+ # is the name of the function block
+ statements_function_name = fn_name
+ if statements_function_name in function_list:
+ # Some of the functions within a block are not defined in
+ # the function list dump
+ function_list[statements_function_name]["sources"] = True
+ statements_source_file = remove_workspace(source_file, prefix)
+ if statements_source_file not in source_files:
+ source_files[statements_source_file] = {"functions": {},
+ "lines": {}}
+ if statements_function_name not in \
+ source_files[statements_source_file]["functions"]:
+ fn_line_number = function_line_numbers.get_line_number(
+ statements_source_file,
+ statements_function_name)
+ source_files[statements_source_file]["functions"][
+ statements_function_name] = \
+ {"covered": False, "line_number": fn_line_number}
+ if ln not in source_files[statements_source_file]["lines"]:
+ source_files[statements_source_file]["lines"][ln] = \
+ {"covered": False, "elf_index": {}}
+ source_file_ln = source_files[statements_source_file]["lines"][
+ ln]
+ asm_line_groups = re.findall(
+ r"(?s)([a-fA-F0-9]+):\t(.+?)(?:\n|$)",
+ asm_code, re.DOTALL | re.MULTILINE)
+ for asm_line in asm_line_groups:
+ if len(asm_line) != 2:
+ continue
+ hex_line_number, opcode = asm_line
+ dec_address = int(hex_line_number, 16)
+ times_executed = 0 if dec_address not in self.traces_stats \
+ else self.traces_stats[dec_address][0]
+ if times_executed > 0:
+ is_function_block_covered = True
+ source_file_ln["covered"] = True
+ source_files[statements_source_file]["functions"][
+ statements_function_name]["covered"] = True
+ if elf_index not in source_file_ln["elf_index"]:
+ source_file_ln["elf_index"][elf_index] = {}
+ if dec_address not in \
+ source_file_ln["elf_index"][elf_index]:
+ source_file_ln["elf_index"][elf_index][dec_address] = (
+ opcode, times_executed)
+ source_files[block_function_source_file]["functions"][
+ block_function_name]["covered"] |= is_function_block_covered
+
+ def process_fn_no_sources(self, function_list):
+ """
+ Checks function coverage for functions with no dwarf signature, i.e.
+ sources.
+
+ :param function_list: Dictionary of functions to be checked
+ """
+ if not FUNCTION_LINES_ENABLED:
+ return # No source code at the workspace
+ address_seq = sorted(self.traces_stats.keys())
+ for function_name in function_list:
+ # Just check if the start address is in the trace logs
+ covered = function_list[function_name]["start"] in address_seq
+ # Find the source file
+ files = os_command(("grep --include *.c --include *.s -nrw '{}' {}"
+ "| cut -d: -f1").format(function_name,
+ self.local_workspace))
+ unique_files = set(files.split())
+ sources = []
+ line_number = 0
+ for source_file in unique_files:
+ d = get_function_line_numbers(source_file)
+ if function_name in d:
+ line_number = d[function_name]
+ sources.append(source_file)
+ if len(sources) > 1:
+ logger.warning("'{}' declared in {} files:{}".format(
+ function_name, len(sources),
+ ", ".join(sources)))
+ elif len(sources) == 1:
+ source_file = remove_workspace(sources[0],
+ self.local_workspace)
+ if source_file not in self.source_files_coverage:
+ self.source_files_coverage[source_file] = {"functions": {},
+ "lines": {}}
+ if function_name not in \
+ self.source_files_coverage[source_file]["functions"] or \
+ covered:
+ self.source_files_coverage[source_file]["functions"][
+ function_name] = {"covered": covered,
+ "line_number": line_number}
+ else:
+ logger.warning("Function '{}' not found in sources.".format(
+ function_name))
+
+
+json_conf_help = """
+Produces an intermediate json layer for code coverage reporting
+using an input json configuration file.
+
+Input json configuration file format:
+{
+ "configuration":
+ {
+ "remove_workspace": <true if 'workspace' must be from removed from the
+ path of the source files>,
+ "include_assembly": <true to include assembly source code in the
+ intermediate layer>
+ },
+ "parameters":
+ {
+ "objdump": "<Path to the objdump binary to handle dwarf signatures>",
+ "readelf: "<Path to the readelf binary to handle dwarf signatures>",
+ "sources": [ <List of source code origins, one or more of the next
+ options>
+ {
+ "type": "git",
+ "URL": "<URL git repo>",
+ "COMMIT": "<Commit id>",
+ "REFSPEC": "<Refspec>",
+ "LOCATION": "<Folder within 'workspace' where this source
+ is located>"
+ },
+ {
+ "type": "http",
+ "URL": <URL link to file>",
+ "COMPRESSION": "xz",
+ "LOCATION": "<Folder within 'workspace' where this source
+ is located>"
+ }
+ ],
+ "workspace": "<Workspace folder where the source code was located to
+ produce the elf/axf files>",
+ "output_file": "<Intermediate layer output file name and location>",
+ "metadata": {<Metadata objects to be passed to the intermediate json
+ files>}
+ },
+ "elfs": [ <List of elf files to be traced/parsed>
+ {
+ "name": "<Full path name to elf/axf file>",
+ "traces": [ <List of trace files to be parsed for this
+ elf/axf file>
+ "Full path name to the trace file,"
+ ]
+ }
+ ]
+}
+"""
+OBJDUMP = None
+READELF = None
+FUNCTION_LINES_ENABLED = None
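+# Regex groups in SOURCE_PATTERN (see dump_sources): optional function name,
+# source file path, line number, the asm statements for that line, and a
+# lookahead group that is ignored.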
+SOURCE_PATTERN = (r'(?s)([a-zA-Z0-9_]+)?(?:\(\):\n)?(^/.+?):([0-9]+)'
+ r'(?: \(.+?\))?\n(.+?)(?=\n/|\n$|([a-zA-Z0-9_]+\(\):))')
+
+
+def main():
+ global OBJDUMP
+ global READELF
+ global FUNCTION_LINES_ENABLED
+
+ parser = argparse.ArgumentParser(epilog=json_conf_help,
+ formatter_class=RawTextHelpFormatter)
+ parser.add_argument('--config-json', metavar='PATH',
+ dest="config_json", default='config_file.json',
+ help='JSON configuration file', required=True)
+ parser.add_argument('--local-workspace', default="",
+ help=('Local workspace folder where source code files'
+ ' and folders reside'))
+ args = parser.parse_args()
+ try:
+ with open(args.config_json, 'r') as f:
+ config = json.load(f)
+ except Exception as ex:
+ print("Error at opening and processing JSON: {}".format(ex))
+ return
+ # Setting toolchain binary tools variables
+ OBJDUMP = config['parameters']['objdump']
+ READELF = config['parameters']['readelf']
+ # Check that the binary utilities are installed
+ os_command("{} --version".format(OBJDUMP))
+ os_command("{} --version".format(READELF))
+
+ if args.local_workspace != "":
+ # Checking ctags installed
+ try:
+ os_command("ctags --version")
+ except BaseException:
+ print("Warning!: ctags not installed/working function line numbers\
+ will be set to 0. [{}]".format(
+ "sudo apt install exuberant-ctags"))
+ else:
+ FUNCTION_LINES_ENABLED = True
+
+ pp = PostProcessCC(config, args.local_workspace)
+ pp.process()
+
+
+if __name__ == '__main__':
+ logging.basicConfig(filename='intermediate_layer.log', level=logging.DEBUG,
+ format=('%(asctime)s %(levelname)s %(name)s '
+ '%(message)s'))
+ logger = logging.getLogger(__name__)
+ start_time = time.time()
+ main()
+ elapsed_time = time.time() - start_time
+ print("Elapsed time: {}s".format(elapsed_time))
diff --git a/coverage-tool/coverage-reporting/merge.py b/coverage-tool/coverage-reporting/merge.py
new file mode 100755
index 0000000..e3d9d65
--- /dev/null
+++ b/coverage-tool/coverage-reporting/merge.py
@@ -0,0 +1,179 @@
+#!/usr/bin/env python3
+###############################################################################
+# Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+###############################################################################
+
+###############################################################################
+# FILE: merge.py
+#
+# DESCRIPTION: Merge two or more .info and json files, sanitizing source file
+# paths.
+# If different .info files contain the same source code duplicated
+# in different directories, we use the absolute paths of the
+# first .info file.
+#
+###############################################################################
+
+
+import os
+import sys
+import argparse
+from argparse import RawTextHelpFormatter
+import subprocess
+import json
+
+
+# Define an argument parser using the argparse library
+parser = argparse.ArgumentParser(epilog="""Example of usage:
+python3 merge.py -a coverage_1.info -a coverage_2.info -o coverage_merge.info \
+-j input_file1.json -j input_file2.json -m merge_file.json
+
+It is possible to merge any number of files at once.
+If metadata json files are defined then they must pair with their
+corresponding info file, i.e. have the same name.
+If a local workspace is defined then the paths in the info files will
+be translated from the original test workspace to the local workspace
+to enable the usage of LCOV, but the original files will be kept intact.
+By default, the output file must be a new file.
+To overwrite an existing file, use the "--force" option.
+
+Note: the user is expected to merge .info files referring to the same project.
+If merging .info files from different projects, LCOV can be exploited directly
+using a command such as "lcov -rc lcov_branch_coverage=1 -a coverage_1.info \
+-a coverage_2.info -o coverage_merge.info."
+""", formatter_class=RawTextHelpFormatter)
+requiredNamed = parser.add_argument_group('required named arguments')
+requiredNamed.add_argument("-a", "--add-file",
+ help="Input info file to be merged.",
+ action='append', required=True)
+requiredNamed.add_argument("-o", "--output",
+ help="Name of the output info (merged) file.",
+ required=False)
+parser.add_argument("-j", "--json-file", action='append',
+ help="Input json file to be merged.")
+parser.add_argument("-m", "--output-json",
+ help="Name of the output json (merged) file.")
+parser.add_argument("--force", dest='force', action='store_true',
+ help="force overwriting of output file.")
+parser.add_argument("--local-workspace", dest='local_workspace',
+ help='Local workspace where source files reside.')
+
+options = parser.parse_args(sys.argv[1:])
+# At least two .info files are expected
+if len(options.add_file) < 2:
+ print('Error: too few input files.\n')
+ sys.exit(1)
+# The same number of info and json files expected
+if options.json_file:
+ if len(options.json_file) != len(options.add_file):
+ print('Unmatched number of info and json files.\n')
+ sys.exit(1)
+
+file_groups = []
+info_files_to_merge = []
+# Check if files exist
+for file_name in options.add_file:
+ print("Merging '{}'".format(file_name))
+ if not os.path.isfile(file_name):
+ print('Error: file "' + file_name + '" not found.\n')
+ sys.exit(1)
+ if not file_name[-5:] == '.info':
+ print('Error: file "' + file_name +
+ '" has wrong extension. Expected .info file.\n')
+ sys.exit(1)
+ if file_name in info_files_to_merge:
+ print("Error: Duplicated info file '{}'".format(file_name))
+ sys.exit(1)
+ info_files_to_merge.append(file_name)
+ file_group = {"info": file_name, "locations": [], "json": ""}
+ info_name = os.path.basename(file_name).split(".")[0]
+ if options.json_file:
+ json_name = [i for i in options.json_file
+ if os.path.basename(i).split(".")[0] == info_name]
+ if not json_name:
+ print("Umatched json file name for '{}'".format(file_name))
+ sys.exit(1)
+ json_name = json_name.pop()
+ if not json_name[-5:] == '.json':
+ print('Error: file "' + json_name +
+ '" has wrong extension. Expected .json file.\n')
+ sys.exit(1)
+ if not os.path.isfile(json_name):
+ print('Error: file "' + json_name + '" not found.\n')
+ sys.exit(1)
+ # Now we have to extract the location folders for each info
+ # this is needed if we want translation to local workspace
+ file_group["json"] = json_name
+ with open(json_name) as json_file:
+ json_data = json.load(json_file)
+ locations = []
+ for source in json_data["configuration"]["sources"]:
+ locations.append(source["LOCATION"])
+ file_group["locations"] = locations
+ file_groups.append(file_group)
+
+# Check the extension of the output file
+if not options.output[-5:] == '.info':
+ print('Error: file "' + options.output +
+ '" has wrong extension. Expected .info file.\n')
+ sys.exit(1)
+
+if options.local_workspace is not None:
+ # Translation from test to local workspace
+ i = 0
+ while i < len(info_files_to_merge):
+ info_file = open(info_files_to_merge[i], "r")
+ print("Translating workspace for '{}'...".format(
+ info_files_to_merge[i]))
+ info_lines = info_file.readlines()
+ info_file.close()
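+ # The longest common prefix of all 'SF:' source-file paths approximates
+ # the original build workspace; it (or its parent, when the prefix itself
+ # is one of the declared source locations) is replaced below by the
+ # local workspace.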
+ common_prefix = os.path.normpath(
+ os.path.commonprefix([line[3:] for line in info_lines
+ if 'SF:' in line]))
+ temp_file = 'temporary_' + str(i) + '.info'
+ with open(temp_file, "w+") as f:
+ for line in info_lines:
+ cf = common_prefix
+ if os.path.basename(common_prefix) in file_groups[i]["locations"]:
+ cf = os.path.dirname(common_prefix)
+ f.write(line.replace(cf, options.local_workspace))
+ info_files_to_merge[i] = temp_file # Replace info file to be merged
+ i += 1
+
+# Merge json files
+if options.json_file:
+ json_merged_list = []
+ json_merged = {}
+ j = 0
+ while j < len(options.json_file):
+ json_file = options.json_file[j]
+ with open(json_file) as f:
+ data = json.load(f)
+ for source in data['configuration']['sources']:
+ if source not in json_merged_list:
+ json_merged_list.append(source)
+ j += 1
+ json_merged = {'configuration': {'sources': json_merged_list}}
+ with open(options.output_json, 'w') as f:
+ json.dump(json_merged, f)
+
+
+# Exploit LCOV merging capabilities
+# Example of LCOV usage: lcov -rc lcov_branch_coverage=1 -a coverage_1.info \
+# -a coverage_2.info -o coverage_merge.info
+command = ['lcov', '-rc', 'lcov_branch_coverage=1']
+
+for file_name in info_files_to_merge:
+ command.append('-a')
+ command.append(file_name)
+command.append('-o')
+command.append(options.output)
+
+subprocess.call(command)
+
+# Delete the temporary files
+if options.local_workspace is not None:
+ for f in info_files_to_merge:
+ os.remove(f)
diff --git a/coverage-tool/coverage-reporting/merge.sh b/coverage-tool/coverage-reporting/merge.sh
new file mode 100755
index 0000000..354dbc8
--- /dev/null
+++ b/coverage-tool/coverage-reporting/merge.sh
@@ -0,0 +1,417 @@
+#!/usr/bin/env bash
+
+##############################################################################
+# Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+##############################################################################
+
+#==============================================================================
+# FILE: merge.sh
+#
+# DESCRIPTION: Wrapper to merge intermediate json files and LCOV trace .info
+# files.
+#==============================================================================
+
+set -e
+#################################################################
+# Function to manipulate json objects.
+# The json object properties can be accessed through "." separated
+# property names. There are special characters that define a function
+# over a given property value:
+# If the qualifier list starts with '-' then the length of the json
+# array defined by the qualifiers is returned.
+# If the qualifier list starts with '*' then the resulting json value
+# is returned without the surrounding double quotes.
+# If a property name starts with "?" then the output states whether that
+# property exists within the json object.
+# Globals:
+# None
+# Arguments:
+# 1-Json string that describes the json object
+# 2-String of '.' separated qualifiers to access properties
+# within the json object
+# 3- Optional default value for a sought property value
+# Outputs:
+# None
+################################################################
+get_json_object() {
+ export _json_string="$1"
+ export _qualifiers="$2"
+ export _default="$3"
+ python3 - << EOT
+import os
+import json
+import sys
+
+_json_string = os.getenv("_json_string", "")
+_qualifiers = os.getenv("_qualifiers", "")
+_default = os.getenv("_default", "")
+try:
+ data = json.loads(_json_string)
+except Exception as ex:
+ print("Error decoding json string:{}".format(ex))
+ sys.exit(-1)
+ptr = data
+if _qualifiers[0] in ['-', '*']:
+ cmd = _qualifiers[0]
+ _qualifiers = _qualifiers[1:]
+else:
+ cmd = ""
+for _name in _qualifiers.split("."):
+ if _name in ptr:
+ ptr = ptr[_name]
+ elif _name.isdigit() and int(_name) < len(ptr):
+ ptr = ptr[int(_name)]
+ elif _name.startswith("?"):
+ print(_name[1:] in ptr)
+ sys.exit(0)
+ elif _default:
+ print(_default)
+ sys.exit(0)
+ else:
+ print("'{}' is not in the json object".format(_name))
+ sys.exit(-1)
+if cmd == "-":
+ # return len of the json array
+ print(len(ptr))
+elif cmd == "*":
+ #remove quotes
+ string = json.dumps(ptr)
+ if string.startswith('"') and string.endswith('"'):
+ string = string[1:-1]
+ print(string)
+else:
+ print(json.dumps(ptr))
+EOT
+}
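+# Illustrative usage (hypothetical json string '{"files": [{"id": "p1"}]}'):
+# get_json_object "$json_string" "-files" -> 1 (length of the 'files' array)
+# get_json_object "$json_string" "*files.0.id" -> p1 (value without quotes)
+# get_json_object "$json_string" "files.0.?id" -> True (property exists?)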
+
+#################################################################
+# Convert a relative path to absolute path
+# Globals:
+# None
+# Arguments:
+# 1-Path to be converted
+# Outputs:
+# Absolute path
+################################################################
+get_abs_path() {
+ path="$1"
+ echo "$(cd $(dirname $path) && echo "$(pwd -P)"/$(basename $path))"
+}
+
+#################################################################
+# Clone the source files
+# Globals:
+# None
+# Arguments:
+# 1-Json file with the sources to be cloned
+# 2-Folder where to clone the sources
+# Outputs:
+# None
+################################################################
+clone_repos() {
+ export OUTPUT_JSON="$1"
+ export CSOURCE_FOLDER="${2:-$LOCAL_WORKSPACE}"
+
+ cd $DIR # To be run at the same level of this script
+python3 - << EOT
+import os
+import clone_sources
+
+output_file = os.getenv('OUTPUT_JSON', 'output_file.json')
+source_folder = os.getenv('CSOURCE_FOLDER', 'source')
+try:
+ r = clone_sources.CloneSources(output_file)
+ r.clone_repo(source_folder)
+except Exception as ex:
+ print(ex)
+EOT
+ cd -
+}
+
+#################################################################
+# Get a file defined in the json object
+# Globals:
+# None
+# Arguments:
+# 1-Json object that defines the locations of the info and json
+# files
+# 2-Folder to save the info and json files
+# 3-Variable that holds the name of the variable that will hold
+# the name of the file to be downloaded (reference argument)
+# Outputs:
+# None
+################################################################
+get_file() {
+ json_object="$1"
+ where="$2"
+ var_name="${3:-param_cloned}" # Defaults to globar var
+
+ local _type=$(get_json_object "$json_object" "type")
+ local _origin=$(get_json_object "$json_object" "*origin")
+ local _compression=$(get_json_object "$json_object" "*compression" None)
+ local fname=""
+ local cloned_file=""
+ local full_filename=$(basename -- "$_origin")
+ local extension="${full_filename##*.}"
+ local filename="${full_filename%.*}"
+
+ if [ "$_type" = '"http"' ];then
+ fname="$where.$extension" # Same filename as folder
+ rm $where/$fname &>/dev/null || true
+ wget -o error.log $_origin -O $where/$fname || (
+ cat error.log && exit -1)
+ cloned_file="$(get_abs_path $where/$fname)"
+ elif [ "$_type" = '"bundle"' ];then
+ # Check file exists at origin, i.e. was unbundled before
+ fname="$_origin"
+ if [ -f "$where/$fname" ];then
+ cloned_file="$(get_abs_path $where/$fname)"
+ fi
+ elif [ "$_type" = '"file"' ];then
+ if [[ "$_origin" = http* ]]; then
+ echo "$_origin looks like 'http' rather than 'file' please check..."
+ exit -1
+ fi
+ fname="$where.$extension" # Same filename as folder
+ cp -f $_origin $where/$fname
+ cloned_file="$(get_abs_path $where/$fname)"
+ else
+ echo "Error unsupported file type:$_type.... Aborting."
+ exit -1
+ fi
+ if [ "$_compression" = "tar.xz" ];then
+ cd $where
+ pwd
+ tar -xf $fname
+ rm -f $fname
+ cd -
+ fi
+ eval "${var_name}=${cloned_file}"
+}
+
+#####################################################################
+# Get (download/copy) info and json files from the input json file
+# Globals:
+# merge_input_json_file: Input json file with locations of info
+# and intermediate json files to be merged.
+# input_folder: Folder to put info and json files to be merged
+# Arguments:
+# None
+# Outputs:
+# None
+###################################################################
+get_info_json_files() {
+ json_string="$(cat $merge_input_json_file)"
+ nf=$(get_json_object "$json_string" "-files")
+ rm -rf $input_folder > /dev/null || true
+ mkdir -p $input_folder
+ for f in $(seq 0 $(($nf - 1)));
+ do
+ pushd $input_folder > /dev/null
+ _file=$(get_json_object "$json_string" "files.$f")
+ folder=$(get_json_object "$_file" "*id")
+ echo "Geting files from project '$folder' into '$input_folder'..."
+ mkdir -p $folder
+ bundles=$(get_json_object "$_file" "bundles" None)
+ if [ "$bundles" != "None" ];then
+ nb=$(get_json_object "$_file" "-bundles")
+ for n in $(seq 0 $(($nb - 1)));
+ do
+ get_file "$(get_json_object "$bundles" "$n")" $folder
+ done
+ fi
+ get_file "$(get_json_object "$_file" "config")" $folder config_json_file
+ get_file "$(get_json_object "$_file" "info")" $folder info_file
+ popd > /dev/null
+ done
+}
+
+#################################################################
+# Merge json and info files and generate branch coverage report
+# Globals:
+# output_coverage_file: Location and name for merge coverage info
+# output_json_file: Location and name for merge json output
+# input_folder: Location where reside json and info files
+# LOCAL_WORKSPACE: Local workspace folder with the source files
+# Arguments:
+# None
+# Outputs:
+# Output merge coverage file
+# Output merge json file
+################################################################
+merge_files() {
+# Merge info and json files
+ local lc=" "
+ if [ -n "$LOCAL_WORKSPACE" ];then
+ # Translation to be done in the info files to local workspace
+ lc=" --local-workspace $LOCAL_WORKSPACE"
+ fi
+ # merge.py must reside in the same folder as this script (DIR)
+ python3 ${DIR}/merge.py \
+ $(find $input_folder -name "*.info" -exec echo "-a {}" \;) \
+ $(find $input_folder -name "*.json" -exec echo "-j {}" \;) \
+ -o $output_coverage_file \
+ -m $output_json_file \
+ $lc
+
+}
+
+
+#################################################################
+# Print scripts usage
+# Arguments:
+# None
+# Outputs:
+# Prints to stdout script usage
+################################################################
+usage() {
+ clear
+ echo "Usage:"
+ echo "merge -h Display this help message."
+ echo "-j <input json file> Input json file(info and intermediate json files to be merged)."
+ echo "-l <report folder> Folder for branch coverage report. Defaults to ./lcov_folder."
+ echo "-i <Path> Folder to copy/download info and json files. Defaults to ./input."
+ echo "-w <Folder> Local workspace folder for source files."
+ echo "-o <name> Name of the merged info file. Defaults to ./coverage_merge.info"
+ echo "-m <name> Name of the merged metadata json file. Defaults to ./merge_output.json"
+ echo "-c If it is set, sources from merged json files will be cloned/copied to local workspace folder."
+ echo "$help_message"
+}
+
+help_message=$(cat <<EOF
+
+# The script that merges the info data (code coverage) and json metadata
+# (intermediate layer) needs as an input a json file with the following
+# properties:
+# files: array of objects that describe the type of file/project to be
+# merged.
+# id: Unique identifier (project) associated to the info and
+# intermediate json files
+# config: Intermediate json file
+# type: Type of storage for the file. (http or file)
+# origin: Location (url or folder) of the file
+# info: Info file
+# type: Type of storage for the file. (http or file)
+# origin: Location (url or folder) of the file
+# Example:
+{ "files" : [
+ {
+ "id": "<project 1>",
+ "config":
+ {
+ "type": "http",
+ "origin": "<URL of json file for project 1>"
+ },
+ "info":
+ {
+ "type": "http",
+ "origin": "<URL of info file for project 1>"
+ }
+ },
+ {
+ "id": "<project 2>",
+ "config":
+ {
+ "type": "http",
+ "origin": "<URL of json file for project 2>"
+ },
+ "info":
+ {
+ "type": "http",
+ "origin": "<URL of info file for project 2>"
+ }
+ },
+ .
+ .
+ .
+ ]
+}
+EOF
+)
+
+clear
+# Local workspace folder to contain source files
+LOCAL_WORKSPACE=""
+# If this is true then will clone/copy sources from merged json
+# file into local workspace
+CLONE_SOURCES=false
+# Location of the input json file that contains information about
+# the info and json files to be merged and produced a report
+merge_input_json_file=""
+# Folder to download json and info files
+input_folder="./input_folder"
+# Folder to put the reports
+LCOV_FOLDER="./lcov_folder"
+# File name for merge coverage info
+output_coverage_file="./coverage_merge.info"
+# File name for merge json output
+output_json_file="./merge_output.json"
+while getopts ":hj:o:l:w:i:cm:" opt; do
+ case ${opt} in
+ h )
+ usage
+ exit 0
+ ;;
+ w )
+ LOCAL_WORKSPACE=$(cd $OPTARG; pwd)
+ ;;
+ i )
+ input_folder=$OPTARG
+ ;;
+ c )
+ CLONE_SOURCES=true
+ ;;
+ j )
+ merge_input_json_file=$OPTARG
+ ;;
+ l )
+ LCOV_FOLDER=$OPTARG
+ ;;
+ o )
+ output_coverage_file=$OPTARG
+ ;;
+ m )
+ output_json_file=$OPTARG
+ ;;
+ \? )
+ echo "Invalid option: $OPTARG" 1>&2
+ usage
+ exit -1
+ ;;
+ : )
+ echo "Invalid option: $OPTARG requires an argument" 1>&2
+ usage
+ exit -1
+ ;;
+ esac
+done
+shift $((OPTIND -1))
+if [ -z "$merge_input_json_file" ]; then
+ echo "Input json file required"
+ usage
+ exit -1
+fi
+if [ -z "$LOCAL_WORKSPACE" ] && [ $CLONE_SOURCES = true ]; then
+ echo "Need to define a local workspace folder to clone/copy sources!"
+ exit -1
+fi
+# Getting the script folder where other script files must reside, i.e
+# merge.py, clone_sources.py
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+input_folder="$(get_abs_path $input_folder)"
+LCOV_FOLDER="$(get_abs_path $LCOV_FOLDER)"
+output_coverage_file="$(get_abs_path $output_coverage_file)"
+output_json_file="$(get_abs_path $output_json_file)"
+param_cloned=""
+get_info_json_files
+merge_files
+if [ $CLONE_SOURCES = true ];then
+ clone_repos $output_json_file
+fi
+# Generate branch coverage report
+genhtml --branch-coverage $output_coverage_file \
+ --output-directory $LCOV_FOLDER
+cd -
diff --git a/coverage-tool/docs/code_cov_diag.jpg b/coverage-tool/docs/code_cov_diag.jpg
new file mode 100644
index 0000000..d2f3f63
--- /dev/null
+++ b/coverage-tool/docs/code_cov_diag.jpg
Binary files differ
diff --git a/coverage-tool/docs/design_overview.md b/coverage-tool/docs/design_overview.md
new file mode 100644
index 0000000..89c4b66
--- /dev/null
+++ b/coverage-tool/docs/design_overview.md
@@ -0,0 +1,78 @@
+# Design overview
+
+This document explains the overall design approach to the trace-based code coverage tool.
+
+## Motivation
+
+The primary motivation for this code coverage tool is that there are no commercial off-the-shelf (COTS) tools that can be readily used to measure code coverage for firmware components, especially those meant for memory-constrained platforms. Most tools rely on the traditional approach where the code is instrumented to enable coverage measurement. For firmware components designed for memory-constrained platforms, code size is a key consideration, and the need to change memory maps to accommodate the instrumented code is seen as a pain point. A possible alternative is to perform the coverage measurement on emulation platforms, which could remove the memory constraints; however, this requires additional platform-specific code in the firmware to support the emulation platform.
+
+The above factors led to a design approach to measure the code coverage based on execution trace, without the need for any code instrumentation. This approach provides the following benefits:
+- allows the user to test the real software stack without worrying about memory constraints - no code is instrumented, meaning the real software is used during the coverage run.
+- allows the user to test on real platforms rather than partial system emulations - coverage information can be obtained without expensive modelling or porting effort.
+
+
+## Known Limitations
+
+The following limitations are understood to exist with the trace-based coverage tool:
+
+- This works only with non-relocatable code: the execution address of an instruction can then be mapped directly to the addresses determined from the generated binaries. Even if some position-independent code is involved, the tool can still be used provided the location binding happens at build time, since the post-processing stage could be made to do the mapping.
+- Accuracy of the code coverage information mapped to the source code is limited by the completeness of the embedded DWARF signatures: with higher levels of code optimisation the DWARF signatures become `sparse`, especially when the generated code is optimised for size. This solution works best when compiler optimisation is turned off.
+- This is currently proven to work on FVPs (Fixed Virtual Platforms): early prototyping shows the approach can work on silicon platforms, however this needs further development.
+
+
+## Design Details
+The following diagram outlines the individual components involved in the trace-based coverage tool.
+
+![Code coverage design overview](./code_cov_diag.jpg)
+
+The following changes are needed at each of the stages to enable this code coverage measurement tool to work.
+
+### Compilation stage
+
+The coverage tool relies on the DWARF signatures embedded within the binaries generated for the firmware that runs as part of the coverage run. In the case of the GCC toolchain, this is enabled by adding the -g flag during compilation.
+
+The -g flag generates DWARF signatures embedded within the binaries, as seen in the example below:
+```
+100005b0 <tfm_plat_get_rotpk_hash>:
+tfm_plat_get_rotpk_hash():
+/workspace/workspace/tf-m-build-config/trusted-firmware-m/platform/ext/common/template/crypto_keys.c:173
+100005b0: b510 push {r4, lr}
+/workspace/workspace/tf-m-build-config/trusted-firmware-m/platform/ext/common/template/crypto_keys.c:174
+100005b2: 6814 ldr r4, [r2, #0]
+```
+
+### Trace generation stage
+
+The coverage tool relies on the generation of the execution trace from the target platform (in our case an FVP). It uses the coverage trace plugin, an MTI-based custom plugin that registers for the trace source type `INST` and dumps a filtered set of the instruction data executed during the coverage run. For silicon platforms, trace capture with tools such as DSTREAM-ST is expected to be used.
+
+See the [Coverage Plugin](./plugin_design.md) documentation to learn more about this custom plugin.
+
+The following diagram shows an example trace capture output from the coverage trace plugin:
+```
+[PC address, times executed, opcode size]
+0010065c 1 4
+00100660 1 4
+00100664 1 2
+00100666 1 2
+...
+```
+
+### Post-processing stage
+
+In this stage coverage information is generated by:
+1. Determining the instructions executed from the trace output captured.
+2. Mapping those instructions to source code by utilising the DWARF signatures embedded within the binaries.
+3. Generating the LCOV .info files allowing us to report the coverage information with the LCOV tool and merge reports from multiple runs.
+
+### Typical steps to integrate trace-based coverage tool to CI setup
+
+- Generate the DWARF binary (elf or axf) files at build stage using the -g flag or equivalent compiler switches.
+- Build the coverage plugin using the corresponding PVLIB_HOME library for the 64-bit compiler and deploy in your CI to be used during execution.
+- Use the coverage plugin during FVP execution by providing the additional parameters. See [here](./plugin_user_guide.md#capturing-a-trace)
+- Clone the sources in your local workspace if not already there.
+- The generated trace logs along with the DWARF binary files, the bin utilities (objdump, readelf from the same toolchain for the DWARF binary files) and source code will be used as input to the *intermediate_layer.py* to generate the intermediate json layer.
+- The *generate_info_file.py* will parse the json intermediate layer file to an info file that can be read by the genhtml binary from LCOV.
+- Optionally use the merge.py to merge multiple coverage info files to generate a combined report.
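+
+A condensed sketch of these steps is shown below. The model, binary and workspace paths are hypothetical placeholders; the individual commands are documented in the plugin and reporting user guides.
+
+```bash
+# Build the trace plugin (see the plugin user guide)
+cd coverage-plugin && make PVLIB_HOME=/path/to/model_library && cd -
+
+# Run the FVP with the plugin to capture execution traces
+/path/to/FVP_model \
+ --plugin /path/to/coverage_trace.so \
+ -C TRACE.coverage_trace.trace-file-prefix="/path/to/traces/covtrace"
+
+# Post-process: traces + DWARF binaries -> intermediate json -> info -> HTML
+python3 intermediate_layer.py --config-json config_file.json \
+ --local-workspace /path/to/local/sources
+python3 generate_info_file.py --workspace /path/to/local/sources \
+ --json output_file.json --info coverage.info
+genhtml --branch-coverage coverage.info --output-directory html_report
+```
+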
+## License
+[BSD-3-Clause](../../license.md)
+
diff --git a/coverage-tool/docs/plugin_user_guide.md b/coverage-tool/docs/plugin_user_guide.md
new file mode 100644
index 0000000..5c600dd
--- /dev/null
+++ b/coverage-tool/docs/plugin_user_guide.md
@@ -0,0 +1,30 @@
+# coverage-plugin User Guide
+
+The *coverage-plugin* is a C++ project using the Model Trace Interface Plugin Development Kit (MTIPDK) in order to create a trace plugin, which is a special shared library. The trace plugins can be loaded into Arm Fast Models to produce execution trace data for doing code coverage measurement.
+
+## Dependencies
+- GCC 7.5.0 at least
+
+## Building the coverage-plugin
+```bash
+$ cd coverage-plugin
+$ make PVLIB_HOME=</path/to/model_library>
+```
+
+## Capturing a trace
+
+You need to add two options to your model command-line:
+
+```bash
+ --plugin /path/to/coverage_trace.so
+ -C TRACE.coverage_trace.trace-file-prefix="/path/to/TRACE-PREFIX"
+```
+
+You can then run your FVP model. The traces will be created at the end of the simulation*.
+
+BEWARE: Traces aren't numbered and will be overwritten if you do two successive runs. Aggregating results will require moving traces to a separate place or changing the prefix between runs. This is the responsibility of the plugin user.
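+
+For example, a minimal way to keep the traces of successive runs apart, assuming the hypothetical prefix `/tmp/covtrace/trace` was passed to the plugin, could be:
+
+```bash
+# Archive the traces of the previous run before starting the next one
+mkdir -p run1
+mv /tmp/covtrace/trace* run1/
+```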
+
+*NOTE: The plugin captures the traces in memory and writes the data to a file when the simulation terminates. If the user terminates the simulation forcefully with Ctrl+C, the trace files are not generated.
+
+## License
+[BSD-3-Clause](../../license.md)
diff --git a/coverage-tool/docs/reporting_user_guide.md b/coverage-tool/docs/reporting_user_guide.md
new file mode 100644
index 0000000..7c0deea
--- /dev/null
+++ b/coverage-tool/docs/reporting_user_guide.md
@@ -0,0 +1,357 @@
+# coverage-reporting User Guide
+
+The *coverage-reporting* is a collection of python and bash scripts to generate LCOV HTML-based reports for code coverage against C source code. There are two stages for this process:
+
+1. Converting the information from the execution traces (using coverage-plugin) of the FVP and the DWARF signatures from the elf/axf files to an intermediate JSON file.
+
+2. Converting the intermediate JSON file into an info file that can be read by LCOV utilities to produce a code coverage HTML report. There are merge utility scripts provided to merge multiple info files to generate a combined report from multiple runs.
+
+## Intermediate JSON file
+This is a JSON file that pairs the source code line numbers embedded in the elf files (by virtue of DWARF signatures) against the execution trace log files from the coverage-plugin. Only the lines that are compiled and linked into the final binaries are referenced by the DWARF signatures; thus the coverage information is always measured against the compiled code that made it into the binary. The tool needs a configuration json file as input with the metadata required to perform the coverage computation. The format of this configuration file is shown below:
+```json
+{
+ "configuration":
+ {
+ "remove_workspace": "<true> if workspace must be from removed from the path of the source files",
+ "include_assembly": "<true> to include assembly source code in the intermediate layer"
+ },
+ "parameters":
+ {
+ "objdump": "<Path> to the objdump binary to handle DWARF signatures",
+ "readelf": "<Path> to the readelf binary to handle DWARF signatures",
+ "sources": [
+ {
+ "type": "git",
+ "URL": "<URL> git repo",
+ "COMMIT": "<Commit id>",
+ "REFSPEC": "<Refspec>",
+ "LOCATION": "<Folder> within 'workspace' where this source is located"
+ },
+ {
+ "type": "http",
+ "URL": "<URL> link to file",
+ "COMPRESSION": "xz",
+ "LOCATION": "<Folder within 'workspace' where this source is located>"
+ }
+ ],
+ "workspace": "<Workspace folder> where the source code was located to produce(build) the elf/axf files",
+ "output_file": "<Intermediate json layer output file name and location>",
+ "metadata": {"metadata_1": "metadata value"}
+ },
+ "elfs": [
+ {
+ "name": "<Full path name to elf/axf file>",
+ "traces": [
+ "Full path name to the trace file,"
+ ]
+ }
+ ]
+}
+```
+
+Here is an example of an actual configuration JSON file:
+
+```json
+{
+ "configuration":
+ {
+ "remove_workspace": true,
+ "include_assembly": true
+ },
+ "parameters":
+ {
+ "objdump": "gcc-arm-none-eabi-7-2018-q2-update/bin/arm-none-eabi-objdump",
+ "readelf": "gcc-arm-none-eabi-7-2018-q2-update/bin/arm-none-eabi-readelf",
+ "sources": [
+ {
+ "type": "git",
+ "URL": "https://git.trustedfirmware.org/TF-M/trusted-firmware-m.git/",
+ "COMMIT": "2ffadc12fb34baf0717908336698f8f612904",
+ "REFSPEC": "",
+ "LOCATION": "trusted-firmware-m"
+ },
+ {
+ "type": "git",
+ "URL": "https://mucboot.com/mcuboot.git",
+ "COMMIT": "507689a57516f558dac72bef634723b60c5cfb46b",
+ "REFSPEC": "",
+ "LOCATION": "mcuboot"
+ },
+ {
+ "type": "git",
+ "URL": "https://tf.org/mbed/mbed-crypto.git",
+ "COMMIT": "1146b4589011b69a6437e6b728f2af043a06ec19",
+ "REFSPEC": "",
+ "LOCATION": "mbed-crypto"
+ }
+ ],
+ "workspace": "/workspace/workspace/tf-m",
+ "output_file": "output_file.json"
+ },
+ "elfs": [
+ {
+ "name": "mcuboot.axf",
+ "traces": [
+ "reg-covtrace*.log"
+ ]
+ },
+ {
+ "name": "tfms.axf",
+ "traces": [
+ "reg-covtrace*.log"
+ ]
+ },
+ {
+ "name": "tfmns.axf",
+ "traces": [
+ "reg-covtrace*.log"
+ ]
+ }
+ ]
+}
+```
+
+
+As dependencies, the script needs the paths to the objdump and readelf binaries from the *same* toolchain used to build the elf binaries under test.
+The script can then be invoked as:
+
+```bash
+$ python3 intermediate_layer.py --config-json <config json file> [--local-workspace <path to local folder/workspace where the source files are located>]
+```
+The *local-workspace* option must be given if the current path to the source files differs from the workspace where the build (compiling and linking) happened. The latter is recorded in the DWARF signatures while the former is used to produce the coverage report. Recreating the local workspace is not a requirement, but if it is not present the program will not be able to find the line numbers of the functions within the source files (**ctags** must also be installed, i.e. **sudo apt install exuberant-ctags**).
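+
+For instance, assuming the build happened in `/workspace/workspace/tf-m` (as in the example configuration above) while the sources are checked out locally under a hypothetical `/home/user/tf-m-sources`, the invocation would be:
+
+```bash
+$ python3 intermediate_layer.py --config-json config_file.json \
+ --local-workspace /home/user/tf-m-sources
+```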
+
+The output is an intermediate json file with the following format:
+
+```json
+{
+ "configuration": {
+ "elf_map": {
+ "binary name 1": 0,
+ "binary name 2": 1
+ },
+ "metadata": {
+ "property 1": "metadata value 1",
+ "property 2": "metadata value 2"
+ },
+ "sources": [{
+ "type": "<git or http>",
+ "URL": "<url for the source>",
+ "COMMIT": "<commit id for git source>",
+ "REFSPEC": "<refspec for the git source",
+ "LOCATION": "<folder to put the source>"
+ }]
+ },
+ "source_files": {
+ "<Source file name>": {
+ "functions": {
+ "line": "<Function line number>",
+ "covered": "<true or false>"
+ },
+ "lines": {
+ "<line number>": {
+ "covered": "<true or false>",
+ "elf_index": {
+ "<Index from elf map>": {
+ "<Address in decimal>": [
+ "<Assembly opcode>",
+ "<Number of times executed>"
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+}
+```
+
+An example snippet of an intermediate JSON file is here:
+
+```json
+{
+ "configuration": {
+ "elf_map": {
+ "bl1": 0,
+ "bl2": 1,
+ "bl31": 2
+ },
+ "metadata": {
+ "BUILD_CONFIG": "tf1",
+ "RUN_CONFIG": "tf2"
+ },
+ "sources": [
+ {
+ "type": "git",
+ "URL": "https://git.trustedfirmware.org/TF-M/trusted-firmware-m.git/",
+ "COMMIT": "2ffadc12fb34baf0717908336698f8f612904",
+ "REFSPEC": "",
+ "LOCATION": "trusted-firmware-m"
+ },
+ {
+ "type": "git",
+ "URL": "https://mucboot.com/mcuboot.git",
+ "COMMIT": "507689a57516f558dac72bef634723b60c5cfb46b",
+ "REFSPEC": "",
+ "LOCATION": "mcuboot"
+ },
+ {
+ "type": "git",
+ "URL": "https://tf.org/mbed/mbed-crypto.git",
+ "COMMIT": "1146b4589011b69a6437e6b728f2af043a06ec19",
+ "REFSPEC": "",
+ "LOCATION": "mbed-crypto"
+ }
+ ]
+ },
+ "source_files": {
+ "mcuboot/boot1.c": {
+ "functions": {
+ "arch_setup": true
+ },
+ "lines": {
+ "12": {
+ "covered": true,
+ "elf_index": {
+ "0": {
+ "6948": [
+ "b2760000 \torr\tx0, x0, #0x400",
+ 1
+ ]
+ }
+ }
+ },
+ "19": {
+ "covered": true,
+ "elf_index": {
+ "0": {
+ "6956": [
+ "d65f03c0 \tret",
+ 1
+ ]
+ }
+ }
+ }
+ }
+ },
+... more lines
+```
+
+
+
+## Report
+LCOV uses **info** files to produce an HTML report; hence, to convert the intermediate json file to an **info** file:
+```bash
+$ python3 generate_info_file.py --workspace <Workspace where the C source folder structure resides> --json <Intermediate json file> [--info <path and filename for the info file>]
+```
+As was mentioned, the *workspace* option tells the program where to look for the source files; thus it is a requirement that the local workspace is populated.
+
+This will generate an info file *coverage.info* that can be input into LCOV to generate the final coverage report as below:
+
+```bash
+$ genhtml --branch-coverage coverage.info --output-directory <HTML report folder>
+```
+
+Here is an example snippet of an info file:
+
+```bash
+TN:
+SF:/home/projects/initial_attestation/attestation_key.c
+FN:213,attest_get_instance_id
+
+FN:171,attest_calc_instance_id
+
+FN:61,attest_register_initial_attestation_key
+
+FN:137,attest_get_signing_key_handle
+
+FN:149,attest_get_initial_attestation_public_key
+
+FN:118,attest_unregister_initial_attestation_key
+FNDA:1,attest_get_instance_id
+
+FNDA:1,attest_calc_instance_id
+
+FNDA:1,attest_register_initial_attestation_key
+
+FNDA:1,attest_get_signing_key_handle
+
+FNDA:1,attest_get_initial_attestation_public_key
+
+FNDA:1,attest_unregister_initial_attestation_key
+FNF:6
+FNH:6
+BRDA:71,0,0,0
+BRDA:71,0,1,1
+...<more lines>
+```
+
+Refer to the [geninfo manual page](http://ltp.sourceforge.net/coverage/lcov/geninfo.1.php) for the meaning of the flags.
+
+## Wrapper
+There is a wrapper bash script that can generate the intermediate json file, create the info file and the LCOV report:
+```bash
+$ ./branch_coverage.sh --config config_file.json --workspace <local workspace> --outdir html_report
+```
+
+## Merge files
+There is a utility wrapper that can merge json and info files to produce merged code coverage:
+```bash
+$ ./merge.sh -j <input json file> [-l <filename for report>] [-w <local workspace>] [-c to indicate to recreate workspace from sources]
+```
+This utility needs an input json file with the list of json/info files to be merged:
+```json
+{ "files" : [
+ {
+ "id": "<unique project id (string) that belongs the json and info files>",
+ "config":
+ {
+ "type": "<'http' or 'file'>",
+ "origin": "<URL or folder where the json files reside>"
+ },
+ "info":
+ {
+ "type": "<'http' or 'file'>",
+ "origin": "<URL or folder where the info files reside>"
+ }
+ },
+....More of these json objects
+ ]
+}
+```
+This utility will merge the files, create the C source folder structure and produce the LCOV reports for the merged files. The utility can translate the workspace of each info file to the local workspace in case the info files come from different workspaces. The only requirement is that all the info files come from the **same** sources, i.e. repositories.
+
+Example snippet of input json file:
+
+```json
+{ "files" : [
+ {
+ "id": "Tests_Release_BL2",
+ "config":
+ {
+ "type": "file",
+ "origin": "/home/workspace/150133/output_file.json"
+ },
+ "info":
+ {
+ "type": "file",
+ "origin": "/home/workspace/150133/coverage.info"
+ }
+ },
+ {
+ "id": "Tests_Regression_BL2",
+ "config":
+ {
+ "type": "file",
+ "origin": "/home//workspace/150143/output_file.json"
+ },
+ "info":
+ "type": "file",
+ "origin": "/home/workspace/150143/coverage.info"
+ }
+ }
+ ]
+}
+```
+
+## License
+[BSD-3-Clause](../../license.md)
diff --git a/coverage-tool/docs/user_guide.md b/coverage-tool/docs/user_guide.md
new file mode 100644
index 0000000..70505a1
--- /dev/null
+++ b/coverage-tool/docs/user_guide.md
@@ -0,0 +1,15 @@
+# Trace-based Coverage Tool User Guide
+
+The *coverage-tool* provides code coverage measurement based on execution trace, without the need for code instrumentation. The tool is specifically meant for firmware components that run on memory-constrained platforms. Not relying on code instrumentation circumvents the frequent issue of instrumented code altering the target memory model in which the firmware is expected to run; the firmware is therefore tested in the same memory model in which it is eventually intended to be released. The coverage tool comprises two main components: a *trace plugin component* and a set of *post-processing scripts* to generate the coverage report.
+
+## Design Overview
+Refer to [design overview](./design_overview.md) for an outline of the design of this trace-based coverage tool.
+
+## Plugin user guide
+Refer to [plugin user guide](./plugin_user_guide.md) to learn more on how the plugin component is to be used as part of trace-based coverage tool.
+
+## Reporting user guide
+Refer to [reporting user guide](./reporting_user_guide.md) to learn more on how to use the post-processing scripts, that are part of the trace-based coverage tool, in order to generate the coverage report for analysis.
+
+## License
+[BSD-3-Clause](../../license.md)
diff --git a/coverage-tool/readme.md b/coverage-tool/readme.md
new file mode 100644
index 0000000..e956f2c
--- /dev/null
+++ b/coverage-tool/readme.md
@@ -0,0 +1,35 @@
+# coverage-tool
+
+The *coverage-tool* is a coverage measurement tool based on a custom plugin (implementing the Model Trace Interface (MTI)) to generate execution traces, and a report generator based on LCOV. The current implementation of this tool reports statement/line coverage, function coverage and branch coverage information.
+
+## Installation
+
+Please clone the repository.
+
+```bash
+git clone https://gitlab.arm.com/qa-tools.git
+```
+
+## Dependencies
+For the plugin:
+- Python 3
+
+For the report generator:
+- LCOV (https://github.com/linux-test-project/lcov)
+```bash
+sudo apt-get update -y
+sudo apt-get install -y lcov
+sudo apt-get install exuberant-ctags
+```
+## Usage
+
+Please see the individual [user guide](./docs/user_guide.md) for more details.
+
+## Contributing
+Pull requests are welcome. For major changes, please open an issue first to discuss what you would like to change.
+
+Please follow the recommended [coding style](../readme.md#style-guide) specified for each component. Also make sure to update the in-source documentation as appropriate.
+
+
+## License
+[BSD-3-Clause](../../license.md)
diff --git a/license.md b/license.md
new file mode 100644
index 0000000..2e694a6
--- /dev/null
+++ b/license.md
@@ -0,0 +1,45 @@
+# License
+
+The software is provided under a BSD-3-Clause license (below).
+
+```bash
+
+ Copyright (c) [XXXX-]YYYY, <OWNER>. All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without modification,
+ are permitted provided that the following conditions are met:
+
+ - Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+ - Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+ - Neither the name of Arm nor the names of its contributors may be used to
+ endorse or promote products derived from this software without specific
+ prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
+ ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+```
+
+SPDX Identifiers
+----------------
+
+Individual files contain the following tag instead of the full license text.
+
+```bash
+ SPDX-License-Identifier: BSD-3-Clause
+```
+
+This enables machine processing of license information based on the SPDX
+License Identifiers that are here available: http://spdx.org/licenses/
diff --git a/quality-metrics/broker-component/Dockerfile b/quality-metrics/broker-component/Dockerfile
new file mode 100644
index 0000000..2f960ec
--- /dev/null
+++ b/quality-metrics/broker-component/Dockerfile
@@ -0,0 +1,28 @@
+#======================================================================
+# Copyright (c) 2020, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#======================================================================
+
+#======================================================================
+# FILE: Dockerfile
+#
+# DESCRIPTION: This is used for containerised deployment of the quality
+# metrics broker component.
+#======================================================================
+
+FROM alpine:3.11
+
+RUN apk update && \
+ apk upgrade && \
+ apk add --no-cache python3-dev && \
+ pip3 install --upgrade pip
+
+WORKDIR /metrics_server
+COPY . /metrics_server
+
+RUN pip3 --no-cache-dir install -r requirements.txt
+
+EXPOSE 5000
+
+CMD python3 quality_metrics_server.py
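+
+# Illustrative build/run commands (image tag is an arbitrary example):
+#   docker build -t metrics-broker .
+#   docker run -p 5000:5000 metrics-broker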
diff --git a/quality-metrics/broker-component/constants.py b/quality-metrics/broker-component/constants.py
new file mode 100644
index 0000000..32ce30f
--- /dev/null
+++ b/quality-metrics/broker-component/constants.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" constants.py:
+
+ This file contains the constants required by metrics server.
+
+"""
+
+JWT_EXPIRATION_DAYS = 365
+
+HOST = "<Host Public IP Address>"
+PORT = "8086"
+BUFF_SIZE = 10
+POLL_DELAY = 0.1
+
+LISTEN_ALL_IPS = "0.0.0.0"
+
+VALID_METRICS = [
+ 'tfm_image_size',
+ 'tfa_code_churn',
+ 'tfa_defects_stats',
+ 'tfa_defects_tracking',
+ 'tfa_complexity_stats',
+ 'tfa_complexity_tracking',
+ 'tfa_rtinstr',
+ 'tfa_image_size',
+ 'tfa_misra_defects']
+
+DATABASE_DICT = {
+ "TFM_IMAGE_SIZE": "TFM_ImageSize",
+ "TFA_CODE_CHURN": "TFA_CodeChurn",
+ "TFA_DEFECTS": "TFA_Defects",
+ "TFA_COMPLEXITY": "TFA_Complexity",
+ "TFA_RTINSTR": "TFA_RTINSTR",
+ "TFA_IMAGE_SIZE": "TFA_ImageSize",
+ "TFA_MISRA_DEFECTS": "TFA_MisraDefects"
+}
+
+SUPPORTED_API_VERSIONS = ["1.0"]
diff --git a/quality-metrics/broker-component/credentials.py b/quality-metrics/broker-component/credentials.py
new file mode 100644
index 0000000..7e9793a
--- /dev/null
+++ b/quality-metrics/broker-component/credentials.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" credentials.py:
+
+ Credentials class. This is for reference only.
+
+"""
+
+# SECRET_KEY is set for reference purpose only
+# It is recommended to change its value before deployment
+SECRET_KEY = 'SECRET_KEY'
+
+
+class User(object):
+ def __init__(self, id, username, password):
+ self.id = id
+ self.username = username
+ self.password = password
+
+ def __str__(self):
+ return "User(id='%s')" % self.id
+
+
+# User credentials are set for reference purpose only
+# It is recommended to change these value accordingly before deployment
+users = [
+ User(1, 'metrics_1', 'metrics_pass_1'),
+ User(2, 'metrics_2', 'metrics_pass_2'),
+ User(3, 'tfa_metrics', 'tfa_metrics_pass'),
+]
diff --git a/quality-metrics/broker-component/data_converter.py b/quality-metrics/broker-component/data_converter.py
new file mode 100644
index 0000000..5b3c74b
--- /dev/null
+++ b/quality-metrics/broker-component/data_converter.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" data_converter.py:
+
+ Data converter class. This class is aimed at converting the received
+ data in the format which InfluxDB understands.
+
+"""
+
+import json
+import constants
+
+
+class DataConverter:
+
+ @staticmethod
+ def convert_tfm_imagesize_data(data):
+ # Transform keys names
+ data['metadata']['DataProducer'] = data['metadata'].pop(
+ 'data_producer')
+
+ data['metadata']['git_info']['CommitTitle'] = data['metadata']['git_info'].pop(
+ 'commit_title')
+ data['metadata']['git_info']['CommitID'] = data['metadata']['git_info'].pop(
+ 'commit_id')
+ data['metadata']['git_info']['GerritID'] = data['metadata']['git_info'].pop(
+ 'gerrit_id')
+ data['metadata']['git_info']['CommitURL'] = data['metadata']['git_info'].pop(
+ 'commit_url')
+ data['metadata']['git_info']['Branch'] = data['metadata']['git_info'].pop(
+ 'branch')
+
+ data['metadata']['build_info']['BuildType'] = data['metadata']['build_info'].pop(
+ 'build_type')
+ data['metadata']['build_info']['CmakeConfig'] = data['metadata']['build_info'].pop(
+ 'cmake_config')
+ data['metadata']['build_info']['Compiler'] = data['metadata']['build_info'].pop(
+ 'compiler')
+ data['metadata']['build_info']['Target'] = data['metadata']['build_info'].pop(
+ 'target')
+
+ ret = {}
+ ret['tags'] = {}
+ ret['fields'] = {}
+
+ ret['measurement'] = 'TFM_ImageSize_Statistics'
+
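+ # Each file entry contributes three fields named after the file (without
+ # extension) with the suffixes _b, _d and _t for its bss, data and text
+ # sizes respectively.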
+ for file_info in data['data']:
+ ret['fields'][file_info['file'].rsplit(
+ '.', 1)[0] + '_b'] = file_info['bss']
+ ret['fields'][file_info['file'].rsplit(
+ '.', 1)[0] + '_d'] = file_info['data']
+ ret['fields'][file_info['file'].rsplit(
+ '.', 1)[0] + '_t'] = file_info['text']
+
+ ret['tags']['DataProducer'] = str(data['metadata']['DataProducer'])
+
+ ret['time'] = str(data['metadata']['git_info']['commit_time'])
+
+ for key in data['metadata']['git_info']:
+ if key == 'commit_time':
+ continue
+ ret['tags'][key] = str(data['metadata']['git_info'][key])
+
+ for key in data['metadata']['build_info']:
+ ret['tags'][key] = str(data['metadata']['build_info'][key])
+
+ print(ret)
+
+ return [ret]
+
+ @staticmethod
+ def convert_data(data):
+ """
+ Convert data to a dictionary containing measurement
+ name, fields and tags. It is required by InfluxDB.
+
+ :param data: data to be converted to InfluxDB format
+ """
+
+ if data['metadata']['metrics'] == 'tfm_imagesize':
+ ret = DataConverter.convert_tfm_imagesize_data(data)
+ else:
+ ret = data['data']
+
+ return ret
diff --git a/quality-metrics/broker-component/data_validator.py b/quality-metrics/broker-component/data_validator.py
new file mode 100644
index 0000000..a67b86b
--- /dev/null
+++ b/quality-metrics/broker-component/data_validator.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" data_validator.py:
+
+ JSON data validator class. This class is aimed at validating the JSON
+ data sent in the curl command against the JSON schema, so that it is
+ ensured that the required data is received in the agreed-upon format
+ before it is pushed to the database.
+
+"""
+
+import sys
+import json
+import os.path
+import constants
+import jsonschema
+from jsonschema import validate
+
+
+class DataValidator:
+ @staticmethod
+ def validate_request_sanity(data_dict):
+ """
+ Input sanitisation/authentication in the application flow
+
+ :param: data_dict: Data to be validated
+ :return: Validation info and error code
+ """
+ if 'metrics' in data_dict['metadata'] and 'api_version' in data_dict and \
+ data_dict['metadata']['metrics'] in constants.VALID_METRICS:
+ if data_dict['api_version'] not in constants.SUPPORTED_API_VERSIONS:
+ return 'Incorrect API version', 401
+
+ filename = 'metrics-schemas/' + data_dict['metadata']['metrics'] + '_schema_' + \
+ data_dict['api_version'].replace(".", "_") + '.json'
+ if not os.path.exists(filename):
+ return filename + ' does not exist', 501
+
+ try:
+ with open(filename, 'r') as handle:
+ parsed = json.load(handle)
+ validate(data_dict, parsed)
+ sys.stdout.write('Record OK\n')
+ return 'OK', 204
+ except jsonschema.exceptions.ValidationError as ve:
+ sys.stdout.write('Record ERROR\n')
+ sys.stderr.write(str(ve) + "\n")
+ return 'Incorrect JSON Schema: ' + \
+ str(ve).split('\n', 1)[0], 400
+ else:
+ return 'Invalid schema - metrics or api version missing\n', 401
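+
+
+# Illustrative usage (a sketch, not part of the release flow): the broker calls
+# this with the parsed request body, e.g.
+#
+#   msg, code = DataValidator.validate_request_sanity(json.loads(body))
+#   # msg == 'OK' and code == 204 when the payload matches its metrics schema.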
diff --git a/quality-metrics/broker-component/db_manager.py b/quality-metrics/broker-component/db_manager.py
new file mode 100644
index 0000000..8f2d77c
--- /dev/null
+++ b/quality-metrics/broker-component/db_manager.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python3
+
+from __future__ import print_function
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" db_manager.py:
+
+ Database interface abstraction class. This class is aimed at providing an
+ asynchronous interface between a blocking IO resource (database) and a
+ public interface designed for high concurrency.
+
+"""
+
+import time
+import threading
+from queue import Queue
+from pprint import pformat
+from influxdb import InfluxDBClient
+
+import constants
+from data_converter import DataConverter
+
+
+class dbManager(object):
+ def __init__(self,
+ host=constants.HOST,
+ port=constants.PORT,
+ user=None,
+ password=None,
+ buff_size=constants.BUFF_SIZE,
+ poll_delay=constants.POLL_DELAY,
+ app=None):
+ self.queue_buff_sz = buff_size
+ self.poll_delay = poll_delay
+
+ self.db_host = host
+ self.db_port = port
+ self.db_user = user
+ self.db_pass = password
+ self.write_queue = Queue(maxsize=self.queue_buff_sz)
+ self.stop_threads = False
+ self.app = app
+
+ for key in constants.DATABASE_DICT:
+ client = InfluxDBClient(host=self.db_host,
+ port=self.db_port,
+ username=self.db_user,
+ password=self.db_pass,
+ database=constants.DATABASE_DICT[key])
+ setattr(self, key.lower() + '_client', client)
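+ # As an illustration (constants.py is not shown here): a hypothetical entry
+ # such as {'TFA_CODE_CHURN': 'TFA_CodeChurn'} in DATABASE_DICT would create
+ # self.tfa_code_churn_client bound to the 'TFA_CodeChurn' database, which
+ # get_db_client() later resolves from the 'metrics' name.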
+
+ def store(self, data):
+ """
+ Places data in the FIFO to be broadcast when
+ the database is not busy
+
+ :param: data: Data to be placed in FIFO
+ """
+ validation = 'OK'
+ err_code = 204
+ try:
+ self.write_queue.put(data)
+ except Exception as e:
+ validation = "** Write to Queue Failed. ** "
+ err_code = 402
+ print(validation, e)
+ self.app.logger.error(pformat({"error_code": err_code,
+ "info": validation, "exception": e}))
+ return validation, err_code
+
+ def get_db_client(self, metrics):
+ if "stats" in metrics:
+ client = metrics.replace("_stats", "") + "_client"
+ elif "tracking" in metrics:
+ client = metrics.replace("_tracking", "") + "_client"
+ else:
+ client = metrics + "_client"
+ if hasattr(self, client):
+ return getattr(self, client)
+ else:
+ self.app.logger.error("Invalid metrics %" % (metrics))
+
+ def write_db_direct(self, data):
+ """
+ Write data to the database (will block if the database is busy)
+
+ :param: data: data to be written to database
+ """
+ db_client = self.get_db_client(data['metadata']['metrics'])
+
+ converted_data = DataConverter.convert_data(data)
+
+ if db_client:
+ if db_client.write_points(converted_data):
+ self.app.logger.info(
+ "Writing to InfluxDB hosted at %s "
+ "has been successful for %s!" %
+ (self.db_host, data['metadata']['metrics']))
+ else:
+ self.app.logger.error(
+ "Writing to InfluxDB hosted at %s "
+ "has FAILED for %s!" %
+ (self.db_host, data['metadata']['metrics']))
+ else:
+ self.app.logger.error(
+ "%s database not connected.." %
+ data['metadata']['metrics'])
+
+ def start_daemon(self):
+ """
+ Spawn a new thread that will consume data in the write FIFO
+ and place it into the database
+ """
+
+ def write_db_loop():
+
+ while True:
+ try:
+ time.sleep(self.poll_delay)
+ if self.stop_threads:
+ self.app.logger.info(
+ "\n ** Shutting Down Database Writer **")
+ return
+ elif self.write_queue.qsize() > 0:
+ dt = self.write_queue.get()
+ # Write the data to the database
+ self.write_db_direct(dt)
+ self.write_queue.task_done()
+ except Exception as e:
+ self.app.logger.error(
+ "** DB Writer Thread Failed. ** \n%s" % e)
+
+ self.db_write_thread = threading.Thread(target=write_db_loop)
+ self.db_write_thread.daemon = True
+ self.db_write_thread.start()
+ return self
+
+ def stop(self):
+ """
+ Flag which terminates db_write_threads loop
+ """
+ self.app.logger.info("** Setting stop_threads to True **")
+ self.stop_threads = True
diff --git a/quality-metrics/broker-component/docker-compose.yml b/quality-metrics/broker-component/docker-compose.yml
new file mode 100644
index 0000000..bb91628
--- /dev/null
+++ b/quality-metrics/broker-component/docker-compose.yml
@@ -0,0 +1,58 @@
+#======================================================================
+# Copyright (c) 2020, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#======================================================================
+
+#======================================================================
+# FILE: docker-compose.yml
+#
+# DESCRIPTION: This is a docker compose file for the deployment of all
+# the components involved in the quality metrics collection setup. It
+# brings up Grafana, InfluxDB and the broker component as a multi-
+# container deployment.
+#======================================================================
+
+version: "2"
+services:
+ grafana:
+ image: grafana/grafana
+ container_name: grafana_container
+ restart: always
+ ports:
+ - 3000:3000
+ networks:
+ - metrics_network
+ volumes:
+ - grafana-volume:/var/lib/grafana
+ - ./grafana-provisioning/:/etc/grafana/provisioning
+ environment:
+ - GF_INSTALL_PLUGINS=grafana-piechart-panel
+ influxdb:
+ image: influxdb
+ container_name: influxdb_container
+ restart: always
+ ports:
+ - 8086:8086
+ networks:
+ - metrics_network
+ volumes:
+ - influxdb-volume:/var/lib/influxdb
+ web:
+ build: .
+ ports:
+ - "5000:5000"
+ networks:
+ - metrics_network
+ depends_on:
+ - influxdb
+ links:
+ - influxdb:influx
+networks:
+ metrics_network:
+volumes:
+ grafana-volume:
+ external: true
+ influxdb-volume:
+ external: true
+
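+# Illustrative bring-up (an assumption, not enforced by this file): the external
+# volumes must exist before the first run, e.g.
+#   docker volume create grafana-volume
+#   docker volume create influxdb-volume
+#   docker-compose up -d --build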
diff --git a/quality-metrics/broker-component/metrics-schemas/tfa_code_churn_schema_1_0.json b/quality-metrics/broker-component/metrics-schemas/tfa_code_churn_schema_1_0.json
new file mode 100644
index 0000000..8935e76
--- /dev/null
+++ b/quality-metrics/broker-component/metrics-schemas/tfa_code_churn_schema_1_0.json
@@ -0,0 +1,58 @@
+{
+ "type" : "object",
+ "properties" : {
+ "api_version" : {
+ "type" : "string"
+ },
+ "metadata" : {
+ "type" : "object",
+ "properties" : {
+ "metrics" : {
+ "type" : "string"
+ }
+ }
+ },
+ "data" : {
+ "type" : "array",
+ "minItems": 1,
+ "items" : {
+ "type" : "object",
+ "properties" :{
+ "measurement" : {
+ "type" : "string"
+ },
+ "fields" : {
+ "type" : "object",
+ "properties" : {
+ "Lines_of_Change" : {
+ "type" : "number"
+ }
+ },
+ "required" : ["Lines_of_Change"]
+ },
+ "tags" : {
+ "type" : "object",
+ "properties" : {
+ "Git_Tag_Date" : {
+ "type" : "number"
+ },
+ "Base_Tag" : {
+ "type" : "string"
+ },
+ "Target_Tag" : {
+ "type" : "string"
+ }
+ },
+ "required" : ["Git_Tag_Date", "Base_Tag", "Target_Tag"],
+ "additionalProperties": false
+ },
+ "time" : {
+ }
+ }
+ },
+ "required" : ["measurement", "fields", "tags", "time"],
+ "additionalProperties": false
+ }
+ },
+ "required" : ["api_version", "metadata", "data"]
+}
diff --git a/quality-metrics/broker-component/metrics-schemas/tfa_complexity_stats_schema_1_0.json b/quality-metrics/broker-component/metrics-schemas/tfa_complexity_stats_schema_1_0.json
new file mode 100644
index 0000000..0c22160
--- /dev/null
+++ b/quality-metrics/broker-component/metrics-schemas/tfa_complexity_stats_schema_1_0.json
@@ -0,0 +1,70 @@
+{
+ "type" : "object",
+ "properties" : {
+ "api_version" : {
+ "type" : "string"
+ },
+ "metadata" : {
+ "type" : "object",
+ "properties" : {
+ "metrics" : {
+ "type" : "string"
+ }
+ }
+ },
+ "data" : {
+ "type" : "array",
+ "minItems": 1,
+ "items" : {
+ "type" : "object",
+ "properties" :{
+ "measurement" : {
+ "type" : "string"
+ },
+ "fields" : {
+ "type" : "object",
+ "properties" : {
+ "Function_ID" : {
+ "type" : "string"
+ },
+ "Score" : {
+ "type" : "number"
+ },
+ "Whitelisted" : {
+ "type" : "string"
+ },
+ "Threshold" : {
+ "type" : "number"
+ }
+ },
+ "required" : ["Function_ID", "Score", "Whitelisted", "Threshold"]
+ },
+ "tags" : {
+ "type" : "object",
+ "properties" : {
+ "Git_Tag_Date" : {
+ "type" : "number"
+ },
+ "Base_Tag" : {
+ "type" : "string"
+ },
+ "Target_Tag" : {
+ "type" : "string"
+ },
+ "Location" : {
+ "type" : "string"
+ }
+ },
+ "required" : ["Git_Tag_Date", "Base_Tag", "Target_Tag", "Location"],
+ "additionalProperties": false
+ },
+ "time" : {
+ }
+ }
+ },
+ "required" : ["measurement", "fields", "tags", "time"],
+ "additionalProperties": false
+ }
+ },
+ "required" : ["api_version", "metadata", "data"]
+}
\ No newline at end of file
diff --git a/quality-metrics/broker-component/metrics-schemas/tfa_complexity_tracking_schema_1_0.json b/quality-metrics/broker-component/metrics-schemas/tfa_complexity_tracking_schema_1_0.json
new file mode 100644
index 0000000..509a966
--- /dev/null
+++ b/quality-metrics/broker-component/metrics-schemas/tfa_complexity_tracking_schema_1_0.json
@@ -0,0 +1,61 @@
+{
+ "type" : "object",
+ "properties" : {
+ "api_version" : {
+ "type" : "string"
+ },
+ "metadata" : {
+ "type" : "object",
+ "properties" : {
+ "metrics" : {
+ "type" : "string"
+ }
+ }
+ },
+ "data" : {
+ "type" : "array",
+ "minItems": 1,
+ "items" : {
+ "type" : "object",
+ "properties" :{
+ "measurement" : {
+ "type" : "string"
+ },
+ "fields" : {
+ "type" : "object",
+ "properties" : {
+ "Functions_Exceeding_Threshold_Not_Whitelisted" : {
+ "type" : "number"
+ },
+ "Whitelisted" : {
+ "type" : "string"
+ },
+ "Threshold" : {
+ "type" : "number"
+ }
+ },
+ "required" : ["Functions_Exceeding_Threshold_Not_Whitelisted", "Whitelisted", "Threshold"]
+ },
+ "tags" : {
+ "type" : "object",
+ "properties" : {
+ "Git_Tag_Date" : {
+ "type" : "number"
+ },
+ "Target_Tag" : {
+ "type" : "string"
+ }
+ },
+ "required" : ["Git_Tag_Date", "Target_Tag"],
+ "additionalProperties": false
+ },
+ "time" : {
+ }
+ }
+ },
+ "required" : ["measurement", "fields", "tags", "time"],
+ "additionalProperties": false
+ }
+ },
+ "required" : ["api_version", "metadata", "data"]
+}
diff --git a/quality-metrics/broker-component/metrics-schemas/tfa_defects_stats_schema_1_0.json b/quality-metrics/broker-component/metrics-schemas/tfa_defects_stats_schema_1_0.json
new file mode 100644
index 0000000..04dcdf8
--- /dev/null
+++ b/quality-metrics/broker-component/metrics-schemas/tfa_defects_stats_schema_1_0.json
@@ -0,0 +1,58 @@
+{
+ "type" : "object",
+ "properties" : {
+ "api_version" : {
+ "type" : "string"
+ },
+ "metadata" : {
+ "type" : "object",
+ "properties" : {
+ "metrics" : {
+ "type" : "string"
+ }
+ }
+ },
+ "data" : {
+ "type" : "array",
+ "minItems": 1,
+ "items" : {
+ "type" : "object",
+ "properties" :{
+ "measurement" : {
+ "type" : "string"
+ },
+ "fields" : {
+ "type" : "object",
+ "properties" : {
+ "Title" : {
+ "type" : "string"
+ },
+ "Issue_Status" : {
+ "type" : "string"
+ },
+ "URL" : {
+ "type" : "string"
+ }
+ },
+ "required" : ["Title", "Issue_Status", "URL"]
+ },
+ "tags" : {
+ "type" : "object",
+ "properties" : {
+ "Defect_ID" : {
+ "type" : "string"
+ },
+ "Measured_Date" : {
+ }
+ },
+ "required" : ["Defect_ID", "Measured_Date"],
+ "additionalProperties": false
+ }
+ }
+ },
+ "required" : ["measurement", "fields", "tags"],
+ "additionalProperties": false
+ }
+ },
+ "required" : ["api_version", "metadata", "data"]
+}
diff --git a/quality-metrics/broker-component/metrics-schemas/tfa_defects_tracking_schema_1_0.json b/quality-metrics/broker-component/metrics-schemas/tfa_defects_tracking_schema_1_0.json
new file mode 100644
index 0000000..7a0d855
--- /dev/null
+++ b/quality-metrics/broker-component/metrics-schemas/tfa_defects_tracking_schema_1_0.json
@@ -0,0 +1,52 @@
+{
+ "type" : "object",
+ "properties" : {
+ "api_version" : {
+ "type" : "string"
+ },
+ "metadata" : {
+ "type" : "object",
+ "properties" : {
+ "metrics" : {
+ "type" : "string"
+ }
+ }
+ },
+ "data" : {
+ "type" : "array",
+ "minItems": 1,
+ "items" : {
+ "type" : "object",
+ "properties" :{
+ "measurement" : {
+ "type" : "string"
+ },
+ "fields" : {
+ "type" : "object",
+ "properties" : {
+ "Issue_Status" : {
+ "type" : "string"
+ },
+ "Number_of_Defects" : {
+ "type" : "number"
+ }
+ },
+ "required" : ["Issue_Status", "Number_of_Defects"]
+ },
+ "tags" : {
+ "type" : "object",
+ "properties" : {
+ "Measured_Date" : {
+ }
+ },
+ "required" : ["Measured_Date"],
+ "additionalProperties": false
+ }
+ }
+ },
+ "required" : ["measurement", "fields", "tags"],
+ "additionalProperties": false
+ }
+ },
+ "required" : ["api_version", "metadata", "data"]
+}
diff --git a/quality-metrics/broker-component/metrics-schemas/tfa_image_size_schema_1_0.json b/quality-metrics/broker-component/metrics-schemas/tfa_image_size_schema_1_0.json
new file mode 100644
index 0000000..e64e1f8
--- /dev/null
+++ b/quality-metrics/broker-component/metrics-schemas/tfa_image_size_schema_1_0.json
@@ -0,0 +1,58 @@
+{
+ "type" : "object",
+ "properties" : {
+ "api_version" : {
+ "type" : "string"
+ },
+ "metadata" : {
+ "type" : "object",
+ "properties" : {
+ "metrics" : {
+ "type" : "string"
+ }
+ }
+ },
+ "data" : {
+ "type" : "array",
+ "minItems": 1,
+ "items" : {
+ "type" : "object",
+ "properties" :{
+ "measurement" : {
+ "type" : "string"
+ },
+ "fields" : {
+ "type" : "object",
+ "minProperties": 1,
+ "patternProperties" : {
+ "^BL*$" : {
+ "type" : "number"
+ }
+ }
+ },
+ "tags" : {
+ "type" : "object",
+ "properties" : {
+ "BinMode" : {
+ "type" : "string"
+ },
+ "CommitID" : {
+ "type" : "string"
+ },
+ "CommitTitle" : {
+ "type" : "string"
+ }
+ },
+ "required" : ["BinMode", "CommitID", "CommitTitle"],
+ "additionalProperties": false
+ },
+ "time" : {
+ }
+ }
+ },
+ "required" : ["measurement", "fields", "tags", "time"],
+ "additionalProperties": false
+ }
+ },
+ "required" : ["api_version", "metadata", "data"]
+}
diff --git a/quality-metrics/broker-component/metrics-schemas/tfa_misra_defects_schema_1_0.json b/quality-metrics/broker-component/metrics-schemas/tfa_misra_defects_schema_1_0.json
new file mode 100644
index 0000000..9109bd9
--- /dev/null
+++ b/quality-metrics/broker-component/metrics-schemas/tfa_misra_defects_schema_1_0.json
@@ -0,0 +1,67 @@
+{
+ "type" : "object",
+ "properties" : {
+ "api_version" : {
+ "type" : "string"
+ },
+ "metadata" : {
+ "type" : "object",
+ "properties" : {
+ "metrics" : {
+ "type" : "string"
+ }
+ }
+ },
+ "data" : {
+ "type" : "array",
+ "minItems": 1,
+ "items" : {
+ "type" : "object",
+ "properties" :{
+ "measurement" : {
+ "type" : "string"
+ },
+ "fields" : {
+ "type" : "object",
+ "properties" : {
+ "TotalDefects" : {
+ "type" : "number"
+ },
+ "MandatoryDefects" : {
+ "type" : "number"
+ },
+ "RequiredDefects" : {
+ "type" : "number"
+ },
+ "AdvisoryDefects" : {
+ "type" : "number"
+ }
+ },
+ "required" : ["TotalDefects", "MandatoryDefects", "RequiredDefects", "AdvisoryDefects"]
+ },
+ "tags" : {
+ "type" : "object",
+ "properties" : {
+ "BinMode" : {
+ "type" : "string"
+ },
+ "CommitID" : {
+ "type" : "string"
+ },
+ "CommitTitle" : {
+ "type" : "string"
+ }
+ },
+ "required" : ["BinMode", "CommitID", "CommitTitle"],
+ "additionalProperties": false
+ },
+ "time" : {
+ }
+ }
+ },
+ "required" : ["measurement", "fields", "tags", "time"],
+ "additionalProperties": false
+ }
+ },
+ "required" : ["api_version", "metadata", "data"]
+}
diff --git a/quality-metrics/broker-component/metrics-schemas/tfa_rtinstr_schema_1_0.json b/quality-metrics/broker-component/metrics-schemas/tfa_rtinstr_schema_1_0.json
new file mode 100644
index 0000000..1801814
--- /dev/null
+++ b/quality-metrics/broker-component/metrics-schemas/tfa_rtinstr_schema_1_0.json
@@ -0,0 +1,82 @@
+{
+ "type" : "object",
+ "properties" : {
+ "api_version" : {
+ "type" : "string"
+ },
+ "metadata" : {
+ "type" : "object",
+ "properties" : {
+ "metrics" : {
+ "type" : "string"
+ }
+ }
+ },
+ "data" : {
+ "type" : "array",
+ "minItems": 1,
+ "items" : {
+ "type" : "object",
+ "properties" :{
+ "measurement" : {
+ "type" : "string"
+ },
+ "fields" : {
+ "type" : "object",
+ "properties" : {
+ "Latency_EL3Entry_EL3Exit" : {
+ "type" : "number"
+ },
+ "Latency_EL3Entry_CPUPowerDown" : {
+ "type" : "number"
+ },
+ "Latency_CPUWakeup_EL3Exit" : {
+ "type" : "number"
+ },
+ "CacheFlush" : {
+ "type" : "number"
+ }
+ },
+ "oneOf": [
+ { "required": [
+ "Latency_EL3Entry_EL3Exit"
+ ]},
+ { "required": [
+ "Latency_EL3Entry_CPUPowerDown",
+ "Latency_CPUWakeup_EL3Exit",
+ "CacheFlush"
+ ]}
+ ]
+ },
+ "tags" : {
+ "type" : "object",
+ "properties" : {
+ "CommitID" : {
+ "type" : "string"
+ },
+ "CommitTitle" : {
+ "type" : "string"
+ },
+ "TC_Name" : {
+ "type" : "string"
+ },
+ "Cluster_ID" : {
+ "type" : "number"
+ },
+ "CPU_Core" : {
+ "type" : "number"
+ }
+ },
+ "required" : ["CommitID", "CommitTitle", "TC_Name", "Cluster_ID", "CPU_Core"],
+ "additionalProperties": false
+ },
+ "time" : {
+ }
+ }
+ },
+ "required" : ["measurement", "fields", "tags", "time"],
+ "additionalProperties": false
+ }
+ },
+ "required" : ["api_version", "metadata", "data"]
+}
diff --git a/quality-metrics/broker-component/metrics-schemas/tfm_code_churn_schema_1_0.json b/quality-metrics/broker-component/metrics-schemas/tfm_code_churn_schema_1_0.json
new file mode 100644
index 0000000..a181c31
--- /dev/null
+++ b/quality-metrics/broker-component/metrics-schemas/tfm_code_churn_schema_1_0.json
@@ -0,0 +1,58 @@
+{
+ "type" : "object",
+ "properties" : {
+ "api_version" : {
+ "type" : "string"
+ },
+ "metadata" : {
+ "type" : "object",
+ "properties" : {
+ "metrics" : {
+ "type" : "string"
+ }
+ }
+ },
+ "data" : {
+ "type" : "array",
+ "minItems": 1,
+ "items" : {
+ "type" : "object",
+ "properties" :{
+ "measurement" : {
+ "type" : "string"
+ },
+ "fields" : {
+ "type" : "object",
+ "properties" : {
+ "Lines_of_Change" : {
+ "type" : "number"
+ }
+ },
+ "required" : ["Lines_of_Change"]
+ },
+ "tags" : {
+ "type" : "object",
+ "properties" : {
+ "Git_Tag_Date" : {
+ "type" : "string"
+ },
+ "Base_Tag" : {
+ "type" : "string"
+ },
+ "Target_Tag" : {
+ "type" : "string"
+ }
+ },
+ "required" : ["Git_Tag_Date", "Base_Tag", "Target_Tag"],
+ "additionalProperties": false
+ },
+ "time" : {
+ }
+ }
+ },
+ "required" : ["measurement", "fields", "tags", "time"],
+ "additionalProperties": false
+ }
+ },
+ "required" : ["api_version", "metadata", "data"]
+}
diff --git a/quality-metrics/broker-component/metrics-schemas/tfm_complexity_stats_schema_1_0.json b/quality-metrics/broker-component/metrics-schemas/tfm_complexity_stats_schema_1_0.json
new file mode 100644
index 0000000..8390692
--- /dev/null
+++ b/quality-metrics/broker-component/metrics-schemas/tfm_complexity_stats_schema_1_0.json
@@ -0,0 +1,73 @@
+{
+ "type" : "object",
+ "properties" : {
+ "api_version" : {
+ "type" : "string"
+ },
+ "metadata" : {
+ "type" : "object",
+ "properties" : {
+ "metrics" : {
+ "type" : "string"
+ }
+ }
+ },
+ "data" : {
+ "type" : "array",
+ "minItems": 1,
+ "items" : {
+ "type" : "object",
+ "properties" :{
+ "measurement" : {
+ "type" : "string"
+ },
+ "fields" : {
+ "type" : "object",
+ "properties" : {
+ "Function" : {
+ "type" : "string"
+ },
+ "Score" : {
+ "type" : "number"
+ },
+ "Whitelisted" : {
+ "type" : "string"
+ },
+ "Threshold" : {
+ "type" : "number"
+ },
+ "Location" : {
+ "type" : "string"
+ }
+ },
+ "required" : ["Function", "Whitelisted", "Threshold", "Score", "Location"]
+ },
+ "tags" : {
+ "type" : "object",
+ "properties" : {
+ "Git_Tag_Date" : {
+ "type" : "string"
+ },
+ "Target_Tag" : {
+ "type" : "string"
+ },
+ "Base_Tag" : {
+ "type" : "string"
+ },
+ "Location_Tag" : {
+ "type" : "string"
+ }
+ },
+ "required" : ["Git_Tag_Date", "Target_Tag", "Base_Tag", "Location_Tag"],
+ "additionalProperties": false
+ },
+ "time" : {
+ }
+ }
+ },
+ "required" : ["measurement", "fields", "tags", "time"],
+ "additionalProperties": false
+ }
+ },
+ "required" : ["api_version", "metadata", "data"]
+}
diff --git a/quality-metrics/broker-component/metrics-schemas/tfm_defects_stats_schema_1_0.json b/quality-metrics/broker-component/metrics-schemas/tfm_defects_stats_schema_1_0.json
new file mode 100644
index 0000000..b5f819d
--- /dev/null
+++ b/quality-metrics/broker-component/metrics-schemas/tfm_defects_stats_schema_1_0.json
@@ -0,0 +1,65 @@
+{
+ "type" : "object",
+ "properties" : {
+ "api_version" : {
+ "type" : "string"
+ },
+ "metadata" : {
+ "type" : "object",
+ "properties" : {
+ "metrics" : {
+ "type" : "string"
+ }
+ }
+ },
+ "data" : {
+ "type" : "array",
+ "minItems": 1,
+ "items" : {
+ "type" : "object",
+ "properties" :{
+ "measurement" : {
+ "type" : "string"
+ },
+ "fields" : {
+ "type" : "object",
+ "properties" : {
+ "Status" : {
+ "type" : "string"
+ },
+ "Priority" : {
+ "type" : "string"
+ },
+ "Summary" : {
+ "type" : "string"
+ },
+ "URL" : {
+ "type" : "string"
+ },
+ "Existing_Defect" : {
+ "type" : "number"
+ }
+ },
+ "required" : ["Status", "Priority", "Summary", "URL", "Existing_Defect"]
+ },
+ "tags" : {
+ "type" : "object",
+ "properties" : {
+ "Defect_ID" : {
+ "type" : "string"
+ },
+ "Measured_Date" : {
+ "type" : "string"
+ }
+ },
+ "required" : ["Defect_ID", "Measured_Date"],
+ "additionalProperties": false
+ }
+ }
+ },
+ "required" : ["measurement", "fields", "tags"],
+ "additionalProperties": false
+ }
+ },
+ "required" : ["api_version", "metadata", "data"]
+}
diff --git a/quality-metrics/broker-component/metrics-schemas/tfm_image_size_schema_1_0.json b/quality-metrics/broker-component/metrics-schemas/tfm_image_size_schema_1_0.json
new file mode 100644
index 0000000..30a2ffc
--- /dev/null
+++ b/quality-metrics/broker-component/metrics-schemas/tfm_image_size_schema_1_0.json
@@ -0,0 +1,88 @@
+{
+ "type" : "object",
+ "properties" : {
+ "api_version" : {
+ "type" : "string"
+ },
+ "metadata" : {
+ "type" : "object",
+ "properties" : {
+ "metrics" : {
+ "type" : "string"
+ },
+ "data_producer" : {
+ "type" : "string"
+ },
+ "git_info" : {
+ "type" : "object",
+ "properties" : {
+ "commit_time" : {
+ "type" : "string"
+ },
+ "commit_title" : {
+ "type" : "string"
+ },
+ "commit_id" : {
+ "type" : "string"
+ },
+ "commit_url" : {
+ "type" : "string"
+ },
+ "branch" : {
+ "type" : "string"
+ },
+ "gerrit_id" : {
+ "type" : "string"
+ }
+ },
+ "required" : ["commit_time", "commit_title", "commit_id", "commit_url", "branch"],
+ "additionalProperties": false
+ },
+ "build_info" : {
+ "type" : "object",
+ "properties" : {
+ "build_type" : {
+ "type" : "string"
+ },
+ "cmake_config" : {
+ "type" : "string"
+ },
+ "compiler" : {
+ "type" : "string"
+ },
+ "target" : {
+ "type" : "string"
+ }
+ },
+ "required" : ["build_type", "cmake_config", "compiler", "target"]
+ }
+ },
+ "required" : ["metrics", "build_info", "git_info", "data_producer"],
+ "additionalProperties": false
+ },
+ "data" : {
+ "type" : "array",
+ "minItems" : 1,
+ "items" : {
+ "type" : "object",
+ "properties" : {
+ "file" : {
+ "type" : "string"
+ },
+ "bss" : {
+ "type" : "number"
+ },
+ "data" : {
+ "type" : "number"
+ },
+ "text" : {
+ "type" : "number"
+ }
+ },
+ "required" : ["file", "bss", "data", "text"],
+ "additionalProperties": false
+ }
+ }
+ },
+ "required" : ["metadata", "data", "api_version"]
+}
diff --git a/quality-metrics/broker-component/quality_metrics_server.py b/quality-metrics/broker-component/quality_metrics_server.py
new file mode 100644
index 0000000..f68f4b2
--- /dev/null
+++ b/quality-metrics/broker-component/quality_metrics_server.py
@@ -0,0 +1,154 @@
+#!/usr/bin/env python3
+
+from __future__ import print_function
+from data_validator import DataValidator
+import credentials
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" quality_metrics_server.py:
+
+ This is the broker component which accepts the data from the data
+ generator scripts, performs a basic sanity check and pushes
+ the data to InfluxDB for visualisation with the Grafana component.
+ It is not mandatory to push data via the data generator scripts.
+ The request to push data to the database, in this case InfluxDB,
+ is expected to be a POST request with the right credentials and
+ should be in the agreed-upon format.
+
+"""
+
+from pprint import pprint
+from pprint import pformat
+from db_manager import dbManager
+from flask_jwt import JWT, jwt_required
+from flask import Flask, jsonify, request
+from werkzeug.security import safe_str_cmp
+from logging.handlers import RotatingFileHandler
+
+import sys
+import json
+import random
+import logging
+import argparse
+import datetime
+
+import constants
+""" It is suggested to keep credentials.py is kept locally in the
+ system where server is running. This file has been provided
+ for reference.
+"""
+
+username_table = {u.username: u for u in credentials.users}
+userid_table = {u.id: u for u in credentials.users}
+
+
+def authenticate(username, password):
+ user = username_table.get(username, None)
+ if user and safe_str_cmp(
+ user.password.encode('utf-8'),
+ password.encode('utf-8')):
+ return user
+
+
+def identity(payload):
+ user_id = payload['identity']
+ return userid_table.get(user_id, None)
+
+
+def setup_logging(app):
+ # maxBytes and backupCount values allow the file to roll over at a predetermined size.
+ # When the size is about to be exceeded, the file is closed and a new file is silently
+ # opened for output. Rollover occurs whenever the current log file is nearly maxBytes in length.
+ # When backupCount is non-zero, the system will save old log files by appending the extensions
+ # ‘.1’, ‘.2’ etc., to the filename.
+ file_handler = RotatingFileHandler(
+ "./flask.log",
+ maxBytes=1024 * 1024 * 1024 * 5,
+ backupCount=5)
+ file_handler.setFormatter(
+ logging.Formatter(
+ '[%(asctime)s][PID:%(process)d][%(levelname)s]'
+ '[%(lineno)s][%(name)s.%(funcName)s()] %(message)s'))
+ file_handler.setLevel(logging.INFO)
+ loggers = [app.logger]
+ for logger in loggers:
+ logger.addHandler(file_handler)
+ app.logger.setLevel(logging.INFO)
+
+
+app = Flask(__name__)
+
+setup_logging(app)
+
+logger = logging.getLogger(__name__)
+
+app.debug = True
+app.config['SECRET_KEY'] = credentials.SECRET_KEY
+app.config['JWT_EXPIRATION_DELTA'] = datetime.timedelta(
+ days=constants.JWT_EXPIRATION_DAYS)
+
+dbm = dbManager(app=app).start_daemon()
+
+jwt = JWT(app, authenticate, identity)
+
+# ----------------------- Database Methods ----------------------------------#
+
+
+def store_to_db(data_dict):
+ """
+ Use the database manager to asynchronously update the database
+
+ :param: data_dict: Dictionary containing data to be stored
+ """
+ validation, err_code = dbm.store(data_dict)
+ return validation, err_code
+
+# ----------------------- FLASK API Methods ---------------------------------- #
+
+
+@app.route('/', methods=['POST'])
+@jwt_required()
+def add_db_entry():
+ """
+ Store received data to database if validation is okay
+
+ :return: validation information and error code
+ """
+
+ data = request.get_json()
+ app.logger.debug("Received Data (POST)")
+ app.logger.debug(pformat(data))
+ # Make sure the data is valid
+ validation, err_code = DataValidator.validate_request_sanity(data)
+ if validation == "OK":
+ app.logger.info("<<<<VALIDATION OK>>>>")
+ validation, err_code = store_to_db(data)
+ else:
+ app.logger.error("<<<<VALIDATION NOT OK>>>>")
+ app.logger.error(pformat({"data": validation, "error_code": err_code}))
+ info_json = jsonify({"data": validation, "error_code": err_code})
+ return info_json, err_code
+
+
+@app.route("/")
+def home():
+ info_json = jsonify({"type": "INFO", "data": "Quality Metrics"})
+ return info_json, 200
+
+
+if __name__ == '__main__':
+ try:
+ app.run(host=constants.LISTEN_ALL_IPS, port=5000)
+ except Exception as ex:
+ template = "An exception of type {0} occurred. Arguments:\n{1!r}"
+ message = template.format(type(ex).__name__, ex.args)
+ app.logger.error("message")
+ dbm.stop()
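+
+# Illustrative client flow (a sketch based on Flask-JWT defaults, not part of the
+# server itself): a data generator would typically obtain a token from /auth and
+# then POST a metrics JSON file, e.g.
+#
+#   curl -X POST -H "Content-Type: application/json" \
+#        -d '{"username": "metrics_1", "password": "metrics_pass_1"}' \
+#        http://<host>:5000/auth
+#   # -> {"access_token": "<JWT>"}
+#   curl -X POST -H "Content-Type: application/json" \
+#        -H "Authorization: JWT <JWT>" -d @code_churn.json http://<host>:5000/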
diff --git a/quality-metrics/broker-component/requirements.txt b/quality-metrics/broker-component/requirements.txt
new file mode 100644
index 0000000..115caa2
--- /dev/null
+++ b/quality-metrics/broker-component/requirements.txt
@@ -0,0 +1,35 @@
+#======================================================================
+# Copyright (c) 2020, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#======================================================================
+
+#===============================================================================
+# FILE: requirements.txt
+#
+# DESCRIPTION: Requirements for metrics server
+#===============================================================================
+
+attrs==19.3.0
+certifi==2020.4.5.2
+chardet==3.0.4
+click==7.1.2
+Flask==1.1.2
+Flask-JWT==0.3.2
+idna==2.9
+importlib-metadata==1.6.1
+influxdb==5.3.0
+itsdangerous==1.1.0
+Jinja2==2.11.2
+jsonschema==3.2.0
+MarkupSafe==1.1.1
+msgpack==0.6.1
+PyJWT==1.4.2
+pyrsistent==0.16.0
+python-dateutil==2.8.1
+pytz==2020.1
+requests==2.23.0
+six==1.15.0
+urllib3==1.25.9
+Werkzeug==1.0.1
+zipp==3.1.0
diff --git a/quality-metrics/data-generator/common_metrics/__init__.py b/quality-metrics/data-generator/common_metrics/__init__.py
new file mode 100644
index 0000000..7337f68
--- /dev/null
+++ b/quality-metrics/data-generator/common_metrics/__init__.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" __init__.py:
+
+ __init__.py for common_metrics
+
+"""
+from complexity_parser import *
diff --git a/quality-metrics/data-generator/common_metrics/common_utilities/common_utilities.sh b/quality-metrics/data-generator/common_metrics/common_utilities/common_utilities.sh
new file mode 100755
index 0000000..decb753
--- /dev/null
+++ b/quality-metrics/data-generator/common_metrics/common_utilities/common_utilities.sh
@@ -0,0 +1,164 @@
+#!/usr/bin/env bash
+
+#======================================================================
+# Copyright (c) 2020, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#======================================================================
+
+#===============================================================================
+# FILE: common_utilities.sh
+#
+# DESCRIPTION: Contains common utilities required by all the metrics
+#===============================================================================
+
+# === Function ========================================================
+# NAME: include_variables_file
+# DESCRIPTION: Includes the variables file, specific to repository for
+# which metrics are being computed. For example, include and
+# exclude folders are different for different repositories
+# PARAMETERS:
+# $1: File containing variables specific to the repository for which
+# metrics are computed.
+# =====================================================================
+include_variables_file()
+{
+ . ./"${1}"
+}
+
+
+# === Function ========================================================
+# NAME: cleanup_and_exit
+# DESCRIPTION: Deletes a repository, if it exists, and exits
+# =====================================================================
+cleanup_and_exit()
+{
+ # Delete the cloned repository
+ if [ -d "$REPOSITORY" ]; then
+ printf "Deleting $REPOSITORY...\n"
+ rm -rf $REPOSITORY
+ fi
+
+ printf "Exiting...\n"
+ exit
+}
+
+# === Function ========================================================
+# NAME: generate_code_churn_summary
+# DESCRIPTION: Generates the code churn summary from stats
+# PARAMETER:
+# $1: STATS
+# =====================================================================
+generate_code_churn_summary()
+{
+ INS_DEL_LOC_EXTRACT="[0-9]+ file(s)? changed, ([0-9]+) insertion(s)?\(\+\), ([0-9]+) deletion(s)?\(\-\)"
+ INS_LOC_EXTRACT="[0-9]+ file(s)? changed, ([0-9]+) insertion(s)?\(\+\)"
+ DEL_LOC_EXTRACT="[0-9]+ file(s)? changed, ([0-9]+) deletion(s)?\(\-\)"
+ if [[ $1 =~ ${INS_DEL_LOC_EXTRACT} ]]; then
+ INS=${BASH_REMATCH[2]}
+ DEL=${BASH_REMATCH[4]}
+ elif [[ $1 =~ ${INS_LOC_EXTRACT} ]]; then
+ INS=${BASH_REMATCH[2]}
+ DEL=0
+ elif [[ $1 =~ ${DEL_LOC_EXTRACT} ]]; then
+ INS=0
+ DEL=${BASH_REMATCH[2]}
+ else
+ INS=0
+ DEL=0
+ fi
+
+ CODE_CHURN=$((INS+DEL))
+ echo "$CODE_CHURN"
+}
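+
+# Illustrative example (not part of the metrics flow): for a git diff --stat
+# summary line such as
+#   "3 files changed, 10 insertions(+), 4 deletions(-)"
+# generate_code_churn_summary echoes 14 (insertions + deletions).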
+
+# === Function ========================================================
+# NAME: get_git_tag_date
+# DESCRIPTION: Returns the git tag date, as follows:
+# 1. tagger date is returned for annotated tag
+# 2. creator date is returned for non-annotated tag
+# =====================================================================
+get_git_tag_date()
+{
+ GIT_TAG_DATE_TIME=''
+ GIT_TAG_DATE=''
+
+ if [ -n "$1" ]; then
+ tag=$1
+ else
+ tag=$TARGET_TAG
+ fi
+ # Get tagger date for git tag in YYYY-MM-DD format
+ GIT_TAG_DATE_TIME=$(git rev-parse $tag | xargs git cat-file -p | \
+ awk '/^tagger/ { print strftime("%F",$(NF-1))}')
+ # If tagger date is not returned (in case of non-annotated tag), then get created date
+ if [ -z "${GIT_TAG_DATE}" ]; then
+ printf "\nGit tag date is \"created date\" because $tag is non-annotated...\n"
+ GIT_TAG_DATE_TIME=$(git log -1 --format=%ai $tag)
+ else
+ printf "\nGit tag date is \"tagger date\" because $tag is annotated...\n"
+ fi
+ export GIT_TAG_DATE_TIME
+ arr=($GIT_TAG_DATE_TIME)
+ export GIT_TAG_DATE=${arr[0]}
+}
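+
+# Illustrative example (hypothetical tag): "get_git_tag_date v2.3" exports
+# GIT_TAG_DATE_TIME (e.g. "2020-04-20 18:00:00 +0100") and GIT_TAG_DATE
+# (e.g. "2020-04-20") for use by the callers below.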
+
+# === Function =================================================================
+# NAME: get_base_tag
+# DESCRIPTION: Checks if the target tag exists. If it exists, gets the base tag
+# ==============================================================================
+get_base_tag()
+{
+ # list git tags by commit date and extract the tag strings
+ tagList=$(git tag | xargs -I@ git log --format=format:"%ai @%n" -1 @ | sort | awk '{print $4}')
+
+ tagArray=($tagList)
+ matched=0
+
+ prevTag=""
+ currentTag=""
+ counter=0
+ TAG_PATTERN=$1
+
+ # Check if target tag exists
+ for i in "${tagArray[@]}"; do
+ if [ "$i" == "$tag" ]; then
+ matched=1
+ currentTag=$i
+ break
+ else
+ # If not in form of vXXX.YYY, continue
+ counter=$((counter+1))
+ continue
+ fi
+ done
+
+ if [ $matched -eq 0 ]; then
+ printf "@@ Tag $tag does not exist. Please specify an existing one.\n"
+ echo "Existing Tags:"
+ git tag | xargs -I@ git log --format=format:"%ai @%n" -1 @ | sort | awk '{print $4}'
+ exit
+ fi
+
+ get_git_tag_date "$tag"
+ tag_date_1=$GIT_TAG_DATE
+
+ # Search for previous tag in the form of vXXX.YYY before the current tag
+ # Skip the current tag itself and find the first match
+ START=$((counter-1))
+ for ((i=${START};i>=0;i--)); do
+ temp_tag="${tagArray[$i]}"
+ get_git_tag_date "$temp_tag"
+ tag_date_2=$GIT_TAG_DATE
+ echo "$temp_tag $GIT_TAG_DATE $tag_date_2"
+ if [[ $temp_tag =~ $TAG_PATTERN ]] && [[ "$tag_date_1" != "$tag_date_2" ]]; then
+ prevTag=$temp_tag
+ break
+ fi
+ done
+
+ printf "@@ Tag $tag is valid\n"
+ export TARGET_TAG=$currentTag
+ export BASE_TAG=$prevTag
+ echo "@@ Target tag is $TARGET_TAG ($tag_date_1)"
+ echo "@@ Base tag is $BASE_TAG ($tag_date_2)"
+}
diff --git a/quality-metrics/data-generator/common_metrics/complexity_parser/__init__.py b/quality-metrics/data-generator/common_metrics/complexity_parser/__init__.py
new file mode 100644
index 0000000..01b3878
--- /dev/null
+++ b/quality-metrics/data-generator/common_metrics/complexity_parser/__init__.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" __init__.py:
+
+ __init__.py for complexity parser
+
+"""
+
+__all__ = ["complexity_parser"]
+
+from complexity_parser import ComplexityParser
diff --git a/quality-metrics/data-generator/common_metrics/complexity_parser/complexity_parser.py b/quality-metrics/data-generator/common_metrics/complexity_parser/complexity_parser.py
new file mode 100644
index 0000000..afb3d76
--- /dev/null
+++ b/quality-metrics/data-generator/common_metrics/complexity_parser/complexity_parser.py
@@ -0,0 +1,164 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" complexity_parser.py:
+
+ Complexity parser class. This class is aimed at parsing the complexity
+ log and extracting the functions whose complexity score exceeds the
+ given threshold.
+
+"""
+
+import collections
+import re
+import sys
+
+
+class ComplexityParser(object):
+ """
+ Extract the following data from the complexity logs:
+ - complexity table: {filename: <complexity score>}
+ """
+
+ def __init__(self, complexityLog, threshold):
+ """ class constructor function """
+ self.complexityLog = complexityLog
+ self.complexityDict = collections.OrderedDict()
+ self.threshold = threshold
+
+ self.process_complexity_log()
+ self.process_complexity_data()
+
+ def process_complexity_log(self):
+ """ function to process complexity log and populate the complexity dictionary """
+ with open(self.complexityLog) as fp:
+ for line in fp:
+ scoreRegex = r"([0-9]+)\s+[0-9]+\s+[0-9]+\s+[0-9]+\s+[0-9]+\s+(.*)"
+ m = re.match(scoreRegex, line)
+
+ if m:
+ score = m.group(1)
+
+ self.complexityDict[m.group(2).strip()] = score
+
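+ # Illustrative pmccabe line (an assumption about the exact log format):
+ # "12 14 35 100 40 bl31/bl31_main.c(100): bl31_main"
+ # would be stored as {'bl31/bl31_main.c(100): bl31_main': '12'}.
+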
+ def process_complexity_data(self):
+ """ function to extract the function IDs above the complexity threshold """
+ self.complexityDict = collections.OrderedDict(
+ (k, v) for k, v in self.complexityDict.items() if int(v) >= self.threshold)
+
+ def apply_whitelist(self):
+ """ Add an additional field to indicate whitelist YES/NO """
+ tmpDict = collections.OrderedDict()
+ exclusionDict = collections.OrderedDict()
+
+ # read in the whitelist
+ with open('./whitelist.dat') as f:
+ lines = f.read().splitlines()
+
+ # construct a dictionary for the white list to deal with:
+ # FULL_DIR_FOR_EXCLUSION, FULL_FILE_FOR_EXCLUSION and function
+ for i in lines:
+ tmpK = i.split(':')[0]
+ tmpV = i.split(':')[1]
+ exclusionDict[tmpK] = tmpV
+
+ whitelist_match = 0
+
+ for k, v in self.complexityDict.items():
+ # dealing with whitelist
+ for wlK, wlV in exclusionDict.items():
+
+ if (wlV == "FULL_DIR_FOR_EXCLUSION") or (
+ wlV == "FULL_FILE_FOR_EXCLUSION"):
+ # dealing with FULL_DIR_EXCLUSION and FULL_FILE_FOR_EXCLUSION, here we compare the directory path name or
+ # file name before the ':'
+ if wlK in k.split(':')[0]:
+ whitelist_match = 1
+ else:
+ # dealing with function exclusion
+ if wlV in k.split(':')[1]:
+ whitelist_match = 1
+
+ if whitelist_match != 1:
+ newValue = v + ",NO"
+ else:
+ newValue = v + ",YES"
+
+ # add into the dictionary
+ tmpDict[k] = newValue
+
+ whitelist_match = 0
+
+ return tmpDict
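+
+ # Illustrative whitelist.dat entries (hypothetical paths and names): lines of
+ # the form "lib/foo:FULL_DIR_FOR_EXCLUSION", "drivers/bar.c:FULL_FILE_FOR_EXCLUSION"
+ # or "plat/baz.c:some_function" mark matching entries as whitelisted ("YES").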
+
+
+class ComplexityHTMLCreator(object):
+ """
+ Create HTML using the defect statistics
+ """
+
+ def __init__(self, complexityDict, fileName):
+ """ Class constructor function """
+ self.complexityDict = complexityDict
+ # output file name
+ self.fileName = fileName
+
+ self.create_template_head()
+ self.add_table_content()
+ self.create_template_tail()
+
+ def create_template_head(self):
+ """ Function to make the HTML template """
+ with open(self.fileName, 'w') as f:
+ f.write("<!DOCTYPE html>\n")
+ f.write("<html>\n")
+ f.write("<head>\n")
+ f.write("<style>\n")
+ f.write("table, th, td{\n")
+ f.write(" border: 1px solid black;\n")
+ f.write(" border-collapse: collapse;\n")
+ f.write("}\n")
+ f.write("</style>\n")
+ f.write("</head>\n")
+ f.write("<body>\n")
+ f.write("<table>\n")
+ f.write(" <tr>\n")
+ f.write(" <th>Function ID</th>\n")
+ f.write(" <th>In-file location</th>\n")
+ f.write(" <th>Complexity Score</th>\n")
+ f.write(" </tr>\n")
+
+ def add_table_content(self):
+ """ function to add rows for test case result summary """
+ with open(self.fileName, "a") as f:
+
+ for functionInfo, score in self.complexityDict.items():
+ if int(score) >= 10:
+ f.write(" <tr bgcolor=\"#E67E62\">\n")
+ else:
+ f.write(" <tr>\n")
+
+ # add function information
+ location = functionInfo.split(':')[0].strip()
+ functionName = functionInfo.split(':', 1)[1].strip()
+
+ # add complexity score
+ f.write(" <td>{0}</td>\n".format(functionName))
+ f.write(" <td>{0}</td>\n".format(location))
+ f.write(" <td>{0}</td>\n".format(score))
+ f.write(" </tr>\n")
+
+ def create_template_tail(self):
+ """ function to add the closing part of html """
+
+ with open(self.fileName, "a") as f:
+ f.write("</table>\n")
+ f.write("</body>\n")
+ f.write("</html>\n")
diff --git a/quality-metrics/data-generator/tfa_metrics/tfa_defects.py b/quality-metrics/data-generator/tfa_metrics/tfa_defects.py
new file mode 100755
index 0000000..8725909
--- /dev/null
+++ b/quality-metrics/data-generator/tfa_metrics/tfa_defects.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" tfa_defects.py:
+
+ Retrieves TF-A defects from GitHub
+
+"""
+
+from github import GitHub, ApiError, ApiNotFoundError
+
+try:
+ token = "<GitHub Access Token>"
+ gh = GitHub(access_token=token)
+
+ # Please note that currently 'open' defects are reported
+ # In future, labels='bug' would be used for defect density
+ open_bug_issues = gh.repos(
+ 'ARM-software')('tf-issues').issues.get(state='open', labels='bug')
+
+ bugCounter = 0
+
+ TFA_URL = "https://github.com/ARM-software/tf-issues/issues/"
+
+ for issue in open_bug_issues:
+ print("Found open bug with id: %s: %s, %s" %
+ (issue.number, issue.title, issue.state))
+ bugCounter += 1
+
+ print("\t url for this issue is: %s" % (TFA_URL + str(issue.number)))
+
+ print("@@ Total number of open bugs: %d" % (bugCounter))
+
+except ApiNotFoundError as e:
+ print(e, e.request, e.response)
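+
+# Note: the "Found open bug with id: <id>: <title>, <state>" lines printed above
+# are what DefectParser in tfa_generate_influxdb_files.py later parses, e.g.
+#   Found open bug with id: 123: Hypothetical defect title, open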
diff --git a/quality-metrics/data-generator/tfa_metrics/tfa_generate_influxdb_files.py b/quality-metrics/data-generator/tfa_metrics/tfa_generate_influxdb_files.py
new file mode 100755
index 0000000..825c1c9
--- /dev/null
+++ b/quality-metrics/data-generator/tfa_metrics/tfa_generate_influxdb_files.py
@@ -0,0 +1,344 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" tfa_generate_influxdb_files.py:
+
+ Parses the TF-A metrics summary files and generates JSON files
+ containing data to be written to InfluxDB.
+ Usage: python3 tfa_generate_influxdb_files.py --defectLog <defect log> \
+ --complexityLog <complexity log> --loc <code churn loc> \
+ --gitTagDate <tag date> --influxTime <git tag date & time>
+
+"""
+
+import argparse
+import os
+import re
+import collections
+import string
+import time
+import json
+
+
+def load_module(name, fpath):
+ """
+ Function to return access to the module
+
+ :param: name: Module name to be loaded
+ :param: fpath: Relative path to complexity_parser.py
+ :return: Module object
+ """
+ import os
+ import imp
+ return imp.load_source(
+ name, os.path.join(
+ os.path.dirname(__file__), fpath))
+
+
+load_module(
+ "complexity_parser",
+ "../common_metrics/complexity_parser/complexity_parser.py")
+
+from complexity_parser import ComplexityParser
+
+def args_parse():
+
+ global DEFECT_LOG
+ global COMPLEXITY_LOG
+ global CODE_CHURN
+ global BASE_RELEASE_TAG
+ global TARGET_RELEASE_TAG
+ global GIT_TAG_DATE
+ global GIT_TAG_DATE_TIME
+
+ # Create parser instance and add arguments
+ parser = argparse.ArgumentParser(
+ description="TF-A quality metrics InfluxDB JSON files generator")
+ parser.add_argument("--defectLog", help="name of the defect log")
+ parser.add_argument("--complexityLog", help="name of the complexity log")
+ parser.add_argument("--loc", help="code churn statistics", required=True)
+ parser.add_argument(
+ "--baseTag",
+ help="name of the base release tag",
+ required=True)
+ parser.add_argument(
+ "--targetTag",
+ help="name of the target release tag",
+ required=True)
+ parser.add_argument("--gitTagDate", help="Git Tag Date", required=True)
+ parser.add_argument(
+ "--influxTime",
+ help="InfluxDB time, which is Git Tag Date and Time",
+ required=True)
+
+ # Parse the arguments
+ args = parser.parse_args()
+
+ if args.defectLog:
+ DEFECT_LOG = args.defectLog
+
+ if args.complexityLog:
+ COMPLEXITY_LOG = args.complexityLog
+
+ if args.loc:
+ CODE_CHURN = args.loc
+
+ if args.baseTag:
+ BASE_RELEASE_TAG = args.baseTag
+
+ if args.targetTag:
+ TARGET_RELEASE_TAG = args.targetTag
+
+ if args.gitTagDate:
+ GIT_TAG_DATE = re.sub('[-]', '', args.gitTagDate)
+
+ if args.influxTime:
+ GIT_TAG_DATE_TIME = args.influxTime
+
+
+def tfa_generate_defect_data(data):
+ """
+ Function to write the data of defects into influxdb """
+
+ dict_list = []
+ runDate = time.strftime('%H:%M-%x')
+
+ # "Issue_Status" acts as an indicative field to help the viewer figure out
+ # the current status of the bug
+ defects_tracking = {
+ "metadata": {
+ "metrics": "tfa_defects_tracking"
+ },
+ "api_version": "1.0",
+ "data": [{
+ "measurement": "TFA_Defects_Tracking",
+ "fields": {
+ "Issue_Status": "{}".format("Open"),
+ "Number_of_Defects": int(len(data))
+ },
+ "tags": {
+ "Measured_Date": "{}".format(runDate)
+ },
+ }]
+ }
+
+ with open('defects_tracking.json', 'w') as fp:
+ json.dump(defects_tracking, fp)
+
+ # Write details of each defects into the other measurement called
+ # "TFA_Defects_Statistics"
+ defect_stats = {}
+ defect_stats["data"] = []
+ defect_stats["metadata"] = {}
+ defect_stats["metadata"]["metrics"] = "tfa_defects_stats"
+ defect_stats["api_version"] = "1.0"
+ for ID, description in data.items():
+ json_body = {
+ "measurement": "TFA_Defects_Statistics",
+ "fields": {
+ "Title": "{}".format(description['title']),
+ "Issue_Status": "{}".format(description['state']),
+ "URL": "{}".format(description['url'])
+ },
+ "tags": {
+ "Defect_ID": "{}".format(ID),
+ "Measured_Date": "{}".format(runDate)
+ }
+ }
+
+ defect_stats["data"].append(json_body)
+
+ with open('defects_statistics.json', 'w') as fp:
+ json.dump(defect_stats, fp)
+
+
+def tfa_generate_codechurn_data(data, base_tag, target_tag):
+ """
+ Generates InfluxDB data for TF-A code churn and
+ writes that to code_churn.json file.
+
+ :param: data: Lines of change
+ :param: base_tag: Release tag prior to target_tag
+ :param: target_tag: Tag being tested
+ """
+
+ json_body = {
+ "metadata": {
+ "metrics": "tfa_code_churn"
+ },
+ "api_version": "1.0",
+ "data": [{
+ "measurement": "TFA_CodeChurn_Tracking",
+ "fields": {
+ "Lines_of_Change": int(data)
+ },
+ "tags": {
+ "Git_Tag_Date": int(GIT_TAG_DATE),
+ "Base_Tag": "{}".format(base_tag),
+ "Target_Tag": "{}".format(target_tag)
+ },
+ "time": GIT_TAG_DATE_TIME
+ }]
+ }
+
+ with open('code_churn.json', 'w') as fp:
+ json.dump(json_body, fp)
+
+
+def tfa_generate_complexity_data(data, base_tag, target_tag, threshold):
+ """
+ Generates InfluxDB data for TF-A complexity scores and
+ writes that to complexity stats and tracking json files.
+
+ :param: data: Complexity data
+ :param: base_tag: Release tag prior to target_tag
+ :param: target_tag: Tag being tested
+ :param: threshold: Complexity threshold
+ """
+
+ complexity_stats = {}
+ complexity_stats["data"] = []
+ complexity_stats["metadata"] = {}
+ complexity_stats["metadata"]["metrics"] = "tfa_complexity_stats"
+ complexity_stats["api_version"] = "1.0"
+
+ totalComplexity = 0
+
+ print(
+ "@@ Number of functions with complexity score > %d: %d" %
+ (threshold, len(data)))
+
+ for k, v in data.items():
+ # Extract the location and function name
+ location = k.split(':', 1)[0].strip()
+ functionID = k.split(':', 1)[1].strip()
+ json_body = {
+ "measurement": "TFA_Complexity_Statistics",
+ "fields": {
+ "Function_ID": "{}".format(functionID),
+ "Score": int(v),
+ "Whitelisted": "{}".format("no"),
+ "Threshold": int(threshold)
+ },
+ "tags": {
+ "Location": "{}".format(location),
+ "Git_Tag_Date": int(GIT_TAG_DATE),
+ "Base_Tag": "{}".format(base_tag),
+ "Target_Tag": "{}".format(target_tag)
+ },
+ "time": GIT_TAG_DATE_TIME
+ }
+
+ complexity_stats["data"].append(json_body)
+ totalComplexity += int(v)
+
+ with open('complexity_stats.json', 'w') as fp:
+ json.dump(complexity_stats, fp)
+
+ totalExceedThreshold = len(data)
+ complexity_tracking = {
+ "metadata": {
+ "metrics": "tfa_complexity_tracking"
+ },
+ "api_version": "1.0",
+ "data": [{
+ "measurement": "TFA_Complexity_Tracking",
+ "fields": {
+ "Threshold": int(threshold),
+ "Whitelisted": "{}".format("no"),
+ "Functions_Exceeding_Threshold_Not_Whitelisted": int(totalExceedThreshold)
+ },
+ "tags": {
+ "Git_Tag_Date": int(GIT_TAG_DATE),
+ "Target_Tag": "{}".format(target_tag)
+ },
+ "time": GIT_TAG_DATE_TIME
+ }]
+ }
+
+ with open('complexity_tracking.json', 'w') as fp:
+ json.dump(complexity_tracking, fp)
+
+
+class DefectParser(object):
+ """
+ Extract the following data from the defect/complexity logs:
+ - defect list: {test class ID:{title: <title>, link: <URL>}}
+ - int variable: total number of defects
+ """
+
+ def __init__(self, defectLog):
+ self.defectLog = defectLog
+ self.defectDict = collections.OrderedDict()
+
+ self.process_defect_log()
+
+ def process_defect_log(self):
+ """
+ Function to process defect log and populate the defect dictionary
+ """
+ with open(self.defectLog) as fp:
+ content = fp.readlines()
+
+ baseURL = "https://github.com/ARM-software/tf-issues/issues/"
+
+ # Get defect id, title and URL link to populate the defect dictionary
+ for i in content:
+ i_strip = i.strip()
+
+ titleIDRegex = "^Found open bug with id: ([0-9]+): (.*)"
+ mIDTitle = re.match(titleIDRegex, i)
+
+ if mIDTitle:
+ defectID = mIDTitle.group(1)
+ defectTitle = mIDTitle.group(2)
+ defectURL = baseURL + mIDTitle.group(1)
+
+ self.defectDict[defectID] = {}
+ self.defectDict[defectID]['title'] = defectTitle.split(',')[0]
+ self.defectDict[defectID]['url'] = defectURL
+ self.defectDict[defectID]['state'] = defectTitle.split(',')[1]
+
+
+if __name__ == "__main__":
+
+ # Initialise global variables
+ DEFECT_LOG = ""
+ COMPLEXITY_LOG = ""
+ CODE_CHURN = 0
+ BASE_RELEASE_TAG = 0
+ TARGET_RELEASE_TAG = 0
+ # Functions having pmcabbe cylomatic complexity >= TFA_THRESHOLD
+ # are reported
+ TFA_THRESHOLD = 11
+ GIT_TAG_DATE = ""
+
+ # parse arguments
+ args_parse()
+
+ # Generate defect data
+ defectData = DefectParser(DEFECT_LOG)
+
+ # Generate complexity data
+ complexityData = ComplexityParser(COMPLEXITY_LOG, TFA_THRESHOLD)
+
+ tfa_generate_defect_data(defectData.defectDict)
+
+ tfa_generate_codechurn_data(
+ CODE_CHURN,
+ BASE_RELEASE_TAG,
+ TARGET_RELEASE_TAG)
+
+ tfa_generate_complexity_data(
+ complexityData.complexityDict,
+ BASE_RELEASE_TAG,
+ TARGET_RELEASE_TAG,
+ TFA_THRESHOLD)
diff --git a/quality-metrics/data-generator/tfa_metrics/tfa_quality_metrics.sh b/quality-metrics/data-generator/tfa_metrics/tfa_quality_metrics.sh
new file mode 100755
index 0000000..cc920d9
--- /dev/null
+++ b/quality-metrics/data-generator/tfa_metrics/tfa_quality_metrics.sh
@@ -0,0 +1,353 @@
+#!/usr/bin/env bash
+
+#======================================================================
+# Copyright (c) 2020, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#======================================================================
+
+#======================================================================
+# FILE: tfa_quality_metrics.sh
+#
+# DESCRIPTION: script to collect defects and calculate code churn and complexity score for arm-trusted-firmware
+#
+# USAGE: ./tfa_quality_metrics.sh --tag <release tag>
+#
+#======================================================================
+. ../common_metrics/common_utilities/common_utilities.sh
+. ./tfa_variables.sh
+
+# === Function ========================================================
+# NAME: clone_git_repo
+# DESCRIPTION: Clones the repository via "git clone" command
+# =====================================================================
+clone_git_repo()
+{
+ REPO_URL=$1
+ REPOSITORY=$(basename $REPO_URL .git)
+ # If repository already exists, then return from this function
+ if [ -d $REPOSITORY ]; then
+ printf "\nRepository \"$REPOSITORY\" already exists."
+ return
+ fi
+
+ # Clone repo. If it doesn't exist, then exit.
+ printf "\nCloning $REPOSITORY...\n"
+ printf "git clone $REPO_URL\n"
+ clone_err=$(git clone "$REPO_URL" 2>&1 | grep "fatal")
+
+ if [[ ! -z $clone_err ]]; then
+ printf "Repository \"$REPOSITORY\" not found. Exiting...\n"
+ exit
+ fi
+}
+
+# === Function ========================================================
+# NAME: tag_validation
+# DESCRIPTION: Invokes get_base_tag, which retrieves the base tag if the
+# target tag is valid
+# PARAMETER:
+# $1: tag id
+# =====================================================================
+tag_validation()
+{
+ tag=$1
+
+ # check that tag actually exists
+ pushd arm-trusted-firmware
+ get_base_tag "^v[0-9]+\.[0-9]+$"
+ popd
+}
+
+# === Function ========================================================
+# NAME: generate_defect_summary
+# DESCRIPTION: Calculates the total number of defects
+# PARAMETER:
+# $1: output defect log
+# =====================================================================
+generate_defect_summary()
+{
+ # copy the github module to this level
+ cp $DIR/./githubpy/github.py .
+ cp $DIR/./githubpy/setup.py .
+
+ python3 $DIR/tfa_defects.py > $DEFECT_LOG
+}
+
+# === Function ========================================================
+# NAME: get_complexity_score
+# DESCRIPTION: Finds cyclomatic complexity of all the C/C++ files.
+# =====================================================================
+get_complexity_score()
+{
+ complexity_dir="$(basename $TFA_REPO .git)"
+
+ # check the availability of pmccabe
+ validation=$(which pmccabe)
+ if [ -z "$validation" ]; then
+ echo "pmccabe not found. Aborting test...\n"
+ exit
+ fi
+
+ # find out complexity on computed folder
+ pmccabe -vt `find $complexity_dir -name "*.c"` `find $complexity_dir -name "*.cpp"` > $COMPLEXITY_LOG
+}
+
+# === Function ========================================================
+# NAME: complexity_score
+# DESCRIPTION: Calculates the McCabe complexity score
+# =====================================================================
+complexity_score()
+{
+ # checkout the tag before running pmmcabe
+ pushd $DIR/arm-trusted-firmware
+
+ echo "git checkout ${TARGET_TAG}"
+ git checkout ${TARGET_TAG}
+ git status
+
+ # exclude subfolders under plat except for 'arm' and 'common'
+ mv plat tmp_plat
+ mkdir plat
+ cp -rp tmp_plat/arm tmp_plat/common tmp_plat/compat plat 2>/dev/null
+ rm -rf tmp_plat
+
+ # exclude subfolders under lib
+ rm -rf lib/stdlib
+ rm -rf lib/libfdt
+ rm -rf lib/compiler-rt
+
+ # exclude tools
+ rm -rf tools
+
+ # exclude services/spd except for 'tspd'
+ mv services/spd services/safe_spd
+ mkdir services/spd
+ cp -rp services/safe_spd/tspd services/spd 2>/dev/null
+ rm -rf services/safe_spd
+
+ popd
+
+ get_complexity_score
+}
+
+# === Function ========================================================
+# NAME: code_churn_summary
+# DESCRIPTION: Function to get code churn summary
+# PARAMETER:
+# $1: code churn log
+# =====================================================================
+code_churn_summary()
+{
+ pushd $DIR/arm-trusted-firmware
+
+ echo "@@ Calculating code churn excluding plat folder..."
+
+ # Calculate code churn
+ stats1=$(git diff --stat $BASE_TAG $TARGET_TAG -- . ':!plat' | grep -E "[0-9]+ file(s)? changed,")
+ CODE_CHURN1=$(generate_code_churn_summary "$stats1")
+
+ echo "@@ Calculating code churn plat/arm and plat/common folder..."
+ stats2=$(git diff --stat $BASE_TAG $TARGET_TAG -- 'plat/arm' 'plat/common' | grep -E "[0-9]+ file(s)? changed,")
+ CODE_CHURN2=$(generate_code_churn_summary "$stats2")
+
+ CODE_CHURN=$((CODE_CHURN1+CODE_CHURN2))
+ echo "Code churn: $CODE_CHURN LOC" | tee $DIR/$CODE_CHURN_LOG
+
+ # get tagger date for git tag in YYYY-MM-DD format
+ get_git_tag_date
+
+ popd
+
+ echo $CODE_CHURN
+}
+
+# === Function ========================================================
+# NAME: write_influxdb_data
+# DESCRIPTION: Function to archive the generated JSON files and POST them
+#              to the broker component, which writes the data to InfluxDB
+# =====================================================================
+write_influxdb_data()
+{
+ # Create a result folder using the current time stamp and
+    # move the InfluxDB JSON files to it
+ local resultDir=$(date +%Y-%m-%d_%H_%M_%S)
+ local_result=$DIR/$resultDir
+
+ mkdir -p $local_result
+ mv *.json *.txt $local_result
+
+ pushd $local_result
+
+ for json_file in *.json; do
+ curl -X POST -H "Content-Type: application/json" -d "$(cat ${json_file})" \
+ "http://${INFLUX_HOST}:5000" -H "${TFA_METRICS_AUTH_TOKEN}"
+ done
+
+ popd
+}
+
+# === Function ========================================================
+# NAME: generate_defect_codechurn_complexity_data
+# DESCRIPTION: Function to generate defects, code churn and complexity
+# quality metrics data for given tag.
+# =====================================================================
+generate_defect_codechurn_complexity_data()
+{
+ # Remove files from previous run, if any
+ rm -rf arm-trusted-firmware/ github* setup.py
+
+ clone_git_repo $TFA_REPO
+ clone_git_repo $GITHUBPY_REPO
+
+ # validate TARGET_TAG and get base tag
+ tag_validation $TARGET_TAG
+
+ # do defect statistics
+ generate_defect_summary
+
+ # cyclomatic complexity check
+ complexity_score
+
+ # code churn
+ code_churn_summary
+
+ # Create InfluxDB json files to be written to InfluxDB
+ python3 $DIR/tfa_generate_influxdb_files.py --defectLog $DEFECT_LOG \
+ --complexityLog $COMPLEXITY_LOG --loc $CODE_CHURN --baseTag $BASE_TAG \
+ --targetTag $TARGET_TAG --gitTagDate $GIT_TAG_DATE --influxTime "$GIT_TAG_DATE_TIME"
+}
+
+# === Function ========================================================
+# NAME: usage
+# DESCRIPTION: Function to print script usage
+# =====================================================================
+usage()
+{
+ # print usage common to all files
+ printf "USAGE: $(basename $0) [options]\n"
+ printf "\t params: \n"
+ printf "\t -h|--help print help information\n"
+ printf "\t --tag user specified release tag\n"
+ printf "\t --metric_type [ runtime_instrumentation | image_size | coverity_misra ]*\n"
+    printf "\t --rt_instr_file Path to file containing instrumentation data\n"
+ printf "\t Required when metric_type is runtime_instrumentation\n"
+ printf "\t --image_size_file Path to file containing image size data\n"
+ printf "\t Required when metric_type is image_size\n"
+ printf "\t --misra_defects_file Path to file containing MISRA defects information\n"
+ printf "\t Required when metric_type is coverity_misra\n"
+    printf "* By default, code churn, defects and complexity metrics are generated for the given tag\n"
+    printf "When metric_type is specified, the corresponding data file to be parsed is also required\n"
+ exit
+}
+
+# === Function ========================================================
+# NAME: generate_tfa_metrics_data
+# DESCRIPTION: Function to generate InfluxDB JSON file for specified
+# TF-A metrics - run time instrumentation/image size/MISRA defects
+# =====================================================================
+generate_tfa_metrics_data()
+{
+ case $METRIC_TYPE in
+ runtime_instrumentation)
+ if [[ ! -f $RTINSTR_FILE ]]; then
+ echo "$RTINSTR_FILE doesn't exist.. Exiting.."
+ exit 1
+ else
+ python3 tfa_rt_instr.py --rt_instr $RTINSTR_FILE
+ fi
+ ;;
+ image_size)
+ if [[ ! -f $IMAGE_SIZE_FILE ]]; then
+ echo "$IMAGE_SIZE_FILE doesn't exist.. Exiting.."
+ exit 1
+ else
+ python3 tfa_track_image_size.py --image_size_file $IMAGE_SIZE_FILE
+ fi
+ ;;
+ coverity_misra)
+ if [[ ! -f $MISRA_DEFECTS_FILE ]]; then
+ echo "$MISRA_DEFECTS_FILE doesn't exist.. Exiting.."
+ exit 1
+ else
+ python3 tfa_track_misra_defects.py --misra_defects_file $MISRA_DEFECTS_FILE
+ fi
+ ;;
+ esac
+ write_influxdb_data
+ exit
+}
+
+# === Function ========================================================
+# NAME: parse_args
+# DESCRIPTION: Arguments parser function
+# =====================================================================
+parse_args()
+{
+ # parse the arguments
+ while [[ $# -gt 0 ]]
+ do
+ key="$1"
+ case $key in
+ -h|--help)
+ usage
+ ;;
+ --tag)
+ export TARGET_TAG="$2"
+ shift
+ shift
+ ;;
+ --metric_type)
+ export METRIC_TYPE="$2"
+ shift
+ shift
+ ;;
+ --rt_instr_file)
+ export RTINSTR_FILE="$2"
+ shift
+ shift
+ ;;
+ --image_size_file)
+ export IMAGE_SIZE_FILE="$2"
+ shift
+ shift
+ ;;
+ --misra_defects_file)
+ export MISRA_DEFECTS_FILE="$2"
+ shift
+ shift
+ ;;
+ *)
+ echo "Unknown argument $key in arguments $@"
+ usage
+ ;;
+ esac
+ done
+
+}
+
+# === Function ========================================================
+# NAME: main
+# DESCRIPTION: main function
+# PARAMETER: Command-line arguments
+# =====================================================================
+main()
+{
+ parse_args $@
+
+ # If metrics type is specified, then generate influxdb JSON files
+ # from given text files
+ if [[ ! -z $METRIC_TYPE ]]; then
+ generate_tfa_metrics_data
+ # Otherwise generate code churn, complexity and defects data for given tag
+ elif [[ ! -z $TARGET_TAG ]]; then
+ generate_defect_codechurn_complexity_data
+ else
+ echo "Please specify either metric_type or tag.."
+ usage
+ fi
+
+ # write generated data (JSON files) to InfluxDB
+ write_influxdb_data
+}
+
+main $@
diff --git a/quality-metrics/data-generator/tfa_metrics/tfa_rt_instr.py b/quality-metrics/data-generator/tfa_metrics/tfa_rt_instr.py
new file mode 100644
index 0000000..bc40a7f
--- /dev/null
+++ b/quality-metrics/data-generator/tfa_metrics/tfa_rt_instr.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" tfa_rt_instr.py:
+
+ Parses the job output log file, stores the data in a list of dictionaries
+    and creates a JSON file to be written to InfluxDB.
+
+ USAGE: python3 tfa_rt_instr.py --rt_instr <job_output.log>
+
+ """
+
+import argparse
+import os
+import os.path
+import re
+import json
+
+
+class TFAInstrFileParser:
+ dict_list = []
+ file_name = None
+ rtinstr_data = {}
+ rtinstr_data["data"] = []
+ rtinstr_data["metadata"] = {}
+ rtinstr_data["metadata"]["metrics"] = "tfa_rtinstr"
+ rtinstr_data["api_version"] = "1.0"
+
+ def __init__(self, input_file):
+ self.file_name = input_file
+ self.parse_instr_file()
+        print(json.dumps(self.rtinstr_data, indent=4, sort_keys=True))
+
+ def write_database_instr_tfa(self, file_dict):
+ self.rtinstr_data["data"].append(file_dict)
+
+ def parse_instr_file(self):
+ with open(self.file_name) as fp:
+ # Store instrumentation target as measurement name
+ line = fp.readline()
+ val = line.split(':')
+ if val[0].strip() != 'InstrumentationTarget':
+                print("Invalid file format.. InstrumentationTarget not found..")
+ print("Exiting..")
+ exit()
+ measurement = val[1].strip()
+
+ # Store commit ID
+ line = fp.readline()
+ val = line.split(':')
+ if val[0].strip() != 'CommitID':
+ print("Invalid file format.. Commit ID not found..")
+ print("Exiting..")
+ exit()
+ commit_id = val[1].strip()[0:10]
+
+ # Store commit title
+ line = fp.readline()
+ val = line.split(':', 1)
+ if val[0].strip() != 'CommitTitle':
+ print("Invalid file format.. CommitTitle not found..")
+ print("Exiting..")
+ exit()
+ commit_title = val[1].strip()
+
+ # Store time as commit date
+ line = fp.readline()
+ if line.split()[0] != 'CommitDate:':
+ print("Invalid file format.. Commit Date not found..")
+ print("Exiting..")
+ exit()
+ commit_time = line.split()[1]
+
+ # Store latency data per test case
+ for line in iter(fp.readline, ''):
+ file_dict = {}
+ file_dict['tags'] = {}
+ file_dict['fields'] = {}
+ file_dict['measurement'] = measurement
+ file_dict['tags']['CommitID'] = commit_id
+ file_dict['tags']['CommitTitle'] = commit_title
+ file_dict['time'] = commit_time
+ tc_arr = line.split()
+ file_dict['tags']['TC_Name'] = tc_arr[0]
+ file_dict['tags']['Cluster_ID'] = int(tc_arr[1])
+ file_dict['tags']['CPU_Core'] = int(tc_arr[2])
+ if file_dict['tags']['TC_Name'] == 'testrtinstrpsciversionparallel':
+ file_dict['fields']['Latency_EL3Entry_EL3Exit'] = int(
+ tc_arr[3])
+ else:
+ file_dict['fields']['Latency_EL3Entry_CPUPowerDown'] = int(
+ tc_arr[3])
+ file_dict['fields']['Latency_CPUWakeup_EL3Exit'] = int(
+ tc_arr[4])
+ file_dict['fields']['CacheFlush'] = int(tc_arr[5])
+ self.write_database_instr_tfa(file_dict)
+
+ with open('tfa_rtinstr.json', 'w') as fp:
+ json.dump(self.rtinstr_data, fp)
+
+
+def get_tfa_instr_file():
+ # Create parser instance and add argument
+ parser = argparse.ArgumentParser(
+ description="TFA quality metrics: Runtime Instrumentation tracking")
+ parser.add_argument(
+ "--rt_instr",
+ help="file containing TF-A runtime instrumentation info")
+
+ # Parse the args
+ args = parser.parse_args()
+
+ # Check if file exists
+ if os.path.isfile(str(args.rt_instr)):
+ return args.rt_instr
+ else:
+ print("Runtime Instrumentation file not found.. Exiting..")
+ exit()
+
+
+if __name__ == '__main__':
+ tfa_instr_file_data = TFAInstrFileParser(str(get_tfa_instr_file()))
diff --git a/quality-metrics/data-generator/tfa_metrics/tfa_track_image_size.py b/quality-metrics/data-generator/tfa_metrics/tfa_track_image_size.py
new file mode 100755
index 0000000..44dba10
--- /dev/null
+++ b/quality-metrics/data-generator/tfa_metrics/tfa_track_image_size.py
@@ -0,0 +1,199 @@
+#!/usr/bin/env python
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" tfa_track_image_size.py:
+
+    Parses the TFA firmware image size file, stores the data in a list of
+    dictionaries and creates a JSON file to be written to InfluxDB.
+
+    USAGE: python3 tfa_track_image_size.py --image_size_file <ImageSizeFile.txt>
+
+ """
+
+import argparse
+import os.path
+import re
+import json
+
+# Validation Variables
+MEM_SECTION_VALIDATION_TABLE = ['B', 'D', 'R', 'T', 'V', 'W']
+ELF_FILES_LOOKUP_TABLE = [
+ 'bl1.elf',
+ 'bl1u.elf',
+ 'bl2.elf',
+ 'bl2u.elf',
+ 'bl31.elf',
+ 'bl32.elf']
+
+
+class TFASizeFileParser:
+ """
+ Download the file containing sizes of various TFA build configs
+ Store the size data in a list of dictionaries in the following format:
+ [
+ {
+ "measurement": <build_config>,
+ "fields" : {
+ "BlX_B": Size of uninitialized data section
+ "BlX_D": Size of initialized data section
+ "BlX_R": Size of read only data section
+ "BlX_T": Size of text (code) section
+ "BlX_V": Size of weak object
+ "BlX_W": Size of weak symbol
+ },
+ "tags" : {
+ "BinMode" : Type of build (Release|Debug)
+ "CommitID" : Commit ID
+ "CommitTitle" : Commit title
+ }
+ "time" : Commit Time
+ }
+ ]
+ """
+
+ file_dict = {}
+ file_name = None
+
+ def __init__(self, input_file):
+ self.file_name = input_file
+ self.parse_image_size_file()
+ print(json.dumps(self.file_dict, indent=4, sort_keys=True))
+
+ def parse_image_size_file(self):
+ self.file_dict['tags'] = {}
+ self.file_dict['fields'] = {}
+
+ with open(self.file_name) as fp:
+ # Store measurement name as build config
+ line = fp.readline()
+ val = line.split(':')
+ if val[0].strip() != 'BuildConfig':
+ print("Invalid file format.. BuildConfig not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['measurement'] = val[1].strip()
+
+ # Store bin_mode
+ line = fp.readline()
+ val = line.split(':')
+ if val[0].strip() != 'BinMode':
+ print("Invalid file format.. BinMode not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['tags'][val[0].strip()] = val[1].strip().title()
+
+ # Store Commit ID
+ line = fp.readline()
+ val = line.split(':')
+ if val[0].strip() != 'CommitID':
+ print("Invalid file format.. Commit ID not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['tags'][val[0].strip()] = val[1].strip()[0:10]
+
+ # Store Commit Title
+ line = fp.readline()
+ val = line.split(':', 1)
+ if val[0].strip() != 'CommitTitle':
+ print("Invalid file format.. CommitTitle not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['tags']['CommitTitle'] = val[1].strip()
+
+ # Store time as commit date
+ line = fp.readline()
+ if line.split()[0] != 'CommitDate:':
+ print("Invalid file format.. Commit Date not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['time'] = line.split()[1]
+
+            # Store Image Size memory related data component-wise
+            build = None  # guard against lines appearing before the first .elf entry
+            for line in iter(fp.readline, ''):
+ if ".elf" in line:
+ searched_build = line.split('/')[-1].split(':')[0]
+ build = searched_build.upper().rsplit('.', 1)[0]
+ if searched_build not in ELF_FILES_LOOKUP_TABLE:
+ print(
+ "WARNING: " +
+ searched_build +
+ " not present in ELF_FILES_LOOKUP_TABLE..")
+ print(
+ "Skipping publishing data for " +
+ searched_build +
+ " to InfluxDB")
+ build = None
+ continue
+ elif build is not None:
+ val = line.split(' ')
+ if len(val) > 1:
+ if not val[0].strip() in MEM_SECTION_VALIDATION_TABLE:
+ print(
+ "Invalid memory section \"%s\".. Exiting.." %
+ val[0].strip())
+ exit()
+ mem_comp = build + "_" + val[0].strip()
+ self.file_dict['fields'][mem_comp] = int(
+ val[1].strip())
+
+ json_body = json.dumps(str(self.file_dict))
+ if not self.file_dict['fields']:
+ failed_configs = 'failed_configs.txt'
+
+ if os.path.exists(failed_configs):
+ append_write = 'a' # append if already exists
+ else:
+ append_write = 'w' # make a new file if not
+
+ failed_configs_file = open(failed_configs, append_write)
+ failed_configs_file.write(
+ self.file_dict['measurement'] +
+ ', ' +
+ self.file_dict['tags']['BinMode'] +
+ ': bl1/bl1u/bl2/bl2u/bl31/bl32 not found\n')
+ failed_configs_file.close()
+ print("No memory section found.. Exiting")
+ exit()
+
+
+def generate_influxdb_json_file(file_dict):
+ image_size_data = {}
+ image_size_data["data"] = []
+ image_size_data["metadata"] = {}
+ image_size_data["metadata"]["metrics"] = "tfa_image_size"
+ image_size_data["api_version"] = "1.0"
+ image_size_data["data"].append(file_dict)
+ with open('tfa_image_size.json', 'w') as fp:
+ json.dump(image_size_data, fp)
+
+
+def get_tfa_size_file():
+ # Create parser instance and add argument
+ parser = argparse.ArgumentParser(
+ description="TFA quality metrics: firmware image size tracking")
+ parser.add_argument(
+ "--image_size_file",
+ help="file containing TFA image size info")
+
+ # Parse the args
+ args = parser.parse_args()
+
+ # Check if file exists
+ if os.path.isfile(str(args.image_size_file)):
+ return args.image_size_file
+ else:
+ print("Image size file not found.. Exiting..")
+ exit()
+
+
+if __name__ == '__main__':
+ tfa_size_file_data = TFASizeFileParser(str(get_tfa_size_file()))
+ generate_influxdb_json_file(tfa_size_file_data.file_dict)
diff --git a/quality-metrics/data-generator/tfa_metrics/tfa_track_misra_defects.py b/quality-metrics/data-generator/tfa_metrics/tfa_track_misra_defects.py
new file mode 100755
index 0000000..6962ca5
--- /dev/null
+++ b/quality-metrics/data-generator/tfa_metrics/tfa_track_misra_defects.py
@@ -0,0 +1,177 @@
+#!/usr/bin/env python
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" tfa_track_misra_defects.py:
+
+    Parses the TFA MISRA defects file, stores the data in a list of
+    dictionaries and creates a JSON file to be written to InfluxDB.
+
+ USAGE: python3 tfa_track_misra_defects.py --misra_defects_file <DefectsFile.txt>
+
+ """
+
+import argparse
+import os.path
+import re
+import json
+
+
+class TFACoverityFileParser:
+ """
+ Store the Misra C defects data in a list of dictionaries in the following
+ format:
+ [
+ {
+ "measurement": <build_config>,
+ "fields" : {
+ "TotalDefects" : Total coverity defects
+ "MandatoryDefects": Mandatory defects
+ "RequiredDefects" : Required defects
+ "AdvisoryDefects" : Advisory defects
+ },
+ "tags" : {
+ "BinMode" : Type of build (Release|Debug)
+ "CommitID" : Commit ID
+ "CommitTitle" : Commit Title
+ }
+ "time" : PR Merge Commit Time
+ }
+ ]
+ """
+
+ file_dict = {}
+ file_name = None
+
+ def __init__(self, input_file):
+ self.file_name = input_file
+ self.parse_misra_defects_file()
+ print(json.dumps(self.file_dict, indent=4, sort_keys=True))
+
+ def parse_misra_defects_file(self):
+ self.file_dict = {}
+ self.file_dict['tags'] = {}
+ self.file_dict['fields'] = {}
+
+ with open(self.file_name) as fp:
+ # Store measurement name as build config
+ line = fp.readline()
+ val = line.split(':')
+ if val[0].strip() != 'BuildConfig':
+ print("Invalid file format.. BuildConfig not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['measurement'] = val[1].strip()
+
+ # Store bin_mode
+ line = fp.readline()
+ val = line.split(':')
+ if val[0].strip() != 'BinMode':
+ print("Invalid file format.. BinMode not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['tags'][val[0].strip()] = val[1].strip().title()
+
+ # Store Commit ID
+ line = fp.readline()
+ val = line.split(':')
+ if val[0].strip() != 'CommitID':
+ print("Invalid file format.. Commit ID not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['tags'][val[0].strip()] = val[1].strip()[0:10]
+
+ # Store Commit Title
+ line = fp.readline()
+ val = line.split(':', 1)
+ if val[0].strip() != 'CommitTitle':
+ print("Invalid file format.. CommitTitle not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['tags']['CommitTitle'] = val[1].strip()
+
+ # Store time as commit date
+ line = fp.readline()
+ if line.split()[0] != 'CommitDate:':
+ print("Invalid file format.. Commit Date not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['time'] = line.split()[1]
+
+ # Store Total Defects
+ line = fp.readline()
+ val = line.split(':')
+ if val[0].strip() != 'TotalDefects':
+ print("Invalid file format.. TotalDefects not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['fields']['TotalDefects'] = int(val[1].strip())
+
+ # Store Mandatory Defects
+ line = fp.readline()
+ val = line.split(':')
+ if val[0].strip() != 'MandatoryDefects':
+ print("Invalid file format.. MandatoryDefects not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['fields']['MandatoryDefects'] = int(val[1].strip())
+
+ # Store Required Defects
+ line = fp.readline()
+ val = line.split(':')
+ if val[0].strip() != 'RequiredDefects':
+ print("Invalid file format.. RequiredDefects not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['fields']['RequiredDefects'] = int(val[1].strip())
+
+ # Store Advisory Defects
+ line = fp.readline()
+ val = line.split(':')
+ if val[0].strip() != 'AdvisoryDefects':
+ print("Invalid file format.. AdvisoryDefects not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['fields']['AdvisoryDefects'] = int(val[1].strip())
+
+
+def write_database(file_dict):
+ misra_defects_data = {}
+ misra_defects_data["data"] = []
+ misra_defects_data["metadata"] = {}
+ misra_defects_data["metadata"]["metrics"] = "tfa_misra_defects"
+ misra_defects_data["api_version"] = "1.0"
+ misra_defects_data["data"].append(file_dict)
+ with open('tfa_misra_defects.json', 'w') as fp:
+ json.dump(misra_defects_data, fp)
+
+
+def get_tfa_coverity_file():
+ # Create parser instance and add argument
+ parser = argparse.ArgumentParser(
+ description="TF-A quality metrics: Misra C defects tracking")
+ parser.add_argument("--misra_defects_file",
+ help="file containing Misra defects information")
+
+ # Parse the args
+ args = parser.parse_args()
+
+ # Check if file exists
+ if os.path.isfile(str(args.misra_defects_file)):
+ return args.misra_defects_file
+ else:
+ print("Coverity file not found.. Exiting..")
+ exit()
+
+
+if __name__ == '__main__':
+ tfa_misra_defects_data = TFACoverityFileParser(
+ str(get_tfa_coverity_file()))
+ write_database(tfa_misra_defects_data.file_dict)
diff --git a/quality-metrics/data-generator/tfa_metrics/tfa_variables.sh b/quality-metrics/data-generator/tfa_metrics/tfa_variables.sh
new file mode 100644
index 0000000..56a2715
--- /dev/null
+++ b/quality-metrics/data-generator/tfa_metrics/tfa_variables.sh
@@ -0,0 +1,31 @@
+#!/usr/bin/env bash
+
+#======================================================================
+# Copyright (c) 2020, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#======================================================================
+
+export TFA_REPO='https://github.com/ARM-software/arm-trusted-firmware.git'
+export GITHUBPY_REPO='https://github.com/michaelliao/githubpy.git'
+
+export DEFECT_LOG=tfa_defects_summary.txt
+export COMPLEXITY_LOG=tfa_complexity_summary.txt
+export CODE_CHURN_LOG=tfa_code_churn.txt
+
+# Authentication token needs to be generated using following command:
+# curl -H "Content-Type: application/json" -X POST -d \
+# "$(cat <CREDENTIALS_JSON_FILE>)" http://<IP_ADDR>:5000/auth
+# where "IP_ADDR" is the IP address of host where metrics server is running, and
+# CREDENTIALS_JSON file should contain credentials which should match with
+# the credentials in ../../broker-component/credentials.py
+# Response would contain a JWT token, which needs to be added here
+# during deployment
+export TFA_METRICS_AUTH_TOKEN="<TFA Authorization Token>"
+
+# INFLUX_HOST is the IP address of host where InfluxDB service is running
+# It needs to be updated during deployment
+export INFLUX_HOST="<Influx Public Host IP>"
+
+# Resolve the absolute path of the directory containing this script
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
diff --git a/quality-metrics/docs/broker_component_user_guide.md b/quality-metrics/docs/broker_component_user_guide.md
new file mode 100644
index 0000000..58ddbdf
--- /dev/null
+++ b/quality-metrics/docs/broker_component_user_guide.md
@@ -0,0 +1,84 @@
+# Broker Component User Guide
+The broker component is a Python Flask app that handles the data pushed by the data generator components of the quality metrics project. It implements APIs that allow the data generator scripts to POST metrics data in an agreed JSON format, which gets pushed to the InfluxDB backend database. For each received request with a valid authorization token, it performs a basic sanity check and pushes the data to InfluxDB. For details on how to visualize InfluxDB data using Grafana, please refer to the [visualisation user guide](./visualisation_user_guide.md).
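+
+For reference, the sketch below shows roughly what such a POST looks like when issued from Python rather than via the helper scripts. The envelope mirrors the structure built by the data generator scripts (api_version, metadata and a "data" list of measurement points); the host placeholder, the Authorization header layout and all field values are assumptions for illustration only and must be adapted to the actual deployment and token format:
+```python
+import json
+
+import requests  # assumed to be available where the data generator runs
+
+# Envelope mirroring what the tfa_*.py data generator scripts emit;
+# all values below are illustrative only.
+payload = {
+    "api_version": "1.0",
+    "metadata": {"metrics": "tfa_image_size"},
+    "data": [{
+        "measurement": "zynqmp-tspd-tdram",
+        "tags": {"BinMode": "Debug", "CommitID": "ed39d5e3c0"},
+        "fields": {"BL31_B": 26676, "BL31_T": 40916},
+        "time": "2020-09-08T14:22:45+00:00",
+    }],
+}
+
+# Host and header layout are assumptions; see tfa_variables.sh and the
+# write_influxdb_data() helper for the curl command the scripts actually use.
+broker_url = "http://<Influx Public Host IP>:5000"
+headers = {
+    "Content-Type": "application/json",
+    "Authorization": "JWT <TFA Authorization Token>",
+}
+response = requests.post(broker_url, headers=headers, data=json.dumps(payload))
+print(response.status_code)
+```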
+
+## Deploying broker component
+The broker component can be deployed in the infrastructure as a docker container with the docker files in the project.
+
+### Dependencies
+- docker
+- docker-compose
+- python3-dev
+- python3-pip
+- Ensure that ports 3000, 5000 and 8086 are enabled for incoming traffic.
+
+### Bringing-up Broker Component - Setup Instructions
+In order to bring up the broker component (for a first-time setup), docker-compose.yml is used, which brings up 3 containers - Grafana, InfluxDB and the Flask app. It is up to the user to adapt the docker-compose.yml file if only some of the components need to be deployed.
+The JWT authorization token is generated using the following steps and needs to be added in [tfa_variables.sh](../data-generator/tfa_metrics/tfa_variables.sh):
+1. Set the value of HOST to the host public IP address in file [constants.py](../broker-component/constants.py)
+1. [credentials.py](../broker-component/credentials.py) is provided for reference purposes only. Please change the value of SECRET_KEY and the password for the _tfa_metrics_ user.
+1. Ensure the current working directory is broker-component. Create [teamname]_credentials.json (as an example, let us use 'tfa' as the team name) and run the following commands to generate the authorization token:
+```bash
+$ cat tfa_credentials.json
+{
+ "username": "tfa_metrics",
+ "password": "[tfa_metrics password matching with password in credentials.py]"
+}
+
+$ docker network create metrics_network
+$ docker volume create influxdb-volume
+$ docker volume create grafana-volume
+$ docker-compose build
+$ docker-compose up -d
+$ curl -H "Content-Type: application/json" -X POST -d "$(cat tfa_credentials.json)" http://[Host Public IP]:5000/auth
+```
+
+## Testing the setup after deployment
+The following steps help confirm whether the deployment steps detailed in the previous section were successful.
+
+### Verify that Grafana is up
+If URL *http://`[HOST Public IP]`:3000* is accessible, it confirms that Grafana is up.
+
+### Verify that InfluxDB is up
+If URL *http://`[HOST Public IP]`:8086/query* is accessible, it confirms that InfluxDB is up.
+
+### Create InfluxDB Database
+A database can be created by accessing the InfluxDB container or by using the InfluxDB API.
+
+#### Create database by accessing InfluxDB container
+```bash
+$ docker exec -it influxdb_container sh
+# influx
+> create database TFA_CodeChurn
+> create database TFA_Complexity
+> create database TFA_Defects
+> create database TFA_MisraDefects
+> create database TFA_ImageSize
+> create database TFA_RTINSTR
+> exit
+# exit
+```
+#### Create database using the InfluxDB API
+```bash
+$ curl -i -XPOST http://[HOST Public IP]:8086/query --data-urlencode "q=CREATE DATABASE TFA_CodeChurn"
+$ curl -i -XPOST http://[HOST Public IP]:8086/query --data-urlencode "q=CREATE DATABASE TFA_Complexity"
+$ curl -i -XPOST http://[HOST Public IP]:8086/query --data-urlencode "q=CREATE DATABASE TFA_Defects"
+$ curl -i -XPOST http://[HOST Public IP]:8086/query --data-urlencode "q=CREATE DATABASE TFA_MisraDefects"
+$ curl -i -XPOST http://[HOST Public IP]:8086/query --data-urlencode "q=CREATE DATABASE TFA_ImageSize"
+$ curl -i -XPOST http://[HOST Public IP]:8086/query --data-urlencode "q=CREATE DATABASE TFA_RTINSTR"
+$ curl -i -XPOST http://[HOST Public IP]:8086/query --data-urlencode "q=SHOW DATABASES"
+```
+
+### Pushing Data to InfluxDB
+Data can be pushed to InfluxDB by sending a cURL POST request in the agreed-upon format with the correct authorization token.
+* The steps above describe how the authorization token can be generated.
+* Each request is validated using [JSON schemas](../broker-component/metrics-schemas).
+In order to push data, run the following commands:
+```bash
+$ cd qa-tools/quality-metrics/data-generator/tfa_metrics
+$ ./tfa_quality_metrics.sh --tag [Release Tag]
+```
+
+For details, please refer to the [data generator user guide](./data_generator_user_guide.md).
+
+## License
+[BSD-3-Clause](../../license.md)
diff --git a/quality-metrics/docs/data_generator_user_guide.md b/quality-metrics/docs/data_generator_user_guide.md
new file mode 100644
index 0000000..fc81b54
--- /dev/null
+++ b/quality-metrics/docs/data_generator_user_guide.md
@@ -0,0 +1,194 @@
+# Data Generator User Guide
+The data generator component contains scripts that push JSON files containing metrics data to be stored in the InfluxDB database. These mainly push data for the following TFA (trusted-firmware-a) metrics:
+1. Code churn, Code complexity and GitHub defects
+1. Image Size
+1. MISRA defects
+1. Runtime instrumentation
+
+For the code churn, code complexity and defects metrics, the data is generated by the data generator scripts themselves. For the other metrics, the data generator scripts take a text file as input and convert the information in it into a JSON file that is posted to the broker component. The text file is expected to be generated by the TFA CI setup.
+
+## Dependencies
+- python3-dev
+- python3-pip
+
+## Generating Data
+Please refer to the [broker component user guide](./broker_component_user_guide.md) for changes essential for the setup. Once the broker component is up and running, please make the following changes to run the data generator scripts.
+1. Please refer to the [broker component user guide](./broker_component_user_guide.md) for details on how to generate the token, and add it in [tfa_variables.sh](../data-generator/tfa_metrics/tfa_variables.sh). "Bringing-up Broker Component - Setup Instructions" contains details on how to generate the authentication token.
+1. Set the value of INFLUX_HOST to Influx Public Host IP in [tfa_variables.sh](../data-generator/tfa_metrics/tfa_variables.sh).
+1. Set the value of GitHub access token in [tfa_defects.py](../data-generator/tfa_metrics/tfa_defects.py)
+
+### Code churn, Code complexity and GitHub defects
+As mentioned in [broker component user guide](./broker_component_user_guide.md), code churn, complexity and defects data can be generated as follows for a given release tag:
+```bash
+$ cd qa-tools/quality-metrics/data-generator/tfa_metrics
+$ ./tfa_quality_metrics.sh --tag [Release Tag]
+```
+
+An example of "Release Tag" can be any of the [trusted-firmware-a repo's tags](https://git.trustedfirmware.org/TF-A/trusted-firmware-a.git/refs/tags)
+
+The JSON files that are sent to the broker component can be found in a timestamped folder inside the tfa_metrics folder. Data written to InfluxDB can be queried using the following commands:
+```bash
+$ curl -G 'http://[HOST Public IP]:8086/query?db=TFA_CodeChurn' --data-urlencode 'q=show measurements'
+$ curl -G 'http://[HOST Public IP]:8086/query?db=TFA_CodeChurn' --data-urlencode 'q=select * from TFA_CodeChurn_Tracking'
+
+$ curl -G 'http://[HOST Public IP]:8086/query?db=TFA_Complexity' --data-urlencode 'q=show measurements'
+$ curl -G 'http://[HOST Public IP]:8086/query?db=TFA_Complexity' --data-urlencode 'q=select * from TFA_Complexity_Tracking'
+$ curl -G 'http://[HOST Public IP]:8086/query?db=TFA_Complexity' --data-urlencode 'q=select * from TFA_Complexity_Statistics'
+
+$ curl -G 'http://[HOST Public IP]:8086/query?db=TFA_Defects' --data-urlencode 'q=show measurements'
+$ curl -G 'http://[HOST Public IP]:8086/query?db=TFA_Defects' --data-urlencode 'q=select * from TFA_Defects_Tracking'
+$ curl -G 'http://[HOST Public IP]:8086/query?db=TFA_Defects' --data-urlencode 'q=select * from TFA_Defects_Statistics'
+```
+
+The query "show measurements" lists all the measurements for the corresponding DB. After this, one can issue the command "select * from measurement", where measurement can be any measurement from the "show measurements" output.
+
+### Image Size
+```bash
+$ cd qa-tools/quality-metrics/data-generator/tfa_metrics
+$ ./tfa_quality_metrics.sh --metric_type image_size --image_size_file [Image Size Text File]
+$ curl -G 'http://[HOST Public IP]:8086/query?db=TFA_ImageSize' --data-urlencode 'q=show measurements'
+```
+
+"show measurements" lists the measurements available for the TFA_ImageSize DB. To view the image size data, issue the command "select * from measurement", where measurement is the name of the measurement for which data needs to be viewed.
+
+#### Image Size Sample File
+```
+BuildConfig: zynqmp-tspd-tdram
+BinMode: debug
+CommitID: ed39d5e3c0709bab22821a1da3a62737c5d531de
+CommitTitle: Merge "Enabling DPU in dts file for TC0" into integration
+CommitDate: 2020-09-08T14:22:45+00:00
+/work/workspace/workspace/swqt-atf-image-size-metrics/scripts/metrics/atf_metrics/tf-topics/tools/renesas/rcar_layout_create/cert_header_sa6.elf:
+R 288
+
+/work/workspace/workspace/swqt-atf-image-size-metrics/scripts/metrics/atf_metrics/tf-topics/tools/renesas/rcar_layout_create/bootparam_sa0.elf:
+R 20
+
+/work/workspace/workspace/swqt-atf-image-size-metrics/scripts/metrics/atf_metrics/tf-topics/build/zynqmp/debug/bl31/bl31.elf:
+B 26676
+D 14193
+R 3121
+T 40916
+W 516
+
+/work/workspace/workspace/swqt-atf-image-size-metrics/scripts/metrics/atf_metrics/tf-topics/build/zynqmp/debug/bl32/bl32.elf:
+B 21512
+D 5
+R 1218
+T 11496
+W 196
+```
+
+### MISRA Defects
+```bash
+$ cd qa-tools/quality-metrics/data-generator/tfa_metrics
+$ ./tfa_quality_metrics.sh --metric_type coverity_misra --misra_defects_file [MISRA Defects Text File]
+$ curl -G 'http://[HOST Public IP]:8086/query?db=TFA_MisraDefects' --data-urlencode 'q=show measurements'
+```
+
+"show measurements" lists the measurements available for the TFA_MisraDefects DB. To view the MISRA defects data, issue the command "select * from measurement", where measurement is the name of the measurement for which data needs to be viewed.
+
+#### MISRA Defects Sample File
+```
+BuildConfig: fvp-rst-bl31
+BinMode: debug
+CommitID: ed39d5e3c0709bab22821a1da3a62737c5d531de
+CommitTitle: Merge "Enabling DPU in dts file for TC0" into integration
+CommitDate: 2020-09-08T14:22:45+00:00
+TotalDefects: 667
+MandatoryDefects: 0
+RequiredDefects: 534
+AdvisoryDefects: 119
+```
+
+### Runtime Instrumentation
+```bash
+$ cd qa-tools/quality-metrics/data-generator/tfa_metrics
+$ ./tfa_quality_metrics.sh --metric_type runtime_instrumentation --rt_instr_file [RTINSTR Text File]
+$ curl -G 'http://[HOST Public IP]:8086/query?db=TFA_RTINSTR' --data-urlencode 'q=show measurements'
+```
+
+"show measurements" lists the measurements available for the TFA_RTINSTR DB. To view the runtime instrumentation data, issue the command "select * from measurement", where measurement is the name of the measurement for which data needs to be viewed.
+
+#### Runtime Instrumentation Sample File
+```
+InstrumentationTarget: juno-tftf+aarch32-rt32.instr-r2
+CommitID: ed39d5e3c0709bab22821a1da3a62737c5d531de
+CommitTitle: Merge "Enabling DPU in dts file for TC0" into integration
+CommitDate: 2020-09-08T14:22:45+00:00
+testrtinstrsuspdeepparallel 1 0 128700 31280 9200
+testrtinstrsuspdeepparallel 1 0 128700 31280 9200
+testrtinstrsuspdeepparallel 1 1 283520 27260 9120
+testrtinstrsuspdeepparallel 1 1 283520 27260 9120
+testrtinstrsuspdeepparallel 1 2 541120 26860 214200
+testrtinstrsuspdeepparallel 1 2 541120 26860 214200
+testrtinstrsuspdeepparallel 1 3 206300 25180 9100
+testrtinstrsuspdeepparallel 1 3 206300 25180 9100
+testrtinstrsuspdeepparallel 0 0 51060 76960 6760
+testrtinstrsuspdeepparallel 0 0 51060 76960 6760
+testrtinstrsuspdeepparallel 0 1 361040 25560 203720
+testrtinstrsuspdeepparallel 0 1 361040 25560 203720
+testrtinstrsuspdeepserial 1 0 259740 28140 214060
+testrtinstrsuspdeepserial 1 0 259740 28140 214060
+testrtinstrsuspdeepserial 1 1 54480 23060 8920
+testrtinstrsuspdeepserial 1 1 54480 23060 8920
+testrtinstrsuspdeepserial 1 2 53860 23200 8920
+testrtinstrsuspdeepserial 1 2 53860 23200 8920
+testrtinstrsuspdeepserial 1 3 54280 23340 9120
+testrtinstrsuspdeepserial 1 3 54280 23340 9120
+testrtinstrsuspdeepserial 0 0 249020 25600 204180
+testrtinstrsuspdeepserial 0 0 249020 25600 204180
+testrtinstrsuspdeepserial 0 1 248800 25660 203940
+testrtinstrsuspdeepserial 0 1 248800 25660 203940
+testrtinstrcpususpparallel 1 0 209820 23500 10700
+testrtinstrcpususpparallel 1 0 209820 23500 10700
+testrtinstrcpususpparallel 1 1 287420 21920 11600
+testrtinstrcpususpparallel 1 1 287420 21920 11600
+testrtinstrcpususpparallel 1 2 364080 20980 11160
+testrtinstrcpususpparallel 1 2 364080 20980 11160
+testrtinstrcpususpparallel 1 3 441280 20720 10980
+testrtinstrcpususpparallel 1 3 441280 20720 10980
+testrtinstrcpususpparallel 0 0 52960 18920 9780
+testrtinstrcpususpparallel 0 0 52960 18920 9780
+testrtinstrcpususpparallel 0 1 130940 19020 9800
+testrtinstrcpususpparallel 0 1 130940 19020 9800
+testrtinstrcpususpserial 1 0 82460 20500 8960
+testrtinstrcpususpserial 1 0 82460 20500 8960
+testrtinstrcpususpserial 1 1 51320 20400 9040
+testrtinstrcpususpserial 1 1 51320 20400 9040
+testrtinstrcpususpserial 1 2 51180 19860 8980
+testrtinstrcpususpserial 1 2 51180 19860 8980
+testrtinstrcpususpserial 1 3 51220 20160 8940
+testrtinstrcpususpserial 1 3 51220 20160 8940
+testrtinstrcpususpserial 0 0 48560 19260 6840
+testrtinstrcpususpserial 0 0 48560 19260 6840
+testrtinstrcpususpserial 0 1 48340 19160 6760
+testrtinstrcpususpserial 0 1 48340 19160 6760
+testrtinstrcpuoffserial 1 0 260060 28060 214080
+testrtinstrcpuoffserial 1 0 260060 28060 214080
+testrtinstrcpuoffserial 1 1 55780 28100 9140
+testrtinstrcpuoffserial 1 1 55780 28100 9140
+testrtinstrcpuoffserial 1 2 55620 26860 9180
+testrtinstrcpuoffserial 1 2 55620 26860 9180
+testrtinstrcpuoffserial 1 3 55760 26440 9260
+testrtinstrcpuoffserial 1 3 55760 26440 9260
+testrtinstrcpuoffserial 0 0 251900 28880 204240
+testrtinstrcpuoffserial 0 0 251900 28880 204240
+testrtinstrcpuoffserial 0 1 252440 29560 204460
+testrtinstrcpuoffserial 0 1 252440 29560 204460
+testrtinstrpsciversionparallel 1 0 880
+testrtinstrpsciversionparallel 1 0 880
+testrtinstrpsciversionparallel 1 1 960
+testrtinstrpsciversionparallel 1 1 960
+testrtinstrpsciversionparallel 1 2 980
+testrtinstrpsciversionparallel 1 2 980
+testrtinstrpsciversionparallel 1 3 980
+testrtinstrpsciversionparallel 1 3 980
+testrtinstrpsciversionparallel 0 0 1040
+testrtinstrpsciversionparallel 0 0 1040
+testrtinstrpsciversionparallel 0 1 1180
+testrtinstrpsciversionparallel 0 1 1180
+```
+
+## License
+[BSD-3-Clause](../../license.md)
diff --git a/quality-metrics/docs/design_overview.md b/quality-metrics/docs/design_overview.md
new file mode 100644
index 0000000..77523a6
--- /dev/null
+++ b/quality-metrics/docs/design_overview.md
@@ -0,0 +1,73 @@
+# Multi-tier architecture
+The quality-metrics setup is designed around a multi-tier architecture, with different components at each tier. The current design has components at 3 tiers:
+1. front-end components, which include a number of data generator scripts that compute the metrics to be tracked and visualised, plus a Grafana visualisation component to visualise the quality metrics data.
+1. middleware components, which include a broker component that acts as an abstraction layer on top of the back-end components. This broker component performs basic data sanity checks and any data translation required.
+1. back-end components, which include the InfluxDB timeseries database as the data sink.
+
+## Front-end: Data Generator scripts and Grafana visualisation tool
+Data generator scripts are metric-specific scripts that are expected to be integrated as part of the regular CI setup. Metrics are generated at a pre-determined frequency, set by how often the scripts are executed in the team's CI setup. Each data generator script is expected to compute the agreed metric and push the metric data, along with the associated metadata, to the broker component as a JSON data fragment, which then gets pushed to the backend for visualisation.
+
+The data generator scripts are organised on a per-team basis.
+
+Grafana tool is used to visualise the quality metrics data on a per-team basis.
+
+### Defect Metric
+This metric tracks the open defects against a given project.
+
+### Code Churn Metric
+This metric tracks the code churn, that is, the number of lines added, modified and deleted, against a given tag for a project.
+
+### Code Complexity Metric
+This metric tracks the code complexity against a given tag for a project. It reports the modified McCabe score for code complexity, where a switch-statement is treated as a single decision point, thus reporting a value of 2. It uses the pmccabe utility to calculate the complexity score, and all functions whose "Modified McCabe Cyclomatic Complexity" is above the agreed-upon threshold are pushed to the database.
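+
+A minimal sketch of that threshold filtering is shown below. It assumes that the first whitespace-separated column of pmccabe's per-function output is the modified McCabe score (header and total lines are skipped), and the threshold value of 10 is purely illustrative:
+```python
+# Sketch only: filter pmccabe output for functions above an agreed threshold.
+# Assumes column 1 of each per-function line is the modified McCabe score.
+THRESHOLD = 10  # illustrative value, not the project's agreed threshold
+
+def functions_over_threshold(pmccabe_output: str, threshold: int = THRESHOLD):
+    over = []
+    for line in pmccabe_output.splitlines():
+        cols = line.split()
+        if not cols or not cols[0].isdigit():
+            continue  # skip header/total lines added by the -v/-t options
+        score = int(cols[0])
+        if score > threshold:
+            over.append((line.strip(), score))  # keep the whole line for context
+    return over
+
+# Illustrative usage over a captured complexity log:
+# with open("tfa_complexity_summary.txt") as fp:
+#     for entry, score in functions_over_threshold(fp.read()):
+#         print(score, entry)
+```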
+
+### Other Metrics
+While the above-mentioned metrics are computed by data generator scripts, there are some other metrics, like image size, MISRA defects and runtime instrumentation, which are not computed by data generator scripts. For these metrics, the role of the scripts in the data generator folder is to convert the input text file containing the data (to be written to the DB) into a JSON file (which is sent to the broker component). These input text files are expected to be generated as part of the CI setup.
+
+## Middleware: Broker component
+The broker component provides a level of abstraction and decouples the data generator components from the back-end components. This decoupling eases future changes, such as adding a new database or visualisation component, without requiring major changes to the front-end scripts.
+
+The broker component provides a simple token-based authentication scheme to validate the data sources that push metrics data to the quality metrics setup. A token is issued on a per-team basis in this setup. The broker component implements a service queue for the data requests received from clients and always performs a sanity check on the data pushed by a client. Only data that is well-formed against the agreed data template will be processed by the broker component. The broker component can also perform agreed data transformations on some of the data pushed to it before committing it to the backend database.
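+
+As an illustration of that sanity check, the sketch below validates an incoming payload against one of the JSON schemas shipped in broker-component/metrics-schemas. The schema file name and the use of the jsonschema package are assumptions made for this example; they are not a description of the broker's internal implementation:
+```python
+import json
+
+from jsonschema import ValidationError, validate  # assumed dependency
+
+# Hypothetical schema file name; see broker-component/metrics-schemas for
+# the actual agreed templates.
+with open("metrics-schemas/tfa_image_size_schema.json") as fp:
+    schema = json.load(fp)
+
+def is_well_formed(payload: dict) -> bool:
+    """Return True only if the pushed data matches the agreed template."""
+    try:
+        validate(instance=payload, schema=schema)
+        return True
+    except ValidationError:
+        return False
+```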
+
+## Back-end: InfluxDB database
+The backend consists of the data sink component, which holds the quality metrics data. An individual data model is defined for each metric. The sections below capture the details of the individual measurements and the InfluxDB line protocol outlining each data model. A separate database is created for each metric for a team, and all measurements associated with a metric are held in that database.
+
+### TF-A Defect Data Model
+This database holds the measurements containing data for the open defects raised against TFA components in GitHub.
+
+#### Measurement: TFA_Defects_Tracking
+This measurement captures the open defect count against TFA in GitHub and allows the trend to be visualized over time.
+
+Line Protocol Description: TFA_Defects_Tracking,Measured_Date=[timestamp in date format] Issue_Status=[Open from GitHub Issue states],Number_of_Defects=[integer value for the defect count]
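+
+An illustrative data point following this description (all values are hypothetical) would be:
+```
+TFA_Defects_Tracking,Measured_Date=2020-09-08 Issue_Status="Open",Number_of_Defects=25
+```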
+
+#### Measurement: TFA_Defects_Statistics
+This measurement is a holding area for the raw data that feeds into TFA_Defects_Tracking.
+
+Line Protocol Description: TFA_Defects_Statistics,Defect_ID=[defect identifier],Measured_Date=[timestamp in date format] Title=[Defect title],Issue_Status=[Open|Closed|... from GitHub Issue states],URL=[URL to the issue]
+
+### TF-A Code Churn Data Model
+This database holds the measurements that are used to visualize the trend of LoC changes over time against the TFA (trusted-firmware-a) code base.
+
+#### Measurement: TFA_CodeChurn_Tracking
+This measurement captures the count of LoC added/deleted/modified against the TFA versions and allows the trend to be visualized over time.
+
+Line Protocol Description: TFA_CodeChurn_Tracking,Git_Tag_Date=[Git Tag Date],Target_Tag=[TFA version tag],Base_Tag=[base tag] Lines_of_Change=[LoC changed]
+
+
+### TF-A Complexity Data Model
+This database holds the measurements that are used to visualize the trend of complex functions over time against the TFA code.
+
+#### Measurement: TFA_Complexity_Tracking
+This measurement captures the count of functions above a given threshold in the TFA code base and allows the trend to be visualized over time.
+
+Line Protocol Description: TFA_Complexity_Tracking,Git_Tag_Date=[Git Tag Date],Target_Tag=[TFA version tag] Threshold=[threshold value],Whitelisted=[no],Functions_Exceeding_Threshold_Not_Whitelisted=[number of functions exceeding complexity threshold which are not whitelisted]
+
+#### Measurement: TFA_Complexity_Statistics
+This measurement is a holding area for the raw data that feeds into TFA_Complexity_Tracking.
+
+Line Protocol Description: TFA_Complexity_Statistics,Git_Tag_Date=[Git Tag Date],Base_Tag=[base tag],Target_Tag=[TFA version tag],Location=[path in the code base for the function] Function_ID=[function identifier],Score=[mccabe score],Threshold=[threshold value],Whitelisted=[yes|no]
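+
+For example, a hypothetical entry in this format (location, function name, score and threshold are placeholders) would be:
+```
+TFA_Complexity_Statistics,Git_Tag_Date=2020-04-20,Base_Tag=v2.2,Target_Tag=v2.3,Location=path/to/example.c Function_ID="example_function",Score=12,Threshold=10,Whitelisted="no"
+```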
+
+Most data models can also be interpreted from the [JSON schemas](../broker-component/metrics-schemas); the "data" section contains the details about fields and tags.
+
+## License
+[BSD-3-Clause](../../license.md)
+
diff --git a/quality-metrics/docs/sample-dashboards/tfa_codechurn.json b/quality-metrics/docs/sample-dashboards/tfa_codechurn.json
new file mode 100644
index 0000000..ac9e0b7
--- /dev/null
+++ b/quality-metrics/docs/sample-dashboards/tfa_codechurn.json
@@ -0,0 +1,288 @@
+{
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "rgba(0, 211, 255, 1)",
+ "name": "Annotations & Alerts",
+ "type": "dashboard"
+ }
+ ]
+ },
+ "editable": true,
+ "gnetId": null,
+ "graphTooltip": 0,
+ "id": 3,
+ "links": [],
+ "panels": [
+ {
+ "aliasColors": {},
+ "bars": true,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "TFA_CodeChurn",
+ "decimals": 2,
+ "description": "Tracking the lines of changes in TF-A source code per release tag",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 7,
+ "w": 24,
+ "x": 0,
+ "y": 0
+ },
+ "hiddenSeries": false,
+ "id": 5,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": false,
+ "total": false,
+ "values": false
+ },
+ "lines": false,
+ "linewidth": 1,
+ "links": [],
+ "nullPointMode": "null",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "$tag_Target_Tag",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT max(Lines_of_Change) FROM \"TFA_CodeChurn_Tracking\" where $timeFilter group by Target_Tag, Git_Tag_Date order by time",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "TFA CodeChurn Tracking",
+ "tooltip": {
+ "shared": false,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "series",
+ "name": null,
+ "show": true,
+ "values": [
+ "max"
+ ]
+ },
+ "yaxes": [
+ {
+ "decimals": 0,
+ "format": "short",
+ "label": "Lines of Change",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "columns": [],
+ "datasource": "TFA_CodeChurn",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fontSize": "100%",
+ "gridPos": {
+ "h": 10,
+ "w": 24,
+ "x": 0,
+ "y": 7
+ },
+ "id": 7,
+ "links": [],
+ "pageSize": 9,
+ "scroll": true,
+ "showHeader": true,
+ "sort": {
+ "col": 0,
+ "desc": true
+ },
+ "styles": [
+ {
+ "alias": "Time",
+ "align": "auto",
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "pattern": "Time",
+ "type": "hidden"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "decimals": 2,
+ "pattern": "/.*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ }
+ ],
+ "targets": [
+ {
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "TFA_CodeChurn_Tracking",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT Base_Tag as \"Base Tag\", max(Lines_of_Change) as \"Lines of Change\" FROM \"TFA_CodeChurn_Tracking\" GROUP BY Target_Tag ",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "title": "TF-A Code Churn Statistics",
+ "transform": "table",
+ "type": "table-old"
+ }
+ ],
+ "refresh": "1d",
+ "schemaVersion": 26,
+ "style": "dark",
+ "tags": [
+ "TFA_QUALITY_METRICS"
+ ],
+ "templating": {
+ "list": []
+ },
+ "time": {
+ "from": "now-5y",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": [
+ "5s",
+ "10s",
+ "30s",
+ "1m",
+ "5m",
+ "15m",
+ "30m",
+ "1h",
+ "2h",
+ "1d"
+ ],
+ "time_options": [
+ "5m",
+ "15m",
+ "1h",
+ "6h",
+ "12h",
+ "24h",
+ "2d",
+ "7d",
+ "30d"
+ ]
+ },
+ "timezone": "",
+ "title": "TFA_Churn",
+ "uid": "tfa-churn",
+ "version": 4
+}
\ No newline at end of file
diff --git a/quality-metrics/docs/sample-dashboards/tfa_complexity.json b/quality-metrics/docs/sample-dashboards/tfa_complexity.json
new file mode 100644
index 0000000..87ee822
--- /dev/null
+++ b/quality-metrics/docs/sample-dashboards/tfa_complexity.json
@@ -0,0 +1,487 @@
+{
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "rgba(0, 211, 255, 1)",
+ "name": "Annotations & Alerts",
+ "type": "dashboard"
+ }
+ ]
+ },
+ "editable": true,
+ "gnetId": null,
+ "graphTooltip": 0,
+ "id": 5,
+ "iteration": 1600199355075,
+ "links": [],
+ "panels": [
+ {
+ "content": "<b><center>First table shows the details for the latest tag and the second table shows the details for the selected \"Target Tag\". </b>",
+ "datasource": null,
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 2,
+ "w": 24,
+ "x": 0,
+ "y": 0
+ },
+ "id": 6,
+ "links": [],
+ "mode": "html",
+ "options": {
+ "content": "<b><center>First table shows the details for the latest tag and the second table shows the details for the selected \"Target Tag\". </b>",
+ "mode": "html"
+ },
+ "pluginVersion": "7.1.0",
+ "title": "Please note that when \"yes\" is selected from the above drop-down, data for both whitelisted and non-whitelisted functions is shown. Currently there are 0 whitelisted functions for TF-A.",
+ "type": "text"
+ },
+ {
+ "aliasColors": {},
+ "bars": true,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "TFA_Complexity",
+ "description": "Tracking the number of functions exceeding threshold",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 9,
+ "w": 24,
+ "x": 0,
+ "y": 2
+ },
+ "hiddenSeries": false,
+ "id": 3,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": false,
+ "total": false,
+ "values": false
+ },
+ "lines": false,
+ "linewidth": 1,
+ "links": [],
+ "nullPointMode": "null",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "$tag_Target_Tag",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT max(Functions_Exceeding_Threshold_Not_Whitelisted) FROM \"TFA_Complexity_Tracking\" where ((Whitelisted =~ /^$Whitelisted$/ OR Whitelisted =~ /^no$/) AND $timeFilter) group by Target_Tag, Git_Tag_Date order by time",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "TFA Complexity Tracking",
+ "tooltip": {
+ "shared": false,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "series",
+ "name": null,
+ "show": true,
+ "values": [
+ "max"
+ ]
+ },
+ "yaxes": [
+ {
+ "decimals": 0,
+ "format": "short",
+ "label": "Functions Exceeding Threshold",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "columns": [],
+ "datasource": "TFA_Complexity",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fontSize": "100%",
+ "gridPos": {
+ "h": 9,
+ "w": 24,
+ "x": 0,
+ "y": 11
+ },
+ "id": 4,
+ "links": [],
+ "pageSize": 100,
+ "scroll": true,
+ "showHeader": true,
+ "sort": {
+ "col": 0,
+ "desc": true
+ },
+ "styles": [
+ {
+ "alias": "Time",
+ "align": "auto",
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "pattern": "Time",
+ "type": "date"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "decimals": 2,
+ "pattern": "/.*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ }
+ ],
+ "targets": [
+ {
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT Base_Tag, Function_ID, Location, Score, Threshold, Whitelisted FROM \"TFA_Complexity_Statistics\" where ((Whitelisted =~ /^$Whitelisted$/ OR Whitelisted =~ /^no$/) AND $timeFilter AND Target_Tag =~ /^$TargetTag1$/) GROUP BY Target_Tag",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "title": "TFA Complexity Statistics for Target Tag $TargetTag1",
+ "transform": "table",
+ "type": "table-old"
+ },
+ {
+ "columns": [],
+ "datasource": "TFA_Complexity",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fontSize": "100%",
+ "gridPos": {
+ "h": 9,
+ "w": 24,
+ "x": 0,
+ "y": 20
+ },
+ "id": 7,
+ "links": [],
+ "pageSize": 100,
+ "scroll": true,
+ "showHeader": true,
+ "sort": {
+ "col": 0,
+ "desc": true
+ },
+ "styles": [
+ {
+ "alias": "Time",
+ "align": "auto",
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "pattern": "Time",
+ "type": "date"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "decimals": 2,
+ "pattern": "/.*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ }
+ ],
+ "targets": [
+ {
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT Base_Tag, Function_ID, Location, Score, Threshold, Whitelisted FROM \"TFA_Complexity_Statistics\" where ((Whitelisted =~ /^$Whitelisted$/ OR Whitelisted =~ /^no$/) AND Target_Tag =~ /^$TargetTag2$/) GROUP BY Target_Tag",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "title": "TFA Complexity Statistics for Target Tag $TargetTag2",
+ "transform": "table",
+ "type": "table-old"
+ }
+ ],
+ "schemaVersion": 26,
+ "style": "dark",
+ "tags": [
+ "TFA_QUALITY_METRICS"
+ ],
+ "templating": {
+ "list": [
+ {
+ "allValue": null,
+ "current": {
+ "tags": [],
+ "text": "no",
+ "value": "no"
+ },
+ "hide": 0,
+ "includeAll": false,
+ "label": "Show data including \"whitelisted\" functions?",
+ "multi": false,
+ "name": "Whitelisted",
+ "options": [
+ {
+ "selected": false,
+ "text": "yes",
+ "value": "yes"
+ },
+ {
+ "selected": true,
+ "text": "no",
+ "value": "no"
+ }
+ ],
+ "query": "yes,no",
+ "skipUrlSync": false,
+ "type": "custom"
+ },
+ {
+ "allValue": null,
+ "current": {
+ "selected": false,
+ "text": "v2.3",
+ "value": "v2.3"
+ },
+ "datasource": "TFA_Complexity",
+ "definition": "SELECT LAST(Target_Tag) FROM (SELECT Target_Tag, Function_ID FROM TFA_Complexity_Statistics)",
+ "hide": 2,
+ "includeAll": false,
+ "label": "Target Tag 1",
+ "multi": false,
+ "name": "TargetTag1",
+ "options": [],
+ "query": "SELECT LAST(Target_Tag) FROM (SELECT Target_Tag, Function_ID FROM TFA_Complexity_Statistics)",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "current": {
+ "selected": false,
+ "text": "v2.0",
+ "value": "v2.0"
+ },
+ "datasource": "TFA_Complexity",
+ "definition": "SHOW TAG VALUES WITH KEY=\"Target_Tag\"",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Target Tag",
+ "multi": false,
+ "name": "TargetTag2",
+ "options": [],
+ "query": "SHOW TAG VALUES WITH KEY=\"Target_Tag\"",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ }
+ ]
+ },
+ "time": {
+ "from": "now-2y",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": [
+ "5s",
+ "10s",
+ "30s",
+ "1m",
+ "5m",
+ "15m",
+ "30m",
+ "1h",
+ "2h",
+ "1d"
+ ],
+ "time_options": [
+ "5m",
+ "15m",
+ "1h",
+ "6h",
+ "12h",
+ "24h",
+ "2d",
+ "7d",
+ "30d"
+ ]
+ },
+ "timezone": "",
+ "title": "TFA_Complexity",
+ "uid": "tfa-complexity",
+ "version": 6
+}
\ No newline at end of file
diff --git a/quality-metrics/docs/sample-dashboards/tfa_defects.json b/quality-metrics/docs/sample-dashboards/tfa_defects.json
new file mode 100644
index 0000000..17b7f1e
--- /dev/null
+++ b/quality-metrics/docs/sample-dashboards/tfa_defects.json
@@ -0,0 +1,275 @@
+{
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "rgba(0, 211, 255, 1)",
+ "name": "Annotations & Alerts",
+ "type": "dashboard"
+ }
+ ]
+ },
+ "editable": true,
+ "gnetId": null,
+ "graphTooltip": 0,
+ "id": 6,
+ "links": [],
+ "panels": [
+ {
+ "aliasColors": {},
+ "bars": true,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "TFA_Defects",
+ "decimals": 0,
+ "description": "Tracks the number of defects of TF-A per release tag",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 7,
+ "w": 24,
+ "x": 0,
+ "y": 0
+ },
+ "hiddenSeries": false,
+ "id": 1,
+ "legend": {
+ "alignAsTable": false,
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "rightSide": false,
+ "show": false,
+ "total": false,
+ "values": false
+ },
+ "lines": false,
+ "linewidth": 1,
+ "links": [],
+ "nullPointMode": "null",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "Total Defects",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT last(Number_of_Defects) as Defects FROM \"TFA_Defects_Tracking\" where $timeFilter GROUP BY time(1w) fill(none)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "TFA Defects Tracking",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "decimals": 0,
+ "format": "short",
+ "label": "Number of Defects",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "datasource": "TFA_Defects",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {
+ "align": null
+ },
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "red",
+ "value": 80
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 24,
+ "x": 0,
+ "y": 7
+ },
+ "id": 2,
+ "links": [],
+ "options": {
+ "showHeader": true
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "alias": "",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT * FROM \"TFA_Defects_Statistics\" where $timeFilter",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "title": "TF-A Defects Statistics",
+ "type": "table"
+ }
+ ],
+ "refresh": "1d",
+ "schemaVersion": 26,
+ "style": "dark",
+ "tags": [
+ "TFA_QUALITY_METRICS"
+ ],
+ "templating": {
+ "list": []
+ },
+ "time": {
+ "from": "now-90d",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": [
+ "5s",
+ "10s",
+ "30s",
+ "1m",
+ "5m",
+ "15m",
+ "30m",
+ "1h",
+ "2h",
+ "1d"
+ ],
+ "time_options": [
+ "5m",
+ "15m",
+ "1h",
+ "6h",
+ "12h",
+ "24h",
+ "2d",
+ "7d",
+ "30d"
+ ]
+ },
+ "timezone": "",
+ "title": "TFA_Defects",
+ "uid": "tfa-defects",
+ "version": 6
+}
\ No newline at end of file
diff --git a/quality-metrics/docs/sample-dashboards/tfa_image_size.json b/quality-metrics/docs/sample-dashboards/tfa_image_size.json
new file mode 100644
index 0000000..8325e46
--- /dev/null
+++ b/quality-metrics/docs/sample-dashboards/tfa_image_size.json
@@ -0,0 +1,642 @@
+{
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "TFA_ImageSize",
+ "enable": false,
+ "hide": false,
+ "iconColor": "#e0f9d7",
+ "limit": 100,
+ "name": "View PR Details",
+ "query": "select PullRequestURL, Pull_Request_Title, CommitID, BL1_B, BL2_B, BL1U_B, BL2U_B, BL31_B, BL32_B from \"[[BuildConfig]]\"",
+ "showIn": 0,
+ "tagsColumn": "CommitID",
+ "textColumn": "Pull_Request_Title",
+ "type": "dashboard"
+ }
+ ]
+ },
+ "editable": true,
+ "gnetId": null,
+ "graphTooltip": 0,
+ "id": 9,
+ "iteration": 1600199357868,
+ "links": [],
+ "panels": [
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "TFA_ImageSize",
+ "decimals": 3,
+ "description": "This shows the trend in image size of .ELF files for selected build config in stacked manner. The values are individually stacked, not cumulative.\n\nBuild Config and Bin Mode can be changed from the drop down at the top of dashboard.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 2,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 9,
+ "w": 24,
+ "x": 0,
+ "y": 0
+ },
+ "hiddenSeries": false,
+ "id": 10,
+ "legend": {
+ "avg": false,
+ "current": true,
+ "hideEmpty": true,
+ "hideZero": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 1,
+ "links": [],
+ "nullPointMode": "null",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "$col",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT (\"BL1_B\" + \"BL1_D\" + \"BL1_R\" + \"BL1_T\" + \"BL1_W\") as bl1, (\"BL1U_B\" + \"BL1U_D\" + \"BL1U_R\" + \"BL1U_T\" + \"BL1U_W\") as bl1u, (\"BL2_B\" + \"BL2_D\" + \"BL2_R\" + \"BL2_T\" + \"BL2_W\") as bl2, (\"BL2U_B\" + \"BL2U_D\" + \"BL2U_R\" + \"BL2U_T\" + \"BL2U_W\") as bl2u, (\"BL31_B\" + \"BL31_D\" + \"BL31_R\" + \"BL31_T\" + \"BL31_W\") as bl31, (\"BL32_B\" + \"BL32_D\" + \"BL32_R\" + \"BL32_T\" + \"BL32_W\") as bl32 from \"$BuildConfig\" WHERE (\"BinMode\" =~ /^$BinMode$/ ) fill(0)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "TF-A Image Size Time Series Graph for Selected Build Config ($BuildConfig) and Bin Mode ($BinMode)",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "decimals": 3,
+ "format": "decbytes",
+ "label": "Image Size",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": "",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "columns": [],
+ "datasource": "TFA_ImageSize",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fontSize": "100%",
+ "gridPos": {
+ "h": 9,
+ "w": 24,
+ "x": 0,
+ "y": 9
+ },
+ "id": 4,
+ "links": [],
+ "pageSize": null,
+ "scroll": true,
+ "showHeader": true,
+ "sort": {
+ "col": 0,
+ "desc": true
+ },
+ "styles": [
+ {
+ "alias": "Time",
+ "align": "auto",
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "pattern": "Time",
+ "type": "date"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "decimals": 3,
+ "pattern": "/^bl*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "decbytes"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "decimals": 2,
+ "link": true,
+ "linkTargetBlank": true,
+ "linkTooltip": "",
+ "linkUrl": "${__cell:raw}",
+ "pattern": "Pull Request URL",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "decimals": 2,
+ "pattern": "/.*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ }
+ ],
+ "targets": [
+ {
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT CommitTitle as \"Commit Title\", \"BinMode\", \"CommitID\" as \"Commit ID\", (\"BL1_B\" + \"BL1_D\" + \"BL1_R\" + \"BL1_T\" + \"BL1_W\") as bl1, (\"BL1U_B\" + \"BL1U_D\" + \"BL1U_R\" + \"BL1U_T\" + \"BL1U_W\") as bl1u, (\"BL2_B\" + \"BL2_D\" + \"BL2_R\" + \"BL2_T\" + \"BL2_W\") as bl2, (\"BL2U_B\" + \"BL2U_D\" + \"BL2U_R\" + \"BL2U_T\" + \"BL2U_W\") as bl2u, (\"BL31_B\" + \"BL31_D\" + \"BL31_R\" + \"BL31_T\" + \"BL31_W\") as bl31, (\"BL32_B\" + \"BL32_D\" + \"BL32_R\" + \"BL32_T\" + \"BL32_W\") as bl32 from \"$BuildConfig\" WHERE (\"BinMode\" =~ /^$BinMode$/ ) fill(0)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "title": "TF-A Image Size Details Table for $BuildConfig",
+ "transform": "table",
+ "type": "table-old"
+ },
+ {
+ "aliasColors": {},
+ "bars": true,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "TFA_ImageSize",
+ "decimals": 3,
+ "description": "",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 9,
+ "w": 24,
+ "x": 0,
+ "y": 18
+ },
+ "hiddenSeries": false,
+ "id": 8,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": true,
+ "hideEmpty": true,
+ "hideZero": true,
+ "max": false,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "sideWidth": null,
+ "total": false,
+ "values": true
+ },
+ "lines": false,
+ "linewidth": 1,
+ "links": [],
+ "nullPointMode": "null",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "$col",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT *\nFROM \"$BuildConfig\"\nWHERE (\"CommitID\" =~ /^$CommitID$/ AND \"BinMode\" =~ /^$BinMode$/ )\nGROUP BY \"BinMode\", \"PullRequestURL\", \"CommitID\"",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Image size memory details for selected Commit ID ($CommitID) and Bin Mode ($BinMode) for $BuildConfig",
+ "tooltip": {
+ "shared": false,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "series",
+ "name": null,
+ "show": true,
+ "values": [
+ "total"
+ ]
+ },
+ "yaxes": [
+ {
+ "decimals": 3,
+ "format": "decbytes",
+ "label": "",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": "",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "breakPoint": "50%",
+ "cacheTimeout": null,
+ "combine": {
+ "label": "Others",
+ "threshold": 0
+ },
+ "datasource": "TFA_ImageSize",
+ "decimals": 3,
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fontSize": "80%",
+ "format": "decbytes",
+ "gridPos": {
+ "h": 9,
+ "w": 19,
+ "x": 0,
+ "y": 27
+ },
+ "id": 6,
+ "interval": null,
+ "legend": {
+ "header": "Image Size",
+ "show": true,
+ "sideWidth": 300,
+ "values": true
+ },
+ "legendType": "Right side",
+ "links": [],
+ "maxDataPoints": 3,
+ "nullPointMode": "connected",
+ "pieType": "pie",
+ "strokeWidth": "2",
+ "targets": [
+ {
+ "alias": "$col",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT (\"BL1_B\" + \"BL1_D\" + \"BL1_R\" + \"BL1_T\" + \"BL1_W\") as bl1, (\"BL1U_B\" + \"BL1U_D\" + \"BL1U_R\" + \"BL1U_T\" + \"BL1U_W\") as bl1u, (\"BL2_B\" + \"BL2_D\" + \"BL2_R\" + \"BL2_T\" + \"BL2_W\") as bl2, (\"BL2U_B\" + \"BL2U_D\" + \"BL2U_R\" + \"BL2U_T\" + \"BL2U_W\") as bl2u, (\"BL31_B\" + \"BL31_D\" + \"BL31_R\" + \"BL31_T\" + \"BL31_W\") as bl31, (\"BL32_B\" + \"BL32_D\" + \"BL32_R\" + \"BL32_T\" + \"BL32_W\") as bl32 \nFROM \"$BuildConfig\"\nWHERE (\"CommitID\" =~ /^$CommitID$/ AND (\"BinMode\" =~ /^$BinMode$/ ))\nGROUP BY \"CommitID\", \"PullRequestURL\", \"BinMode\" fill(0)\n",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "title": "Image size pie chart for selected Commit ID ($CommitID) and Bin Mode ($BinMode) for $BuildConfig",
+ "type": "grafana-piechart-panel",
+ "valueName": "current"
+ },
+ {
+ "content": "<li>\"BlX_B\": Size of uninitialized data section</li>\n<li>\"BlX_D\": Size of initialized data section</li>\n<li>\"BlX_R\": Size of read only data section</li>\n<li>\"BlX_T\": Size of text (code) section</li>\n<li>\"BlX_V\": Size of weak object</li>\n<li>\"BlX_W\": Size of weak symbol</li>\n\n<br>Build Config, Commit ID and Bin Mode can<br>\nbe changed from the drop down menu at <br>the top of\npage.",
+ "datasource": null,
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 5,
+ "x": 19,
+ "y": 27
+ },
+ "id": 12,
+ "links": [],
+ "mode": "html",
+ "options": {
+ "content": "<li>\"BlX_B\": Size of uninitialized data section</li>\n<li>\"BlX_D\": Size of initialized data section</li>\n<li>\"BlX_R\": Size of read only data section</li>\n<li>\"BlX_T\": Size of text (code) section</li>\n<li>\"BlX_V\": Size of weak object</li>\n<li>\"BlX_W\": Size of weak symbol</li>\n\n<br>Build Config, Commit ID and Bin Mode can<br>\nbe changed from the drop down menu at <br>the top of\npage.",
+ "mode": "html"
+ },
+ "pluginVersion": "7.1.0",
+ "title": "Memory Section Details:",
+ "transparent": true,
+ "type": "text"
+ }
+ ],
+ "refresh": "1d",
+ "schemaVersion": 26,
+ "style": "dark",
+ "tags": [
+ "TFA_QUALITY_METRICS"
+ ],
+ "templating": {
+ "list": [
+ {
+ "allValue": null,
+ "current": {
+ "selected": false,
+ "text": "poplar-default",
+ "value": "poplar-default"
+ },
+ "datasource": "TFA_ImageSize",
+ "definition": "",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Build Config",
+ "multi": false,
+ "name": "BuildConfig",
+ "options": [],
+ "query": "show measurements",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 1,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "current": {
+ "selected": false,
+ "text": "9b2bf15016",
+ "value": "9b2bf15016"
+ },
+ "datasource": "TFA_ImageSize",
+ "definition": "",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Commit ID",
+ "multi": false,
+ "name": "CommitID",
+ "options": [],
+ "query": "SHOW TAG VALUES WITH KEY = \"CommitID\"",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "current": {
+ "selected": false,
+ "text": "Debug",
+ "value": "Debug"
+ },
+ "datasource": "TFA_ImageSize",
+ "definition": "",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Bin Mode",
+ "multi": false,
+ "name": "BinMode",
+ "options": [],
+ "query": "SHOW TAG VALUES WITH KEY = \"BinMode\"",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ }
+ ]
+ },
+ "time": {
+ "from": "now-30d",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": [
+ "1d"
+ ],
+ "time_options": [
+ "5m",
+ "15m",
+ "1h",
+ "6h",
+ "12h",
+ "24h",
+ "2d",
+ "7d",
+ "30d"
+ ]
+ },
+ "timezone": "",
+ "title": "TFA_Image_Size",
+ "uid": "GkzYOKFiz",
+ "version": 6
+}
\ No newline at end of file
diff --git a/quality-metrics/docs/sample-dashboards/tfa_misra_defects.json b/quality-metrics/docs/sample-dashboards/tfa_misra_defects.json
new file mode 100644
index 0000000..303e802
--- /dev/null
+++ b/quality-metrics/docs/sample-dashboards/tfa_misra_defects.json
@@ -0,0 +1,897 @@
+{
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "TFA_MisraDefects",
+ "enable": false,
+ "hide": false,
+ "iconColor": "#e0f9d7",
+ "limit": 100,
+ "name": "View PR Details",
+ "query": "select PullRequestURL, Pull_Request_Title, CommitID, TotalDefects from \"[[BuildConfig]]\"",
+ "showIn": 0,
+ "tagsColumn": "CommitID",
+ "textColumn": "Pull_Request_Title",
+ "type": "dashboard"
+ }
+ ]
+ },
+ "editable": true,
+ "gnetId": null,
+ "graphTooltip": 0,
+ "id": 8,
+ "iteration": 1600199358960,
+ "links": [],
+ "panels": [
+ {
+ "aliasColors": {
+ "MandatoryDefects": "#bf1b00",
+ "RequiredDefects": "#eab839"
+ },
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "TFA_MisraDefects",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 9,
+ "w": 24,
+ "x": 0,
+ "y": 0
+ },
+ "hiddenSeries": false,
+ "id": 2,
+ "legend": {
+ "alignAsTable": false,
+ "avg": false,
+ "current": true,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "$col",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT TotalDefects FROM \"$BuildConfig\" WHERE (\"BinMode\" =~ /^Debug$/ )",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "$col",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT MandatoryDefects FROM \"$BuildConfig\" WHERE (\"BinMode\" =~ /^Debug$/ )",
+ "rawQuery": true,
+ "refId": "B",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "$col",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT RequiredDefects FROM \"$BuildConfig\" WHERE (\"BinMode\" =~ /^Debug$/ )",
+ "rawQuery": true,
+ "refId": "C",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "$col",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT AdvisoryDefects FROM \"$BuildConfig\" WHERE (\"BinMode\" =~ /^Debug$/ )",
+ "rawQuery": true,
+ "refId": "D",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "TF-A Misra Defects Time Series Graph for Selected Build Config ($BuildConfig) and Bin Mode Debug",
+ "tooltip": {
+ "shared": true,
+ "sort": 1,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "columns": [],
+ "datasource": "TFA_MisraDefects",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fontSize": "100%",
+ "gridPos": {
+ "h": 9,
+ "w": 24,
+ "x": 0,
+ "y": 9
+ },
+ "id": 4,
+ "links": [],
+ "pageSize": null,
+ "scroll": true,
+ "showHeader": true,
+ "sort": {
+ "col": 0,
+ "desc": true
+ },
+ "styles": [
+ {
+ "alias": "Time",
+ "align": "auto",
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "pattern": "Time",
+ "type": "date"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "decimals": 2,
+ "link": true,
+ "linkTargetBlank": true,
+ "linkTooltip": "",
+ "linkUrl": "${__cell:raw} ",
+ "pattern": "Pull Request URL",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "decimals": 3,
+ "pattern": "/.*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ }
+ ],
+ "targets": [
+ {
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT CommitTitle as \"Commit Title\", \"BinMode\", \"CommitID\" as \"Commit ID\", \"MandatoryDefects\" as \"Mandatory Defects\", \"RequiredDefects\" as \"Required Defects\", \"AdvisoryDefects\" as \"Advisory Defects\", \"TotalDefects\" as \"Total Defects\" from \"$BuildConfig\" WHERE (\"BinMode\" =~ /^Debug$/ )",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "title": "TF-A Misra C Defects Details Table for $BuildConfig",
+ "transform": "table",
+ "type": "table-old"
+ },
+ {
+ "columns": [],
+ "datasource": "TFA_MisraDefects",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fontSize": "100%",
+ "gridPos": {
+ "h": 5,
+ "w": 24,
+ "x": 0,
+ "y": 18
+ },
+ "id": 6,
+ "links": [],
+ "pageSize": null,
+ "scroll": true,
+ "showHeader": true,
+ "sort": {
+ "col": 0,
+ "desc": true
+ },
+ "styles": [
+ {
+ "alias": "Time",
+ "align": "auto",
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "pattern": "Time",
+ "type": "date"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "decimals": 2,
+ "link": true,
+ "linkTargetBlank": true,
+ "linkTooltip": "",
+ "linkUrl": "$__cell",
+ "pattern": "Pull Request URL",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "decimals": 3,
+ "pattern": "/.*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ }
+ ],
+ "targets": [
+ {
+ "alias": "$col",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT \"CommitID\" as \"Commit ID\", CommitTitle as \"Commit Title\", \"BinMode\", \"MandatoryDefects\" as \"Mandatory Defects\", \"RequiredDefects\" as \"Required Defects\", \"AdvisoryDefects\" as \"Advisory Defects\", \"TotalDefects\" as \"Total Defects\" from \"$BuildConfig\" WHERE (\"BinMode\" =~ /^Debug$/ AND \"CommitID\" =~ /^$CommitID1$/ )",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT \"CommitID\" as \"Commit ID\", CommitTitle as \"Commit Title\", \"BinMode\", \"MandatoryDefects\" as \"Mandatory Defects\", \"RequiredDefects\" as \"Required Defects\", \"AdvisoryDefects\" as \"Advisory Defects\", \"TotalDefects\" as \"Total Defects\" from \"$BuildConfig\" WHERE (\"BinMode\" =~ /^Debug$/ AND \"CommitID\" =~ /^$CommitID2$/ )",
+ "rawQuery": true,
+ "refId": "B",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "title": "Comparison table between $CommitID1 and $CommitID2 for BinMode \"Debug\"",
+ "transform": "table",
+ "transparent": true,
+ "type": "table-old"
+ },
+ {
+ "aliasColors": {
+ "Advisory Defects": "#ef843c",
+ "Mandatory Defects": "#bf1b00",
+ "Total Defects": "#629e51"
+ },
+ "bars": true,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "TFA_MisraDefects",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 9,
+ "w": 24,
+ "x": 0,
+ "y": 23
+ },
+ "hiddenSeries": false,
+ "id": 8,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": true,
+ "max": false,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": false,
+ "linewidth": 1,
+ "links": [],
+ "nullPointMode": "null",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "Mandatory Defects",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "hide": false,
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT DIFFERENCE(\"MandatoryDefects\") FROM \"$BuildConfig\" WHERE ((\"CommitID\" =~ /^$CommitID1$/ AND (\"BinMode\" =~ /^Debug$/ )) OR (\"CommitID\" =~ /^$CommitID2$/ AND (\"BinMode\" =~ /^Debug$/ )))",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "Required Defects",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "hide": false,
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT DIFFERENCE(\"RequiredDefects\") FROM \"$BuildConfig\" WHERE ((\"CommitID\" =~ /^$CommitID1$/ AND (\"BinMode\" =~ /^Debug$/ )) OR (\"CommitID\" =~ /^$CommitID2$/ AND (\"BinMode\" =~ /^Debug$/ )))",
+ "rawQuery": true,
+ "refId": "B",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "Advisory Defects",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "hide": false,
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT DIFFERENCE(\"AdvisoryDefects\") FROM \"$BuildConfig\" WHERE ((\"CommitID\" =~ /^$CommitID1$/ AND (\"BinMode\" =~ /^Debug$/ )) OR (\"CommitID\" =~ /^$CommitID2$/ AND (\"BinMode\" =~ /^Debug$/ )))",
+ "rawQuery": true,
+ "refId": "C",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "Total Defects",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "hide": false,
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT DIFFERENCE(\"TotalDefects\") FROM \"$BuildConfig\" WHERE ((\"CommitID\" =~ /^$CommitID1$/ AND (\"BinMode\" =~ /^Debug$/ )) OR (\"CommitID\" =~ /^$CommitID2$/ AND (\"BinMode\" =~ /^Debug$/ )))",
+ "rawQuery": true,
+ "refId": "D",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Comparison Chart between [$CommitID1,Debug] and [$CommitID2,Debug] Misra C Defects for \"$BuildConfig\"",
+ "tooltip": {
+ "shared": false,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "transparent": true,
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "series",
+ "name": null,
+ "show": true,
+ "values": [
+ "total"
+ ]
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "Difference in Misra C Defects",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ }
+ ],
+ "schemaVersion": 26,
+ "style": "dark",
+ "tags": [
+ "TFA_QUALITY_METRICS"
+ ],
+ "templating": {
+ "list": [
+ {
+ "allValue": null,
+ "current": {
+ "selected": false,
+ "text": "fvp-rst-bl31",
+ "value": "fvp-rst-bl31"
+ },
+ "datasource": "TFA_MisraDefects",
+ "definition": "",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Build Config",
+ "multi": false,
+ "name": "BuildConfig",
+ "options": [],
+ "query": "show measurements",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "current": {
+ "selected": false,
+ "text": "9b2bf15016",
+ "value": "9b2bf15016"
+ },
+ "datasource": "TFA_MisraDefects",
+ "definition": "",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Commit ID 1",
+ "multi": false,
+ "name": "CommitID1",
+ "options": [],
+ "query": "SHOW TAG VALUES WITH KEY = \"CommitID\" ",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "current": {
+ "selected": false,
+ "text": "a41ca4c344",
+ "value": "a41ca4c344"
+ },
+ "datasource": "TFA_MisraDefects",
+ "definition": "",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Commit ID 2",
+ "multi": false,
+ "name": "CommitID2",
+ "options": [],
+ "query": "SHOW TAG VALUES WITH KEY = \"CommitID\" ",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ }
+ ]
+ },
+ "time": {
+ "from": "now-30d",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": [
+ "1d"
+ ],
+ "time_options": [
+ "5m",
+ "15m",
+ "1h",
+ "6h",
+ "12h",
+ "24h",
+ "2d",
+ "7d",
+ "30d"
+ ]
+ },
+ "timezone": "",
+ "title": "TFA_Misra_Defects",
+ "uid": "41hRgW-mz",
+ "version": 10
+}
\ No newline at end of file
diff --git a/quality-metrics/docs/sample-dashboards/tfa_runtime_perf_details.json b/quality-metrics/docs/sample-dashboards/tfa_runtime_perf_details.json
new file mode 100644
index 0000000..bbe140b
--- /dev/null
+++ b/quality-metrics/docs/sample-dashboards/tfa_runtime_perf_details.json
@@ -0,0 +1,1089 @@
+{
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "TFA_RunTime_Perf",
+ "enable": false,
+ "hide": false,
+ "iconColor": "#fce2de",
+ "limit": 100,
+ "name": "View Commit Details",
+ "query": "select CommitTitle, CommitID, TC_Name, CPU_Core, CacheFlush, Latency_EL3Entry_EL3Exit from \"[[Instrumentation]]\" WHERE ($timeFilter AND \"TC_Name\" =~ /^$TestCase$/ AND \"CPU_Core\" =~ /^0/ AND \"Cluster_ID\" =~ /^0/) LIMIT 1000",
+ "showIn": 0,
+ "tagsColumn": "CommitID",
+ "textColumn": "CommitTitle",
+ "type": "dashboard"
+ }
+ ]
+ },
+ "editable": true,
+ "gnetId": null,
+ "graphTooltip": 0,
+ "id": 10,
+ "iteration": 1600199360656,
+ "links": [],
+ "panels": [
+ {
+ "aliasColors": {
+ "Cluster ID: 0, CPU Core: 0": "#eab839",
+ "Cluster ID: 0, CPU Core: 1": "#eab839",
+ "Cluster ID: 1, CPU Core: 0": "#70dbed",
+ "Cluster ID: 1, CPU Core: 1": "#70dbed",
+ "Cluster ID: 1, CPU Core: 2": "#70dbed",
+ "Cluster ID: 1, CPU Core: 3": "#70dbed",
+ "Latency_CPUWakeup_EL3Exit for CPU Core 0": "#e24d42"
+ },
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "TFA_RunTime_Perf",
+ "decimals": 3,
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 9,
+ "w": 24,
+ "x": 0,
+ "y": 0
+ },
+ "hiddenSeries": false,
+ "id": 2,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": true,
+ "hideEmpty": true,
+ "hideZero": false,
+ "max": false,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": false,
+ "linewidth": 1,
+ "links": [],
+ "nullPointMode": "null",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 3,
+ "points": true,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "Cluster ID: 0, CPU Core: 0",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT \"$Latency\" FROM \"$Instrumentation\" WHERE (\"TC_Name\" =~ /^$TestCase$/ AND \"CPU_Core\" =~ /^0/ AND \"Cluster_ID\" =~ /^0/)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "Cluster ID: 0, CPU Core: 1",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT \"$Latency\" FROM \"$Instrumentation\" WHERE (\"TC_Name\" =~ /^$TestCase$/ AND \"CPU_Core\" =~ /^1/ AND \"Cluster_ID\" =~ /^0/)",
+ "rawQuery": true,
+ "refId": "B",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "Cluster ID: 0, CPU Core: 2",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT \"$Latency\" FROM \"$Instrumentation\" WHERE (\"TC_Name\" =~ /^$TestCase$/ AND \"CPU_Core\" =~ /^2/ AND \"Cluster_ID\" =~ /^0/)",
+ "rawQuery": true,
+ "refId": "C",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "Cluster ID: 0, CPU Core: 3",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT \"$Latency\" FROM \"$Instrumentation\" WHERE (\"TC_Name\" =~ /^$TestCase$/ AND \"CPU_Core\" =~ /^3/ AND \"Cluster_ID\" =~ /^0/)",
+ "rawQuery": true,
+ "refId": "D",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "Cluster ID: 1, CPU Core: 0",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT \"$Latency\" FROM \"$Instrumentation\" WHERE (\"TC_Name\" =~ /^$TestCase$/ AND \"CPU_Core\" =~ /^0/ AND \"Cluster_ID\" =~ /^1/)",
+ "rawQuery": true,
+ "refId": "E",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "Cluster ID: 1, CPU Core: 1",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT \"$Latency\" FROM \"$Instrumentation\" WHERE (\"TC_Name\" =~ /^$TestCase$/ AND \"CPU_Core\" =~ /^1/ AND \"Cluster_ID\" =~ /^1/)",
+ "rawQuery": true,
+ "refId": "F",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "Cluster ID: 1, CPU Core: 2",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT \"$Latency\" FROM \"$Instrumentation\" WHERE (\"TC_Name\" =~ /^$TestCase$/ AND \"CPU_Core\" =~ /^2/ AND \"Cluster_ID\" =~ /^1/)",
+ "rawQuery": true,
+ "refId": "G",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "Cluster ID: 1, CPU Core: 3",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT \"$Latency\" FROM \"$Instrumentation\" WHERE (\"TC_Name\" =~ /^$TestCase$/ AND \"CPU_Core\" =~ /^3/ AND \"Cluster_ID\" =~ /^1/)",
+ "rawQuery": true,
+ "refId": "H",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "[TC: $TestCase] $Latency Graph for $Instrumentation",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "transparent": true,
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "decimals": 3,
+ "format": "µs",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "columns": [],
+ "datasource": "TFA_RunTime_Perf",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fontSize": "100%",
+ "gridPos": {
+ "h": 9,
+ "w": 24,
+ "x": 0,
+ "y": 9
+ },
+ "id": 7,
+ "links": [],
+ "pageSize": null,
+ "scroll": true,
+ "showHeader": true,
+ "sort": {
+ "col": 0,
+ "desc": true
+ },
+ "styles": [
+ {
+ "alias": "Time",
+ "align": "auto",
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "pattern": "Time",
+ "type": "hidden"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "decimals": 3,
+ "pattern": "/^Max*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "ns"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "decimals": 2,
+ "link": true,
+ "linkTargetBlank": true,
+ "linkTooltip": "",
+ "linkUrl": "${__cell:raw}",
+ "pattern": "Pull_Request_URL",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "decimals": 2,
+ "pattern": "/.*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ }
+ ],
+ "targets": [
+ {
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT CommitID, CommitTitle as \"Commit Title\", Cluster_ID as \"Cluster ID\", MAX($Latency) AS \"Max $Latency\"\nFROM \"[[Instrumentation]]\" WHERE (TC_Name =~ /^$TestCase$/ AND Cluster_ID =~ /^0/) GROUP BY CommitID",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT CommitID, CommitTitle as \"Commit Title\", Cluster_ID as \"Cluster ID\", MAX($Latency) AS \"Max $Latency\"\nFROM \"[[Instrumentation]]\" WHERE (TC_Name =~ /^$TestCase$/ AND Cluster_ID =~ /^1/) GROUP BY CommitID",
+ "rawQuery": true,
+ "refId": "B",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "title": "Details for selected instrumentation and test case ([[Instrumentation]], [[TestCase]])",
+ "transform": "table",
+ "type": "table-old"
+ },
+ {
+ "columns": [],
+ "datasource": "TFA_RunTime_Perf",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fontSize": "110%",
+ "gridPos": {
+ "h": 3.7,
+ "w": 12,
+ "x": 0,
+ "y": 18
+ },
+ "id": 4,
+ "links": [],
+ "pageSize": null,
+ "scroll": true,
+ "showHeader": true,
+ "sort": {
+ "col": null,
+ "desc": false
+ },
+ "styles": [
+ {
+ "alias": "Time",
+ "align": "auto",
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "pattern": "Time",
+ "type": "hidden"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "decimals": 3,
+ "pattern": "/^MAX*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "ns"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "decimals": 2,
+ "pattern": "/.*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ }
+ ],
+ "targets": [
+ {
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT Cluster_ID, MAX(\"$Latency\") AS \"MAX($Latency)\" FROM \"[[Instrumentation]]\" WHERE (CommitID =~ /^$CommitID1$/ AND Cluster_ID =~ /^0/ AND \"TC_Name\" =~ /^$TestCase$/)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT Cluster_ID, MAX(\"$Latency\") AS \"MAX($Latency)\" FROM \"[[Instrumentation]]\" WHERE (CommitID =~ /^$CommitID1$/ AND Cluster_ID =~ /^1/ AND \"TC_Name\" =~ /^$TestCase$/)",
+ "rawQuery": true,
+ "refId": "B",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "title": "Max $Latency for Cluster IDs 0 and 1 for ($TestCase, $CommitID1)",
+ "transform": "table",
+ "transparent": true,
+ "type": "table-old"
+ },
+ {
+ "columns": [],
+ "datasource": "TFA_RunTime_Perf",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fontSize": "110%",
+ "gridPos": {
+ "h": 3.7,
+ "w": 12,
+ "x": 12,
+ "y": 18
+ },
+ "id": 5,
+ "links": [],
+ "pageSize": null,
+ "scroll": true,
+ "showHeader": true,
+ "sort": {
+ "col": null,
+ "desc": false
+ },
+ "styles": [
+ {
+ "alias": "Time",
+ "align": "auto",
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "pattern": "Time",
+ "type": "hidden"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "decimals": 3,
+ "pattern": "/^MAX*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "ns"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "decimals": 2,
+ "pattern": "/.*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ }
+ ],
+ "targets": [
+ {
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT Cluster_ID, MAX(\"$Latency\") AS \"MAX($Latency)\" FROM \"[[Instrumentation]]\" WHERE (CommitID =~ /^$CommitID2$/ AND Cluster_ID =~ /^0/ AND \"TC_Name\" =~ /^$TestCase$/)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT Cluster_ID, MAX(\"$Latency\") AS \"MAX($Latency)\" FROM \"[[Instrumentation]]\" WHERE (CommitID =~ /^$CommitID2$/ AND Cluster_ID =~ /^1/ AND \"TC_Name\" =~ /^$TestCase$/)",
+ "rawQuery": true,
+ "refId": "B",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "title": "Max $Latency for Cluster IDs 0 and 1 for ($TestCase, $CommitID2)",
+ "transform": "table",
+ "transparent": true,
+ "type": "table-old"
+ }
+ ],
+ "refresh": false,
+ "schemaVersion": 26,
+ "style": "dark",
+ "tags": [
+ "TFA_QUALITY_METRICS"
+ ],
+ "templating": {
+ "list": [
+ {
+ "allValue": null,
+ "current": {
+ "selected": false,
+ "text": "juno-tftf+aarch32-rt32.instr-r2",
+ "value": "juno-tftf+aarch32-rt32.instr-r2"
+ },
+ "datasource": "TFA_RunTime_Perf",
+ "definition": "show measurements",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Instrumentation",
+ "multi": false,
+ "name": "Instrumentation",
+ "options": [],
+ "query": "show measurements",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "current": {
+ "tags": [],
+ "text": "testrtinstrpsciversionparallel",
+ "value": "testrtinstrpsciversionparallel"
+ },
+ "hide": 0,
+ "includeAll": false,
+ "label": "Test Case",
+ "multi": false,
+ "name": "TestCase",
+ "options": [
+ {
+ "selected": false,
+ "text": "testrtinstrcpuoffserial",
+ "value": "testrtinstrcpuoffserial"
+ },
+ {
+ "selected": false,
+ "text": "testrtinstrcpususpparallel",
+ "value": "testrtinstrcpususpparallel"
+ },
+ {
+ "selected": false,
+ "text": "testrtinstrcpususpserial",
+ "value": "testrtinstrcpususpserial"
+ },
+ {
+ "selected": true,
+ "text": "testrtinstrpsciversionparallel",
+ "value": "testrtinstrpsciversionparallel"
+ },
+ {
+ "selected": false,
+ "text": "testrtinstrsuspdeepparallel",
+ "value": "testrtinstrsuspdeepparallel"
+ },
+ {
+ "selected": false,
+ "text": "testrtinstrsuspdeepserial",
+ "value": "testrtinstrsuspdeepserial"
+ }
+ ],
+ "query": "testrtinstrcpuoffserial,testrtinstrcpususpparallel,testrtinstrcpususpserial,testrtinstrpsciversionparallel,testrtinstrsuspdeepparallel,testrtinstrsuspdeepserial",
+ "skipUrlSync": false,
+ "type": "custom"
+ },
+ {
+ "allValue": null,
+ "current": {
+ "tags": [],
+ "text": "Latency_EL3Entry_EL3Exit",
+ "value": "Latency_EL3Entry_EL3Exit"
+ },
+ "hide": 0,
+ "includeAll": false,
+ "label": "Latency",
+ "multi": false,
+ "name": "Latency",
+ "options": [
+ {
+ "selected": false,
+ "text": "CacheFlush",
+ "value": "CacheFlush"
+ },
+ {
+ "selected": false,
+ "text": "Latency_CPUWakeup_EL3Exit",
+ "value": "Latency_CPUWakeup_EL3Exit"
+ },
+ {
+ "selected": false,
+ "text": "Latency_EL3Entry_CPUPowerDown",
+ "value": "Latency_EL3Entry_CPUPowerDown"
+ },
+ {
+ "selected": true,
+ "text": "Latency_EL3Entry_EL3Exit",
+ "value": "Latency_EL3Entry_EL3Exit"
+ }
+ ],
+ "query": "CacheFlush,Latency_CPUWakeup_EL3Exit,Latency_EL3Entry_CPUPowerDown,Latency_EL3Entry_EL3Exit",
+ "skipUrlSync": false,
+ "type": "custom"
+ },
+ {
+ "allValue": null,
+ "current": {
+ "selected": false,
+ "text": "e98d934aee",
+ "value": "e98d934aee"
+ },
+ "datasource": "TFA_RunTime_Perf",
+ "definition": "SHOW TAG VALUES WITH KEY=CommitID",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Commit ID 1",
+ "multi": false,
+ "name": "CommitID1",
+ "options": [],
+ "query": "SHOW TAG VALUES WITH KEY=CommitID",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "current": {
+ "selected": false,
+ "text": "e98d934aee",
+ "value": "e98d934aee"
+ },
+ "datasource": "TFA_RunTime_Perf",
+ "definition": "SHOW TAG VALUES WITH KEY=CommitID",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Commit ID 2",
+ "multi": false,
+ "name": "CommitID2",
+ "options": [],
+ "query": "SHOW TAG VALUES WITH KEY=CommitID",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ }
+ ]
+ },
+ "time": {
+ "from": "now-30d",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": [
+ "1d"
+ ],
+ "time_options": [
+ "5m",
+ "15m",
+ "1h",
+ "6h",
+ "12h",
+ "24h",
+ "2d",
+ "7d",
+ "30d"
+ ]
+ },
+ "timezone": "",
+ "title": "TFA_RunTime_Perf_Details",
+ "uid": "qqVv390mz",
+ "version": 10
+}
\ No newline at end of file
diff --git a/quality-metrics/docs/visualisation_user_guide.md b/quality-metrics/docs/visualisation_user_guide.md
new file mode 100644
index 0000000..6233e53
--- /dev/null
+++ b/quality-metrics/docs/visualisation_user_guide.md
@@ -0,0 +1,19 @@
+# Visualisation User Guide
+Once the broker component is up and running and it has been verified that data is being pushed to InfluxDB, the data can be visualised using Grafana. The following are the steps to create a Grafana dashboard:
+
+1. Go to `http://[Host public IP address]:3000` and sign in using the appropriate credentials (Grafana's default credentials are admin/admin).
+1. Create a data source for each of the databases. Set the data source name and database as per the table below, and set the URL to "`http://[Host public IP address]:8086`". Click on "Save & Test".
+1. To create a dashboard, click on the '+' sign in the left sidebar, select "Import" and paste the content of the JSON file from the [sample dashboards](./sample-dashboards) folder for the dashboard that needs to be created. The TFA dashboard JSON files in [sample dashboards](./sample-dashboards) are provided for reference. Users can also create custom dashboards or modify the sample ones. For details on creating dashboards, please refer to [Grafana Labs](https://grafana.com/docs/grafana/latest/getting-started/getting-started/).
+
+The following table captures the data source details for the dashboards provided in the [sample dashboards](./sample-dashboards) folder:
+
+| S. No. | Data Source Name | Database |
+| ------------- | ------------- | ------------- |
+| 1 | TFA_CodeChurn | TFA_CodeChurn |
+| 2 | TFA_Complexity | TFA_Complexity |
+| 3 | TFA_Defects | TFA_Defects |
+| 4 | TFA_ImageSize | TFA_ImageSize |
+| 5 | TFA_MisraDefects | TFA_MisraDefects |
+| 6 | TFA_RunTime_Perf | TFA_RTINSTR |
+
+Please note that the URL remains the same for all the data sources, i.e., `http://[Host public IP address]:8086`.
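+
+As an alternative to the web UI, the data sources can also be scripted against Grafana's HTTP API. The snippet below is a minimal sketch, assuming the default admin/admin credentials and the host/port values used above; repeat it for each name/database pair listed in the table.
+
+```sh
+#!/bin/bash
+# Illustrative only: create the TFA_Complexity data source through Grafana's HTTP API
+# instead of the web UI. Replace the placeholder host and credentials as appropriate,
+# and repeat (or loop) for the other data sources in the table above.
+GRAFANA_URL="http://[Host public IP address]:3000"
+INFLUXDB_URL="http://[Host public IP address]:8086"
+
+curl -s -u admin:admin -H "Content-Type: application/json" \
+     -X POST "${GRAFANA_URL}/api/datasources" \
+     -d "{\"name\": \"TFA_Complexity\", \"type\": \"influxdb\", \"access\": \"proxy\", \"url\": \"${INFLUXDB_URL}\", \"database\": \"TFA_Complexity\"}"
+```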
diff --git a/quality-metrics/readme.md b/quality-metrics/readme.md
new file mode 100644
index 0000000..c427834
--- /dev/null
+++ b/quality-metrics/readme.md
@@ -0,0 +1,25 @@
+# quality-metrics
+
+The *quality-metrics* setup implements a set of components that enable project teams to generate and track code quality metrics for data-driven quality improvement. It comprises:
+- a set of data generator scripts that produce quality metrics data (such as code churn, open defects, code complexity, etc.) for a given project.
+- a data broker middleware component to manage the capturing of the data generated by multiple projects.
+- a database backend to store and track the generated quality metrics; the current implementation uses the [InfluxDB](https://github.com/influxdata/influxdb) time-series database.
+- a visualisation front-end to view the trend of these metrics over time with the [Grafana](https://github.com/grafana/grafana) visualisation tool.
+- a set of Docker files for easy deployment of the containerised components.
+
+Additional documentation is also provided that outlines how a user can visualise InfluxDB data using Grafana.
+
+## Design Overview
+Please refer to the [design overview](./docs/design_overview.md) for the design details of the broker component and the data generator scripts.
+
+## Broker Component User Guide
+The [broker component user guide](./docs/broker_component_user_guide.md) contains details of how to bring up the broker component, which implements the APIs (Application Programming Interfaces) used by the data generator clients. The data generator clients POST metrics data, which in turn gets committed to the backend database. The broker performs a set of basic sanity checks before committing any metrics data and provides simple token-based authentication for clients.
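+
+As an illustration only, a client-side push might look like the sketch below; the port, endpoint path, payload fields and token shown here are placeholders rather than the broker's actual API, which is documented in the user guide linked above.
+
+```sh
+# Hypothetical example of a data generator client POSTing metrics data to the broker.
+# BROKER_URL, the endpoint path, the token and the payload fields are all placeholders;
+# refer to the broker component user guide for the actual API and payload schema.
+BROKER_URL="http://[Broker public IP address]:5000"
+AUTH_TOKEN="<token issued to this project>"
+
+curl -s -H "Authorization: Bearer ${AUTH_TOKEN}" \
+     -H "Content-Type: application/json" \
+     -X POST "${BROKER_URL}/push_metrics" \
+     -d '{"metrics": "tfa_code_churn", "data": {"Lines_of_Change": 1234, "Target_Tag": "v2.3"}}'
+```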
+
+## Data Generator User Guide
+Please refer to the [data generator user guide](./docs/data_generator_user_guide.md) for details on how metrics data is generated. These data generator scripts are typically integrated with an individual project's CI (Continuous Integration) setup.
+
+## Visualisation User Guide
+The [visualisation user guide](./docs/visualisation_user_guide.md) contains details on visualising the InfluxDB data using Grafana.
+
+## License
+[BSD-3-Clause](../license.md)
diff --git a/readme.md b/readme.md
new file mode 100644
index 0000000..7483dae
--- /dev/null
+++ b/readme.md
@@ -0,0 +1,39 @@
+# qa-tools
+
+The qa-tools repo consists of a set of tools that help improve quality assurance within project teams.
+
+Currently, the qa-tools repo hosts the following tools:
+- Trace-based code coverage tool.
+- Quality metrics measurement and tracking setup.
+
+## Trace-based code coverage tool
+
+This is a code coverage tool aimed at firmware components that run on memory-constrained platforms, where the traditional approach of measuring coverage through code instrumentation does not work well. Instead, coverage is measured from the execution trace produced by Arm Fast Model (FVP) platforms, combined with the information gathered from the DWARF signatures embedded in the firmware binaries.
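+
+For orientation, the rough usage flow looks like the sketch below. The exact FVP binary name and the plugin's trace-file parameter are assumptions for illustration; the coverage tool README linked below documents the real invocation.
+
+```sh
+# Build the MTI trace plugin (requires PVLIB_HOME to point at a Fast Models installation).
+make -C coverage-tool/coverage-plugin
+
+# Load the plugin into an FVP run so that it emits execution-trace files for later
+# coverage processing. The model name and the trace-file-prefix parameter below are
+# illustrative assumptions; add the usual model options for the firmware under test.
+FVP_Base_RevC-2xAEMv8A \
+    --plugin coverage-tool/coverage-plugin/coverage_trace.so \
+    -C TRACE.coverage_trace.trace-file-prefix=covtrace
+```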
+
+See [Coverage Tool - README](./coverage-tool/readme.md) for more details.
+
+
+## Quality metrics measurement and tracking setup
+
+This is a collection of data generator scripts that can be integrated into any CI setup to generate quality metrics and publish them to a database backend, where they can be visualised and tracked over time. The aim is to equip projects with the capability to measure and track useful metrics so as to improve the quality of software delivery over time.
+
+
+See [Quality Metrics - README](./quality-metrics/readme.md) for more details.
+
+
+## Contributing
+Pull requests are welcome. For major changes, please open an issue first to discuss what you would like to change.
+
+Please make sure to update the in-source documentation as appropriate.
+
+## Style Guide
+
+At the moment qa-tools comprises various components developed in multiple languages. We follow the coding guidelines below and request that incoming contributions do the same.
+
+- Shell scripting guidelines - follow the [Google Shell Scripting Guideline](https://google.github.io/styleguide/shellguide.html).
+- C++ coding guidelines - follow the [Google C++ Guideline](https://google.github.io/styleguide/cppguide.html).
+- Python coding guidelines - follow the [PEP 8 Style Guide](https://www.python.org/dev/peps/pep-0008/). We highly recommend the use of the [autopep8 tool](https://pypi.org/project/autopep8/) for automatically formatting Python code to be compliant with the PEP 8 guidelines.
+
+## License
+[BSD-3-Clause](./license.md)
+