rename identifier/numericIdentifier fields to name/identifier, add verbose option and dictionary metadata, init test files

Justine West 2024-02-19 14:00:44 -08:00
parent bf32cd4341
commit c9dad54ea6
9 changed files with 630 additions and 47 deletions

View File

@ -6,12 +6,21 @@ import fpp.compiler.analysis._
import io.circe._
import io.circe.syntax._
case class DictionaryMetadata(
deploymentName: String,
frameworkVersion: String,
libraryVersions: List[String],
dictionarySpecVersion: String
)
case class DictionaryJsonEncoder(
/** Analysis data structure */
analysis: Analysis,
/** Constructed Dictionary data structure */
dictionary: Dictionary
dictionary: Dictionary,
metadata: DictionaryMetadata,
defaultStringSize: Int,
verbose: Boolean
) {
private def dictionaryEntryMapAsJson[A, B] (f1: (A, B) => Json) (map: Map[A, B]): Json =
(map.map { case (key, value) => f1(key, value) }).toList.asJson
@ -29,6 +38,16 @@ case class DictionaryJsonEncoder(
Encoder.instance (dictionarySymbolSetAsJson (f1) _)
}
private implicit def dictionaryMetadataEncoder: Encoder[DictionaryMetadata] = new Encoder[DictionaryMetadata] {
override def apply(metadata: DictionaryMetadata): Json = {
Json.obj(
"deploymentName" -> metadata.deploymentName.asJson,
"frameworkVersion" -> metadata.frameworkVersion.asJson,
"libraryVersions" -> metadata.libraryVersions.asJson,
"dictionarySpecVersion" -> metadata.dictionarySpecVersion.asJson
)
}
}
private implicit val commandMapEncoder: Encoder[Map[BigInt, CommandEntry]] = {
def f1(opcode: BigInt, command: CommandEntry) = (opcode -> command).asJson
Encoder.instance (dictionaryEntryMapAsJson (f1) _)
@ -109,10 +128,8 @@ case class DictionaryJsonEncoder(
"kind" -> "string".asJson
)
size match {
case Some(s) => {
Json.obj("size" -> valueAsJson(analysis.valueMap(s.id))).deepMerge(jsonObj)
}
case None => jsonObj
case Some(s) => Json.obj("size" -> valueAsJson(analysis.valueMap(s.id))).deepMerge(jsonObj)
case None => Json.obj("size" -> defaultStringSize.asJson).deepMerge(jsonObj)
}
}
case Type.Array(node, _, _, _) => {
@ -237,13 +254,13 @@ case class DictionaryJsonEncoder(
val paramListJson = for (paramEntry <- params) yield {
val (_, elem, annotation) = paramEntry
val description = annotation.mkString("\n")
val AstNode(Ast.FormalParam(kind, identifier, typeNameNode), _) = elem
val AstNode(Ast.FormalParam(kind, name, typeNameNode), _) = elem
val ref = kind match {
case Ast.FormalParam.Ref => true
case Ast.FormalParam.Value => false
}
Json.obj(
"identifier" -> identifier.asJson,
"name" -> name.asJson,
"description" -> description.asJson,
"type" -> typeAsJson(analysis.typeMap(typeNameNode.id)),
"ref" -> ref.asJson
@ -258,7 +275,7 @@ case class DictionaryJsonEncoder(
val opcode = entry._1
val command = entry._2.command
val componentInstUnqualName = entry._2.componentInstance.getUnqualifiedName
val identifier = componentInstUnqualName + "." + command.getName
val name = componentInstUnqualName + "." + command.getName
command match {
case Command.NonParam(aNode, kind) => {
val (annotation, node, _) = aNode
@ -274,15 +291,20 @@ case class DictionaryJsonEncoder(
val formalParams = data.params
val json = Json.obj(
"identifier" -> identifier.asJson,
"name" -> name.asJson,
"commandKind" -> commandKind.asJson,
"opcode" -> opcode.asJson,
"description" -> description.asJson,
"formalParams" -> formalParams.asJson
)
val optionalMap = Map("priority" -> priority, "queueFullBehavior" -> queueFull)
optionalMap.foldLeft(json) ((acc, inst) => jsonWithOptional(inst._1, inst._2, acc))
if(verbose) {
val optionalMap = Map("priority" -> priority, "queueFullBehavior" -> queueFull)
optionalMap.foldLeft(json) ((acc, inst) => jsonWithOptional(inst._1, inst._2, acc))
}
else {
json
}
}
// case where command is param set/save command
case fpp.compiler.analysis.Command.Param(aNode, kind) => {
@ -293,7 +315,7 @@ case class DictionaryJsonEncoder(
case Command.Param.Save => "save"
}
Json.obj(
"identifier" -> identifier.asJson,
"name" -> name.asJson,
"commandKind" -> commandKind.asJson,
"opcode" -> opcode.asJson,
"description" -> annotation.mkString("\n").asJson,
@ -309,13 +331,13 @@ case class DictionaryJsonEncoder(
val numIdentifier = entry._1
val param = entry._2.param
val componentInstUnqualName = entry._2.componentInstance.getUnqualifiedName
val identifier = componentInstUnqualName + "." + param.getName
val name = componentInstUnqualName + "." + param.getName
val (annotation, node, _) = param.aNode
val json = Json.obj(
"identifier" -> identifier.asJson,
"name" -> name.asJson,
"description" -> annotation.mkString("\n").asJson,
"type" -> typeAsJson(param.paramType),
"numericIdentifier" -> numIdentifier.asJson
"identifier" -> numIdentifier.asJson
)
jsonWithOptional("default", param.default, json)
}
@ -326,14 +348,14 @@ case class DictionaryJsonEncoder(
val event = entry._2.event
val numIdentifier = entry._1
val componentInstUnqualName = entry._2.componentInstance.getUnqualifiedName
val identifier = componentInstUnqualName + "." + event.getName
val name = componentInstUnqualName + "." + event.getName
val (annotation, node, _) = event.aNode
val json = Json.obj(
"identifier" -> identifier.asJson,
"name" -> name.asJson,
"description" -> annotation.mkString("\n").asJson,
"severity" -> node.data.severity.toString.asJson,
"severity" -> node.data.severity.toString.replace(" ", "_").toUpperCase().asJson,
"formalParams" -> node.data.params.asJson,
"numericIdentifier" -> numIdentifier.asJson,
"identifier" -> numIdentifier.asJson
)
val optionalMap = Map("formatString" -> Some(event.format), "throttle" -> event.throttle)
optionalMap.foldLeft(json) ((acc, inst) => jsonWithOptional(inst._1, inst._2, acc))
@ -345,21 +367,28 @@ case class DictionaryJsonEncoder(
val channel = entry._2.tlmChannel
val numIdentifier = entry._1
val componentInstUnqualName = entry._2.componentInstance.getUnqualifiedName
val identifier = componentInstUnqualName + "." + channel.getName
val name = componentInstUnqualName + "." + channel.getName
val (annotation, node, _) = channel.aNode
val json = Json.obj(
"identifier" -> identifier.asJson,
"name" -> name.asJson,
"description" -> annotation.mkString("\n").asJson,
"type" -> typeAsJson(channel.channelType),
"numericIdentifier" -> numIdentifier.asJson,
"identifier" -> numIdentifier.asJson,
"telemtryUpdate" -> channel.update.toString.asJson
// "limit" -> Json.obj(
// "low" -> channel.lowLimits.asJson,
// "high" -> channel.highLimits.asJson
// )
)
jsonWithOptional("formatString", channel.format, json)
val jsonWithOptionals = jsonWithOptional("formatString", channel.format, json)
// if channel high or low limits are specified, add them to the JSON and return the telem channel JSON
if(!channel.lowLimits.isEmpty || !channel.highLimits.isEmpty) {
val lowLimitJson = if(!channel.lowLimits.isEmpty) Json.obj("low" -> channel.lowLimits.asJson) else Json.obj()
val highLimitJson = if(!channel.highLimits.isEmpty) Json.obj("high" -> channel.highLimits.asJson) else Json.obj()
Json.obj("limits" -> lowLimitJson.deepMerge(highLimitJson)).deepMerge(jsonWithOptionals)
}
// no channel limits exist, return the telem channel JSON
else {
jsonWithOptionals
}
}
}
@ -368,14 +397,14 @@ case class DictionaryJsonEncoder(
val record = entry._2.record
val numIdentifier = entry._1
val componentInstUnqualName = entry._2.componentInstance.getUnqualifiedName
val identifier = componentInstUnqualName + "." + record.getName
val name = componentInstUnqualName + "." + record.getName
val (annotation, node, _) = record.aNode
Json.obj(
"identifier" -> identifier.asJson,
"name" -> name.asJson,
"description" -> annotation.mkString("\n").asJson,
"type" -> typeAsJson(record.recordType),
"array" -> record.isArray.asJson,
"numericIdentifier" -> numIdentifier.asJson,
"identifier" -> numIdentifier.asJson,
)
}
}
@ -385,14 +414,19 @@ case class DictionaryJsonEncoder(
val container = entry._2.container
val numIdentifier = entry._1
val componentInstUnqualName = entry._2.componentInstance.getUnqualifiedName
val identifier = componentInstUnqualName + "." + container.getName
val name = componentInstUnqualName + "." + container.getName
val (annotation, node, _) = container.aNode
val json = Json.obj(
"identifier" -> identifier.asJson,
"name" -> name.asJson,
"description" -> annotation.mkString("\n").asJson,
"numericIdentifier" -> numIdentifier.asJson,
)
jsonWithOptional("defaultPriority", container.defaultPriority, json)
"identifier" -> numIdentifier.asJson,
)
if(verbose) {
jsonWithOptional("defaultPriority", container.defaultPriority, json)
}
else {
json
}
}
}
@ -413,6 +447,7 @@ case class DictionaryJsonEncoder(
// split set into individual sets consisting of each symbol type (arrays, enums, structs)
val (arraySymbolSet, enumSymbolSet, structSymbolSet) = splitTypeSymbolSet(dictionary.typeSymbolSet, Set(), Set(), Set())
Json.obj(
"metadata" -> metadata.asJson,
"arrays" -> arraySymbolSet.asJson,
"enums" -> enumSymbolSet.asJson,
"structs" -> structSymbolSet.asJson,

View File

@ -8,20 +8,26 @@ import fpp.compiler.transform._
import fpp.compiler.util._
import scopt.OParser
object FPPToDict {
case class Options(
files: List[File] = Nil
files: List[File] = Nil,
imports: List[File] = Nil,
defaultStringSize: Int = 80,
deploymentName: String = "",
frameworkVersion: String = "",
libraryVersions: List[String] = Nil,
dictionarySpecVersion: String = "1.0.0",
verbose: Boolean = false
)
// TODO: need to add arg for including dependency FPP files
def constructDictionary(a: Analysis): Iterable[dictionary.Dictionary] = {
val dictionaryList = for (((_, t), index) <- a.topologyMap.zipWithIndex) yield {
def writeDictionary(a: Analysis, defaultStringSize: Int, verbose: Boolean, metadata: dictionary.DictionaryMetadata): Result.Result[Unit] = {
for (((_, t), index) <- a.topologyMap.zipWithIndex) yield {
val constructedDictionary = dictionary.Dictionary().buildDictionary(a, t)
val jsonEncoder = dictionary.DictionaryJsonEncoder(a, constructedDictionary)
writeJson("justine-test-" + index + ".json", jsonEncoder.dictionaryToJson)
constructedDictionary
val jsonEncoder = dictionary.DictionaryJsonEncoder(a, constructedDictionary, metadata, defaultStringSize, verbose)
writeJson("topology-" + index + "-dictionary.json", jsonEncoder.dictionaryToJson)
}
return dictionaryList
Right(())
}
def writeJson (fileName: String, json: io.circe.Json): Result.Result[Unit] = {
@ -36,17 +42,19 @@ object FPPToDict {
// create Analysis
// extract info we need from analysis and store in dictionary data structure (done in Dictionary.scala)
// write json to file (maybe the fpp-to-dict tool should have a dictionary file name input?)
def command(options: Options): Result.Result[Unit] = {
def command(options: Options) = {
fpp.compiler.util.Error.setTool(Tool(name))
val files = options.files.reverse match {
case Nil => List(File.StdIn)
case list => list
}
val a = Analysis(inputFileSet = options.files.toSet)
val metadata = dictionary.DictionaryMetadata(options.deploymentName, options.frameworkVersion, options.libraryVersions, options.dictionarySpecVersion)
for {
tul <- Result.map(files, Parser.parseFile (Parser.transUnit) (None) _)
a <- CheckSemantics.tuList(a, tul)
dictionaryList <- constructDictionary(a).asInstanceOf[Result.Result[dictionary.Dictionary]]
tulFiles <- Result.map(files, Parser.parseFile (Parser.transUnit) (None) _)
tulImports <- Result.map(options.imports, Parser.parseFile (Parser.transUnit) (None) _)
a <- CheckSemantics.tuList(a, tulFiles ++ tulImports)
_ <- writeDictionary(a, options.defaultStringSize, options.verbose, metadata)
} yield ()
}
@ -68,6 +76,33 @@ object FPPToDict {
.optional()
.action((f, c) => c.copy(files = File.fromString(f) :: c.files))
.text(".fpp file(s)"),
opt[Seq[String]]('i', "imports")
.valueName("<file1>,<file2>...")
.action((i, c) => c.copy(imports = i.toList.map(File.fromString(_))))
.text("files to import"),
opt[Int]('s', "size")
.valueName("<size>")
.validate(s => if (s > 0) success else failure("size must be greater than zero"))
.action((s, c) => c.copy(defaultStringSize = s))
.text("default string size"),
opt[String]('d', "deployment")
.valueName("<deployment>")
.action((d, c) => c.copy(deploymentName = d))
.text("deployment name"),
opt[String]('f', "frameworkVersion")
.valueName("<frameworkVersion>")
.action((f, c) => c.copy(frameworkVersion = f))
.text("framework version"),
opt[Seq[String]]('l', "libraryVersions")
.valueName("<lib1ver>,<lib2ver>,...")
.action((l, c) => {
c.copy(libraryVersions = l.toList)
})
.text("library versions"),
opt[Unit]('v', "verbose")
.valueName("<verbose>")
.action((_, c) => c.copy(verbose = true))
.text("verbose"),
)
}
}
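Putting the new options together, an invocation might look like the following (a sketch: the file names and version strings are hypothetical):
# parse Top.fpp plus imported definitions, set the default string size to 40,
# and include the optional fields (priority, queueFullBehavior, defaultPriority) with -v
fpp-to-dict Top.fpp -i Fw.fpp,Svc.fpp -s 40 \
  -d MyDeployment -f 3.4.0 -l MyLib@1.0.0,OtherLib@2.1.0 -v
Each topology in the input produces its own `topology-<index>-dictionary.json` file.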

View File

@ -0,0 +1,21 @@
== fpp-to-dict/test
This is the test directory for the `fpp-to-dict` tool.
=== Running Tests
To run the tests, execute `./test` in this directory.
All the tests should pass.
=== Developing Tests
When developing tests, run `./check-json-dict`.
This checks that all the `*.ref.json` files are valid {json} files.
Before running `check-json-dict`, do the following (see the example below):
* Check out a copy of the
https://github.com/nasa/fprime[F Prime repository].
* Set the shell environment variable `FPRIME` to point to
the root of the F Prime repository.
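For example (a sketch; the checkout path is hypothetical):
# a sketch: point FPRIME at a local F Prime checkout, then validate the reference dictionaries
export FPRIME=$HOME/fprime
./check-json-dict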

View File

@ -0,0 +1,10 @@
#!/bin/sh -e
. ../../../scripts/utils.sh
for target in `find . -mindepth 2 -name check-json-dict`
do
dir=`dirname $target`
echo "[ $dir ]"
(cd $dir; ./check-json-dict)
done

View File

@ -0,0 +1,12 @@
#!/bin/sh -e
cd `dirname $0`
. ../../../scripts/utils.sh
for target in `find . -mindepth 2 -maxdepth 2 -name clean`
do
dir=`dirname $target`
(cd $dir; ./clean)
done
clean

View File

@ -0,0 +1,164 @@
#!/bin/sh -e
. ../../../scripts/test-utils.sh
fpp_to_dict=../../../bin/fpp-to-dict
run_test()
{
args=$1
infile=$2
if test -n "$3"
then
outfile=$3
else
outfile=$infile
fi
{
# Run fpp-to-dict and concatenate the output files
$fpp_to_dict $infile.fpp $args 2>&1 | remove_path_prefix > $outfile.out.txt && \
remove_path_prefix < fpp-ast.json >> $outfile.out.txt && \
remove_path_prefix < fpp-loc-map.json >> $outfile.out.txt
} && \
{
# Validate the location map
if which python3 > /dev/null 2>&1
then
python3 python/locationMapValidator.py fpp-ast.json fpp-loc-map.json
else
# Work around an issue in CI
echo "python3 is not available; skipping map validation" 1>&2
fi
} && \
{
# Clean up
if [ "$args" = "-s" ]; then
rm fpp-ast.json fpp-loc-map.json
else
remove_path_prefix < fpp-analysis.json >> $outfile.out.txt
# Delete the JSON files
rm fpp-ast.json fpp-loc-map.json fpp-analysis.json
fi
} && \
{
# Compare the output
diff -u $outfile.ref.txt $outfile.out.txt > $outfile.diff.txt 2>&1
}
}
constants()
{
run_test "" constants
}
modules()
{
run_test "" modules
}
types()
{
run_test "" types
}
enums()
{
run_test "" enums
}
ports()
{
run_test "" ports
}
simpleComponents()
{
run_test "" simpleComponents
}
specialPorts()
{
run_test "" specialPorts
}
internalPorts()
{
run_test "" internalPorts
}
commands(){
run_test "" commands
}
events(){
run_test "" events
}
telemetry(){
run_test "" telemetry
}
parameters(){
run_test "" parameters
}
constTypesComponents(){
run_test "" constTypesComponents
}
matchedPorts(){
run_test "" matchedPorts
}
passiveComponent(){
run_test "" passiveComponent
}
queuedComponents(){
run_test "" queuedComponents
}
activeComponents(){
run_test "" activeComponents
}
simpleTopology(){
run_test "" simpleTopology
}
importedTopologies(){
run_test "" importedTopologies
}
syntaxOnly(){
run_test "-s" syntaxOnly
}
tests="
activeComponents
commands
constTypesComponents
constants
enums
events
importedTopologies
internalPorts
matchedPorts
modules
parameters
passiveComponent
ports
queuedComponents
simpleComponents
simpleTopology
specialPorts
syntaxOnly
telemetry
types
"
run_suite $tests

View File

@ -0,0 +1,6 @@
#!/bin/sh -e
export COMPILER_ROOT=../..
cd ..
$COMPILER_ROOT/scripts/test
mv num_failed.txt test-output.txt test

View File

@ -0,0 +1,291 @@
module Ports {
port P
}
module FppTest {
@ A component for testing data product code gen
active component DpTest {
output port pOut: Ports.P
@ Product get port
product get port productGetOut
@ A port for sending data products
product send port productSendOut
@ A port for getting the current time
time get port timeGetOut
@ Port for sending command registrations
command reg port cmdRegOut
@ Port for receiving commands
command recv port cmdIn
@ Port for sending command responses
command resp port cmdResponseOut
# ----------------------------------------------------------------------
# Types
# ----------------------------------------------------------------------
@ Command 1
async command COMMAND_1(
a: bool @< description for argument a
) priority 10
@ Data for a DataRecord
struct Data {
@ A U16 field
u16Field: U16
}
@ A single U32 value
product record Record0: Data
@ Description of Container 0
product container Container0
@ Container 1
product container Container1 id 0x02
@ Container 2
@ Implied id is 0x03
product container Container2 default priority 10
}
}
module Fw {
port Cmd
port CmdReg
port CmdResponse
port PrmGet
port PrmSet
port Log
port LogText
port Time
port Tlm
port DpGet
port DpSet
port DpRequest
port DpResponse
port DpSend
}
module M {
enum E1: U32 {
X = 0
Y = 1
Z = 2
} default X
enum E2 {
PASS
FAIL
} default PASS
enum U8Gunfighters: U8 {
IL_BUONO
IL_BRUTTO
IL_CATTIVO
}
enum Status: U32 {
YES
NO
MAYBE
} default YES
# struct MyStruct1 {
# x: [3] U32,
# y: B,
# z: string size 16 format "The string is {}"
# } default { z = "hello world"}
struct A {
x: U64 format "The value of x is {}"
y: F32
} default { x = 1, y = 1.5 }
@ An array of 3 enum values
array EnumArray = [3] Status default [Status.YES, Status.YES, Status.YES]
# @ An array of 3 struct values
array MyArray = [3] U64
@ An array of 3 I32 values
array I32x3 = [3] I32
@ An array of 4 U32 values
array U32x4 = [4] U32 default [1, 2, 3, 4]
@ An array of 2 F64 values
array F64x2 = [2] F64
@ An array of 3 F64 values
array F64x3 = [3] F64
@ An array of 4 F64 values
array F64x4 = [4] F64
@ An array of 2 String values
array StringArray = [2] string default ["A", "B"]
# Defines an array type A of 3 U8 elements with default value [ 0, 0, 0 ]
# array A = [3] U8
# Defines an array type B of 2 A elements with default value
# [ [ 0, 0, 0 ], [ 0, 0, 0 ] ]
# array B = [3] A
active component C1 {
@ Command 1
async command COMMAND_1(
a: bool @< description for argument a
) priority 10
@ Command 2
sync command COMMAND_2(a: string size 20)
# @ My parameter 1
# param PARAM_1: Status default Status.YES
@ My parameter 2
param PARAM_1: MyArray
@ My parameter 1
param PARAM_3: U32x4
@ Parameter 3
@ Its set opcode is 0x12
@ Its save opcode is 0x20
param PARAM_4: F32
@ Parameter 5
param PARAM_5: E2
@ Port for sending command registrations
command reg port cmdRegOut
@ Port for receiving commands
command recv port cmdIn
@ Port for sending command responses
command resp port cmdResponseOut
output port pOut: Ports.P
@ Port to return the value of a parameter
param get port prmGetOut
@ Port to set the value of a parameter
param set port prmSetOut
@ Product get port
product get port productGetOut
@ A port for sending data products
product send port productSendOut
@ A port for getting the current time
time get port timeGetOut
@ A single U32 value
product record Record0: U32
@ Record 1: A single F64x3 value
@ Implied id is 0x03
product record Record1: F64x3
@ Description of Container 0
product container Container0
@ Container 1
product container Container1 id 0x02
@ Container 2
@ Implied id is 0x03
product container Container2 default priority 10
}
passive component C2 {
@ Event logged when the LED blink interval is updated
event Event1(
arg1: F64x4 @< description of arg1 formal param
) \
severity activity high \
format "Arg one is {} and there is no arg 2"
@ Telemetry channel counting LED transitions
telemetry LedTransitions: I64 \
low { yellow -1, orange -2, red -3 } \
high { yellow 1, orange 2, red 3 }
@ Telemetry channel counting LED transitions
telemetry MyTlmChannel1: I32x3
@ Command 1
sync command COMMAND_1(a: string)
@ Command 2
sync command COMMAND_2(a: string)
@ Port for sending command registrations
command reg port cmdRegOut
@ Port for receiving commands
command recv port cmdIn
@ Port for sending command responses
command resp port cmdResponseOut
sync input port pIn: Ports.P
@ Port for sending events to downlink
event port logOut
@ Port for sending textual representation of events
text event port logTextOut
@ Port for requesting the current time
time get port timeCaller
@ Port for sending telemetry channels to downlink
telemetry port tlmOut
}
# instance c1: FppTest.DpTest base id 0x100
# instance c2: C2 base id 0x200
instance c1: C1 base id 0x300
instance c2: C2 base id 0x400
topology T {
instance c1
instance c2
connections C {
c1.pOut -> c2.pIn
}
}
# topology T2 {
# instance MySecondC1
# instance MySecondC2
# connections C {
# MySecondC1.pOut -> MySecondC2.pIn
# }
# }
}

View File

@ -0,0 +1,9 @@
#!/bin/sh -e
for file in `find . -mindepth 2 -maxdepth 2 -name update-ref`
do
dir=`dirname $file`
echo "[ $dir ]"
base=`basename $file`
(cd $dir; ./$base)
done