Happy path transform of the document; duplicate detection should be added.
commit ea99135dab (parent d4b6714229)
3 changed files with 61 additions and 9 deletions
@@ -2,8 +2,10 @@ package nu.zoom.dsl.ast;

import nu.zoom.dsl.parser.EndpointsBaseVisitor;
import nu.zoom.dsl.parser.EndpointsParser;
import org.antlr.v4.runtime.tree.TerminalNode;

import java.util.ArrayList;
import java.util.List;

public class EndpointsVisitorTransformer extends EndpointsBaseVisitor<EndpointsParser.DocumentContext> {

    private ArrayList<EndpointNode> endpoints = new ArrayList<>();

@@ -12,27 +14,74 @@ public class EndpointsVisitorTransformer extends EndpointsBaseVisitor<EndpointsP

    public EndpointsVisitorTransformer() {
    }

    public List<EndpointNode> getEndpoints() {
        return List.copyOf(endpoints);
    }

    public List<ConfigItemNode> getConfig() {
        return List.copyOf(config);
    }

    public List<CompoundTypeNode> getDataTypes() {
        return List.copyOf(dataTypes);
    }

    @Override
    public EndpointsParser.DocumentContext visitConfigitem(EndpointsParser.ConfigitemContext ctx) {
        String configKey = ctx.configkey().IDENTIFIER().getText();
-       String configValue = getText(ctx.configvalue());
+       String configValue = getText(ctx.configvalue().IDENTIFIER(), ctx.configvalue().VALUE());
        this.config.add(new ConfigItemNode(configKey, configValue));
        return super.visitConfigitem(ctx);
    }

    @Override
    public EndpointsParser.DocumentContext visitCompoundType(EndpointsParser.CompoundTypeContext ctx) {
        this.dataTypes.add(extractCompoundTypeNode(ctx));
        return super.visitCompoundType(ctx);
    }

    @Override
    public EndpointsParser.DocumentContext visitEndpoint(EndpointsParser.EndpointContext ctx) {
        List<String> segments = ctx
                .path()
                .pathSegment()
                .stream()
                .map(
                        segment -> getText(segment.IDENTIFIER(), segment.VALUE())
                ).toList();
        TerminalNode typeReference = ctx.IDENTIFIER();
        if (typeReference != null) {
            var endpoint = new EndpointNode(new PathsNode(segments), typeReference.getText());
            this.endpoints.add(endpoint);
        } else {
            var compoundTypeNode = extractCompoundTypeNode(ctx.compoundType());
            var endpoint = new EndpointNode(new PathsNode(segments), compoundTypeNode.name());
            this.dataTypes.add(compoundTypeNode);
            this.endpoints.add(endpoint);
        }
        return super.visitEndpoint(ctx);
    }

-   private String getText(EndpointsParser.ConfigvalueContext ctx) {
-       String identifierText = (ctx.IDENTIFIER() != null) ? ctx.IDENTIFIER().getText() : "";
-       String valueText = (ctx.VALUE() != null) ? ctx.VALUE().getText() : "";
-       return identifierText + valueText;
-   }

    private CompoundTypeNode extractCompoundTypeNode(EndpointsParser.CompoundTypeContext ctx) {
        String typeName = ctx.compoundTypeName().getText();
        List<FieldNode> fields = extractTypeFields(ctx.compoundFields().compoundField());
        return new CompoundTypeNode(typeName, fields);
    }

    private List<FieldNode> extractTypeFields(List<EndpointsParser.CompoundFieldContext> compoundFieldContexts) {
        return compoundFieldContexts.stream().map(
                ctx -> new FieldNode(
                        ctx.fieldName().getText(),
                        ctx.fieldType().getText()
                )
        ).toList();
    }

    // Concatenate the text from two terminal nodes. Useful for contexts that are either an identifier or a value,
    // and you just want the text from whichever is not null.
    private String getText(TerminalNode identifier, TerminalNode value) {
        return (identifier != null ? identifier.getText() : "")
                + (value != null ? value.getText() : "");
    }
}
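
The commit message notes that duplicate detection still needs to be added. Below is a minimal sketch of one way to do that inside EndpointsVisitorTransformer, assuming a duplicate means a repeated config key, compound type name, or endpoint path; that definition, the field names, and the requireUnique helper are illustrative assumptions, not part of this commit.

    // Sketch only (not in this commit): remember values already seen and fail fast on a repeat.
    // Requires: import java.util.HashSet; import java.util.Set;
    private final Set<String> seenConfigKeys = new HashSet<>();
    private final Set<String> seenTypeNames = new HashSet<>();
    private final Set<String> seenEndpointPaths = new HashSet<>();

    private void requireUnique(Set<String> seen, String value, String kind) {
        // Set.add returns false when the value was already present.
        if (!seen.add(value)) {
            throw new IllegalStateException("Duplicate " + kind + ": " + value);
        }
    }

visitConfigitem could then call requireUnique(seenConfigKeys, configKey, "config key") before this.config.add(...), visitCompoundType could do the same with the type name, and visitEndpoint could check String.join("/", segments); whether a duplicate should abort the transform or be collected and reported at the end is an open design choice.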
@@ -11,12 +11,13 @@ import java.nio.file.Files;

import java.nio.file.Path;

public class ParserWrapper {
-   public DocumentNode parse(Path sourcePath) throws IOException {
+   public static DocumentNode parse(Path sourcePath) throws IOException {
        var ins = CharStreams.fromPath(sourcePath, StandardCharsets.UTF_8);
        EndpointsLexer lexer = new EndpointsLexer(ins);
        EndpointsParser parser = new EndpointsParser(new CommonTokenStream(lexer));
        var document = parser.document();
-       new EndpointsVisitorTransformer().visit(document);
-       return null;
+       var astTransformer = new EndpointsVisitorTransformer();
+       astTransformer.visit(document);
+       return new DocumentNode(astTransformer.getConfig(), astTransformer.getDataTypes(), astTransformer.getEndpoints());
    }
}
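
With parse now static and returning a populated DocumentNode, a small happy-path test becomes straightforward. A sketch, assuming JUnit 5 is on the test classpath and that a syntactically valid sample file exists at src/test/resources/sample.endpoints (both assumptions); since DocumentNode's accessors are not shown in this diff, the test only asserts a non-null result.

    import nu.zoom.dsl.ast.DocumentNode;
    import nu.zoom.dsl.ast.ParserWrapper;
    import org.junit.jupiter.api.Test;

    import java.nio.file.Path;

    import static org.junit.jupiter.api.Assertions.assertNotNull;

    class ParserWrapperTest {

        @Test
        void parsesHappyPathDocument() throws Exception {
            // Hypothetical sample input; any file the grammar accepts will do.
            DocumentNode document = ParserWrapper.parse(Path.of("src/test/resources/sample.endpoints"));
            assertNotNull(document);
        }
    }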
@@ -1,5 +1,6 @@
package nu.zoom.dsl.cli;

+import nu.zoom.dsl.ast.DocumentNode;
import nu.zoom.dsl.ast.ParserWrapper;
import picocli.CommandLine;
import picocli.CommandLine.Command;

@@ -41,7 +42,8 @@ public class EndpointsCLI implements Callable<Integer> {
            validateTemplateDirectory();
            validateInputFile();
            validateOutputDirectory();
-           new ParserWrapper().parse(file);
+           DocumentNode rootNode = ParserWrapper.parse(file);
+           System.out.println(rootNode);
            return 0;
        } catch (Exception e) {
            System.err.println(e.getMessage());