Compare commits

...

6 commits

10 changed files with 231 additions and 105 deletions

.gitignore

@@ -1,3 +1,4 @@
tapir-out/**
# ---> Scala
*.class
*.log
@@ -39,25 +40,6 @@ replay_pid*
.idea/**
*.iml
# AWS User-specific
.idea/**/aws.xml
# Generated files
.idea/**/contentModel.xml
# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
# Gradle
.idea/**/gradle.xml
.idea/**/libraries
# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
@@ -74,9 +56,6 @@ replay_pid*
# CMake
cmake-build-*/
# Mongo Explorer plugin
.idea/**/mongoSettings.xml
# File-based project format
*.iws

View file

@@ -1,9 +1,14 @@
projekt/create/ -> createProjekt(
ProjektProperties(
title: String,
description: String
)
/createProject -> (
id: ProjektId,
properties: ProjektProperties
)
projekt/update/ -> updateProjekt(
/updateProject -> (
id: ProjektId,
properties: ProjektProperties
)

pom.xml

@@ -32,6 +32,7 @@
-->
<execution>
<id>jjtree-javacc</id>
<phase>generate-sources</phase>
<goals>
<goal>jjtree-javacc</goal>
</goals>

DataTypeNode.java

@@ -0,0 +1,6 @@
package nu.zoom.tapir;
import java.util.List;
public record DataTypeNode(String name, List<FieldNode> fields) {
}
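For orientation (not part of the commit): the ProjektProperties declaration from the sample spec would transform into roughly the following node. FieldNode is not shown in this diff and is assumed here to be a simple (name, type) record.
// Illustration only: the ProjektProperties data type as a DataTypeNode.
// FieldNode is assumed to be record FieldNode(String name, String type).
var projektProperties = new DataTypeNode(
        "ProjektProperties",
        List.of(
                new FieldNode("title", "String"),
                new FieldNode("description", "String")));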

Generator.java

@@ -39,19 +39,25 @@ public class Generator implements Callable<Integer> {
validateTemplateDirectory();
validateInputFile();
validateOutputDirectory();
var rootNode = new TapirParser(Files.newBufferedReader(this.file)).endpoints();
var rootNode = new TapirParser(Files.newBufferedReader(this.file)).specification();
if (this.verbose) {
System.out.println("====== Parse Tree ======");
rootNode.dump("");
}
var endpoints = NodeTransformer.transform(rootNode);
if (endpoints.isEmpty()) {
NodeTransformer transformer = new NodeTransformer();
transformer.transform(rootNode);
if (transformer.getEndpoints().isEmpty()) {
System.err.println("No tapir endpoints found.");
return 2;
}
if (this.verbose) {
System.out.println("\n====== AST ======");
endpoints.forEach(endpoint -> {
System.out.println("\n====== Types ======");
transformer.getDataTypes().forEach(type -> {
System.out.println(type);
});
System.out.println("\n====== Endpoints ======");
transformer.getEndpoints().forEach(endpoint -> {
System.out.println(endpoint);
});
}
@@ -59,7 +65,8 @@ public class Generator implements Callable<Integer> {
this.verbose,
this.outputDir,
this.templateDir,
endpoints
transformer.getEndpoints(),
transformer.getDataTypes()
);
targetGenerator.generate();
return 0;
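For context, a minimal sketch of the new flow wired up above, run against an in-memory spec instead of a file. This is not part of the change set; the package, the spec text, and the parser location are assumptions based on the sample spec above and the grammar changes further down.
package nu.zoom.tapir;

import java.io.StringReader;

// Sketch only: parse a spec with the new specification() entry point, then run the
// instance-based NodeTransformer. Assumes TapirParser sits in the same package.
public class TransformSketch {
    public static void main(String[] args) throws Exception {
        String spec = """
                ProjektProperties (
                    title : String,
                    description : String
                )
                /createProject -> (
                    id : ProjektId,
                    properties : ProjektProperties
                )
                """;
        var rootNode = new TapirParser(new StringReader(spec)).specification();
        NodeTransformer transformer = new NodeTransformer();
        transformer.transform(rootNode);
        transformer.getDataTypes().forEach(System.out::println);   // one declared data type
        transformer.getEndpoints().forEach(System.out::println);   // one endpoint
    }
}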

NodeTransformer.java

@@ -9,14 +9,95 @@ import java.util.ArrayList;
import java.util.List;
public class NodeTransformer {
private final List<EndpointNode> endpoints = new ArrayList<>();
private final List<DataTypeNode> dataTypes = new ArrayList<>();
public static List<EndpointNode> transform(SimpleNode rootNode) throws ParseException {
ArrayList<EndpointNode> endpoints = new ArrayList<>();
for (int i = 0; i < rootNode.jjtGetNumChildren(); i++) {
SimpleNode endpoint = assertSimpleNode(rootNode.jjtGetChild(i));
endpoints.add(handleEndpoint(endpoint));
public List<EndpointNode> getEndpoints() {
return endpoints;
}
public List<DataTypeNode> getDataTypes() {
return dataTypes;
}
public void transform(SimpleNode rootNode) throws ParseException {
assertSimpleNodeType(rootNode, TapirParserTreeConstants.JJTSPECIFICATION);
int numChildren = rootNode.jjtGetNumChildren();
if (numChildren == 2) {
this.dataTypes.addAll(
handleDataTypes(
assertSimpleNodeType(
rootNode.jjtGetChild(0),
TapirParserTreeConstants.JJTDATATYPES
)
)
);
this.endpoints.addAll(
handleEndpoints(
assertSimpleNodeType(
rootNode.jjtGetChild(1),
TapirParserTreeConstants.JJTENDPOINTS
)
)
);
} else if (numChildren == 1) {
this.endpoints.addAll(
handleEndpoints(
assertSimpleNodeType(
rootNode.jjtGetChild(0),
TapirParserTreeConstants.JJTENDPOINTS
)
)
);
} else {
throw new ParseException("Expected specification to have 1 or 2 children but had " + numChildren);
}
return endpoints ;
}
private static List<EndpointNode> handleEndpoints(SimpleNode endpoints) throws ParseException {
ArrayList<EndpointNode> endpointNodes = new ArrayList<>();
for (int i = 0; i < endpoints.jjtGetNumChildren(); i++) {
endpointNodes.add(
handleEndpoint(
assertSimpleNodeType(
endpoints.jjtGetChild(i),
TapirParserTreeConstants.JJTENDPOINT
)
)
);
}
return endpointNodes;
}
private static List<DataTypeNode> handleDataTypes(SimpleNode dataTypesDeclaration) throws ParseException {
List<DataTypeNode> dataTypes = new ArrayList<>();
for (int i = 0; i < dataTypesDeclaration.jjtGetNumChildren(); i++) {
dataTypes.add(
handleCompoundDataType(
assertSimpleNodeType(
dataTypesDeclaration.jjtGetChild(i),
TapirParserTreeConstants.JJTCOMPOUNDDATATYPE
)
)
);
}
return dataTypes;
}
private static DataTypeNode handleCompoundDataType(SimpleNode dataTypeNode) throws ParseException {
String typename = getStringValue(
assertSimpleNodeType(
dataTypeNode.jjtGetChild(0),
TapirParserTreeConstants.JJTCOMPUNDDATATYPENAME
)
);
List<FieldNode> fields = handleFields(
assertSimpleNodeType(
dataTypeNode.jjtGetChild(1),
TapirParserTreeConstants.JJTDATATYPEFIELDS
)
);
return new DataTypeNode(typename, fields);
}
private static EndpointNode handleEndpoint(SimpleNode node) throws ParseException {
@@ -27,15 +108,16 @@ public class NodeTransformer {
SimpleNode pathsParseNode =
assertSimpleNodeType(
node.jjtGetChild(0),
TapirParserTreeConstants.JJTPATHS
TapirParserTreeConstants.JJTPATH
);
PathsNode pathsNode = handlePaths(pathsParseNode);
SimpleNode handlerParseNode =
assertSimpleNodeType(
node.jjtGetChild(1),
TapirParserTreeConstants.JJTHANDLERSPEC
TapirParserTreeConstants.JJTDATATYPEFIELDS
);
HandlerNode handlerNode = handleHandler(handlerParseNode);
List<FieldNode> fields = handleFields(handlerParseNode);
HandlerNode handlerNode = new HandlerNode(pathsNode.paths().getLast(), fields);
return new EndpointNode(pathsNode, handlerNode);
}
@@ -47,25 +129,25 @@ public class NodeTransformer {
String handlerName = getStringValue(
assertSimpleNodeType(
handlerSpec.jjtGetChild(0),
TapirParserTreeConstants.JJTHANDLERNAME
TapirParserTreeConstants.JJTCOMPUNDDATATYPENAME
)
);
SimpleNode payloadFieldsParseNode =
assertSimpleNodeType(
handlerSpec.jjtGetChild(1),
TapirParserTreeConstants.JJTPAYLOADFIELDS
TapirParserTreeConstants.JJTDATATYPEFIELDS
);
List<FieldNode> fields = handleFields(payloadFieldsParseNode);
return new HandlerNode(handlerName, fields);
}
private static List<FieldNode> handleFields(SimpleNode payloadFieldsParseNode) throws ParseException {
private static List<FieldNode> handleFields(SimpleNode compoundDatatTypeFields) throws ParseException {
ArrayList<FieldNode> fields = new ArrayList<>();
for (int i = 0; i < payloadFieldsParseNode.jjtGetNumChildren(); i++) {
for (int i = 0; i < compoundDatatTypeFields.jjtGetNumChildren(); i++) {
SimpleNode payloadFieldParseNode =
assertSimpleNodeType(
payloadFieldsParseNode.jjtGetChild(i),
TapirParserTreeConstants.JJTPAYLOADFIELD
compoundDatatTypeFields.jjtGetChild(i),
TapirParserTreeConstants.JJTDATATYPEFIELD
);
int numFieldNodes = payloadFieldParseNode.jjtGetNumChildren();
if (numFieldNodes != 2) {
@@ -74,13 +156,13 @@ public class NodeTransformer {
String fieldName = getStringValue(
assertSimpleNodeType(
payloadFieldParseNode.jjtGetChild(0),
TapirParserTreeConstants.JJTPAYLOADFIELDNAME
TapirParserTreeConstants.JJTDATATYPEFIELDNAME
)
);
String fieldType = getStringValue(
assertSimpleNodeType(
payloadFieldParseNode.jjtGetChild(1),
TapirParserTreeConstants.JJTPAYLOADFIELDTYPE
TapirParserTreeConstants.JJTDATATYPEFIELDTYPE
)
);
fields.add(new FieldNode(fieldName, fieldType));
@@ -92,7 +174,7 @@ public class NodeTransformer {
int numPathSegments = pathsParseNode.jjtGetNumChildren();
ArrayList<String> segments = new ArrayList<>();
for (int i = 0; i < numPathSegments; i++) {
SimpleNode segmentParseNode = assertSimpleNodeType(pathsParseNode.jjtGetChild(i), TapirParserTreeConstants.JJTPATH);
SimpleNode segmentParseNode = assertSimpleNodeType(pathsParseNode.jjtGetChild(i), TapirParserTreeConstants.JJTPATHSEGMENT);
segments.add(getStringValue(segmentParseNode));
}
return new PathsNode(segments);
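The transformer also references EndpointNode, HandlerNode, PathsNode and FieldNode, which this change set does not touch. For orientation, the shapes assumed in this comparison, inferred from the constructor calls above and the accessors the templates use (endpoint.paths.paths, endpoint.handler.name, field.type), not taken from the repository:
import java.util.List;

// Assumed shapes only (each record would live in its own file); inferred from usage,
// not part of this diff.
record FieldNode(String name, String type) { }
record PathsNode(List<String> paths) { }
record HandlerNode(String name, List<FieldNode> fields) { }
record EndpointNode(PathsNode paths, HandlerNode handler) { }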

TargetGenerator.java

@@ -16,24 +16,23 @@ import java.util.Objects;
public class TargetGenerator {
private final Path outputPath;
private final Path templatePath;
private final boolean verbose;
public static String ENDPOINTS_TEMPLATE_NAME = "endpoints.ftl";
private final List<EndpointNode> endpoints;
private final List<DataTypeNode> dataTypes;
private final boolean verbose ;
public static class TargetGeneratorException extends Exception {
public TargetGeneratorException(String message) {
super(message);
}
public TargetGeneratorException(Exception cause) {
super(cause);
}
}
public TargetGenerator(
final boolean verbose,
boolean verbose,
Path outputPath,
Path templatePath,
List<EndpointNode> endpoints
List<EndpointNode> endpoints,
List<DataTypeNode> dataTypes
) {
this.verbose = verbose;
this.outputPath = Objects.requireNonNull(
@@ -44,26 +43,48 @@ public class TargetGenerator {
templatePath,
"Template path is required"
);
this.endpoints = Objects.requireNonNull(endpoints) ;
this.endpoints = Objects.requireNonNull(endpoints);
this.dataTypes = Objects.requireNonNull(dataTypes);
}
public void generate() throws TargetGeneratorException, IOException, TemplateException {
Configuration cfg = new Configuration(Configuration.VERSION_2_3_34);
cfg.setDirectoryForTemplateLoading(this.templatePath.toFile());
cfg.setDefaultEncoding("UTF-8");
cfg.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER);
cfg.setLogTemplateExceptions(false);
cfg.setWrapUncheckedExceptions(true);
cfg.setFallbackOnNullLoopVariable(false);
Template temp = cfg.getTemplate(ENDPOINTS_TEMPLATE_NAME);
try (var outputFile = Files.newBufferedWriter(
outputPath.resolve("endpoints.scala"),
StandardOpenOption.CREATE,
StandardOpenOption.TRUNCATE_EXISTING
)) {
HashMap<String, List<EndpointNode>> templateData = new HashMap<>();
templateData.put("endpoints", endpoints);
temp.process(templateData, outputFile);
public void generate() throws TargetGeneratorException {
try {
Configuration cfg = new Configuration(Configuration.VERSION_2_3_34);
cfg.setDirectoryForTemplateLoading(this.templatePath.toFile());
cfg.setDefaultEncoding("UTF-8");
cfg.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER);
cfg.setLogTemplateExceptions(false);
cfg.setWrapUncheckedExceptions(true);
cfg.setFallbackOnNullLoopVariable(false);
List<Path> templates = Files
.list(this.templatePath)
.filter(Files::isRegularFile)
.filter(f -> f.getFileName().toString().endsWith(".ftl"))
.toList() ;
for (Path templatePath : templates) {
try (var outputFile = Files.newBufferedWriter(
outputName(templatePath),
StandardOpenOption.CREATE,
StandardOpenOption.TRUNCATE_EXISTING
)) {
if (this.verbose) {
System.out.println("Processing " + templatePath);
}
HashMap<String, Object> templateData = new HashMap<>();
templateData.put("endpoints", endpoints);
templateData.put("datatypes", dataTypes);
cfg.getTemplate(
templatePath.getFileName().toString()
).process(templateData, outputFile);
}
}
} catch (TemplateException | IOException ex) {
throw new TargetGeneratorException(ex);
}
}
private Path outputName(Path templatePath) {
String name = templatePath.getFileName().toString().replace(".ftl", ".scala") ;
return this.outputPath.resolve(name) ;
}
}
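The generator now renders every *.ftl file found in the template directory with the same model ("endpoints" plus "datatypes") and writes each result as the matching .scala file in the output directory. A hedged usage sketch mirroring the wiring in Generator above; the directory names are placeholders (tapir-out/ only echoes the new .gitignore entry), not values from this repository.
import java.nio.file.Path;

// Usage sketch only; paths are illustrative placeholders.
class GenerateSketch {
    static void run(NodeTransformer transformer) throws Exception {
        var targetGenerator = new TargetGenerator(
                true,                     // verbose
                Path.of("tapir-out"),     // output dir: one <name>.scala per template
                Path.of("templates"),     // template dir holding the *.ftl files
                transformer.getEndpoints(),
                transformer.getDataTypes());
        targetGenerator.generate();       // each foo.ftl is rendered to foo.scala
    }
}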

View file

@@ -23,6 +23,7 @@ SKIP: {
TOKEN : {
<OPENPARANTHESIS: "(">
| <CLOSEPARANTHESIS: ")">
| <SEMICOLON: ";">
| <TRANSITION: "->">
| <SLASH: "/">
| <COLON: ":">
@@ -32,63 +33,75 @@ TOKEN : {
| <IDENTIFIER: <FIRST_LETTER> (<LETTER>)* >
}
void pathSegment() :
{Token t;}
{
<SLASH> t=<IDENTIFIER>{jjtThis.value = t.image;}
}
void path() :
{Token t;}
{
t=<IDENTIFIER>{jjtThis.value = t.image;} <SLASH>
}
void paths() :
{}
{
path() (path())*
pathSegment() (pathSegment())*
}
void payloadFieldName() :
void dataTypeFieldType() :
{Token t;}
{
t=<IDENTIFIER>{jjtThis.value = t.image;}
}
void payloadFieldType() :
void dataTypeFieldName() :
{Token t;}
{
t=<IDENTIFIER>{jjtThis.value = t.image;}
}
void payloadField() :
void dataTypeField() :
{}
{
payloadFieldName() <COLON> payloadFieldType()
dataTypeFieldName() <COLON> dataTypeFieldType()
}
void payloadFields() :
void dataTypeFields() :
{}
{
payloadField() (<COMMA> payloadField() )*
dataTypeField() (<COMMA> dataTypeField() )*
}
void handlerName() :
void compundDataTypeName() :
{Token t;}
{
t=<IDENTIFIER>{jjtThis.value = t.image;}
}
void handlerSpec() :
void compoundDataType() :
{}
{
handlerName() <OPENPARANTHESIS> payloadFields() <CLOSEPARANTHESIS>
compundDataTypeName() <OPENPARANTHESIS> dataTypeFields() <CLOSEPARANTHESIS>
}
void dataTypes() :
{}
{
(compoundDataType() )*
}
void endpoint() :
{}
{
paths() <TRANSITION> handlerSpec()
path() <TRANSITION> <OPENPARANTHESIS> dataTypeFields() <CLOSEPARANTHESIS>
}
SimpleNode endpoints() :
void endpoints() :
{}
{
(endpoint() )*
}
SimpleNode specification() :
{}
{
dataTypes() endpoints()
{ return jjtThis; }
}

View file

@@ -0,0 +1,8 @@
object Codecs:
<#list datatypes as datatype>
given Codec[${datatype.name?cap_first}] = deriveCodec
</#list>
<#list endpoints as endpoint>
given Codec[${endpoint.handler.name?cap_first}Payload] = deriveCodec
</#list>

View file

@@ -1,4 +1,4 @@
package se.senashdev.projekt.api
package se.senashdev.project.api
import se.rutdev.projekt.api.HttpProtocol.VersionedResponse
import se.rutdev.framework.json.circe.RutUtilsCodec
@@ -10,8 +10,16 @@ import sttp.tapir.Schema
class Endpoints(override val config: OAuthUtils.OAuthConfig) extends framework.service.api.Endpoints with RutTapir with RutUtilsCodec:
type ApiEndpoint[I, O] = OAuthEndpoint[RequestMeta.OAuthRequestMeta, I, ProblemDetail, O]
<#list datatypes as datatype>
case class ${datatype.name}(
<#list datatype.fields as field>
${field.name} : ${field.type},
</#list>
)
</#list>
<#list endpoints as endpoint>
case class ${endpoint.handler.name?cap_first}(
case class ${endpoint.handler.name?cap_first}Payload(
<#list endpoint.handler.fields as field>
${field.name} : ${field.type},
</#list>
@@ -19,19 +27,15 @@ class Endpoints(override val config: OAuthUtils.OAuthConfig) extends framework.s
</#list>
<#list endpoints as endpoint>
given Codec[${endpoint.handler.name?cap_first}] = deriveCodec
val ${endpoint.handler.name}Endpoint = ApiEndpoint[${endpoint.handler.name?cap_first}Payload, VersionedResponse] =
<#list endpoint.paths.paths>
apiV1Endpoint
.post
<#items as segment>
.in("${segment}")
</#items>
.post
.in(jsonBody[${endpoint.handler.name?cap_first}Payload])
.out(jsonBody[VersionedResponse])
</#list>
<#list endpoints as endpoint>
val ${endpoint.handler.name}Endpoint = ApiEndpoint[${endpoint.handler.name?cap_first}, VersionedResponse] =
<#list endpoint.paths.paths>
apiV1Endpoint
.post
<#items as segment>
.in("${segment}")
</#items>
.post
.in(jsonBody[${endpoint.handler.name?cap_first}])
.out(jsonBody[VersionedResponse])
</#list>
</#list>