generateForAnnotatedElement method
String generateForAnnotatedElement(
  Element element,
  ConstantReader annotation,
  BuildStep buildStep,
)
Implement to return source code to generate for element.
This method is invoked based on finding elements annotated with an
instance of T. The annotation is provided as a ConstantReader.
Supported return values include a single String or multiple String values in an Iterable or Stream. It is also valid to return a Future of String, Iterable, or Stream. When multiple values are returned through an iterable or stream, they are deduplicated. Typically each value is an independent unit of code, and the deduplication prevents re-defining the same member multiple times. For example, if multiple annotated elements need a specific utility method available, it can be output for each of them, and the single deduplicated definition can be shared.
Implementations should return null when no content is generated. Empty
or whitespace-only String instances are also ignored.
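
As a rough illustration of this contract (not code from this package), a generator can return an Iterable so that a per-element member and a shared helper are emitted together, with the helper deduplicated across annotated elements. The annotation type MyAnnotation and the emitted names below are hypothetical:

import 'package:analyzer/dart/element/element.dart';
import 'package:build/build.dart';
import 'package:source_gen/source_gen.dart';

/// Hypothetical annotation used only for this sketch.
class MyAnnotation {
  const MyAnnotation();
}

class MyGenerator extends GeneratorForAnnotation<MyAnnotation> {
  @override
  Iterable<String> generateForAnnotatedElement(
    Element element,
    ConstantReader annotation,
    BuildStep buildStep,
  ) sync* {
    // One independent unit of code per annotated element.
    yield 'extension ${element.name}Helpers on ${element.name} {}';
    // Emitted for every annotated element; identical values are
    // deduplicated, so this utility is defined only once in the output.
    yield 'T _identity<T>(T value) => value;';
  }
}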
Implementation
@override
String generateForAnnotatedElement(
  Element element,
  ConstantReader annotation,
  BuildStep buildStep,
) {
  log.info('EndpointGenerator processing ${element.name}');
  try {
    helpers.validateClassElement(element, 'Endpoint');
    final classElement = element as ClassElement;
    // Check which base class is extended
    final hasBody = helpers.checkInheritance(
      classElement,
      'SparkEndpointWithBody',
    );
    final hasNoBody = helpers.checkInheritance(classElement, 'SparkEndpoint');
    if (!hasBody && !hasNoBody) {
      throw InvalidGenerationSourceError(
        '@Endpoint classes must extend SparkEndpoint or SparkEndpointWithBody<T>',
        element: element,
      );
    }
    final className = element.name;
    final path = annotation.read('path').stringValue;
    final method = annotation.read('method').stringValue;
    // Parse path parameters from the route pattern
    final pathParams = helpers.parsePathParams(path);
    // Convert path pattern to shelf_router format
    final shelfPath = helpers.convertToShelfPath(path);
    // Get the body type if using SparkEndpointWithBody
    final bodyType = hasBody ? _getBodyType(element) : null;
    // Get the return type from the handler method
    final handlerMethod = classElement.getMethod('handler');
    final returnType = handlerMethod?.returnType;
    final buffer = StringBuffer();
    // Generate static route info constant
    buffer.writeln('// Route: $path ($method)');
    buffer.writeln('const _\$${className}Route = (');
    buffer.writeln(" path: '$shelfPath',");
    buffer.writeln(" methods: <String>['$method'],");
    buffer.writeln(
      ' pathParams: <String>[${pathParams.map((p) => "'$p'").join(', ')}],',
    );
    buffer.writeln(" className: '$className',");
    buffer.writeln(');');
    buffer.writeln();
    // Generate Handler Function
    buffer.writeln('Future<Response> _\$handle$className(');
    buffer.writeln(' Request request,');
    for (final param in pathParams) {
      buffer.writeln(' String $param,');
    }
    buffer.writeln(') async {');
    // Instantiate the endpoint class
    buffer.writeln(' final endpoint = $className();');
    buffer.writeln();
    // Build middleware pipeline from the endpoint's middleware getter
    buffer.writeln(' var pipeline = const Pipeline();');
    buffer.writeln(' for (final middleware in endpoint.middleware) {');
    buffer.writeln(' pipeline = pipeline.addMiddleware(middleware);');
    buffer.writeln(' }');
    buffer.writeln();
    buffer.writeln(' final handler = (Request req) async {');
    buffer.writeln(' try {');
    // Create SparkRequest
    buffer.writeln(' final sparkRequest = SparkRequest(');
    buffer.writeln(' shelfRequest: req,');
    buffer.writeln(' pathParams: {');
    for (final p in pathParams) {
      buffer.writeln(" '$p': $p,");
    }
    buffer.writeln(' },');
    buffer.writeln(' );');
    buffer.writeln();
    if (hasBody && bodyType != null) {
      // Generate OpenAPI validation before body parsing
      _generateOpenApiValidation(buffer, annotation);
      // Parse body for SparkEndpointWithBody
      final contentTypes = annotation.objectValue
          .getField('contentTypes')
          ?.toListValue()
          ?.map((e) => e.toStringValue())
          .where((e) => e != null)
          .cast<String>()
          .toList();
      _generateBodyParsing(buffer, bodyType, contentTypes);
      buffer.writeln(
        ' final result = await endpoint.handler(sparkRequest, body);',
      );
    } else {
      // Generate OpenAPI validation before handler execution
      _generateOpenApiValidation(buffer, annotation);
      // No body for SparkEndpoint
      buffer.writeln(
        ' final result = await endpoint.handler(sparkRequest);',
      );
    }
    buffer.writeln();
    // Handle response serialization
    _generateResponseSerialization(buffer, returnType);
    buffer.writeln(' } on SparkValidationException catch (e) {');
    buffer.writeln(' return ApiError(');
    buffer.writeln(" message: e.message,");
    buffer.writeln(" code: 'VALIDATION_ERROR',");
    buffer.writeln(" details: e.errors,");
    buffer.writeln(' ).toResponse(400);');
    buffer.writeln(' } on ApiError catch (e) {');
    buffer.writeln(' return e.toResponse();');
    buffer.writeln(' } on SparkHttpException catch (e) {');
    buffer.writeln(' return ApiError(');
    buffer.writeln(" message: e.message,");
    buffer.writeln(" code: e.code,");
    buffer.writeln(" details: e.details,");
    buffer.writeln(' ).toResponse(e.statusCode);');
    buffer.writeln(' } catch (e, s) {');
    buffer.writeln(" print(e);");
    buffer.writeln(' return ApiError(');
    buffer.writeln(" message: 'Internal Server Error',");
    buffer.writeln(" code: 'INTERNAL_ERROR',");
    buffer.writeln(' ).toResponse(500);');
    buffer.writeln(' }');
    buffer.writeln(' };');
    buffer.writeln();
    // Execute pipeline
    buffer.writeln(' return pipeline.addHandler(handler)(request);');
    buffer.writeln('}');
    return buffer.toString();
  } catch (e, s) {
    log.severe('Error generating endpoint for ${element.name}', e, s);
    return '';
  }
}
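
For orientation, here is a rough sketch of the output this method builds for a hypothetical endpoint class GetUser annotated with @Endpoint(path: '/users/{id}', method: 'GET') and extending SparkEndpoint. It assumes the path helpers map '/users/{id}' to the shelf_router form '/users/<id>'; the fragments emitted by the private _generateOpenApiValidation and _generateResponseSerialization helpers are elided, and whitespace is tidied for readability:

// Route: /users/{id} (GET)
const _$GetUserRoute = (
  path: '/users/<id>',
  methods: <String>['GET'],
  pathParams: <String>['id'],
  className: 'GetUser',
);

Future<Response> _$handleGetUser(
  Request request,
  String id,
) async {
  final endpoint = GetUser();

  var pipeline = const Pipeline();
  for (final middleware in endpoint.middleware) {
    pipeline = pipeline.addMiddleware(middleware);
  }

  final handler = (Request req) async {
    try {
      final sparkRequest = SparkRequest(
        shelfRequest: req,
        pathParams: {
          'id': id,
        },
      );

      // ... validation emitted by _generateOpenApiValidation elided ...

      final result = await endpoint.handler(sparkRequest);

      // ... response built from `result` by _generateResponseSerialization elided ...
    } on SparkValidationException catch (e) {
      return ApiError(
        message: e.message,
        code: 'VALIDATION_ERROR',
        details: e.errors,
      ).toResponse(400);
    } on ApiError catch (e) {
      return e.toResponse();
    } on SparkHttpException catch (e) {
      return ApiError(
        message: e.message,
        code: e.code,
        details: e.details,
      ).toResponse(e.statusCode);
    } catch (e, s) {
      print(e);
      return ApiError(
        message: 'Internal Server Error',
        code: 'INTERNAL_ERROR',
      ).toResponse(500);
    }
  };

  return pipeline.addHandler(handler)(request);
}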