readTables method
Reads the schema information for tables in the connected database.
Fetches base table information (schema, name, comment) and then concurrently
fetches detailed column information via _readTableColumns and foreign key
constraint information via _readTableForeignKeys for each eligible table.
It applies filtering rules defined in the config (includeTables, excludeTables, generateForAllTables).
Constructs and returns a list of SupabaseTableInfo objects.
{@tool example}
final reader = SchemaReader(config);
await reader.connect();
try {
List<SupabaseTableInfo> tables = await reader.readTables();
for (var table in tables) {
print('Table: ${table.schema}.${table.name}');
print(' Columns: ${table.columns.map((c) => c.name).join(', ')}');
print(' Foreign Keys: ${table.foreignKeys.map((fk) => fk.constraintName).join(', ')}');
}
} finally {
await reader.disconnect();
}
{@end-tool}
@return A Future containing a list of SupabaseTableInfo objects, each representing
a table that matches the configuration rules. Includes detailed column
and foreign key information.
Example return structure:
json [ { "name": "posts", "schema": "public", "comment": "Blog posts", "columns": [ { "name": "id", "type": "integer", "isPrimaryKey": true, ... }, { "name": "author_id", "type": "uuid", ... }, { "name": "content", "type": "text", ... } ], "foreignKeys": [ { "constraintName": "posts_author_id_fkey", "columnNames": ["author_id"], "foreignSchemaName": "public", "foreignTableName": "users", "foreignColumnNames": ["id"], "updateAction": "NO ACTION", "deleteAction": "CASCADE", ... } ] }, // ... other tables ]
@throws StateError if the database connection is not initialized (connect() was not called or failed).
Note: if fetching details (columns, foreign keys, indexes) for an individual table fails, the error is logged and that table is skipped; the method does not throw for per-table failures.
Implementation
/// Reads schema information for every eligible table in the connected database.
///
/// Fetches base table rows (schema, name, comment), applies the exclusion and
/// inclusion rules from [config], concurrently loads column / foreign-key /
/// index details for each surviving table, and finally wires reverse
/// relations between the resulting [SupabaseTableInfo] objects.
///
/// Tables whose detail queries fail are logged and skipped, not thrown.
///
/// Throws a [StateError] if [connect] has not been called successfully.
Future<List<SupabaseTableInfo>> readTables() async {
  if (_connection == null) {
    throw StateError('Database connection not initialized.');
  }

  _logger.info('Reading base table information...');

  // 1. Fetch ALL base table information (one row per base table, with its
  //    pg_class comment if any).
  final baseRows = await _connection!.execute(
    Sql(r'''
      SELECT DISTINCT ON (pgc.oid, t.table_schema, t.table_name)
        t.table_schema, t.table_name, obj_description(pgc.oid, 'pg_class') as table_comment
      FROM information_schema.tables t
      JOIN pg_catalog.pg_class pgc ON pgc.relname = t.table_name
        AND pgc.relnamespace = (SELECT oid FROM pg_catalog.pg_namespace WHERE nspname = t.table_schema)
      WHERE t.table_schema NOT IN ('information_schema', 'pg_catalog', 'pg_toast')
        AND t.table_schema NOT LIKE 'pg_temp_%' AND t.table_schema NOT LIKE 'pg_toast_temp_%'
        AND t.table_type = 'BASE TABLE'
      ORDER BY pgc.oid ASC, t.table_schema, t.table_name
    '''),
  );

  // 2. Drop tables matched by config.excludeTables. Patterns are checked
  //    against the ORIGINAL database names, optionally schema-qualified
  //    ('schema.table'); a bare pattern matches the table name in any schema.
  final List<List<dynamic>> survivingRows;
  if (config.excludeTables.isEmpty) {
    survivingRows = baseRows;
    _logger.info(
      'No exclusions defined, processing all ${baseRows.length} initially found tables.',
    );
  } else {
    bool isExcluded(String schemaName, String dbName) {
      return config.excludeTables.any((pattern) {
        final pieces = pattern.split('.');
        if (pieces.length == 2) {
          // Schema-qualified pattern.
          return _matchesPattern(schemaName, pieces[0]) &&
              _matchesPattern(dbName, pieces[1]);
        }
        // Bare table-name pattern: matches in any schema.
        return _matchesPattern(dbName, pattern);
      });
    }

    survivingRows = [];
    for (final row in baseRows) {
      final schemaName = row[0] as String;
      final dbName = row[1] as String;
      if (isExcluded(schemaName, dbName)) {
        _logger.fine(
          'Excluding table based on config.excludeTables: $schemaName.$dbName',
        );
      } else {
        survivingRows.add(row);
      }
    }
    _logger.info(
      'Applied exclusions, processing ${survivingRows.length} of ${baseRows.length} initially found tables.',
    );
  }

  // 3. For each table that also passes the include rules, load its columns,
  //    foreign keys, and indexes, and build the initial table list (reverse
  //    relations are attached in steps 4-5).
  final List<SupabaseTableInfo> tablesPendingReverseRelations = [];
  for (final row in survivingRows) {
    final schemaName = row[0] as String;
    final dbName = row[1] as String;
    final tableComment = row[2] as String?;

    if (!_shouldProcessTableBasedOnIncludes(schemaName, dbName)) {
      _logger.fine(
        'Skipping table based on include rules: $schemaName.$dbName',
      );
      continue;
    }

    _logger.info('Reading schema details for table: $schemaName.$dbName');

    late final List<TetherColumnInfo> columns;
    late final List<SupabaseForeignKeyConstraint> foreignKeys;
    late final List<SupabaseIndexInfo> indexes;
    try {
      // The three detail queries are independent, so run them concurrently.
      final details = await Future.wait([
        _readTableColumns(schemaName, dbName),
        _readTableForeignKeys(schemaName, dbName),
        _readTableIndexes(schemaName, dbName),
      ]);
      columns = details[0] as List<TetherColumnInfo>;
      foreignKeys = details[1] as List<SupabaseForeignKeyConstraint>;
      indexes = details[2] as List<SupabaseIndexInfo>;
    } catch (e, s) {
      // A failure on one table should not abort the whole read; skip it.
      _logger.severe(
        'Failed to read complete details for table $schemaName.$dbName: $e\n$s',
      );
      continue;
    }

    tablesPendingReverseRelations.add(
      SupabaseTableInfo(
        name: StringUtils.toCamelCase(dbName),
        originalName: dbName,
        schema: schemaName,
        columns: columns,
        foreignKeys: foreignKeys,
        indexes: indexes,
        comment: tableComment,
        // Filled in below once every table's FKs are known.
        reverseRelations: const [],
      ),
    );
  }

  // 4. Compute reverse relations: for every foreign key, record on the
  //    TARGET table that the referencing table points at it.
  _logger.info('Populating reverse relations for all tables...');
  final reverseRelationsByTargetKey =
      <String, List<ModelReverseRelationInfo>>{};
  for (final referencingTable in tablesPendingReverseRelations) {
    for (final fk in referencingTable.foreignKeys) {
      final targetKey =
          '${fk.foreignTableSchema}.${fk.originalForeignTableName}';

      // Field name on the target model holding the list of referencing
      // models, e.g. a 'books' table referencing 'authors' yields 'books'
      // on AuthorModel. Uses a naive trailing-'s' pluralization; a real
      // pluralizer (or configuration) may be needed if this proves too
      // crude, or if multiple FKs from one table target the same table.
      var fieldName = StringUtils.toCamelCase(referencingTable.originalName);
      if (!fieldName.endsWith('s')) {
        fieldName = '${fieldName}s';
      }

      reverseRelationsByTargetKey
          .putIfAbsent(targetKey, () => [])
          .add(
            ModelReverseRelationInfo(
              fieldNameInThisModel: fieldName,
              referencingTableOriginalName: referencingTable.originalName,
              // NOTE(review): only the first FK column is recorded here;
              // composite foreign keys are not fully represented.
              foreignKeyColumnInReferencingTable: fk.originalColumns.first,
            ),
          );
    }
  }

  // 5. Rebuild each table with its reverse relations attached
  //    (SupabaseTableInfo is immutable, hence the copy).
  final enrichedTables = [
    for (final table in tablesPendingReverseRelations)
      SupabaseTableInfo(
        name: table.name,
        originalName: table.originalName,
        localName: table.localName,
        schema: table.schema,
        columns: table.columns,
        foreignKeys: table.foreignKeys,
        indexes: table.indexes,
        comment: table.comment,
        reverseRelations:
            reverseRelationsByTargetKey[table.uniqueKey] ?? const [],
      ),
  ];
  _logger.info('Finished populating reverse relations.');
  return enrichedTables;
}