mirror of https://github.com/AMT-Cheif/drift.git

Handle import statements and declared queries in .moor

This commit is contained in:
  parent f3db52717f
  commit 6a0716daaf
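In short, the .moor parser no longer rejects everything that is not a CREATE TABLE statement: import statements and named (declared) queries are now picked up as well. The test added at the bottom of this commit feeds the parser a standalone .moor file along these lines:

    import 'package:my_package/some_file.dart';
    import 'relative_file.moor';

    CREATE TABLE users(
      id INT NOT NULL PRIMARY KEY AUTOINCREMENT,
      name VARCHAR NOT NULL CHECK(LENGTH(name) BETWEEN 5 AND 30)
    );

    usersWithLongName: SELECT * FROM users WHERE LENGTH(name) > 25

The imports are collected as ImportStatement entries and the named query becomes a DeclaredMoorQuery, both exposed on ParsedMoorFile.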
@@ -2,6 +2,7 @@ import 'package:moor_generator/src/analyzer/errors.dart';
 import 'package:moor_generator/src/analyzer/runner/steps.dart';
 import 'package:moor_generator/src/analyzer/moor/create_table_reader.dart';
 import 'package:moor_generator/src/analyzer/runner/results.dart';
+import 'package:moor_generator/src/model/sql_query.dart';
 import 'package:sqlparser/sqlparser.dart';
 
 class MoorParser {
@@ -15,18 +16,18 @@ class MoorParser {
     final parsedFile = result.rootNode as MoorFile;
 
     final createdReaders = <CreateTableReader>[];
+    final queryDeclarations = <DeclaredMoorQuery>[];
+    final importStatements = <ImportStatement>[];
 
     for (var parsedStmt in parsedFile.statements) {
       if (parsedStmt is ImportStatement) {
        final importStmt = parsedStmt;
        step.inlineDartResolver.importStatements.add(importStmt.importedFile);
+        importStatements.add(importStmt);
       } else if (parsedStmt is CreateTableStatement) {
         createdReaders.add(CreateTableReader(parsedStmt));
-      } else {
-        step.reportError(ErrorInMoorFile(
-            span: parsedStmt.span,
-            message: 'At the moment, only CREATE TABLE statements are supported'
-                'in .moor files'));
+      } else if (parsedStmt is DeclaredStatement) {
+        queryDeclarations.add(DeclaredMoorQuery.fromStatement(parsedStmt));
       }
     }
 
@@ -40,6 +41,13 @@ class MoorParser {
     final createdTables =
         createdReaders.map((r) => r.extractTable(step.mapper)).toList();
 
-    return Future.value(ParsedMoorFile(result, declaredTables: createdTables));
+    return Future.value(
+      ParsedMoorFile(
+        result,
+        declaredTables: createdTables,
+        queries: queryDeclarations,
+        imports: importStatements,
+      ),
+    );
   }
 }

@@ -1,7 +1,9 @@
 import 'package:meta/meta.dart';
 import 'package:analyzer/dart/element/element.dart';
+import 'package:moor_generator/src/analyzer/runner/file_graph.dart';
 import 'package:moor_generator/src/model/specified_db_classes.dart';
 import 'package:moor_generator/src/model/specified_table.dart';
+import 'package:moor_generator/src/model/sql_query.dart';
 import 'package:sqlparser/sqlparser.dart';
 
 abstract class FileResult {}
@@ -26,7 +28,16 @@ class ParsedDartFile extends FileResult {
 class ParsedMoorFile extends FileResult {
   final ParseResult parseResult;
   MoorFile get parsedFile => parseResult.rootNode as MoorFile;
-  final List<SpecifiedTable> declaredTables;
 
-  ParsedMoorFile(this.parseResult, {this.declaredTables = const []});
+  final List<ImportStatement> imports;
+  final List<SpecifiedTable> declaredTables;
+  final List<DeclaredQuery> queries;
+
+  List<SqlQuery> resolvedQueries;
+  Map<ImportStatement, FoundFile> resolvedImports;
+
+  ParsedMoorFile(this.parseResult,
+      {this.declaredTables = const [],
+      this.queries = const [],
+      this.imports = const []});
 }

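A note on the data flow around the widened ParsedMoorFile above: the constructor arguments are filled during parsing, while resolvedQueries and resolvedImports stay unset until later steps populate them (the Task change below fills resolvedImports). A rough sketch, where parseResult, tables, declaredQueries and importStmts are placeholder locals, not names from this commit:

    // Sketch only: how a parse step builds the widened result object.
    final moorFile = ParsedMoorFile(
      parseResult,                 // ParseResult from the sqlparser package
      declaredTables: tables,      // List<SpecifiedTable> from CreateTableReader
      queries: declaredQueries,    // List<DeclaredMoorQuery>
      imports: importStmts,        // List<ImportStatement>
    );
    // Filled in by later steps:
    //   moorFile.resolvedImports = {importStmt: foundFile};
    //   moorFile.resolvedQueries = analyzedQueries;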
@@ -105,7 +105,7 @@ class ParseDartStep extends Step {
       final key = entry.key.toStringValue();
       final value = entry.value.toStringValue();
 
-      return DeclaredQuery(key, value);
+      return DeclaredDartQuery(key, value);
     }).toList();
   }
 }

@@ -4,6 +4,7 @@ import 'package:moor_generator/src/analyzer/runner/steps.dart';
 import 'package:moor_generator/src/analyzer/session.dart';
 import 'package:moor_generator/src/backends/backend.dart';
 import 'package:moor_generator/src/model/specified_db_classes.dart';
+import 'package:sqlparser/sqlparser.dart';
 
 /// A task is used to fully parse and analyze files based on an input file. To
 /// analyze that file, all transitive imports will have to be analyzed as well.
@@ -73,7 +74,8 @@ class Task {
         final parsed = await step.parseFile();
         file.currentResult = parsed;
 
-        for (var import in parsed.parsedFile.imports) {
+        parsed.resolvedImports = <ImportStatement, FoundFile>{};
+        for (var import in parsed.imports) {
           final found = session.resolve(file, import.importedFile);
           if (!await backend.exists(found.uri)) {
             step.reportError(ErrorInMoorFile(
@@ -83,6 +85,7 @@ class Task {
             ));
           } else {
             resolvedImports.add(found);
+            parsed.resolvedImports[import] = found;
           }
         }
         break;

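The resolvedImports map filled in above records which FoundFile each ImportStatement resolved to. A hypothetical consumer, not part of this commit, could read it back like this (importedFile and uri are the fields used in the diff above):

    // Illustration only: dumping the import resolution recorded by the Task.
    void debugPrintImports(ParsedMoorFile parsed) {
      parsed.resolvedImports?.forEach((importStmt, foundFile) {
        print('${importStmt.importedFile} -> ${foundFile.uri}');
      });
    }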
@@ -29,16 +29,22 @@ class SqlParser {
 
     for (var query in definedQueries) {
       final name = query.name;
-      final sql = query.sql;
 
       AnalysisContext context;
-      try {
-        context = _engine.analyze(sql);
-      } catch (e, s) {
-        step.reportError(MoorError(
-            severity: Severity.criticalError,
-            message: 'Error while trying to parse $name: $e, $s'));
-        return;
+
+      if (query is DeclaredDartQuery) {
+        final sql = query.sql;
+
+        try {
+          context = _engine.analyze(sql);
+        } catch (e, s) {
+          step.reportError(MoorError(
+              severity: Severity.criticalError,
+              message: 'Error while trying to parse $name: $e, $s'));
+          return;
+        }
+      } else if (query is DeclaredMoorQuery) {
+        context = _engine.analyzeNode(query.query);
       }
 
       for (var error in context.errors) {

@@ -7,11 +7,40 @@ import 'package:sqlparser/sqlparser.dart';
 final _illegalChars = RegExp(r'[^0-9a-zA-Z_]');
 final _leadingDigits = RegExp(r'^\d*');
 
-class DeclaredQuery {
+/// Represents the declaration of a compile-time query that will be analyzed
+/// by moor_generator.
+///
+/// The subclasses [DeclaredDartQuery] and [DeclaredMoorQuery] contain
+/// information about the declared statement, only the name is common for both
+/// declaration methods.
+/// In the `analyze` step, a [DeclaredQuery] is turned into a resolved
+/// [SqlQuery], which contains information about the affected tables and what
+/// columns are returned.
+abstract class DeclaredQuery {
   final String name;
+
+  DeclaredQuery(this.name);
+}
+
+/// A [DeclaredQuery] parsed from a Dart file by reading a constant annotation.
+class DeclaredDartQuery extends DeclaredQuery {
   final String sql;
 
-  DeclaredQuery(this.name, this.sql);
+  DeclaredDartQuery(String name, this.sql) : super(name);
+}
+
+/// A [DeclaredQuery] read from a `.moor` file, where the AST is already
+/// available.
+class DeclaredMoorQuery extends DeclaredQuery {
+  final AstNode query;
+
+  DeclaredMoorQuery(String name, this.query) : super(name);
+
+  factory DeclaredMoorQuery.fromStatement(DeclaredStatement stmt) {
+    final name = stmt.name;
+    final query = stmt.statement;
+    return DeclaredMoorQuery(name, query);
+  }
 }
 
 abstract class SqlQuery {

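To make the hierarchy above concrete, a minimal sketch of how the two subclasses are constructed; the query name and SQL text are illustrative, taken from the test in this commit:

    // Declared in Dart via an annotation: only the raw SQL string is known.
    final dartQuery = DeclaredDartQuery(
        'usersWithLongName', 'SELECT * FROM users WHERE LENGTH(name) > 25');

    // Declared in a .moor file: the statement is already parsed, so the factory
    // just unwraps the DeclaredStatement provided by the sqlparser package:
    //   final moorQuery = DeclaredMoorQuery.fromStatement(declaredStatement);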
@@ -3,20 +3,25 @@ import 'package:test_api/test_api.dart';
 
 void main() {
   final content = '''
+import 'package:my_package/some_file.dart';
+import 'relative_file.moor';
+
 CREATE TABLE users(
   id INT NOT NULL PRIMARY KEY AUTOINCREMENT,
   name VARCHAR NOT NULL CHECK(LENGTH(name) BETWEEN 5 AND 30)
 );
+
+usersWithLongName: SELECT * FROM users WHERE LENGTH(name) > 25
 ''';
 
-  test('extracts table structure from .moor files', () async {
+  test('parses standalone .moor files', () async {
     final parseStep = ParseMoorStep(null, null, content);
     final result = await parseStep.parseFile();
 
     expect(parseStep.errors.errors, isEmpty);
 
     final table = result.declaredTables.single;
 
     expect(table.sqlName, 'users');
+    expect(table.columns.map((c) => c.name.name), ['id', 'name']);
   });
 }

@@ -91,6 +91,25 @@ class SqlEngine {
     final node = result.rootNode;
 
     final context = AnalysisContext(node, result.sql);
+    _analyzeContext(context);
+
+    return context;
+  }
+
+  /// Analyzes the given [node], which should be a [CrudStatement].
+  /// The [AnalysisContext] enhances the AST by reporting type hints and errors.
+  ///
+  /// The analyzer needs to know all the available tables to resolve references
+  /// and result columns, so all known tables should be registered using
+  /// [registerTable] before calling this method.
+  AnalysisContext analyzeNode(AstNode node) {
+    final context = AnalysisContext(node, node.span.context);
+    _analyzeContext(context);
+    return context;
+  }
+
+  void _analyzeContext(AnalysisContext context) {
+    final node = context.root;
     final scope = _constructRootScope();
 
     try {
@@ -106,8 +125,6 @@ class SqlEngine {
       // todo should we do now? AFAIK, everything that causes an exception
       // is added as an error contained in the context.
     }
 
-    return context;
-
   }
 }
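To round this off, a minimal sketch of the two ways to obtain an AnalysisContext after this change. Table registration is elided; as the new doc comment says, known tables should be registered with registerTable before analyzing:

    import 'package:sqlparser/sqlparser.dart';

    void demo() {
      final engine = SqlEngine();
      // engine.registerTable(...) calls for the known tables would go here.

      // 1. Analyze a raw SQL string, as before (parse and analyze in one go):
      final fromString = engine.analyze('SELECT * FROM users;');
      print(fromString.errors);

      // 2. New in this commit: analyze an AST node that was already parsed,
      //    e.g. the statement wrapped in a DeclaredMoorQuery, without
      //    re-parsing the source:
      //   final fromNode = engine.analyzeNode(declaredMoorQuery.query);
    }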