mirror of https://github.com/AMT-Cheif/drift.git
Migrate more analyzer tests
parent 23b0c8a362
commit fe3566429f
@@ -48,7 +48,7 @@ class DartAccessorResolver
}

final table = await resolveDartReferenceOrReportError<DriftTable>(
dartType.element2,
dartType.element,
(msg) => DriftAnalysisError.forDartElement(element, msg));
if (table != null) {
tables.add(table);
@@ -72,7 +72,7 @@ class DartAccessorResolver
}

final view = await resolveDartReferenceOrReportError<DriftView>(
dartType.element2,
dartType.element,
(msg) => DriftAnalysisError.forDartElement(element, msg));
if (view != null) {
views.add(view);
@@ -135,7 +135,7 @@ class DartAccessorResolver
);
} else {
final dbType = element.allSupertypes
.firstWhereOrNull((i) => i.element2.name == 'DatabaseAccessor');
.firstWhereOrNull((i) => i.element.name == 'DatabaseAccessor');

// inherits from DatabaseAccessor<T>, we want to know which T
@@ -70,6 +70,7 @@ class DiscoverStep {
DiscoveredDartLibrary(library, _checkForDuplicates(finder.found));
break;
case '.drift':
case '.moor':
final engine = _driver.newSqlEngine();
final pendingElements = <DiscoveredDriftElement>[];
@@ -137,17 +137,6 @@ class _LintingVisitor extends RecursiveVisitor<void, void> {
relevantNode: expression,
));
}

final dependsOnPlaceholder = e.as == null &&
expression.allDescendants.whereType<DartPlaceholder>().isNotEmpty;
if (dependsOnPlaceholder) {
linter.sqlParserErrors.add(AnalysisError(
type: AnalysisErrorType.other,
message: 'The name of this column depends on a Dart template, which '
'breaks generated code. Try adding an `AS` alias to fix this.',
relevantNode: e,
));
}
}

if (e is NestedStarResultColumn) {
@@ -59,9 +59,10 @@ class TypeMapping {
name: view.schemaName,
resolvedColumns: [
for (final column in view.columns)
_MappedViewColumn(
column.nameInSql,
ViewColumn(
_SimpleColumn(column.nameInSql, _columnType(column)),
_columnType(column),
column.nameInSql,
),
],
);
@@ -141,11 +142,11 @@ class TypeConverterHint extends TypeHint {
TypeConverterHint(this.converter);
}

class _MappedViewColumn extends Column with ColumnWithType {
class _SimpleColumn extends Column with ColumnWithType {
@override
final String name;
@override
final ResolvedType type;

_MappedViewColumn(this.name, this.type);
_SimpleColumn(this.name, this.type);
}
@@ -32,8 +32,12 @@ class FileAnalyzer {
imports.add(imported);
}

final availableElements = driver.cache
.crawlMulti(imports)
final imported = driver.cache.crawlMulti(imports).toSet();
for (final import in imported) {
await driver.resolveElements(import.ownUri);
}

final availableElements = imported
.expand((reachable) {
final elementAnalysis = reachable.analysis.values;
return elementAnalysis.map((e) => e.result);
@@ -48,8 +52,8 @@ class FileAnalyzer {
driver.typeMapping.newEngineWithTables(availableElements);
final context = engine.analyze(query.sql);

final analyzer = QueryAnalyzer(context, driver,
references: element.references.toList());
final analyzer =
QueryAnalyzer(context, driver, references: availableElements);
queries[query.name] = analyzer.analyze(query);

for (final error in analyzer.lints) {
@@ -473,9 +473,12 @@ class QueryAnalyzer {
required.requiredNumberedVariables.contains(used.resolvedIndex);

if (explicitIndex != null && currentIndex >= maxIndex) {
throw ArgumentError(
'Cannot have a variable with an index lower than that of an '
'array appearing after an array!');
lints.add(AnalysisError(
type: AnalysisErrorType.other,
relevantNode: used,
message: 'Cannot have have a variable with an index lower than '
'that of an array appearing after an array!',
));
}

AppliedTypeConverter? converter;
@@ -501,8 +504,11 @@ class QueryAnalyzer {
// arrays cannot be indexed explicitly because they're expanded into
// multiple variables when executed
if (isArray && explicitIndex != null) {
throw ArgumentError(
'Cannot use an array variable with an explicit index');
lints.add(AnalysisError(
type: AnalysisErrorType.other,
message: 'Cannot use an array variable with an explicit index',
relevantNode: used,
));
}
if (isArray) {
maxIndex = used.resolvedIndex!;
@@ -635,9 +641,15 @@ class QueryAnalyzer {
for (var i = 0; i < variables.length; i++) {
final current = variables[i];
if (current.index > currentExpectedIndex) {
throw StateError('This query skips some variable indexes: '
'We found no variable is at position $currentExpectedIndex, '
'even though a variable at index ${current.index} exists.');
lints.add(
AnalysisError(
type: AnalysisErrorType.other,
message:
'Illegal variable index ${current.index} because no variable '
'at index $currentExpectedIndex exists.',
relevantNode: current.syntacticOrigin,
),
);
}

if (i < variables.length - 1) {
@@ -597,6 +597,14 @@ abstract class FoundElement {

/// If the element should be hidden from the parameter list
bool get hidden => false;

/// Returns a syntactic origin for this element in the query.
///
/// Some elements may have more than one origin. For instance, the query
/// `SELECT ?, ?1` only contains one logical [FoundVariable], but two
/// syntactic origins. This getter will return one of them, but the exact
/// source is undefined in that case.
AstNode get syntacticOrigin;
}

/// A semantic interpretation of a [Variable] in a sql statement.
@@ -677,6 +685,9 @@ class FoundVariable extends FoundElement implements HasType {
return 'var${variable.resolvedIndex}';
}
}

@override
AstNode get syntacticOrigin => variable;
}

abstract class DartPlaceholderType {}
@@ -809,6 +820,9 @@ class FoundDartPlaceholder extends FoundElement {
// already defines which fields are available
type is! InsertableDartPlaceholderType;
}

@override
AstNode get syntacticOrigin => astNode!;
}

/// A table or view that is available in the position of a
@@ -1,39 +0,0 @@
@Tags(['analyzer'])
import 'package:drift_dev/src/analysis/driver/state.dart';
import 'package:test/test.dart';

import 'test_utils.dart';

void main() {
test('handles cyclic imports', () async {
final state = TestBackend.inTest({
'a|lib/entry.dart': '''
import 'package:drift/drift.dart';

class Foos extends Table {
IntColumn get id => integer().autoIncrement()();
}

@DriftDatabase(include: {'db.drift'}, tables: [Foos])
class Database {}
''',
'a|lib/db.drift': '''
import 'entry.dart';

CREATE TABLE bars (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT
);
''',
});

final file =
await state.driver.fullyAnalyze(Uri.parse('package:a/entry.dart'));

expect(file.discovery, isA<DiscoveredDartLibrary>());
expect(file.allErrors, isEmpty);

final database = file.fileAnalysis!.resolvedDatabases.values.single;
expect(database.availableElements.map((t) => t.id.name),
containsAll(['foos', 'bars']));
});
}
@@ -0,0 +1,170 @@
@Tags(['analyzer'])
import 'package:drift/drift.dart' show DriftSqlType;
import 'package:drift_dev/src/analysis/driver/state.dart';
import 'package:drift_dev/src/analysis/results/results.dart';
import 'package:test/test.dart';

import 'test_utils.dart';

void main() {
test('handles cyclic imports', () async {
final state = TestBackend.inTest({
'a|lib/entry.dart': '''
import 'package:drift/drift.dart';

class Foos extends Table {
IntColumn get id => integer().autoIncrement()();
}

@DriftDatabase(include: {'db.drift'}, tables: [Foos])
class Database {}
''',
'a|lib/db.drift': '''
import 'entry.dart';

CREATE TABLE bars (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT
);
''',
});

final file =
await state.driver.fullyAnalyze(Uri.parse('package:a/entry.dart'));

expect(file.discovery, isA<DiscoveredDartLibrary>());
expect(file.allErrors, isEmpty);

final database = file.fileAnalysis!.resolvedDatabases.values.single;
expect(database.availableElements.map((t) => t.id.name),
containsAll(['foos', 'bars']));
});

test('resolves tables and queries', () async {
final backend = TestBackend.inTest({
'a|lib/database.dart': r'''
import 'package:drift/drift.dart';

import 'another.dart'; // so that the resolver picks it up

@DataClassName('UsesLanguage')
class UsedLanguages extends Table {
IntColumn get language => integer()();
IntColumn get library => integer()();

@override
Set<Column> get primaryKey => {language, library};
}

@DriftDatabase(
tables: [UsedLanguages],
include: {'package:a/tables.drift'},
queries: {
'transitiveImportTest': r'SELECT * FROM programming_languages ORDER BY $o',
},
)
class Database {}
''',
'a|lib/tables.drift': r'''
import 'another.dart';

CREATE TABLE reference_test (
id INT NOT NULL PRIMARY KEY AUTOINCREMENT,
library INT NOT NULL REFERENCES libraries(id)
);

CREATE TABLE libraries (
id INT NOT NULL PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL
);

findLibraries: SELECT * FROM libraries WHERE name LIKE ?;
joinTest: SELECT * FROM reference_test r
INNER JOIN libraries l ON l.id = r.library;
''',
'a|lib/another.dart': r'''
import 'package:drift/drift.dart';

class ProgrammingLanguages extends Table {
IntColumn get id => integer().autoIncrement()();
TextColumn get name => text()();
IntColumn get popularity => integer().named('ieee_index').nullable()();
}
''',
});

final file = await backend.analyze('package:a/database.dart');
expect(file.discovery, isA<DiscoveredDartLibrary>());
expect(file.isFullyAnalyzed, isTrue);
backend.expectNoErrors();

final database = file.fileAnalysis!.resolvedDatabases.values.single;
final availableTables = database.availableElements.whereType<DriftTable>();

expect(
availableTables.map((e) => e.schemaName),
containsAll(['used_languages', 'libraries', 'programming_languages']),
);

final tableWithReferences =
availableTables.singleWhere((e) => e.schemaName == 'reference_test');
expect(tableWithReferences.references, [
isA<DriftTable>().having((e) => e.schemaName, 'schemaName', 'libraries')
]);

final importQuery = database.definedQueries.values.single;
expect(importQuery.name, 'transitiveImportTest');
expect(importQuery.resultSet?.matchingTable?.table.nameOfRowClass,
'ProgrammingLanguage');
expect(importQuery.declaredInDriftFile, isFalse);
expect(importQuery.hasMultipleTables, isFalse);
expect(
importQuery.placeholders,
contains(
equals(
FoundDartPlaceholder(
SimpleDartPlaceholderType(SimpleDartPlaceholderKind.orderBy),
'o',
[
AvailableDriftResultSet(
'programming_languages',
availableTables
.firstWhere((e) => e.schemaName == 'programming_languages'),
)
],
),
),
),
);

final tablesFile = await backend.analyze('package:a/tables.drift');
final librariesQuery = tablesFile.fileAnalysis!.resolvedQueries.values
.singleWhere((e) => e.name == 'findLibraries') as SqlSelectQuery;
expect(librariesQuery.variables.single.sqlType, DriftSqlType.string);
expect(librariesQuery.declaredInDriftFile, isTrue);
});

test('still supports .moor files', () async {
final state = TestBackend.inTest({
'a|lib/main.dart': '''
import 'package:drift/drift.dart';

import 'table.dart';

@DriftDatabase(include: {'file.moor'})
class MyDatabase {}
''',
'a|lib/file.moor': '''
CREATE TABLE users (
id INTEGER NOT NULL PRIMARY KEY,
name TEXT
);
''',
});

final file = await state.analyze('package:a/main.dart');
state.expectNoErrors();

final db = file.fileAnalysis!.resolvedDatabases.values.single;
expect(db.availableElements, hasLength(1));
});
}
@@ -66,4 +66,78 @@ class MyDatabase2 extends _$MyDatabase {
await backend.driver.fullyAnalyze(mainUri);
backend.expectNoErrors();
});

test('supports inheritance for daos', () async {
final state = TestBackend.inTest({
'a|lib/database.dart': r'''
import 'package:drift/drift.dart';

class Products extends Table {
IntColumn get id => integer().autoIncrement()();
TextColumn get name => text()();
}

@DriftDatabase(tables: [Products], daos: [ProductsDao])
class MyDatabase {}

abstract class BaseDao<T extends Table, D >
extends DatabaseAccessor<MyDatabase> {

final TableInfo<T, D> _table;

BaseDao(MyDatabase db, this._table): super(db);

Future<void> insertOne(Insertable<T> value) => into(_table).insert(value);

Future<List<T>> selectAll() => select(_table).get();
}

abstract class BaseProductsDao extends BaseDao<Products, Product> {
BaseProductsDao(MyDatabase db): super(db, db.products);
}

@DriftAccessor(tables: [Products])
class ProductsDao extends BaseProductsDao with _$ProductDaoMixin {
ProductsDao(MyDatabase db): super(db);
}
''',
});

final file = await state.analyze('package:a/database.dart');

expect(file.isFullyAnalyzed, isTrue);
state.expectNoErrors();

final dao =
file.analysis[file.id('ProductsDao')]!.result as DatabaseAccessor;
expect(dao.databaseClass.toString(), 'MyDatabase');
});

test('only includes duplicate elements once', () async {
final state = TestBackend.inTest({
'a|lib/main.dart': '''
import 'package:drift/drift.dart';

import 'table.dart';

@DriftDatabase(tables: [Users], include: {'file.drift'})
class MyDatabase {}
''',
'a|lib/file.drift': '''
import 'table.dart';
''',
'a|lib/table.dart': '''
import 'package:drift/drift.dart';

class Users extends Table {
IntColumn get id => integer().autoIncrement()();
}
'''
});

final dbFile = await state.analyze('package:a/main.dart');
final db = dbFile.fileAnalysis!.resolvedDatabases.values.single;

expect(db.availableElements, hasLength(1));
});
}
@@ -1,7 +1,6 @@
@Tags(['analyzer'])
import 'package:drift/drift.dart' as drift;
import 'package:drift_dev/src/analysis/results/results.dart';
import 'package:sqlparser/sqlparser.dart';
import 'package:test/test.dart';

import '../../test_utils.dart';
@@ -98,4 +97,52 @@ void main() {
'query.'))
]);
});

test('imported views are analyzed', () async {
// Regression test for https://github.com/simolus3/drift/issues/1639

final testState = TestBackend.inTest({
'a|lib/imported.drift': '''
CREATE TABLE a (
b TEXT NOT NULL
);

CREATE VIEW my_view AS SELECT * FROM a;
''',
'a|lib/main.drift': '''
import 'imported.drift';

query: SELECT * FROM my_view;
''',
});

final file = await testState.analyze('package:a/main.drift');
testState.expectNoErrors();

expect(file.analysis, hasLength(1));
});

test('picks valid Dart names for columns', () async {
final testState = TestBackend.inTest({
'a|lib/a.drift': '''
CREATE VIEW IF NOT EXISTS repro AS
SELECT 1,
2 AS "1",
3 AS "a + b",
4 AS foo_bar_baz
;
''',
});

final file = await testState.analyze('package:a/a.drift');
expect(file.allErrors, isEmpty);

final view = file.analyzedElements.single as DriftView;
expect(view.columns.map((e) => e.nameInDart), [
'empty', // 1
'empty1', // 2 AS "1"
'ab', // AS "a + b"
'fooBarBaz', // fooBarBaz
]);
});
}
@@ -1,14 +1,13 @@
import 'package:drift_dev/moor_generator.dart';
import 'package:drift/drift.dart';
import 'package:drift_dev/src/analyzer/options.dart';
import 'package:drift_dev/src/analyzer/runner/results.dart';
import 'package:test/test.dart';

import '../utils.dart';
import '../../test_utils.dart';

void main() {
test('experimental inference - integration test', () async {
final state = TestState.withContent({
'foo|lib/a.moor': '''
final state = TestBackend.inTest({
'foo|lib/a.drift': '''
CREATE TABLE artists (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
name VARCHAR NOT NULL
@@ -37,15 +36,12 @@ totalDurationByArtist:
'''
}, options: const DriftOptions.defaults());

final file = await state.analyze('package:foo/a.moor');
final result = file.currentResult as ParsedDriftFile;
final queries = result.resolvedQueries;

expect(state.session.errorsInFileAndImports(file), isEmpty);
state.close();
final file = await state.analyze('package:foo/a.drift');
state.expectNoErrors();

final totalDurationByArtist =
queries!.singleWhere((q) => q.name == 'totalDurationByArtist');
file.fileAnalysis!.resolvedQueries[file.id('totalDurationByArtist')];

expect(
totalDurationByArtist,
returnsColumns({
@@ -0,0 +1,315 @@
import 'package:drift_dev/src/analysis/driver/state.dart';
import 'package:drift_dev/src/analyzer/options.dart';
import 'package:test/test.dart';

import '../../test_utils.dart';

void main() {
test('warns when a result column is unresolved', () async {
final result = await TestBackend.analyzeSingle('a: SELECT ?;');

expect(result.allErrors,
[isDriftError(contains('unknown type')).withSpan('?')]);
});

test('warns for skipped variable index', () async {
final result = await TestBackend.analyzeSingle('''
q1(?2 AS TEXT): SELECT ?2;
q2: SELECT ?1 = ?3;
q3: SELECT ?1 = ?3 OR ?2;
''');

expect(result.allErrors, [
isDriftError(
'Illegal variable index 2 because no variable at index 1 exists.')
.withSpan('?2'),
isDriftError(
'Illegal variable index 3 because no variable at index 2 exists.')
.withSpan('?3'),
]);
});

test('warns for illegal variable after array or placeholder', () async {
final result = await TestBackend.analyzeSingle(r'''
CREATE TABLE t (i INTEGER PRIMARY KEY);

q1: SELECT * FROM t WHERE i IN ? OR i == ?2;
ok1: SELECT * FROM t WHERE i == ?1 OR i IN ?;
ok2: SELECT * FROM t WHERE i IN ? OR i = ?;

q2: SELECT * FROM t WHERE $pred OR ?1;
ok3: SELECT * FROM t WHERE i == ?1 OR $pred;
ok4: SELECT * FROM t WHERE $pred OR i = ?;

ok5: SELECT * FROM t WHERE $pred OR i IN ?;
''');

final message = contains('Cannot have have a variable with an index lower');

expect(result.allErrors, [
isDriftError(message).withSpan('?2'),
isDriftError(message).withSpan('?1'),
]);
});

test('warns about indexed array variable', () async {
final result = await TestBackend.analyzeSingle(r'''
CREATE TABLE t (i INTEGER PRIMARY KEY);

q: SELECT * FROM t WHERE i IN ?1;
''');

expect(
result.allErrors,
[
isDriftError('Cannot use an array variable with an explicit index')
.withSpan('?1'),
],
);
});

test('no warning for Dart placeholder in column', () async {
final result =
await TestBackend.analyzeSingle(r"a: SELECT 'string' = $expr;");

expect(result.allErrors, isEmpty);
});

test('warns when nested results refer to table-valued functions', () async {
final result = await TestBackend.analyzeSingle(
"a: SELECT json_each.** FROM json_each('');",
options: DriftOptions.defaults(modules: [SqlModule.json1]),
);

expect(
result.allErrors,
[
isDriftError(
contains('Nested star columns must refer to a table directly.'))
.withSpan('json_each.**')
],
);
});

test('warns about default values outside of expressions', () async {
final state = TestBackend.inTest({
'foo|lib/a.drift': r'''
CREATE TABLE foo (
id INT NOT NULL PRIMARY KEY,
content VARCHAR
);

all ($limit = 3): SELECT * FROM foo LIMIT $limit;
''',
});

final result = await state.analyze('package:foo/a.drift');

expect(
result.allErrors,
contains(isDriftError(contains('only supported for expressions'))),
);
});

test('warns when placeholder are used in insert with columns', () async {
final state = TestBackend.inTest({
'foo|lib/a.drift': r'''
CREATE TABLE foo (
id INT NOT NULL PRIMARY KEY,
content VARCHAR
);

in: INSERT INTO foo (id) $placeholder;
''',
});

final result = await state.analyze('package:foo/a.drift');

expect(
result.allErrors,
contains(isDriftError(contains("Dart placeholders can't be used here"))),
);
});

test(
'warns when nested results appear in compound statements',
() async {
final state = TestBackend.inTest({
'foo|lib/a.drift': '''
CREATE TABLE foo (
id INT NOT NULL PRIMARY KEY,
content VARCHAR
);

all: SELECT foo.** FROM foo UNION ALL SELECT foo.** FROM foo;
''',
});

final result = await state.analyze('package:foo/a.drift');

expect(
result.allErrors,
contains(isDriftError(
contains('columns may only appear in a top-level select'))),
);
},
);

test(
'warns when nested query appear in nested query',
() async {
final state = TestBackend.inTest({
'foo|lib/a.drift': '''
CREATE TABLE foo (
id INT NOT NULL PRIMARY KEY,
content VARCHAR
);

all: SELECT foo.**, LIST(SELECT *, LIST(SELECT * FROM foo) FROM foo) FROM foo;
''',
});

final result = await state.analyze('package:foo/a.drift');

expect(
result.allErrors,
contains(isDriftError(
contains('query may only appear in a top-level select'))),
);
},
);

group('warns about insert column count mismatch', () {
TestBackend? state;

Future<void> expectError() async {
final file = await state!.analyze('package:foo/a.drift');
expect(
file.allErrors,
contains(isDriftError('Expected tuple to have 2 values')),
);
}

test('in top-level queries', () async {
state = TestBackend.inTest({
'foo|lib/a.drift': '''
CREATE TABLE foo (
id INT NOT NULL PRIMARY KEY AUTOINCREMENT,
context VARCHAR
);

test: INSERT INTO foo VALUES (?)
''',
});
await expectError();
});

test('in CREATE TRIGGER statements', () async {
state = TestBackend.inTest({
'foo|lib/a.drift': '''
CREATE TABLE foo (
id INT NOT NULL PRIMARY KEY AUTOINCREMENT,
context VARCHAR
);

CREATE TRIGGER my_trigger AFTER DELETE ON foo BEGIN
INSERT INTO foo VALUES (old.context);
END;
''',
});
await expectError();
});

test('in @create statements', () async {
state = TestBackend.inTest({
'foo|lib/a.drift': '''
CREATE TABLE foo (
id INT NOT NULL PRIMARY KEY AUTOINCREMENT,
context VARCHAR
);

@create: INSERT INTO foo VALUES (old.context);
''',
});
await expectError();
});
});

group('warning about comparing textual date times', () {
Future<FileState> handle(String sql, {bool dateTimesAreText = true}) async {
final state = await TestBackend.analyzeSingle(
options:
DriftOptions.defaults(storeDateTimeValuesAsText: dateTimesAreText),
'''
CREATE TABLE t (
a DATETIME, b DATETIME, c DATETIME
);

q: $sql;
''',
);

return state;
}

test('for BETWEEN', () async {
final state = await handle('SELECT a BETWEEN b AND c FROM t');

expect(
state.allErrors,
contains(isDriftError(
contains('This compares two date time values lexicographically'),
)),
);
});

test('for equality', () async {
for (final operator in ['=', '==', '<>', '!=']) {
final state = await handle('SELECT a $operator b FROM t');
expect(
state.allErrors,
contains(
isDriftError(
contains(
'Semantically equivalent date time values may be formatted '
'differently',
),
).withSpan(operator),
),
);
}
});

test('for comparisons', () async {
for (final operator in ['<', '<=', '>=', '>']) {
final state = await handle('SELECT a $operator b FROM t');
expect(
state.allErrors,
contains(
isDriftError(
contains(
'This compares two date time values lexicographically',
),
).withSpan(operator),
),
);
}
});

test('does not trigger for unix timestamps', () async {
expect(
(await handle('SELECT a = b FROM t', dateTimesAreText: false))
.allErrors,
isEmpty);
expect(
(await handle('SELECT a BETWEEN b AND c FROM t',
dateTimesAreText: false))
.allErrors,
isEmpty);
expect(
(await handle('SELECT a <= c FROM t', dateTimesAreText: false))
.allErrors,
isEmpty);
});
});
}
@@ -0,0 +1,260 @@
import 'package:drift/drift.dart' show DriftSqlType;
import 'package:drift_dev/src/analysis/results/results.dart';
import 'package:drift_dev/src/analyzer/options.dart';
import 'package:sqlparser/sqlparser.dart';
import 'package:test/test.dart';

import '../../test_utils.dart';
import 'utils.dart';

Future<SqlQuery> _handle(String sql) async {
return analyzeSingleQueryInDriftFile('''
CREATE TABLE foo (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
name VARCHAR
);
CREATE TABLE bar (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
foo INTEGER NOT NULL REFERENCES foo(id)
);

a: $sql
''');
}

void main() {
test('respects explicit type arguments', () async {
final state = TestBackend.inTest({
'foo|lib/main.drift': '''
bar(?1 AS TEXT, :foo AS BOOLEAN): SELECT ?, :foo;
''',
});

final file = await state.analyze('package:foo/main.drift');
state.expectNoErrors();

final query = file.fileAnalysis!.resolvedQueries.values.single;
expect(query, const TypeMatcher<SqlSelectQuery>());

final resultSet = (query as SqlSelectQuery).resultSet;
expect(resultSet.matchingTable, isNull);
expect(resultSet.columns.map((c) => c.name), ['?', ':foo']);
expect(resultSet.columns.map((c) => c.sqlType),
[DriftSqlType.string, DriftSqlType.bool]);
});

test('reads REQUIRED syntax', () async {
final state = TestBackend.inTest({
'foo|lib/main.drift': '''
bar(REQUIRED ?1 AS TEXT OR NULL, REQUIRED :foo AS BOOLEAN): SELECT ?, :foo;
''',
});

final file = await state.analyze('package:foo/main.drift');
state.expectNoErrors();

final query = file.fileAnalysis!.resolvedQueries.values.single;
expect(
query.variables,
allOf(
hasLength(2),
everyElement(isA<FoundVariable>()
.having((e) => e.isRequired, 'isRequired', isTrue)),
),
);
});

group('detects whether multiple tables are referenced', () {
test('when only selecting from one table', () async {
final query = await _handle('SELECT * FROM foo;');
expect(query.hasMultipleTables, isFalse);
});

test('when selecting from multiple tables', () async {
final query =
await _handle('SELECT * FROM bar JOIN foo ON bar.foo = foo.id;');

expect(query.hasMultipleTables, isTrue);
});

test('when updating a single table', () async {
final query = await _handle('INSERT INTO bar (foo) SELECT id FROM foo;');

expect(query.hasMultipleTables, isTrue);
expect((query as UpdatingQuery).updates, hasLength(1));
});
});

test('infers result set for views', () async {
final state = TestBackend.inTest({
'foo|lib/main.drift': r'''
CREATE VIEW my_view AS SELECT 'foo', 2;

query: SELECT * FROM my_view;
''',
});

final file = await state.analyze('package:foo/main.drift');
state.expectNoErrors();

final query = file.fileAnalysis!.resolvedQueries.values.single;
expect(
query.resultSet!.matchingTable,
isA<MatchingDriftTable>()
.having(
(e) => e.table,
'table',
isA<DriftView>()
.having((e) => e.schemaName, 'schemaName', 'my_view'),
)
.having((e) => e.effectivelyNoAlias, 'effectivelyNoAlias', isTrue),
);
});

test('infers nested result set for views', () async {
final state = TestBackend.inTest({
'foo|lib/main.drift': r'''
CREATE VIEW my_view AS SELECT 'foo', 2;

query: SELECT foo.**, bar.** FROM my_view foo, my_view bar;
''',
});

final file = await state.analyze('package:foo/main.drift');
state.expectNoErrors();

final query = file.fileAnalysis!.resolvedQueries.values.single;

expect(query.resultSet!.nestedResults, hasLength(2));
expect(
query.resultSet!.nestedResults,
everyElement(isA<NestedResultTable>()
.having((e) => e.table.schemaName, 'table.schemName', 'my_view')));
});

for (final dateTimeAsText in [false, true]) {
test('analyzing date times (stored as text: $dateTimeAsText)', () async {
final state = TestBackend.inTest(
{
'foo|lib/foo.drift': r'''
CREATE TABLE foo (
bar DATETIME NOT NULL
);

q1: SELECT bar FROM foo;
q2: SELECT unixepoch('now');
q3: SELECT datetime('now');
''',
},
options: DriftOptions.defaults(
storeDateTimeValuesAsText: dateTimeAsText,
sqliteAnalysisOptions: const SqliteAnalysisOptions(
version: SqliteVersion.v3_38,
),
),
);

final file = await state.analyze('package:foo/foo.drift');
state.expectNoErrors();

final queries = file.fileAnalysis!.resolvedQueries.values.toList();
expect(queries, hasLength(3));

final q1 = queries[0];
expect(q1.resultSet!.columns.single.sqlType, DriftSqlType.dateTime);

final q2 = queries[1];
final q3 = queries[2];

if (dateTimeAsText) {
expect(q2.resultSet!.columns.single.sqlType, DriftSqlType.int);
expect(q3.resultSet!.columns.single.sqlType, DriftSqlType.dateTime);
} else {
expect(q2.resultSet!.columns.single.sqlType, DriftSqlType.dateTime);
expect(q3.resultSet!.columns.single.sqlType, DriftSqlType.string);
}
});
}

test('resolves nested result sets', () async {
final state = TestBackend.inTest({
'foo|lib/main.drift': r'''
CREATE TABLE points (
id INTEGER NOT NULL PRIMARY KEY,
lat REAL NOT NULL,
long REAL NOT NULL
);
CREATE TABLE routes (
id INTEGER NOT NULL PRIMARY KEY,
"from" INTEGER NOT NULL REFERENCES points (id),
"to" INTEGER NOT NULL REFERENCES points (id)
);

allRoutes: SELECT routes.*, "from".**, "to".**
FROM routes
INNER JOIN points "from" ON "from".id = routes.from
INNER JOIN points "to" ON "to".id = routes."to";
''',
});

final file = await state.analyze('package:foo/main.drift');
state.expectNoErrors();

final query = file.fileAnalysis!.resolvedQueries.values.single;
final resultSet = (query as SqlSelectQuery).resultSet;

expect(resultSet.columns.map((e) => e.name), ['id', 'from', 'to']);
expect(resultSet.matchingTable, isNull);
expect(
resultSet.nestedResults.cast<NestedResultTable>().map((e) => e.name),
['from', 'to'],
);
expect(
resultSet.nestedResults
.cast<NestedResultTable>()
.map((e) => e.table.schemaName),
['points', 'points'],
);
});

test('resolves nullability of aliases in nested result sets', () async {
final state = TestBackend.inTest({
'foo|lib/main.drift': r'''
CREATE TABLE tableA1 (id INTEGER);
CREATE TABLE tableB1 (id INTEGER);

query: SELECT
tableA1.**,
tableA2.**,
tableB1.**,
tableB2.**
FROM tableA1 -- not nullable

LEFT JOIN tableA1 AS tableA2 -- nullable
ON FALSE

INNER JOIN tableB1 -- not nullable
ON TRUE

LEFT JOIN tableB1 AS tableB2 -- nullable
ON FALSE;
''',
});

final file = await state.analyze('package:foo/main.drift');
state.expectNoErrors();

final query = file.fileAnalysis!.resolvedQueries.values.single;
final resultSet = (query as SqlSelectQuery).resultSet;

final nested = resultSet.nestedResults;
expect(
nested.cast<NestedResultTable>().map((e) => e.name),
['tableA1', 'tableA2', 'tableB1', 'tableB2'],
);
expect(
nested.cast<NestedResultTable>().map((e) => e.isNullable),
[false, true, false, true],
);
});
}
@@ -0,0 +1,12 @@
import 'package:drift_dev/src/analysis/results/results.dart';

import '../../test_utils.dart';

Future<SqlQuery> analyzeSingleQueryInDriftFile(String driftFile) async {
final file = await TestBackend.analyzeSingle(driftFile);
return file.fileAnalysis!.resolvedQueries.values.single;
}

Future<SqlQuery> analyzeQuery(String sql) async {
return analyzeSingleQueryInDriftFile('a: $sql');
}
@@ -50,6 +50,14 @@ class TestBackend extends DriftBackend {
return backend;
}

static Future<FileState> analyzeSingle(String content,
{String asset = 'a|lib/a.drift',
DriftOptions options = const DriftOptions.defaults()}) {
final assetId = AssetId.parse(asset);
final backend = TestBackend.inTest({asset: content}, options: options);
return backend.driver.fullyAnalyze(assetId.uri);
}

void expectNoErrors() {
for (final file in driver.cache.knownFiles.values) {
expect(file.allErrors, isEmpty, reason: 'Error in ${file.ownUri}');
@@ -1,56 +0,0 @@
@Tags(['analyzer'])
import 'package:drift_dev/src/analyzer/runner/results.dart';
import 'package:drift_dev/src/utils/type_utils.dart';
import 'package:test/test.dart';

import '../utils.dart';

void main() {
test('supports inheritance for daos', () async {
final state = TestState.withContent({
'a|lib/database.dart': r'''
import 'package:drift/drift.dart';

class Products extends Table {
IntColumn get id => integer().autoIncrement()();
TextColumn get name => text()();
}

@DriftDatabase(tables: [Products], daos: [ProductsDao])
class MyDatabase {}

abstract class BaseDao<T extends Table, D >
extends DatabaseAccessor<MyDatabase> {

final TableInfo<T, D> _table;

BaseDao(MyDatabase db, this._table): super(db);

Future<void> insertOne(Insertable<T> value) => into(_table).insert(value);

Future<List<T>> selectAll() => select(_table).get();
}

abstract class BaseProductsDao extends BaseDao<Products, Product> {
BaseProductsDao(MyDatabase db): super(db, db.products);
}

@DriftAccessor(tables: [ProductTable])
class ProductsDao extends BaseProductsDao with _$ProductDaoMixin {
ProductsDao(MyDatabase db): super(db);
}
''',
});

await state.runTask('package:a/database.dart');
final file = state.file('package:a/database.dart');

expect(file.isAnalyzed, isTrue);
expect(file.errors.errors, isEmpty);

final dao = (file.currentResult as ParsedDartFile).declaredDaos.single;
expect(dao.dbClass.nameIfInterfaceType, 'MyDatabase');

state.close();
});
}
@@ -1,36 +0,0 @@
import 'package:drift_dev/src/analyzer/runner/results.dart';
import 'package:test/test.dart';

import '../utils.dart';

void main() {
test("tables imported in two ways aren't duplicated", () async {
final state = TestState.withContent({
'foo|lib/main.dart': '''
import 'package:drift/drift.dart';

import 'table.dart';

@DriftDatabase(tables: [Users], include: {'file.moor'})
class MyDatabase {}
''',
'foo|lib/file.moor': '''
import 'table.dart';
''',
'foo|lib/table.dart': '''
import 'package:drift/drift.dart';

class Users extends Table {
IntColumn get id => integer().autoIncrement()();
}
'''
});

final dbFile = await state.analyze('package:foo/main.dart');
final db =
(dbFile.currentResult as ParsedDartFile).declaredDatabases.single;

state.close();
expect(db.entities, hasLength(1));
});
}
@@ -1,120 +0,0 @@
@Tags(['analyzer'])
import 'package:drift_dev/moor_generator.dart';
import 'package:drift_dev/src/analyzer/runner/file_graph.dart';
import 'package:drift_dev/src/analyzer/runner/results.dart';
import 'package:test/test.dart';

import '../utils.dart';

void main() {
late TestState state;

setUpAll(() {
state = TestState.withContent({
'test_lib|lib/database.dart': r'''
import 'package:drift/drift.dart';

import 'another.dart'; // so that the resolver picks it up

@DataClassName('UsesLanguage')
class UsedLanguages extends Table {
IntColumn get language => integer()();
IntColumn get library => integer()();

@override
Set<Column> get primaryKey => {language, library};
}

@DriftDatabase(
tables: [UsedLanguages],
include: {'package:test_lib/tables.moor'},
queries: {
'transitiveImportTest': r'SELECT * FROM programming_languages ORDER BY $o',
},
)
class Database {}
''',
'test_lib|lib/tables.moor': r'''
import 'another.dart';

CREATE TABLE reference_test (
id INT NOT NULL PRIMARY KEY AUTOINCREMENT,
library INT NOT NULL REFERENCES libraries(id)
);

CREATE TABLE libraries (
id INT NOT NULL PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL
);

findLibraries: SELECT * FROM libraries WHERE name LIKE ?;
joinTest: SELECT * FROM reference_test r
INNER JOIN libraries l ON l.id = r.library;
''',
'test_lib|lib/another.dart': r'''
import 'package:drift/drift.dart';

class ProgrammingLanguages extends Table {
IntColumn get id => integer().autoIncrement()();
TextColumn get name => text()();
IntColumn get popularity => integer().named('ieee_index').nullable()();
}
''',
});
});

tearDownAll(() {
state.close();
});

setUp(() async {
await state.runTask('package:test_lib/database.dart');
});

test('resolves tables and queries', () {
final file = state.file('package:test_lib/database.dart');

expect(file.state, FileState.analyzed);
expect(file.errors.errors, isEmpty);

final result = file.currentResult as ParsedDartFile;
final database = result.declaredDatabases.single;

expect(database.tables.map((t) => t.sqlName),
containsAll(['used_languages', 'libraries', 'programming_languages']));

final tableWithReferences =
database.tables.singleWhere((r) => r.sqlName == 'reference_test');
expect(tableWithReferences.references.single.sqlName, 'libraries');

final importQuery = database.queries!
.singleWhere((q) => q.name == 'transitiveImportTest') as SqlSelectQuery;
expect(importQuery.resultSet.matchingTable!.table.dartTypeCode(),
'ProgrammingLanguage');
expect(importQuery.declaredInMoorFile, isFalse);
expect(importQuery.hasMultipleTables, isFalse);
expect(
importQuery.placeholders,
contains(
equals(
FoundDartPlaceholder(
SimpleDartPlaceholderType(SimpleDartPlaceholderKind.orderBy),
'o',
[
AvailableMoorResultSet(
'programming_languages',
database.tables
.firstWhere((e) => e.sqlName == 'programming_languages'),
)
],
),
),
),
);

final librariesQuery = database.queries!
.singleWhere((q) => q.name == 'findLibraries') as SqlSelectQuery;
expect(librariesQuery.variables.single.type, DriftSqlType.string);
expect(librariesQuery.declaredInMoorFile, isTrue);
});
}
@@ -1,296 +0,0 @@
import 'package:drift_dev/moor_generator.dart';
import 'package:drift_dev/src/analyzer/errors.dart';
import 'package:drift_dev/src/analyzer/options.dart';
import 'package:drift_dev/src/analyzer/sql_queries/query_handler.dart';
import 'package:drift_dev/src/analyzer/sql_queries/type_mapping.dart';
import 'package:sqlparser/sqlparser.dart';
import 'package:test/test.dart';

import '../utils.dart';

void main() {
final engine = SqlEngine(EngineOptions(
useDriftExtensions: true, enabledExtensions: const [Json1Extension()]));
final mapper = TypeMapper(options: const DriftOptions.defaults());

final fakeQuery = DeclaredDartQuery('query', 'sql');

test('warns when a result column is unresolved', () {
final result = engine.analyze('SELECT ?;');
final moorQuery = QueryHandler(result, mapper).handle(fakeQuery);

expect(moorQuery.lints,
anyElement((AnalysisError q) => q.message!.contains('unknown type')));
});

test('warns when the result depends on a Dart template', () {
final result = engine.analyze(r"SELECT 'string' = $expr;");
final moorQuery = QueryHandler(result, mapper).handle(fakeQuery);

expect(moorQuery.lints,
anyElement((AnalysisError q) => q.message!.contains('Dart template')));
});

test('warns when nested results refer to table-valued functions', () {
final result = engine.analyze("SELECT json_each.** FROM json_each('')");
final moorQuery = QueryHandler(result, mapper).handle(fakeQuery);

expect(
moorQuery.lints,
contains(isA<AnalysisError>().having((e) => e.message, 'message',
contains('must refer to a table directly'))),
);
});

test('warns about default values outside of expressions', () async {
final state = TestState.withContent({
'foo|lib/a.moor': r'''
CREATE TABLE foo (
id INT NOT NULL PRIMARY KEY,
content VARCHAR
);

all ($limit = 3): SELECT * FROM foo LIMIT $limit;
''',
});

final result = await state.analyze('package:foo/a.moor');
state.close();

expect(
result.errors.errors,
contains(isA<DriftError>().having(
(e) => e.message,
'message',
contains('only supported for expressions'),
)),
);
});

test('warns when placeholder are used in insert with columns', () async {
final state = TestState.withContent({
'foo|lib/a.moor': r'''
CREATE TABLE foo (
id INT NOT NULL PRIMARY KEY,
content VARCHAR
);

in: INSERT INTO foo (id) $placeholder;
''',
});

final result = await state.analyze('package:foo/a.moor');
state.close();

expect(
result.errors.errors,
contains(isA<DriftError>().having(
(e) => e.message,
'message',
contains("Dart placeholders can't be used here"),
)),
);
});

test(
'warns when nested results appear in compound statements',
() async {
final state = TestState.withContent({
'foo|lib/a.moor': '''
CREATE TABLE foo (
id INT NOT NULL PRIMARY KEY,
content VARCHAR
);

all: SELECT foo.** FROM foo UNION ALL SELECT foo.** FROM foo;
''',
});

final result = await state.analyze('package:foo/a.moor');
state.close();

expect(
result.errors.errors,
contains(isA<DriftError>().having(
(e) => e.message,
'message',
contains('columns may only appear in a top-level select'),
)),
);
},
timeout: Timeout.none,
);

test(
'warns when nested query appear in nested query',
() async {
final state = TestState.withContent({
'foo|lib/a.moor': '''
CREATE TABLE foo (
id INT NOT NULL PRIMARY KEY,
content VARCHAR
);

all: SELECT foo.**, LIST(SELECT *, LIST(SELECT * FROM foo) FROM foo) FROM foo;
''',
});

final result = await state.analyze('package:foo/a.moor');
state.close();

expect(
result.errors.errors,
contains(isA<DriftError>().having(
(e) => e.message,
'message',
contains('query may only appear in a top-level select'),
)),
);
},
timeout: Timeout.none,
);

group('warns about insert column count mismatch', () {
TestState? state;

tearDown(() => state?.close());

Future<void> expectError() async {
final file = await state!.analyze('package:foo/a.moor');
expect(
file.errors.errors,
contains(const TypeMatcher<DriftError>().having(
(e) => e.message, 'message', 'Expected tuple to have 2 values')),
);
}

test('in top-level queries', () async {
state = TestState.withContent({
'foo|lib/a.moor': '''
CREATE TABLE foo (
id INT NOT NULL PRIMARY KEY AUTOINCREMENT,
context VARCHAR
);

test: INSERT INTO foo VALUES (?)
''',
});
await expectError();
});

test('in CREATE TRIGGER statements', () async {
state = TestState.withContent({
'foo|lib/a.moor': '''
CREATE TABLE foo (
id INT NOT NULL PRIMARY KEY AUTOINCREMENT,
context VARCHAR
);

CREATE TRIGGER my_trigger AFTER DELETE ON foo BEGIN
INSERT INTO foo VALUES (old.context);
END;
''',
});
await expectError();
});

test('in @create statements', () async {
state = TestState.withContent({
'foo|lib/a.moor': '''
CREATE TABLE foo (
id INT NOT NULL PRIMARY KEY AUTOINCREMENT,
context VARCHAR
);

@create: INSERT INTO foo VALUES (old.context);
''',
});
await expectError();
});
});

group('warning about comparing textual date times', () {
const options = AnalyzeStatementOptions(namedVariableTypes: {
':text_a': ResolvedType(type: BasicType.text, hint: IsDateTime()),
':text_b': ResolvedType(type: BasicType.text, hint: IsDateTime()),
':text_c': ResolvedType(type: BasicType.text, hint: IsDateTime()),
':int_a': ResolvedType(type: BasicType.int, hint: IsDateTime()),
':int_b': ResolvedType(type: BasicType.int, hint: IsDateTime()),
':int_c': ResolvedType(type: BasicType.int, hint: IsDateTime()),
});

SqlQuery handle(String sql, {bool dateTimesAreText = true}) {
final result = engine.analyze(sql, stmtOptions: options);
expect(result.errors, isEmpty,
reason: 'Unexpected error by sqlparser package');

final mapper = TypeMapper(
options: DriftOptions.defaults(
storeDateTimeValuesAsText: dateTimesAreText));
return QueryHandler(result, mapper).handle(fakeQuery);
}

test('for BETWEEN', () {
final query = handle('SELECT :text_a BETWEEN :text_b AND :text_c');
expect(
query.lints,
contains(
isA<AnalysisError>().having(
(e) => e.message,
'message',
contains(
'This compares two date time values lexicographically',
),
),
),
);
});

test('for equality', () {
for (final operator in ['=', '==', '<>', '!=']) {
final query = handle('SELECT :text_a $operator :text_b');
expect(
query.lints,
contains(
isA<AnalysisError>()
.having(
(e) => e.message,
'message',
contains(
'Semantically equivalent date time values may be formatted '
'differently',
),
)
.having((e) => e.span!.text, 'span.text', operator),
),
);
}
});

test('for comparisons', () {
for (final operator in ['<', '<=', '>=', '>']) {
final query = handle('SELECT :text_a $operator :text_b');
expect(
query.lints,
contains(
isA<AnalysisError>()
.having(
(e) => e.message,
'message',
contains(
'This compares two date time values lexicographically',
),
)
.having((e) => e.span!.text, 'span.text', operator),
),
);
}
});

test('does not trigger for unix timestamps', () {
expect(handle('SELECT :int_a = :int_b').lints, isEmpty);
expect(handle('SELECT :int_a BETWEEN :int_b AND :int_c').lints, isEmpty);
expect(handle('SELECT :int_a <= :int_c').lints, isEmpty);
});
});
}
@@ -1,61 +0,0 @@
import 'package:drift_dev/moor_generator.dart';
import 'package:drift_dev/src/analyzer/options.dart';
import 'package:drift_dev/src/analyzer/runner/results.dart';
import 'package:test/test.dart';

import '../utils.dart';

void main() {
test('select from view test', () async {
final state = TestState.withContent({
'foo|lib/a.moor': '''
CREATE TABLE artists (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
name VARCHAR NOT NULL
);

CREATE TABLE albums (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
artist INTEGER NOT NULL REFERENCES artists (id)
);

CREATE TABLE tracks (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
album INTEGER NOT NULL REFERENCES albums (id),
duration_seconds INTEGER NOT NULL,
was_single BOOLEAN NOT NULL DEFAULT FALSE
);

CREATE VIEW total_duration_by_artist_view AS
SELECT a.*, SUM(tracks.duration_seconds) AS duration
FROM artists a
INNER JOIN albums ON albums.artist = a.id
INNER JOIN tracks ON tracks.album = albums.id
GROUP BY a.id;

totalDurationByArtist:
SELECT * FROM total_duration_by_artist_view;
'''
}, options: const DriftOptions.defaults());

final file = await state.analyze('package:foo/a.moor');
final result = file.currentResult as ParsedDriftFile;
final queries = result.resolvedQueries;

expect(state.session.errorsInFileAndImports(file), isEmpty);
state.close();

final totalDurationByArtist =
queries!.singleWhere((q) => q.name == 'totalDurationByArtist');
expect(
totalDurationByArtist,
returnsColumns({
'id': DriftSqlType.int,
'name': DriftSqlType.string,
'duration': DriftSqlType.int,
}),
);
});
}
@ -1,56 +0,0 @@
|
|||
import 'package:drift_dev/moor_generator.dart';
|
||||
import 'package:drift_dev/src/analyzer/runner/results.dart';
|
||||
import 'package:test/test.dart';
|
||||
|
||||
import '../utils.dart';
|
||||
|
||||
void main() {
|
||||
test('respects explicit type arguments', () async {
|
||||
final state = TestState.withContent({
|
||||
'foo|lib/main.moor': '''
|
||||
bar(?1 AS TEXT, :foo AS BOOLEAN): SELECT ?, :foo;
|
||||
''',
|
||||
});
|
||||
|
||||
await state.runTask('package:foo/main.moor');
|
||||
final file = state.file('package:foo/main.moor');
|
||||
state.close();
|
||||
|
||||
expect(file.errors.errors, isEmpty);
|
||||
final content = file.currentResult as ParsedDriftFile;
|
||||
|
||||
final query = content.resolvedQueries!.single;
|
||||
expect(query, const TypeMatcher<SqlSelectQuery>());
|
||||
|
||||
final resultSet = (query as SqlSelectQuery).resultSet;
|
||||
expect(resultSet.matchingTable, isNull);
|
||||
expect(resultSet.columns.map((c) => c.name), ['?', ':foo']);
|
||||
expect(resultSet.columns.map((c) => c.type),
|
||||
[DriftSqlType.string, DriftSqlType.bool]);
|
||||
});
|
||||
|
||||
test('reads REQUIRED syntax', () async {
|
||||
final state = TestState.withContent({
|
||||
'foo|lib/main.moor': '''
|
||||
bar(REQUIRED ?1 AS TEXT OR NULL, REQUIRED :foo AS BOOLEAN): SELECT ?, :foo;
|
||||
''',
|
||||
});
|
||||
|
||||
await state.runTask('package:foo/main.moor');
|
||||
final file = state.file('package:foo/main.moor');
|
||||
state.close();
|
||||
|
||||
expect(file.errors.errors, isEmpty);
|
||||
final content = file.currentResult as ParsedDriftFile;
|
||||
|
||||
final query = content.resolvedQueries!.single;
|
||||
expect(
|
||||
query.variables,
|
||||
allOf(
|
||||
hasLength(2),
|
||||
everyElement(isA<FoundVariable>()
|
||||
.having((e) => e.isRequired, 'isRequired', isTrue)),
|
||||
),
|
||||
);
|
||||
});
|
||||
}

@@ -1,258 +0,0 @@
import 'package:drift_dev/moor_generator.dart';
import 'package:drift_dev/src/analyzer/drift/create_table_reader.dart';
import 'package:drift_dev/src/analyzer/options.dart';
import 'package:drift_dev/src/analyzer/runner/file_graph.dart';
import 'package:drift_dev/src/analyzer/runner/results.dart';
import 'package:drift_dev/src/analyzer/runner/steps.dart';
import 'package:drift_dev/src/analyzer/sql_queries/query_handler.dart';
import 'package:drift_dev/src/analyzer/sql_queries/type_mapping.dart';
import 'package:sqlparser/sqlparser.dart';
import 'package:test/test.dart';

import '../utils.dart';

const createFoo = '''
CREATE TABLE foo (
  id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
  name VARCHAR
);
''';

const createBar = '''
CREATE TABLE bar (
  id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
  foo INTEGER NOT NULL REFERENCES foo(id)
);
''';

Future<void> main() async {
  final mapper = TypeMapper(options: const DriftOptions.defaults());
  final engine = SqlEngine(EngineOptions(useDriftExtensions: true));
  final state = TestState.withContent({'a|lib/foo.drift': 'foo'});
  tearDownAll(state.close);
  final task = await state.runTask('package:a/foo.drift');

  final step = ParseMoorStep(
      task, FoundFile(Uri.parse('file://foo'), FileType.drift), '');

  final parsedFoo = engine.parse(createFoo).rootNode as CreateTableStatement;
  final foo = await CreateTableReader(parsedFoo, step, await task.helper)
      .extractTable(mapper);
  engine.registerTable(mapper.extractStructure(foo!));

  final parsedBar = engine.parse(createBar).rootNode as CreateTableStatement;
  final bar = await CreateTableReader(parsedBar, step, await task.helper)
      .extractTable(mapper);
  engine.registerTable(mapper.extractStructure(bar!));

  SqlQuery parse(String sql) {
    final parsed = engine.analyze(sql);
    final fakeQuery = DeclaredDartQuery('query', sql);
    return QueryHandler(parsed, mapper).handle(fakeQuery);
  }

  group('detects whether multiple tables are referenced', () {
    test('when only selecting from one table', () {
      expect(parse('SELECT * FROM foo').hasMultipleTables, isFalse);
    });

    test('when selecting from multiple tables', () {
      expect(
        parse('SELECT * FROM bar JOIN foo ON bar.foo = foo.id')
            .hasMultipleTables,
        isTrue,
      );
    });

    test('when updating a single table', () {
      final query = parse('INSERT INTO bar (foo) SELECT id FROM foo');

      expect(query.hasMultipleTables, isTrue);
      expect((query as UpdatingQuery).updates, hasLength(1));
    });
  });

  test('throws when variable indexes are skipped', () {
    expect(() => parse('SELECT ?2'), throwsStateError);
    expect(() => parse('SELECT ?1 = ?3'), throwsStateError);
    expect(() => parse('SELECT ?1 = ?3 OR ?2'), returnsNormally);
  });

  test('resolves nested result sets', () async {
    final state = TestState.withContent({
      'foo|lib/main.moor': r'''
        CREATE TABLE points (
          id INTEGER NOT NULL PRIMARY KEY,
          lat REAL NOT NULL,
          long REAL NOT NULL
        );
        CREATE TABLE routes (
          id INTEGER NOT NULL PRIMARY KEY,
          "from" INTEGER NOT NULL REFERENCES points (id),
          "to" INTEGER NOT NULL REFERENCES points (id)
        );

        allRoutes: SELECT routes.*, "from".**, "to".**
          FROM routes
            INNER JOIN points "from" ON "from".id = routes.from
            INNER JOIN points "to" ON "to".id = routes."to";
      ''',
    });

    final file = await state.analyze('package:foo/main.moor');
    final result = file.currentResult as ParsedDriftFile;
    state.close();

    expect(file.errors.errors, isEmpty);

    final query = result.resolvedQueries!.single;
    final resultSet = (query as SqlSelectQuery).resultSet;

    expect(resultSet.columns.map((e) => e.name), ['id', 'from', 'to']);
    expect(resultSet.matchingTable, isNull);
    expect(
      resultSet.nestedResults.cast<NestedResultTable>().map((e) => e.name),
      ['from', 'to'],
    );
    expect(
      resultSet.nestedResults
          .cast<NestedResultTable>()
          .map((e) => e.table.displayName),
      ['points', 'points'],
    );
  });

  test('resolves nullability of aliases in nested result sets', () async {
    final state = TestState.withContent({
      'foo|lib/main.moor': r'''
        CREATE TABLE tableA1 (id INTEGER);
        CREATE TABLE tableB1 (id INTEGER);

        query: SELECT
          tableA1.**,
          tableA2.**,
          tableB1.**,
          tableB2.**
        FROM tableA1 -- not nullable

        LEFT JOIN tableA1 AS tableA2 -- nullable
          ON FALSE

        INNER JOIN tableB1 -- not nullable
          ON TRUE

        LEFT JOIN tableB1 AS tableB2 -- nullable
          ON FALSE;
      ''',
    });

    final file = await state.analyze('package:foo/main.moor');
    final result = file.currentResult as ParsedDriftFile;
    state.close();

    expect(file.errors.errors, isEmpty);

    final query = result.resolvedQueries!.single;
    final resultSet = (query as SqlSelectQuery).resultSet;

    final nested = resultSet.nestedResults;
    expect(
      nested.cast<NestedResultTable>().map((e) => e.name),
      ['tableA1', 'tableA2', 'tableB1', 'tableB2'],
    );
    expect(
      nested.cast<NestedResultTable>().map((e) => e.isNullable),
      [false, true, false, true],
    );
  });

  test('infers result set for views', () async {
    final state = TestState.withContent({
      'foo|lib/main.moor': r'''
        CREATE VIEW my_view AS SELECT 'foo', 2;

        query: SELECT * FROM my_view;
      ''',
    });

    final file = await state.analyze('package:foo/main.moor');
    expect(file.errors.errors, isEmpty);

    final result = file.currentResult as ParsedDriftFile;

    final query = result.resolvedQueries!.single;
    expect(
        query.resultSet!.matchingTable,
        isA<MatchingMoorTable>()
            .having((e) => e.table, 'table',
                isA<MoorView>().having((e) => e.name, 'name', 'my_view'))
            .having((e) => e.effectivelyNoAlias, 'effectivelyNoAlias', isTrue));
  });

  test('infers nested result set for views', () async {
    final state = TestState.withContent({
      'foo|lib/main.moor': r'''
        CREATE VIEW my_view AS SELECT 'foo', 2;

        query: SELECT foo.**, bar.** FROM my_view foo, my_view bar;
      ''',
    });

    final file = await state.analyze('package:foo/main.moor');
    expect(file.errors.errors, isEmpty);

    final result = file.currentResult as ParsedDriftFile;
    final query = result.resolvedQueries!.single;

    expect(query.resultSet!.nestedResults, hasLength(2));
    expect(
        query.resultSet!.nestedResults,
        everyElement(isA<NestedResultTable>().having(
            (e) => e.table.displayName, 'table.displayName', 'my_view')));
  });

  for (final dateTimeAsText in [false, true]) {
    test('analyzing date times (stored as text: $dateTimeAsText)', () async {
      final state = TestState.withContent(
        {
          'foo|lib/foo.drift': r'''
            CREATE TABLE foo (
              bar DATETIME NOT NULL
            );

            q1: SELECT bar FROM foo;
            q2: SELECT unixepoch('now');
            q3: SELECT datetime('now');
          ''',
        },
        options: DriftOptions.defaults(
          storeDateTimeValuesAsText: dateTimeAsText,
          sqliteAnalysisOptions: const SqliteAnalysisOptions(
            version: SqliteVersion.v3_38,
          ),
        ),
      );
      addTearDown(state.close);

      final file = await state.analyze('package:foo/foo.drift');
      expect(file.errors.errors, isEmpty);

      final result = file.currentResult as ParsedDriftFile;
      expect(result.resolvedQueries, hasLength(3));

      final q1 = result.resolvedQueries![0];
      expect(q1.resultSet!.columns.single.type, DriftSqlType.dateTime);

      final q2 = result.resolvedQueries![1];
      final q3 = result.resolvedQueries![2];

      if (dateTimeAsText) {
        expect(q2.resultSet!.columns.single.type, DriftSqlType.int);
        expect(q3.resultSet!.columns.single.type, DriftSqlType.dateTime);
      } else {
        expect(q2.resultSet!.columns.single.type, DriftSqlType.dateTime);
        expect(q3.resultSet!.columns.single.type, DriftSqlType.string);
      }
    });
  }
}

@@ -1,99 +0,0 @@
import 'package:drift/drift.dart' show DriftSqlType;
import 'package:drift_dev/src/analyzer/options.dart';
import 'package:drift_dev/src/analyzer/sql_queries/type_mapping.dart';
import 'package:drift_dev/src/model/sql_query.dart';
import 'package:sqlparser/sqlparser.dart';
import 'package:test/test.dart';

final _idColumn = TableColumn('id', const ResolvedType(type: BasicType.int));
final _titleColumn =
    TableColumn('title', const ResolvedType(type: BasicType.text));
final Table table =
    Table(name: 'todos', resolvedColumns: [_idColumn, _titleColumn]);

void main() {
  final engine = SqlEngine(EngineOptions(useDriftExtensions: true));
  final mapper = TypeMapper(options: const DriftOptions.defaults());

  test('extracts variables and sorts them by index', () {
    final result = engine.analyze(
        'SELECT * FROM todos WHERE title = ?2 OR id IN ? OR title = ?1');

    final elements = mapper
        .extractElements(ctx: result, root: result.root)
        .cast<FoundVariable>();

    expect(elements.map((v) => v.index), [1, 2, 3]);
  });

  test('throws when an array with an explicit index is used', () {
    final result = engine.analyze('SELECT 1 WHERE 1 IN ?1');

    expect(() => mapper.extractElements(ctx: result, root: result.root),
        throwsArgumentError);
  });

  test(
    'throws when an explicitly index var with higher index appears after array',
    () {
      final result = engine.analyze('SELECT 1 WHERE 1 IN ? OR 2 = ?2');
      expect(() => mapper.extractElements(ctx: result, root: result.root),
          throwsArgumentError);
    },
  );

  test('extracts variables but excludes nested queries', () {
    final result = engine.analyze(
      'SELECT *, LIST(SELECT * FROM todos WHERE title = ?3)'
      'FROM todos WHERE title = ?2 OR id IN ? OR title = ?1',
    );

    final elements = mapper
        .extractElements(ctx: result, root: result.root)
        .cast<FoundVariable>();

    expect(elements.map((v) => v.index), [1, 2, 3]);
  });

  test('extracts variables from nested query', () {
    final result = engine.analyze(
      'SELECT *, LIST(SELECT * FROM todos WHERE title = ?1)'
      'FROM todos WHERE title = ?2 OR id IN ? OR title = ?1',
    );

    final root =
        ((result.root as SelectStatement).columns[1] as NestedQueryColumn)
            .select;

    final elements =
        mapper.extractElements(ctx: result, root: root).cast<FoundVariable>();

    expect(elements.map((v) => v.index), [1]);
  });

  test('infers result columns as datetime', () {
    final datesAsTimestamp = TypeMapper(options: const DriftOptions.defaults());
    final datesAsText = TypeMapper(
        options: const DriftOptions.defaults(storeDateTimeValuesAsText: true));

    const dateIntType = ResolvedType(type: BasicType.int, hint: IsDateTime());
    const dateTextType = ResolvedType(type: BasicType.text, hint: IsDateTime());

    expect(datesAsTimestamp.resolvedToMoor(dateIntType), DriftSqlType.dateTime);
    expect(datesAsText.resolvedToMoor(dateIntType), DriftSqlType.int);

    expect(datesAsTimestamp.resolvedToMoor(dateTextType), DriftSqlType.string);
    expect(datesAsText.resolvedToMoor(dateTextType), DriftSqlType.dateTime);
  });

  test('maps datetime to sql type', () {
    final datesAsTimestamp = TypeMapper(options: const DriftOptions.defaults());
    final datesAsText = TypeMapper(
        options: const DriftOptions.defaults(storeDateTimeValuesAsText: true));

    expect(datesAsTimestamp.resolveForColumnType(DriftSqlType.dateTime),
        const ResolvedType(type: BasicType.int, hint: IsDateTime()));
    expect(datesAsText.resolveForColumnType(DriftSqlType.dateTime),
        const ResolvedType(type: BasicType.text, hint: IsDateTime()));
  });
}

@@ -1,27 +0,0 @@
import 'package:test/test.dart';

import '../utils.dart';

void main() {
  // Regression test for https://github.com/simolus3/drift/issues/1639
  test('imported views are analyzed', () async {
    final testState = TestState.withContent({
      'a|lib/imported.drift': '''
        CREATE TABLE a (
          b TEXT NOT NULL
        );

        CREATE VIEW my_view AS SELECT * FROM a;
      ''',
      'a|lib/main.drift': '''
        import 'imported.drift';

        query: SELECT * FROM my_view;
      ''',
    });
    addTearDown(testState.close);

    final file = await testState.analyze('package:a/main.drift');
    expect(file.errors.errors, isEmpty);
  });
}

@@ -1,31 +0,0 @@
import 'package:drift_dev/src/model/model.dart';
import 'package:test/test.dart';

import '../utils.dart';

void main() {
  test('picks valid Dart names', () async {
    final testState = TestState.withContent({
      'a|lib/a.drift': '''
        CREATE VIEW IF NOT EXISTS repro AS
          SELECT 1,
            2 AS "1",
            3 AS "a + b",
            4 AS foo_bar_baz
          ;
      ''',
    });
    addTearDown(testState.close);

    final file = await testState.analyze('package:a/a.drift');
    expect(file.errors.errors, isEmpty);

    final view = file.currentResult!.declaredEntities.single as MoorView;
    expect(view.columns.map((e) => e.dartGetterName), [
      'empty', // 1
      'empty1', // 2 AS "1"
      'ab', // AS "a + b"
      'fooBarBaz', // fooBarBaz
    ]);
  });
}

@@ -20,6 +20,8 @@ const Map<String, ResolvedType?> _types = {
      ResolvedType(type: BasicType.text),
  'SELECT * FROM demo WHERE content IN ?':
      ResolvedType(type: BasicType.text, isArray: true),
  'SELECT * FROM demo WHERE content IN ? OR content = ?2':
      ResolvedType(type: BasicType.text, isArray: true),
  'SELECT * FROM demo WHERE content IN (?)':
      ResolvedType(type: BasicType.text, isArray: false),
  'SELECT * FROM demo JOIN tbl ON demo.id = tbl.id WHERE date = ?':