mirror of https://github.com/AMT-Cheif/drift.git

Commit eb0c5da412: Merge branch 'develop' into single_selectable
@@ -73,7 +73,7 @@ LazyDatabase(() async {
 ```
 
 Note: If you haven't shipped a version with `moor_flutter` to your users yet, you can drop the dependency
-on `sqflite`. Instead, you can use `path_provider` which [works on Desktop](https://pub.dev/packages/path_provider).
+on `sqflite`. Instead, you can use `path_provider` which [works on Desktop](https://github.com/flutter/plugins/tree/master/packages/path_provider).
 Please be aware that `FlutterQueryExecutor.inDatabaseFolder` might yield a different folder than
 `path_provider` on Android. This can cause data loss if you've already shipped a version using
 `moor_flutter`. In that case, using `getDatabasePath` from sqflite is the suggested solution.

@@ -1,3 +1,9 @@
+## 4.2.1
+
+- Deprecate `readBool`, `readString`, `readInt`, `readDouble`, `readDateTime`
+  and `readBlob` in `QueryRow`.
+  Use `read` directly (which supports nullable types).
+
 ## 4.2.0
 
 - Fixed subqueries not affecting stream updates

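The deprecation above replaces the typed reader methods on `QueryRow` with the generic `read<T>`, which also handles nullable columns. A minimal migration sketch for a `customSelect` mapping callback; the result class and column names here are illustrative, not from this commit:

```dart
import 'package:moor/moor.dart';

// Hypothetical result class showing the move from the deprecated typed
// readers to the generic read<T>.
class TotalWeightRow {
  final String title;
  final int? totalWeight; // read<int?> returns null for NULL columns
  TotalWeightRow({required this.title, this.totalWeight});
}

TotalWeightRow mapRow(QueryRow row) {
  return TotalWeightRow(
    title: row.read<String>('title'),            // was: row.readString('title')
    totalWeight: row.read<int?>('total_weight'), // was: row.readInt('total_weight')
  );
}
```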
@@ -14,7 +14,7 @@ targets:
 named_parameters: true
 new_sql_code_generation: true
 sqlite:
-  version: "3.34"
+  version: "3.35"
   modules:
     - json1
     - fts5

@@ -967,13 +967,13 @@ abstract class _$Database extends GeneratedDatabase {
   $IngredientInRecipesTable(this);
   Selectable<TotalWeightResult> totalWeight() {
     return customSelect(
-        'SELECT r.title, SUM(ir.amount)AS total_weight FROM recipes AS r INNER JOIN recipe_ingredients AS ir ON ir.recipe = r.id GROUP BY r.id',
+        'SELECT r.title, SUM(ir.amount) AS total_weight FROM recipes AS r INNER JOIN recipe_ingredients AS ir ON ir.recipe = r.id GROUP BY r.id',
         variables: [],
         readsFrom: {recipes, ingredientInRecipes}).map((QueryRow row) {
       return TotalWeightResult(
         row: row,
-        title: row.readString('title'),
-        totalWeight: row.readInt('total_weight'),
+        title: row.read<String>('title'),
+        totalWeight: row.read<int>('total_weight'),
       );
     });
   }

@@ -52,7 +52,7 @@ abstract class DatabaseConnectionUser {
 
   /// Creates an auto-updating stream from the given select statement. This
   /// method should not be used directly.
-  Stream<T> createStream<T>(QueryStreamFetcher<T> stmt) =>
+  Stream<List<Map<String, Object?>>> createStream(QueryStreamFetcher stmt) =>
       streamQueries.registerStream(stmt);
 
   /// Creates a copy of the table with an alias so that it can be used in the
@@ -260,6 +260,27 @@ abstract class DatabaseConnectionUser {
     );
   }
+
+  /// Runs an `INSERT`, `UPDATE` or `DELETE` statement returning rows.
+  ///
+  /// You can use the [updates] parameter so that moor knows which tables are
+  /// affected by your query. All select streams that depend on a table
+  /// specified there will then update their data. For more accurate results,
+  /// you can also set the [updateKind] parameter.
+  /// This is optional, but can improve the accuracy of query updates,
+  /// especially when using triggers.
+  Future<List<QueryRow>> customWriteReturning(
+    String query, {
+    List<Variable> variables = const [],
+    Set<TableInfo>? updates,
+    UpdateKind? updateKind,
+  }) {
+    return _customWrite(query, variables, updates, updateKind,
+        (executor, sql, vars) async {
+      final rows = await executor.runSelect(sql, vars);
+      return [for (final row in rows) QueryRow(row, attachedDatabase)];
+    });
+  }
 
   /// Common logic for [customUpdate] and [customInsert] which takes care of
   /// mapping the variables, running the query and optionally informing the
   /// stream-queries.

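A hedged sketch of calling the new `customWriteReturning` from application code; the `users` table and SQL are illustrative, and RETURNING needs a sqlite3 build that supports it (3.35 or later):

```dart
import 'package:moor/moor.dart';

// Sketch: run an INSERT .. RETURNING and receive the inserted rows as QueryRows.
// Optionally pass `updates: {...}` so dependent select streams refresh.
Future<List<QueryRow>> insertUserReturning(GeneratedDatabase db, String name) {
  return db.customWriteReturning(
    'INSERT INTO users (name) VALUES (?) RETURNING *',
    variables: [Variable<String>(name)],
    updateKind: UpdateKind.insert,
  );
}
```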
@ -1,3 +1,4 @@
|
|||
import 'package:meta/meta.dart';
|
||||
import 'package:moor/src/runtime/api/runtime_api.dart';
|
||||
|
||||
import 'stream_queries.dart';
|
||||
|
@ -6,6 +7,7 @@ import 'stream_queries.dart';
|
|||
/// available delegate.
|
||||
/// This class is internal and should not be exposed to moor users. It's used
|
||||
/// through a delayed database connection.
|
||||
@internal
|
||||
class DelayedStreamQueryStore implements StreamQueryStore {
|
||||
late Future<StreamQueryStore> _delegate;
|
||||
StreamQueryStore? _resolved;
|
||||
|
@ -35,7 +37,8 @@ class DelayedStreamQueryStore implements StreamQueryStore {
|
|||
}
|
||||
|
||||
@override
|
||||
Stream<T> registerStream<T>(QueryStreamFetcher<T> fetcher) {
|
||||
Stream<List<Map<String, Object?>>> registerStream(
|
||||
QueryStreamFetcher fetcher) {
|
||||
return Stream.fromFuture(_delegate)
|
||||
.asyncExpand((resolved) => resolved.registerStream(fetcher))
|
||||
.asBroadcastStream();
|
||||
|
|
|
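The `DelayedStreamQueryStore` above keeps its existing trick for a delegate that is not ready yet: wrap the future in a stream and forward the delegate's events once it resolves. A standalone sketch of that pattern with plain streams (names are illustrative):

```dart
import 'dart:async';

// Sketch: forward events from a stream that only becomes available later,
// mirroring Stream.fromFuture(...).asyncExpand(...) in the store above.
Stream<T> delayed<T>(Future<Stream<T>> pendingSource) {
  return Stream.fromFuture(pendingSource)
      .asyncExpand((source) => source) // flatten once the delegate exists
      .asBroadcastStream();            // allow multiple listeners, like the store
}

Future<void> main() async {
  final events = delayed(Future.value(Stream.fromIterable([1, 2, 3])));
  print(await events.toList()); // [1, 2, 3]
}
```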
@ -2,18 +2,20 @@ import 'dart:async';
|
|||
import 'dart:collection';
|
||||
|
||||
import 'package:collection/collection.dart';
|
||||
import 'package:meta/meta.dart';
|
||||
import 'package:moor/moor.dart';
|
||||
import 'package:moor/src/utils/start_with_value_transformer.dart';
|
||||
import 'package:pedantic/pedantic.dart';
|
||||
|
||||
const _listEquality = ListEquality<dynamic>();
|
||||
const _listEquality = ListEquality<Object?>();
|
||||
|
||||
// This is an internal moor library that's never exported to users.
|
||||
// ignore_for_file: public_member_api_docs
|
||||
|
||||
/// Representation of a select statement that knows from which tables the
|
||||
/// statement is reading its data and how to execute the query.
|
||||
class QueryStreamFetcher<T> {
|
||||
@internal
|
||||
class QueryStreamFetcher {
|
||||
/// Table updates that will affect this stream.
|
||||
///
|
||||
/// If any of these tables changes, the stream must fetch its data again.
|
||||
|
@ -24,7 +26,7 @@ class QueryStreamFetcher<T> {
|
|||
final StreamKey? key;
|
||||
|
||||
/// Function that asynchronously fetches the latest set of data.
|
||||
final Future<T> Function() fetchData;
|
||||
final Future<List<Map<String, Object?>>> Function() fetchData;
|
||||
|
||||
QueryStreamFetcher(
|
||||
{required this.readsFrom, this.key, required this.fetchData});
|
||||
|
@ -36,20 +38,16 @@ class QueryStreamFetcher<T> {
|
|||
/// As two equal statements always yield the same result when operating on the
|
||||
/// same data, this can make streams more efficient as we can return the same
|
||||
/// stream for two equivalent queries.
|
||||
@internal
|
||||
class StreamKey {
|
||||
final String sql;
|
||||
final List<dynamic> variables;
|
||||
|
||||
/// Used to differentiate between custom streams, which return a [QueryRow],
|
||||
/// and regular streams, which return an instance of a generated data class.
|
||||
final Type returnType;
|
||||
|
||||
StreamKey(this.sql, this.variables, this.returnType);
|
||||
StreamKey(this.sql, this.variables);
|
||||
|
||||
@override
|
||||
int get hashCode {
|
||||
return (sql.hashCode * 31 + _listEquality.hash(variables)) * 31 +
|
||||
returnType.hashCode;
|
||||
return $mrjf($mrjc(sql.hashCode, _listEquality.hash(variables)));
|
||||
}
|
||||
|
||||
@override
|
||||
|
@ -57,18 +55,19 @@ class StreamKey {
|
|||
return identical(this, other) ||
|
||||
(other is StreamKey &&
|
||||
other.sql == sql &&
|
||||
_listEquality.equals(other.variables, variables) &&
|
||||
other.returnType == returnType);
|
||||
_listEquality.equals(other.variables, variables));
|
||||
}
|
||||
}
|
||||
|
||||
/// Keeps track of active streams created from [SimpleSelectStatement]s and
|
||||
/// updates them when needed.
|
||||
@internal
|
||||
class StreamQueryStore {
|
||||
final Map<StreamKey, QueryStream> _activeKeyStreams = {};
|
||||
final HashSet<StreamKey?> _keysPendingRemoval = HashSet<StreamKey?>();
|
||||
|
||||
bool _isShuttingDown = false;
|
||||
|
||||
// we track pending timers since Flutter throws an exception when timers
|
||||
// remain after a test run.
|
||||
final Set<Completer> _pendingTimers = {};
|
||||
|
@ -84,19 +83,20 @@ class StreamQueryStore {
|
|||
StreamQueryStore();
|
||||
|
||||
/// Creates a new stream from the select statement.
|
||||
Stream<T> registerStream<T>(QueryStreamFetcher<T> fetcher) {
|
||||
Stream<List<Map<String, Object?>>> registerStream(
|
||||
QueryStreamFetcher fetcher) {
|
||||
final key = fetcher.key;
|
||||
|
||||
if (key != null) {
|
||||
final cached = _activeKeyStreams[key];
|
||||
if (cached != null) {
|
||||
return (cached as QueryStream<T>).stream;
|
||||
return cached.stream;
|
||||
}
|
||||
}
|
||||
|
||||
// no cached instance found, create a new stream and register it so later
|
||||
// requests with the same key can be cached.
|
||||
final stream = QueryStream<T>(fetcher, this);
|
||||
final stream = QueryStream(fetcher, this);
|
||||
// todo this adds the stream to a map, where it will only be removed when
|
||||
// somebody listens to it and later calls .cancel(). Failing to do so will
|
||||
// cause a memory leak. Is there any way we can work around it? Perhaps a
|
||||
|
@ -180,19 +180,20 @@ class StreamQueryStore {
|
|||
}
|
||||
}
|
||||
|
||||
class QueryStream<T> {
|
||||
final QueryStreamFetcher<T> _fetcher;
|
||||
class QueryStream {
|
||||
final QueryStreamFetcher _fetcher;
|
||||
final StreamQueryStore _store;
|
||||
|
||||
late final StreamController<T> _controller = StreamController.broadcast(
|
||||
late final StreamController<List<Map<String, Object?>>> _controller =
|
||||
StreamController.broadcast(
|
||||
onListen: _onListen,
|
||||
onCancel: _onCancel,
|
||||
);
|
||||
StreamSubscription? _tablesChangedSubscription;
|
||||
|
||||
T? _lastData;
|
||||
List<Map<String, Object?>>? _lastData;
|
||||
|
||||
Stream<T> get stream {
|
||||
Stream<List<Map<String, Object?>>> get stream {
|
||||
return _controller.stream.transform(StartWithValueTransformer(_cachedData));
|
||||
}
|
||||
|
||||
|
@ -202,7 +203,7 @@ class QueryStream<T> {
|
|||
|
||||
/// Called when we have a new listener; makes the stream query behave similarly
/// to a `BehaviorSubject` from rxdart.
|
||||
T? _cachedData() => _lastData;
|
||||
List<Map<String, Object?>>? _cachedData() => _lastData;
|
||||
|
||||
void _onListen() {
|
||||
_store.markAsOpened(this);
|
||||
|
@ -239,7 +240,7 @@ class QueryStream<T> {
|
|||
}
|
||||
|
||||
Future<void> fetchAndEmitData() async {
|
||||
T data;
|
||||
List<Map<String, Object?>> data;
|
||||
|
||||
try {
|
||||
data = await _fetcher.fetchData();
|
||||
|
|
|
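With the return type removed from `StreamKey` in the hunks above, two watches of the same SQL and arguments can share one underlying stream of raw rows, no matter how each caller maps them. A standalone sketch of such a key, using `Object.hash` instead of moor's internal `$mrjf`/`$mrjc` helpers:

```dart
import 'package:collection/collection.dart';

// Sketch: a cache key that only considers the SQL text and bound variables.
class RawStreamKey {
  static const _listEquality = ListEquality<Object?>();

  final String sql;
  final List<Object?> variables;

  RawStreamKey(this.sql, this.variables);

  @override
  int get hashCode => Object.hash(sql, _listEquality.hash(variables));

  @override
  bool operator ==(Object other) =>
      identical(this, other) ||
      (other is RawStreamKey &&
          other.sql == sql &&
          _listEquality.equals(other.variables, variables));
}

void main() {
  final a = RawStreamKey('SELECT * FROM users;', []);
  final b = RawStreamKey('SELECT * FROM users;', []);
  print(a == b); // true: both callers would reuse the same cached row stream
}
```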
@ -22,24 +22,24 @@ class CustomSelectStatement with Selectable<QueryRow> {
|
|||
|
||||
/// Constructs a fetcher for this query. The fetcher is responsible for
|
||||
/// updating a stream at the right moment.
|
||||
QueryStreamFetcher<List<QueryRow>> _constructFetcher() {
|
||||
QueryStreamFetcher _constructFetcher() {
|
||||
final args = _mapArgs();
|
||||
|
||||
return QueryStreamFetcher<List<QueryRow>>(
|
||||
return QueryStreamFetcher(
|
||||
readsFrom: TableUpdateQuery.onAllTables(tables),
|
||||
fetchData: () => _executeWithMappedArgs(args),
|
||||
key: StreamKey(query, args, QueryRow),
|
||||
fetchData: () => _executeRaw(args),
|
||||
key: StreamKey(query, args),
|
||||
);
|
||||
}
|
||||
|
||||
@override
|
||||
Future<List<QueryRow>> get() async {
|
||||
return _executeWithMappedArgs(_mapArgs());
|
||||
Future<List<QueryRow>> get() {
|
||||
return _executeRaw(_mapArgs()).then(_mapDbResponse);
|
||||
}
|
||||
|
||||
@override
|
||||
Stream<List<QueryRow>> watch() {
|
||||
return _db.createStream(_constructFetcher());
|
||||
return _db.createStream(_constructFetcher()).map(_mapDbResponse);
|
||||
}
|
||||
|
||||
List<dynamic> _mapArgs() {
|
||||
|
@ -47,12 +47,12 @@ class CustomSelectStatement with Selectable<QueryRow> {
|
|||
return variables.map((v) => v.mapToSimpleValue(ctx)).toList();
|
||||
}
|
||||
|
||||
Future<List<QueryRow>> _executeWithMappedArgs(
|
||||
List<dynamic> mappedArgs) async {
|
||||
final result =
|
||||
await _db.doWhenOpened((e) => e.runSelect(query, mappedArgs));
|
||||
Future<List<Map<String, Object?>>> _executeRaw(List<Object?> mappedArgs) {
|
||||
return _db.doWhenOpened((e) => e.runSelect(query, mappedArgs));
|
||||
}
|
||||
|
||||
return result.map((row) => QueryRow(row, _db)).toList();
|
||||
List<QueryRow> _mapDbResponse(List<Map<String, Object?>> rows) {
|
||||
return rows.map((row) => QueryRow(row, _db)).toList();
|
||||
}
|
||||
}
|
||||
|
||||
|
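The refactor above splits `CustomSelectStatement` into a raw fetch (`_executeRaw`) plus a separate mapping step, so row mapping now happens after the shared stream is created. From the outside the API looks the same; a hedged usage sketch with illustrative table and column names:

```dart
import 'package:moor/moor.dart';

// Sketch: watching a raw query and mapping every emitted row list to a typed list.
Stream<List<String>> watchTitles(GeneratedDatabase db) {
  return db
      .customSelect('SELECT title FROM todos') // pass readsFrom: {...} so updates are detected
      .watch()
      .map((rows) => [for (final row in rows) row.read<String>('title')]);
}
```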
@ -81,20 +81,26 @@ class QueryRow {
|
|||
}
|
||||
|
||||
/// Reads a bool from the column named [key].
|
||||
@Deprecated('Use read<bool>(key) directly')
|
||||
bool readBool(String key) => read<bool>(key);
|
||||
|
||||
/// Reads a string from the column named [key].
|
||||
@Deprecated('Use read<String>(key) directly')
|
||||
String readString(String key) => read<String>(key);
|
||||
|
||||
/// Reads an int from the column named [key].
|
||||
@Deprecated('Use read<int>(key) directly')
|
||||
int readInt(String key) => read<int>(key);
|
||||
|
||||
/// Reads a double from the column named [key].
|
||||
@Deprecated('Use read<double>(key) directly')
|
||||
double readDouble(String key) => read<double>(key);
|
||||
|
||||
/// Reads a [DateTime] from the column named [key].
|
||||
@Deprecated('Use read<DateTime>(key) directly')
|
||||
DateTime readDateTime(String key) => read<DateTime>(key);
|
||||
|
||||
/// Reads a [Uint8List] from the column named [key].
|
||||
@Deprecated('Use read<Uint8List>(key) directly')
|
||||
Uint8List readBlob(String key) => read<Uint8List>(key);
|
||||
}
|
||||
|
|
|
@ -44,16 +44,31 @@ class SimpleSelectStatement<T extends Table, D extends DataClass>
|
|||
}
|
||||
|
||||
@override
|
||||
Future<List<D>> get() async {
|
||||
Future<List<D>> get() {
|
||||
final ctx = constructQuery();
|
||||
return _getWithQuery(ctx);
|
||||
return _getRaw(ctx).then(_mapResponse);
|
||||
}
|
||||
|
||||
Future<List<D>> _getWithQuery(GenerationContext ctx) async {
|
||||
final results = await ctx.executor!.doWhenOpened((e) async {
|
||||
return await e.runSelect(ctx.sql, ctx.boundVariables);
|
||||
@override
|
||||
Stream<List<D>> watch() {
|
||||
final query = constructQuery();
|
||||
final fetcher = QueryStreamFetcher(
|
||||
readsFrom: TableUpdateQuery.onAllTables(query.watchedTables),
|
||||
fetchData: () => _getRaw(query),
|
||||
key: StreamKey(query.sql, query.boundVariables),
|
||||
);
|
||||
|
||||
return database.createStream(fetcher).map(_mapResponse);
|
||||
}
|
||||
|
||||
Future<List<Map<String, Object?>>> _getRaw(GenerationContext ctx) {
|
||||
return database.doWhenOpened((e) {
|
||||
return e.runSelect(ctx.sql, ctx.boundVariables);
|
||||
});
|
||||
return results.map(table.map).toList();
|
||||
}
|
||||
|
||||
List<D> _mapResponse(List<Map<String, Object?>> rows) {
|
||||
return rows.map(table.map).toList();
|
||||
}
|
||||
|
||||
/// Creates a select statement that operates on more than one table by
|
||||
|
@ -118,18 +133,6 @@ class SimpleSelectStatement<T extends Table, D extends DataClass>
|
|||
void orderBy(List<OrderClauseGenerator<T>> clauses) {
|
||||
orderByExpr = OrderBy(clauses.map((t) => t(table.asDslTable)).toList());
|
||||
}
|
||||
|
||||
@override
|
||||
Stream<List<D>> watch() {
|
||||
final query = constructQuery();
|
||||
final fetcher = QueryStreamFetcher<List<D>>(
|
||||
readsFrom: TableUpdateQuery.onAllTables(query.watchedTables),
|
||||
fetchData: () => _getWithQuery(query),
|
||||
key: StreamKey(query.sql, query.boundVariables, D),
|
||||
);
|
||||
|
||||
return database.createStream(fetcher);
|
||||
}
|
||||
}
|
||||
|
||||
String _beginOfSelect(bool distinct) {
|
||||
|
|
|
@ -181,23 +181,26 @@ class JoinedSelectStatement<FirstT extends Table, FirstD extends DataClass>
|
|||
@override
|
||||
Stream<List<TypedResult>> watch() {
|
||||
final ctx = constructQuery();
|
||||
final fetcher = QueryStreamFetcher<List<TypedResult>>(
|
||||
final fetcher = QueryStreamFetcher(
|
||||
readsFrom: TableUpdateQuery.onAllTables(ctx.watchedTables),
|
||||
fetchData: () => _getWithQuery(ctx),
|
||||
key: StreamKey(ctx.sql, ctx.boundVariables, TypedResult),
|
||||
fetchData: () => _getRaw(ctx),
|
||||
key: StreamKey(ctx.sql, ctx.boundVariables),
|
||||
);
|
||||
|
||||
return database.createStream(fetcher);
|
||||
return database
|
||||
.createStream(fetcher)
|
||||
.map((rows) => _mapResponse(ctx, rows));
|
||||
}
|
||||
|
||||
@override
|
||||
Future<List<TypedResult>> get() async {
|
||||
final ctx = constructQuery();
|
||||
return _getWithQuery(ctx);
|
||||
final raw = await _getRaw(ctx);
|
||||
return _mapResponse(ctx, raw);
|
||||
}
|
||||
|
||||
Future<List<TypedResult>> _getWithQuery(GenerationContext ctx) async {
|
||||
final results = await ctx.executor!.doWhenOpened((e) async {
|
||||
Future<List<Map<String, Object?>>> _getRaw(GenerationContext ctx) {
|
||||
return ctx.executor!.doWhenOpened((e) async {
|
||||
try {
|
||||
return await e.runSelect(ctx.sql, ctx.boundVariables);
|
||||
} catch (e, s) {
|
||||
|
@ -211,8 +214,11 @@ class JoinedSelectStatement<FirstT extends Table, FirstD extends DataClass>
|
|||
rethrow;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return results.map((row) {
|
||||
List<TypedResult> _mapResponse(
|
||||
GenerationContext ctx, List<Map<String, Object?>> rows) {
|
||||
return rows.map((row) {
|
||||
final readTables = <TableInfo, dynamic>{};
|
||||
final readColumns = <Expression, dynamic>{};
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
name: moor
|
||||
description: Moor is a safe and reactive persistence library for Dart applications
|
||||
version: 4.2.0
|
||||
version: 4.2.1
|
||||
repository: https://github.com/simolus3/moor
|
||||
homepage: https://moor.simonbinder.eu/
|
||||
issue_tracker: https://github.com/simolus3/moor/issues
|
||||
|
|
|
@ -1618,20 +1618,24 @@ abstract class _$CustomTablesDb extends GeneratedDatabase {
|
|||
readsFrom: {config}).map(config.mapFromRow);
|
||||
}
|
||||
|
||||
Selectable<String> typeConverterVar(SyncType? var1, List<SyncType?> var2) {
|
||||
Selectable<String> typeConverterVar(SyncType? var1, List<SyncType?> var2,
|
||||
{Expression<bool> pred = const CustomExpression('(TRUE)')}) {
|
||||
var $arrayStartIndex = 2;
|
||||
final generatedpred = $write(pred);
|
||||
$arrayStartIndex += generatedpred.amountOfVariables;
|
||||
final expandedvar2 = $expandVar($arrayStartIndex, var2.length);
|
||||
$arrayStartIndex += var2.length;
|
||||
return customSelect(
|
||||
'SELECT config_key FROM config WHERE sync_state = ? OR sync_state_implicit IN ($expandedvar2)',
|
||||
'SELECT config_key FROM config WHERE ${generatedpred.sql} AND(sync_state = ? OR sync_state_implicit IN ($expandedvar2))',
|
||||
variables: [
|
||||
...generatedpred.introducedVariables,
|
||||
Variable<int?>(ConfigTable.$converter0.mapToSql(var1)),
|
||||
for (var $ in var2)
|
||||
Variable<int?>(ConfigTable.$converter1.mapToSql($))
|
||||
],
|
||||
readsFrom: {
|
||||
config
|
||||
}).map((QueryRow row) => row.readString('config_key'));
|
||||
}).map((QueryRow row) => row.read<String>('config_key'));
|
||||
}
|
||||
|
||||
Selectable<JsonResult> tableValued() {
|
||||
|
@ -1641,21 +1645,21 @@ abstract class _$CustomTablesDb extends GeneratedDatabase {
|
|||
readsFrom: {config}).map((QueryRow row) {
|
||||
return JsonResult(
|
||||
row: row,
|
||||
key: row.readString('key'),
|
||||
value: row.readString('value'),
|
||||
key: row.read<String>('key'),
|
||||
value: row.read<String?>('value'),
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
Selectable<JsonResult> another() {
|
||||
return customSelect(
|
||||
'SELECT \'one\' AS "key", NULLIF(\'two\', \'another\')AS value',
|
||||
'SELECT \'one\' AS "key", NULLIF(\'two\', \'another\') AS value',
|
||||
variables: [],
|
||||
readsFrom: {}).map((QueryRow row) {
|
||||
return JsonResult(
|
||||
row: row,
|
||||
key: row.readString('key'),
|
||||
value: row.readString('value'),
|
||||
key: row.read<String>('key'),
|
||||
value: row.read<String?>('value'),
|
||||
);
|
||||
});
|
||||
}
|
||||
|
@ -1668,8 +1672,8 @@ abstract class _$CustomTablesDb extends GeneratedDatabase {
|
|||
readsFrom: {withConstraints, withDefaults}).map((QueryRow row) {
|
||||
return MultipleResult(
|
||||
row: row,
|
||||
a: row.readString('a'),
|
||||
b: row.readInt('b'),
|
||||
a: row.read<String?>('a'),
|
||||
b: row.read<int?>('b'),
|
||||
c: withConstraints.mapFromRowOrNull(row, tablePrefix: 'nested_0'),
|
||||
);
|
||||
});
|
||||
|
@ -1690,12 +1694,13 @@ abstract class _$CustomTablesDb extends GeneratedDatabase {
|
|||
readsFrom: {config}).map((QueryRow row) {
|
||||
return ReadRowIdResult(
|
||||
row: row,
|
||||
rowid: row.readInt('rowid'),
|
||||
configKey: row.readString('config_key'),
|
||||
configValue: row.readString('config_value'),
|
||||
syncState: ConfigTable.$converter0.mapToDart(row.readInt('sync_state')),
|
||||
rowid: row.read<int>('rowid'),
|
||||
configKey: row.read<String>('config_key'),
|
||||
configValue: row.read<String?>('config_value'),
|
||||
syncState:
|
||||
ConfigTable.$converter0.mapToDart(row.read<int?>('sync_state')),
|
||||
syncStateImplicit: ConfigTable.$converter1
|
||||
.mapToDart(row.readInt('sync_state_implicit')),
|
||||
.mapToDart(row.read<int?>('sync_state_implicit')),
|
||||
);
|
||||
});
|
||||
}
|
||||
|
@ -1705,11 +1710,12 @@ abstract class _$CustomTablesDb extends GeneratedDatabase {
|
|||
variables: [], readsFrom: {config}).map((QueryRow row) {
|
||||
return ReadViewResult(
|
||||
row: row,
|
||||
configKey: row.readString('config_key'),
|
||||
configValue: row.readString('config_value'),
|
||||
syncState: ConfigTable.$converter0.mapToDart(row.readInt('sync_state')),
|
||||
configKey: row.read<String>('config_key'),
|
||||
configValue: row.read<String?>('config_value'),
|
||||
syncState:
|
||||
ConfigTable.$converter0.mapToDart(row.read<int?>('sync_state')),
|
||||
syncStateImplicit: ConfigTable.$converter1
|
||||
.mapToDart(row.readInt('sync_state_implicit')),
|
||||
.mapToDart(row.read<int?>('sync_state_implicit')),
|
||||
);
|
||||
});
|
||||
}
|
||||
|
@ -1718,7 +1724,28 @@ abstract class _$CustomTablesDb extends GeneratedDatabase {
|
|||
return customSelect(
|
||||
'WITH RECURSIVE cnt(x)AS (SELECT 1 UNION ALL SELECT x + 1 FROM cnt LIMIT 1000000) SELECT x FROM cnt',
|
||||
variables: [],
|
||||
readsFrom: {}).map((QueryRow row) => row.readInt('x'));
|
||||
readsFrom: {}).map((QueryRow row) => row.read<int>('x'));
|
||||
}
|
||||
|
||||
Selectable<int?> nullableQuery() {
|
||||
return customSelect('SELECT MAX(oid) FROM config',
|
||||
variables: [],
|
||||
readsFrom: {config}).map((QueryRow row) => row.read<int?>('MAX(oid)'));
|
||||
}
|
||||
|
||||
Future<List<Config>> addConfig(
|
||||
String var1, String? var2, SyncType? var3, SyncType? var4) {
|
||||
return customWriteReturning(
|
||||
'INSERT INTO config VALUES (?, ?, ?, ?) RETURNING *',
|
||||
variables: [
|
||||
Variable<String>(var1),
|
||||
Variable<String?>(var2),
|
||||
Variable<int?>(ConfigTable.$converter0.mapToSql(var3)),
|
||||
Variable<int?>(ConfigTable.$converter1.mapToSql(var4))
|
||||
],
|
||||
updates: {
|
||||
config
|
||||
}).then((rows) => rows.map(config.mapFromRow).toList());
|
||||
}
|
||||
|
||||
Future<int> writeConfig({required String key, String? value}) {
|
||||
|
|
|
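The generated `addConfig` above shows how a `RETURNING` query surfaces in the API: the statement runs through `customWriteReturning` and the returned rows are mapped with the table's `mapFromRow`. A hedged usage sketch:

```dart
// Sketch: insert a row and read the inserted data back in a single statement.
// Needs a sqlite3 build with RETURNING support (3.35 or newer).
Future<Config> insertConfig(CustomTablesDb db) async {
  final inserted = await db.addConfig('some_key', 'some value', null, null);
  return inserted.single; // the freshly written Config row
}
```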
@ -57,7 +57,8 @@ FROM config WHERE config_key = ?;
|
|||
readMultiple: SELECT * FROM config WHERE config_key IN ? ORDER BY $clause;
|
||||
readDynamic ($predicate = TRUE): SELECT * FROM config WHERE $predicate;
|
||||
|
||||
typeConverterVar: SELECT config_key FROM config WHERE sync_state = ? OR sync_state_implicit IN ?;
|
||||
typeConverterVar($pred = TRUE): SELECT config_key FROM config WHERE $pred AND
|
||||
(sync_state = ? OR sync_state_implicit IN ?);
|
||||
|
||||
tableValued AS JsonResult:
|
||||
SELECT "key", "value"
|
||||
|
@ -88,3 +89,6 @@ cfeTest: WITH RECURSIVE
|
|||
LIMIT 1000000
|
||||
)
|
||||
SELECT x FROM cnt;
|
||||
|
||||
nullableQuery: SELECT MAX(oid) FROM config;
|
||||
addConfig: INSERT INTO config VALUES (?, ?, ?, ?) RETURNING *;
|
||||
|
|
|
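The `$pred` placeholder added to `typeConverterVar` above becomes an optional `Expression<bool>` parameter on the generated method, defaulting to `(TRUE)`. A hedged sketch of passing a predicate from Dart; the `config.configKey` getter is an assumption based on the generated code shown earlier:

```dart
// Sketch: restricting the query with a Dart-side expression for the $pred slot.
Future<List<String>> dirtySettingKeys(CustomTablesDb db) {
  return db
      .typeConverterVar(
        SyncType.locallyCreated,
        [SyncType.locallyUpdated, SyncType.synchronized],
        pred: db.config.configKey.like('user/%'),
      )
      .get();
}
```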
@ -1490,13 +1490,13 @@ abstract class _$TodoDb extends GeneratedDatabase {
|
|||
readsFrom: {categories, todosTable}).map((QueryRow row) {
|
||||
return AllTodosWithCategoryResult(
|
||||
row: row,
|
||||
id: row.readInt('id'),
|
||||
title: row.readString('title'),
|
||||
content: row.readString('content'),
|
||||
targetDate: row.readDateTime('target_date'),
|
||||
category: row.readInt('category'),
|
||||
catId: row.readInt('catId'),
|
||||
catDesc: row.readString('catDesc'),
|
||||
id: row.read<int>('id'),
|
||||
title: row.read<String?>('title'),
|
||||
content: row.read<String>('content'),
|
||||
targetDate: row.read<DateTime?>('target_date'),
|
||||
category: row.read<int?>('category'),
|
||||
catId: row.read<int>('catId'),
|
||||
catDesc: row.read<String>('catDesc'),
|
||||
);
|
||||
});
|
||||
}
|
||||
|
@ -1539,7 +1539,7 @@ abstract class _$TodoDb extends GeneratedDatabase {
|
|||
variables: [],
|
||||
readsFrom: {tableWithoutPK})
|
||||
.map((QueryRow row) => $TableWithoutPKTable.$converter0
|
||||
.mapToDart(row.readString('custom'))!);
|
||||
.mapToDart(row.read<String>('custom'))!);
|
||||
}
|
||||
|
||||
@override
|
||||
|
|
|
@ -56,33 +56,42 @@ class MockExecutor extends Mock implements QueryExecutor {
|
|||
@override
|
||||
SqlDialect get dialect =>
|
||||
_nsm(Invocation.getter(#dialect), SqlDialect.sqlite);
|
||||
|
||||
@override
|
||||
Future<bool> ensureOpen(QueryExecutorUser? user) =>
|
||||
_nsm(Invocation.method(#ensureOpen, [user]), Future.value(true));
|
||||
|
||||
@override
|
||||
Future<List<Map<String, Object?>>> runSelect(
|
||||
String? statement, List<Object?>? args) =>
|
||||
_nsm(Invocation.method(#runSelect, [statement, args]),
|
||||
Future.value(<Map<String, Object?>>[]));
|
||||
|
||||
@override
|
||||
Future<int> runInsert(String? statement, List<Object?>? args) =>
|
||||
_nsm(Invocation.method(#runInsert, [statement, args]), Future.value(0));
|
||||
|
||||
@override
|
||||
Future<int> runUpdate(String? statement, List<Object?>? args) =>
|
||||
_nsm(Invocation.method(#runUpdate, [statement, args]), Future.value(0));
|
||||
|
||||
@override
|
||||
Future<int> runDelete(String? statement, List<Object?>? args) =>
|
||||
_nsm(Invocation.method(#runDelete, [statement, args]), Future.value(0));
|
||||
|
||||
@override
|
||||
Future<void> runCustom(String? statement, [List<Object?>? args]) => _nsm(
|
||||
Invocation.method(#runCustom, [statement, args]), Future.value(null));
|
||||
|
||||
@override
|
||||
Future<void> runBatched(BatchedStatements? statements) =>
|
||||
_nsm(Invocation.method(#runBatched, [statements]), Future.value(null));
|
||||
|
||||
@override
|
||||
TransactionExecutor beginTransaction() =>
|
||||
_nsm(Invocation.method(#beginTransaction, []), transactions) ??
|
||||
transactions;
|
||||
|
||||
@override
|
||||
Future<void> close() =>
|
||||
_nsm(Invocation.method(#close, []), Future.value(null));
|
||||
|
@ -107,22 +116,28 @@ class MockTransactionExecutor extends MockExecutor
|
|||
|
||||
class MockStreamQueries extends Mock implements StreamQueryStore {
|
||||
@override
|
||||
Stream<T> registerStream<T>(QueryStreamFetcher<T>? fetcher) =>
|
||||
_nsm(Invocation.method(#registerStream, [fetcher]), Stream<T>.empty());
|
||||
Stream<List<Map<String, Object?>>> registerStream(
|
||||
QueryStreamFetcher? fetcher) =>
|
||||
_nsm(Invocation.method(#registerStream, [fetcher]),
|
||||
const Stream<Never>.empty());
|
||||
|
||||
@override
|
||||
Stream<Null?> updatesForSync(TableUpdateQuery? query) => _nsm(
|
||||
Invocation.method(#updatesForSync, [query]), const Stream<Never>.empty());
|
||||
|
||||
@override
|
||||
void handleTableUpdates(Set<TableUpdate>? updates) =>
|
||||
super.noSuchMethod(Invocation.method(#handleTableUpdates, [updates]));
|
||||
|
||||
@override
|
||||
void markAsClosed(
|
||||
QueryStream<dynamic>? stream, dynamic Function()? whenRemoved) =>
|
||||
void markAsClosed(QueryStream? stream, dynamic Function()? whenRemoved) =>
|
||||
super.noSuchMethod(
|
||||
Invocation.method(#markAsClosed, [stream, whenRemoved]));
|
||||
|
||||
@override
|
||||
void markAsOpened(QueryStream<dynamic>? stream) =>
|
||||
void markAsOpened(QueryStream? stream) =>
|
||||
super.noSuchMethod(Invocation.method(#markAsOpened, [stream]));
|
||||
|
||||
@override
|
||||
Future<void> close() =>
|
||||
_nsm(Invocation.method(#close, []), Future.value(null));
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import 'package:moor/ffi.dart';
|
||||
import 'package:moor/src/runtime/query_builder/query_builder.dart';
|
||||
import 'package:moor/src/runtime/query_builder/query_builder.dart' hide isNull;
|
||||
import 'package:sqlite3/sqlite3.dart';
|
||||
import 'package:test/test.dart';
|
||||
|
||||
import '../data/tables/converter.dart';
|
||||
|
@ -10,7 +11,7 @@ void main() {
|
|||
late CustomTablesDb db;
|
||||
|
||||
setUp(() {
|
||||
executor = VmDatabase.memory(logStatements: true);
|
||||
executor = VmDatabase.memory();
|
||||
db = CustomTablesDb(executor);
|
||||
});
|
||||
|
||||
|
@ -21,6 +22,11 @@ void main() {
|
|||
await db.doWhenOpened((e) => null);
|
||||
});
|
||||
|
||||
test('can use nullable columns', () async {
|
||||
await db.delete(db.config).go();
|
||||
await expectLater(db.nullableQuery().getSingle(), completion(isNull));
|
||||
});
|
||||
|
||||
group('views', () {
|
||||
test('can be selected from', () {
|
||||
return expectLater(db.readView().get(), completion(isEmpty));
|
||||
|
@ -55,4 +61,25 @@ void main() {
|
|||
await expectation;
|
||||
});
|
||||
});
|
||||
|
||||
final sqliteVersion = sqlite3.version;
|
||||
final hasReturning = sqliteVersion.versionNumber > 3035000;
|
||||
|
||||
group('returning', () {
|
||||
test('for custom inserts', () async {
|
||||
final result = await db.addConfig(
|
||||
'key2', 'val', SyncType.locallyCreated, SyncType.locallyCreated);
|
||||
|
||||
expect(result, hasLength(1));
|
||||
expect(
|
||||
result.single,
|
||||
Config(
|
||||
configKey: 'key2',
|
||||
configValue: 'val',
|
||||
syncState: SyncType.locallyCreated,
|
||||
syncStateImplicit: SyncType.locallyCreated,
|
||||
),
|
||||
);
|
||||
});
|
||||
}, skip: hasReturning ? null : 'RETURNING not supported by current sqlite');
|
||||
}
|
||||
|
|
|
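The test group above is skipped when the bundled sqlite3 library is too old for `RETURNING`. The same gate can be reused elsewhere; `versionNumber` packs the version as `major * 1000000 + minor * 1000 + patch`:

```dart
import 'package:sqlite3/sqlite3.dart';

// Sketch: mirrors the runtime check used by the test above.
bool get supportsReturning => sqlite3.version.versionNumber > 3035000;

void main() {
  print('sqlite ${sqlite3.version.libVersion}, RETURNING: $supportsReturning');
}
```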
@ -227,6 +227,9 @@ void main() {
|
|||
await db.typeConverterVar(SyncType.locallyCreated,
|
||||
[SyncType.locallyUpdated, SyncType.synchronized]).get();
|
||||
|
||||
verify(mock.runSelect(any, [0, 1, 2]));
|
||||
verify(mock.runSelect(
|
||||
'SELECT config_key FROM config WHERE (TRUE) AND(sync_state = ? '
|
||||
'OR sync_state_implicit IN (?2, ?3))',
|
||||
[0, 1, 2]));
|
||||
});
|
||||
}
|
||||
|
|
|
@ -182,11 +182,10 @@ void main() {
|
|||
});
|
||||
|
||||
group('stream keys', () {
|
||||
final keyA = StreamKey('SELECT * FROM users;', [], User);
|
||||
final keyB = StreamKey('SELECT * FROM users;', [], User);
|
||||
final keyCustom = StreamKey('SELECT * FROM users;', [], QueryRow);
|
||||
final keyCustomTodos = StreamKey('SELECT * FROM todos;', [], QueryRow);
|
||||
final keyArgs = StreamKey('SELECT * FROM users;', ['name'], User);
|
||||
final keyA = StreamKey('SELECT * FROM users;', []);
|
||||
final keyB = StreamKey('SELECT * FROM users;', []);
|
||||
final keyTodos = StreamKey('SELECT * FROM todos;', []);
|
||||
final keyArgs = StreamKey('SELECT * FROM users;', ['name']);
|
||||
|
||||
test('are equal for same parameters', () {
|
||||
expect(keyA, equals(keyB));
|
||||
|
@ -194,19 +193,14 @@ void main() {
|
|||
});
|
||||
|
||||
test('are not equal for different queries', () {
|
||||
expect(keyCustomTodos, isNot(keyCustom));
|
||||
expect(keyCustomTodos.hashCode, isNot(keyCustom.hashCode));
|
||||
expect(keyA, isNot(keyTodos));
|
||||
expect(keyA.hashCode, isNot(keyTodos.hashCode));
|
||||
});
|
||||
|
||||
test('are not equal for different variables', () {
|
||||
expect(keyArgs, isNot(keyA));
|
||||
expect(keyArgs.hashCode, isNot(keyA.hashCode));
|
||||
});
|
||||
|
||||
test('are not equal for different types', () {
|
||||
expect(keyCustom, isNot(keyA));
|
||||
expect(keyCustom.hashCode, isNot(keyA.hashCode));
|
||||
});
|
||||
});
|
||||
|
||||
group("streams don't fetch", () {
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
## 4.0.0-nullsafety
|
||||
## 4.0.0
|
||||
|
||||
- Support moor version 4
|
||||
- Migrate to null-safety
|
||||
|
|
|
@ -19,14 +19,14 @@ class TodoAppBloc {
|
|||
final BehaviorSubject<Category> _activeCategory =
|
||||
BehaviorSubject.seeded(null);
|
||||
|
||||
Observable<List<EntryWithCategory>> _currentEntries;
|
||||
Stream<List<EntryWithCategory>> _currentEntries;
|
||||
|
||||
/// A stream of entries that should be displayed on the home screen.
|
||||
Observable<List<EntryWithCategory>> get homeScreenEntries => _currentEntries;
|
||||
Stream<List<EntryWithCategory>> get homeScreenEntries => _currentEntries;
|
||||
|
||||
final BehaviorSubject<List<CategoryWithActiveInfo>> _allCategories =
|
||||
BehaviorSubject();
|
||||
Observable<List<CategoryWithActiveInfo>> get categories => _allCategories;
|
||||
Stream<List<CategoryWithActiveInfo>> get categories => _allCategories;
|
||||
|
||||
TodoAppBloc() : db = Database() {
|
||||
// listen for the category to change. Then display all entries that are in
|
||||
|
@ -35,7 +35,7 @@ class TodoAppBloc {
|
|||
|
||||
// also watch all categories so that they can be displayed in the navigation
|
||||
// drawer.
|
||||
Observable.combineLatest2<List<CategoryWithCount>, Category,
|
||||
Rx.combineLatest2<List<CategoryWithCount>, Category,
|
||||
List<CategoryWithActiveInfo>>(
|
||||
db.categoriesWithCount(),
|
||||
_activeCategory,
|
||||
|
|
|
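The example app migrates from rxdart's `Observable` to plain Dart `Stream`s, with the static combinators now living on `Rx`. A small hedged sketch of the new style:

```dart
import 'package:rxdart/rxdart.dart';

// Sketch: combining two streams with the Rx.* static helpers used above.
Stream<String> describe(Stream<String> category, Stream<int> entryCount) {
  return Rx.combineLatest2<String, int, String>(
    category,
    entryCount,
    (name, count) => '$name has $count entries',
  );
}
```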
@ -126,7 +126,7 @@ class Database extends _$Database {
|
|||
return rows.map((row) {
|
||||
return EntryWithCategory(
|
||||
row.readTable(todos),
|
||||
row.readTable(categories),
|
||||
row.readTableOrNull(categories),
|
||||
);
|
||||
}).toList();
|
||||
});
|
||||
|
|
|
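Switching to `readTableOrNull` above reflects that the join can produce rows without a category. A hedged sketch of mapping such a row; the `todos` and `categories` getters are the example app's generated tables:

```dart
// Sketch: reading an optional table out of a joined row after a LEFT OUTER JOIN.
EntryWithCategory mapJoinedRow(Database db, TypedResult row) {
  return EntryWithCategory(
    row.readTable(db.todos),            // always present in this join
    row.readTableOrNull(db.categories), // null when the entry has no category
  );
}
```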
@ -503,9 +503,9 @@ abstract class _$Database extends GeneratedDatabase {
|
|||
variables: [],
|
||||
readsFrom: {categories, todos}).map((QueryRow row) {
|
||||
return CategoriesWithCountResult(
|
||||
id: row.readInt('id'),
|
||||
desc: row.readString('desc'),
|
||||
amount: row.readInt('amount'),
|
||||
id: row.read<int>('id'),
|
||||
desc: row.read<String>('desc'),
|
||||
amount: row.read<int>('amount'),
|
||||
);
|
||||
});
|
||||
}
|
||||
|
|
|
@ -50,7 +50,8 @@ class _AddCategoryDialogState extends State<AddCategoryDialog> {
|
|||
|
||||
void _addEntry() {
|
||||
if (_controller.text.isNotEmpty) {
|
||||
Provider.of<TodoAppBloc>(context).addCategory(_controller.text);
|
||||
Provider.of<TodoAppBloc>(context, listen: false)
|
||||
.addCategory(_controller.text);
|
||||
Navigator.of(context).pop();
|
||||
}
|
||||
}
|
||||
|
|
|
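Reading the bloc with `listen: false` above is the usual fix for calling `Provider.of` inside callbacks, where no rebuild subscription is wanted. A small hedged sketch, assuming the example app's `TodoAppBloc` is importable:

```dart
import 'package:flutter/widgets.dart';
import 'package:provider/provider.dart';

// Sketch: obtain a value inside an event handler without subscribing to changes.
void submitCategory(BuildContext context, String name) {
  final bloc = Provider.of<TodoAppBloc>(context, listen: false);
  bloc.addCategory(name);
  Navigator.of(context).pop();
}
```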
@ -101,7 +101,7 @@ class _CategoryDrawerEntry extends StatelessWidget {
|
|||
title: const Text('Delete'),
|
||||
content: Text('Really delete category $title?'),
|
||||
actions: <Widget>[
|
||||
FlatButton(
|
||||
TextButton(
|
||||
child: const Text('Cancel'),
|
||||
onPressed: () {
|
||||
Navigator.pop(context, false);
|
||||
|
|
|
@ -21,7 +21,7 @@ class HomeScreenState extends State<HomeScreen> {
|
|||
// been added
|
||||
final TextEditingController controller = TextEditingController();
|
||||
|
||||
TodoAppBloc get bloc => Provider.of<TodoAppBloc>(context);
|
||||
TodoAppBloc get bloc => Provider.of<TodoAppBloc>(context, listen: false);
|
||||
|
||||
@override
|
||||
Widget build(BuildContext context) {
|
||||
|
@ -33,6 +33,8 @@ class HomeScreenState extends State<HomeScreen> {
|
|||
body: StreamBuilder<List<EntryWithCategory>>(
|
||||
stream: bloc.homeScreenEntries,
|
||||
builder: (context, snapshot) {
|
||||
print(snapshot);
|
||||
|
||||
if (!snapshot.hasData) {
|
||||
return const Align(
|
||||
alignment: Alignment.center,
|
||||
|
|
|
@ -10,14 +10,14 @@ environment:
|
|||
dependencies:
|
||||
flutter:
|
||||
sdk: flutter
|
||||
provider: ^3.2.0
|
||||
intl: ^0.16.0
|
||||
rxdart: 0.21.0
|
||||
moor_flutter: ^3.0.0
|
||||
provider: ^5.0.0
|
||||
intl: ^0.17.0
|
||||
rxdart: ^0.26.0
|
||||
moor_flutter: ^4.0.0
|
||||
|
||||
dev_dependencies:
|
||||
build_runner:
|
||||
moor_generator: ^3.2.0
|
||||
moor_generator: ^4.0.0
|
||||
flutter_test:
|
||||
sdk: flutter
|
||||
|
||||
|
|
|
@ -32,11 +32,8 @@ class _SqfliteDelegate extends DatabaseDelegate with _SqfliteExecutor {
|
|||
_SqfliteDelegate(this.inDbFolder, this.path,
|
||||
{this.singleInstance = true, this.creator});
|
||||
|
||||
DbVersionDelegate? _delegate;
|
||||
@override
|
||||
DbVersionDelegate get versionDelegate {
|
||||
return _delegate ??= _SqfliteVersionDelegate(db);
|
||||
}
|
||||
late final DbVersionDelegate versionDelegate = _SqfliteVersionDelegate(db);
|
||||
|
||||
@override
|
||||
TransactionDelegate get transactionDelegate =>
|
||||
|
@ -49,7 +46,7 @@ class _SqfliteDelegate extends DatabaseDelegate with _SqfliteExecutor {
|
|||
Future<void> open(QueryExecutorUser user) async {
|
||||
String resolvedPath;
|
||||
if (inDbFolder) {
|
||||
resolvedPath = join((await s.getDatabasesPath())!, path);
|
||||
resolvedPath = join(await s.getDatabasesPath(), path);
|
||||
} else {
|
||||
resolvedPath = path;
|
||||
}
|
||||
|
|
|
@ -49,7 +49,7 @@ packages:
|
|||
name: convert
|
||||
url: "https://pub.dartlang.org"
|
||||
source: hosted
|
||||
version: "2.1.1"
|
||||
version: "3.0.0"
|
||||
fake_async:
|
||||
dependency: transitive
|
||||
description:
|
||||
|
@ -94,7 +94,7 @@ packages:
|
|||
name: moor
|
||||
url: "https://pub.dartlang.org"
|
||||
source: hosted
|
||||
version: "4.0.0-nullsafety.1"
|
||||
version: "4.2.1"
|
||||
path:
|
||||
dependency: "direct main"
|
||||
description:
|
||||
|
@ -108,7 +108,7 @@ packages:
|
|||
name: pedantic
|
||||
url: "https://pub.dartlang.org"
|
||||
source: hosted
|
||||
version: "1.10.0-nullsafety.3"
|
||||
version: "1.11.0"
|
||||
sky_engine:
|
||||
dependency: transitive
|
||||
description: flutter
|
||||
|
@ -127,14 +127,14 @@ packages:
|
|||
name: sqflite
|
||||
url: "https://pub.dartlang.org"
|
||||
source: hosted
|
||||
version: "2.0.0-nullsafety.2"
|
||||
version: "2.0.0+3"
|
||||
sqflite_common:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: sqflite_common
|
||||
url: "https://pub.dartlang.org"
|
||||
source: hosted
|
||||
version: "2.0.0-nullsafety.2"
|
||||
version: "2.0.0+2"
|
||||
sqlite3:
|
||||
dependency: transitive
|
||||
description:
|
||||
|
@ -169,7 +169,7 @@ packages:
|
|||
name: synchronized
|
||||
url: "https://pub.dartlang.org"
|
||||
source: hosted
|
||||
version: "3.0.0-nullsafety.1"
|
||||
version: "3.0.0"
|
||||
term_glyph:
|
||||
dependency: transitive
|
||||
description:
|
||||
|
@ -199,5 +199,5 @@ packages:
|
|||
source: hosted
|
||||
version: "2.1.0"
|
||||
sdks:
|
||||
dart: ">=2.12.0-29 <3.0.0"
|
||||
dart: ">=2.12.0 <3.0.0"
|
||||
flutter: ">=1.24.0-10"
|
||||
|
|
|
@ -1,17 +1,17 @@
|
|||
name: moor_flutter
|
||||
description: Flutter implementation of moor, a safe and reactive persistence library for Dart applications
|
||||
version: 4.0.0-nullsafety
|
||||
version: 4.0.0
|
||||
repository: https://github.com/simolus3/moor
|
||||
homepage: https://moor.simonbinder.eu/
|
||||
issue_tracker: https://github.com/simolus3/moor/issues
|
||||
|
||||
environment:
|
||||
sdk: '>=2.12.0-0 <3.0.0'
|
||||
sdk: '>=2.12.0 <3.0.0'
|
||||
|
||||
dependencies:
|
||||
moor: ^4.0.0-nullsafety
|
||||
sqflite: '>=2.0.0-nullsafety <3.0.0'
|
||||
path: '>1.8.0-nullsafety <2.0.0'
|
||||
moor: ^4.0.0
|
||||
sqflite: ^2.0.0+3
|
||||
path: ^1.8.0
|
||||
flutter:
|
||||
sdk: flutter
|
||||
|
||||
|
|
|
@ -1,3 +1,11 @@
|
|||
## 4.2.2
|
||||
|
||||
- Fix generated code for queries using a Dart placeholder before an array variable
|
||||
|
||||
## 4.2.1
|
||||
|
||||
- Fix generated code for nullable columns in moor files
|
||||
|
||||
## 4.2.0
|
||||
|
||||
- Migrate `package:moor_generator/api/migrations.dart` to null-safety
|
||||
|
|
|
@ -30,17 +30,6 @@ class _LintingVisitor extends RecursiveVisitor<void, void> {
|
|||
|
||||
_LintingVisitor(this.linter);
|
||||
|
||||
void _checkNoReturning(StatementReturningColumns stmt) {
|
||||
if (stmt.returning != null) {
|
||||
linter.lints.add(AnalysisError(
|
||||
type: AnalysisErrorType.other,
|
||||
message: 'RETURNING is not supported in this version of moor. '
|
||||
'Follow https://github.com/simolus3/moor/issues/1096 for updates.',
|
||||
relevantNode: stmt.returning,
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
@override
|
||||
void visitBinaryExpression(BinaryExpression e, void arg) {
|
||||
const numericOps = {
|
||||
|
@ -162,15 +151,8 @@ class _LintingVisitor extends RecursiveVisitor<void, void> {
|
|||
}
|
||||
}
|
||||
|
||||
@override
|
||||
void visitDeleteStatement(DeleteStatement e, void arg) {
|
||||
_checkNoReturning(e);
|
||||
visitChildren(e, arg);
|
||||
}
|
||||
|
||||
@override
|
||||
void visitInsertStatement(InsertStatement e, void arg) {
|
||||
_checkNoReturning(e);
|
||||
final targeted = e.resolvedTargetColumns;
|
||||
if (targeted == null) return;
|
||||
|
||||
|
@ -239,10 +221,4 @@ class _LintingVisitor extends RecursiveVisitor<void, void> {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
@override
|
||||
void visitUpdateStatement(UpdateStatement e, void arg) {
|
||||
_checkNoReturning(e);
|
||||
visitChildren(e, arg);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -20,6 +20,7 @@ class QueryHandler {
|
|||
Set<Table> _foundTables;
|
||||
Set<View> _foundViews;
|
||||
List<FoundElement> _foundElements;
|
||||
|
||||
Iterable<FoundVariable> get _foundVariables =>
|
||||
_foundElements.whereType<FoundVariable>();
|
||||
|
||||
|
@ -56,12 +57,26 @@ class QueryHandler {
|
|||
}
|
||||
}
|
||||
|
||||
void _applyFoundTables(ReferencedTablesVisitor visitor) {
|
||||
_foundTables = visitor.foundTables;
|
||||
_foundViews = visitor.foundViews;
|
||||
}
|
||||
|
||||
UpdatingQuery _handleUpdate() {
|
||||
final updatedFinder = UpdatedTablesVisitor();
|
||||
context.root.acceptWithoutArg(updatedFinder);
|
||||
_foundTables = updatedFinder.writtenTables.map((w) => w.table).toSet();
|
||||
_applyFoundTables(updatedFinder);
|
||||
|
||||
final isInsert = context.root is InsertStatement;
|
||||
final root = context.root;
|
||||
final isInsert = root is InsertStatement;
|
||||
|
||||
InferredResultSet resultSet;
|
||||
if (root is StatementReturningColumns) {
|
||||
final columns = root.returnedResultSet?.resolvedColumns;
|
||||
if (columns != null) {
|
||||
resultSet = _inferResultSet(columns);
|
||||
}
|
||||
}
|
||||
|
||||
return UpdatingQuery(
|
||||
name,
|
||||
|
@ -70,14 +85,15 @@ class QueryHandler {
|
|||
updatedFinder.writtenTables.map(mapper.writtenToMoor).toList(),
|
||||
isInsert: isInsert,
|
||||
hasMultipleTables: updatedFinder.foundTables.length > 1,
|
||||
resultSet: resultSet,
|
||||
);
|
||||
}
|
||||
|
||||
SqlSelectQuery _handleSelect() {
|
||||
final tableFinder = ReferencedTablesVisitor();
|
||||
_select.acceptWithoutArg(tableFinder);
|
||||
_foundTables = tableFinder.foundTables;
|
||||
_foundViews = tableFinder.foundViews;
|
||||
_applyFoundTables(tableFinder);
|
||||
|
||||
final moorTables =
|
||||
_foundTables.map(mapper.tableToMoor).where((s) => s != null).toList();
|
||||
final moorViews =
|
||||
|
@ -95,15 +111,14 @@ class QueryHandler {
|
|||
context,
|
||||
_foundElements,
|
||||
moorEntities,
|
||||
_inferResultSet(),
|
||||
_inferResultSet(_select.resolvedColumns),
|
||||
requestedName,
|
||||
);
|
||||
}
|
||||
|
||||
InferredResultSet _inferResultSet() {
|
||||
InferredResultSet _inferResultSet(List<Column> rawColumns) {
|
||||
final candidatesForSingleTable = Set.of(_foundTables);
|
||||
final columns = <ResultColumn>[];
|
||||
final rawColumns = _select.resolvedColumns;
|
||||
|
||||
// First, go through regular result columns
|
||||
for (final column in rawColumns) {
|
||||
|
@ -190,7 +205,7 @@ class QueryHandler {
|
|||
}
|
||||
|
||||
List<NestedResultTable> _findNestedResultTables() {
|
||||
final query = _select;
|
||||
final query = context.root;
|
||||
// We don't currently support nested results for compound statements
|
||||
if (query is! SelectStatement) return const [];
|
||||
|
||||
|
|
|
@ -44,11 +44,12 @@ class DeclaredDartQuery extends DeclaredQuery {
|
|||
/// available.
|
||||
class DeclaredMoorQuery extends DeclaredQuery {
|
||||
final DeclaredStatement astNode;
|
||||
CrudStatement get query => astNode.statement;
|
||||
ParsedMoorFile file;
|
||||
|
||||
DeclaredMoorQuery(String name, this.astNode) : super(name);
|
||||
|
||||
CrudStatement get query => astNode.statement;
|
||||
|
||||
factory DeclaredMoorQuery.fromStatement(DeclaredStatement stmt) {
|
||||
assert(stmt.identifier is SimpleName);
|
||||
final name = (stmt.identifier as SimpleName).name;
|
||||
|
@ -69,6 +70,12 @@ abstract class SqlQuery {
|
|||
|
||||
String get sql => fromContext.sql;
|
||||
|
||||
/// The result set of this statement, mapped to moor-generated classes.
|
||||
///
|
||||
/// This is non-nullable for select queries. Updating queries might have a
|
||||
/// result set if they have a `RETURNING` clause.
|
||||
InferredResultSet /*?*/ get resultSet;
|
||||
|
||||
/// The variables that appear in the [sql] query. We support three kinds of
|
||||
/// sql variables: The regular "?" variables, explicitly indexed "?xyz"
|
||||
/// variables and colon-named variables. Even though this feature is not
|
||||
|
@ -108,13 +115,49 @@ abstract class SqlQuery {
|
|||
variables = elements.whereType<FoundVariable>().toList();
|
||||
placeholders = elements.whereType<FoundDartPlaceholder>().toList();
|
||||
}
|
||||
|
||||
String get resultClassName {
|
||||
final resultSet = this.resultSet;
|
||||
if (resultSet == null) {
|
||||
throw StateError('This query ($name) does not have a result set');
|
||||
}
|
||||
|
||||
if (resultSet.matchingTable != null || resultSet.singleColumn) {
|
||||
throw UnsupportedError('This result set does not introduce a class, '
|
||||
'either because it has a matching table or because it only returns '
|
||||
'one column.');
|
||||
}
|
||||
|
||||
return resultSet.resultClassName ?? '${ReCase(name).pascalCase}Result';
|
||||
}
|
||||
|
||||
/// The Dart type representing a row of this result set.
|
||||
String resultTypeCode(
|
||||
[GenerationOptions options = const GenerationOptions()]) {
|
||||
final resultSet = this.resultSet;
|
||||
if (resultSet == null) {
|
||||
throw StateError('This query ($name) does not have a result set');
|
||||
}
|
||||
|
||||
if (resultSet.matchingTable != null) {
|
||||
return resultSet.matchingTable.table.dartTypeName;
|
||||
}
|
||||
|
||||
if (resultSet.singleColumn) {
|
||||
return resultSet.columns.single.dartTypeCode(options);
|
||||
}
|
||||
|
||||
return resultClassName;
|
||||
}
|
||||
}
|
||||
|
||||
class SqlSelectQuery extends SqlQuery {
|
||||
final List<MoorSchemaEntity> readsFrom;
|
||||
@override
|
||||
final InferredResultSet resultSet;
|
||||
|
||||
/// The name of the result class, as requested by the user.
|
||||
// todo: Allow custom result classes for RETURNING as well?
|
||||
final String /*?*/ requestedResultClass;
|
||||
|
||||
SqlSelectQuery(
|
||||
|
@ -150,42 +193,21 @@ class SqlSelectQuery extends SqlQuery {
|
|||
null,
|
||||
);
|
||||
}
|
||||
|
||||
String get resultClassName {
|
||||
if (resultSet.matchingTable != null || resultSet.singleColumn) {
|
||||
throw UnsupportedError('This result set does not introduce a class, '
|
||||
'either because it has a matching table or because it only returns '
|
||||
'one column.');
|
||||
}
|
||||
|
||||
return resultSet.resultClassName ?? '${ReCase(name).pascalCase}Result';
|
||||
}
|
||||
|
||||
/// The Dart type representing a row of this result set.
|
||||
String resultTypeCode(
|
||||
[GenerationOptions options = const GenerationOptions()]) {
|
||||
if (resultSet.matchingTable != null) {
|
||||
return resultSet.matchingTable.table.dartTypeName;
|
||||
}
|
||||
|
||||
if (resultSet.singleColumn) {
|
||||
return resultSet.columns.single.dartTypeCode(options);
|
||||
}
|
||||
|
||||
return resultClassName;
|
||||
}
|
||||
}
|
||||
|
||||
class UpdatingQuery extends SqlQuery {
|
||||
final List<WrittenMoorTable> updates;
|
||||
final bool isInsert;
|
||||
@override
|
||||
final InferredResultSet /*?*/ resultSet;
|
||||
|
||||
bool get isOnlyDelete => updates.every((w) => w.kind == UpdateKind.delete);
|
||||
|
||||
bool get isOnlyUpdate => updates.every((w) => w.kind == UpdateKind.update);
|
||||
|
||||
UpdatingQuery(String name, AnalysisContext fromContext,
|
||||
List<FoundElement> elements, this.updates,
|
||||
{this.isInsert = false, bool hasMultipleTables})
|
||||
{this.isInsert = false, bool hasMultipleTables, this.resultSet})
|
||||
: super(name, fromContext, elements,
|
||||
hasMultipleTables: hasMultipleTables);
|
||||
}
|
||||
|
|
|
@ -65,17 +65,6 @@ const Map<ColumnType, String> dartTypeNames = {
|
|||
ColumnType.real: 'double',
|
||||
};
|
||||
|
||||
/// Maps to the method name of a "QueryRow" from moor to extract a column type
|
||||
/// of a result row.
|
||||
const Map<ColumnType, String> readFromMethods = {
|
||||
ColumnType.boolean: 'readBool',
|
||||
ColumnType.text: 'readString',
|
||||
ColumnType.integer: 'readInt',
|
||||
ColumnType.datetime: 'readDateTime',
|
||||
ColumnType.blob: 'readBlob',
|
||||
ColumnType.real: 'readDouble',
|
||||
};
|
||||
|
||||
/// Maps from a column type to code that can be used to create a variable of the
|
||||
/// respective type.
|
||||
const Map<ColumnType, String> createVariable = {
|
||||
|
|
|
@ -24,6 +24,7 @@ class QueryWriter {
|
|||
final Scope scope;
|
||||
|
||||
SqlSelectQuery get _select => query as SqlSelectQuery;
|
||||
|
||||
UpdatingQuery get _update => query as UpdatingQuery;
|
||||
|
||||
MoorOptions get options => scope.writer.options;
|
||||
|
@ -37,15 +38,22 @@ class QueryWriter {
|
|||
}
|
||||
|
||||
void write() {
|
||||
// Note that writing queries can have a result set if they use a RETURNING
|
||||
// clause.
|
||||
final resultSet = query.resultSet;
|
||||
if (resultSet?.needsOwnClass == true) {
|
||||
final resultSetScope = scope.findScopeOfLevel(DartScope.library);
|
||||
ResultSetWriter(query, resultSetScope).write();
|
||||
}
|
||||
|
||||
if (query is SqlSelectQuery) {
|
||||
final select = query as SqlSelectQuery;
|
||||
if (select.resultSet.needsOwnClass) {
|
||||
final resultSetScope = scope.findScopeOfLevel(DartScope.library);
|
||||
ResultSetWriter(select, resultSetScope).write();
|
||||
}
|
||||
_writeSelect();
|
||||
} else if (query is UpdatingQuery) {
|
||||
_writeUpdatingQuery();
|
||||
if (resultSet != null) {
|
||||
_writeUpdatingQueryWithReturning();
|
||||
} else {
|
||||
_writeUpdatingQuery();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -67,17 +75,20 @@ class QueryWriter {
|
|||
}
|
||||
|
||||
/// Writes the function literal that turns a "QueryRow" into the desired
|
||||
/// custom return type of a select statement.
|
||||
/// custom return type of a query.
|
||||
void _writeMappingLambda() {
|
||||
if (_select.resultSet.singleColumn) {
|
||||
final column = _select.resultSet.columns.single;
|
||||
final resultSet = query.resultSet;
|
||||
assert(resultSet != null);
|
||||
|
||||
if (resultSet.singleColumn) {
|
||||
final column = resultSet.columns.single;
|
||||
_buffer.write('(QueryRow row) => '
|
||||
'${readingCode(column, scope.generationOptions)}');
|
||||
} else if (_select.resultSet.matchingTable != null) {
|
||||
} else if (resultSet.matchingTable != null) {
|
||||
// note that, even if the result set has a matching table, we can't just
|
||||
// use the mapFromRow() function of that table - the column names might
|
||||
// be different!
|
||||
final match = _select.resultSet.matchingTable;
|
||||
final match = resultSet.matchingTable;
|
||||
final table = match.table;
|
||||
|
||||
if (match.effectivelyNoAlias) {
|
||||
|
@ -98,19 +109,19 @@ class QueryWriter {
|
|||
_buffer.write('})');
|
||||
}
|
||||
} else {
|
||||
_buffer.write('(QueryRow row) { return ${_select.resultClassName}(');
|
||||
_buffer.write('(QueryRow row) { return ${query.resultClassName}(');
|
||||
|
||||
if (options.rawResultSetData) {
|
||||
_buffer.write('row: row,\n');
|
||||
}
|
||||
|
||||
for (final column in _select.resultSet.columns) {
|
||||
final fieldName = _select.resultSet.dartNameFor(column);
|
||||
for (final column in resultSet.columns) {
|
||||
final fieldName = resultSet.dartNameFor(column);
|
||||
_buffer.write(
|
||||
'$fieldName: ${readingCode(column, scope.generationOptions)},');
|
||||
}
|
||||
for (final nested in _select.resultSet.nestedResults) {
|
||||
final prefix = _select.resultSet.nestedPrefixFor(nested);
|
||||
for (final nested in resultSet.nestedResults) {
|
||||
final prefix = resultSet.nestedPrefixFor(nested);
|
||||
if (prefix == null) continue;
|
||||
|
||||
final fieldName = nested.dartFieldName;
|
||||
|
@ -127,10 +138,13 @@ class QueryWriter {
|
|||
/// in the same scope, reads the [column] from that row and brings it into a
|
||||
/// suitable type.
|
||||
static String readingCode(ResultColumn column, GenerationOptions options) {
|
||||
final readMethod = readFromMethods[column.type];
|
||||
var rawDartType = dartTypeNames[column.type];
|
||||
if (column.nullable && options.nnbd) {
|
||||
rawDartType = '$rawDartType?';
|
||||
}
|
||||
|
||||
final dartLiteral = asDartLiteral(column.name);
|
||||
var code = 'row.$readMethod($dartLiteral)';
|
||||
var code = 'row.read<$rawDartType>($dartLiteral)';
|
||||
|
||||
if (column.typeConverter != null) {
|
||||
final needsAssert = !column.nullable && options.nnbd;
|
||||
|
@ -203,6 +217,20 @@ class QueryWriter {
|
|||
_buffer.write(').watch();\n}\n');
|
||||
}
|
||||
|
||||
void _writeUpdatingQueryWithReturning() {
|
||||
final type = query.resultTypeCode(scope.generationOptions);
|
||||
_buffer.write('Future<List<$type>> ${query.name}(');
|
||||
_writeParameters();
|
||||
_buffer.write(') {\n');
|
||||
|
||||
_writeExpandedDeclarations();
|
||||
_buffer.write('return customWriteReturning(${_queryCode()},');
|
||||
_writeCommonUpdateParameters();
|
||||
_buffer.write(').then((rows) => rows.map(');
|
||||
_writeMappingLambda();
|
||||
_buffer.write(').toList());\n}');
|
||||
}
|
||||
|
||||
void _writeUpdatingQuery() {
|
||||
/*
|
||||
Future<int> test() {
|
||||
|
@ -217,18 +245,15 @@ class QueryWriter {
|
|||
|
||||
_writeExpandedDeclarations();
|
||||
_buffer.write('return $implName(${_queryCode()},');
|
||||
_writeCommonUpdateParameters();
|
||||
_buffer.write(',);\n}\n');
|
||||
}
|
||||
|
||||
void _writeCommonUpdateParameters() {
|
||||
_writeVariables();
|
||||
_buffer.write(',');
|
||||
_writeUpdates();
|
||||
|
||||
if (_update.isOnlyDelete) {
|
||||
_buffer.write(', updateKind: UpdateKind.delete');
|
||||
} else if (_update.isOnlyUpdate) {
|
||||
_buffer.write(', updateKind: UpdateKind.update');
|
||||
}
|
||||
|
||||
_buffer.write(',);\n}\n');
|
||||
_writeUpdateKind();
|
||||
}
|
||||
|
||||
void _writeParameters() {
|
||||
|
@ -324,9 +349,21 @@ class QueryWriter {
|
|||
|
||||
void _writeExpandedDeclarations() {
|
||||
var indexCounterWasDeclared = false;
|
||||
final needsIndexCounter = query.variables.any((v) => v.isArray);
|
||||
var needsIndexCounter = false;
|
||||
var highestIndexBeforeArray = 0;
|
||||
|
||||
for (final variable in query.variables) {
|
||||
// Variables use an explicit index, we need to know the start index at
|
||||
// runtime (can be dynamic when placeholders or other arrays appear before
|
||||
// this one)
|
||||
if (variable.isArray) {
|
||||
needsIndexCounter = true;
|
||||
break;
|
||||
}
|
||||
|
||||
highestIndexBeforeArray = max(highestIndexBeforeArray, variable.index);
|
||||
}
|
||||
|
||||
void _writeIndexCounterIfNeeded() {
|
||||
if (indexCounterWasDeclared || !needsIndexCounter) {
|
||||
return; // already written or not necessary at all
|
||||
|
@ -368,10 +405,6 @@ class QueryWriter {
|
|||
// increase highest index for the next expanded element
|
||||
_increaseIndexCounter('${element.dartParameterName}.length');
|
||||
}
|
||||
|
||||
if (!indexCounterWasDeclared) {
|
||||
highestIndexBeforeArray = max(highestIndexBeforeArray, element.index);
|
||||
}
|
||||
} else if (element is FoundDartPlaceholder) {
|
||||
_writeIndexCounterIfNeeded();
|
||||
|
||||
|
@ -555,4 +588,12 @@ class QueryWriter {
|
|||
final from = _update.updates.map((t) => t.table.dbGetterName).join(', ');
|
||||
_buffer..write('updates: {')..write(from)..write('}');
|
||||
}
|
||||
|
||||
void _writeUpdateKind() {
|
||||
if (_update.isOnlyDelete) {
|
||||
_buffer.write(', updateKind: UpdateKind.delete');
|
||||
} else if (_update.isOnlyUpdate) {
|
||||
_buffer.write(', updateKind: UpdateKind.update');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -6,7 +6,7 @@ import 'package:moor_generator/writer.dart';

/// Writes a class holding the result of an sql query into Dart.
class ResultSetWriter {
  final SqlSelectQuery query;
  final SqlQuery query;
  final Scope scope;

  ResultSetWriter(this.query, this.scope);
@@ -1,6 +1,6 @@
name: moor_generator
description: Dev-dependency to generate table and dataclasses together with the moor package.
version: 4.2.0
version: 4.2.1
repository: https://github.com/simolus3/moor
homepage: https://moor.simonbinder.eu/
issue_tracker: https://github.com/simolus3/moor/issues
@@ -1,3 +1,9 @@
## 0.15.1-dev

- New analysis checks for `RETURNING`: Disallow `table.*` syntax and aggregate expressions
- Fix resolving columns when `RETURNING` is used in an `UPDATE FROM` statement
- Fix aliases to rowid being reported as nullable

## 0.15.0

- __Breaking__: Change `InsertStatement.upsert` to a list of upsert clauses
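As a usage-level illustration of the first changelog entry, here is a small sketch of statements the new checks reject. It assumes a table `t (id, c2)` registered through the schema reader, mirroring the lint tests added further down in this diff; only APIs already shown elsewhere in the diff are used:

```dart
import 'package:sqlparser/sqlparser.dart';

void main() {
  // RETURNING needs sqlite 3.35, so the tests in this diff pin that version.
  final engine = SqlEngine(EngineOptions(version: SqliteVersion.v3_35));
  final stmt = engine
      .parse('CREATE TABLE t (id INTEGER PRIMARY KEY, c2 TEXT);')
      .rootNode;
  engine.registerTable(engine.schemaReader.read(stmt as CreateTableStatement));

  // Rejected: RETURNING may not use the TABLE.* syntax.
  final starResult = engine.analyze(
      'UPDATE t SET id = t.id + 1 FROM (SELECT * FROM t) AS old RETURNING old.*;');
  starResult.errors.forEach((e) => print(e.message));

  // Rejected: aggregate expressions are not allowed in RETURNING.
  final aggregateResult = engine.analyze(
      'INSERT INTO t DEFAULT VALUES RETURNING MAX(id) OVER (PARTITION BY c2)');
  aggregateResult.errors.forEach((e) => print(e.message));

  // Plain columns are still fine.
  print(engine.analyze('DELETE FROM t WHERE id = 3 RETURNING id;').errors.isEmpty);
}
```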
@@ -48,12 +48,23 @@ class ReferenceScope {
  }

  set availableColumns(List<Column>? value) {
    _availableColumns = value;
    // guard against lists of subtype of column
    if (value != null) {
      _availableColumns = <Column>[...value];
    } else {
      _availableColumns = null;
    }
  }

  ReferenceScope(this.parent,
      {this.root, this.inheritAvailableColumns = false});

  void addAvailableColumn(Column column) {
    // make sure _availableColumns is resolved and mutable
    final ownColumns = _availableColumns ??= <Column>[...availableColumns];
    ownColumns.add(column);
  }

  ReferenceScope createChild({bool? inheritAvailableColumns}) {
    // wonder why we're creating a linked list of reference scopes instead of
    // just passing down a copy of [_references]? In sql, some variables can be
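The defensive copy in the new `availableColumns` setter is about Dart's covariant generics: a caller's `List<TableColumn>` is assignable to `List<Column>`, but adding a different `Column` subtype to that list later (as `addAvailableColumn` may do) throws at runtime. A standalone sketch, with stand-in classes instead of the real sqlparser types, whose subtype relationship is assumed here for illustration:

```dart
class Column {}

class TableColumn extends Column {}

class ExpressionColumn extends Column {}

void main() {
  // A List<TableColumn> is assignable to List<Column> in Dart...
  final List<Column> columns = <TableColumn>[TableColumn()];

  // ...but adding another Column subtype throws a TypeError at runtime.
  try {
    columns.add(ExpressionColumn());
  } on TypeError catch (e) {
    print('covariant list rejected the element: $e');
  }

  // Copying into a fresh List<Column>, as the new setter does, is safe:
  final copied = <Column>[...columns]..add(ExpressionColumn());
  print(copied.length); // 2
}
```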
@@ -53,6 +53,9 @@ class Table extends NamedResultSet with HasMetaMixin implements HumanReadable {

      if (_rowIdColumn == null && column.isAliasForRowId()) {
        _rowIdColumn = column;
        // By design, the rowid is non-nullable, even if there isn't a NOT NULL
        // constraint set on the column definition.
        column._type = const ResolvedType(type: BasicType.int, nullable: false);
      }
    }
  }
@@ -90,35 +90,57 @@ class ColumnResolver extends RecursiveVisitor<void, void> {
      if (child != e.table && child != e.from) visit(child, arg);
    }

    _resolveReturningClause(e);
    _resolveReturningClause(e, baseTable);
  }

  void _addIfResolved(AstNode node, TableReference ref) {
  ResultSet? _addIfResolved(AstNode node, TableReference ref) {
    final table = _resolveTableReference(ref);
    if (table != null) {
      node.scope.availableColumns = table.resolvedColumns;
    }

    return table;
  }

  @override
  void visitInsertStatement(InsertStatement e, void arg) {
    _addIfResolved(e, e.table);
    final into = _addIfResolved(e, e.table);
    visitChildren(e, arg);
    _resolveReturningClause(e);
    _resolveReturningClause(e, into);
  }

  @override
  void visitDeleteStatement(DeleteStatement e, void arg) {
    _addIfResolved(e, e.from!);
    final from = _addIfResolved(e, e.from);
    visitChildren(e, arg);
    _resolveReturningClause(e);
    _resolveReturningClause(e, from);
  }

  void _resolveReturningClause(StatementReturningColumns stmt) {
  /// Infers the result set of a `RETURNING` clause.
  ///
  /// The behavior of `RETURNING` clauses is a bit weird when there are multiple
  /// tables available (which can happen with `UPDATE FROM`). When a star column
  /// is used, it only expands to columns from the main table:
  /// ```sql
  /// CREATE TABLE x (a, b);
  /// -- here, the `*` in returning does not include columns from `old`.
  /// UPDATE x SET a = x.a + 1 FROM (SELECT * FROM x) AS old RETURNING *;
  /// ```
  ///
  /// However, individual columns from other tables are available and supported:
  /// ```sql
  /// UPDATE x SET a = x.a + 1 FROM (SELECT * FROM x) AS old
  /// RETURNING old.a, old.b;
  /// ```
  ///
  /// Note that `old.*` is forbidden by sqlite and not applicable here.
  void _resolveReturningClause(
      StatementReturningColumns stmt, ResultSet? mainTable) {
    final clause = stmt.returning;
    if (clause == null) return;

    final columns = _resolveColumns(stmt.scope, clause.columns);
    final columns = _resolveColumns(stmt.scope, clause.columns,
        columnsForStar: mainTable?.resolvedColumns);
    stmt.returnedResultSet = CustomResultSet(columns);
  }

@@ -210,10 +232,10 @@ class ColumnResolver extends RecursiveVisitor<void, void> {
    s.resolvedColumns = _resolveColumns(scope, s.columns);
  }

  List<Column> _resolveColumns(
      ReferenceScope scope, List<ResultColumn> columns) {
  List<Column> _resolveColumns(ReferenceScope scope, List<ResultColumn> columns,
      {List<Column>? columnsForStar}) {
    final usedColumns = <Column>[];
    final availableColumns = scope.availableColumns;
    final availableColumns = <Column>[...scope.availableColumns];

    // a select statement can include everything from its sub queries as a
    // result, but also expressions that appear as result columns

@@ -234,9 +256,9 @@ class ColumnResolver extends RecursiveVisitor<void, void> {

          visibleColumnsForStar = tableResolver.resultSet!.resolvedColumns;
        } else {
          // we have a * column without a table, that resolves to every columns
          // we have a * column without a table, that resolves to every column
          // available
          visibleColumnsForStar = availableColumns;
          visibleColumnsForStar = columnsForStar ?? availableColumns;
        }

        usedColumns

@@ -262,6 +284,7 @@ class ColumnResolver extends RecursiveVisitor<void, void> {
          final name = resultColumn.as;
          if (!availableColumns.any((c) => c.name == name)) {
            availableColumns.add(column);
            scope.addAvailableColumn(column);
          }
        }
      } else if (resultColumn is NestedStarResultColumn) {
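To see the `columnsForStar` behaviour end to end, here is a small sketch that analyzes the `UPDATE ... FROM` example from the doc comment above. The `demo` table (columns `id` and `content`) and the engine setup mirror the resolver tests added later in this diff:

```dart
import 'package:sqlparser/sqlparser.dart';

void main() {
  final engine = SqlEngine(EngineOptions(version: SqliteVersion.v3_35));
  final create = engine
      .parse('CREATE TABLE demo (id INTEGER PRIMARY KEY, content TEXT);')
      .rootNode;
  engine.registerTable(engine.schemaReader.read(create as CreateTableStatement));

  final result = engine.analyze('''
    UPDATE demo SET content = ''
      FROM (SELECT * FROM demo) AS old
      RETURNING *;
  ''');

  final returning = (result.root as UpdateStatement).returnedResultSet!;
  // The star only expands to the main table, so this prints (id, content)
  // even though `old` is also in scope.
  print(returning.resolvedColumns!.map((c) => c.name).toList());
}
```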
@@ -128,6 +128,29 @@ class LintingVisitor extends RecursiveVisitor<void, void> {
      }
    }

    for (final column in e.columns) {
      // Table wildcards are not currently allowed, see
      // https://www.sqlite.org/src/info/132994c8b1063bfb
      if (column is StarResultColumn && column.tableName != null) {
        context.reportError(AnalysisError(
          type: AnalysisErrorType.synctactic,
          message: 'Columns in RETURNING may not use the TABLE.* syntax',
          relevantNode: column,
        ));
      } else if (column is ExpressionResultColumn) {
        // While we're at it, window expressions aren't allowed either
        if (column.expression is AggregateExpression) {
          context.reportError(
            AnalysisError(
              type: AnalysisErrorType.illegalUseOfReturning,
              message: 'Aggregate expressions are not allowed in RETURNING',
              relevantNode: column.expression,
            ),
          );
        }
      }
    }

    visitChildren(e, arg);
  }
@@ -7,12 +7,12 @@ import 'statement.dart';

class DeleteStatement extends CrudStatement
    implements StatementWithWhere, StatementReturningColumns, HasPrimarySource {
  TableReference? from;
  TableReference from;
  @override
  Expression? where;

  @override
  TableReference? get table => from;
  TableReference get table => from;

  @override
  Returning? returning;

@@ -31,7 +31,7 @@ class DeleteStatement extends CrudStatement
  @override
  void transformChildren<A>(Transformer<A> transformer, A arg) {
    withClause = transformer.transformNullableChild(withClause, this, arg);
    from = transformer.transformChild(from!, this, arg);
    from = transformer.transformChild(from, this, arg);
    where = transformer.transformNullableChild(where, this, arg);
    returning = transformer.transformNullableChild(returning, this, arg);
  }

@@ -39,7 +39,7 @@ class DeleteStatement extends CrudStatement
  @override
  Iterable<AstNode> get childNodes => [
        if (withClause != null) withClause!,
        from!,
        from,
        if (where != null) where!,
        if (returning != null) returning!,
      ];
@@ -505,6 +505,7 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
    _keyword(TokenType.delete);
    _from(e.from);
    _where(e.where);
    visitNullable(e.returning, arg);
  }

  @override

@@ -649,7 +650,7 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
    _identifier(e.name);
    _symbol('(');
    visit(e.parameters, arg);
    _symbol(')');
    _symbol(')', spaceAfter: true);
  }

  @override

@@ -703,6 +704,7 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
        InsertMode.insertOrFail: TokenType.fail,
        InsertMode.insertOrIgnore: TokenType.ignore,
      }[mode]!);
      visitNullable(e.returning, arg);
    }

    _keyword(TokenType.into);

@@ -715,7 +717,7 @@ class NodeSqlBuilder extends AstVisitor<void, void> {

    }

    visit(e.source, arg);
    visitNullable(e.upsert, arg);
    visitNullable(e.returning, arg);
  }

  @override

@@ -1179,6 +1181,7 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
    _join(e.set, ',');
    _from(e.from);
    _where(e.where);
    visitNullable(e.returning, arg);
  }

  @override
@@ -4,7 +4,7 @@ import 'package:test/test.dart';
import 'data.dart';

void main() {
  final engine = SqlEngine();
  final engine = SqlEngine(EngineOptions(version: SqliteVersion.v3_35));
  engine.registerTable(demoTable);

  group('CREATE TRIGGER statements', () {

@@ -158,12 +158,42 @@ INSERT INTO demo VALUES (?, ?)
    expect(result.errors, isEmpty);
  });

  test('resolves RETURNING clause', () {
    final result =
        engine.analyze("INSERT INTO demo (content) VALUES ('hi') RETURNING *;");
    final returning = (result.root as InsertStatement).returnedResultSet;
  group('resolves RETURNING clause', () {
    test('for simple inserts', () {
      final result = engine
          .analyze("INSERT INTO demo (content) VALUES ('hi') RETURNING *;");
      final returning = (result.root as InsertStatement).returnedResultSet;

    expect(returning, isNotNull);
    expect(returning!.resolvedColumns!.map((e) => e.name), ['id', 'content']);
      expect(returning, isNotNull);
      expect(returning!.resolvedColumns!.map((e) => e.name), ['id', 'content']);
    });

    test('for custom expressions', () {
      final result = engine.analyze("INSERT INTO demo (content) VALUES ('hi') "
          'RETURNING content || content AS x;');
      final returning = (result.root as InsertStatement).returnedResultSet!;

      expect(returning.resolvedColumns!.map((e) => e.name), ['x']);
    });

    test('star does not include other tables', () {
      final result = engine.analyze('''
        UPDATE demo SET content = ''
          FROM (SELECT * FROM demo) AS old
          RETURNING *;
      ''');
      final returning = (result.root as UpdateStatement).returnedResultSet!;
      expect(returning.resolvedColumns!.map((e) => e.name), ['id', 'content']);
    });

    test('can refer to columns from other tables', () {
      final result = engine.analyze('''
        UPDATE demo SET content = ''
          FROM (SELECT * FROM demo) AS old
          RETURNING old.id, old.content;
      ''');

      expect(result.errors, isEmpty);
    });
  });
}
@@ -30,4 +30,36 @@ void main() {
    expect(result.errors, hasLength(1));
    expect(result.errors.single.type, AnalysisErrorType.illegalUseOfReturning);
  });

  test('does not allow star columns with an associated table', () {
    final result = engine.analyze('''
      UPDATE t SET id = t.id + 1
        FROM (SELECT * FROM t) AS old
        RETURNING old.*;
    ''');

    expect(result.errors, hasLength(1));
    expect(
      result.errors.single,
      isA<AnalysisError>()
          .having((e) => e.source!.span!.text, 'source.span.text', 'old.*')
          .having((e) => e.message, 'message',
              contains('RETURNING may not use the TABLE.* syntax')),
    );
  });

  test('does not allow aggregate expressions', () {
    final result = engine.analyze('INSERT INTO t DEFAULT VALUES RETURNING '
        'MAX(id) OVER (PARTITION BY c2)');

    expect(result.errors, hasLength(1));
    expect(
      result.errors.single,
      isA<AnalysisError>()
          .having((e) => e.source!.span!.text, 'source.span.text',
              'MAX(id) OVER (PARTITION BY c2)')
          .having((e) => e.message, 'message',
              'Aggregate expressions are not allowed in RETURNING'),
    );
  });
}
@@ -127,4 +127,14 @@ void main() {
    final table = engine.schemaReader.read(stmt as CreateTableStatement);
    expect(table.resolvedColumns.single.type.type, BasicType.blob);
  });

  test('aliases to rowid are non-nullable', () {
    final engine = SqlEngine();
    final stmt =
        engine.parse('CREATE TABLE foo (id INTEGER PRIMARY KEY);').rootNode;

    final table = engine.schemaReader.read(stmt as CreateTableStatement);
    expect(table.resolvedColumns.single.type,
        const ResolvedType(type: BasicType.int, nullable: false));
  });
}
@@ -223,9 +223,15 @@ CREATE UNIQUE INDEX my_idx ON t1 (c1, c2, c3) WHERE c1 < c3;
    });
  });

  test('delete', () {
    testFormat(
        'WITH foo (id) AS (SELECT * FROM bar) DELETE FROM bar WHERE x;');
  group('delete', () {
    test('with CTEs', () {
      testFormat(
          'WITH foo (id) AS (SELECT * FROM bar) DELETE FROM bar WHERE x;');
    });

    test('with returning', () {
      testFormat('DELETE FROM foo RETURNING *');
    });
  });

  group('insert', () {

@@ -234,10 +240,14 @@ CREATE UNIQUE INDEX my_idx ON t1 (c1, c2, c3) WHERE c1 < c3;
          'REPLACE INTO foo DEFAULT VALUES');
    });

    test('insert into select', () {
    test('into select', () {
      testFormat('INSERT INTO foo SELECT * FROM bar');
    });

    test('with returning', () {
      testFormat('INSERT INTO foo DEFAULT VALUES RETURNING *');
    });

    test('upsert - do nothing', () {
      testFormat(
          'INSERT OR REPLACE INTO foo DEFAULT VALUES ON CONFLICT DO NOTHING');

@@ -260,6 +270,10 @@ CREATE UNIQUE INDEX my_idx ON t1 (c1, c2, c3) WHERE c1 < c3;
      testFormat('UPDATE foo SET bar = baz WHERE 1;');
    });

    test('with returning', () {
      testFormat('UPDATE foo SET bar = baz RETURNING *');
    });

    const modes = [
      'OR ABORT',
      'OR FAIL',