Mirror of https://github.com/AMT-Cheif/drift.git

Commit 9c82dcd313: Merge remote-tracking branch 'origin/develop' into develop
@@ -9,6 +9,8 @@ import '../_shared/todo_tables.drift.dart';
 class EntryWithCategory {
   EntryWithCategory(this.entry, this.category);
 
+  // The classes are generated by drift for each of the tables involved in the
+  // join.
   final TodoItem entry;
   final Category? category;
 }

@@ -69,8 +71,6 @@ extension SelectExamples on CanUseCommonTables {
       leftOuterJoin(categories, categories.id.equalsExp(todoItems.category)),
     ]);
 
-    // see next section on how to parse the result
-    // #enddocregion joinIntro
     // #docregion results
     return query.watch().map((rows) {
       return rows.map((row) {

@@ -81,7 +81,6 @@ extension SelectExamples on CanUseCommonTables {
       }).toList();
     });
     // #enddocregion results
-    // #docregion joinIntro
   }
   // #enddocregion joinIntro
 
@@ -1,21 +1,19 @@
+// #docregion after_generation
 // #docregion before_generation
 import 'package:drift/drift.dart';
 
 // #enddocregion before_generation
+// #enddocregion after_generation
 
-// #docregion open
-// These imports are necessary to open the sqlite3 database
+// #docregion after_generation
+// These additional imports are necessary to open the sqlite3 database
 import 'dart:io';
 
 import 'package:drift/native.dart';
 import 'package:path_provider/path_provider.dart';
 import 'package:path/path.dart' as p;
 import 'package:sqlite3/sqlite3.dart';
 import 'package:sqlite3_flutter_libs/sqlite3_flutter_libs.dart';
 
-// ... the TodoItems table definition stays the same
-// #enddocregion open
 
 // #docregion before_generation
 part 'database.g.dart';
 
@@ -27,25 +25,22 @@ class TodoItems extends Table {
   IntColumn get category => integer().nullable()();
 }
 // #enddocregion table
-// #docregion open
 
 @DriftDatabase(tables: [TodoItems])
 class AppDatabase extends _$AppDatabase {
-  // #enddocregion open
+  // #enddocregion before_generation
+  // #enddocregion after_generation
   // After generating code, this class needs to define a `schemaVersion` getter
   // and a constructor telling drift where the database should be stored.
   // These are described in the getting started guide: https://drift.simonbinder.eu/getting-started/#open
-  // #enddocregion before_generation
-  // #docregion open
+  // #docregion after_generation
  AppDatabase() : super(_openConnection());
 
  @override
  int get schemaVersion => 1;
  // #docregion before_generation
 }
-// #enddocregion before_generation, open
+// #enddocregion before_generation
 
-// #docregion open
 
 LazyDatabase _openConnection() {
   // the LazyDatabase util lets us find the right location for the file async.

@@ -70,7 +65,7 @@ LazyDatabase _openConnection() {
     return NativeDatabase.createInBackground(file);
   });
 }
-// #enddocregion open
+// #enddocregion after_generation
 
 class WidgetsFlutterBinding {
   static void ensureInitialized() {}
@@ -114,14 +114,14 @@ Of course, you can also join multiple tables:
 
 {% include "blocks/snippet" snippets = snippets name = 'otherTodosInSameCategory' %}
 
-## Parsing results
+### Parsing results
 
 Calling `get()` or `watch` on a select statement with join returns a `Future` or `Stream` of
 `List<TypedResult>`, respectively. Each `TypedResult` represents a row from which data can be
 read. It contains a `rawData` getter to obtain the raw columns. But more importantly, the
 `readTable` method can be used to read a data class from a table.
 
-In the example query above, we can read the todo entry and the category from each row like this:
+In the example query above, we've read the todo entry and the category from each row like this:
 
 {% include "blocks/snippet" snippets = snippets name = 'results' %}
 
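For illustration, a minimal sketch of the pattern the `results` snippet describes, written as it would appear inside the database class (or the `SelectExamples` extension shown in this commit) and reusing the `EntryWithCategory` class added above:

Stream<List<EntryWithCategory>> entriesWithCategory() {
  final query = select(todoItems).join([
    leftOuterJoin(categories, categories.id.equalsExp(todoItems.category)),
  ]);

  return query.watch().map((rows) {
    return rows.map((row) {
      // readTable throws if the row contains no data for the table; for the
      // nullable side of a left outer join, readTableOrNull returns null.
      return EntryWithCategory(
        row.readTable(todoItems),
        row.readTableOrNull(categories),
      );
    }).toList();
  });
}
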
@@ -30,6 +30,9 @@ There are a couple of things that should be kept in mind when working with trans
    on the transaction after it has been closed! This can cause data loss or runtime crashes.
    Drift contains some runtime checks against this misuse and will throw an exception when a transaction
    is used after being closed.
+   A transaction is active during all asynchronous calls made in a `transaction` block, so transactions
+   also can't schedule timers or other operations using the database (as those would try to use the
+   transaction after the main `transaction` block has completed).
 2. __Different behavior of stream queries__: Inside a `transaction` callback, stream queries behave
    differently. If you're creating streams inside a transaction, check the next section to learn how
    they behave.
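As a hedged sketch of the rule added above (every database call inside the callback is awaited before the block returns; the table, companion and method names come from the getting-started example in this commit and are only illustrative):

Future<void> moveEntry(AppDatabase db, TodoItem entry, int newCategory) {
  return db.transaction(() async {
    // Awaiting every statement keeps the transaction open until all of them
    // have completed.
    await (db.update(db.todoItems)..whereSamePrimaryKey(entry))
        .write(TodoItemsCompanion(category: Value(newCategory)));

    // Don't start timers or unawaited futures in here: they would run after
    // the transaction has been closed and trigger the StateError shown later
    // in this commit.
  });
}
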
@@ -179,6 +179,8 @@ We currently support the following extensions:
 - `rtree`: Static analysis support for the [R*Tree](https://www.sqlite.org/rtree.html) extension.
   Enabling this option is safe when using a `NativeDatabase` with `sqlite3_flutter_libs`,
   which compiles sqlite3 with the R*Tree extension enabled.
+- [geopoly](https://www.sqlite.org/geopoly.html), a generalization of the R*Tree module supporting more complex
+  polygons.
 - `moor_ffi`: Enables support for functions that are only available when using a `NativeDatabase`. This contains `pow`, `sqrt` and a variety
   of trigonometric functions. Details on those functions are available [here]({{ "../Platforms/vm.md#moor-only-functions" | pageUrl }}).
 - `math`: Assumes that sqlite3 was compiled with [math functions](https://www.sqlite.org/lang_mathfunc.html).
@@ -116,8 +116,10 @@ to determine the column type based on the declared type name.
 Additionally, columns that have the type name `BOOLEAN` or `DATETIME` will have
 `bool` or `DateTime` as their Dart counterpart.
 Booleans are stored as `INTEGER` (either `0` or `1`). Datetimes are stored as
-unix timestamps (`INTEGER`) or ISO-8601 (`TEXT`) depending on a configurable
-build option.
+unix timestamps (`INTEGER`) or ISO-8601 (`TEXT`) [depending on a configurable build option]({{ '../Dart API/tables.md#datetime-options' | pageUrl }}).
+For integers that should be represented as a `BigInt` in Dart (i.e. to have better compatibility with large numbers when compiling to JS),
+define the column with the `INT64` type.
 
 Dart enums can automatically be stored by their index by using an `ENUM()` type
 referencing the Dart enum class:
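As a sketch of the Dart side of that `INT64` mapping (the `foo`/`bar` names mirror the sqlparser test added later in this commit; the generated `FooCompanion` class and the `db.foo` getter are hypothetical generated names):

// With `CREATE TABLE foo (bar INT64 NOT NULL);` in a drift file, the column is
// exposed as a Dart BigInt, which keeps its precision when compiled to JS.
Future<void> insertLargeValue(MyDatabase db) async {
  await db.into(db.foo).insert(
        FooCompanion.insert(bar: BigInt.parse('9007199254740993')),
      );
}
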
@@ -81,7 +81,7 @@ to store todo items for a todo list app.
 Everything there is to know about defining tables in Dart is described on the [Dart tables]({{'Dart API/tables.md' | pageUrl}}) page.
 If you prefer using SQL to define your tables, drift supports that too! You can read all about the [SQL API]({{ 'SQL API/index.md' | pageUrl }}) here.
 
-For now, the contents of `database.dart` are:
+For now, populate the contents of `database.dart` with:
 
 {% include "blocks/snippet" snippets = snippets name = 'before_generation' %}
 

@@ -97,10 +97,11 @@ After running either command, the `database.g.dart` file containing the generate
 class will have been generated.
 You will now see errors related to missing overrides and a missing constructor. The constructor
 is responsible for telling drift how to open the database. The `schemaVersion` getter is relevant
-for migrations after changing the database, we can leave it at `1` for now. The database class
-now looks like this:
-<a name="open">
-{% include "blocks/snippet" snippets = snippets name = 'open' %}
+for migrations after changing the database, we can leave it at `1` for now. Update `database.dart`
+so it now looks like this:
+<a name="open"></a>
+{% include "blocks/snippet" snippets = snippets name = 'after_generation' %}
 
 The Android-specific workarounds are necessary because sqlite3 attempts to use `/tmp` to store
 private data on unix-like systems, which is forbidden on Android. We also use this opportunity
@@ -1,4 +1,11 @@
-## 2.17.0-dev
+## 2.18.0-dev
 
+- Add `AggregateFunctionExpression` to write custom [aggregate function](https://www.sqlite.org/lang_aggfunc.html)
+  invocations in the Dart query builder.
+- The `json_group_array` and `jsonb_group_array` functions now contain an `orderBy`
+  and `filter` parameter.
+
+## 2.17.0
+
 - Adds `companion` entry to `DataClassName` to override the name of the
   generated companion class.
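A short sketch of the two new parameters described in that changelog entry (the invocation mirrors the tests added later in this commit; the `todoItems` table and its `title` column are only illustrative):

Future<String?> titlesAsJson(MyDatabase db) async {
  final titles = jsonGroupArray(
    db.todoItems.title,
    orderBy: OrderBy([OrderingTerm.asc(db.todoItems.title)]),
    filter: db.todoItems.title.length.isBiggerOrEqualValue(1),
  );

  final query = db.selectOnly(db.todoItems)..addColumns([titles]);
  final row = await query.getSingle();
  // The result is a JSON array encoded as a string, e.g. '["a","b"]'.
  return row.read(titles);
}
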
@@ -29,6 +29,7 @@ targets:
           modules:
             - json1
             - fts5
+            - geopoly
       build_web_compilers:entrypoint:
         generate_for:
           - "web/drift_worker.dart"

@@ -59,3 +60,4 @@ targets:
           modules:
             - json1
             - fts5
+            - geopoly
@@ -8,9 +8,13 @@ import 'dart:typed_data';
 import '../src/runtime/query_builder/query_builder.dart';
 import '../src/runtime/types/mapping.dart';
 
+/// The type used for the `_shape` column in virtual `GEOPOLY` tables.
 ///
+/// This type is responsible for representing shape values in Dart. It is
+/// created by drift when the `geopoly` extension is enabled and a `CREATE
+/// VIRTUAL TABLE USING geopoly` table is declared in a `.drift` file.
 final class GeopolyPolygonType implements CustomSqlType<GeopolyPolygon> {
-  ///
+  /// Default constant constructor for the geopoly type.
   const GeopolyPolygonType();
 
   @override

@@ -43,29 +47,33 @@ final class GeopolyPolygonType implements CustomSqlType<GeopolyPolygon> {
   }
 }
 
-/// In Geopoly, a polygon can be text or a blob
+/// In Geopoly, a polygon can be text or a blob.
 sealed class GeopolyPolygon {
   const GeopolyPolygon._();
 
+  /// Creates a geopoly shape from a textual representation listing its points.
+  ///
+  /// For details on the syntax for [value], see https://www.sqlite.org/geopoly.html.
   const factory GeopolyPolygon.text(String value) = GeopolyPolygonString;
 
+  /// Creates a geopoly shape from the binary representation used by sqlite3.
   const factory GeopolyPolygon.blob(Uint8List value) = GeopolyPolygonBlob;
 }
 
-///
+/// A [GeopolyPolygon] being described as text.
 final class GeopolyPolygonString extends GeopolyPolygon {
-  ///
+  /// The textual description of the polygon.
   final String value;
 
-  ///
+  /// Creates a polygon from the underlying textual [value].
   const GeopolyPolygonString(this.value) : super._();
 }
 
-///
+/// A [GeopolyPolygon] being described as binary data.
 final class GeopolyPolygonBlob extends GeopolyPolygon {
-  ///
+  /// The binary description of the polygon.
   final Uint8List value;
 
-  ///
+  /// Creates a polygon from the underlying binary [value].
   const GeopolyPolygonBlob(this.value) : super._();
 }
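For context, a minimal sketch of how these constructors are used, mirroring the integration test added later in this commit (the `geopoly_test` table, its companion and the `_GeopolyTestDatabase` class come from that test's `geopoly.drift` file and generated code):

// A triangle in the textual syntax documented at https://www.sqlite.org/geopoly.html.
const triangle = GeopolyPolygon.text('[[0,0],[1,0],[0.5,1],[0,0]]');

Future<int> insertTriangle(_GeopolyTestDatabase db) {
  return db.geopolyTest.insertOne(
    GeopolyTestCompanion.insert(shape: Value(triangle)),
  );
}
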
@@ -144,8 +144,13 @@ extension JsonExtensions on Expression<String> {
 /// all emails in that folder.
 /// This string could be turned back into a list with
 /// `(json.decode(row.read(subjects)!) as List).cast<String>()`.
-Expression<String> jsonGroupArray(Expression value) {
-  return FunctionCallExpression('json_group_array', [value]);
+Expression<String> jsonGroupArray(
+  Expression value, {
+  OrderBy? orderBy,
+  Expression<bool>? filter,
+}) {
+  return AggregateFunctionExpression('json_group_array', [value],
+      orderBy: orderBy, filter: filter);
 }
 
 /// Returns a binary representation of a JSON array containing the result of

@@ -153,8 +158,13 @@ Expression<String> jsonGroupArray(Expression value) {
 ///
 /// See [jsonGroupArray], the variant of this function returning a textual
 /// description, for more details and an example.
-Expression<Uint8List> jsonbGroupArray(Expression value) {
-  return FunctionCallExpression('jsonb_group_array', [value]);
+Expression<Uint8List> jsonbGroupArray(
+  Expression value, {
+  OrderBy? orderBy,
+  Expression<bool>? filter,
+}) {
+  return AggregateFunctionExpression('jsonb_group_array', [value],
+      orderBy: orderBy, filter: filter);
 }
 
 List<Expression> _groupObjectArgs(Map<Expression<String>, Expression> values) {

@@ -140,7 +140,7 @@ abstract class _TransactionExecutor extends _BaseExecutor
 
     if (_closed) {
       throw StateError(
-          "A tranaction was used after being closed. Please check that you're "
+          "A transaction was used after being closed. Please check that you're "
          'awaiting all database operations inside a `transaction` block.');
     }
   }
@@ -12,7 +12,7 @@ part of '../query_builder.dart';
 /// This is equivalent to the `COUNT(*) FILTER (WHERE filter)` sql function. The
 /// filter will be omitted if null.
 Expression<int> countAll({Expression<bool>? filter}) {
-  return _AggregateExpression('COUNT', const [_StarFunctionParameter()],
+  return AggregateFunctionExpression('COUNT', const [_StarFunctionParameter()],
      filter: filter);
 }
 

@@ -26,7 +26,7 @@ extension BaseAggregate<DT extends Object> on Expression<DT> {
   /// counted twice.
   /// {@macro drift_aggregate_filter}
   Expression<int> count({bool distinct = false, Expression<bool>? filter}) {
-    return _AggregateExpression('COUNT', [this],
+    return AggregateFunctionExpression('COUNT', [this],
        filter: filter, distinct: distinct);
   }
 

@@ -35,14 +35,14 @@ extension BaseAggregate<DT extends Object> on Expression<DT> {
   /// If there are no non-null values in the group, returns null.
   /// {@macro drift_aggregate_filter}
   Expression<DT> max({Expression<bool>? filter}) =>
-      _AggregateExpression('MAX', [this], filter: filter);
+      AggregateFunctionExpression('MAX', [this], filter: filter);
 
   /// Return the minimum of all non-null values in this group.
   ///
   /// If there are no non-null values in the group, returns null.
   /// {@macro drift_aggregate_filter}
   Expression<DT> min({Expression<bool>? filter}) =>
-      _AggregateExpression('MIN', [this], filter: filter);
+      AggregateFunctionExpression('MIN', [this], filter: filter);
 
   /// Returns the concatenation of all non-null values in the current group,
   /// joined by the [separator].

@@ -71,7 +71,7 @@ extension BaseAggregate<DT extends Object> on Expression<DT> {
          'Cannot use groupConcat with distinct: true and a custom separator');
     }
 
-    return _AggregateExpression(
+    return AggregateFunctionExpression(
      'GROUP_CONCAT',
      [
        this,

@@ -89,21 +89,21 @@ extension ArithmeticAggregates<DT extends num> on Expression<DT> {
   ///
   /// {@macro drift_aggregate_filter}
   Expression<double> avg({Expression<bool>? filter}) =>
-      _AggregateExpression('AVG', [this], filter: filter);
+      AggregateFunctionExpression('AVG', [this], filter: filter);
 
   /// Return the maximum of all non-null values in this group.
   ///
   /// If there are no non-null values in the group, returns null.
   /// {@macro drift_aggregate_filter}
   Expression<DT> max({Expression<bool>? filter}) =>
-      _AggregateExpression('MAX', [this], filter: filter);
+      AggregateFunctionExpression('MAX', [this], filter: filter);
 
   /// Return the minimum of all non-null values in this group.
   ///
   /// If there are no non-null values in the group, returns null.
   /// {@macro drift_aggregate_filter}
   Expression<DT> min({Expression<bool>? filter}) =>
-      _AggregateExpression('MIN', [this], filter: filter);
+      AggregateFunctionExpression('MIN', [this], filter: filter);
 
   /// Calculate the sum of all non-null values in the group.
   ///

@@ -115,7 +115,7 @@ extension ArithmeticAggregates<DT extends num> on Expression<DT> {
   /// value and doesn't throw an overflow exception.
   /// {@macro drift_aggregate_filter}
   Expression<DT> sum({Expression<bool>? filter}) =>
-      _AggregateExpression('SUM', [this], filter: filter);
+      AggregateFunctionExpression('SUM', [this], filter: filter);
 
   /// Calculate the sum of all non-null values in the group.
   ///

@@ -123,7 +123,7 @@ extension ArithmeticAggregates<DT extends num> on Expression<DT> {
   /// uses floating-point values internally.
   /// {@macro drift_aggregate_filter}
   Expression<double> total({Expression<bool>? filter}) =>
-      _AggregateExpression('TOTAL', [this], filter: filter);
+      AggregateFunctionExpression('TOTAL', [this], filter: filter);
 }
 
 /// Provides aggregate functions that are available for BigInt expressions.
@@ -197,16 +197,41 @@ extension DateTimeAggregate on Expression<DateTime> {
   }
 }
 
-class _AggregateExpression<D extends Object> extends Expression<D> {
+/// An expression invoking an [aggregate function](https://www.sqlite.org/lang_aggfunc.html).
+///
+/// Aggregate functions, like `count()` or `sum()` collapse the entire data set
+/// (or a partition of it, if `GROUP BY` is used) into a single value.
+///
+/// Drift exposes direct bindings to most aggregate functions (e.g. via
+/// [BaseAggregate.count]). This class is useful when writing custom aggregate
+/// function invocations.
+final class AggregateFunctionExpression<D extends Object>
+    extends Expression<D> {
+  /// The name of the aggregate function to invoke.
   final String functionName;
-  final bool distinct;
-  final List<FunctionParameter> parameter;
 
+  /// Whether only distinct rows should be passed to the function.
+  final bool distinct;
+
+  /// The arguments to pass to the function.
+  final List<FunctionParameter> arguments;
+
+  /// The order in which rows of the current group should be passed to the
+  /// aggregate function.
+  final OrderBy? orderBy;
+
+  /// An optional filter clause only passing rows matching this condition into
+  /// the function.
   final Where? filter;
 
-  _AggregateExpression(this.functionName, this.parameter,
-      {Expression<bool>? filter, this.distinct = false})
-      : filter = filter != null ? Where(filter) : null;
+  /// Creates an aggregate function expression from the syntactic components.
+  AggregateFunctionExpression(
+    this.functionName,
+    this.arguments, {
+    Expression<bool>? filter,
+    this.distinct = false,
+    this.orderBy,
+  }) : filter = filter != null ? Where(filter) : null;
 
   @override
   final Precedence precedence = Precedence.primary;

@@ -220,7 +245,11 @@ class _AggregateExpression<D extends Object> extends Expression<D> {
     if (distinct) {
       context.buffer.write('DISTINCT ');
     }
-    _writeCommaSeparated(context, parameter);
+    _writeCommaSeparated(context, arguments);
+    if (orderBy case final orderBy?) {
+      context.writeWhitespace();
+      orderBy.writeInto(context);
+    }
     context.buffer.write(')');
 
     if (filter != null) {

@@ -233,20 +262,20 @@ class _AggregateExpression<D extends Object> extends Expression<D> {
   @override
   int get hashCode {
     return Object.hash(functionName, distinct,
-        const ListEquality<Object?>().hash(parameter), filter);
+        const ListEquality<Object?>().hash(arguments), orderBy, filter);
   }
 
   @override
   bool operator ==(Object other) {
-    if (!identical(this, other) && other.runtimeType != runtimeType) {
+    if (!identical(this, other) && other is! AggregateFunctionExpression<D>) {
       return false;
     }
 
-    // ignore: test_types_in_equals
-    final typedOther = other as _AggregateExpression;
+    final typedOther = other as AggregateFunctionExpression<D>;
     return typedOther.functionName == functionName &&
        typedOther.distinct == distinct &&
-        const ListEquality<Object?>().equals(typedOther.parameter, parameter) &&
+        const ListEquality<Object?>().equals(typedOther.arguments, arguments) &&
+        typedOther.orderBy == orderBy &&
        typedOther.filter == filter;
   }
 }
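A hedged sketch of what the now-public class allows: a custom aggregate invocation that has no dedicated helper (sqlite3's built-in `group_concat` function and a `todoItems.title` column are used purely for illustration):

Expression<String> titlesOrdered(TodoItems todoItems) {
  // Writes roughly:
  // group_concat("title" ORDER BY "title" ASC) FILTER (WHERE "title" IS NOT NULL)
  return AggregateFunctionExpression<String>(
    'group_concat',
    [todoItems.title],
    orderBy: OrderBy([OrderingTerm.asc(todoItems.title)]),
    filter: todoItems.title.isNotNull(),
  );
}
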
@@ -1,6 +1,6 @@
 name: drift
 description: Drift is a reactive library to store relational data in Dart and Flutter applications.
-version: 2.16.0
+version: 2.17.0
 repository: https://github.com/simolus3/drift
 homepage: https://drift.simonbinder.eu/
 issue_tracker: https://github.com/simolus3/drift/issues

@@ -30,7 +30,7 @@ dev_dependencies:
   drift_dev: any
   drift_testcases:
     path: ../extras/integration_tests/drift_testcases
-  http: ^0.13.4
+  http: ^1.2.1
   lints: ^3.0.0
   uuid: ^4.0.0
   build_runner: ^2.0.0

@@ -39,4 +39,4 @@ dev_dependencies:
   rxdart: ^0.27.0
   shelf: ^1.3.0
   test_descriptor: ^2.0.1
-  vm_service: ^13.0.0
+  vm_service: ^14.0.0

@@ -0,0 +1,3 @@
+CREATE VIRTUAL TABLE geopoly_test USING geopoly(a);
+
+area: SELECT geopoly_area(_shape) FROM geopoly_test WHERE rowid = ?;
@@ -0,0 +1,49 @@
+@TestOn('vm')
+import 'package:drift/drift.dart';
+import 'package:drift/native.dart';
+import 'package:drift/extensions/geopoly.dart';
+import 'package:sqlite3/sqlite3.dart';
+import 'package:test/test.dart';
+
+import '../test_utils/database_vm.dart';
+
+part 'geopoly_integration_test.g.dart';
+
+void main() {
+  preferLocalSqlite3();
+
+  test(
+    'can access geopoly types',
+    () async {
+      final database = _GeopolyTestDatabase(NativeDatabase.memory());
+      expect(database.geopolyTest.shape.type, isA<GeopolyPolygonType>());
+
+      final id =
+          await database.geopolyTest.insertOne(GeopolyTestCompanion.insert(
+        shape: Value(GeopolyPolygon.text('[[0,0],[1,0],[0.5,1],[0,0]]')),
+      ));
+
+      final area = await database.area(id).getSingle();
+      expect(area, 0.5);
+    },
+    skip: _canUseGeopoly()
+        ? null
+        : 'Cannot test, your sqlite3 does not support geopoly.',
+  );
+}
+
+bool _canUseGeopoly() {
+  final db = sqlite3.openInMemory();
+  final result = db
+      .select('SELECT sqlite_compileoption_used(?)', ['ENABLE_GEOPOLY']).single;
+  db.dispose();
+  return result.values[0] == 1;
+}
+
+@DriftDatabase(include: {'geopoly.drift'})
+class _GeopolyTestDatabase extends _$_GeopolyTestDatabase {
+  _GeopolyTestDatabase(super.e);
+
+  @override
+  int get schemaVersion => 1;
+}
@@ -0,0 +1,226 @@
+// GENERATED CODE - DO NOT MODIFY BY HAND
+
+part of 'geopoly_integration_test.dart';
+
+// ignore_for_file: type=lint
+class GeopolyTest extends Table
+    with
+        TableInfo<GeopolyTest, GeopolyTestData>,
+        VirtualTableInfo<GeopolyTest, GeopolyTestData> {
+  @override
+  final GeneratedDatabase attachedDatabase;
+  final String? _alias;
+  GeopolyTest(this.attachedDatabase, [this._alias]);
+  static const VerificationMeta _shapeMeta = const VerificationMeta('shape');
+  late final GeneratedColumn<GeopolyPolygon> shape =
+      GeneratedColumn<GeopolyPolygon>('_shape', aliasedName, true,
+          type: const GeopolyPolygonType(),
+          requiredDuringInsert: false,
+          $customConstraints: '');
+  static const VerificationMeta _aMeta = const VerificationMeta('a');
+  late final GeneratedColumn<DriftAny> a = GeneratedColumn<DriftAny>(
+      'a', aliasedName, true,
+      type: DriftSqlType.any,
+      requiredDuringInsert: false,
+      $customConstraints: '');
+  @override
+  List<GeneratedColumn> get $columns => [shape, a];
+  @override
+  String get aliasedName => _alias ?? actualTableName;
+  @override
+  String get actualTableName => $name;
+  static const String $name = 'geopoly_test';
+  @override
+  VerificationContext validateIntegrity(Insertable<GeopolyTestData> instance,
+      {bool isInserting = false}) {
+    final context = VerificationContext();
+    final data = instance.toColumns(true);
+    if (data.containsKey('_shape')) {
+      context.handle(
+          _shapeMeta, shape.isAcceptableOrUnknown(data['_shape']!, _shapeMeta));
+    }
+    if (data.containsKey('a')) {
+      context.handle(_aMeta, a.isAcceptableOrUnknown(data['a']!, _aMeta));
+    }
+    return context;
+  }
+
+  @override
+  Set<GeneratedColumn> get $primaryKey => const {};
+  @override
+  GeopolyTestData map(Map<String, dynamic> data, {String? tablePrefix}) {
+    final effectivePrefix = tablePrefix != null ? '$tablePrefix.' : '';
+    return GeopolyTestData(
+      shape: attachedDatabase.typeMapping
+          .read(const GeopolyPolygonType(), data['${effectivePrefix}_shape']),
+      a: attachedDatabase.typeMapping
+          .read(DriftSqlType.any, data['${effectivePrefix}a']),
+    );
+  }
+
+  @override
+  GeopolyTest createAlias(String alias) {
+    return GeopolyTest(attachedDatabase, alias);
+  }
+
+  @override
+  bool get dontWriteConstraints => true;
+  @override
+  String get moduleAndArgs => 'geopoly(a)';
+}
+
+class GeopolyTestData extends DataClass implements Insertable<GeopolyTestData> {
+  final GeopolyPolygon? shape;
+  final DriftAny? a;
+  const GeopolyTestData({this.shape, this.a});
+  @override
+  Map<String, Expression> toColumns(bool nullToAbsent) {
+    final map = <String, Expression>{};
+    if (!nullToAbsent || shape != null) {
+      map['_shape'] =
+          Variable<GeopolyPolygon>(shape, const GeopolyPolygonType());
+    }
+    if (!nullToAbsent || a != null) {
+      map['a'] = Variable<DriftAny>(a);
+    }
+    return map;
+  }
+
+  GeopolyTestCompanion toCompanion(bool nullToAbsent) {
+    return GeopolyTestCompanion(
+      shape:
+          shape == null && nullToAbsent ? const Value.absent() : Value(shape),
+      a: a == null && nullToAbsent ? const Value.absent() : Value(a),
+    );
+  }
+
+  factory GeopolyTestData.fromJson(Map<String, dynamic> json,
+      {ValueSerializer? serializer}) {
+    serializer ??= driftRuntimeOptions.defaultSerializer;
+    return GeopolyTestData(
+      shape: serializer.fromJson<GeopolyPolygon?>(json['_shape']),
+      a: serializer.fromJson<DriftAny?>(json['a']),
+    );
+  }
+  factory GeopolyTestData.fromJsonString(String encodedJson,
+          {ValueSerializer? serializer}) =>
+      GeopolyTestData.fromJson(
+          DataClass.parseJson(encodedJson) as Map<String, dynamic>,
+          serializer: serializer);
+  @override
+  Map<String, dynamic> toJson({ValueSerializer? serializer}) {
+    serializer ??= driftRuntimeOptions.defaultSerializer;
+    return <String, dynamic>{
+      '_shape': serializer.toJson<GeopolyPolygon?>(shape),
+      'a': serializer.toJson<DriftAny?>(a),
+    };
+  }
+
+  GeopolyTestData copyWith(
+          {Value<GeopolyPolygon?> shape = const Value.absent(),
+          Value<DriftAny?> a = const Value.absent()}) =>
+      GeopolyTestData(
+        shape: shape.present ? shape.value : this.shape,
+        a: a.present ? a.value : this.a,
+      );
+  @override
+  String toString() {
+    return (StringBuffer('GeopolyTestData(')
+          ..write('shape: $shape, ')
+          ..write('a: $a')
+          ..write(')'))
+        .toString();
+  }
+
+  @override
+  int get hashCode => Object.hash(shape, a);
+  @override
+  bool operator ==(Object other) =>
+      identical(this, other) ||
+      (other is GeopolyTestData &&
+          other.shape == this.shape &&
+          other.a == this.a);
+}
+
+class GeopolyTestCompanion extends UpdateCompanion<GeopolyTestData> {
+  final Value<GeopolyPolygon?> shape;
+  final Value<DriftAny?> a;
+  final Value<int> rowid;
+  const GeopolyTestCompanion({
+    this.shape = const Value.absent(),
+    this.a = const Value.absent(),
+    this.rowid = const Value.absent(),
+  });
+  GeopolyTestCompanion.insert({
+    this.shape = const Value.absent(),
+    this.a = const Value.absent(),
+    this.rowid = const Value.absent(),
+  });
+  static Insertable<GeopolyTestData> custom({
+    Expression<GeopolyPolygon>? shape,
+    Expression<DriftAny>? a,
+    Expression<int>? rowid,
+  }) {
+    return RawValuesInsertable({
+      if (shape != null) '_shape': shape,
+      if (a != null) 'a': a,
+      if (rowid != null) 'rowid': rowid,
+    });
+  }
+
+  GeopolyTestCompanion copyWith(
+      {Value<GeopolyPolygon?>? shape, Value<DriftAny?>? a, Value<int>? rowid}) {
+    return GeopolyTestCompanion(
+      shape: shape ?? this.shape,
+      a: a ?? this.a,
+      rowid: rowid ?? this.rowid,
+    );
+  }
+
+  @override
+  Map<String, Expression> toColumns(bool nullToAbsent) {
+    final map = <String, Expression>{};
+    if (shape.present) {
+      map['_shape'] =
+          Variable<GeopolyPolygon>(shape.value, const GeopolyPolygonType());
+    }
+    if (a.present) {
+      map['a'] = Variable<DriftAny>(a.value);
+    }
+    if (rowid.present) {
+      map['rowid'] = Variable<int>(rowid.value);
+    }
+    return map;
+  }
+
+  @override
+  String toString() {
+    return (StringBuffer('GeopolyTestCompanion(')
+          ..write('shape: $shape, ')
+          ..write('a: $a, ')
+          ..write('rowid: $rowid')
+          ..write(')'))
+        .toString();
+  }
+}
+
+abstract class _$_GeopolyTestDatabase extends GeneratedDatabase {
+  _$_GeopolyTestDatabase(QueryExecutor e) : super(e);
+  late final GeopolyTest geopolyTest = GeopolyTest(this);
+  Selectable<double?> area(int var1) {
+    return customSelect(
+        'SELECT geopoly_area(_shape) AS _c0 FROM geopoly_test WHERE "rowid" = ?1',
+        variables: [
+          Variable<int>(var1)
+        ],
+        readsFrom: {
+          geopolyTest,
+        }).map((QueryRow row) => row.readNullable<double>('_c0'));
+  }
+
+  @override
+  Iterable<TableInfo<Table, Object?>> get allTables =>
+      allSchemaEntities.whereType<TableInfo<Table, Object?>>();
+  @override
+  List<DatabaseSchemaEntity> get allSchemaEntities => [geopolyTest];
+}
@@ -101,15 +101,17 @@ void main() {
          db.todosTable, db.todosTable.category.equalsExp(db.categories.id))
     ]);
 
-    final stringArray = jsonGroupArray(db.todosTable.id);
-    final binaryArray = jsonbGroupArray(db.todosTable.id).json();
+    final stringArray = jsonGroupArray(db.todosTable.id,
+        orderBy: OrderBy([OrderingTerm.desc(db.todosTable.id)]));
+    final binaryArray = jsonbGroupArray(db.todosTable.id,
+        orderBy: OrderBy([OrderingTerm.asc(db.todosTable.id)])).json();
     query
       ..groupBy([db.categories.id])
      ..addColumns([stringArray, binaryArray]);
 
     final row = await query.getSingle();
-    expect(json.decode(row.read(stringArray)!), unorderedEquals([1, 3]));
-    expect(json.decode(row.read(binaryArray)!), unorderedEquals([1, 3]));
+    expect(json.decode(row.read(stringArray)!), [3, 1]);
+    expect(json.decode(row.read(binaryArray)!), [1, 3]);
   });
 
   test('json_group_object', () async {
@@ -42,6 +42,17 @@ void main() {
 
   test('aggregates', () {
     expect(jsonGroupArray(column), generates('json_group_array(col)'));
+    expect(
+      jsonGroupArray(
+        column,
+        orderBy: OrderBy([OrderingTerm.desc(column)]),
+        filter: column.length.isBiggerOrEqualValue(10),
+      ),
+      generates(
+        'json_group_array(col ORDER BY col DESC) FILTER (WHERE LENGTH(col) >= ?)',
+        [10],
+      ),
+    );
     expect(
       jsonGroupObject({
        Variable('foo'): column,

@@ -84,6 +95,17 @@ void main() {
 
   test('aggregates', () {
     expect(jsonbGroupArray(column), generates('jsonb_group_array(col)'));
+    expect(
+      jsonbGroupArray(
+        column,
+        orderBy: OrderBy([OrderingTerm.desc(column)]),
+        filter: column.length.isBiggerOrEqualValue(10),
+      ),
+      generates(
+        'jsonb_group_array(col ORDER BY col DESC) FILTER (WHERE LENGTH(col) >= ?)',
+        [10],
+      ),
+    );
     expect(
       jsonbGroupObject({
        Variable('foo'): column,
@@ -107,7 +107,7 @@ class _GeneratesSqlMatcher extends Matcher {
      matches = false;
     }
 
-    final argsMatchState = <String, Object?>{};
+    final argsMatchState = <Object?, Object?>{};
     if (_matchVariables != null &&
        !_matchVariables.matches(ctx.boundVariables, argsMatchState)) {
      matchState['vars'] = ctx.boundVariables;
@@ -1,4 +1,4 @@
-// Mocks generated by Mockito 5.4.3 from annotations
+// Mocks generated by Mockito 5.4.4 from annotations
 // in drift/test/test_utils/test_utils.dart.
 // Do not manually edit this file.
 

@@ -1,4 +1,4 @@
-## 2.17.0-dev
+## 2.17.0
 
 - Fix drift using the wrong import alias in generated part files.
 - Add the `use_sql_column_name_as_json_key` builder option.

@@ -1,6 +1,6 @@
 name: drift_dev
 description: Dev-dependency for users of drift. Contains the generator and development tools.
-version: 2.16.0
+version: 2.17.0
 repository: https://github.com/simolus3/drift
 homepage: https://drift.simonbinder.eu/
 issue_tracker: https://github.com/simolus3/drift/issues

@@ -30,9 +30,9 @@ dependencies:
   io: ^1.0.3
 
   # Drift-specific analysis and apis
-  drift: '>=2.16.0 <2.17.0'
+  drift: '>=2.17.0 <2.18.0'
   sqlite3: '>=0.1.6 <3.0.0'
-  sqlparser: '^0.34.0'
+  sqlparser: '^0.35.0'
 
   # Dart analysis
   analyzer: '>=5.12.0 <7.0.0'
@@ -295,4 +295,21 @@ class MyType implements CustomSqlType<String> {}
      expect(custom.expression.toString(), 'MyType()');
     }
   });
+
+  test('recognizes bigint columns', () async {
+    final state = await TestBackend.inTest({
+      'a|lib/a.drift': '''
+CREATE TABLE foo (
+  bar INT64 NOT NULL
+);
+''',
+    });
+
+    final file = await state.analyze('package:a/a.drift');
+    state.expectNoErrors();
+
+    final table = file.analyzedElements.single as DriftTable;
+    final column = table.columns.single;
+    expect(column.sqlType.builtin, DriftSqlType.bigInt);
+  });
 }

@@ -583,6 +583,8 @@ class MyDatabase {
   return everyElement(
     anyOf(
      isA<AssetId>().having((e) => e.extension, 'extension', '.json'),
+      // Allow reading SDK or other package assets to set up the analyzer.
+      isA<AssetId>().having((e) => e.package, 'package', isNot('a')),
      other,
     ),
   );
@@ -1,6 +1,8 @@
 # Uncomment this line to define a global platform for your project
 # platform :ios, '9.0'
 
+inhibit_all_warnings!
+
 # CocoaPods analytics sends network stats synchronously affecting flutter build latency.
 ENV['COCOAPODS_DISABLE_STATS'] = 'true'
 

@@ -1,5 +1,7 @@
 platform :osx, '10.14'
 
+inhibit_all_warnings!
+
 # CocoaPods analytics sends network stats synchronously affecting flutter build latency.
 ENV['COCOAPODS_DISABLE_STATS'] = 'true'
 

@@ -40,8 +40,8 @@ SPEC CHECKSUMS:
   FlutterMacOS: 8f6f14fa908a6fb3fba0cd85dbd81ec4b251fb24
   path_provider_foundation: 29f094ae23ebbca9d3d0cec13889cd9060c0e943
   sqlite3: fd89671d969f3e73efe503ce203e28b016b58f68
-  sqlite3_flutter_libs: 00a50503d69f7ab0fe85a5ff25b33082f4df4ce9
+  sqlite3_flutter_libs: 01f6f1a7e23e5b22dcbeb49fddab75ecfc1de530
 
-PODFILE CHECKSUM: 236401fc2c932af29a9fcf0e97baeeb2d750d367
+PODFILE CHECKSUM: 54c2ee7490cf98371b4d49ae3114180468e1140c
 
-COCOAPODS: 1.12.1
+COCOAPODS: 1.14.3
@@ -259,7 +259,7 @@
      isa = PBXProject;
      attributes = {
        LastSwiftUpdateCheck = 0920;
-        LastUpgradeCheck = 1430;
+        LastUpgradeCheck = 1510;
        ORGANIZATIONNAME = "";
        TargetAttributes = {
          331C80D4294CF70F00263BE5 = {

@@ -1,6 +1,6 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <Scheme
-   LastUpgradeVersion = "1430"
+   LastUpgradeVersion = "1510"
    version = "1.3">
    <BuildAction
      parallelizeBuildables = "YES"
@@ -1,7 +1,10 @@
-## 3.35.0-dev
+## 0.35.0
 
 - Fix parsing binary literals.
+- Expand support for `IN` expressions, they now support tuples on the left-hand
+  side and the shorthand syntax for table references and table-valued functions.
 - Drift extensions: Allow custom class names for `CREATE VIEW` statements.
+- Drift extensions: Support the `INT64` hint for `CREATE TABLE` statements.
 
 ## 0.34.1
 

@@ -157,7 +157,11 @@ class SchemaFromCreateTable {
 
     final upper = typeName.toUpperCase();
     if (upper.contains('INT')) {
-      return const ResolvedType(type: BasicType.int);
+      if (driftExtensions && upper.contains('INT64')) {
+        return const ResolvedType(type: BasicType.int, hints: [IsBigInt()]);
+      } else {
+        return const ResolvedType(type: BasicType.int);
+      }
     }
     if (upper.contains('CHAR') ||
        upper.contains('CLOB') ||
@@ -27,7 +27,7 @@ abstract class ReferenceScope {
   /// All available result sets that can also be seen in child scopes.
   ///
   /// Usually, this is the same list as the result sets being declared in this
-  /// scope. However, some exceptions apply (see e.g. [SubqueryInFromScope]).
+  /// scope. However, some exceptions apply (see e.g. [SourceScope]).
   Iterable<ResultSetAvailableInStatement> get resultSetAvailableToChildScopes =>
      const Iterable.empty();
 

@@ -167,8 +167,8 @@ mixin _HasParentScope on ReferenceScope {
 ///   them in a [StatementScope] as well.
 /// - subqueries appearing in a `FROM` clause _can't_ see outer columns and
 ///   tables. These statements are also wrapped in a [StatementScope], but a
-///   [SubqueryInFromScope] is insertted as an intermediatet scope to prevent
-///   the inner scope from seeing the outer columns.
+///   [SourceScope] is inserted as an intermediate scope to prevent the inner
+///   scope from seeing the outer columns.
 
 class StatementScope extends ReferenceScope with _HasParentScope {
   final ReferenceScope parent;
@@ -170,6 +170,16 @@ class ColumnResolver extends RecursiveVisitor<ColumnResolverContext, void> {
     visitExcept(e, e.foreignTable, arg);
   }
 
+  @override
+  void visitInExpression(InExpression e, ColumnResolverContext arg) {
+    if (e.inside case Queryable query) {
+      _handle(query, [], arg);
+      visitExcept(e, e.inside, arg);
+    } else {
+      super.visitInExpression(e, arg);
+    }
+  }
+
   @override
   void visitUpdateStatement(UpdateStatement e, ColumnResolverContext arg) {
     // Resolve CTEs first
@@ -199,6 +199,56 @@ class LintingVisitor extends RecursiveVisitor<void, void> {
     visitChildren(e, arg);
   }
 
+  @override
+  void visitInExpression(InExpression e, void arg) {
+    final expectedColumns = switch (e.left) {
+      Tuple(:var expressions) => expressions.length,
+      _ => 1,
+    };
+
+    switch (e.inside) {
+      case Tuple tuple:
+        for (final element in tuple.expressions) {
+          final actualColumns = switch (element) {
+            Tuple(:var expressions) => expressions.length,
+            _ => 1,
+          };
+
+          if (expectedColumns != actualColumns) {
+            context.reportError(AnalysisError(
+              type: AnalysisErrorType.other,
+              message: 'Expected $expectedColumns columns in this entry, got '
+                  '$actualColumns',
+              relevantNode: element,
+            ));
+          }
+        }
+      case SubQuery subquery:
+        final columns = subquery.select.resolvedColumns;
+        if (columns != null && columns.length != expectedColumns) {
+          context.reportError(AnalysisError(
+            type: AnalysisErrorType.other,
+            message: 'The subquery must return $expectedColumns columns, '
+                'it returns ${columns.length}',
+            relevantNode: subquery,
+          ));
+        }
+      case TableOrSubquery table:
+        final columns =
+            table.availableResultSet?.resultSet.resultSet?.resolvedColumns;
+        if (columns != null && columns.length != expectedColumns) {
+          context.reportError(AnalysisError(
+            type: AnalysisErrorType.other,
+            message: 'To be used in this `IN` expression, this table must '
+                'have $expectedColumns columns (it has ${columns.length}).',
+            relevantNode: table,
+          ));
+        }
+    }
+
+    visitChildren(e, arg);
+  }
+
   @override
   void visitIsExpression(IsExpression e, void arg) {
     if (e.distinctFromSyntax && options.version < SqliteVersion.v3_39) {

@@ -526,9 +576,9 @@ class LintingVisitor extends RecursiveVisitor<void, void> {
         isAllowed = !comparisons.any((e) => !isRowValue(e));
       }
     } else if (parent is InExpression) {
-      // In expressions are tricky. The rhs can always be a row value, but the
-      // lhs can only be a row value if the rhs is a subquery
-      isAllowed = e == parent.inside || parent.inside is SubQuery;
+      // For in expressions we have a more accurate analysis on whether tuples
+      // are allowed that looks at both the LHS and the RHS.
+      isAllowed = true;
    } else if (parent is SetComponent) {
      isAllowed = true;
    }
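A quick sketch of the new lint in action, assuming the json1-capable engine configured in the tests added below (the message text mirrors those tests):

import 'package:sqlparser/sqlparser.dart';

void main() {
  final engine = SqlEngine(EngineOptions(version: SqliteVersion.v3_38));
  // The LHS tuple has two columns but the entry `6` on the RHS only has one,
  // so the linter reports "Expected 2 columns in this entry, got 1" on `6`.
  final result = engine.analyze('SELECT (1, 2) IN ((4, 5), 6)');
  print(result.errors);
}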
@@ -136,6 +136,14 @@ class AstPreparingVisitor extends RecursiveVisitor<void, void> {
     visitChildren(e, arg);
   }
 
+  @override
+  void visitInExpression(InExpression e, void arg) {
+    // The RHS can use everything from the parent scope, but it can't add new
+    // table references that would be visible to others.
+    e.scope = StatementScope(e.scope);
+    visitChildren(e, arg);
+  }
+
   @override
   void visitNumberedVariable(NumberedVariable e, void arg) {
     _foundVariables.add(e);
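To illustrate the scoping comment above, a small sketch that needs no registered schema (it follows the "for table reference" test added further down): the IN target gets its own statement scope but can still see everything from the parent scope, here a CTE.

import 'package:sqlparser/sqlparser.dart';

void main() {
  final engine = SqlEngine();
  // `names` comes from the parent scope and resolves inside the IN target;
  // the single-column CTE also satisfies the arity lint, so no errors should
  // be reported here.
  final result =
      engine.analyze('WITH names AS (VALUES(1)) SELECT 1 IN names');
  print(result.errors); // should print [] (no errors)
}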
@@ -427,9 +427,11 @@ class TypeResolver extends RecursiveVisitor<TypeExpectation, void> {
   @override
   void visitInExpression(InExpression e, TypeExpectation arg) {
     session._checkAndResolve(e, const ResolvedType.bool(), arg);
-    session._addRelation(NullableIfSomeOtherIs(e, e.childNodes));
 
-    session._addRelation(CopyTypeFrom(e.inside, e.left, array: true));
+    if (e.inside case Expression inExpr) {
+      session._addRelation(NullableIfSomeOtherIs(e, [e.left, inExpr]));
+      session._addRelation(CopyTypeFrom(inExpr, e.left, array: true));
+    }
 
     visitChildren(e, const NoTypeExpectation());
   }
@@ -41,7 +41,7 @@ abstract class TableOrSubquery extends Queryable {
 /// set.
 class TableReference extends TableOrSubquery
     with ReferenceOwner
-    implements Renamable, ResolvesToResultSet {
+    implements Renamable, ResolvesToResultSet, InExpressionTarget {
   final String? schemaName;
   final String tableName;
   Token? tableNameToken;
@@ -213,7 +213,12 @@ class UsingConstraint extends JoinConstraint {
 }
 
 class TableValuedFunction extends Queryable
-    implements TableOrSubquery, SqlInvocation, Renamable, ResolvesToResultSet {
+    implements
+        TableOrSubquery,
+        SqlInvocation,
+        Renamable,
+        ResolvesToResultSet,
+        InExpressionTarget {
   @override
   final String name;
 
@@ -3,7 +3,7 @@ part of '../ast.dart';
 /// A tuple of values, denotes in brackets. `(<expr>, ..., <expr>)`.
 ///
 /// In sqlite, this is also called a "row value".
-class Tuple extends Expression {
+class Tuple extends Expression implements InExpressionTarget {
   /// The expressions appearing in this tuple.
   List<Expression> expressions;
 
@@ -180,10 +180,9 @@ class InExpression extends Expression {
   /// against. From the sqlite grammar, we support [Tuple] and a [SubQuery].
   /// We also support a [Variable] as syntax sugar - it will be expanded into a
   /// tuple of variables at runtime.
-  Expression inside;
+  InExpressionTarget inside;
 
-  InExpression({this.not = false, required this.left, required this.inside})
-      : assert(inside is Tuple || inside is Variable || inside is SubQuery);
+  InExpression({this.not = false, required this.left, required this.inside});
 
   @override
   R accept<A, R>(AstVisitor<A, R> visitor, A arg) {
@@ -191,7 +190,7 @@ class InExpression extends Expression {
   }
 
   @override
-  List<Expression> get childNodes => [left, inside];
+  List<AstNode> get childNodes => [left, inside];
 
   @override
   void transformChildren<A>(Transformer<A> transformer, A arg) {
@@ -200,6 +199,19 @@ class InExpression extends Expression {
   }
 }
 
+/// Possible values for the right-hand side of an [InExpression].
+///
+/// Valid subclasses are:
+/// - [Tuple], to check whether the LHS is equal to any of the elements in the
+///   tuple.
+/// - [SubQuery], to check whether the LHS is equal to any of the rows returned
+///   by the subquery.
+/// - [TableReference] and [TableValuedFunction], a short-hand for [SubQuery]s
+///   if the table or function only return one column.
+/// - [Variable] (only if drift extensions are enabled), drift's generator
+///   turns this into a tuple of variables at runtime.
+abstract class InExpressionTarget implements AstNode {}
+
 class Parentheses extends Expression {
   Token? openingLeft;
   Token? closingRight;
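For a rough overview, each right-hand side below parses into one of the InExpressionTarget implementations listed in the doc comment. This is a sketch, not taken from the commit; analysis against a real schema may of course still report errors.

import 'package:sqlparser/sqlparser.dart';

void main() {
  final engine = SqlEngine();
  for (final sql in [
    'SELECT x IN (1, 2, 3);', // Tuple
    'SELECT x IN (SELECT y FROM t);', // SubQuery
    'SELECT x IN t;', // TableReference
    'SELECT x IN json_each(x);', // TableValuedFunction
    'SELECT x IN ?;', // Variable (expanded by drift's generator)
  ]) {
    // All of these forms parse after this change.
    print(engine.parse(sql).errors);
  }
}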
@@ -2,7 +2,7 @@ part of '../ast.dart';
 
 /// A subquery, which is an expression. It is expected that the inner query
 /// only returns one column and one row.
-class SubQuery extends Expression {
+class SubQuery extends Expression implements InExpressionTarget {
   BaseSelectStatement select;
 
   SubQuery({required this.select});
@@ -1,6 +1,6 @@
 part of '../ast.dart';
 
-abstract class Variable extends Expression {
+abstract class Variable extends Expression implements InExpressionTarget {
   int? resolvedIndex;
 }
 
@@ -511,7 +511,20 @@ class Parser {
       final not = _matchOne(TokenType.not);
       _matchOne(TokenType.$in);
 
-      final inside = _variableOrNull() ?? _consumeTuple(orSubQuery: true);
+      InExpressionTarget inside;
+      if (_variableOrNull() case var variable?) {
+        inside = variable;
+      } else if (_check(TokenType.leftParen)) {
+        inside = _consumeTuple(orSubQuery: true) as InExpressionTarget;
+      } else {
+        final target = _tableOrSubquery();
+        // TableOrSubquery is either a table reference, a table-valued function,
+        // or a Subquery. We don't support subqueries, but they can't be parsed
+        // here because we would have entered the tuple case above.
+        assert(target is! SubQuery);
+        inside = target as InExpressionTarget;
+      }
 
       return InExpression(left: left, inside: inside, not: not)
         ..setSpan(left.first!, _previous);
     }
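A sketch of the AST the new parser branch produces, matching the parser test cases added further down; the result-column classes used here are assumed from the sqlparser AST rather than shown in this diff.

import 'package:sqlparser/sqlparser.dart';

void main() {
  final statement = SqlEngine().parse('SELECT x NOT IN "table";').rootNode
      as SelectStatement;
  final column = statement.columns.first as ExpressionResultColumn;
  final expr = column.expression as InExpression;

  print(expr.not); // true
  print(expr.inside is TableReference); // true - parsed by the new branch
}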
@@ -1,6 +1,6 @@
 name: sqlparser
 description: Parses sqlite statements and performs static analysis on them
-version: 0.34.1
+version: 0.35.0
 homepage: https://github.com/simolus3/drift/tree/develop/sqlparser
 repository: https://github.com/simolus3/drift
 #homepage: https://drift.simonbinder.eu/
@@ -363,4 +363,10 @@ SELECT * FROM cars
 
     expect(select.resolvedColumns?.map((e) => e.name), ['literal', 'bar']);
   });
+
+  test('error for nonexisting table in IN expression', () {
+    final query = engine.analyze('SELECT 1 IN no_such_table');
+    query.expectError('no_such_table',
+        type: AnalysisErrorType.referencedUnknownTable);
+  });
 }
@@ -6,7 +6,8 @@ import 'utils.dart';
 void main() {
   late SqlEngine engine;
   setUp(() {
-    engine = SqlEngine();
+    // enable json1 extension
+    engine = SqlEngine(EngineOptions(version: SqliteVersion.v3_38));
   });
 
   test('when using row value in select', () {

@@ -15,12 +16,6 @@ void main() {
         .expectError('(1, 2, 3)', type: AnalysisErrorType.rowValueMisuse);
   });
 
-  test('as left hand operator of in', () {
-    engine
-        .analyze('SELECT (1, 2, 3) IN (4, 5, 6)')
-        .expectError('(1, 2, 3)', type: AnalysisErrorType.rowValueMisuse);
-  });
-
   test('in BETWEEN expression', () {
     engine
         .analyze('SELECT 1 BETWEEN (1, 2, 3) AND 3')
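For context on the removed test: with the dedicated IN analysis, a row value on the left-hand side is no longer flagged as rowValueMisuse; mismatched arities are reported on the individual entries of the right-hand side instead, which the new "in expressions" group below covers. A sketch of the changed behaviour:

import 'package:sqlparser/sqlparser.dart';

void main() {
  final engine = SqlEngine(EngineOptions(version: SqliteVersion.v3_38));
  final result = engine.analyze('SELECT (1, 2, 3) IN (4, 5, 6)');
  // No rowValueMisuse on (1, 2, 3) anymore; instead, each scalar entry on
  // the right should be reported as having 1 column where 3 were expected.
  print(result.errors.map((e) => e.message));
}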
@@ -68,4 +63,33 @@ void main() {
         .expectNoError();
     });
   });
+
+  group('in expressions', () {
+    test('when tuple is expected', () {
+      engine.analyze('SELECT (1, 2) IN ((4, 5), 6)').expectError('6',
+          type: AnalysisErrorType.other,
+          message: contains('Expected 2 columns in this entry, got 1'));
+    });
+
+    test('when tuple is not expected', () {
+      engine.analyze('SELECT 1 IN ((4, 5), 6)').expectError('(4, 5)',
+          type: AnalysisErrorType.other,
+          message: contains('Expected 1 columns in this entry, got 2'));
+    });
+
+    test('for table reference', () {
+      engine
+          .analyze('WITH names AS (VALUES(1, 2, 3)) SELECT (1, 2) IN names')
+          .expectError('names',
+              type: AnalysisErrorType.other,
+              message: contains('must have 2 columns (it has 3)'));
+    });
+
+    test('for table-valued function', () {
+      engine.analyze("SELECT (1, 2) IN json_each('{}')").expectError(
+          "json_each('{}')",
+          type: AnalysisErrorType.other,
+          message: contains('this table must have 2 columns (it has 8)'));
+    });
+  });
 }
@@ -2,10 +2,10 @@ import 'package:sqlparser/sqlparser.dart';
 import 'package:test/test.dart';
 
 extension ExpectErrors on AnalysisContext {
-  void expectError(String lexeme, {AnalysisErrorType? type}) {
+  void expectError(String lexeme, {AnalysisErrorType? type, message}) {
     expect(
       errors,
-      [analysisErrorWith(lexeme: lexeme, type: type)],
+      [analysisErrorWith(lexeme: lexeme, type: type, message: message)],
     );
   }
 
@@ -103,7 +103,7 @@ void main() {
         SqlEngine(EngineOptions(driftOptions: const DriftSqlOptions()));
     final stmt = engine.parse('''
 CREATE TABLE foo (
-  a BOOL, b DATETIME, c DATE, d BOOLEAN NOT NULL
+  a BOOL, b DATETIME, c DATE, d BOOLEAN NOT NULL, e INT64
 )
 ''').rootNode;
 

@@ -114,6 +114,7 @@ void main() {
       ResolvedType(type: BasicType.int, hints: [IsDateTime()], nullable: true),
       ResolvedType(type: BasicType.int, hints: [IsDateTime()], nullable: true),
       ResolvedType(type: BasicType.int, hints: [IsBoolean()], nullable: false),
+      ResolvedType(type: BasicType.int, hints: [IsBigInt()], nullable: true),
     ]);
   });
 
@@ -160,6 +160,22 @@ final Map<String, Expression> _testCases = {
       ],
     ),
   ),
+  'x IN json_each(bar)': InExpression(
+    left: Reference(columnName: 'x'),
+    inside: TableValuedFunction(
+      'json_each',
+      ExprFunctionParameters(
+        parameters: [
+          Reference(columnName: 'bar'),
+        ],
+      ),
+    ),
+  ),
+  'x NOT IN "table"': InExpression(
+    not: true,
+    left: Reference(columnName: 'x'),
+    inside: TableReference('table'),
+  ),
   'CAST(3 + 4 AS TEXT)': CastExpression(
     BinaryExpression(
       NumericLiteral(3.0),
@@ -485,6 +485,9 @@ CREATE UNIQUE INDEX my_idx ON t1 (c1, c2, c3) WHERE c1 < c3;
   test('in', () {
     testFormat('SELECT x IN (SELECT * FROM foo);');
     testFormat('SELECT x NOT IN (SELECT * FROM foo);');
+    testFormat('SELECT x IN foo');
+    testFormat('SELECT x IN json_each(bar)');
+    testFormat('SELECT x IN :array');
   });
 
   test('boolean literals', () {