mirror of https://github.com/AMT-Cheif/drift.git

PostgreSQL support (#1507)

Support PostgreSQL as a dialect in the main package and with an implementation provided in another package.

Co-authored-by: Simon Binder <oss@simonbinder.eu>

This commit is contained in:
parent f2d0864890
commit fad654a5aa
@ -136,6 +136,18 @@ jobs:
    name: "Integration tests"
    needs: [compile_sqlite3]
    runs-on: ubuntu-20.04
    services:
      postgres:
        image: postgres
        env:
          POSTGRES_PASSWORD: postgres
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5432:5432
    steps:
      - uses: actions/checkout@v2
      - uses: cedx/setup-dart@v2
@ -58,12 +58,9 @@ At the moment, drift supports these options:
  `.moor` (or `.drift`) file from and to json, use their sql name instead of the generated Dart getter name
  (so a column named `user_name` would also use `user_name` as a json key instead of `userName`).
  You can always override the json key by using a `JSON KEY` column constraint
  (e.g. `user_name VARCHAR NOT NULL JSON KEY userName`)
  (e.g. `user_name VARCHAR NOT NULL JSON KEY userName`).
* `generate_connect_constructor`: Generate necessary code to support the [isolate runtime]({{ "isolates.md" | pageUrl }}).
  This is a build option because isolates are still experimental. This will be the default option eventually.
* `sqlite_modules`: This list can be used to enable sqlite extensions, like those for json or full-text search.
  Modules have to be enabled explicitly because they're not supported on all platforms. See the following section for
  details.
* `eagerly_load_dart_ast`: Drift's builder will load the resolved AST whenever it encounters a Dart file,
  instead of lazily when it reads a table. This is used to investigate rare builder crashes.
* `data_class_to_companions` (defaults to `true`): Controls whether drift will write the `toCompanion` method in generated
@ -84,10 +81,13 @@ At the moment, drift supports these options:
  The function has a parameter for each table that is available in the query, making it easier to get aliases right when using
  Dart placeholders.

## Assumed sqlite environment
## Assumed SQL environment

You can configure the assumed sqlite version and available extensions.
These options are used during analysis only and don't have an impact on the
You can configure the SQL dialect you want to target with the `sql` build option.
When using sqlite, you can further configure the assumed sqlite3 version and enabled
extensions for more accurate analysis.

Note that these options are used for static analysis only and don't have an impact on the
actual sqlite version at runtime.

To define the sqlite version to use, set `sqlite.version` to the `major.minor`
@ -99,8 +99,10 @@ targets:
      builders:
        drift_dev:
          options:
            sqlite:
              version: "3.34"
            sql:
              dialect: sqlite
              options:
                version: "3.34"
```

With that option, the generator will emit warnings when using a newer sqlite version.
@ -122,11 +124,13 @@ targets:
      builders:
        drift_dev:
          options:
            sqlite:
              modules:
                - json1
                - fts5
                - moor_ffi
            sql:
              options:
                dialect: sqlite
                modules:
                  - json1
                  - fts5
                  - moor_ffi
```

We currently support the following extensions:
@ -14,8 +14,10 @@ targets:
          named_parameters: true
          new_sql_code_generation: true
          scoped_dart_components: true
          sqlite:
            version: "3.35"
            modules:
              - json1
              - fts5
          sql:
            dialect: sqlite
            options:
              version: "3.35"
              modules:
                - json1
                - fts5
@ -167,17 +167,17 @@ class $TodoItemsTable extends TodoItems
|
|||
final VerificationMeta _idMeta = const VerificationMeta('id');
|
||||
late final GeneratedColumn<int?> id = GeneratedColumn<int?>(
|
||||
'id', aliasedName, false,
|
||||
typeName: 'INTEGER',
|
||||
type: const IntType(),
|
||||
requiredDuringInsert: false,
|
||||
defaultConstraints: 'PRIMARY KEY AUTOINCREMENT');
|
||||
final VerificationMeta _titleMeta = const VerificationMeta('title');
|
||||
late final GeneratedColumn<String?> title = GeneratedColumn<String?>(
|
||||
'title', aliasedName, false,
|
||||
typeName: 'TEXT', requiredDuringInsert: true);
|
||||
type: const StringType(), requiredDuringInsert: true);
|
||||
final VerificationMeta _contentMeta = const VerificationMeta('content');
|
||||
late final GeneratedColumn<String?> content = GeneratedColumn<String?>(
|
||||
'content', aliasedName, true,
|
||||
typeName: 'TEXT', requiredDuringInsert: false);
|
||||
type: const StringType(), requiredDuringInsert: false);
|
||||
@override
|
||||
List<GeneratedColumn> get $columns => [id, title, content];
|
||||
@override
|
||||
|
|
|
@ -17,5 +17,4 @@ export 'src/runtime/executor/connection_pool.dart';
|
|||
export 'src/runtime/executor/executor.dart';
|
||||
export 'src/runtime/query_builder/query_builder.dart';
|
||||
export 'src/runtime/types/sql_types.dart';
|
||||
export 'src/utils/expand_variables.dart';
|
||||
export 'src/utils/lazy_database.dart';
|
||||
|
|
|
@ -1,10 +1,12 @@
|
|||
/// Provides utilities around sql keywords, like optional escaping etc.
|
||||
library drift.sqlite_keywords;
|
||||
|
||||
/// Contains a set of all sqlite keywords, according to
|
||||
/// https://www.sqlite.org/lang_keywords.html. Drift will use this list to
|
||||
/// escape keywords.
|
||||
const sqliteKeywords = {
|
||||
import 'package:drift/drift.dart';
|
||||
|
||||
/// A set of SQL keywords.
|
||||
///
|
||||
/// Drift will escape column names and identifiers that appear in this set.
|
||||
const baseKeywords = {
|
||||
'ADD',
|
||||
'ABORT',
|
||||
'ACTION',
|
||||
|
@ -155,6 +157,40 @@ const sqliteKeywords = {
|
|||
'WITHOUT',
|
||||
};
|
||||
|
||||
/// Contains a set of all sqlite keywords, according to
|
||||
/// https://www.sqlite.org/lang_keywords.html. Drift will use this list to
|
||||
/// escape keywords.
|
||||
const sqliteKeywords = baseKeywords;
|
||||
|
||||
/// A set of keywords that need to be escaped on sqlite and aren't contained
|
||||
/// in [baseKeywords].
|
||||
const additionalSqliteKeywords = <String>{};
|
||||
|
||||
/// A set of keywords that need to be escaped on postgres and aren't contained
|
||||
/// in [baseKeywords].
|
||||
const additionalPostgresKeywords = <String>{
|
||||
'ANY',
|
||||
'ARRAY',
|
||||
'ASYMMETRIC',
|
||||
'BINARY',
|
||||
'BOTH',
|
||||
'CURRENT_USER',
|
||||
'ILIKE',
|
||||
'LEADING',
|
||||
'LOCALTIME',
|
||||
'LOCALTIMESTAMP',
|
||||
'GRANT',
|
||||
'ONLY',
|
||||
'OVERLAPS',
|
||||
'PLACING',
|
||||
'SESSION_USER',
|
||||
'SIMILAR',
|
||||
'SOME',
|
||||
'SYMMETRIC',
|
||||
'TRAILING',
|
||||
'USER',
|
||||
};
|
||||
|
||||
/// Returns whether [s] is an sql keyword by comparing it to the
|
||||
/// [sqliteKeywords].
|
||||
bool isSqliteKeyword(String s) => sqliteKeywords.contains(s.toUpperCase());
|
||||
|
@ -162,7 +198,14 @@ bool isSqliteKeyword(String s) => sqliteKeywords.contains(s.toUpperCase());
|
|||
final _whitespace = RegExp(r'\s');
|
||||
|
||||
/// Escapes [s] by wrapping it in backticks if it's an sqlite keyword.
|
||||
String escapeIfNeeded(String s) {
|
||||
if (isSqliteKeyword(s) || s.contains(_whitespace)) return '"$s"';
|
||||
String escapeIfNeeded(String s, [SqlDialect dialect = SqlDialect.sqlite]) {
|
||||
final inUpperCase = s.toUpperCase();
|
||||
var isKeyword = baseKeywords.contains(inUpperCase);
|
||||
|
||||
if (dialect == SqlDialect.postgres) {
|
||||
isKeyword |= additionalPostgresKeywords.contains(inUpperCase);
|
||||
}
|
||||
|
||||
if (isKeyword || s.contains(_whitespace)) return '"$s"';
|
||||
return s;
|
||||
}
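A quick sketch (not part of this diff) of how the new dialect parameter changes escaping, based on the keyword sets above; `USER` is only reserved on postgres, while `ORDER` is reserved in both dialects:

```dart
escapeIfNeeded('user');                      // user
escapeIfNeeded('user', SqlDialect.postgres); // "user"
escapeIfNeeded('order');                     // "order"
escapeIfNeeded('my column');                 // "my column" (contains whitespace)
```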
@ -511,4 +511,19 @@ abstract class DatabaseConnectionUser {
|
|||
|
||||
return context;
|
||||
}
|
||||
|
||||
/// Used by generated code to expand array variables.
|
||||
String $expandVar(int start, int amount) {
|
||||
final buffer = StringBuffer();
|
||||
final mark = executor.dialect == SqlDialect.postgres ? '@' : '?';
|
||||
|
||||
for (var x = 0; x < amount; x++) {
|
||||
buffer.write('$mark${start + x}');
|
||||
if (x != amount - 1) {
|
||||
buffer.write(', ');
|
||||
}
|
||||
}
|
||||
|
||||
return buffer.toString();
|
||||
}
|
||||
}
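For illustration (a sketch, not part of the change, where `db` is any generated database class), the dialect only changes the variable marker emitted by `$expandVar`; the sqlite output matches the existing test adjusted later in this diff:

```dart
// With a sqlite executor:
db.$expandVar(2, 3); // '?2, ?3, ?4'
// With a postgres executor, '@' markers are used instead:
db.$expandVar(2, 3); // '@2, @3, @4'
```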
@ -159,6 +159,19 @@ abstract class SupportedTransactionDelegate extends TransactionDelegate {
|
|||
void startTransaction(Future Function(QueryDelegate) run);
|
||||
}
|
||||
|
||||
/// A [TransactionDelegate] for database APIs that have their own transaction
/// function.
|
||||
abstract class WrappedTransactionDelegate extends TransactionDelegate {
|
||||
/// Constant constructor on superclass
|
||||
const WrappedTransactionDelegate();
|
||||
|
||||
/// Start a transaction, which we assume implements [QueryDelegate], and call
|
||||
/// [run] with the transaction.
|
||||
///
|
||||
/// If [run] completes with an error, rollback. Otherwise, commit.
|
||||
Future runInTransaction(Future Function(QueryDelegate) run);
|
||||
}
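As a sketch of how this contract is meant to be implemented (all names below are hypothetical and not part of this change), a delegate wrapping a database API that exposes its own `transaction` callback could look like this:

```dart
class MyTransactionDelegate extends WrappedTransactionDelegate {
  const MyTransactionDelegate(this._db);

  // Hypothetical underlying database API with its own transaction function.
  final MyDatabaseApi _db;

  @override
  Future runInTransaction(Future Function(QueryDelegate) run) {
    // The wrapped API is assumed to commit when the callback returns normally
    // and to roll back when it throws, matching the contract documented above.
    return _db.transaction((tx) => run(MyQueryDelegate(tx)));
  }
}
```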
|
||||
|
||||
/// An interface that supports setting the database version.
|
||||
///
|
||||
/// Clients may not extend, implement or mix-in this class directly.
|
||||
|
|
|
@ -137,6 +137,9 @@ class _TransactionExecutor extends _BaseExecutor
|
|||
@override
|
||||
bool get logStatements => _db.logStatements;
|
||||
|
||||
@override
|
||||
SqlDialect get dialect => _db.dialect;
|
||||
|
||||
final Completer<void> _sendCalled = Completer();
|
||||
Completer<bool>? _openingCompleter;
|
||||
|
||||
|
@ -203,6 +206,17 @@ class _TransactionExecutor extends _BaseExecutor
|
|||
// that until send() was called.
|
||||
await _sendCalled.future;
|
||||
});
|
||||
} else if (transactionManager is WrappedTransactionDelegate) {
|
||||
unawaited(_db._synchronized(() async {
|
||||
try {
|
||||
await transactionManager.runInTransaction((transaction) async {
|
||||
impl = transaction;
|
||||
_sendFakeErrorOnRollback = true;
|
||||
transactionStarted.complete();
|
||||
await _sendCalled.future;
|
||||
});
|
||||
} catch (_) {}
|
||||
}));
|
||||
} else {
|
||||
throw Exception('Invalid delegate: Has unknown transaction delegate');
|
||||
}
|
||||
|
|
|
@ -400,7 +400,7 @@ class _CastInSqlExpression<D1, D2> extends Expression<D2> {
|
|||
|
||||
context.buffer.write('CAST(');
|
||||
inner.writeInto(context);
|
||||
context.buffer.write(' AS ${type.sqlName})');
|
||||
context.buffer.write(' AS ${type.sqlName(context.dialect)})');
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -60,15 +60,21 @@ class Variable<T> extends Expression<T> {
|
|||
|
||||
@override
|
||||
void writeInto(GenerationContext context) {
|
||||
final explicitStart = context.explicitVariableIndex;
|
||||
var explicitStart = context.explicitVariableIndex;
|
||||
|
||||
var mark = '?';
|
||||
if (context.dialect == SqlDialect.postgres) {
|
||||
explicitStart = 1;
|
||||
mark = '@';
|
||||
}
|
||||
|
||||
if (explicitStart != null) {
|
||||
context.buffer
|
||||
..write('?')
|
||||
..write(mark)
|
||||
..write(explicitStart + context.amountOfVariables);
|
||||
context.introduceVariable(this, mapToSimpleValue(context));
|
||||
} else if (value != null) {
|
||||
context.buffer.write('?');
|
||||
context.buffer.write(mark);
|
||||
context.introduceVariable(this, mapToSimpleValue(context));
|
||||
} else {
|
||||
context.buffer.write('NULL');
|
||||
|
|
|
@ -232,7 +232,7 @@ class Migrator {
|
|||
|
||||
void _writeCreateTable(TableInfo table, GenerationContext context) {
|
||||
context.buffer.write('CREATE TABLE IF NOT EXISTS '
|
||||
'${escapeIfNeeded(table.$tableName)} (');
|
||||
'${escapeIfNeeded(table.$tableName, context.dialect)} (');
|
||||
|
||||
var hasAutoIncrement = false;
|
||||
for (var i = 0; i < table.$columns.length; i++) {
|
||||
|
|
|
@ -80,5 +80,8 @@ enum SqlDialect {
|
|||
sqlite,
|
||||
|
||||
/// (currently unsupported)
|
||||
mysql
|
||||
mysql,
|
||||
|
||||
/// PostgreSQL (currently supported in an experimental state)
|
||||
postgres,
|
||||
}
|
||||
|
|
|
@ -41,8 +41,12 @@ class GeneratedColumn<T> extends Column<T> {
|
|||
/// Additional checks performed on values before inserts or updates.
|
||||
final VerificationResult Function(T, VerificationMeta)? additionalChecks;
|
||||
|
||||
/// The sql type name, such as TEXT for texts.
|
||||
final String typeName;
|
||||
/// The sql type, such as `StringType` for texts.
|
||||
final SqlType type;
|
||||
|
||||
/// The sql type name, such as `TEXT` for texts.
|
||||
@Deprecated('Use type.sqlName instead')
|
||||
String get typeName => type.sqlName(SqlDialect.sqlite);
|
||||
|
||||
/// If this column is generated (that is, it is a SQL expression of other)
|
||||
/// columns, contains information about how to generate this column.
|
||||
|
@ -65,7 +69,7 @@ class GeneratedColumn<T> extends Column<T> {
|
|||
this.tableName,
|
||||
this.$nullable, {
|
||||
this.clientDefault,
|
||||
required this.typeName,
|
||||
required this.type,
|
||||
String? defaultConstraints,
|
||||
this.$customConstraints,
|
||||
this.defaultValue,
|
||||
|
@ -85,7 +89,7 @@ class GeneratedColumn<T> extends Column<T> {
|
|||
tableName,
|
||||
$nullable,
|
||||
clientDefault,
|
||||
typeName,
|
||||
type,
|
||||
_defaultConstraints,
|
||||
$customConstraints,
|
||||
defaultValue,
|
||||
|
@ -99,10 +103,18 @@ class GeneratedColumn<T> extends Column<T> {
|
|||
/// [here](https://www.sqlite.org/syntax/column-def.html), into the given
|
||||
/// buffer.
|
||||
void writeColumnDefinition(GenerationContext into) {
|
||||
into.buffer.write('$escapedName $typeName');
|
||||
final isSerial = into.dialect == SqlDialect.postgres && hasAutoIncrement;
|
||||
|
||||
if (isSerial) {
|
||||
into.buffer.write('$escapedName bigserial PRIMARY KEY NOT NULL');
|
||||
} else {
|
||||
into.buffer.write('$escapedName ${type.sqlName(into.dialect)}');
|
||||
}
|
||||
|
||||
if ($customConstraints == null) {
|
||||
into.buffer.write($nullable ? ' NULL' : ' NOT NULL');
|
||||
if (!isSerial) {
|
||||
into.buffer.write($nullable ? ' NULL' : ' NOT NULL');
|
||||
}
|
||||
|
||||
final defaultValue = this.defaultValue;
|
||||
if (defaultValue != null) {
|
||||
|
@ -127,7 +139,7 @@ class GeneratedColumn<T> extends Column<T> {
|
|||
}
|
||||
|
||||
// these custom constraints refer to builtin constraints from drift
|
||||
if (_defaultConstraints != null) {
|
||||
if (!isSerial && _defaultConstraints != null) {
|
||||
into.buffer
|
||||
..write(' ')
|
||||
..write(_defaultConstraints);
|
||||
|
@ -230,7 +242,7 @@ class GeneratedColumnWithTypeConverter<D, S> extends GeneratedColumn<S> {
|
|||
String tableName,
|
||||
bool nullable,
|
||||
S Function()? clientDefault,
|
||||
String typeName,
|
||||
SqlType type,
|
||||
String? defaultConstraints,
|
||||
String? customConstraints,
|
||||
Expression<S>? defaultValue,
|
||||
|
@ -242,7 +254,7 @@ class GeneratedColumnWithTypeConverter<D, S> extends GeneratedColumn<S> {
|
|||
tableName,
|
||||
nullable,
|
||||
clientDefault: clientDefault,
|
||||
typeName: typeName,
|
||||
type: type,
|
||||
defaultConstraints: defaultConstraints,
|
||||
$customConstraints: customConstraints,
|
||||
defaultValue: defaultValue,
|
||||
|
|
|
@ -167,6 +167,6 @@ extension RowIdExtension on TableInfo {
|
|||
}
|
||||
|
||||
return GeneratedColumn<int?>('_rowid_', aliasedName, false,
|
||||
typeName: 'INTEGER');
|
||||
type: const IntType());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -124,7 +124,8 @@ class InsertStatement<T extends Table, D> {
|
|||
final columnName = column.$name;
|
||||
|
||||
if (rawValues.containsKey(columnName)) {
|
||||
map[columnName] = rawValues[columnName]!;
|
||||
final value = rawValues[columnName]!;
|
||||
map[columnName] = value;
|
||||
} else {
|
||||
if (column.clientDefault != null) {
|
||||
map[columnName] = column._evaluateClientDefault();
|
||||
|
@ -136,8 +137,16 @@ class InsertStatement<T extends Table, D> {
|
|||
}
|
||||
|
||||
final ctx = GenerationContext.fromDb(database);
|
||||
|
||||
if (ctx.dialect == SqlDialect.postgres &&
|
||||
mode != InsertMode.insert &&
|
||||
mode != InsertMode.insertOrIgnore) {
|
||||
throw ArgumentError('$mode not supported on postgres');
|
||||
}
|
||||
|
||||
ctx.buffer
|
||||
..write(_insertKeywords[mode])
|
||||
..write(_insertKeywords[
|
||||
ctx.dialect == SqlDialect.postgres ? InsertMode.insert : mode])
|
||||
..write(' INTO ')
|
||||
..write(table.$tableName)
|
||||
..write(' ');
|
||||
|
@ -185,24 +194,29 @@ class InsertStatement<T extends Table, D> {
|
|||
first = false;
|
||||
}
|
||||
|
||||
ctx.buffer.write(') DO UPDATE SET ');
|
||||
if (ctx.dialect == SqlDialect.postgres &&
|
||||
mode == InsertMode.insertOrIgnore) {
|
||||
ctx.buffer.write(') DO NOTHING ');
|
||||
} else {
|
||||
ctx.buffer.write(') DO UPDATE SET ');
|
||||
|
||||
first = true;
|
||||
for (final update in updateSet.entries) {
|
||||
final column = escapeIfNeeded(update.key);
|
||||
first = true;
|
||||
for (final update in updateSet.entries) {
|
||||
final column = escapeIfNeeded(update.key);
|
||||
|
||||
if (!first) ctx.buffer.write(', ');
|
||||
ctx.buffer.write('$column = ');
|
||||
update.value.writeInto(ctx);
|
||||
if (!first) ctx.buffer.write(', ');
|
||||
ctx.buffer.write('$column = ');
|
||||
update.value.writeInto(ctx);
|
||||
|
||||
first = false;
|
||||
}
|
||||
first = false;
|
||||
}
|
||||
|
||||
if (onConflict._where != null) {
|
||||
ctx.writeWhitespace();
|
||||
final where = onConflict._where!(
|
||||
table.asDslTable, table.createAlias('excluded').asDslTable);
|
||||
where.writeInto(ctx);
|
||||
if (onConflict._where != null) {
|
||||
ctx.writeWhitespace();
|
||||
final where = onConflict._where!(
|
||||
table.asDslTable, table.createAlias('excluded').asDslTable);
|
||||
where.writeInto(ctx);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -214,6 +228,13 @@ class InsertStatement<T extends Table, D> {
|
|||
|
||||
if (returning) {
|
||||
ctx.buffer.write(' RETURNING *');
|
||||
} else if (ctx.dialect == SqlDialect.postgres) {
|
||||
if (table.$primaryKey.length == 1) {
|
||||
final id = table.$primaryKey.firstOrNull;
|
||||
if (id != null && id.type is IntType) {
|
||||
ctx.buffer.write(' RETURNING ${id.name}');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return ctx;
|
||||
|
|
|
@ -18,7 +18,7 @@ abstract class SqlType<T> {
|
|||
const SqlType();
|
||||
|
||||
/// The name of this type in sql, such as `TEXT`.
|
||||
String get sqlName;
|
||||
String sqlName(SqlDialect dialect);
|
||||
|
||||
/// Maps the [content] to a value that we can send together with a prepared
|
||||
/// statement to represent the given value.
|
||||
|
@ -40,7 +40,8 @@ class BoolType extends SqlType<bool> {
|
|||
const BoolType();
|
||||
|
||||
@override
|
||||
String get sqlName => 'INTEGER';
|
||||
String sqlName(SqlDialect dialect) =>
|
||||
dialect == SqlDialect.sqlite ? 'INTEGER' : 'integer';
|
||||
|
||||
@override
|
||||
bool? mapFromDatabaseResponse(dynamic response) {
|
||||
|
@ -74,7 +75,8 @@ class StringType extends SqlType<String> {
|
|||
const StringType();
|
||||
|
||||
@override
|
||||
String get sqlName => 'TEXT';
|
||||
String sqlName(SqlDialect dialect) =>
|
||||
dialect == SqlDialect.sqlite ? 'TEXT' : 'text';
|
||||
|
||||
@override
|
||||
String? mapFromDatabaseResponse(dynamic response) => response?.toString();
|
||||
|
@ -103,7 +105,8 @@ class IntType extends SqlType<int> {
|
|||
const IntType();
|
||||
|
||||
@override
|
||||
String get sqlName => 'INTEGER';
|
||||
String sqlName(SqlDialect dialect) =>
|
||||
dialect == SqlDialect.sqlite ? 'INTEGER' : 'bigint';
|
||||
|
||||
@override
|
||||
int? mapFromDatabaseResponse(dynamic response) {
|
||||
|
@ -127,7 +130,8 @@ class DateTimeType extends SqlType<DateTime> {
|
|||
const DateTimeType();
|
||||
|
||||
@override
|
||||
String get sqlName => 'INTEGER';
|
||||
String sqlName(SqlDialect dialect) =>
|
||||
dialect == SqlDialect.sqlite ? 'INTEGER' : 'integer';
|
||||
|
||||
@override
|
||||
DateTime? mapFromDatabaseResponse(dynamic response) {
|
||||
|
@ -161,7 +165,8 @@ class BlobType extends SqlType<Uint8List> {
|
|||
const BlobType();
|
||||
|
||||
@override
|
||||
String get sqlName => 'BLOB';
|
||||
String sqlName(SqlDialect dialect) =>
|
||||
dialect == SqlDialect.sqlite ? 'BLOB' : 'bytea';
|
||||
|
||||
@override
|
||||
Uint8List? mapFromDatabaseResponse(dynamic response) {
|
||||
|
@ -187,7 +192,8 @@ class RealType extends SqlType<double> {
|
|||
const RealType();
|
||||
|
||||
@override
|
||||
String get sqlName => 'REAL';
|
||||
String sqlName(SqlDialect dialect) =>
|
||||
dialect == SqlDialect.sqlite ? 'REAL' : 'float8';
|
||||
|
||||
@override
|
||||
double? mapFromDatabaseResponse(dynamic response) {
|
||||
|
|
|
@ -1,13 +0,0 @@
|
|||
/// Used by generated code.
|
||||
String $expandVar(int start, int amount) {
|
||||
final buffer = StringBuffer();
|
||||
|
||||
for (var x = 0; x < amount; x++) {
|
||||
buffer.write('?${start + x}');
|
||||
if (x != amount - 1) {
|
||||
buffer.write(', ');
|
||||
}
|
||||
}
|
||||
|
||||
return buffer.toString();
|
||||
}
|
|
@ -3,9 +3,9 @@ import 'package:test/test.dart';
|
|||
|
||||
void main() {
|
||||
final nullable =
|
||||
GeneratedColumn<DateTime>('name', 'table', true, typeName: 'INTEGER');
|
||||
GeneratedColumn<DateTime>('name', 'table', true, type: const IntType());
|
||||
final nonNull =
|
||||
GeneratedColumn<DateTime>('name', 'table', false, typeName: 'INTEGER');
|
||||
GeneratedColumn<DateTime>('name', 'table', false, type: const IntType());
|
||||
|
||||
test('should write column definition', () {
|
||||
final nonNullQuery = GenerationContext.fromDb(null);
|
||||
|
|
|
@ -9,7 +9,7 @@ void main() {
|
|||
'foo',
|
||||
'tbl',
|
||||
false,
|
||||
typeName: 'INTEGER',
|
||||
type: const IntType(),
|
||||
$customConstraints: 'NOT NULL PRIMARY KEY AUTOINCREMENT',
|
||||
);
|
||||
|
||||
|
@ -22,7 +22,7 @@ void main() {
|
|||
|
||||
test('int column writes PRIMARY KEY constraint', () {
|
||||
final column = GeneratedColumn<int>('foo', 'tbl', false,
|
||||
typeName: 'INTEGER', $customConstraints: 'NOT NULL PRIMARY KEY');
|
||||
type: const IntType(), $customConstraints: 'NOT NULL PRIMARY KEY');
|
||||
|
||||
final context = GenerationContext.fromDb(TodoDb());
|
||||
column.writeColumnDefinition(context);
|
||||
|
|
|
@ -212,18 +212,20 @@ class ConfigTable extends Table with TableInfo<ConfigTable, Config> {
|
|||
final VerificationMeta _configKeyMeta = const VerificationMeta('configKey');
|
||||
late final GeneratedColumn<String?> configKey = GeneratedColumn<String?>(
|
||||
'config_key', aliasedName, false,
|
||||
typeName: 'TEXT',
|
||||
type: const StringType(),
|
||||
requiredDuringInsert: true,
|
||||
$customConstraints: 'not null primary key');
|
||||
final VerificationMeta _configValueMeta =
|
||||
const VerificationMeta('configValue');
|
||||
late final GeneratedColumn<String?> configValue = GeneratedColumn<String?>(
|
||||
'config_value', aliasedName, true,
|
||||
typeName: 'TEXT', requiredDuringInsert: false, $customConstraints: '');
|
||||
type: const StringType(),
|
||||
requiredDuringInsert: false,
|
||||
$customConstraints: '');
|
||||
final VerificationMeta _syncStateMeta = const VerificationMeta('syncState');
|
||||
late final GeneratedColumnWithTypeConverter<SyncType, int?> syncState =
|
||||
GeneratedColumn<int?>('sync_state', aliasedName, true,
|
||||
typeName: 'INTEGER',
|
||||
type: const IntType(),
|
||||
requiredDuringInsert: false,
|
||||
$customConstraints: '')
|
||||
.withConverter<SyncType>(ConfigTable.$converter0);
|
||||
|
@ -232,7 +234,7 @@ class ConfigTable extends Table with TableInfo<ConfigTable, Config> {
|
|||
late final GeneratedColumnWithTypeConverter<SyncType?, int?>
|
||||
syncStateImplicit = GeneratedColumn<int?>(
|
||||
'sync_state_implicit', aliasedName, true,
|
||||
typeName: 'INTEGER',
|
||||
type: const IntType(),
|
||||
requiredDuringInsert: false,
|
||||
$customConstraints: '')
|
||||
.withConverter<SyncType?>(ConfigTable.$converter1);
|
||||
|
@ -419,14 +421,14 @@ class WithDefaults extends Table with TableInfo<WithDefaults, WithDefault> {
|
|||
final VerificationMeta _aMeta = const VerificationMeta('a');
|
||||
late final GeneratedColumn<String?> a = GeneratedColumn<String?>(
|
||||
'a', aliasedName, true,
|
||||
typeName: 'TEXT',
|
||||
type: const StringType(),
|
||||
requiredDuringInsert: false,
|
||||
$customConstraints: 'DEFAULT \'something\'',
|
||||
defaultValue: const CustomExpression<String>('\'something\''));
|
||||
final VerificationMeta _bMeta = const VerificationMeta('b');
|
||||
late final GeneratedColumn<int?> b = GeneratedColumn<int?>(
|
||||
'b', aliasedName, true,
|
||||
typeName: 'INTEGER',
|
||||
type: const IntType(),
|
||||
requiredDuringInsert: false,
|
||||
$customConstraints: 'UNIQUE');
|
||||
@override
|
||||
|
@ -513,7 +515,7 @@ class NoIds extends Table with TableInfo<NoIds, NoIdRow> {
|
|||
final VerificationMeta _payloadMeta = const VerificationMeta('payload');
|
||||
late final GeneratedColumn<Uint8List?> payload = GeneratedColumn<Uint8List?>(
|
||||
'payload', aliasedName, false,
|
||||
typeName: 'BLOB',
|
||||
type: const BlobType(),
|
||||
requiredDuringInsert: true,
|
||||
$customConstraints: 'NOT NULL PRIMARY KEY');
|
||||
@override
|
||||
|
@ -716,17 +718,21 @@ class WithConstraints extends Table
|
|||
final VerificationMeta _aMeta = const VerificationMeta('a');
|
||||
late final GeneratedColumn<String?> a = GeneratedColumn<String?>(
|
||||
'a', aliasedName, true,
|
||||
typeName: 'TEXT', requiredDuringInsert: false, $customConstraints: '');
|
||||
type: const StringType(),
|
||||
requiredDuringInsert: false,
|
||||
$customConstraints: '');
|
||||
final VerificationMeta _bMeta = const VerificationMeta('b');
|
||||
late final GeneratedColumn<int?> b = GeneratedColumn<int?>(
|
||||
'b', aliasedName, false,
|
||||
typeName: 'INTEGER',
|
||||
type: const IntType(),
|
||||
requiredDuringInsert: true,
|
||||
$customConstraints: 'NOT NULL');
|
||||
final VerificationMeta _cMeta = const VerificationMeta('c');
|
||||
late final GeneratedColumn<double?> c = GeneratedColumn<double?>(
|
||||
'c', aliasedName, true,
|
||||
typeName: 'REAL', requiredDuringInsert: false, $customConstraints: '');
|
||||
type: const RealType(),
|
||||
requiredDuringInsert: false,
|
||||
$customConstraints: '');
|
||||
@override
|
||||
List<GeneratedColumn> get $columns => [a, b, c];
|
||||
@override
|
||||
|
@ -964,22 +970,28 @@ class Mytable extends Table with TableInfo<Mytable, MytableData> {
|
|||
final VerificationMeta _someidMeta = const VerificationMeta('someid');
|
||||
late final GeneratedColumn<int?> someid = GeneratedColumn<int?>(
|
||||
'someid', aliasedName, false,
|
||||
typeName: 'INTEGER',
|
||||
type: const IntType(),
|
||||
requiredDuringInsert: false,
|
||||
$customConstraints: 'NOT NULL');
|
||||
final VerificationMeta _sometextMeta = const VerificationMeta('sometext');
|
||||
late final GeneratedColumn<String?> sometext = GeneratedColumn<String?>(
|
||||
'sometext', aliasedName, true,
|
||||
typeName: 'TEXT', requiredDuringInsert: false, $customConstraints: '');
|
||||
type: const StringType(),
|
||||
requiredDuringInsert: false,
|
||||
$customConstraints: '');
|
||||
final VerificationMeta _isInsertingMeta =
|
||||
const VerificationMeta('isInserting');
|
||||
late final GeneratedColumn<bool?> isInserting = GeneratedColumn<bool?>(
|
||||
'is_inserting', aliasedName, true,
|
||||
typeName: 'INTEGER', requiredDuringInsert: false, $customConstraints: '');
|
||||
type: const BoolType(),
|
||||
requiredDuringInsert: false,
|
||||
$customConstraints: '');
|
||||
final VerificationMeta _somedateMeta = const VerificationMeta('somedate');
|
||||
late final GeneratedColumn<DateTime?> somedate = GeneratedColumn<DateTime?>(
|
||||
'somedate', aliasedName, true,
|
||||
typeName: 'INTEGER', requiredDuringInsert: false, $customConstraints: '');
|
||||
type: const IntType(),
|
||||
requiredDuringInsert: false,
|
||||
$customConstraints: '');
|
||||
@override
|
||||
List<GeneratedColumn> get $columns =>
|
||||
[someid, sometext, isInserting, somedate];
|
||||
|
@ -1186,15 +1198,21 @@ class Email extends Table
|
|||
final VerificationMeta _senderMeta = const VerificationMeta('sender');
|
||||
late final GeneratedColumn<String?> sender = GeneratedColumn<String?>(
|
||||
'sender', aliasedName, false,
|
||||
typeName: 'TEXT', requiredDuringInsert: true, $customConstraints: '');
|
||||
type: const StringType(),
|
||||
requiredDuringInsert: true,
|
||||
$customConstraints: '');
|
||||
final VerificationMeta _titleMeta = const VerificationMeta('title');
|
||||
late final GeneratedColumn<String?> title = GeneratedColumn<String?>(
|
||||
'title', aliasedName, false,
|
||||
typeName: 'TEXT', requiredDuringInsert: true, $customConstraints: '');
|
||||
type: const StringType(),
|
||||
requiredDuringInsert: true,
|
||||
$customConstraints: '');
|
||||
final VerificationMeta _bodyMeta = const VerificationMeta('body');
|
||||
late final GeneratedColumn<String?> body = GeneratedColumn<String?>(
|
||||
'body', aliasedName, false,
|
||||
typeName: 'TEXT', requiredDuringInsert: true, $customConstraints: '');
|
||||
type: const StringType(),
|
||||
requiredDuringInsert: true,
|
||||
$customConstraints: '');
|
||||
@override
|
||||
List<GeneratedColumn> get $columns => [sender, title, body];
|
||||
@override
|
||||
|
@ -1378,13 +1396,13 @@ class WeirdTable extends Table with TableInfo<WeirdTable, WeirdData> {
|
|||
final VerificationMeta _sqlClassMeta = const VerificationMeta('sqlClass');
|
||||
late final GeneratedColumn<int?> sqlClass = GeneratedColumn<int?>(
|
||||
'class', aliasedName, false,
|
||||
typeName: 'INTEGER',
|
||||
type: const IntType(),
|
||||
requiredDuringInsert: true,
|
||||
$customConstraints: 'NOT NULL');
|
||||
final VerificationMeta _textColumnMeta = const VerificationMeta('textColumn');
|
||||
late final GeneratedColumn<String?> textColumn = GeneratedColumn<String?>(
|
||||
'text', aliasedName, false,
|
||||
typeName: 'TEXT',
|
||||
type: const StringType(),
|
||||
requiredDuringInsert: true,
|
||||
$customConstraints: 'NOT NULL');
|
||||
@override
|
||||
|
@ -1535,18 +1553,18 @@ class MyView extends View<MyView, MyViewData> {
|
|||
|
||||
late final GeneratedColumn<String?> configKey = GeneratedColumn<String?>(
|
||||
'config_key', aliasedName, false,
|
||||
typeName: 'TEXT');
|
||||
type: const StringType());
|
||||
late final GeneratedColumn<String?> configValue = GeneratedColumn<String?>(
|
||||
'config_value', aliasedName, true,
|
||||
typeName: 'TEXT');
|
||||
type: const StringType());
|
||||
late final GeneratedColumnWithTypeConverter<SyncType, int?> syncState =
|
||||
GeneratedColumn<int?>('sync_state', aliasedName, true,
|
||||
typeName: 'INTEGER')
|
||||
type: const IntType())
|
||||
.withConverter<SyncType>(ConfigTable.$converter0);
|
||||
late final GeneratedColumnWithTypeConverter<SyncType?, int?>
|
||||
syncStateImplicit = GeneratedColumn<int?>(
|
||||
'sync_state_implicit', aliasedName, true,
|
||||
typeName: 'INTEGER')
|
||||
type: const IntType())
|
||||
.withConverter<SyncType?>(ConfigTable.$converter1);
|
||||
}
|
||||
|
||||
|
|
|
@ -186,20 +186,20 @@ class $CategoriesTable extends Categories
|
|||
final VerificationMeta _idMeta = const VerificationMeta('id');
|
||||
late final GeneratedColumn<int?> id = GeneratedColumn<int?>(
|
||||
'id', aliasedName, false,
|
||||
typeName: 'INTEGER',
|
||||
type: const IntType(),
|
||||
requiredDuringInsert: false,
|
||||
defaultConstraints: 'PRIMARY KEY AUTOINCREMENT');
|
||||
final VerificationMeta _descriptionMeta =
|
||||
const VerificationMeta('description');
|
||||
late final GeneratedColumn<String?> description = GeneratedColumn<String?>(
|
||||
'desc', aliasedName, false,
|
||||
typeName: 'TEXT',
|
||||
type: const StringType(),
|
||||
requiredDuringInsert: true,
|
||||
$customConstraints: 'NOT NULL UNIQUE');
|
||||
final VerificationMeta _priorityMeta = const VerificationMeta('priority');
|
||||
late final GeneratedColumnWithTypeConverter<CategoryPriority, int?> priority =
|
||||
GeneratedColumn<int?>('priority', aliasedName, false,
|
||||
typeName: 'INTEGER',
|
||||
type: const IntType(),
|
||||
requiredDuringInsert: false,
|
||||
defaultValue: const Constant(0))
|
||||
.withConverter<CategoryPriority>($CategoriesTable.$converter0);
|
||||
|
@ -207,7 +207,7 @@ class $CategoriesTable extends Categories
|
|||
const VerificationMeta('descriptionInUpperCase');
|
||||
late final GeneratedColumn<String?> descriptionInUpperCase =
|
||||
GeneratedColumn<String?>('description_in_upper_case', aliasedName, false,
|
||||
typeName: 'TEXT',
|
||||
type: const StringType(),
|
||||
requiredDuringInsert: false,
|
||||
generatedAs: GeneratedAs(description.upper(), false));
|
||||
@override
|
||||
|
@ -476,7 +476,7 @@ class $TodosTableTable extends TodosTable
|
|||
final VerificationMeta _idMeta = const VerificationMeta('id');
|
||||
late final GeneratedColumn<int?> id = GeneratedColumn<int?>(
|
||||
'id', aliasedName, false,
|
||||
typeName: 'INTEGER',
|
||||
type: const IntType(),
|
||||
requiredDuringInsert: false,
|
||||
defaultConstraints: 'PRIMARY KEY AUTOINCREMENT');
|
||||
final VerificationMeta _titleMeta = const VerificationMeta('title');
|
||||
|
@ -484,20 +484,20 @@ class $TodosTableTable extends TodosTable
|
|||
'title', aliasedName, true,
|
||||
additionalChecks:
|
||||
GeneratedColumn.checkTextLength(minTextLength: 4, maxTextLength: 16),
|
||||
typeName: 'TEXT',
|
||||
type: const StringType(),
|
||||
requiredDuringInsert: false);
|
||||
final VerificationMeta _contentMeta = const VerificationMeta('content');
|
||||
late final GeneratedColumn<String?> content = GeneratedColumn<String?>(
|
||||
'content', aliasedName, false,
|
||||
typeName: 'TEXT', requiredDuringInsert: true);
|
||||
type: const StringType(), requiredDuringInsert: true);
|
||||
final VerificationMeta _targetDateMeta = const VerificationMeta('targetDate');
|
||||
late final GeneratedColumn<DateTime?> targetDate = GeneratedColumn<DateTime?>(
|
||||
'target_date', aliasedName, true,
|
||||
typeName: 'INTEGER', requiredDuringInsert: false);
|
||||
type: const IntType(), requiredDuringInsert: false);
|
||||
final VerificationMeta _categoryMeta = const VerificationMeta('category');
|
||||
late final GeneratedColumn<int?> category = GeneratedColumn<int?>(
|
||||
'category', aliasedName, true,
|
||||
typeName: 'INTEGER',
|
||||
type: const IntType(),
|
||||
requiredDuringInsert: false,
|
||||
defaultConstraints: 'REFERENCES categories (id)');
|
||||
@override
|
||||
|
@ -759,7 +759,7 @@ class $UsersTable extends Users with TableInfo<$UsersTable, User> {
|
|||
final VerificationMeta _idMeta = const VerificationMeta('id');
|
||||
late final GeneratedColumn<int?> id = GeneratedColumn<int?>(
|
||||
'id', aliasedName, false,
|
||||
typeName: 'INTEGER',
|
||||
type: const IntType(),
|
||||
requiredDuringInsert: false,
|
||||
defaultConstraints: 'PRIMARY KEY AUTOINCREMENT');
|
||||
final VerificationMeta _nameMeta = const VerificationMeta('name');
|
||||
|
@ -767,12 +767,12 @@ class $UsersTable extends Users with TableInfo<$UsersTable, User> {
|
|||
'name', aliasedName, false,
|
||||
additionalChecks:
|
||||
GeneratedColumn.checkTextLength(minTextLength: 6, maxTextLength: 32),
|
||||
typeName: 'TEXT',
|
||||
type: const StringType(),
|
||||
requiredDuringInsert: true);
|
||||
final VerificationMeta _isAwesomeMeta = const VerificationMeta('isAwesome');
|
||||
late final GeneratedColumn<bool?> isAwesome = GeneratedColumn<bool?>(
|
||||
'is_awesome', aliasedName, false,
|
||||
typeName: 'INTEGER',
|
||||
type: const BoolType(),
|
||||
requiredDuringInsert: false,
|
||||
defaultConstraints: 'CHECK (is_awesome IN (0, 1))',
|
||||
defaultValue: const Constant(true));
|
||||
|
@ -780,12 +780,12 @@ class $UsersTable extends Users with TableInfo<$UsersTable, User> {
|
|||
const VerificationMeta('profilePicture');
|
||||
late final GeneratedColumn<Uint8List?> profilePicture =
|
||||
GeneratedColumn<Uint8List?>('profile_picture', aliasedName, false,
|
||||
typeName: 'BLOB', requiredDuringInsert: true);
|
||||
type: const BlobType(), requiredDuringInsert: true);
|
||||
final VerificationMeta _creationTimeMeta =
|
||||
const VerificationMeta('creationTime');
|
||||
late final GeneratedColumn<DateTime?> creationTime =
|
||||
GeneratedColumn<DateTime?>('creation_time', aliasedName, false,
|
||||
typeName: 'INTEGER',
|
||||
type: const IntType(),
|
||||
requiredDuringInsert: false,
|
||||
defaultValue: currentDateAndTime);
|
||||
@override
|
||||
|
@ -976,11 +976,11 @@ class $SharedTodosTable extends SharedTodos
|
|||
final VerificationMeta _todoMeta = const VerificationMeta('todo');
|
||||
late final GeneratedColumn<int?> todo = GeneratedColumn<int?>(
|
||||
'todo', aliasedName, false,
|
||||
typeName: 'INTEGER', requiredDuringInsert: true);
|
||||
type: const IntType(), requiredDuringInsert: true);
|
||||
final VerificationMeta _userMeta = const VerificationMeta('user');
|
||||
late final GeneratedColumn<int?> user = GeneratedColumn<int?>(
|
||||
'user', aliasedName, false,
|
||||
typeName: 'INTEGER', requiredDuringInsert: true);
|
||||
type: const IntType(), requiredDuringInsert: true);
|
||||
@override
|
||||
List<GeneratedColumn> get $columns => [todo, user];
|
||||
@override
|
||||
|
@ -1116,15 +1116,15 @@ class $TableWithoutPKTable extends TableWithoutPK
|
|||
const VerificationMeta('notReallyAnId');
|
||||
late final GeneratedColumn<int?> notReallyAnId = GeneratedColumn<int?>(
|
||||
'not_really_an_id', aliasedName, false,
|
||||
typeName: 'INTEGER', requiredDuringInsert: true);
|
||||
type: const IntType(), requiredDuringInsert: true);
|
||||
final VerificationMeta _someFloatMeta = const VerificationMeta('someFloat');
|
||||
late final GeneratedColumn<double?> someFloat = GeneratedColumn<double?>(
|
||||
'some_float', aliasedName, false,
|
||||
typeName: 'REAL', requiredDuringInsert: true);
|
||||
type: const RealType(), requiredDuringInsert: true);
|
||||
final VerificationMeta _customMeta = const VerificationMeta('custom');
|
||||
late final GeneratedColumnWithTypeConverter<MyCustomObject, String?> custom =
|
||||
GeneratedColumn<String?>('custom', aliasedName, false,
|
||||
typeName: 'TEXT',
|
||||
type: const StringType(),
|
||||
requiredDuringInsert: false,
|
||||
clientDefault: _uuid.v4)
|
||||
.withConverter<MyCustomObject>($TableWithoutPKTable.$converter0);
|
||||
|
@ -1293,7 +1293,7 @@ class $PureDefaultsTable extends PureDefaults
|
|||
final VerificationMeta _txtMeta = const VerificationMeta('txt');
|
||||
late final GeneratedColumn<String?> txt = GeneratedColumn<String?>(
|
||||
'insert', aliasedName, true,
|
||||
typeName: 'TEXT', requiredDuringInsert: false);
|
||||
type: const StringType(), requiredDuringInsert: false);
|
||||
@override
|
||||
List<GeneratedColumn> get $columns => [txt];
|
||||
@override
|
||||
|
|
|
@ -105,7 +105,7 @@ void main() {
|
|||
|
||||
for (var i = 0; i < 4; i++) {
|
||||
filter.add(i);
|
||||
await pumpEventQueue();
|
||||
await pumpEventQueue(times: 10);
|
||||
}
|
||||
|
||||
final values = await db
|
||||
|
|
|
@ -136,7 +136,7 @@ void main() {
|
|||
test('columns use table names in queries with multiple tables', () async {
|
||||
await db.multiple(predicate: (d, c) => d.a.equals('foo')).get();
|
||||
|
||||
verify(mock.runSelect(argThat(contains('d.a = ?')), any));
|
||||
verify(mock.runSelect(argThat(contains('d.a = ?1')), any));
|
||||
});
|
||||
|
||||
test('order by-params are ignored by default', () async {
|
||||
|
|
|
@ -1,9 +1,12 @@
|
|||
import 'package:drift/drift.dart' show $expandVar;
|
||||
import 'package:test/test.dart';
|
||||
|
||||
import '../data/tables/todos.dart';
|
||||
|
||||
void main() {
|
||||
test('\$expandVar test', () {
|
||||
expect($expandVar(4, 0), '');
|
||||
expect($expandVar(2, 3), '?2, ?3, ?4');
|
||||
final db = TodoDb();
|
||||
|
||||
expect(db.$expandVar(4, 0), '');
|
||||
expect(db.$expandVar(2, 3), '?2, ?3, ?4');
|
||||
});
|
||||
}
|
||||
|
|
|
@ -1,3 +1,13 @@
|
|||
# Build config without drift-specific builders. When running a build in this directory, this file replaces build.yaml,
|
||||
# which is what we want! Our builders depend on generated code, so they break the generated build script at the start of
|
||||
# a build.
|
||||
targets:
|
||||
$default:
|
||||
builders:
|
||||
json_serializable:
|
||||
options:
|
||||
checked: true
|
||||
any_map: true
|
||||
disallow_unrecognized_keys: true
|
||||
field_rename: snake
|
||||
create_to_json: false
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
import 'package:drift/drift.dart' show SqlDialect;
|
||||
import 'package:json_annotation/json_annotation.dart';
|
||||
import 'package:meta/meta.dart';
|
||||
import 'package:sqlparser/sqlparser.dart' show SqliteVersion;
|
||||
|
@ -65,6 +66,9 @@ class MoorOptions {
|
|||
@JsonKey(name: 'sqlite')
|
||||
final SqliteAnalysisOptions? sqliteAnalysisOptions;
|
||||
|
||||
@JsonKey(name: 'sql')
|
||||
final DialectOptions? dialect;
|
||||
|
||||
@JsonKey(name: 'eagerly_load_dart_ast', defaultValue: false)
|
||||
final bool eagerlyLoadDartAst;
|
||||
|
||||
|
@ -118,6 +122,7 @@ class MoorOptions {
|
|||
this.scopedDartComponents = false,
|
||||
this.modules = const [],
|
||||
this.sqliteAnalysisOptions,
|
||||
this.dialect = const DialectOptions(SqlDialect.sqlite, null),
|
||||
});
|
||||
|
||||
MoorOptions({
|
||||
|
@ -140,6 +145,7 @@ class MoorOptions {
|
|||
required this.scopedDartComponents,
|
||||
required this.modules,
|
||||
required this.sqliteAnalysisOptions,
|
||||
this.dialect,
|
||||
}) {
|
||||
if (sqliteAnalysisOptions != null && modules.isNotEmpty) {
|
||||
throw ArgumentError.value(
|
||||
|
@ -149,13 +155,26 @@ class MoorOptions {
|
|||
'Try moving modules into the sqlite block.',
|
||||
);
|
||||
}
|
||||
|
||||
if (dialect != null && sqliteAnalysisOptions != null) {
|
||||
throw ArgumentError.value(
|
||||
sqliteAnalysisOptions,
|
||||
'sqlite',
|
||||
'The sqlite field cannot be used together with the `sql` option. '
|
||||
'Try moving it to `sql.options`.',
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
factory MoorOptions.fromJson(Map json) => _$MoorOptionsFromJson(json);
|
||||
|
||||
SqliteAnalysisOptions? get sqliteOptions {
|
||||
return dialect?.options ?? sqliteAnalysisOptions;
|
||||
}
|
||||
|
||||
/// All enabled sqlite modules from these options.
|
||||
List<SqlModule> get effectiveModules {
|
||||
return sqliteAnalysisOptions?.modules ?? modules;
|
||||
return sqliteOptions?.modules ?? modules;
|
||||
}
|
||||
|
||||
/// Whether the [module] has been enabled in this configuration.
|
||||
|
@ -164,19 +183,25 @@ class MoorOptions {
|
|||
/// Checks whether a deprecated option is enabled.
|
||||
bool get enabledDeprecatedOption => eagerlyLoadDartAst;
|
||||
|
||||
SqlDialect get effectiveDialect => dialect?.dialect ?? SqlDialect.sqlite;
|
||||
|
||||
/// The assumed sqlite version used when analyzing queries.
|
||||
SqliteVersion get sqliteVersion {
|
||||
return sqliteAnalysisOptions?.version ?? _defaultSqliteVersion;
|
||||
return sqliteOptions?.version ?? _defaultSqliteVersion;
|
||||
}
|
||||
}
|
||||
|
||||
@JsonSerializable(
|
||||
checked: true,
|
||||
anyMap: true,
|
||||
disallowUnrecognizedKeys: true,
|
||||
fieldRename: FieldRename.snake,
|
||||
createToJson: false,
|
||||
)
|
||||
@JsonSerializable()
|
||||
class DialectOptions {
|
||||
final SqlDialect dialect;
|
||||
final SqliteAnalysisOptions? options;
|
||||
|
||||
const DialectOptions(this.dialect, this.options);
|
||||
|
||||
factory DialectOptions.fromJson(Map json) => _$DialectOptionsFromJson(json);
|
||||
}
|
||||
|
||||
@JsonSerializable()
|
||||
class SqliteAnalysisOptions {
|
||||
@JsonKey(name: 'modules', defaultValue: [])
|
||||
final List<SqlModule> modules;
|
||||
|
|
|
@ -22,6 +22,7 @@ MoorOptions _$MoorOptionsFromJson(Map json) => $checkedCreate(
|
|||
'generate_connect_constructor',
|
||||
'sqlite_modules',
|
||||
'sqlite',
|
||||
'sql',
|
||||
'eagerly_load_dart_ast',
|
||||
'data_class_to_companions',
|
||||
'mutable_classes',
|
||||
|
@ -82,6 +83,8 @@ MoorOptions _$MoorOptionsFromJson(Map json) => $checkedCreate(
|
|||
'sqlite',
|
||||
(v) =>
|
||||
v == null ? null : SqliteAnalysisOptions.fromJson(v as Map)),
|
||||
dialect: $checkedConvert('sql',
|
||||
(v) => v == null ? null : DialectOptions.fromJson(v as Map)),
|
||||
);
|
||||
return val;
|
||||
},
|
||||
|
@ -107,7 +110,8 @@ MoorOptions _$MoorOptionsFromJson(Map json) => $checkedCreate(
|
|||
'newSqlCodeGeneration': 'new_sql_code_generation',
|
||||
'scopedDartComponents': 'scoped_dart_components',
|
||||
'modules': 'sqlite_modules',
|
||||
'sqliteAnalysisOptions': 'sqlite'
|
||||
'sqliteAnalysisOptions': 'sqlite',
|
||||
'dialect': 'sql'
|
||||
},
|
||||
);
|
||||
|
||||
|
@ -144,6 +148,32 @@ const _$SqlModuleEnumMap = {
|
|||
SqlModule.math: 'math',
|
||||
};
|
||||
|
||||
DialectOptions _$DialectOptionsFromJson(Map json) => $checkedCreate(
|
||||
'DialectOptions',
|
||||
json,
|
||||
($checkedConvert) {
|
||||
$checkKeys(
|
||||
json,
|
||||
allowedKeys: const ['dialect', 'options'],
|
||||
);
|
||||
final val = DialectOptions(
|
||||
$checkedConvert(
|
||||
'dialect', (v) => _$enumDecode(_$SqlDialectEnumMap, v)),
|
||||
$checkedConvert(
|
||||
'options',
|
||||
(v) =>
|
||||
v == null ? null : SqliteAnalysisOptions.fromJson(v as Map)),
|
||||
);
|
||||
return val;
|
||||
},
|
||||
);
|
||||
|
||||
const _$SqlDialectEnumMap = {
|
||||
SqlDialect.sqlite: 'sqlite',
|
||||
SqlDialect.mysql: 'mysql',
|
||||
SqlDialect.postgres: 'postgres',
|
||||
};
|
||||
|
||||
SqliteAnalysisOptions _$SqliteAnalysisOptionsFromJson(Map json) =>
|
||||
$checkedCreate(
|
||||
'SqliteAnalysisOptions',
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import 'package:analyzer/dart/ast/ast.dart';
|
||||
import 'package:analyzer/dart/element/element.dart';
|
||||
import 'package:drift/drift.dart';
|
||||
import 'package:drift_dev/src/analyzer/options.dart';
|
||||
import 'package:drift_dev/writer.dart';
|
||||
import 'package:sqlparser/sqlparser.dart' show ReferenceAction;
|
||||
|
@ -153,18 +154,20 @@ class MoorColumn implements HasDeclaration, HasType {
|
|||
return options.nnbd ? '$code?' : code;
|
||||
}
|
||||
|
||||
String sqlTypeName() {
|
||||
SqlType sqlType() {
|
||||
switch (type) {
|
||||
case ColumnType.integer:
|
||||
return const IntType();
|
||||
case ColumnType.boolean:
|
||||
return const BoolType();
|
||||
case ColumnType.datetime:
|
||||
return 'INTEGER';
|
||||
return const IntType();
|
||||
case ColumnType.text:
|
||||
return 'TEXT';
|
||||
return const StringType();
|
||||
case ColumnType.blob:
|
||||
return 'BLOB';
|
||||
return const BlobType();
|
||||
case ColumnType.real:
|
||||
return 'REAL';
|
||||
return const RealType();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -2,8 +2,8 @@ import 'package:analyzer/dart/element/element.dart';
|
|||
import 'package:drift_dev/src/analyzer/options.dart';
|
||||
import 'package:drift_dev/src/analyzer/runner/file_graph.dart';
|
||||
import 'package:drift_dev/src/model/sources.dart';
|
||||
import 'package:drift_dev/src/writer/queries/sql_writer.dart';
|
||||
import 'package:sqlparser/sqlparser.dart';
|
||||
import 'package:sqlparser/utils/node_to_text.dart';
|
||||
|
||||
part 'columns.dart';
|
||||
part 'database.dart';
|
||||
|
@ -40,7 +40,8 @@ abstract class MoorDeclaration extends Declaration {
|
|||
extension ToSql on MoorDeclaration {
|
||||
String exportSql(MoorOptions options) {
|
||||
if (options.newSqlCodeGeneration) {
|
||||
return node.toSql();
|
||||
final writer = SqlWriter(options, escapeForDart: false);
|
||||
return writer.writeSql(node);
|
||||
} else {
|
||||
return node.span!.text;
|
||||
}
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import 'package:drift_dev/src/analyzer/options.dart';
|
||||
import 'package:drift_dev/src/analyzer/runner/file_graph.dart';
|
||||
import 'package:drift_dev/src/writer/queries/sql_writer.dart';
|
||||
import 'package:sqlparser/sqlparser.dart';
|
||||
import 'package:sqlparser/utils/node_to_text.dart';
|
||||
|
||||
import 'model.dart';
|
||||
|
||||
|
@ -39,7 +39,8 @@ class SpecialQuery implements MoorSchemaEntity {
|
|||
String formattedSql(MoorOptions options) {
|
||||
final decl = declaration;
|
||||
if (decl is MoorSpecialQueryDeclaration && options.newSqlCodeGeneration) {
|
||||
return decl.node.statement.toSql();
|
||||
final writer = SqlWriter(options, escapeForDart: false);
|
||||
return writer.writeSql(decl.node.statement);
|
||||
}
|
||||
return sql;
|
||||
}
|
||||
|
|
|
@ -8,7 +8,6 @@ import 'package:drift_dev/src/utils/string_escaper.dart';
|
|||
import 'package:drift_dev/writer.dart';
|
||||
import 'package:recase/recase.dart';
|
||||
import 'package:sqlparser/sqlparser.dart' hide ResultColumn;
|
||||
import 'package:sqlparser/utils/node_to_text.dart';
|
||||
|
||||
import 'sql_writer.dart';
|
||||
|
||||
|
@ -328,9 +327,9 @@ class QueryWriter {
|
|||
kind.defaultValue != null) {
|
||||
// Wrap the default expression in parentheses to avoid issues with
|
||||
// the surrounding precedence in SQL.
|
||||
final defaultSql =
|
||||
"'(${escapeForDart(kind.defaultValue!.toSql())})'";
|
||||
defaultCode = 'const CustomExpression($defaultSql)';
|
||||
final sql = SqlWriter(scope.options)
|
||||
.writeNodeIntoStringLiteral(Parentheses(kind.defaultValue!));
|
||||
defaultCode = 'const CustomExpression($sql)';
|
||||
} else if (kind is SimpleDartPlaceholderType &&
|
||||
kind.kind == SimpleDartPlaceholderKind.orderBy) {
|
||||
defaultCode = 'const OrderBy.nothing()';
|
||||
|
@ -424,7 +423,7 @@ class QueryWriter {
|
|||
/// into 'SELECT * FROM t WHERE x IN ($expandedVar1)'.
|
||||
String _queryCode() {
|
||||
if (scope.options.newSqlCodeGeneration) {
|
||||
return SqlWriter(query).write();
|
||||
return SqlWriter(scope.options, query: query).write();
|
||||
} else {
|
||||
return _legacyQueryCode();
|
||||
}
|
||||
|
|
|
@ -1,6 +1,9 @@
|
|||
import 'package:charcode/ascii.dart';
|
||||
import 'package:collection/collection.dart';
|
||||
import 'package:drift/drift.dart' show SqlDialect;
|
||||
import 'package:drift/sqlite_keywords.dart';
|
||||
import 'package:drift_dev/moor_generator.dart';
|
||||
import 'package:drift_dev/src/analyzer/options.dart';
|
||||
import 'package:drift_dev/src/utils/string_escaper.dart';
|
||||
import 'package:sqlparser/sqlparser.dart';
|
||||
import 'package:sqlparser/utils/node_to_text.dart';
|
||||
|
@ -23,14 +26,19 @@ String placeholderContextName(FoundDartPlaceholder placeholder) {
|
|||
|
||||
class SqlWriter extends NodeSqlBuilder {
|
||||
final StringBuffer _out;
|
||||
final SqlQuery query;
|
||||
final SqlQuery? query;
|
||||
final MoorOptions options;
|
||||
final Map<NestedStarResultColumn, NestedResultTable> _starColumnToResolved;
|
||||
|
||||
SqlWriter._(this.query, this._starColumnToResolved, StringBuffer out)
|
||||
: _out = out,
|
||||
super(_DartEscapingSink(out));
|
||||
bool get _isPostgres => options.effectiveDialect == SqlDialect.postgres;
|
||||
|
||||
factory SqlWriter(SqlQuery query) {
|
||||
SqlWriter._(this.query, this.options, this._starColumnToResolved,
|
||||
StringBuffer out, bool escapeForDart)
|
||||
: _out = out,
|
||||
super(escapeForDart ? _DartEscapingSink(out) : out);
|
||||
|
||||
factory SqlWriter(MoorOptions options,
|
||||
{SqlQuery? query, bool escapeForDart = true}) {
|
||||
// Index nested results by their syntactic origin for faster lookups later
|
||||
var doubleStarColumnToResolvedTable =
|
||||
const <NestedStarResultColumn, NestedResultTable>{};
|
||||
|
@ -41,27 +49,45 @@ class SqlWriter extends NodeSqlBuilder {
|
|||
nestedResult.from: nestedResult
|
||||
};
|
||||
}
|
||||
return SqlWriter._(query, doubleStarColumnToResolvedTable, StringBuffer());
|
||||
return SqlWriter._(query, options, doubleStarColumnToResolvedTable,
|
||||
StringBuffer(), escapeForDart);
|
||||
}
|
||||
|
||||
String write() {
|
||||
return writeNodeIntoStringLiteral(query!.fromContext!.root);
|
||||
}
|
||||
|
||||
String writeNodeIntoStringLiteral(AstNode node) {
|
||||
_out.write("'");
|
||||
visit(query.fromContext!.root, null);
|
||||
visit(node, null);
|
||||
_out.write("'");
|
||||
|
||||
return _out.toString();
|
||||
}
|
||||
|
||||
String writeSql(AstNode node) {
|
||||
visit(node, null);
|
||||
return _out.toString();
|
||||
}
|
||||
|
||||
FoundVariable? _findMoorVar(Variable target) {
|
||||
return query.variables.firstWhereOrNull(
|
||||
return query!.variables.firstWhereOrNull(
|
||||
(f) => f.variable.resolvedIndex == target.resolvedIndex);
|
||||
}
|
||||
|
||||
@override
|
||||
void identifier(String identifier,
|
||||
{bool spaceBefore = true, bool spaceAfter = true}) {
|
||||
final escaped = escapeIfNeeded(identifier, options.effectiveDialect);
|
||||
symbol(escaped, spaceBefore: spaceBefore, spaceAfter: spaceAfter);
|
||||
}
|
||||
|
||||
void _writeMoorVariable(FoundVariable variable) {
|
||||
if (variable.isArray) {
|
||||
_writeRawInSpaces('(\$${expandedName(variable)})');
|
||||
} else {
|
||||
_writeRawInSpaces('?${variable.index}');
|
||||
final mark = _isPostgres ? '@' : '?';
|
||||
_writeRawInSpaces('$mark${variable.index}');
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -118,7 +144,7 @@ class SqlWriter extends NodeSqlBuilder {
|
|||
}
|
||||
} else if (e is DartPlaceholder) {
|
||||
final moorPlaceholder =
|
||||
query.placeholders.singleWhere((p) => p.astNode == e);
|
||||
query!.placeholders.singleWhere((p) => p.astNode == e);
|
||||
|
||||
_writeRawInSpaces('\${${placeholderContextName(moorPlaceholder)}.sql}');
|
||||
} else {
|
||||
|
|
|
@ -72,7 +72,7 @@ abstract class TableOrViewWriter {
|
|||
final name = escapeIfNeeded(column.name.name);
|
||||
defaultConstraints.add('CHECK ($name IN (0, 1))');
|
||||
}
|
||||
additionalParams['typeName'] = asDartLiteral(column.sqlTypeName());
|
||||
additionalParams['type'] = 'const ${column.sqlType().runtimeType}()';
|
||||
|
||||
if (tableOrView is MoorTable) {
|
||||
additionalParams['requiredDuringInsert'] = (tableOrView as MoorTable)
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
//@dart=2.9
|
||||
import 'package:drift_dev/moor_generator.dart';
|
||||
import 'package:drift_dev/src/analyzer/options.dart';
|
||||
import 'package:drift_dev/src/writer/queries/sql_writer.dart';
|
||||
import 'package:sqlparser/sqlparser.dart';
|
||||
import 'package:test/test.dart';
|
||||
|
@ -11,7 +12,8 @@ void main() {
|
|||
final query = SqlSelectQuery(
|
||||
'name', context, [], [], InferredResultSet(null, []), null);
|
||||
|
||||
final result = SqlWriter(query).write();
|
||||
final result =
|
||||
SqlWriter(const MoorOptions.defaults(), query: query).write();
|
||||
|
||||
expect(result, expectedDart);
|
||||
}
|
||||
|
|
|
@ -11,8 +11,7 @@ class KeyValue extends DataClass implements Insertable<KeyValue> {
|
|||
final String key;
|
||||
final String value;
|
||||
KeyValue({required this.key, required this.value});
|
||||
factory KeyValue.fromData(Map<String, dynamic> data, GeneratedDatabase db,
|
||||
{String? prefix}) {
|
||||
factory KeyValue.fromData(Map<String, dynamic> data, {String? prefix}) {
|
||||
final effectivePrefix = prefix ?? '';
|
||||
return KeyValue(
|
||||
key: const StringType()
|
||||
|
@ -133,11 +132,11 @@ class $KeyValuesTable extends KeyValues
|
|||
final VerificationMeta _keyMeta = const VerificationMeta('key');
|
||||
late final GeneratedColumn<String?> key = GeneratedColumn<String?>(
|
||||
'key', aliasedName, false,
|
||||
typeName: 'TEXT', requiredDuringInsert: true);
|
||||
type: const StringType(), requiredDuringInsert: true);
|
||||
final VerificationMeta _valueMeta = const VerificationMeta('value');
|
||||
late final GeneratedColumn<String?> value = GeneratedColumn<String?>(
|
||||
'value', aliasedName, false,
|
||||
typeName: 'TEXT', requiredDuringInsert: true);
|
||||
type: const StringType(), requiredDuringInsert: true);
|
||||
@override
|
||||
List<GeneratedColumn> get $columns => [key, value];
|
||||
@override
|
||||
|
@ -168,7 +167,7 @@ class $KeyValuesTable extends KeyValues
|
|||
Set<GeneratedColumn> get $primaryKey => {key};
|
||||
@override
|
||||
KeyValue map(Map<String, dynamic> data, {String? tablePrefix}) {
|
||||
return KeyValue.fromData(data, _db,
|
||||
return KeyValue.fromData(data,
|
||||
prefix: tablePrefix != null ? '$tablePrefix.' : null);
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,11 @@
# Files and directories created by pub
.dart_tool/
.packages
# Remove the following pattern if you wish to check in your lock file
pubspec.lock

# Conventional directory for build outputs
build/

# Directory created by dartdoc
doc/api/
@ -0,0 +1,18 @@
|
|||
An experimental postgres backend for Drift.

## Using this

For general notes on using drift, see [this guide](https://drift.simonbinder.eu/getting-started/).

To use drift_postgres, add this to your `pubspec.yaml`:

```yaml
dependencies:
  drift: "$latest version"
  drift_postgres:
    git:
      url: https://github.com/simolus3/moor.git
      path: extras/drift_postgres
```

To connect your drift database class to postgres, use a `PgDatabase` from `package:drift_postgres/postgres.dart`.
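A minimal sketch of what that wiring could look like, assuming a generated drift database class named `MyDatabase` whose constructor accepts a `QueryExecutor` (the class name and connection parameters are placeholders, not part of this package):

```dart
import 'package:drift_postgres/postgres.dart';
import 'package:postgres/postgres.dart';

Future<void> main() async {
  // Placeholder connection parameters; point these at your own server.
  final pgConnection = PostgreSQLConnection('localhost', 5432, 'postgres',
      username: 'postgres', password: 'postgres');

  // MyDatabase stands in for your own @DriftDatabase class generated by drift_dev.
  final db = MyDatabase(PgDatabase(pgConnection));

  // Queries run as usual; drift emits postgres-style @1, @2, ... variables.
  final row = await db.customSelect('SELECT 1 AS x').getSingle();
  print(row.data);

  await db.close();
}
```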
@ -0,0 +1,7 @@
/// PostgreSQL
@experimental
library drift.postgres;

import 'package:meta/meta.dart';

export 'src/pg_database.dart';
@ -0,0 +1,164 @@
|
|||
import 'package:collection/collection.dart';
|
||||
import 'package:drift/backends.dart';
|
||||
import 'package:postgres/postgres.dart';
|
||||
|
||||
/// A drift database implementation that talks to a postgres database.
|
||||
class PgDatabase extends DelegatedDatabase {
|
||||
/// Creates a drift database implementation from a postgres database
|
||||
/// [connection].
|
||||
PgDatabase(PostgreSQLConnection connection)
|
||||
: super(_PgDelegate(connection, connection),
|
||||
isSequential: true, logStatements: true);
|
||||
}
|
||||
|
||||
class _PgDelegate extends DatabaseDelegate {
|
||||
final PostgreSQLConnection _db;
|
||||
final PostgreSQLExecutionContext _ec;
|
||||
|
||||
@override
|
||||
SqlDialect get dialect => SqlDialect.postgres;
|
||||
|
||||
_PgDelegate(this._db, this._ec) : closeUnderlyingWhenClosed = false;
|
||||
|
||||
bool _isOpen = false;
|
||||
|
||||
final bool closeUnderlyingWhenClosed;
|
||||
|
||||
@override
|
||||
TransactionDelegate get transactionDelegate => _PgTransactionDelegate(_db);
|
||||
|
||||
@override
|
||||
late DbVersionDelegate versionDelegate;
|
||||
|
||||
@override
|
||||
Future<bool> get isOpen => Future.value(_isOpen);
|
||||
|
||||
@override
|
||||
Future<void> open(QueryExecutorUser user) async {
|
||||
final pgVersionDelegate = _PgVersionDelegate(_db);
|
||||
|
||||
await _db.open();
|
||||
await pgVersionDelegate.init();
|
||||
|
||||
versionDelegate = pgVersionDelegate;
|
||||
_isOpen = true;
|
||||
}
|
||||
|
||||
Future _ensureOpen() async {
|
||||
if (_db.isClosed) {
|
||||
await _db.open();
|
||||
}
|
||||
}
|
||||
|
||||
@override
|
||||
Future<void> runBatched(BatchedStatements statements) async {
|
||||
await _ensureOpen();
|
||||
|
||||
for (final row in statements.arguments) {
|
||||
final stmt = statements.statements[row.statementIndex];
|
||||
final args = row.arguments;
|
||||
|
||||
await _ec.execute(stmt,
|
||||
substitutionValues: args
|
||||
.asMap()
|
||||
.map((key, value) => MapEntry((key + 1).toString(), value)));
|
||||
}
|
||||
|
||||
return Future.value();
|
||||
}
|
||||
|
||||
Future<int> _runWithArgs(String statement, List<Object?> args) async {
|
||||
await _ensureOpen();
|
||||
|
||||
if (args.isEmpty) {
|
||||
return _ec.execute(statement);
|
||||
} else {
|
||||
return _ec.execute(statement,
|
||||
substitutionValues: args
|
||||
.asMap()
|
||||
.map((key, value) => MapEntry((key + 1).toString(), value)));
|
||||
}
|
||||
}
|
||||
|
||||
@override
|
||||
Future<void> runCustom(String statement, List<Object?> args) async {
|
||||
await _runWithArgs(statement, args);
|
||||
}
|
||||
|
||||
@override
|
||||
Future<int> runInsert(String statement, List<Object?> args) async {
|
||||
await _ensureOpen();
|
||||
PostgreSQLResult result;
|
||||
if (args.isEmpty) {
|
||||
result = await _ec.query(statement);
|
||||
} else {
|
||||
result = await _ec.query(statement,
|
||||
substitutionValues: args
|
||||
.asMap()
|
||||
.map((key, value) => MapEntry((key + 1).toString(), value)));
|
||||
}
|
||||
return result.firstOrNull?[0] as int? ?? 0;
|
||||
}
|
||||
|
||||
@override
|
||||
Future<int> runUpdate(String statement, List<Object?> args) async {
|
||||
return _runWithArgs(statement, args);
|
||||
}
|
||||
|
||||
@override
|
||||
Future<QueryResult> runSelect(String statement, List<Object?> args) async {
|
||||
await _ensureOpen();
|
||||
final result = await _ec.query(statement,
|
||||
substitutionValues: args
|
||||
.asMap()
|
||||
.map((key, value) => MapEntry((key + 1).toString(), value)));
|
||||
|
||||
return Future.value(QueryResult.fromRows(
|
||||
result.map((e) => e.toColumnMap()).toList(growable: false)));
|
||||
}
|
||||
|
||||
@override
|
||||
Future<void> close() async {
|
||||
if (closeUnderlyingWhenClosed) {
|
||||
await _db.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class _PgVersionDelegate extends DynamicVersionDelegate {
|
||||
final PostgreSQLConnection database;
|
||||
|
||||
_PgVersionDelegate(this.database);
|
||||
|
||||
@override
|
||||
Future<int> get schemaVersion async {
|
||||
final result = await database.query('SELECT version FROM __schema');
|
||||
return result[0][0] as int;
|
||||
}
|
||||
|
||||
Future init() async {
|
||||
await database.query('CREATE TABLE IF NOT EXISTS __schema ('
|
||||
'version integer NOT NULL DEFAULT 0)');
|
||||
final count = await database.query('SELECT COUNT(*) FROM __schema');
|
||||
if (count[0][0] as int == 0) {
|
||||
await database.query('INSERT INTO __schema (version) VALUES (0)');
|
||||
}
|
||||
}
|
||||
|
||||
@override
|
||||
Future<void> setSchemaVersion(int version) async {
|
||||
await database.query('UPDATE __schema SET version = @1',
|
||||
substitutionValues: {'1': version});
|
||||
}
|
||||
}
|
||||
|
||||
class _PgTransactionDelegate extends WrappedTransactionDelegate {
|
||||
final PostgreSQLConnection _db;
|
||||
|
||||
const _PgTransactionDelegate(this._db);
|
||||
|
||||
@override
|
||||
Future runInTransaction(Future Function(QueryDelegate p1) run) async {
|
||||
await _db.transaction((connection) => run(_PgDelegate(_db, connection)));
|
||||
}
|
||||
}
|
|
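The delegate above repeats one small conversion in several places: drift hands it positional arguments, while `package:postgres` wants named substitution values keyed `'1'`, `'2'`, ... to match the `@1`, `@2` markers in the statement text. A standalone sketch of that mapping (the helper name is made up for illustration):

```dart
/// Turns a positional argument list like [10, 'foo'] into
/// {'1': 10, '2': 'foo'}, matching @1, @2, ... placeholders in the SQL text.
Map<String, Object?> substitutionValuesFor(List<Object?> args) {
  return args
      .asMap()
      .map((index, value) => MapEntry((index + 1).toString(), value));
}

void main() {
  print(substitutionValuesFor([10, 'foo'])); // {1: 10, 2: foo}
}
```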
@ -0,0 +1,24 @@
|
|||
name: drift_postgres
|
||||
description: Postgres support for drift
|
||||
version: 1.0.0
|
||||
|
||||
environment:
|
||||
sdk: '>=2.12.0-0 <3.0.0'
|
||||
|
||||
dependencies:
|
||||
drift: ^1.0.1
|
||||
postgres: ^2.4.1+2
|
||||
|
||||
dev_dependencies:
|
||||
build_runner: ^2.1.5
|
||||
drift_dev:
|
||||
|
||||
dependency_overrides:
|
||||
drift:
|
||||
path: ../../drift
|
||||
drift_dev:
|
||||
path: ../../drift_dev
|
||||
sqlparser:
|
||||
path: ../../sqlparser
|
||||
moor:
|
||||
path: ../../moor
|
|
@ -130,13 +130,13 @@ class Entries extends Table with TableInfo<Entries, Entrie> {
|
|||
final VerificationMeta _idMeta = const VerificationMeta('id');
|
||||
late final GeneratedColumn<int?> id = GeneratedColumn<int?>(
|
||||
'id', aliasedName, false,
|
||||
typeName: 'INTEGER',
|
||||
type: const IntType(),
|
||||
requiredDuringInsert: false,
|
||||
$customConstraints: 'PRIMARY KEY');
|
||||
final VerificationMeta _valueMeta = const VerificationMeta('value');
|
||||
late final GeneratedColumn<String?> value = GeneratedColumn<String?>(
|
||||
'text', aliasedName, false,
|
||||
typeName: 'TEXT',
|
||||
type: const StringType(),
|
||||
requiredDuringInsert: true,
|
||||
$customConstraints: 'NOT NULL');
|
||||
@override
|
||||
|
|
|
@ -1 +1,2 @@
|
|||
#include? "Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"
|
||||
#include "Generated.xcconfig"
|
||||
|
|
|
@ -1 +1,2 @@
|
|||
#include? "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"
|
||||
#include "Generated.xcconfig"
|
||||
|
|
|
@ -0,0 +1,38 @@
|
|||
# Uncomment this line to define a global platform for your project
|
||||
# platform :ios, '9.0'
|
||||
|
||||
# CocoaPods analytics sends network stats synchronously affecting flutter build latency.
|
||||
ENV['COCOAPODS_DISABLE_STATS'] = 'true'
|
||||
|
||||
project 'Runner', {
|
||||
'Debug' => :debug,
|
||||
'Profile' => :release,
|
||||
'Release' => :release,
|
||||
}
|
||||
|
||||
def flutter_root
|
||||
generated_xcode_build_settings_path = File.expand_path(File.join('..', 'Flutter', 'Generated.xcconfig'), __FILE__)
|
||||
unless File.exist?(generated_xcode_build_settings_path)
|
||||
raise "#{generated_xcode_build_settings_path} must exist. If you're running pod install manually, make sure flutter pub get is executed first"
|
||||
end
|
||||
|
||||
File.foreach(generated_xcode_build_settings_path) do |line|
|
||||
matches = line.match(/FLUTTER_ROOT\=(.*)/)
|
||||
return matches[1].strip if matches
|
||||
end
|
||||
raise "FLUTTER_ROOT not found in #{generated_xcode_build_settings_path}. Try deleting Generated.xcconfig, then run flutter pub get"
|
||||
end
|
||||
|
||||
require File.expand_path(File.join('packages', 'flutter_tools', 'bin', 'podhelper'), flutter_root)
|
||||
|
||||
flutter_ios_podfile_setup
|
||||
|
||||
target 'Runner' do
|
||||
flutter_install_all_ios_pods File.dirname(File.realpath(__FILE__))
|
||||
end
|
||||
|
||||
post_install do |installer|
|
||||
installer.pods_project.targets.each do |target|
|
||||
flutter_additional_ios_build_settings(target)
|
||||
end
|
||||
end
|
|
@ -1 +1,2 @@
|
|||
#include? "Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"
|
||||
#include "Generated.xcconfig"
|
||||
|
|
|
@ -1 +1,2 @@
|
|||
#include? "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"
|
||||
#include "Generated.xcconfig"
|
||||
|
|
|
@ -0,0 +1,41 @@
|
|||
# Uncomment this line to define a global platform for your project
|
||||
# platform :ios, '9.0'
|
||||
|
||||
# CocoaPods analytics sends network stats synchronously affecting flutter build latency.
|
||||
ENV['COCOAPODS_DISABLE_STATS'] = 'true'
|
||||
|
||||
project 'Runner', {
|
||||
'Debug' => :debug,
|
||||
'Profile' => :release,
|
||||
'Release' => :release,
|
||||
}
|
||||
|
||||
def flutter_root
|
||||
generated_xcode_build_settings_path = File.expand_path(File.join('..', 'Flutter', 'Generated.xcconfig'), __FILE__)
|
||||
unless File.exist?(generated_xcode_build_settings_path)
|
||||
raise "#{generated_xcode_build_settings_path} must exist. If you're running pod install manually, make sure flutter pub get is executed first"
|
||||
end
|
||||
|
||||
File.foreach(generated_xcode_build_settings_path) do |line|
|
||||
matches = line.match(/FLUTTER_ROOT\=(.*)/)
|
||||
return matches[1].strip if matches
|
||||
end
|
||||
raise "FLUTTER_ROOT not found in #{generated_xcode_build_settings_path}. Try deleting Generated.xcconfig, then run flutter pub get"
|
||||
end
|
||||
|
||||
require File.expand_path(File.join('packages', 'flutter_tools', 'bin', 'podhelper'), flutter_root)
|
||||
|
||||
flutter_ios_podfile_setup
|
||||
|
||||
target 'Runner' do
|
||||
use_frameworks!
|
||||
use_modular_headers!
|
||||
|
||||
flutter_install_all_ios_pods File.dirname(File.realpath(__FILE__))
|
||||
end
|
||||
|
||||
post_install do |installer|
|
||||
installer.pods_project.targets.each do |target|
|
||||
flutter_additional_ios_build_settings(target)
|
||||
end
|
||||
end
|
|
@ -0,0 +1,11 @@
|
|||
# Files and directories created by pub
|
||||
.dart_tool/
|
||||
.packages
|
||||
# Remove the following pattern if you wish to check in your lock file
|
||||
pubspec.lock
|
||||
|
||||
# Conventional directory for build outputs
|
||||
build/
|
||||
|
||||
# Directory created by dartdoc
|
||||
doc/api/
|
|
@ -0,0 +1,25 @@
|
|||
name: pg
|
||||
description: A sample command-line application.
|
||||
# version: 1.0.0
|
||||
# homepage: https://www.example.com
|
||||
# author: Simon Binder <oss@simonbinder.eu>
|
||||
|
||||
environment:
|
||||
sdk: '>=2.12.0 <3.0.0'
|
||||
|
||||
dependencies:
|
||||
tests:
|
||||
path: ../tests
|
||||
postgres: ^2.4.1+2
|
||||
drift_postgres: ^1.0.0
|
||||
|
||||
dev_dependencies:
|
||||
test: ^1.16.0
|
||||
|
||||
dependency_overrides:
|
||||
drift:
|
||||
path: ../../../drift
|
||||
moor:
|
||||
path: ../../../moor
|
||||
drift_postgres:
|
||||
path: ../../../extras/drift_postgres
|
|
@ -0,0 +1,31 @@
|
|||
import 'package:drift_postgres/postgres.dart';
|
||||
import 'package:postgres/postgres.dart';
|
||||
import 'package:tests/tests.dart';
|
||||
|
||||
class PgExecutor extends TestExecutor {
|
||||
@override
|
||||
bool get supportsReturning => true;
|
||||
|
||||
@override
|
||||
DatabaseConnection createConnection() {
|
||||
final pgConnection = PostgreSQLConnection('localhost', 5432, 'postgres',
|
||||
username: 'postgres', password: 'postgres');
|
||||
return DatabaseConnection.fromExecutor(PgDatabase(pgConnection));
|
||||
}
|
||||
|
||||
@override
|
||||
Future clearDatabaseAndClose(Database db) async {
|
||||
await db.customStatement('DROP SCHEMA public CASCADE;');
|
||||
await db.customStatement('CREATE SCHEMA public;');
|
||||
await db.customStatement('GRANT ALL ON SCHEMA public TO postgres;');
|
||||
await db.customStatement('GRANT ALL ON SCHEMA public TO public;');
|
||||
await db.close();
|
||||
}
|
||||
|
||||
@override
|
||||
Future deleteData() async {}
|
||||
}
|
||||
|
||||
void main() {
|
||||
runAllTests(PgExecutor());
|
||||
}
|
|
@ -0,0 +1,50 @@
|
|||
import 'dart:io';
|
||||
|
||||
import 'package:moor/ffi.dart';
|
||||
import 'package:path/path.dart';
|
||||
import 'package:test/test.dart';
|
||||
import 'package:tests/database/database.dart';
|
||||
|
||||
final File mainFile =
|
||||
File(join(Directory.systemTemp.path, 'moor-save-and-restore-tests-1'));
|
||||
final File createdForSwap =
|
||||
File(join(Directory.systemTemp.path, 'moor-save-and-restore-tests-2'));
|
||||
|
||||
void main() {
|
||||
test('can save and restore a database', () async {
|
||||
if (await mainFile.exists()) {
|
||||
await mainFile.delete();
|
||||
}
|
||||
if (await createdForSwap.exists()) {
|
||||
await createdForSwap.delete();
|
||||
}
|
||||
|
||||
const nameInSwap = 'swap user';
|
||||
const nameInMain = 'main';
|
||||
|
||||
// Prepare the file we're swapping in later
|
||||
final dbForSetup = Database.executor(VmDatabase(createdForSwap));
|
||||
await dbForSetup.into(dbForSetup.users).insert(
|
||||
UsersCompanion.insert(name: nameInSwap, birthDate: DateTime.now()));
|
||||
await dbForSetup.close();
|
||||
|
||||
// Open the main file
|
||||
var db = Database.executor(VmDatabase(mainFile));
|
||||
await db.into(db.users).insert(
|
||||
UsersCompanion.insert(name: nameInMain, birthDate: DateTime.now()));
|
||||
await db.close();
|
||||
|
||||
// Copy swap file to main file
|
||||
await mainFile.writeAsBytes(await createdForSwap.readAsBytes(),
|
||||
flush: true);
|
||||
|
||||
// Re-open database
|
||||
db = Database.executor(VmDatabase(mainFile));
|
||||
final users = await db.select(db.users).get();
|
||||
|
||||
expect(
|
||||
users.map((u) => u.name),
|
||||
allOf(contains(nameInSwap), isNot(contains(nameInMain))),
|
||||
);
|
||||
});
|
||||
}
|
|
@ -1,9 +1,12 @@
|
|||
targets:
|
||||
$default:
|
||||
builders:
|
||||
moor_generator:
|
||||
drift_dev:
|
||||
options:
|
||||
generate_connect_constructor: true
|
||||
override_hash_and_equals_in_result_sets: true
|
||||
sqlite:
|
||||
version: "3.35"
|
||||
new_sql_code_generation: true
|
||||
sql:
|
||||
dialect: postgres # using postgres because sqlite3 understands that too
|
||||
options:
|
||||
version: "3.35"
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import 'package:moor/moor.dart';
|
||||
import 'package:drift/drift.dart';
|
||||
import 'package:tests/database/database.dart';
|
||||
|
||||
const int dashId = 1, dukeId = 2, gopherId = 3;
|
||||
|
@ -23,3 +23,9 @@ UsersCompanion florian = UsersCompanion(
|
|||
'Florian, the fluffy Ferret from Florida familiar with Flutter'),
|
||||
birthDate: Value(DateTime(2015, 4, 29)),
|
||||
);
|
||||
|
||||
UsersCompanion marcell = UsersCompanion(
|
||||
id: const Value(1),
|
||||
name: const Value('Marcell'),
|
||||
birthDate: Value(DateTime(1989, 12, 31)),
|
||||
);
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
import 'dart:convert';
|
||||
|
||||
import 'package:json_annotation/json_annotation.dart' as j;
|
||||
import 'package:moor/moor.dart';
|
||||
|
||||
import 'package:tests/data/sample_data.dart' as people;
|
||||
import 'package:tests/tests.dart';
|
||||
|
||||
part 'database.g.dart';
|
||||
|
||||
|
@ -69,20 +69,20 @@ class PreferenceConverter extends TypeConverter<Preferences, String> {
|
|||
}
|
||||
}
|
||||
|
||||
@UseMoor(
|
||||
@DriftDatabase(
|
||||
tables: [Users, Friendships],
|
||||
queries: {
|
||||
'mostPopularUsers': 'SELECT * FROM users u '
|
||||
'ORDER BY (SELECT COUNT(*) FROM friendships '
|
||||
'WHERE first_user = u.id OR second_user = u.id) DESC LIMIT :amount',
|
||||
'amountOfGoodFriends':
|
||||
'SELECT COUNT(*) FROM friendships f WHERE f.really_good_friends AND '
|
||||
'(f.first_user = :user OR f.second_user = :user)',
|
||||
'amountOfGoodFriends': 'SELECT COUNT(*) FROM friendships f WHERE '
|
||||
'f.really_good_friends = 1 AND '
|
||||
'(f.first_user = :user OR f.second_user = :user)',
|
||||
'friendshipsOf': ''' SELECT
|
||||
f.really_good_friends, user.**
|
||||
f.really_good_friends, "user".**
|
||||
FROM friendships f
|
||||
INNER JOIN users user ON user.id IN (f.first_user, f.second_user) AND
|
||||
user.id != :user
|
||||
INNER JOIN users "user" ON "user".id IN (f.first_user, f.second_user) AND
|
||||
"user".id != :user
|
||||
WHERE (f.first_user = :user OR f.second_user = :user)''',
|
||||
'userCount': 'SELECT COUNT(id) FROM users',
|
||||
'settingsFor': 'SELECT preferences FROM users WHERE id = :user',
|
||||
|
@ -184,7 +184,13 @@ class Database extends _$Database {
|
|||
reallyGoodFriends: friendsValue,
|
||||
);
|
||||
|
||||
await into(friendships).insert(companion, mode: InsertMode.insertOrReplace);
|
||||
if (connection.executor.dialect == SqlDialect.sqlite) {
|
||||
await into(friendships)
|
||||
.insert(companion, mode: InsertMode.insertOrReplace);
|
||||
} else if (connection.executor.dialect == SqlDialect.postgres) {
|
||||
await into(friendships)
|
||||
.insert(companion, mode: InsertMode.insertOrIgnore);
|
||||
}
|
||||
}
|
||||
|
||||
Future<void> updateSettings(int userId, Preferences c) async {
|
||||
|
|
|
@ -6,11 +6,9 @@ part of 'database.dart';
|
|||
// JsonSerializableGenerator
|
||||
// **************************************************************************
|
||||
|
||||
Preferences _$PreferencesFromJson(Map<String, dynamic> json) {
|
||||
return Preferences(
|
||||
json['receiveEmails'] as bool,
|
||||
);
|
||||
}
|
||||
Preferences _$PreferencesFromJson(Map<String, dynamic> json) => Preferences(
|
||||
json['receiveEmails'] as bool,
|
||||
);
|
||||
|
||||
Map<String, dynamic> _$PreferencesToJson(Preferences instance) =>
|
||||
<String, dynamic>{
|
||||
|
@ -39,8 +37,7 @@ class User extends DataClass implements Insertable<User> {
|
|||
required this.birthDate,
|
||||
this.profilePicture,
|
||||
this.preferences});
|
||||
factory User.fromData(Map<String, dynamic> data, GeneratedDatabase db,
|
||||
{String? prefix}) {
|
||||
factory User.fromData(Map<String, dynamic> data, {String? prefix}) {
|
||||
final effectivePrefix = prefix ?? '';
|
||||
return User(
|
||||
id: const IntType()
|
||||
|
@ -87,7 +84,7 @@ class User extends DataClass implements Insertable<User> {
|
|||
|
||||
factory User.fromJson(Map<String, dynamic> json,
|
||||
{ValueSerializer? serializer}) {
|
||||
serializer ??= moorRuntimeOptions.defaultSerializer;
|
||||
serializer ??= driftRuntimeOptions.defaultSerializer;
|
||||
return User(
|
||||
id: serializer.fromJson<int>(json['id']),
|
||||
name: serializer.fromJson<String>(json['name']),
|
||||
|
@ -98,7 +95,7 @@ class User extends DataClass implements Insertable<User> {
|
|||
}
|
||||
@override
|
||||
Map<String, dynamic> toJson({ValueSerializer? serializer}) {
|
||||
serializer ??= moorRuntimeOptions.defaultSerializer;
|
||||
serializer ??= driftRuntimeOptions.defaultSerializer;
|
||||
return <String, dynamic>{
|
||||
'id': serializer.toJson<int>(id),
|
||||
'name': serializer.toJson<String>(name),
|
||||
|
@ -134,12 +131,8 @@ class User extends DataClass implements Insertable<User> {
|
|||
}
|
||||
|
||||
@override
|
||||
int get hashCode => $mrjf($mrjc(
|
||||
id.hashCode,
|
||||
$mrjc(
|
||||
name.hashCode,
|
||||
$mrjc(birthDate.hashCode,
|
||||
$mrjc(profilePicture.hashCode, preferences.hashCode)))));
|
||||
int get hashCode =>
|
||||
Object.hash(id, name, birthDate, profilePicture, preferences);
|
||||
@override
|
||||
bool operator ==(Object other) =>
|
||||
identical(this, other) ||
|
||||
|
@ -246,27 +239,28 @@ class $UsersTable extends Users with TableInfo<$UsersTable, User> {
|
|||
final VerificationMeta _idMeta = const VerificationMeta('id');
|
||||
late final GeneratedColumn<int?> id = GeneratedColumn<int?>(
|
||||
'id', aliasedName, false,
|
||||
typeName: 'INTEGER',
|
||||
type: const IntType(),
|
||||
requiredDuringInsert: false,
|
||||
defaultConstraints: 'PRIMARY KEY AUTOINCREMENT');
|
||||
final VerificationMeta _nameMeta = const VerificationMeta('name');
|
||||
late final GeneratedColumn<String?> name = GeneratedColumn<String?>(
|
||||
'name', aliasedName, false,
|
||||
typeName: 'TEXT', requiredDuringInsert: true);
|
||||
type: const StringType(), requiredDuringInsert: true);
|
||||
final VerificationMeta _birthDateMeta = const VerificationMeta('birthDate');
|
||||
late final GeneratedColumn<DateTime?> birthDate = GeneratedColumn<DateTime?>(
|
||||
'birth_date', aliasedName, false,
|
||||
typeName: 'INTEGER', requiredDuringInsert: true);
|
||||
type: const IntType(), requiredDuringInsert: true);
|
||||
final VerificationMeta _profilePictureMeta =
|
||||
const VerificationMeta('profilePicture');
|
||||
late final GeneratedColumn<Uint8List?> profilePicture =
|
||||
GeneratedColumn<Uint8List?>('profile_picture', aliasedName, true,
|
||||
typeName: 'BLOB', requiredDuringInsert: false);
|
||||
type: const BlobType(), requiredDuringInsert: false);
|
||||
final VerificationMeta _preferencesMeta =
|
||||
const VerificationMeta('preferences');
|
||||
late final GeneratedColumn<String?> preferences = GeneratedColumn<String?>(
|
||||
'preferences', aliasedName, true,
|
||||
typeName: 'TEXT', requiredDuringInsert: false);
|
||||
late final GeneratedColumnWithTypeConverter<Preferences, String?>
|
||||
preferences = GeneratedColumn<String?>('preferences', aliasedName, true,
|
||||
type: const StringType(), requiredDuringInsert: false)
|
||||
.withConverter<Preferences>($UsersTable.$converter0);
|
||||
@override
|
||||
List<GeneratedColumn> get $columns =>
|
||||
[id, name, birthDate, profilePicture, preferences];
|
||||
|
@ -308,7 +302,7 @@ class $UsersTable extends Users with TableInfo<$UsersTable, User> {
|
|||
Set<GeneratedColumn> get $primaryKey => {id};
|
||||
@override
|
||||
User map(Map<String, dynamic> data, {String? tablePrefix}) {
|
||||
return User.fromData(data, _db,
|
||||
return User.fromData(data,
|
||||
prefix: tablePrefix != null ? '$tablePrefix.' : null);
|
||||
}
|
||||
|
||||
|
@ -329,8 +323,7 @@ class Friendship extends DataClass implements Insertable<Friendship> {
|
|||
{required this.firstUser,
|
||||
required this.secondUser,
|
||||
required this.reallyGoodFriends});
|
||||
factory Friendship.fromData(Map<String, dynamic> data, GeneratedDatabase db,
|
||||
{String? prefix}) {
|
||||
factory Friendship.fromData(Map<String, dynamic> data, {String? prefix}) {
|
||||
final effectivePrefix = prefix ?? '';
|
||||
return Friendship(
|
||||
firstUser: const IntType()
|
||||
|
@ -360,7 +353,7 @@ class Friendship extends DataClass implements Insertable<Friendship> {
|
|||
|
||||
factory Friendship.fromJson(Map<String, dynamic> json,
|
||||
{ValueSerializer? serializer}) {
|
||||
serializer ??= moorRuntimeOptions.defaultSerializer;
|
||||
serializer ??= driftRuntimeOptions.defaultSerializer;
|
||||
return Friendship(
|
||||
firstUser: serializer.fromJson<int>(json['firstUser']),
|
||||
secondUser: serializer.fromJson<int>(json['secondUser']),
|
||||
|
@ -369,7 +362,7 @@ class Friendship extends DataClass implements Insertable<Friendship> {
|
|||
}
|
||||
@override
|
||||
Map<String, dynamic> toJson({ValueSerializer? serializer}) {
|
||||
serializer ??= moorRuntimeOptions.defaultSerializer;
|
||||
serializer ??= driftRuntimeOptions.defaultSerializer;
|
||||
return <String, dynamic>{
|
||||
'firstUser': serializer.toJson<int>(firstUser),
|
||||
'secondUser': serializer.toJson<int>(secondUser),
|
||||
|
@ -395,8 +388,7 @@ class Friendship extends DataClass implements Insertable<Friendship> {
|
|||
}
|
||||
|
||||
@override
|
||||
int get hashCode => $mrjf($mrjc(firstUser.hashCode,
|
||||
$mrjc(secondUser.hashCode, reallyGoodFriends.hashCode)));
|
||||
int get hashCode => Object.hash(firstUser, secondUser, reallyGoodFriends);
|
||||
@override
|
||||
bool operator ==(Object other) =>
|
||||
identical(this, other) ||
|
||||
|
@ -478,16 +470,16 @@ class $FriendshipsTable extends Friendships
|
|||
final VerificationMeta _firstUserMeta = const VerificationMeta('firstUser');
|
||||
late final GeneratedColumn<int?> firstUser = GeneratedColumn<int?>(
|
||||
'first_user', aliasedName, false,
|
||||
typeName: 'INTEGER', requiredDuringInsert: true);
|
||||
type: const IntType(), requiredDuringInsert: true);
|
||||
final VerificationMeta _secondUserMeta = const VerificationMeta('secondUser');
|
||||
late final GeneratedColumn<int?> secondUser = GeneratedColumn<int?>(
|
||||
'second_user', aliasedName, false,
|
||||
typeName: 'INTEGER', requiredDuringInsert: true);
|
||||
type: const IntType(), requiredDuringInsert: true);
|
||||
final VerificationMeta _reallyGoodFriendsMeta =
|
||||
const VerificationMeta('reallyGoodFriends');
|
||||
late final GeneratedColumn<bool?> reallyGoodFriends = GeneratedColumn<bool?>(
|
||||
'really_good_friends', aliasedName, false,
|
||||
typeName: 'INTEGER',
|
||||
type: const BoolType(),
|
||||
requiredDuringInsert: false,
|
||||
defaultConstraints: 'CHECK (really_good_friends IN (0, 1))',
|
||||
defaultValue: const Constant(false));
|
||||
|
@ -530,7 +522,7 @@ class $FriendshipsTable extends Friendships
|
|||
Set<GeneratedColumn> get $primaryKey => {firstUser, secondUser};
|
||||
@override
|
||||
Friendship map(Map<String, dynamic> data, {String? tablePrefix}) {
|
||||
return Friendship.fromData(data, _db,
|
||||
return Friendship.fromData(data,
|
||||
prefix: tablePrefix != null ? '$tablePrefix.' : null);
|
||||
}
|
||||
|
||||
|
@ -547,27 +539,37 @@ abstract class _$Database extends GeneratedDatabase {
|
|||
late final $FriendshipsTable friendships = $FriendshipsTable(this);
|
||||
Selectable<User> mostPopularUsers(int amount) {
|
||||
return customSelect(
|
||||
'SELECT * FROM users u ORDER BY (SELECT COUNT(*) FROM friendships WHERE first_user = u.id OR second_user = u.id) DESC LIMIT :amount',
|
||||
variables: [Variable<int>(amount)],
|
||||
readsFrom: {users, friendships}).map(users.mapFromRow);
|
||||
'SELECT * FROM users AS u ORDER BY (SELECT COUNT(*) FROM friendships WHERE first_user = u.id OR second_user = u.id) DESC LIMIT @1',
|
||||
variables: [
|
||||
Variable<int>(amount)
|
||||
],
|
||||
readsFrom: {
|
||||
users,
|
||||
friendships,
|
||||
}).map(users.mapFromRow);
|
||||
}
|
||||
|
||||
Selectable<int> amountOfGoodFriends(int user) {
|
||||
return customSelect(
|
||||
'SELECT COUNT(*) FROM friendships f WHERE f.really_good_friends AND (f.first_user = :user OR f.second_user = :user)',
|
||||
'SELECT COUNT(*) AS _c0 FROM friendships AS f WHERE f.really_good_friends = 1 AND(f.first_user = @1 OR f.second_user = @1)',
|
||||
variables: [
|
||||
Variable<int>(user)
|
||||
],
|
||||
readsFrom: {
|
||||
friendships
|
||||
}).map((QueryRow row) => row.read<int>('COUNT(*)'));
|
||||
friendships,
|
||||
}).map((QueryRow row) => row.read<int>('_c0'));
|
||||
}
|
||||
|
||||
Selectable<FriendshipsOfResult> friendshipsOf(int user) {
|
||||
return customSelect(
|
||||
'SELECT \n f.really_good_friends, "user"."id" AS "nested_0.id", "user"."name" AS "nested_0.name", "user"."birth_date" AS "nested_0.birth_date", "user"."profile_picture" AS "nested_0.profile_picture", "user"."preferences" AS "nested_0.preferences"\n FROM friendships f\n INNER JOIN users user ON user.id IN (f.first_user, f.second_user) AND\n user.id != :user\n WHERE (f.first_user = :user OR f.second_user = :user)',
|
||||
variables: [Variable<int>(user)],
|
||||
readsFrom: {friendships, users}).map((QueryRow row) {
|
||||
'SELECT f.really_good_friends,"user"."id" AS "nested_0.id", "user"."name" AS "nested_0.name", "user"."birth_date" AS "nested_0.birth_date", "user"."profile_picture" AS "nested_0.profile_picture", "user"."preferences" AS "nested_0.preferences" FROM friendships AS f INNER JOIN users AS "user" ON "user".id IN (f.first_user, f.second_user) AND "user".id != @1 WHERE(f.first_user = @1 OR f.second_user = @1)',
|
||||
variables: [
|
||||
Variable<int>(user)
|
||||
],
|
||||
readsFrom: {
|
||||
friendships,
|
||||
users,
|
||||
}).map((QueryRow row) {
|
||||
return FriendshipsOfResult(
|
||||
reallyGoodFriends: row.read<bool>('really_good_friends'),
|
||||
user: users.mapFromRow(row, tablePrefix: 'nested_0'),
|
||||
|
@ -576,16 +578,22 @@ abstract class _$Database extends GeneratedDatabase {
|
|||
}
|
||||
|
||||
Selectable<int> userCount() {
|
||||
return customSelect('SELECT COUNT(id) FROM users',
|
||||
return customSelect('SELECT COUNT(id) AS _c0 FROM users',
|
||||
variables: [],
|
||||
readsFrom: {users}).map((QueryRow row) => row.read<int>('COUNT(id)'));
|
||||
readsFrom: {
|
||||
users,
|
||||
}).map((QueryRow row) => row.read<int>('_c0'));
|
||||
}
|
||||
|
||||
Selectable<Preferences?> settingsFor(int user) {
|
||||
return customSelect('SELECT preferences FROM users WHERE id = :user',
|
||||
variables: [Variable<int>(user)], readsFrom: {users})
|
||||
.map((QueryRow row) => $UsersTable.$converter0
|
||||
.mapToDart(row.read<String?>('preferences')));
|
||||
return customSelect('SELECT preferences FROM users WHERE id = @1',
|
||||
variables: [
|
||||
Variable<int>(user)
|
||||
],
|
||||
readsFrom: {
|
||||
users,
|
||||
}).map((QueryRow row) =>
|
||||
$UsersTable.$converter0.mapToDart(row.read<String?>('preferences')));
|
||||
}
|
||||
|
||||
Selectable<User> usersById(List<int> var1) {
|
||||
|
@ -593,13 +601,17 @@ abstract class _$Database extends GeneratedDatabase {
|
|||
final expandedvar1 = $expandVar($arrayStartIndex, var1.length);
|
||||
$arrayStartIndex += var1.length;
|
||||
return customSelect('SELECT * FROM users WHERE id IN ($expandedvar1)',
|
||||
variables: [for (var $ in var1) Variable<int>($)],
|
||||
readsFrom: {users}).map(users.mapFromRow);
|
||||
variables: [
|
||||
for (var $ in var1) Variable<int>($)
|
||||
],
|
||||
readsFrom: {
|
||||
users,
|
||||
}).map(users.mapFromRow);
|
||||
}
|
||||
|
||||
Future<List<Friendship>> returning(int var1, int var2, bool var3) {
|
||||
return customWriteReturning(
|
||||
'INSERT INTO friendships VALUES (?, ?, ?) RETURNING *;',
|
||||
'INSERT INTO friendships VALUES (@1, @2, @3) RETURNING *',
|
||||
variables: [
|
||||
Variable<int>(var1),
|
||||
Variable<int>(var2),
|
||||
|
@ -624,7 +636,7 @@ class FriendshipsOfResult {
|
|||
required this.user,
|
||||
});
|
||||
@override
|
||||
int get hashCode => $mrjf($mrjc(reallyGoodFriends.hashCode, user.hashCode));
|
||||
int get hashCode => Object.hash(reallyGoodFriends, user);
|
||||
@override
|
||||
bool operator ==(Object other) =>
|
||||
identical(this, other) ||
|
||||
|
|
|
@ -1,7 +1,10 @@
|
|||
import 'package:test/test.dart';
|
||||
import 'package:tests/data/sample_data.dart';
|
||||
import 'package:tests/database/database.dart';
|
||||
import 'package:tests/suite/suite.dart';
|
||||
|
||||
import '../tests.dart';
|
||||
|
||||
void crudTests(TestExecutor executor) {
|
||||
test('inserting updates a select stream', () async {
|
||||
final db = Database(executor.createConnection());
|
||||
|
@ -12,22 +15,61 @@ void crudTests(TestExecutor executor) {
|
|||
|
||||
expect(await friends.first, isEmpty);
|
||||
|
||||
// after we called makeFriends(a,b)
|
||||
final expectation = expectLater(friends, emits(equals(<User>[b])));
|
||||
|
||||
await db.makeFriends(a, b);
|
||||
await expectation;
|
||||
await expectLater(friends, emits(equals(<User>[b])));
|
||||
|
||||
await db.close();
|
||||
await executor.clearDatabaseAndClose(db);
|
||||
});
|
||||
|
||||
test('update row', () async {
|
||||
final db = Database(executor.createConnection());
|
||||
|
||||
await db.update(db.users)
|
||||
..where((tbl) => tbl.id.equals(1))
|
||||
..write(UsersCompanion(name: Value("Jack")));
|
||||
final updatedUser = await db.getUserById(1);
|
||||
|
||||
expect(updatedUser.name, equals('Jack'));
|
||||
await executor.clearDatabaseAndClose(db);
|
||||
});
|
||||
|
||||
test('insert duplicate', () async {
|
||||
final db = Database(executor.createConnection());
|
||||
|
||||
await expectLater(
|
||||
db.into(db.users).insert(marcell),
|
||||
throwsA(toString(
|
||||
matches(RegExp(r'unique constraint', caseSensitive: false)))));
|
||||
await executor.clearDatabaseAndClose(db);
|
||||
});
|
||||
|
||||
test('insert on conflict update', () async {
|
||||
final db = Database(executor.createConnection());
|
||||
|
||||
await db.into(db.users).insertOnConflictUpdate(marcell);
|
||||
final updatedUser = await db.getUserById(1);
|
||||
|
||||
expect(updatedUser.name, equals('Marcell'));
|
||||
await executor.clearDatabaseAndClose(db);
|
||||
});
|
||||
|
||||
test('insert mode', () async {
|
||||
final db = Database(executor.createConnection());
|
||||
if (db.executor.dialect == SqlDialect.postgres) {
|
||||
await expectLater(
|
||||
db.into(db.users).insert(marcell, mode: InsertMode.insertOrReplace),
|
||||
throwsA(isA<ArgumentError>()));
|
||||
}
|
||||
await executor.clearDatabaseAndClose(db);
|
||||
});
|
||||
|
||||
test('supports RETURNING', () async {
|
||||
final db = Database(executor.createConnection());
|
||||
final result = await db.returning(1, 2, true);
|
||||
|
||||
expect(result,
|
||||
[Friendship(firstUser: 1, secondUser: 2, reallyGoodFriends: true)]);
|
||||
|
||||
await db.close();
|
||||
await executor.clearDatabaseAndClose(db);
|
||||
},
|
||||
skip: executor.supportsReturning
|
||||
? null
|
||||
|
@ -36,12 +78,10 @@ void crudTests(TestExecutor executor) {
|
|||
test('IN ? expressions can be expanded', () async {
|
||||
// regression test for https://github.com/simolus3/moor/issues/156
|
||||
final db = Database(executor.createConnection());
|
||||
|
||||
final result = await db.usersById([1, 2, 3]).get();
|
||||
|
||||
expect(result.map((u) => u.name), ['Dash', 'Duke', 'Go Gopher']);
|
||||
|
||||
await db.close();
|
||||
await executor.clearDatabaseAndClose(db);
|
||||
});
|
||||
|
||||
test('nested results', () async {
|
||||
|
@ -54,7 +94,7 @@ void crudTests(TestExecutor executor) {
|
|||
final result = await db.friendshipsOf(a.id).getSingle();
|
||||
|
||||
expect(result, FriendshipsOfResult(reallyGoodFriends: true, user: b));
|
||||
await db.close();
|
||||
await executor.clearDatabaseAndClose(db);
|
||||
});
|
||||
|
||||
test('runCustom with args', () async {
|
||||
|
@ -62,11 +102,17 @@ void crudTests(TestExecutor executor) {
|
|||
final db = Database(executor.createConnection());
|
||||
|
||||
// ignore: invalid_use_of_visible_for_testing_member, invalid_use_of_protected_member
|
||||
await db.customStatement(
|
||||
'INSERT INTO friendships (first_user, second_user) VALUES (?, ?)',
|
||||
<int>[1, 2]);
|
||||
if (db.executor.dialect == SqlDialect.postgres) {
|
||||
await db.customStatement(
|
||||
'INSERT INTO friendships (first_user, second_user) VALUES (@1, @2)',
|
||||
<int>[1, 2]);
|
||||
} else {
|
||||
await db.customStatement(
|
||||
'INSERT INTO friendships (first_user, second_user) VALUES (?1, ?2)',
|
||||
<int>[1, 2]);
|
||||
}
|
||||
|
||||
expect(await db.friendsOf(1).get(), isNotEmpty);
|
||||
await db.close();
|
||||
await executor.clearDatabaseAndClose(db);
|
||||
});
|
||||
}
|
||||
|
|
|
@ -15,6 +15,6 @@ void customObjectTests(TestExecutor executor) {
|
|||
|
||||
expect(preferences?.receiveEmails, true);
|
||||
|
||||
await db.close();
|
||||
await executor.clearDatabaseAndClose(db);
|
||||
});
|
||||
}
|
||||
|
|
|
@ -13,7 +13,7 @@ void migrationTests(TestExecutor executor) {
|
|||
final count = await database.userCount().getSingle();
|
||||
expect(count, 3);
|
||||
|
||||
await database.close();
|
||||
await executor.clearDatabaseAndClose(database);
|
||||
});
|
||||
|
||||
test('saves and restores database', () async {
|
||||
|
@ -29,7 +29,7 @@ void migrationTests(TestExecutor executor) {
|
|||
expect(database.schemaVersionChangedFrom, 1);
|
||||
expect(database.schemaVersionChangedTo, 2);
|
||||
|
||||
await database.close();
|
||||
await executor.clearDatabaseAndClose(database);
|
||||
});
|
||||
|
||||
test('can use destructive migration', () async {
|
||||
|
@ -43,6 +43,8 @@ void migrationTests(TestExecutor executor) {
|
|||
// No users now, we deleted everything
|
||||
final count = await database.userCount().getSingle();
|
||||
expect(count, 0);
|
||||
|
||||
await executor.clearDatabaseAndClose(database);
|
||||
});
|
||||
|
||||
test('runs the migrator when downgrading', () async {
|
||||
|
@ -56,7 +58,7 @@ void migrationTests(TestExecutor executor) {
|
|||
expect(database.schemaVersionChangedFrom, 2);
|
||||
expect(database.schemaVersionChangedTo, 1);
|
||||
|
||||
await database.close();
|
||||
await executor.clearDatabaseAndClose(database);
|
||||
});
|
||||
|
||||
test('does not apply schema version when migration throws', () async {
|
||||
|
@ -79,9 +81,17 @@ void migrationTests(TestExecutor executor) {
|
|||
|
||||
// Open it one last time, the schema version should still be at 1
|
||||
database = Database(executor.createConnection(), schemaVersion: 1);
|
||||
final result =
|
||||
await database.customSelect('PRAGMA user_version').getSingle();
|
||||
|
||||
QueryRow result;
|
||||
if (database.executor.dialect == SqlDialect.sqlite) {
|
||||
result = await database.customSelect('PRAGMA user_version').getSingle();
|
||||
} else {
|
||||
result = await database
|
||||
.customSelect('SELECT version FROM __schema')
|
||||
.getSingle();
|
||||
}
|
||||
expect(result.data.values.single, 1);
|
||||
await database.close();
|
||||
|
||||
await executor.clearDatabaseAndClose(database);
|
||||
});
|
||||
}
|
||||
|
|
|
@ -1,8 +1,9 @@
|
|||
import 'package:moor/moor.dart';
|
||||
import 'package:drift/drift.dart';
|
||||
import 'package:test/test.dart';
|
||||
import 'package:tests/suite/crud_tests.dart';
|
||||
import 'package:tests/suite/transactions.dart';
|
||||
|
||||
import '../tests.dart';
|
||||
import 'custom_objects.dart';
|
||||
import 'migrations.dart';
|
||||
|
||||
|
@ -13,10 +14,15 @@ abstract class TestExecutor {
|
|||
|
||||
/// Delete the data that would be written by the executor.
|
||||
Future deleteData();
|
||||
|
||||
/// Clear database before close
|
||||
Future clearDatabaseAndClose(Database db) async {
|
||||
await db.close();
|
||||
}
|
||||
}
|
||||
|
||||
void runAllTests(TestExecutor executor) {
|
||||
moorRuntimeOptions.dontWarnAboutMultipleDatabases = true;
|
||||
driftRuntimeOptions.dontWarnAboutMultipleDatabases = true;
|
||||
|
||||
tearDown(() async {
|
||||
await executor.deleteData();
|
||||
|
@ -33,3 +39,13 @@ void runAllTests(TestExecutor executor) {
|
|||
await connection.executor.close();
|
||||
});
|
||||
}
|
||||
|
||||
Matcher toString(Matcher matcher) => _ToString(matcher);
|
||||
|
||||
class _ToString extends CustomMatcher {
|
||||
_ToString(Matcher matcher)
|
||||
: super("Object string represent is", "toString()", matcher);
|
||||
|
||||
@override
|
||||
Object? featureValueOf(dynamic actual) => actual.toString();
|
||||
}
|
||||
|
|
|
@ -25,7 +25,7 @@ void transactionTests(TestExecutor executor) {
|
|||
await db.amountOfGoodFriends(people.dashId).getSingle();
|
||||
expect(friendsResult, 1);
|
||||
|
||||
await db.close();
|
||||
await executor.clearDatabaseAndClose(db);
|
||||
});
|
||||
|
||||
test('transaction is rolled back then an exception occurs', () async {
|
||||
|
@ -51,12 +51,12 @@ void transactionTests(TestExecutor executor) {
|
|||
await db.amountOfGoodFriends(people.dashId).getSingle();
|
||||
expect(friendsResult, 0); // no friendship was inserted
|
||||
|
||||
await db.close();
|
||||
await executor.clearDatabaseAndClose(db);
|
||||
});
|
||||
|
||||
test('can use no-op transactions', () async {
|
||||
final db = Database(executor.createConnection());
|
||||
await db.transaction(() => Future.value(null));
|
||||
await db.close();
|
||||
await executor.clearDatabaseAndClose(db);
|
||||
});
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
library tests;
|
||||
|
||||
export 'package:moor/moor.dart';
|
||||
export 'package:drift/drift.dart';
|
||||
|
||||
export 'data/sample_data.dart';
|
||||
export 'database/database.dart';
|
||||
|
|
|
@ -8,20 +8,18 @@ environment:
|
|||
sdk: '>=2.12.0 <3.0.0'
|
||||
|
||||
dependencies:
|
||||
moor: ^4.0.0
|
||||
drift: ^1.0.1
|
||||
json_annotation: ^4.0.0
|
||||
dev_dependencies:
|
||||
build_runner: ^2.0.0
|
||||
moor_generator: ^4.0.0
|
||||
drift_dev: ^1.0.2
|
||||
json_serializable: ^5.0.0
|
||||
test:
|
||||
|
||||
dependency_overrides:
|
||||
drift:
|
||||
path: ../../../drift
|
||||
moor:
|
||||
path: ../../../moor
|
||||
moor_generator:
|
||||
path: ../../../moor_generator
|
||||
drift_dev:
|
||||
path: ../../../drift_dev
|
||||
sqlparser:
|
||||
path: ../../../sqlparser
|
|
@ -36,7 +36,7 @@ class WebExecutorIndexedDb extends TestExecutor {
|
|||
|
||||
@override
|
||||
Future deleteData() async {
|
||||
await window.indexedDB.deleteDatabase('moor_databases');
|
||||
await window.indexedDB?.deleteDatabase('moor_databases');
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -13,9 +13,8 @@ class Foo extends DataClass implements Insertable<Foo> {
|
|||
factory Foo.fromData(Map<String, dynamic> data, GeneratedDatabase db,
|
||||
{String prefix}) {
|
||||
final effectivePrefix = prefix ?? '';
|
||||
final intType = db.typeSystem.forDartType<int>();
|
||||
return Foo(
|
||||
id: intType.mapFromDatabaseResponse(data['${effectivePrefix}id']),
|
||||
id: const IntType().mapFromDatabaseResponse(data['${effectivePrefix}id']),
|
||||
);
|
||||
}
|
||||
@override
|
||||
|
@ -27,6 +26,12 @@ class Foo extends DataClass implements Insertable<Foo> {
|
|||
return map;
|
||||
}
|
||||
|
||||
FoosCompanion toCompanion(bool nullToAbsent) {
|
||||
return FoosCompanion(
|
||||
id: id == null && nullToAbsent ? const Value.absent() : Value(id),
|
||||
);
|
||||
}
|
||||
|
||||
factory Foo.fromJson(Map<String, dynamic> json,
|
||||
{ValueSerializer serializer}) {
|
||||
serializer ??= moorRuntimeOptions.defaultSerializer;
|
||||
|
@ -47,13 +52,16 @@ class Foo extends DataClass implements Insertable<Foo> {
|
|||
);
|
||||
@override
|
||||
String toString() {
|
||||
return (StringBuffer('Foo(')..write('id: $id')..write(')')).toString();
|
||||
return (StringBuffer('Foo(')
|
||||
..write('id: $id')
|
||||
..write(')'))
|
||||
.toString();
|
||||
}
|
||||
|
||||
@override
|
||||
int get hashCode => $mrjf(id.hashCode);
|
||||
int get hashCode => id.hashCode;
|
||||
@override
|
||||
bool operator ==(dynamic other) =>
|
||||
bool operator ==(Object other) =>
|
||||
identical(this, other) || (other is Foo && other.id == this.id);
|
||||
}
|
||||
|
||||
|
@ -87,6 +95,14 @@ class FoosCompanion extends UpdateCompanion<Foo> {
|
|||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
@override
|
||||
String toString() {
|
||||
return (StringBuffer('FoosCompanion(')
|
||||
..write('id: $id')
|
||||
..write(')'))
|
||||
.toString();
|
||||
}
|
||||
}
|
||||
|
||||
class $FoosTable extends Foos with TableInfo<$FoosTable, Foo> {
|
||||
|
@ -94,22 +110,19 @@ class $FoosTable extends Foos with TableInfo<$FoosTable, Foo> {
|
|||
final String _alias;
|
||||
$FoosTable(this._db, [this._alias]);
|
||||
final VerificationMeta _idMeta = const VerificationMeta('id');
|
||||
GeneratedIntColumn _id;
|
||||
GeneratedColumn<int> _id;
|
||||
@override
|
||||
GeneratedIntColumn get id => _id ??= _constructId();
|
||||
GeneratedIntColumn _constructId() {
|
||||
return GeneratedIntColumn('id', $tableName, false,
|
||||
hasAutoIncrement: true, declaredAsPrimaryKey: true);
|
||||
}
|
||||
|
||||
GeneratedColumn<int> get id =>
|
||||
_id ??= GeneratedColumn<int>('id', aliasedName, false,
|
||||
typeName: 'INTEGER',
|
||||
requiredDuringInsert: false,
|
||||
defaultConstraints: 'PRIMARY KEY AUTOINCREMENT');
|
||||
@override
|
||||
List<GeneratedColumn> get $columns => [id];
|
||||
@override
|
||||
$FoosTable get asDslTable => this;
|
||||
String get aliasedName => _alias ?? 'foos';
|
||||
@override
|
||||
String get $tableName => _alias ?? 'foos';
|
||||
@override
|
||||
final String actualTableName = 'foos';
|
||||
String get actualTableName => 'foos';
|
||||
@override
|
||||
VerificationContext validateIntegrity(Insertable<Foo> instance,
|
||||
{bool isInserting = false}) {
|
||||
|
@ -125,8 +138,8 @@ class $FoosTable extends Foos with TableInfo<$FoosTable, Foo> {
|
|||
Set<GeneratedColumn> get $primaryKey => {id};
|
||||
@override
|
||||
Foo map(Map<String, dynamic> data, {String tablePrefix}) {
|
||||
final effectivePrefix = tablePrefix != null ? '$tablePrefix.' : null;
|
||||
return Foo.fromData(data, _db, prefix: effectivePrefix);
|
||||
return Foo.fromData(data, _db,
|
||||
prefix: tablePrefix != null ? '$tablePrefix.' : null);
|
||||
}
|
||||
|
||||
@override
|
||||
|
@ -141,9 +154,8 @@ class Bar extends DataClass implements Insertable<Bar> {
|
|||
factory Bar.fromData(Map<String, dynamic> data, GeneratedDatabase db,
|
||||
{String prefix}) {
|
||||
final effectivePrefix = prefix ?? '';
|
||||
final intType = db.typeSystem.forDartType<int>();
|
||||
return Bar(
|
||||
id: intType.mapFromDatabaseResponse(data['${effectivePrefix}id']),
|
||||
id: const IntType().mapFromDatabaseResponse(data['${effectivePrefix}id']),
|
||||
);
|
||||
}
|
||||
@override
|
||||
|
@ -155,6 +167,12 @@ class Bar extends DataClass implements Insertable<Bar> {
|
|||
return map;
|
||||
}
|
||||
|
||||
BarsCompanion toCompanion(bool nullToAbsent) {
|
||||
return BarsCompanion(
|
||||
id: id == null && nullToAbsent ? const Value.absent() : Value(id),
|
||||
);
|
||||
}
|
||||
|
||||
factory Bar.fromJson(Map<String, dynamic> json,
|
||||
{ValueSerializer serializer}) {
|
||||
serializer ??= moorRuntimeOptions.defaultSerializer;
|
||||
|
@ -175,13 +193,16 @@ class Bar extends DataClass implements Insertable<Bar> {
|
|||
);
|
||||
@override
|
||||
String toString() {
|
||||
return (StringBuffer('Bar(')..write('id: $id')..write(')')).toString();
|
||||
return (StringBuffer('Bar(')
|
||||
..write('id: $id')
|
||||
..write(')'))
|
||||
.toString();
|
||||
}
|
||||
|
||||
@override
|
||||
int get hashCode => $mrjf(id.hashCode);
|
||||
int get hashCode => id.hashCode;
|
||||
@override
|
||||
bool operator ==(dynamic other) =>
|
||||
bool operator ==(Object other) =>
|
||||
identical(this, other) || (other is Bar && other.id == this.id);
|
||||
}
|
||||
|
||||
|
@ -215,6 +236,14 @@ class BarsCompanion extends UpdateCompanion<Bar> {
|
|||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
@override
|
||||
String toString() {
|
||||
return (StringBuffer('BarsCompanion(')
|
||||
..write('id: $id')
|
||||
..write(')'))
|
||||
.toString();
|
||||
}
|
||||
}
|
||||
|
||||
class $BarsTable extends Bars with TableInfo<$BarsTable, Bar> {
|
||||
|
@ -222,22 +251,19 @@ class $BarsTable extends Bars with TableInfo<$BarsTable, Bar> {
|
|||
final String _alias;
|
||||
$BarsTable(this._db, [this._alias]);
|
||||
final VerificationMeta _idMeta = const VerificationMeta('id');
|
||||
GeneratedIntColumn _id;
|
||||
GeneratedColumn<int> _id;
|
||||
@override
|
||||
GeneratedIntColumn get id => _id ??= _constructId();
|
||||
GeneratedIntColumn _constructId() {
|
||||
return GeneratedIntColumn('id', $tableName, false,
|
||||
hasAutoIncrement: true, declaredAsPrimaryKey: true);
|
||||
}
|
||||
|
||||
GeneratedColumn<int> get id =>
|
||||
_id ??= GeneratedColumn<int>('id', aliasedName, false,
|
||||
typeName: 'INTEGER',
|
||||
requiredDuringInsert: false,
|
||||
defaultConstraints: 'PRIMARY KEY AUTOINCREMENT');
|
||||
@override
|
||||
List<GeneratedColumn> get $columns => [id];
|
||||
@override
|
||||
$BarsTable get asDslTable => this;
|
||||
String get aliasedName => _alias ?? 'bars';
|
||||
@override
|
||||
String get $tableName => _alias ?? 'bars';
|
||||
@override
|
||||
final String actualTableName = 'bars';
|
||||
String get actualTableName => 'bars';
|
||||
@override
|
||||
VerificationContext validateIntegrity(Insertable<Bar> instance,
|
||||
{bool isInserting = false}) {
|
||||
|
@ -253,8 +279,8 @@ class $BarsTable extends Bars with TableInfo<$BarsTable, Bar> {
|
|||
Set<GeneratedColumn> get $primaryKey => {id};
|
||||
@override
|
||||
Bar map(Map<String, dynamic> data, {String tablePrefix}) {
|
||||
final effectivePrefix = tablePrefix != null ? '$tablePrefix.' : null;
|
||||
return Bar.fromData(data, _db, prefix: effectivePrefix);
|
||||
return Bar.fromData(data, _db,
|
||||
prefix: tablePrefix != null ? '$tablePrefix.' : null);
|
||||
}
|
||||
|
||||
@override
|
||||
|
|
|
@ -11,8 +11,7 @@ class User extends DataClass implements Insertable<User> {
|
|||
final int id;
|
||||
final String name;
|
||||
User({required this.id, required this.name});
|
||||
factory User.fromData(Map<String, dynamic> data, GeneratedDatabase db,
|
||||
{String? prefix}) {
|
||||
factory User.fromData(Map<String, dynamic> data, {String? prefix}) {
|
||||
final effectivePrefix = prefix ?? '';
|
||||
return User(
|
||||
id: const IntType()
|
||||
|
@ -131,13 +130,13 @@ class $UsersTable extends Users with TableInfo<$UsersTable, User> {
|
|||
final VerificationMeta _idMeta = const VerificationMeta('id');
|
||||
late final GeneratedColumn<int?> id = GeneratedColumn<int?>(
|
||||
'id', aliasedName, false,
|
||||
typeName: 'INTEGER',
|
||||
type: const IntType(),
|
||||
requiredDuringInsert: false,
|
||||
defaultConstraints: 'PRIMARY KEY AUTOINCREMENT');
|
||||
final VerificationMeta _nameMeta = const VerificationMeta('name');
|
||||
late final GeneratedColumn<String?> name = GeneratedColumn<String?>(
|
||||
'name', aliasedName, false,
|
||||
typeName: 'TEXT',
|
||||
type: const StringType(),
|
||||
requiredDuringInsert: false,
|
||||
defaultValue: const Constant('name'));
|
||||
@override
|
||||
|
@ -165,7 +164,7 @@ class $UsersTable extends Users with TableInfo<$UsersTable, User> {
|
|||
Set<GeneratedColumn> get $primaryKey => {id};
|
||||
@override
|
||||
User map(Map<String, dynamic> data, {String? tablePrefix}) {
|
||||
return User.fromData(data, _db,
|
||||
return User.fromData(data,
|
||||
prefix: tablePrefix != null ? '$tablePrefix.' : null);
|
||||
}
|
||||
|
||||
|
@ -185,8 +184,7 @@ class Group extends DataClass implements Insertable<Group> {
|
|||
required this.title,
|
||||
this.deleted,
|
||||
required this.owner});
|
||||
factory Group.fromData(Map<String, dynamic> data, GeneratedDatabase db,
|
||||
{String? prefix}) {
|
||||
factory Group.fromData(Map<String, dynamic> data, {String? prefix}) {
|
||||
final effectivePrefix = prefix ?? '';
|
||||
return Group(
|
||||
id: const IntType()
|
||||
|
@ -354,26 +352,26 @@ class Groups extends Table with TableInfo<Groups, Group> {
|
|||
final VerificationMeta _idMeta = const VerificationMeta('id');
|
||||
late final GeneratedColumn<int?> id = GeneratedColumn<int?>(
|
||||
'id', aliasedName, false,
|
||||
typeName: 'INTEGER',
|
||||
type: const IntType(),
|
||||
requiredDuringInsert: false,
|
||||
$customConstraints: 'NOT NULL');
|
||||
final VerificationMeta _titleMeta = const VerificationMeta('title');
|
||||
late final GeneratedColumn<String?> title = GeneratedColumn<String?>(
|
||||
'title', aliasedName, false,
|
||||
typeName: 'TEXT',
|
||||
type: const StringType(),
|
||||
requiredDuringInsert: true,
|
||||
$customConstraints: 'NOT NULL');
|
||||
final VerificationMeta _deletedMeta = const VerificationMeta('deleted');
|
||||
late final GeneratedColumn<bool?> deleted = GeneratedColumn<bool?>(
|
||||
'deleted', aliasedName, true,
|
||||
typeName: 'INTEGER',
|
||||
type: const BoolType(),
|
||||
requiredDuringInsert: false,
|
||||
$customConstraints: 'DEFAULT FALSE',
|
||||
defaultValue: const CustomExpression<bool>('FALSE'));
|
||||
final VerificationMeta _ownerMeta = const VerificationMeta('owner');
|
||||
late final GeneratedColumn<int?> owner = GeneratedColumn<int?>(
|
||||
'owner', aliasedName, false,
|
||||
typeName: 'INTEGER',
|
||||
type: const IntType(),
|
||||
requiredDuringInsert: true,
|
||||
$customConstraints: 'NOT NULL REFERENCES users (id)');
|
||||
@override
|
||||
|
@ -413,7 +411,7 @@ class Groups extends Table with TableInfo<Groups, Group> {
|
|||
Set<GeneratedColumn> get $primaryKey => {id};
|
||||
@override
|
||||
Group map(Map<String, dynamic> data, {String? tablePrefix}) {
|
||||
return Group.fromData(data, _db,
|
||||
return Group.fromData(data,
|
||||
prefix: tablePrefix != null ? '$tablePrefix.' : null);
|
||||
}
|
||||
|
||||
|
|
|
@ -1,11 +0,0 @@
|
|||
import 'tables.dart';
|
||||
|
||||
-- This table was added in schema version 3
|
||||
CREATE TABLE "groups" (
|
||||
id INTEGER NOT NULL,
|
||||
title TEXT NOT NULL,
|
||||
deleted BOOLEAN DEFAULT FALSE,
|
||||
owner INTEGER NOT NULL REFERENCES users (id),
|
||||
|
||||
PRIMARY KEY (id)
|
||||
);
|
|
@ -11,8 +11,7 @@ class Entrie extends DataClass implements Insertable<Entrie> {
|
|||
final int id;
|
||||
final String value;
|
||||
Entrie({required this.id, required this.value});
|
||||
factory Entrie.fromData(Map<String, dynamic> data, GeneratedDatabase db,
|
||||
{String? prefix}) {
|
||||
factory Entrie.fromData(Map<String, dynamic> data, {String? prefix}) {
|
||||
final effectivePrefix = prefix ?? '';
|
||||
return Entrie(
|
||||
id: const IntType()
|
||||
|
@ -131,13 +130,13 @@ class Entries extends Table with TableInfo<Entries, Entrie> {
|
|||
final VerificationMeta _idMeta = const VerificationMeta('id');
|
||||
late final GeneratedColumn<int?> id = GeneratedColumn<int?>(
|
||||
'id', aliasedName, false,
|
||||
typeName: 'INTEGER',
|
||||
type: const IntType(),
|
||||
requiredDuringInsert: false,
|
||||
$customConstraints: 'PRIMARY KEY');
|
||||
final VerificationMeta _valueMeta = const VerificationMeta('value');
|
||||
late final GeneratedColumn<String?> value = GeneratedColumn<String?>(
|
||||
'text', aliasedName, false,
|
||||
typeName: 'TEXT',
|
||||
type: const StringType(),
|
||||
requiredDuringInsert: true,
|
||||
$customConstraints: 'NOT NULL');
|
||||
@override
|
||||
|
@ -167,7 +166,7 @@ class Entries extends Table with TableInfo<Entries, Entrie> {
|
|||
Set<GeneratedColumn> get $primaryKey => {id};
|
||||
@override
|
||||
Entrie map(Map<String, dynamic> data, {String? tablePrefix}) {
|
||||
return Entrie.fromData(data, _db,
|
||||
return Entrie.fromData(data,
|
||||
prefix: tablePrefix != null ? '$tablePrefix.' : null);
|
||||
}
|
||||
|
||||
|
|
|
@ -134,14 +134,14 @@ class Users extends Table with TableInfo<Users, User> {
|
|||
GeneratedColumn<int> _id;
|
||||
GeneratedColumn<int> get id =>
|
||||
_id ??= GeneratedColumn<int>('id', aliasedName, false,
|
||||
typeName: 'INTEGER',
|
||||
type: const IntType(),
|
||||
requiredDuringInsert: false,
|
||||
$customConstraints: 'NOT NULL PRIMARY KEY AUTOINCREMENT');
|
||||
final VerificationMeta _nameMeta = const VerificationMeta('name');
|
||||
GeneratedColumn<String> _name;
|
||||
GeneratedColumn<String> get name =>
|
||||
_name ??= GeneratedColumn<String>('name', aliasedName, false,
|
||||
typeName: 'TEXT',
|
||||
type: const StringType(),
|
||||
requiredDuringInsert: true,
|
||||
$customConstraints: 'NOT NULL');
|
||||
@override
|
||||
|
|
|
@ -7,7 +7,7 @@ packages:
|
|||
name: async
|
||||
url: "https://pub.dartlang.org"
|
||||
source: hosted
|
||||
version: "2.6.1"
|
||||
version: "2.8.1"
|
||||
boolean_selector:
|
||||
dependency: transitive
|
||||
description:
|
||||
|
@ -28,7 +28,7 @@ packages:
|
|||
name: charcode
|
||||
url: "https://pub.dartlang.org"
|
||||
source: hosted
|
||||
version: "1.2.0"
|
||||
version: "1.3.1"
|
||||
clock:
|
||||
dependency: transitive
|
||||
description:
|
||||
|
@ -87,7 +87,7 @@ packages:
|
|||
name: meta
|
||||
url: "https://pub.dartlang.org"
|
||||
source: hosted
|
||||
version: "1.3.0"
|
||||
version: "1.7.0"
|
||||
moor:
|
||||
dependency: "direct main"
|
||||
description:
|
||||
|
@ -183,7 +183,7 @@ packages:
|
|||
name: test_api
|
||||
url: "https://pub.dartlang.org"
|
||||
source: hosted
|
||||
version: "0.3.0"
|
||||
version: "0.4.2"
|
||||
typed_data:
|
||||
dependency: transitive
|
||||
description:
|
||||
|
|
|
@ -195,13 +195,11 @@ class InExpression extends Expression {
|
|||
}
|
||||
|
||||
class Parentheses extends Expression {
|
||||
final Token openingLeft;
|
||||
Token? openingLeft;
|
||||
Token? closingRight;
|
||||
Expression expression;
|
||||
final Token closingRight;
|
||||
|
||||
Parentheses(this.openingLeft, this.expression, this.closingRight) {
|
||||
setSpan(openingLeft, closingRight);
|
||||
}
|
||||
Parentheses(this.expression);
|
||||
|
||||
@override
|
||||
R accept<A, R>(AstVisitor<A, R> visitor, A arg) {
|
||||
|
|
|
@ -783,7 +783,10 @@ class Parser {
|
|||
}
|
||||
|
||||
_consume(TokenType.rightParen, 'Expected a closing bracket');
|
||||
return Parentheses(left, expr, _previous)..setSpan(left, _previous);
|
||||
return Parentheses(expr)
|
||||
..openingLeft = left
|
||||
..closingRight = _previous
|
||||
..setSpan(left, _previous);
|
||||
}
|
||||
} else if (_matchOne(TokenType.dollarSignVariable)) {
|
||||
if (enableMoorExtensions) {
|
||||
|
|
|
@ -29,14 +29,19 @@ enum TokenType {
|
|||
ampersand,
|
||||
analyze,
|
||||
and,
|
||||
any,
|
||||
array,
|
||||
as,
|
||||
asc,
|
||||
asymmetric,
|
||||
atSignVariable,
|
||||
attach,
|
||||
autoincrement,
|
||||
before,
|
||||
begin,
|
||||
between,
|
||||
binary,
|
||||
both,
|
||||
by,
|
||||
cascade,
|
||||
cast,
|
||||
|
@ -56,6 +61,7 @@ enum TokenType {
|
|||
currentDate,
|
||||
currentTime,
|
||||
currentTimestamp,
|
||||
currentUser,
|
||||
database,
|
||||
deferrable,
|
||||
deferred,
|
||||
|
@ -88,11 +94,13 @@ enum TokenType {
|
|||
full,
|
||||
generated,
|
||||
glob,
|
||||
grant,
|
||||
group,
|
||||
groups,
|
||||
having,
|
||||
identifier,
|
||||
ignore,
|
||||
ilike,
|
||||
immediate,
|
||||
indexed,
|
||||
initially,
|
||||
|
@ -105,6 +113,7 @@ enum TokenType {
|
|||
join,
|
||||
key,
|
||||
last,
|
||||
leading,
|
||||
left,
|
||||
leftParen,
|
||||
less,
|
||||
|
@ -112,6 +121,8 @@ enum TokenType {
|
|||
lessMore,
|
||||
like,
|
||||
limit,
|
||||
localTime,
|
||||
localTimestamp,
|
||||
match,
|
||||
materialized,
|
||||
minus,
|
||||
|
@ -127,12 +138,15 @@ enum TokenType {
|
|||
of,
|
||||
offset,
|
||||
on,
|
||||
only,
|
||||
or,
|
||||
order,
|
||||
others,
|
||||
outer,
|
||||
over,
|
||||
overlaps,
|
||||
partition,
|
||||
placing,
|
||||
percent,
|
||||
pipe,
|
||||
plan,
|
||||
|
@ -163,12 +177,16 @@ enum TokenType {
|
|||
select,
|
||||
semicolon,
|
||||
set,
|
||||
sessionUser,
|
||||
shiftLeft,
|
||||
shiftRight,
|
||||
similar,
|
||||
slash,
|
||||
some,
|
||||
stored,
|
||||
star,
|
||||
strict,
|
||||
symmetric,
|
||||
stringLiteral,
|
||||
table,
|
||||
temp,
|
||||
|
@ -177,12 +195,14 @@ enum TokenType {
|
|||
ties,
|
||||
tilde,
|
||||
to,
|
||||
trailing,
|
||||
transaction,
|
||||
trigger,
|
||||
unbounded,
|
||||
union,
|
||||
unique,
|
||||
update,
|
||||
user,
|
||||
using,
|
||||
vacuum,
|
||||
view,
|
||||
|
@ -286,6 +306,7 @@ const Map<String, TokenType> keywords = {
|
|||
'JOIN': TokenType.join,
|
||||
'KEY': TokenType.key,
|
||||
'LAST': TokenType.last,
|
||||
'LEADING': TokenType.leading,
|
||||
'LEFT': TokenType.left,
|
||||
'LIKE': TokenType.like,
|
||||
'LIMIT': TokenType.limit,
|
||||
|
@ -358,6 +379,29 @@ const Map<String, TokenType> keywords = {
|
|||
'WITHOUT': TokenType.without,
|
||||
};
|
||||
|
||||
const Map<String, TokenType> postgresKeywords = {
|
||||
'ANY': TokenType.any,
|
||||
'ARRAY': TokenType.array,
|
||||
'ASYMMETRIC': TokenType.asymmetric,
|
||||
'BINARY': TokenType.binary,
|
||||
'BOTH': TokenType.both,
|
||||
'CURRENT_USER': TokenType.currentUser,
|
||||
'ILIKE': TokenType.ilike,
|
||||
'LEADING': TokenType.leading,
|
||||
'LOCALTIME': TokenType.localTime,
|
||||
'LOCALTIMESTAMP': TokenType.localTimestamp,
|
||||
'GRANT': TokenType.grant,
|
||||
'ONLY': TokenType.only,
|
||||
'OVERLAPS': TokenType.overlaps,
|
||||
'PLACING': TokenType.placing,
|
||||
'SESSION_USER': TokenType.sessionUser,
|
||||
'SIMILAR': TokenType.similar,
|
||||
'SOME': TokenType.some,
|
||||
'SYMMETRIC': TokenType.symmetric,
|
||||
'TRAILING': TokenType.trailing,
|
||||
'USER': TokenType.user,
|
||||
};
|
||||
|
||||
/// Maps [TokenType]s which are keywords to their lexeme.
|
||||
final Map<TokenType, String> reverseKeywords = {
|
||||
for (var entry in keywords.entries) entry.value: entry.key,
|
||||
|
@ -383,6 +427,10 @@ bool isKeyword(TokenType type) => reverseKeywords.containsKey(type);
|
|||
/// Returns true if [name] is a reserved keyword in sqlite.
|
||||
bool isKeywordLexeme(String name) => keywords.containsKey(name.toUpperCase());
|
||||
|
||||
/// Returns true if [name] is a reserved keyword in postgres.
|
||||
bool isPostgresKeywordLexeme(String name) =>
|
||||
postgresKeywords.containsKey(name.toUpperCase());
|
||||
|
||||
class Token implements SyntacticEntity {
|
||||
final TokenType type;
|
||||
|
||||
|
|
|
@ -27,18 +27,18 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
|
||||
@override
|
||||
void visitAggregateExpression(AggregateExpression e, void arg) {
|
||||
_symbol(e.name);
|
||||
symbol(e.name);
|
||||
|
||||
_symbol('(');
|
||||
symbol('(');
|
||||
visit(e.parameters, arg);
|
||||
_symbol(')');
|
||||
symbol(')');
|
||||
|
||||
if (e.filter != null) {
|
||||
_keyword(TokenType.filter);
|
||||
_symbol('(', spaceBefore: true);
|
||||
symbol('(', spaceBefore: true);
|
||||
_keyword(TokenType.where);
|
||||
visit(e.filter!, arg);
|
||||
_symbol(')', spaceAfter: true);
|
||||
symbol(')', spaceAfter: true);
|
||||
}
|
||||
|
||||
if (e.windowDefinition != null) {
|
||||
|
@ -46,7 +46,7 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
visit(e.windowDefinition!, arg);
|
||||
} else if (e.windowName != null) {
|
||||
_keyword(TokenType.over);
|
||||
_identifier(e.windowName!);
|
||||
identifier(e.windowName!);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -87,7 +87,7 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
void visitBinaryExpression(BinaryExpression e, void arg) {
|
||||
visit(e.left, arg);
|
||||
|
||||
final symbol = const {
|
||||
final operatorSymbol = const {
|
||||
TokenType.doublePipe: '||',
|
||||
TokenType.star: '*',
|
||||
TokenType.slash: '/',
|
||||
|
@ -108,8 +108,8 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
TokenType.lessMore: '<>',
|
||||
}[e.operator.type];
|
||||
|
||||
if (symbol != null) {
|
||||
_symbol(symbol, spaceBefore: true, spaceAfter: true);
|
||||
if (operatorSymbol != null) {
|
||||
symbol(operatorSymbol, spaceBefore: true, spaceAfter: true);
|
||||
} else {
|
||||
_keyword(e.operator.type);
|
||||
}
|
||||
|
@ -120,7 +120,7 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
void _writeStatements(Iterable<Statement> statements) {
|
||||
for (final stmt in statements) {
|
||||
visit(stmt, null);
|
||||
_symbol(';');
|
||||
symbol(';');
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -154,25 +154,25 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
@override
|
||||
void visitCastExpression(CastExpression e, void arg) {
|
||||
_keyword(TokenType.cast);
|
||||
_symbol('(');
|
||||
symbol('(');
|
||||
visit(e.operand, arg);
|
||||
_keyword(TokenType.as);
|
||||
_symbol(e.typeName, spaceBefore: true);
|
||||
_symbol(')', spaceAfter: true);
|
||||
symbol(e.typeName, spaceBefore: true);
|
||||
symbol(')', spaceAfter: true);
|
||||
}
|
||||
|
||||
@override
|
||||
void visitCollateExpression(CollateExpression e, void arg) {
|
||||
visit(e.inner, arg);
|
||||
_keyword(TokenType.collate);
|
||||
_identifier(e.collation);
|
||||
identifier(e.collation);
|
||||
}
|
||||
|
||||
@override
|
||||
void visitColumnConstraint(ColumnConstraint e, void arg) {
|
||||
if (e.name != null) {
|
||||
_keyword(TokenType.constraint);
|
||||
_identifier(e.name!);
|
||||
identifier(e.name!);
|
||||
}
|
||||
|
||||
e.when(primaryKey: (primaryKey) {
|
||||
|
@ -190,22 +190,22 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
_conflictClause(unique.onConflict);
|
||||
}, check: (check) {
|
||||
_keyword(TokenType.check);
|
||||
_symbol('(', spaceBefore: true);
|
||||
symbol('(', spaceBefore: true);
|
||||
visit(check.expression, arg);
|
||||
_symbol(')', spaceAfter: true);
|
||||
symbol(')', spaceAfter: true);
|
||||
}, isDefault: (def) {
|
||||
_keyword(TokenType.$default);
|
||||
final expr = def.expression;
|
||||
if (expr is Literal) {
|
||||
visit(expr, arg);
|
||||
} else {
|
||||
_symbol('(', spaceBefore: true);
|
||||
symbol('(', spaceBefore: true);
|
||||
visit(expr, arg);
|
||||
_symbol(')', spaceAfter: true);
|
||||
symbol(')', spaceAfter: true);
|
||||
}
|
||||
}, collate: (collate) {
|
||||
_keyword(TokenType.collate);
|
||||
_identifier(collate.collation);
|
||||
identifier(collate.collation);
|
||||
}, foreignKey: (foreignKey) {
|
||||
visit(foreignKey.clause, arg);
|
||||
}, generatedAs: (generatedAs) {
|
||||
|
@ -213,9 +213,9 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
_keyword(TokenType.always);
|
||||
_keyword(TokenType.as);
|
||||
|
||||
_symbol('(', spaceBefore: true);
|
||||
symbol('(', spaceBefore: true);
|
||||
visit(generatedAs.expression, arg);
|
||||
_symbol(')', spaceAfter: true);
|
||||
symbol(')', spaceAfter: true);
|
||||
|
||||
if (generatedAs.stored) {
|
||||
_keyword(TokenType.stored);
|
||||
|
@ -227,9 +227,9 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
|
||||
@override
|
||||
void visitColumnDefinition(ColumnDefinition e, void arg) {
|
||||
_identifier(e.columnName);
|
||||
identifier(e.columnName);
|
||||
if (e.typeName != null) {
|
||||
_symbol(e.typeName!, spaceAfter: true, spaceBefore: true);
|
||||
symbol(e.typeName!, spaceAfter: true, spaceBefore: true);
|
||||
}
|
||||
|
||||
visitList(e.constraints, arg);
|
||||
|
@ -242,9 +242,9 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
|
||||
@override
|
||||
void visitCommonTableExpression(CommonTableExpression e, void arg) {
|
||||
_identifier(e.cteTableName);
|
||||
identifier(e.cteTableName);
|
||||
if (e.columnNames != null) {
|
||||
_symbol('(${e.columnNames!.join(', ')})', spaceAfter: true);
|
||||
symbol('(${e.columnNames!.join(', ')})', spaceAfter: true);
|
||||
}
|
||||
|
||||
_keyword(TokenType.as);
|
||||
|
@ -260,9 +260,9 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
break;
|
||||
}
|
||||
|
||||
_symbol('(', spaceBefore: true);
|
||||
symbol('(', spaceBefore: true);
|
||||
visit(e.as, arg);
|
||||
_symbol(')', spaceAfter: true);
|
||||
symbol(')', spaceAfter: true);
|
||||
}
|
||||
|
||||
@override
|
||||
|
@ -302,13 +302,13 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
_keyword(TokenType.$index);
|
||||
_ifNotExists(e.ifNotExists);
|
||||
|
||||
_identifier(e.indexName);
|
||||
identifier(e.indexName);
|
||||
_keyword(TokenType.on);
|
||||
visit(e.on, arg);
|
||||
|
||||
_symbol('(', spaceBefore: true);
|
||||
symbol('(', spaceBefore: true);
|
||||
_join(e.columns, ',');
|
||||
_symbol(')', spaceAfter: true);
|
||||
symbol(')', spaceAfter: true);
|
||||
|
||||
_where(e.where);
|
||||
}
|
||||
|
@ -319,10 +319,10 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
_keyword(TokenType.table);
|
||||
_ifNotExists(e.ifNotExists);
|
||||
|
||||
_identifier(e.tableName);
|
||||
_symbol('(');
|
||||
identifier(e.tableName);
|
||||
symbol('(');
|
||||
_join([...e.columns, ...e.tableConstraints], ',');
|
||||
_symbol(')');
|
||||
symbol(')');
|
||||
|
||||
if (e.withoutRowId) {
|
||||
_keyword(TokenType.without);
|
||||
|
@ -330,7 +330,7 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
}
|
||||
|
||||
if (e.isStrict) {
|
||||
if (e.withoutRowId) _symbol(',');
|
||||
if (e.withoutRowId) symbol(',');
|
||||
|
||||
_keyword(TokenType.strict);
|
||||
}
|
||||
|
@ -342,7 +342,7 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
_keyword(TokenType.trigger);
|
||||
_ifNotExists(e.ifNotExists);
|
||||
|
||||
_identifier(e.triggerName);
|
||||
identifier(e.triggerName);
|
||||
|
||||
switch (e.mode) {
|
||||
case TriggerMode.before:
|
||||
|
@ -379,12 +379,12 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
_keyword(TokenType.view);
|
||||
_ifNotExists(e.ifNotExists);
|
||||
|
||||
_identifier(e.viewName);
|
||||
identifier(e.viewName);
|
||||
|
||||
if (e.columns != null) {
|
||||
_symbol('(', spaceBefore: true);
|
||||
_symbol(e.columns!.join(','));
|
||||
_symbol(')', spaceAfter: true);
|
||||
symbol('(', spaceBefore: true);
|
||||
symbol(e.columns!.join(','));
|
||||
symbol(')', spaceAfter: true);
|
||||
}
|
||||
|
||||
_keyword(TokenType.as);
|
||||
|
@ -399,35 +399,35 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
_keyword(TokenType.table);
|
||||
_ifNotExists(e.ifNotExists);
|
||||
|
||||
_identifier(e.tableName);
|
||||
identifier(e.tableName);
|
||||
_keyword(TokenType.using);
|
||||
_identifier(e.moduleName);
|
||||
identifier(e.moduleName);
|
||||
|
||||
_symbol('(${e.argumentContent.join(', ')})');
|
||||
symbol('(${e.argumentContent.join(', ')})');
|
||||
}
|
||||
|
||||
@override
|
||||
void visitMoorSpecificNode(MoorSpecificNode e, void arg) {
|
||||
if (e is DartPlaceholder) {
|
||||
_symbol(r'$', spaceBefore: true);
|
||||
_symbol(e.name, spaceAfter: true);
|
||||
symbol(r'$', spaceBefore: true);
|
||||
symbol(e.name, spaceAfter: true);
|
||||
} else if (e is DeclaredStatement) {
|
||||
_identifier(e.identifier.name);
|
||||
identifier(e.identifier.name);
|
||||
|
||||
if (e.parameters.isNotEmpty) {
|
||||
_symbol('(');
|
||||
symbol('(');
|
||||
_join(e.parameters, ',');
|
||||
_symbol(')');
|
||||
symbol(')');
|
||||
}
|
||||
|
||||
if (e.as != null) {
|
||||
_keyword(TokenType.as);
|
||||
_identifier(e.as!);
|
||||
identifier(e.as!);
|
||||
}
|
||||
|
||||
_symbol(':', spaceAfter: true);
|
||||
symbol(':', spaceAfter: true);
|
||||
visit(e.statement, arg);
|
||||
_symbol(';');
|
||||
symbol(';');
|
||||
} else if (e is MoorFile) {
|
||||
for (final stmt in e.statements) {
|
||||
visit(stmt, arg);
|
||||
|
@ -437,10 +437,10 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
} else if (e is ImportStatement) {
|
||||
_keyword(TokenType.import);
|
||||
_stringLiteral(e.importedFile);
|
||||
_symbol(';', spaceAfter: true);
|
||||
symbol(';', spaceAfter: true);
|
||||
} else if (e is NestedStarResultColumn) {
|
||||
_identifier(e.tableName);
|
||||
_symbol('.**', spaceAfter: true);
|
||||
identifier(e.tableName);
|
||||
symbol('.**', spaceAfter: true);
|
||||
} else if (e is StatementParameter) {
|
||||
if (e is VariableTypeHint) {
|
||||
if (e.isRequired) _keyword(TokenType.required);
|
||||
|
@ -449,7 +449,7 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
final typeName = e.typeName;
|
||||
if (typeName != null) {
|
||||
_keyword(TokenType.as);
|
||||
_symbol(typeName, spaceBefore: true, spaceAfter: true);
|
||||
symbol(typeName, spaceBefore: true, spaceAfter: true);
|
||||
}
|
||||
|
||||
if (e.orNull) {
|
||||
|
@ -457,18 +457,18 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
_keyword(TokenType.$null);
|
||||
}
|
||||
} else if (e is DartPlaceholderDefaultValue) {
|
||||
_symbol('\$${e.variableName}', spaceAfter: true);
|
||||
_symbol('=', spaceBefore: true, spaceAfter: true);
|
||||
symbol('\$${e.variableName}', spaceAfter: true);
|
||||
symbol('=', spaceBefore: true, spaceAfter: true);
|
||||
visit(e.defaultValue, arg);
|
||||
} else {
|
||||
throw AssertionError('Unknown StatementParameter: $e');
|
||||
}
|
||||
} else if (e is MoorTableName) {
|
||||
_keyword(e.useExistingDartClass ? TokenType.$with : TokenType.as);
|
||||
_identifier(e.overriddenDataClassName);
|
||||
identifier(e.overriddenDataClassName);
|
||||
} else if (e is NestedStarResultColumn) {
|
||||
_identifier(e.tableName);
|
||||
_symbol('.**', spaceAfter: true);
|
||||
identifier(e.tableName);
|
||||
symbol('.**', spaceAfter: true);
|
||||
} else if (e is TransactionBlock) {
|
||||
visit(e.begin, arg);
|
||||
_writeStatements(e.innerStatements);
|
||||
|
@ -535,9 +535,9 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
@override
|
||||
void visitExists(ExistsExpression e, void arg) {
|
||||
_keyword(TokenType.exists);
|
||||
_symbol('(', spaceBefore: true);
|
||||
symbol('(', spaceBefore: true);
|
||||
visit(e.select, null);
|
||||
_symbol(')', spaceAfter: true);
|
||||
symbol(')', spaceAfter: true);
|
||||
}
|
||||
|
||||
@override
|
||||
|
@ -553,7 +553,7 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
visit(e.expression, arg);
|
||||
if (e.as != null) {
|
||||
_keyword(TokenType.as);
|
||||
_identifier(e.as!);
|
||||
identifier(e.as!);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -563,9 +563,9 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
visit(e.foreignTable, arg);
|
||||
|
||||
if (e.columnNames.isNotEmpty) {
|
||||
_symbol('(');
|
||||
symbol('(');
|
||||
_join(e.columnNames, ',');
|
||||
_symbol(')');
|
||||
symbol(')');
|
||||
}
|
||||
|
||||
void referenceAction(ReferenceAction action) {
|
||||
|
@ -662,10 +662,10 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
|
||||
@override
|
||||
void visitFunction(FunctionExpression e, void arg) {
|
||||
_identifier(e.name);
|
||||
_symbol('(');
|
||||
identifier(e.name);
|
||||
symbol('(');
|
||||
visit(e.parameters, arg);
|
||||
_symbol(')', spaceAfter: true);
|
||||
symbol(')', spaceAfter: true);
|
||||
}
|
||||
|
||||
@override
|
||||
|
@ -726,9 +726,9 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
visit(e.table, arg);
|
||||
|
||||
if (e.targetColumns.isNotEmpty) {
|
||||
_symbol('(', spaceBefore: true);
|
||||
symbol('(', spaceBefore: true);
|
||||
_join(e.targetColumns, ',');
|
||||
_symbol(')', spaceAfter: true);
|
||||
symbol(')', spaceAfter: true);
|
||||
}
|
||||
|
||||
visit(e.source, arg);
|
||||
|
@ -772,7 +772,7 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
@override
|
||||
void visitJoin(Join e, void arg) {
|
||||
if (e.operator == JoinOperator.comma) {
|
||||
_symbol(',');
|
||||
symbol(',');
|
||||
} else {
|
||||
if (e.natural) {
|
||||
_keyword(TokenType.natural);
|
||||
|
@ -808,7 +808,7 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
visit(constraint.expression, arg);
|
||||
} else if (constraint is UsingConstraint) {
|
||||
_keyword(TokenType.using);
|
||||
_symbol('(${constraint.columnNames.join(', ')})');
|
||||
symbol('(${constraint.columnNames.join(', ')})');
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -832,7 +832,7 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
@override
|
||||
void visitNamedVariable(ColonNamedVariable e, void arg) {
|
||||
// Note: The name already starts with the colon
|
||||
_symbol(e.name, spaceBefore: true, spaceAfter: true);
|
||||
symbol(e.name, spaceBefore: true, spaceAfter: true);
|
||||
}
|
||||
|
||||
@override
|
||||
|
@ -842,15 +842,15 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
|
||||
@override
|
||||
void visitNumberedVariable(NumberedVariable e, void arg) {
|
||||
_symbol('?', spaceBefore: true, spaceAfter: e.explicitIndex == null);
|
||||
symbol('?', spaceBefore: true, spaceAfter: e.explicitIndex == null);
|
||||
if (e.explicitIndex != null) {
|
||||
_symbol(e.explicitIndex.toString(), spaceAfter: true);
|
||||
symbol(e.explicitIndex.toString(), spaceAfter: true);
|
||||
}
|
||||
}
|
||||
|
||||
@override
|
||||
void visitNumericLiteral(NumericLiteral e, void arg) {
|
||||
_symbol(e.value.toString(), spaceBefore: true, spaceAfter: true);
|
||||
symbol(e.value.toString(), spaceBefore: true, spaceAfter: true);
|
||||
}
|
||||
|
||||
@override
|
||||
|
@ -877,15 +877,15 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
|
||||
@override
|
||||
void visitParentheses(Parentheses e, void arg) {
|
||||
_symbol('(');
|
||||
symbol('(');
|
||||
visit(e.expression, arg);
|
||||
_symbol(')');
|
||||
symbol(')');
|
||||
}
|
||||
|
||||
@override
|
||||
void visitRaiseExpression(RaiseExpression e, void arg) {
|
||||
_keyword(TokenType.raise);
|
||||
_symbol('(', spaceBefore: true);
|
||||
symbol('(', spaceBefore: true);
|
||||
_keyword(const {
|
||||
RaiseKind.ignore: TokenType.ignore,
|
||||
RaiseKind.rollback: TokenType.rollback,
|
||||
|
@ -894,10 +894,10 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
}[e.raiseKind]!);
|
||||
|
||||
if (e.errorMessage != null) {
|
||||
_symbol(',', spaceAfter: true);
|
||||
symbol(',', spaceAfter: true);
|
||||
_stringLiteral(e.errorMessage!);
|
||||
}
|
||||
_symbol(')', spaceAfter: true);
|
||||
symbol(')', spaceAfter: true);
|
||||
}
|
||||
|
||||
@override
|
||||
|
@ -905,18 +905,18 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
var didWriteSpaceBefore = false;
|
||||
|
||||
if (e.schemaName != null) {
|
||||
_identifier(e.schemaName!, spaceAfter: false);
|
||||
_symbol('.');
|
||||
identifier(e.schemaName!, spaceAfter: false);
|
||||
symbol('.');
|
||||
didWriteSpaceBefore = true;
|
||||
}
|
||||
if (e.entityName != null) {
|
||||
_identifier(e.entityName!,
|
||||
identifier(e.entityName!,
|
||||
spaceAfter: false, spaceBefore: !didWriteSpaceBefore);
|
||||
_symbol('.');
|
||||
symbol('.');
|
||||
didWriteSpaceBefore = true;
|
||||
}
|
||||
|
||||
_identifier(e.columnName,
|
||||
identifier(e.columnName,
|
||||
spaceAfter: true, spaceBefore: !didWriteSpaceBefore);
|
||||
}
|
||||
|
||||
|
@ -950,10 +950,10 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
var isFirst = true;
|
||||
for (final declaration in e.windowDeclarations) {
|
||||
if (!isFirst) {
|
||||
_symbol(',', spaceAfter: true);
|
||||
symbol(',', spaceAfter: true);
|
||||
}
|
||||
|
||||
_identifier(declaration.name);
|
||||
identifier(declaration.name);
|
||||
_keyword(TokenType.as);
|
||||
|
||||
visit(declaration.definition, arg);
|
||||
|
@ -966,36 +966,36 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
|
||||
@override
|
||||
void visitSelectStatementAsSource(SelectStatementAsSource e, void arg) {
|
||||
_symbol('(', spaceBefore: true);
|
||||
symbol('(', spaceBefore: true);
|
||||
visit(e.statement, arg);
|
||||
_symbol(')', spaceAfter: true);
|
||||
symbol(')', spaceAfter: true);
|
||||
|
||||
if (e.as != null) {
|
||||
_keyword(TokenType.as);
|
||||
_identifier(e.as!);
|
||||
identifier(e.as!);
|
||||
}
|
||||
}
|
||||
|
||||
@override
|
||||
void visitSetComponent(SetComponent e, void arg) {
|
||||
visit(e.column, arg);
|
||||
_symbol('=', spaceBefore: true, spaceAfter: true);
|
||||
symbol('=', spaceBefore: true, spaceAfter: true);
|
||||
visit(e.expression, arg);
|
||||
}
|
||||
|
||||
@override
|
||||
void visitStarFunctionParameter(StarFunctionParameter e, void arg) {
|
||||
_symbol('*', spaceAfter: true);
|
||||
symbol('*', spaceAfter: true);
|
||||
}
|
||||
|
||||
@override
|
||||
void visitStarResultColumn(StarResultColumn e, void arg) {
|
||||
if (e.tableName != null) {
|
||||
_identifier(e.tableName!);
|
||||
_symbol('.');
|
||||
identifier(e.tableName!);
|
||||
symbol('.');
|
||||
}
|
||||
|
||||
_symbol('*', spaceAfter: true, spaceBefore: e.tableName == null);
|
||||
symbol('*', spaceAfter: true, spaceBefore: e.tableName == null);
|
||||
}
|
||||
|
||||
@override
|
||||
|
@ -1021,16 +1021,16 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
|
||||
@override
|
||||
void visitSubQuery(SubQuery e, void arg) {
|
||||
_symbol('(', spaceBefore: true);
|
||||
symbol('(', spaceBefore: true);
|
||||
visit(e.select, arg);
|
||||
_symbol(')', spaceAfter: true);
|
||||
symbol(')', spaceAfter: true);
|
||||
}
|
||||
|
||||
@override
|
||||
void visitTableConstraint(TableConstraint e, void arg) {
|
||||
if (e.name != null) {
|
||||
_keyword(TokenType.constraint);
|
||||
_identifier(e.name!);
|
||||
identifier(e.name!);
|
||||
}
|
||||
|
||||
if (e is KeyClause) {
|
||||
|
@ -1041,21 +1041,21 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
_keyword(TokenType.unique);
|
||||
}
|
||||
|
||||
_symbol('(');
|
||||
symbol('(');
|
||||
_join(e.columns, ',');
|
||||
_symbol(')');
|
||||
symbol(')');
|
||||
_conflictClause(e.onConflict);
|
||||
} else if (e is CheckTable) {
|
||||
_keyword(TokenType.check);
|
||||
_symbol('(');
|
||||
symbol('(');
|
||||
visit(e.expression, arg);
|
||||
_symbol(')');
|
||||
symbol(')');
|
||||
} else if (e is ForeignKeyTableConstraint) {
|
||||
_keyword(TokenType.foreign);
|
||||
_keyword(TokenType.key);
|
||||
_symbol('(');
|
||||
symbol('(');
|
||||
_join(e.columns, ',');
|
||||
_symbol(')');
|
||||
symbol(')');
|
||||
visit(e.clause, arg);
|
||||
}
|
||||
}
|
||||
|
@ -1063,27 +1063,27 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
@override
|
||||
void visitTableReference(TableReference e, void arg) {
|
||||
if (e.schemaName != null) {
|
||||
_identifier(e.schemaName!, spaceAfter: false);
|
||||
_symbol('.');
|
||||
identifier(e.schemaName!, spaceAfter: false);
|
||||
symbol('.');
|
||||
}
|
||||
_identifier(e.tableName, spaceBefore: e.schemaName == null);
|
||||
identifier(e.tableName, spaceBefore: e.schemaName == null);
|
||||
|
||||
if (e.as != null) {
|
||||
_keyword(TokenType.as);
|
||||
_identifier(e.as!);
|
||||
identifier(e.as!);
|
||||
}
|
||||
}
|
||||
|
||||
@override
|
||||
void visitTableValuedFunction(TableValuedFunction e, void arg) {
|
||||
_identifier(e.name);
|
||||
_symbol('(');
|
||||
identifier(e.name);
|
||||
symbol('(');
|
||||
visit(e.parameters, arg);
|
||||
_symbol(')');
|
||||
symbol(')');
|
||||
|
||||
if (e.as != null) {
|
||||
_keyword(TokenType.as);
|
||||
_identifier(e.as!);
|
||||
identifier(e.as!);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1104,22 +1104,22 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
|
||||
@override
|
||||
void visitTuple(Tuple e, void arg) {
|
||||
_symbol('(', spaceBefore: true);
|
||||
symbol('(', spaceBefore: true);
|
||||
_join(e.expressions, ',');
|
||||
_symbol(')', spaceAfter: true);
|
||||
symbol(')', spaceAfter: true);
|
||||
}
|
||||
|
||||
@override
|
||||
void visitUnaryExpression(UnaryExpression e, void arg) {
|
||||
switch (e.operator.type) {
|
||||
case TokenType.minus:
|
||||
_symbol('-', spaceBefore: true);
|
||||
symbol('-', spaceBefore: true);
|
||||
break;
|
||||
case TokenType.plus:
|
||||
_symbol('+', spaceBefore: true);
|
||||
symbol('+', spaceBefore: true);
|
||||
break;
|
||||
case TokenType.tilde:
|
||||
_symbol('~', spaceBefore: true);
|
||||
symbol('~', spaceBefore: true);
|
||||
break;
|
||||
case TokenType.not:
|
||||
_keyword(TokenType.not);
|
||||
|
@ -1206,10 +1206,10 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
|
||||
@override
|
||||
void visitWindowDefinition(WindowDefinition e, void arg) {
|
||||
_symbol('(', spaceBefore: true);
|
||||
symbol('(', spaceBefore: true);
|
||||
|
||||
if (e.baseWindowName != null) {
|
||||
_identifier(e.baseWindowName!);
|
||||
identifier(e.baseWindowName!);
|
||||
}
|
||||
|
||||
if (e.partitionBy.isNotEmpty) {
|
||||
|
@ -1221,7 +1221,7 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
visitNullable(e.orderBy, arg);
|
||||
visitNullable(e.frameSpec, arg);
|
||||
|
||||
_symbol(')', spaceAfter: true);
|
||||
symbol(')', spaceAfter: true);
|
||||
}
|
||||
|
||||
@override
|
||||
|
@ -1256,13 +1256,14 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
}
|
||||
}
|
||||
|
||||
void _identifier(String identifier,
|
||||
/// Writes an identifier, escaping it if necessary.
|
||||
void identifier(String identifier,
|
||||
{bool spaceBefore = true, bool spaceAfter = true}) {
|
||||
if (isKeywordLexeme(identifier) || identifier.contains(' ')) {
|
||||
identifier = '"$identifier"';
|
||||
}
|
||||
|
||||
_symbol(identifier, spaceBefore: spaceBefore, spaceAfter: spaceAfter);
|
||||
symbol(identifier, spaceBefore: spaceBefore, spaceAfter: spaceAfter);
|
||||
}
|
||||
|
||||
void _ifNotExists(bool ifNotExists) {
|
||||
|
@ -1278,7 +1279,7 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
|
||||
for (final node in nodes) {
|
||||
if (!isFirst) {
|
||||
_symbol(separatingSymbol, spaceAfter: true);
|
||||
symbol(separatingSymbol, spaceAfter: true);
|
||||
}
|
||||
|
||||
visit(node, null);
|
||||
|
@ -1287,7 +1288,7 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
}
|
||||
|
||||
void _keyword(TokenType type) {
|
||||
_symbol(reverseKeywords[type]!, spaceAfter: true, spaceBefore: true);
|
||||
symbol(reverseKeywords[type]!, spaceAfter: true, spaceBefore: true);
|
||||
}
|
||||
|
||||
void _orderingMode(OrderingMode? mode) {
|
||||
|
@ -1303,10 +1304,11 @@ class NodeSqlBuilder extends AstVisitor<void, void> {
|
|||
|
||||
void _stringLiteral(String content) {
|
||||
final escapedChars = content.replaceAll("'", "''");
|
||||
_symbol("'$escapedChars'", spaceBefore: true, spaceAfter: true);
|
||||
symbol("'$escapedChars'", spaceBefore: true, spaceAfter: true);
|
||||
}
|
||||
|
||||
void _symbol(String lexeme,
|
||||
/// Writes the [lexeme], unchanged.
|
||||
void symbol(String lexeme,
|
||||
{bool spaceBefore = false, bool spaceAfter = false}) {
|
||||
if (needsSpace && spaceBefore) {
|
||||
_space();
|
||||
|
@ -1337,7 +1339,7 @@ extension NodeToText on AstNode {
|
|||
/// ways to represent an equivalent node (e.g. the no-op `FOR EACH ROW` on
|
||||
/// triggers).
|
||||
String toSql() {
|
||||
final builder = NodeSqlBuilder();
|
||||
final builder = NodeSqlBuilder(null);
|
||||
builder.visit(this, null);
|
||||
return builder.buffer.toString();
|
||||
}
|
||||
|
|
|
@ -14,3 +14,5 @@ echo "- Generate web_worker_example"
|
|||
(cd ../extras/web_worker_example && dart pub get && dart run build_runner build --delete-conflicting-outputs)
|
||||
echo "- Generate with_built_value"
|
||||
(cd ../extras/with_built_value && dart pub get && dart run build_runner build --delete-conflicting-outputs)
|
||||
echo "- Generate flutter_web_worker_example"
|
||||
(cd ../extras/flutter_web_worker_example && dart pub get && dart run build_runner build --delete-conflicting-outputs)
|
||||
|
|
|
@ -2,12 +2,22 @@
|
|||
|
||||
EXIT_CODE=0
|
||||
|
||||
pushd extras/integration_tests
|
||||
find . -type d -name .dart_tool -exec rm -rf {} \;
|
||||
popd
|
||||
|
||||
pushd extras/integration_tests/vm
|
||||
echo "Running integration tests with moor_ffi & VM"
|
||||
dart pub upgrade
|
||||
dart test || EXIT_CODE=$?
|
||||
popd
|
||||
|
||||
pushd extras/integration_tests/postgres
|
||||
echo "Running integration tests with Postgres"
|
||||
dart pub upgrade
|
||||
dart test || EXIT_CODE=$?
|
||||
popd
|
||||
|
||||
pushd extras/with_built_value
|
||||
echo "Running build runner in with_built_value"
|
||||
dart pub upgrade
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
find ../ -name pubspec.yaml -execdir dart pub get \;
|
|
@ -1,6 +1,7 @@
|
|||
#!/bin/bash
|
||||
set -e
|
||||
cd ../drift
|
||||
rm -rf .dart_tool
|
||||
dart pub get
|
||||
dart format -o none --set-exit-if-changed .
|
||||
dart analyze --fatal-infos --fatal-warnings
|
||||
|
@ -8,6 +9,14 @@ dart run build_runner build --delete-conflicting-outputs
|
|||
dart test
|
||||
|
||||
cd ../drift_dev
|
||||
rm -rf .dart_tool
|
||||
dart pub get
|
||||
dart format -o none --set-exit-if-changed .
|
||||
dart analyze --fatal-infos --fatal-warnings
|
||||
dart test
|
||||
|
||||
cd ../sqlparser
|
||||
rm -rf .dart_tool
|
||||
dart pub get
|
||||
dart format -o none --set-exit-if-changed .
|
||||
dart analyze --fatal-infos --fatal-warnings
|
||||
|
|
Loading…
Reference in New Issue