mirror of https://github.com/AMT-Cheif/drift.git
Merge branch 'develop' into ffi
# Conflicts: # moor/lib/src/runtime/database.dart # moor/lib/src/runtime/executor/executor.dart
This commit is contained in:
commit
70033b0bde
|
@ -5,6 +5,10 @@ container:
|
|||
task:
|
||||
pub_cache:
|
||||
folder: $HOME/.pub-cache
|
||||
environment:
|
||||
CODECOV_TOKEN: ENCRYPTED[6322a159d9b7692b999d13fa2bc7981c8b61ecb1ac18ae076864f1355ee9b65088b2bf9d97d7860196e58bf1db5708af]
|
||||
pub_get_script: tool/pub_get.sh
|
||||
analyze_script: tool/analyze.sh
|
||||
test_moor_script: tool/test_moor.sh
|
||||
test_moor_script: tool/test_moor.sh
|
||||
test_sqlparser_script: tool/test_sqlparser.sh
|
||||
format_coverage_script: tool/upload_coverage.sh
|
|
@ -0,0 +1 @@
|
|||
*.moor linguist-language=SQL
|
|
@ -1,2 +1,4 @@
|
|||
**/.idea
|
||||
**/*.iml
|
||||
|
||||
lcov.info
|
15
.travis.yml
15
.travis.yml
|
@ -1,15 +0,0 @@
|
|||
language: dart
|
||||
|
||||
dart:
|
||||
- stable
|
||||
env:
|
||||
- PKG="moor"
|
||||
- PKG="moor_generator"
|
||||
- PKG="sqlparser"
|
||||
|
||||
script: ./tool/mono_repo_wrapper.sh
|
||||
after_success: ./tool/upload_coverage.sh
|
||||
|
||||
cache:
|
||||
directories:
|
||||
- "$HOME/.pub-cache"
|
|
@ -1,5 +1,5 @@
|
|||
# Moor
|
||||
[](https://travis-ci.com/simolus3/moor)
|
||||
[](https://cirrus-ci.com/github/simolus3/moor)
|
||||
[](https://codecov.io/gh/simolus3/moor)
|
||||
|
||||
|
||||
|
|
|
@ -28,7 +28,6 @@ linter:
|
|||
- await_only_futures
|
||||
- camel_case_types
|
||||
- cancel_subscriptions
|
||||
- cascade_invocations
|
||||
- comment_references
|
||||
- constant_identifier_names
|
||||
- curly_braces_in_flow_control_structures
|
||||
|
|
|
@ -0,0 +1,75 @@
|
|||
---
|
||||
layout: feature
|
||||
title: Type converters
|
||||
since: 1.7
|
||||
nav_order: 8
|
||||
permalink: /type_converters
|
||||
---
|
||||
|
||||
# Type converters
|
||||
Moor supports a variety of types out of the box, but sometimes you need to store more complex types.
|
||||
You can achieve this by using `TypeConverters`. In this example, we'll use the the
|
||||
[json_serializable](https://pub.dev/packages/json_annotation) package to store a custom object in a
|
||||
column. Moor supports any Dart object, but using that package can make serialization easier.
|
||||
```dart
|
||||
import 'dart:convert';
|
||||
|
||||
import 'package:json_annotation/json_annotation.dart' as j;
|
||||
import 'package:moor/moor.dart';
|
||||
|
||||
part 'database.g.dart';
|
||||
|
||||
@j.JsonSerializable()
|
||||
class Preferences {
|
||||
bool receiveEmails;
|
||||
String selectedTheme;
|
||||
|
||||
Preferences(this.receiveEmails, this.selectedTheme);
|
||||
|
||||
factory Preferences.fromJson(Map<String, dynamic> json) =>
|
||||
_$PreferencesFromJson(json);
|
||||
|
||||
Map<String, dynamic> toJson() => _$PreferencesToJson(this);
|
||||
}
|
||||
```
|
||||
|
||||
Next, we have to tell moor how to store a `Preferences` object in the database. We write
|
||||
a `TypeConverter` for that:
|
||||
```dart
|
||||
// stores preferences as strings
|
||||
class PreferenceConverter extends TypeConverter<Preferences, String> {
|
||||
const PreferenceConverter();
|
||||
@override
|
||||
Preferences mapToDart(String fromDb) {
|
||||
if (fromDb == null) {
|
||||
return null;
|
||||
}
|
||||
return Preferences.fromJson(json.decode(fromDb) as Map<String, dynamic>);
|
||||
}
|
||||
|
||||
@override
|
||||
String mapToSql(Preferences value) {
|
||||
if (value == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return json.encode(value.toJson());
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Finally, we can use that converter in a table declaration:
|
||||
```dart
|
||||
class Users extends Table {
|
||||
IntColumn get id => integer().autoIncrement()();
|
||||
TextColumn get name => text()();
|
||||
|
||||
TextColumn get preferences =>
|
||||
text().map(const PreferenceConverter()).nullable()();
|
||||
}
|
||||
```
|
||||
|
||||
The generated `User` class will then have a `preferences` column of type
|
||||
`Preferences`. Moor will automatically take care of storing and loading
|
||||
the object in `select`, `update` and `insert` statements. This feature
|
||||
also works with [compiled custom queries]({{ "/queries/custom" | absolute_url }}).
|
|
@ -14,7 +14,7 @@ database tables in pure Dart and enjoy a fluent query API, auto-updating streams
|
|||
and more!
|
||||
{: .fs-6 .fw-300 }
|
||||
|
||||
[](https://travis-ci.com/simolus3/moor)
|
||||
[](https://cirrus-ci.com/github/simolus3/moor)
|
||||
[](https://codecov.io/gh/simolus3/moor)
|
||||
|
||||
[Get started now]({{ site.common_links.getting_started | absolute_url }}){: .btn .btn-green .fs-5 .mb-4 .mb-md-0 .mr-2 }
|
||||
|
|
|
@ -15,5 +15,7 @@ dependencies:
|
|||
path: extras/encryption
|
||||
```
|
||||
|
||||
Then, instead using a `FlutterQueryExecutor`,
|
||||
`import 'package:encrypted_moor/encrypted_moor.dart'` and use the `EncryptedExecutor`.
|
||||
To use this, you can stop depending on `moor_flutter`. Then, instead of using
|
||||
a `FlutterQueryExecutor`, import `package:moor/moor.dart` and `package:encrypted_moor/encrypted_moor.dart`.
|
||||
|
||||
You can then replace the `FlutterQueryExecutor` with an `EncryptedExecutor`.
|
||||
|
|
|
@ -0,0 +1,23 @@
|
|||
# Integration tests
|
||||
|
||||
These directories contain integration tests for the various moor backends by running the same
|
||||
set of actions on multiple databases.
|
||||
|
||||
## Tests
|
||||
All test cases live in `tests/lib`. We have a `runAllTests` method that basically takes a
|
||||
`QueryExecutor` (the database backend in moor) and then runs all tests on that executor.
|
||||
Everything the other packages are doing is calling the `runAllTests` method with the
|
||||
database they're supposed to test.
|
||||
|
||||
------
|
||||
|
||||
Flutter is a bit annoying here because AFAIK there is no easy way to run tests that run on
|
||||
a real device? With `flutter drive`, the tests are still run on a local machine which
|
||||
communicates with an app to verify behavior of widgets. As we want to run the whole test bundle
|
||||
on a device, we instead put the test files into `flutter_db/lib` and run them with
|
||||
`flutter run`. That works, but we don't get an output format that is machine readable.
|
||||
Please create an issue if you know a better way, thanks!
|
||||
TODO: https://github.com/tomaszpolanski/flutter-presentations/blob/master/lib/test_driver/test_runner.dart
|
||||
looks promising
|
||||
|
||||
That is also why these tests are not running automatically.
|
|
@ -8,7 +8,10 @@ import 'package:path/path.dart';
|
|||
class SqfliteExecutor extends TestExecutor {
|
||||
@override
|
||||
QueryExecutor createExecutor() {
|
||||
return FlutterQueryExecutor.inDatabaseFolder(path: 'app.db');
|
||||
return FlutterQueryExecutor.inDatabaseFolder(
|
||||
path: 'app.db',
|
||||
singleInstance: false,
|
||||
);
|
||||
}
|
||||
|
||||
@override
|
||||
|
|
|
@ -15,9 +15,8 @@ class Users extends Table {
|
|||
|
||||
BlobColumn get profilePicture => blob().nullable()();
|
||||
|
||||
// todo enable custom column example. The feature isn't stable yet.
|
||||
//TextColumn get preferences =>
|
||||
// text().map(const PreferenceConverter()).nullable()();
|
||||
TextColumn get preferences =>
|
||||
text().map(const PreferenceConverter()).nullable()();
|
||||
}
|
||||
|
||||
class Friendships extends Table {
|
||||
|
@ -59,7 +58,7 @@ class PreferenceConverter extends TypeConverter<Preferences, String> {
|
|||
return null;
|
||||
}
|
||||
|
||||
return json.encode(json.encode(value.toJson()));
|
||||
return json.encode(value.toJson());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -71,6 +70,7 @@ class PreferenceConverter extends TypeConverter<Preferences, String> {
|
|||
'amountOfGoodFriends':
|
||||
'SELECT COUNT(*) FROM friendships f WHERE f.really_good_friends AND (f.first_user = :user OR f.second_user = :user)',
|
||||
'userCount': 'SELECT COUNT(id) FROM users',
|
||||
'settingsFor': 'SELECT preferences FROM users WHERE id = :user',
|
||||
},
|
||||
)
|
||||
class Database extends _$Database {
|
||||
|
@ -121,6 +121,14 @@ class Database extends _$Database {
|
|||
return (select(users)..where((u) => u.id.equals(id))).watchSingle();
|
||||
}
|
||||
|
||||
Future<User> getUserById(int id) {
|
||||
return (select(users)..where((u) => u.id.equals(id))).getSingle();
|
||||
}
|
||||
|
||||
Future<int> writeUser(Insertable<User> user) {
|
||||
return into(users).insert(user);
|
||||
}
|
||||
|
||||
Future<void> makeFriends(User a, User b, {bool goodFriends}) async {
|
||||
var friendsValue = const Value<bool>.absent();
|
||||
if (goodFriends != null) {
|
||||
|
@ -135,4 +143,9 @@ class Database extends _$Database {
|
|||
|
||||
await into(friendships).insert(companion, orReplace: true);
|
||||
}
|
||||
|
||||
Future<void> updateSettings(int userId, Preferences c) async {
|
||||
await (update(users)..where((u) => u.id.equals(userId)))
|
||||
.write(UsersCompanion(preferences: Value(c)));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -27,11 +27,13 @@ class User extends DataClass implements Insertable<User> {
|
|||
final String name;
|
||||
final DateTime birthDate;
|
||||
final Uint8List profilePicture;
|
||||
final Preferences preferences;
|
||||
User(
|
||||
{@required this.id,
|
||||
@required this.name,
|
||||
@required this.birthDate,
|
||||
this.profilePicture});
|
||||
this.profilePicture,
|
||||
this.preferences});
|
||||
factory User.fromData(Map<String, dynamic> data, GeneratedDatabase db,
|
||||
{String prefix}) {
|
||||
final effectivePrefix = prefix ?? '';
|
||||
|
@ -46,6 +48,8 @@ class User extends DataClass implements Insertable<User> {
|
|||
.mapFromDatabaseResponse(data['${effectivePrefix}birth_date']),
|
||||
profilePicture: uint8ListType
|
||||
.mapFromDatabaseResponse(data['${effectivePrefix}profile_picture']),
|
||||
preferences: $UsersTable.$converter0.mapToDart(stringType
|
||||
.mapFromDatabaseResponse(data['${effectivePrefix}preferences'])),
|
||||
);
|
||||
}
|
||||
factory User.fromJson(Map<String, dynamic> json,
|
||||
|
@ -55,6 +59,7 @@ class User extends DataClass implements Insertable<User> {
|
|||
name: serializer.fromJson<String>(json['name']),
|
||||
birthDate: serializer.fromJson<DateTime>(json['born_on']),
|
||||
profilePicture: serializer.fromJson<Uint8List>(json['profilePicture']),
|
||||
preferences: serializer.fromJson<Preferences>(json['preferences']),
|
||||
);
|
||||
}
|
||||
@override
|
||||
|
@ -65,6 +70,7 @@ class User extends DataClass implements Insertable<User> {
|
|||
'name': serializer.toJson<String>(name),
|
||||
'born_on': serializer.toJson<DateTime>(birthDate),
|
||||
'profilePicture': serializer.toJson<Uint8List>(profilePicture),
|
||||
'preferences': serializer.toJson<Preferences>(preferences),
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -79,6 +85,9 @@ class User extends DataClass implements Insertable<User> {
|
|||
profilePicture: profilePicture == null && nullToAbsent
|
||||
? const Value.absent()
|
||||
: Value(profilePicture),
|
||||
preferences: preferences == null && nullToAbsent
|
||||
? const Value.absent()
|
||||
: Value(preferences),
|
||||
) as T;
|
||||
}
|
||||
|
||||
|
@ -86,12 +95,14 @@ class User extends DataClass implements Insertable<User> {
|
|||
{int id,
|
||||
String name,
|
||||
DateTime birthDate,
|
||||
Uint8List profilePicture}) =>
|
||||
Uint8List profilePicture,
|
||||
Preferences preferences}) =>
|
||||
User(
|
||||
id: id ?? this.id,
|
||||
name: name ?? this.name,
|
||||
birthDate: birthDate ?? this.birthDate,
|
||||
profilePicture: profilePicture ?? this.profilePicture,
|
||||
preferences: preferences ?? this.preferences,
|
||||
);
|
||||
@override
|
||||
String toString() {
|
||||
|
@ -99,7 +110,8 @@ class User extends DataClass implements Insertable<User> {
|
|||
..write('id: $id, ')
|
||||
..write('name: $name, ')
|
||||
..write('birthDate: $birthDate, ')
|
||||
..write('profilePicture: $profilePicture')
|
||||
..write('profilePicture: $profilePicture, ')
|
||||
..write('preferences: $preferences')
|
||||
..write(')'))
|
||||
.toString();
|
||||
}
|
||||
|
@ -108,7 +120,9 @@ class User extends DataClass implements Insertable<User> {
|
|||
int get hashCode => $mrjf($mrjc(
|
||||
id.hashCode,
|
||||
$mrjc(
|
||||
name.hashCode, $mrjc(birthDate.hashCode, profilePicture.hashCode))));
|
||||
name.hashCode,
|
||||
$mrjc(birthDate.hashCode,
|
||||
$mrjc(profilePicture.hashCode, preferences.hashCode)))));
|
||||
@override
|
||||
bool operator ==(other) =>
|
||||
identical(this, other) ||
|
||||
|
@ -116,7 +130,8 @@ class User extends DataClass implements Insertable<User> {
|
|||
other.id == id &&
|
||||
other.name == name &&
|
||||
other.birthDate == birthDate &&
|
||||
other.profilePicture == profilePicture);
|
||||
other.profilePicture == profilePicture &&
|
||||
other.preferences == preferences);
|
||||
}
|
||||
|
||||
class UsersCompanion extends UpdateCompanion<User> {
|
||||
|
@ -124,11 +139,13 @@ class UsersCompanion extends UpdateCompanion<User> {
|
|||
final Value<String> name;
|
||||
final Value<DateTime> birthDate;
|
||||
final Value<Uint8List> profilePicture;
|
||||
final Value<Preferences> preferences;
|
||||
const UsersCompanion({
|
||||
this.id = const Value.absent(),
|
||||
this.name = const Value.absent(),
|
||||
this.birthDate = const Value.absent(),
|
||||
this.profilePicture = const Value.absent(),
|
||||
this.preferences = const Value.absent(),
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -182,8 +199,23 @@ class $UsersTable extends Users with TableInfo<$UsersTable, User> {
|
|||
);
|
||||
}
|
||||
|
||||
final VerificationMeta _preferencesMeta =
|
||||
const VerificationMeta('preferences');
|
||||
GeneratedTextColumn _preferences;
|
||||
@override
|
||||
List<GeneratedColumn> get $columns => [id, name, birthDate, profilePicture];
|
||||
GeneratedTextColumn get preferences =>
|
||||
_preferences ??= _constructPreferences();
|
||||
GeneratedTextColumn _constructPreferences() {
|
||||
return GeneratedTextColumn(
|
||||
'preferences',
|
||||
$tableName,
|
||||
true,
|
||||
);
|
||||
}
|
||||
|
||||
@override
|
||||
List<GeneratedColumn> get $columns =>
|
||||
[id, name, birthDate, profilePicture, preferences];
|
||||
@override
|
||||
$UsersTable get asDslTable => this;
|
||||
@override
|
||||
|
@ -219,6 +251,7 @@ class $UsersTable extends Users with TableInfo<$UsersTable, User> {
|
|||
} else if (profilePicture.isRequired && isInserting) {
|
||||
context.missing(_profilePictureMeta);
|
||||
}
|
||||
context.handle(_preferencesMeta, const VerificationResult.success());
|
||||
return context;
|
||||
}
|
||||
|
||||
|
@ -246,6 +279,11 @@ class $UsersTable extends Users with TableInfo<$UsersTable, User> {
|
|||
map['profile_picture'] =
|
||||
Variable<Uint8List, BlobType>(d.profilePicture.value);
|
||||
}
|
||||
if (d.preferences.present) {
|
||||
final converter = $UsersTable.$converter0;
|
||||
map['preferences'] =
|
||||
Variable<String, StringType>(converter.mapToSql(d.preferences.value));
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
|
@ -253,6 +291,8 @@ class $UsersTable extends Users with TableInfo<$UsersTable, User> {
|
|||
$UsersTable createAlias(String alias) {
|
||||
return $UsersTable(_db, alias);
|
||||
}
|
||||
|
||||
static PreferenceConverter $converter0 = const PreferenceConverter();
|
||||
}
|
||||
|
||||
class Friendship extends DataClass implements Insertable<Friendship> {
|
||||
|
@ -470,6 +510,13 @@ class UserCountResult {
|
|||
});
|
||||
}
|
||||
|
||||
class SettingsForResult {
|
||||
final Preferences preferences;
|
||||
SettingsForResult({
|
||||
this.preferences,
|
||||
});
|
||||
}
|
||||
|
||||
abstract class _$Database extends GeneratedDatabase {
|
||||
_$Database(QueryExecutor e) : super(const SqlTypeSystem.withDefaults(), e);
|
||||
$UsersTable _users;
|
||||
|
@ -482,6 +529,8 @@ abstract class _$Database extends GeneratedDatabase {
|
|||
name: row.readString('name'),
|
||||
birthDate: row.readDateTime('birth_date'),
|
||||
profilePicture: row.readBlob('profile_picture'),
|
||||
preferences:
|
||||
$UsersTable.$converter0.mapToDart(row.readString('preferences')),
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -555,6 +604,34 @@ abstract class _$Database extends GeneratedDatabase {
|
|||
.map((rows) => rows.map(_rowToUserCountResult).toList());
|
||||
}
|
||||
|
||||
SettingsForResult _rowToSettingsForResult(QueryRow row) {
|
||||
return SettingsForResult(
|
||||
preferences:
|
||||
$UsersTable.$converter0.mapToDart(row.readString('preferences')),
|
||||
);
|
||||
}
|
||||
|
||||
Future<List<SettingsForResult>> settingsFor(
|
||||
int user,
|
||||
{@Deprecated('No longer needed with Moor 1.6 - see the changelog for details')
|
||||
QueryEngine operateOn}) {
|
||||
return (operateOn ?? this).customSelect(
|
||||
'SELECT preferences FROM users WHERE id = :user',
|
||||
variables: [
|
||||
Variable.withInt(user),
|
||||
]).then((rows) => rows.map(_rowToSettingsForResult).toList());
|
||||
}
|
||||
|
||||
Stream<List<SettingsForResult>> watchSettingsFor(int user) {
|
||||
return customSelectStream('SELECT preferences FROM users WHERE id = :user',
|
||||
variables: [
|
||||
Variable.withInt(user),
|
||||
],
|
||||
readsFrom: {
|
||||
users
|
||||
}).map((rows) => rows.map(_rowToSettingsForResult).toList());
|
||||
}
|
||||
|
||||
@override
|
||||
List<TableInfo> get allTables => [users, friendships];
|
||||
}
|
||||
|
|
|
@ -0,0 +1,20 @@
|
|||
import 'package:test/test.dart';
|
||||
import 'package:tests/database/database.dart';
|
||||
|
||||
import 'suite.dart';
|
||||
|
||||
void customObjectTests(TestExecutor executor) {
|
||||
test('custom objects', () async {
|
||||
final db = Database(executor.createExecutor());
|
||||
|
||||
var preferences = await db.settingsFor(1);
|
||||
expect(preferences.single.preferences, isNull);
|
||||
|
||||
await db.updateSettings(1, Preferences(true));
|
||||
preferences = await db.settingsFor(1);
|
||||
|
||||
expect(preferences.single.preferences.receiveEmails, true);
|
||||
|
||||
await db.close();
|
||||
});
|
||||
}
|
|
@ -1,4 +1,5 @@
|
|||
import 'package:test/test.dart';
|
||||
import 'package:tests/data/sample_data.dart';
|
||||
import 'package:tests/database/database.dart';
|
||||
|
||||
import 'suite.dart';
|
||||
|
@ -10,5 +11,21 @@ void migrationTests(TestExecutor executor) {
|
|||
// we write 3 users when the database is created
|
||||
final count = await database.userCount();
|
||||
expect(count.single.cOUNTid, 3);
|
||||
|
||||
await database.close();
|
||||
});
|
||||
|
||||
test('saves and restores database', () async {
|
||||
var database = Database(executor.createExecutor(), schemaVersion: 1);
|
||||
await database.writeUser(People.florian);
|
||||
await database.close();
|
||||
|
||||
database = Database(executor.createExecutor(), schemaVersion: 2);
|
||||
|
||||
// the 3 initial users plus People.florian
|
||||
final count = await database.userCount();
|
||||
expect(count.single.cOUNTid, 4);
|
||||
|
||||
await database.close();
|
||||
});
|
||||
}
|
||||
|
|
|
@ -1,7 +1,9 @@
|
|||
import 'package:moor/moor.dart';
|
||||
import 'package:test/test.dart';
|
||||
import 'package:tests/suite/transactions.dart';
|
||||
|
||||
import 'package:tests/suite/migrations.dart';
|
||||
import 'custom_objects.dart';
|
||||
import 'migrations.dart';
|
||||
|
||||
abstract class TestExecutor {
|
||||
QueryExecutor createExecutor();
|
||||
|
@ -16,4 +18,6 @@ void runAllTests(TestExecutor executor) {
|
|||
});
|
||||
|
||||
migrationTests(executor);
|
||||
customObjectTests(executor);
|
||||
transactionTests(executor);
|
||||
}
|
||||
|
|
|
@ -0,0 +1,54 @@
|
|||
import 'package:test/test.dart';
|
||||
import 'package:tests/data/sample_data.dart';
|
||||
import 'package:tests/database/database.dart';
|
||||
|
||||
import 'suite.dart';
|
||||
|
||||
void transactionTests(TestExecutor executor) {
|
||||
test('transactions write data', () async {
|
||||
final db = Database(executor.createExecutor());
|
||||
|
||||
await db.transaction((_) async {
|
||||
final florianId = await db.writeUser(People.florian);
|
||||
print(florianId);
|
||||
|
||||
final dash = await db.getUserById(People.dashId);
|
||||
final florian = await db.getUserById(florianId);
|
||||
|
||||
await db.makeFriends(dash, florian, goodFriends: true);
|
||||
});
|
||||
|
||||
final countResult = await db.userCount();
|
||||
expect(countResult.single.cOUNTid, 4);
|
||||
|
||||
final friendsResult = await db.amountOfGoodFriends(People.dashId);
|
||||
expect(friendsResult.single.count, 1);
|
||||
|
||||
await db.close();
|
||||
});
|
||||
|
||||
test('transaction is rolled back then an exception occurs', () async {
|
||||
final db = Database(executor.createExecutor());
|
||||
|
||||
try {
|
||||
await db.transaction((_) async {
|
||||
final florianId = await db.writeUser(People.florian);
|
||||
|
||||
final dash = await db.getUserById(People.dashId);
|
||||
final florian = await db.getUserById(florianId);
|
||||
|
||||
await db.makeFriends(dash, florian, goodFriends: true);
|
||||
throw Exception('nope i made a mistake please rollback thank you');
|
||||
});
|
||||
fail('the transaction should have thrown!');
|
||||
} on Exception catch (_) {}
|
||||
|
||||
final countResult = await db.userCount();
|
||||
expect(countResult.single.cOUNTid, 3); // only the default folks
|
||||
|
||||
final friendsResult = await db.amountOfGoodFriends(People.dashId);
|
||||
expect(friendsResult.single.count, 0); // no friendship was inserted
|
||||
|
||||
await db.close();
|
||||
});
|
||||
}
|
|
@ -0,0 +1,11 @@
|
|||
# Files and directories created by pub
|
||||
.dart_tool/
|
||||
.packages
|
||||
# Remove the following pattern if you wish to check in your lock file
|
||||
pubspec.lock
|
||||
|
||||
# Conventional directory for build outputs
|
||||
build/
|
||||
|
||||
# Directory created by dartdoc
|
||||
doc/api/
|
|
@ -0,0 +1 @@
|
|||
platforms: [chrome, firefox]
|
|
@ -0,0 +1,13 @@
|
|||
name: web
|
||||
description: Run integration tests for Moor on the web
|
||||
environment:
|
||||
sdk: '>=2.4.0 <3.0.0'
|
||||
|
||||
dependencies:
|
||||
tests:
|
||||
path: ../tests
|
||||
|
||||
dev_dependencies:
|
||||
test: ^1.5.0
|
||||
build_runner:
|
||||
build_web_compilers:
|
|
@ -0,0 +1,25 @@
|
|||
@TestOn('browser')
|
||||
import 'dart:html';
|
||||
|
||||
import 'package:tests/tests.dart';
|
||||
import 'package:test/test.dart';
|
||||
import 'package:moor/moor_web.dart';
|
||||
|
||||
class WebExecutor extends TestExecutor {
|
||||
final name = 'db';
|
||||
|
||||
@override
|
||||
QueryExecutor createExecutor() {
|
||||
return WebDatabase(name);
|
||||
}
|
||||
|
||||
@override
|
||||
Future deleteData() {
|
||||
window.localStorage.clear();
|
||||
return Future.value();
|
||||
}
|
||||
}
|
||||
|
||||
void main() {
|
||||
runAllTests(WebExecutor());
|
||||
}
|
|
@ -0,0 +1,12 @@
|
|||
<!doctype html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title></title>
|
||||
<link rel="x-dart-test" href="integration_test.dart">
|
||||
<script src="sql-wasm.js"></script>
|
||||
<script src="packages/test/dart.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,209 @@
|
|||
|
||||
// We are modularizing this manually because the current modularize setting in Emscripten has some issues:
|
||||
// https://github.com/kripken/emscripten/issues/5820
|
||||
// In addition, When you use emcc's modularization, it still expects to export a global object called `Module`,
|
||||
// which is able to be used/called before the WASM is loaded.
|
||||
// The modularization below exports a promise that loads and resolves to the actual sql.js module.
|
||||
// That way, this module can't be used before the WASM is finished loading.
|
||||
|
||||
// We are going to define a function that a user will call to start loading initializing our Sql.js library
|
||||
// However, that function might be called multiple times, and on subsequent calls, we don't actually want it to instantiate a new instance of the Module
|
||||
// Instead, we want to return the previously loaded module
|
||||
|
||||
// TODO: Make this not declare a global if used in the browser
|
||||
var initSqlJsPromise = undefined;
|
||||
|
||||
var initSqlJs = function (moduleConfig) {
|
||||
|
||||
if (initSqlJsPromise){
|
||||
return initSqlJsPromise;
|
||||
}
|
||||
// If we're here, we've never called this function before
|
||||
initSqlJsPromise = new Promise((resolveModule, reject) => {
|
||||
|
||||
// We are modularizing this manually because the current modularize setting in Emscripten has some issues:
|
||||
// https://github.com/kripken/emscripten/issues/5820
|
||||
|
||||
// The way to affect the loading of emcc compiled modules is to create a variable called `Module` and add
|
||||
// properties to it, like `preRun`, `postRun`, etc
|
||||
// We are using that to get notified when the WASM has finished loading.
|
||||
// Only then will we return our promise
|
||||
|
||||
// If they passed in a moduleConfig object, use that
|
||||
// Otherwise, initialize Module to the empty object
|
||||
var Module = typeof moduleConfig !== 'undefined' ? moduleConfig : {};
|
||||
|
||||
// EMCC only allows for a single onAbort function (not an array of functions)
|
||||
// So if the user defined their own onAbort function, we remember it and call it
|
||||
var originalOnAbortFunction = Module['onAbort'];
|
||||
Module['onAbort'] = function (errorThatCausedAbort) {
|
||||
reject(new Error(errorThatCausedAbort));
|
||||
if (originalOnAbortFunction){
|
||||
originalOnAbortFunction(errorThatCausedAbort);
|
||||
}
|
||||
};
|
||||
|
||||
Module['postRun'] = Module['postRun'] || [];
|
||||
Module['postRun'].push(function () {
|
||||
// When Emscripted calls postRun, this promise resolves with the built Module
|
||||
resolveModule(Module);
|
||||
});
|
||||
|
||||
// There is a section of code in the emcc-generated code below that looks like this:
|
||||
// (Note that this is lowercase `module`)
|
||||
// if (typeof module !== 'undefined') {
|
||||
// module['exports'] = Module;
|
||||
// }
|
||||
// When that runs, it's going to overwrite our own modularization export efforts in shell-post.js!
|
||||
// The only way to tell emcc not to emit it is to pass the MODULARIZE=1 or MODULARIZE_INSTANCE=1 flags,
|
||||
// but that carries with it additional unnecessary baggage/bugs we don't want either.
|
||||
// So, we have three options:
|
||||
// 1) We undefine `module`
|
||||
// 2) We remember what `module['exports']` was at the beginning of this function and we restore it later
|
||||
// 3) We write a script to remove those lines of code as part of the Make process.
|
||||
//
|
||||
// Since those are the only lines of code that care about module, we will undefine it. It's the most straightforward
|
||||
// of the options, and has the side effect of reducing emcc's efforts to modify the module if its output were to change in the future.
|
||||
// That's a nice side effect since we're handling the modularization efforts ourselves
|
||||
module = undefined;
|
||||
|
||||
// The emcc-generated code and shell-post.js code goes below,
|
||||
// meaning that all of it runs inside of this promise. If anything throws an exception, our promise will abort
|
||||
var aa;var f;f||(f=typeof Module !== 'undefined' ? Module : {});
|
||||
var va=function(){var a;var b=h(4);var c={};var d=function(){function a(a,b){this.fb=a;this.db=b;this.nb=1;this.Eb=[]}a.prototype.bind=function(a){if(!this.fb)throw"Statement closed";this.reset();return Array.isArray(a)?this.lc(a):this.mc(a)};a.prototype.step=function(){var a;if(!this.fb)throw"Statement closed";this.nb=1;switch(a=Tb(this.fb)){case c.hc:return!0;case c.DONE:return!1;default:return this.db.handleError(a)}};a.prototype.sc=function(a){null==a&&(a=this.nb++);return Ub(this.fb,a)};a.prototype.tc=
|
||||
function(a){null==a&&(a=this.nb++);return Vb(this.fb,a)};a.prototype.getBlob=function(a){var b;null==a&&(a=this.nb++);var c=Wb(this.fb,a);var d=Xb(this.fb,a);var e=new Uint8Array(c);for(a=b=0;0<=c?b<c:b>c;a=0<=c?++b:--b)e[a]=l[d+a];return e};a.prototype.get=function(a){var b,d;null!=a&&this.bind(a)&&this.step();var e=[];a=b=0;for(d=ib(this.fb);0<=d?b<d:b>d;a=0<=d?++b:--b)switch(Yb(this.fb,a)){case c.fc:case c.FLOAT:e.push(this.sc(a));break;case c.ic:e.push(this.tc(a));break;case c.Zb:e.push(this.getBlob(a));
|
||||
break;default:e.push(null)}return e};a.prototype.getColumnNames=function(){var a,b;var c=[];var d=a=0;for(b=ib(this.fb);0<=b?a<b:a>b;d=0<=b?++a:--a)c.push(Zb(this.fb,d));return c};a.prototype.getAsObject=function(a){var b,c;var d=this.get(a);var e=this.getColumnNames();var g={};a=b=0;for(c=e.length;b<c;a=++b){var Sb=e[a];g[Sb]=d[a]}return g};a.prototype.run=function(a){null!=a&&this.bind(a);this.step();return this.reset()};a.prototype.pc=function(a,b){var c;null==b&&(b=this.nb++);a=ba(a);this.Eb.push(c=
|
||||
ea(a));this.db.handleError(ca(this.fb,b,c,a.length-1,0))};a.prototype.kc=function(a,b){var c;null==b&&(b=this.nb++);this.Eb.push(c=ea(a));this.db.handleError(Ia(this.fb,b,c,a.length,0))};a.prototype.oc=function(a,b){null==b&&(b=this.nb++);this.db.handleError((a===(a|0)?$b:ac)(this.fb,b,a))};a.prototype.nc=function(a){null==a&&(a=this.nb++);Ia(this.fb,a,0,0,0)};a.prototype.Qb=function(a,b){null==b&&(b=this.nb++);switch(typeof a){case "string":this.pc(a,b);break;case "number":case "boolean":this.oc(a+
|
||||
0,b);break;case "object":if(null===a)this.nc(b);else if(null!=a.length)this.kc(a,b);else throw"Wrong API use : tried to bind a value of an unknown type ("+a+").";}};a.prototype.mc=function(a){var b;for(b in a){var c=a[b];var d=bc(this.fb,b);0!==d&&this.Qb(c,d)}return!0};a.prototype.lc=function(a){var b,c;var d=b=0;for(c=a.length;b<c;d=++b){var e=a[d];this.Qb(e,d+1)}return!0};a.prototype.reset=function(){this.freemem();return cc(this.fb)===c.xb&&dc(this.fb)===c.xb};a.prototype.freemem=function(){for(var a;a=
|
||||
this.Eb.pop();)ha(a);return null};a.prototype.free=function(){this.freemem();var a=ec(this.fb)===c.xb;delete this.db.Bb[this.fb];this.fb=da;return a};return a}();var e=function(){function a(a){this.filename="dbfile_"+(4294967295*Math.random()>>>0);if(null!=a){var c=this.filename,d=c?n("/",c):"/";c=ia(!0,!0);d=ja(d,(void 0!==c?c:438)&4095|32768,0);if(a){if("string"===typeof a){for(var e=Array(a.length),k=0,m=a.length;k<m;++k)e[k]=a.charCodeAt(k);a=e}ka(d,c|146);e=p(d,"w");la(e,a,0,a.length,0,void 0);
|
||||
ma(e);ka(d,c)}}this.handleError(g(this.filename,b));this.db=q(b,"i32");fc(this.db);this.Bb={}}a.prototype.run=function(a,c){if(!this.db)throw"Database closed";c?(a=this.prepare(a,c),a.step(),a.free()):this.handleError(m(this.db,a,0,0,b));return this};a.prototype.exec=function(a){if(!this.db)throw"Database closed";var c=na();var e=oa(a)+1;var g=h(e);r(a,l,g,e);a=g;e=h(4);for(g=[];q(a,"i8")!==da;){pa(b);pa(e);this.handleError(fa(this.db,a,-1,b,e));var k=q(b,"i32");a=q(e,"i32");if(k!==da){var m=new d(k,
|
||||
this);for(k=null;m.step();)null===k&&(k={columns:m.getColumnNames(),values:[]},g.push(k)),k.values.push(m.get());m.free()}}qa(c);return g};a.prototype.each=function(a,b,c,d){"function"===typeof b&&(d=c,c=b,b=void 0);for(a=this.prepare(a,b);a.step();)c(a.getAsObject());a.free();if("function"===typeof d)return d()};a.prototype.prepare=function(a,c){pa(b);this.handleError(z(this.db,a,-1,b,da));a=q(b,"i32");if(a===da)throw"Nothing to prepare";var e=new d(a,this);null!=c&&e.bind(c);return this.Bb[a]=e};
|
||||
a.prototype["export"]=function(){var a;var c=this.Bb;for(e in c){var d=c[e];d.free()}this.handleError(k(this.db));d=this.filename;var e=e={encoding:"binary"};e.flags=e.flags||"r";e.encoding=e.encoding||"binary";if("utf8"!==e.encoding&&"binary"!==e.encoding)throw Error('Invalid encoding type "'+e.encoding+'"');c=p(d,e.flags);d=ra(d).size;var m=new Uint8Array(d);sa(c,m,0,d,0);"utf8"===e.encoding?a=t(m,0):"binary"===e.encoding&&(a=m);ma(c);this.handleError(g(this.filename,b));this.db=q(b,"i32");return a};
|
||||
a.prototype.close=function(){var a;var b=this.Bb;for(a in b){var c=b[a];c.free()}this.handleError(k(this.db));ta("/"+this.filename);return this.db=null};a.prototype.handleError=function(a){if(a===c.xb)return null;a=hc(this.db);throw Error(a);};a.prototype.getRowsModified=function(){return y(this.db)};a.prototype.create_function=function(a,b){var d=ua(function(a,c,d){var e,g;var k=[];for(e=g=0;0<=c?g<c:g>c;e=0<=c?++g:--g){var m=q(d+4*e,"i32");var z=jc(m);e=function(){switch(!1){case 1!==z:return kc;
|
||||
case 2!==z:return lc;case 3!==z:return mc;case 4!==z:return function(a){var b,c;var d=nc(a);var e=oc(a);a=new Uint8Array(d);for(b=c=0;0<=d?c<d:c>d;b=0<=d?++c:--c)a[b]=l[e+b];return a};default:return function(){return null}}}();e=e(m);k.push(e)}if(c=b.apply(null,k))switch(typeof c){case "number":return pc(a,c);case "string":return qc(a,c,-1,-1)}else return rc(a)});this.handleError(sc(this.db,a,b.length,c.jc,0,d,0,0,0));return this};return a}();var g=f.cwrap("sqlite3_open","number",["string","number"]);
|
||||
var k=f.cwrap("sqlite3_close_v2","number",["number"]);var m=f.cwrap("sqlite3_exec","number",["number","string","number","number","number"]);f.cwrap("sqlite3_free","",["number"]);var y=f.cwrap("sqlite3_changes","number",["number"]);var z=f.cwrap("sqlite3_prepare_v2","number",["number","string","number","number","number"]);var fa=f.cwrap("sqlite3_prepare_v2","number",["number","number","number","number","number"]);var ca=f.cwrap("sqlite3_bind_text","number",["number","number","number","number","number"]);
|
||||
var Ia=f.cwrap("sqlite3_bind_blob","number",["number","number","number","number","number"]);var ac=f.cwrap("sqlite3_bind_double","number",["number","number","number"]);var $b=f.cwrap("sqlite3_bind_int","number",["number","number","number"]);var bc=f.cwrap("sqlite3_bind_parameter_index","number",["number","string"]);var Tb=f.cwrap("sqlite3_step","number",["number"]);var hc=f.cwrap("sqlite3_errmsg","string",["number"]);var ib=f.cwrap("sqlite3_data_count","number",["number"]);var Ub=f.cwrap("sqlite3_column_double",
|
||||
"number",["number","number"]);var Vb=f.cwrap("sqlite3_column_text","string",["number","number"]);var Xb=f.cwrap("sqlite3_column_blob","number",["number","number"]);var Wb=f.cwrap("sqlite3_column_bytes","number",["number","number"]);var Yb=f.cwrap("sqlite3_column_type","number",["number","number"]);var Zb=f.cwrap("sqlite3_column_name","string",["number","number"]);var dc=f.cwrap("sqlite3_reset","number",["number"]);var cc=f.cwrap("sqlite3_clear_bindings","number",["number"]);var ec=f.cwrap("sqlite3_finalize",
|
||||
"number",["number"]);var sc=f.cwrap("sqlite3_create_function_v2","number","number string number number number number number number number".split(" "));var jc=f.cwrap("sqlite3_value_type","number",["number"]);var nc=f.cwrap("sqlite3_value_bytes","number",["number"]);var mc=f.cwrap("sqlite3_value_text","string",["number"]);var kc=f.cwrap("sqlite3_value_int","number",["number"]);var oc=f.cwrap("sqlite3_value_blob","number",["number"]);var lc=f.cwrap("sqlite3_value_double","number",["number"]);var pc=
|
||||
f.cwrap("sqlite3_result_double","",["number","number"]);var rc=f.cwrap("sqlite3_result_null","",["number"]);var qc=f.cwrap("sqlite3_result_text","",["number","string","number","number"]);var fc=f.cwrap("RegisterExtensionFunctions","number",["number"]);this.SQL={Database:e};for(a in this.SQL)f[a]=this.SQL[a];var da=0;c.xb=0;c.we=1;c.Pe=2;c.Ze=3;c.Cc=4;c.Ec=5;c.Se=6;c.NOMEM=7;c.bf=8;c.Qe=9;c.Re=10;c.Hc=11;c.NOTFOUND=12;c.Oe=13;c.Fc=14;c.$e=15;c.EMPTY=16;c.cf=17;c.df=18;c.Gc=19;c.Te=20;c.Ue=21;c.Ve=
|
||||
22;c.Dc=23;c.Ne=24;c.af=25;c.We=26;c.Xe=27;c.ef=28;c.hc=100;c.DONE=101;c.fc=1;c.FLOAT=2;c.ic=3;c.Zb=4;c.Ye=5;c.jc=1}.bind(this);f.preRun=f.preRun||[];f.preRun.push(va);var wa={},u;for(u in f)f.hasOwnProperty(u)&&(wa[u]=f[u]);f.arguments=[];f.thisProgram="./this.program";f.quit=function(a,b){throw b;};f.preRun=[];f.postRun=[];var v=!1,w=!1,x=!1,xa=!1;v="object"===typeof window;w="function"===typeof importScripts;x="object"===typeof process&&"function"===typeof require&&!v&&!w;xa=!v&&!x&&!w;var A="";
|
||||
if(x){A=__dirname+"/";var ya,za;f.read=function(a,b){ya||(ya=require("fs"));za||(za=require("path"));a=za.normalize(a);a=ya.readFileSync(a);return b?a:a.toString()};f.readBinary=function(a){a=f.read(a,!0);a.buffer||(a=new Uint8Array(a));assert(a.buffer);return a};1<process.argv.length&&(f.thisProgram=process.argv[1].replace(/\\/g,"/"));f.arguments=process.argv.slice(2);"undefined"!==typeof module&&(module.exports=f);process.on("unhandledRejection",B);f.quit=function(a){process.exit(a)};f.inspect=
|
||||
function(){return"[Emscripten Module object]"}}else if(xa)"undefined"!=typeof read&&(f.read=function(a){return read(a)}),f.readBinary=function(a){if("function"===typeof readbuffer)return new Uint8Array(readbuffer(a));a=read(a,"binary");assert("object"===typeof a);return a},"undefined"!=typeof scriptArgs?f.arguments=scriptArgs:"undefined"!=typeof arguments&&(f.arguments=arguments),"function"===typeof quit&&(f.quit=function(a){quit(a)});else if(v||w)w?A=self.location.href:document.currentScript&&(A=
|
||||
document.currentScript.src),A=0!==A.indexOf("blob:")?A.substr(0,A.lastIndexOf("/")+1):"",f.read=function(a){var b=new XMLHttpRequest;b.open("GET",a,!1);b.send(null);return b.responseText},w&&(f.readBinary=function(a){var b=new XMLHttpRequest;b.open("GET",a,!1);b.responseType="arraybuffer";b.send(null);return new Uint8Array(b.response)}),f.readAsync=function(a,b,c){var d=new XMLHttpRequest;d.open("GET",a,!0);d.responseType="arraybuffer";d.onload=function(){200==d.status||0==d.status&&d.response?b(d.response):
|
||||
c()};d.onerror=c;d.send(null)},f.setWindowTitle=function(a){document.title=a};var Aa=f.print||("undefined"!==typeof console?console.log.bind(console):"undefined"!==typeof print?print:null),C=f.printErr||("undefined"!==typeof printErr?printErr:"undefined"!==typeof console&&console.warn.bind(console)||Aa);for(u in wa)wa.hasOwnProperty(u)&&(f[u]=wa[u]);wa=void 0;function Ba(a){var b=D[Ca>>2];a=b+a+15&-16;if(a<=Da())D[Ca>>2]=a;else if(!Ea(a))return 0;return b}
|
||||
var Fa={"f64-rem":function(a,b){return a%b},"debugger":function(){debugger}},Ga=1,E=Array(64);function ua(a){for(var b=0;64>b;b++)if(!E[b])return E[b]=a,Ga+b;throw"Finished up all reserved function pointers. Use a higher value for RESERVED_FUNCTION_POINTERS.";}"object"!==typeof WebAssembly&&C("no native wasm support detected");
|
||||
function q(a,b){b=b||"i8";"*"===b.charAt(b.length-1)&&(b="i32");switch(b){case "i1":return l[a>>0];case "i8":return l[a>>0];case "i16":return Ha[a>>1];case "i32":return D[a>>2];case "i64":return D[a>>2];case "float":return Ja[a>>2];case "double":return Ka[a>>3];default:B("invalid type for getValue: "+b)}return null}var La,Ma=!1;function assert(a,b){a||B("Assertion failed: "+b)}function Na(a){var b=f["_"+a];assert(b,"Cannot call unknown function "+a+", make sure it is exported");return b}
|
||||
function Oa(a,b,c,d){var e={string:function(a){var b=0;if(null!==a&&void 0!==a&&0!==a){var c=(a.length<<2)+1;b=h(c);r(a,F,b,c)}return b},array:function(a){var b=h(a.length);l.set(a,b);return b}},g=Na(a),k=[];a=0;if(d)for(var m=0;m<d.length;m++){var y=e[c[m]];y?(0===a&&(a=na()),k[m]=y(d[m])):k[m]=d[m]}c=g.apply(null,k);c=function(a){return"string"===b?G(a):"boolean"===b?!!a:a}(c);0!==a&&qa(a);return c}
|
||||
function pa(a){var b="i32";"*"===b.charAt(b.length-1)&&(b="i32");switch(b){case "i1":l[a>>0]=0;break;case "i8":l[a>>0]=0;break;case "i16":Ha[a>>1]=0;break;case "i32":D[a>>2]=0;break;case "i64":aa=[0,1<=+Pa(0)?~~+Qa(0)>>>0:0];D[a>>2]=aa[0];D[a+4>>2]=aa[1];break;case "float":Ja[a>>2]=0;break;case "double":Ka[a>>3]=0;break;default:B("invalid type for setValue: "+b)}}var Ra=0,Sa=3;
|
||||
function ea(a){var b=Ra;if("number"===typeof a){var c=!0;var d=a}else c=!1,d=a.length;b=b==Sa?e:[Ta,h,Ba][b](Math.max(d,1));if(c){var e=b;assert(0==(b&3));for(a=b+(d&-4);e<a;e+=4)D[e>>2]=0;for(a=b+d;e<a;)l[e++>>0]=0;return b}a.subarray||a.slice?F.set(a,b):F.set(new Uint8Array(a),b);return b}var Ua="undefined"!==typeof TextDecoder?new TextDecoder("utf8"):void 0;
|
||||
function t(a,b,c){var d=b+c;for(c=b;a[c]&&!(c>=d);)++c;if(16<c-b&&a.subarray&&Ua)return Ua.decode(a.subarray(b,c));for(d="";b<c;){var e=a[b++];if(e&128){var g=a[b++]&63;if(192==(e&224))d+=String.fromCharCode((e&31)<<6|g);else{var k=a[b++]&63;e=224==(e&240)?(e&15)<<12|g<<6|k:(e&7)<<18|g<<12|k<<6|a[b++]&63;65536>e?d+=String.fromCharCode(e):(e-=65536,d+=String.fromCharCode(55296|e>>10,56320|e&1023))}}else d+=String.fromCharCode(e)}return d}function G(a){return a?t(F,a,void 0):""}
|
||||
function r(a,b,c,d){if(!(0<d))return 0;var e=c;d=c+d-1;for(var g=0;g<a.length;++g){var k=a.charCodeAt(g);if(55296<=k&&57343>=k){var m=a.charCodeAt(++g);k=65536+((k&1023)<<10)|m&1023}if(127>=k){if(c>=d)break;b[c++]=k}else{if(2047>=k){if(c+1>=d)break;b[c++]=192|k>>6}else{if(65535>=k){if(c+2>=d)break;b[c++]=224|k>>12}else{if(c+3>=d)break;b[c++]=240|k>>18;b[c++]=128|k>>12&63}b[c++]=128|k>>6&63}b[c++]=128|k&63}}b[c]=0;return c-e}
|
||||
function oa(a){for(var b=0,c=0;c<a.length;++c){var d=a.charCodeAt(c);55296<=d&&57343>=d&&(d=65536+((d&1023)<<10)|a.charCodeAt(++c)&1023);127>=d?++b:b=2047>=d?b+2:65535>=d?b+3:b+4}return b}"undefined"!==typeof TextDecoder&&new TextDecoder("utf-16le");function Va(a){return a.replace(/__Z[\w\d_]+/g,function(a){return a===a?a:a+" ["+a+"]"})}function Wa(a){0<a%65536&&(a+=65536-a%65536);return a}var buffer,l,F,Ha,D,Ja,Ka;
|
||||
function Xa(){f.HEAP8=l=new Int8Array(buffer);f.HEAP16=Ha=new Int16Array(buffer);f.HEAP32=D=new Int32Array(buffer);f.HEAPU8=F=new Uint8Array(buffer);f.HEAPU16=new Uint16Array(buffer);f.HEAPU32=new Uint32Array(buffer);f.HEAPF32=Ja=new Float32Array(buffer);f.HEAPF64=Ka=new Float64Array(buffer)}var Ca=60128,Ya=f.TOTAL_MEMORY||16777216;5242880>Ya&&C("TOTAL_MEMORY should be larger than TOTAL_STACK, was "+Ya+"! (TOTAL_STACK=5242880)");
|
||||
f.buffer?buffer=f.buffer:"object"===typeof WebAssembly&&"function"===typeof WebAssembly.Memory?(La=new WebAssembly.Memory({initial:Ya/65536}),buffer=La.buffer):buffer=new ArrayBuffer(Ya);Xa();D[Ca>>2]=5303264;function Za(a){for(;0<a.length;){var b=a.shift();if("function"==typeof b)b();else{var c=b.rc;"number"===typeof c?void 0===b.Fb?f.dynCall_v(c):f.dynCall_vi(c,b.Fb):c(void 0===b.Fb?null:b.Fb)}}}var $a=[],ab=[],bb=[],cb=[],db=!1;function eb(){var a=f.preRun.shift();$a.unshift(a)}
|
||||
var Pa=Math.abs,Qa=Math.ceil,H=0,fb=null,gb=null;f.preloadedImages={};f.preloadedAudios={};function hb(){var a=I;return String.prototype.startsWith?a.startsWith("data:application/octet-stream;base64,"):0===a.indexOf("data:application/octet-stream;base64,")}var I="sql-wasm.wasm";if(!hb()){var jb=I;I=f.locateFile?f.locateFile(jb,A):A+jb}
|
||||
function kb(){try{if(f.wasmBinary)return new Uint8Array(f.wasmBinary);if(f.readBinary)return f.readBinary(I);throw"both async and sync fetching of the wasm failed";}catch(a){B(a)}}function lb(){return f.wasmBinary||!v&&!w||"function"!==typeof fetch?new Promise(function(a){a(kb())}):fetch(I,{credentials:"same-origin"}).then(function(a){if(!a.ok)throw"failed to load wasm binary file at '"+I+"'";return a.arrayBuffer()}).catch(function(){return kb()})}
|
||||
function mb(a){function b(a){f.asm=a.exports;H--;f.monitorRunDependencies&&f.monitorRunDependencies(H);0==H&&(null!==fb&&(clearInterval(fb),fb=null),gb&&(a=gb,gb=null,a()))}function c(a){b(a.instance)}function d(a){lb().then(function(a){return WebAssembly.instantiate(a,e)}).then(a,function(a){C("failed to asynchronously prepare wasm: "+a);B(a)})}var e={env:a,global:{NaN:NaN,Infinity:Infinity},"global.Math":Math,asm2wasm:Fa};H++;f.monitorRunDependencies&&f.monitorRunDependencies(H);if(f.instantiateWasm)try{return f.instantiateWasm(e,
|
||||
b)}catch(g){return C("Module.instantiateWasm callback failed with error: "+g),!1}f.wasmBinary||"function"!==typeof WebAssembly.instantiateStreaming||hb()||"function"!==typeof fetch?d(c):WebAssembly.instantiateStreaming(fetch(I,{credentials:"same-origin"}),e).then(c,function(a){C("wasm streaming compile failed: "+a);C("falling back to ArrayBuffer instantiation");d(c)});return{}}
|
||||
f.asm=function(a,b){b.memory=La;b.table=new WebAssembly.Table({initial:2560,maximum:2560,element:"anyfunc"});b.__memory_base=1024;b.__table_base=0;return mb(b)};ab.push({rc:function(){nb()}});var J={};
|
||||
function ob(a){if(ob.rb){var b=D[a>>2];var c=D[b>>2]}else ob.rb=!0,J.USER=J.LOGNAME="web_user",J.PATH="/",J.PWD="/",J.HOME="/home/web_user",J.LANG="C.UTF-8",J._=f.thisProgram,c=db?Ta(1024):Ba(1024),b=db?Ta(256):Ba(256),D[b>>2]=c,D[a>>2]=b;a=[];var d=0,e;for(e in J)if("string"===typeof J[e]){var g=e+"="+J[e];a.push(g);d+=g.length}if(1024<d)throw Error("Environment size exceeded TOTAL_ENV_SIZE!");for(e=0;e<a.length;e++){d=g=a[e];for(var k=c,m=0;m<d.length;++m)l[k++>>0]=d.charCodeAt(m);l[k>>0]=0;D[b+
|
||||
4*e>>2]=c;c+=g.length+1}D[b+4*a.length>>2]=0}function pb(a){f.___errno_location&&(D[f.___errno_location()>>2]=a);return a}function qb(a,b){for(var c=0,d=a.length-1;0<=d;d--){var e=a[d];"."===e?a.splice(d,1):".."===e?(a.splice(d,1),c++):c&&(a.splice(d,1),c--)}if(b)for(;c;c--)a.unshift("..");return a}function rb(a){var b="/"===a.charAt(0),c="/"===a.substr(-1);(a=qb(a.split("/").filter(function(a){return!!a}),!b).join("/"))||b||(a=".");a&&c&&(a+="/");return(b?"/":"")+a}
|
||||
function sb(a){var b=/^(\/?|)([\s\S]*?)((?:\.{1,2}|[^\/]+?|)(\.[^.\/]*|))(?:[\/]*)$/.exec(a).slice(1);a=b[0];b=b[1];if(!a&&!b)return".";b&&(b=b.substr(0,b.length-1));return a+b}function tb(a){if("/"===a)return"/";var b=a.lastIndexOf("/");return-1===b?a:a.substr(b+1)}function ub(){var a=Array.prototype.slice.call(arguments,0);return rb(a.join("/"))}function n(a,b){return rb(a+"/"+b)}
|
||||
function vb(){for(var a="",b=!1,c=arguments.length-1;-1<=c&&!b;c--){b=0<=c?arguments[c]:"/";if("string"!==typeof b)throw new TypeError("Arguments to path.resolve must be strings");if(!b)return"";a=b+"/"+a;b="/"===b.charAt(0)}a=qb(a.split("/").filter(function(a){return!!a}),!b).join("/");return(b?"/":"")+a||"."}var wb=[];function xb(a,b){wb[a]={input:[],output:[],ub:b};yb(a,zb)}
|
||||
var zb={open:function(a){var b=wb[a.node.rdev];if(!b)throw new K(L.Cb);a.tty=b;a.seekable=!1},close:function(a){a.tty.ub.flush(a.tty)},flush:function(a){a.tty.ub.flush(a.tty)},read:function(a,b,c,d){if(!a.tty||!a.tty.ub.Xb)throw new K(L.Ob);for(var e=0,g=0;g<d;g++){try{var k=a.tty.ub.Xb(a.tty)}catch(m){throw new K(L.Lb);}if(void 0===k&&0===e)throw new K(L.ac);if(null===k||void 0===k)break;e++;b[c+g]=k}e&&(a.node.timestamp=Date.now());return e},write:function(a,b,c,d){if(!a.tty||!a.tty.ub.Ib)throw new K(L.Ob);
|
||||
try{for(var e=0;e<d;e++)a.tty.ub.Ib(a.tty,b[c+e])}catch(g){throw new K(L.Lb);}d&&(a.node.timestamp=Date.now());return e}},Ab={Xb:function(a){if(!a.input.length){var b=null;if(x){var c=new Buffer(256),d=0,e=process.stdin.fd;if("win32"!=process.platform){var g=!1;try{e=fs.openSync("/dev/stdin","r"),g=!0}catch(k){}}try{d=fs.readSync(e,c,0,256,null)}catch(k){if(-1!=k.toString().indexOf("EOF"))d=0;else throw k;}g&&fs.closeSync(e);0<d?b=c.slice(0,d).toString("utf-8"):b=null}else"undefined"!=typeof window&&
|
||||
"function"==typeof window.prompt?(b=window.prompt("Input: "),null!==b&&(b+="\n")):"function"==typeof readline&&(b=readline(),null!==b&&(b+="\n"));if(!b)return null;a.input=ba(b,!0)}return a.input.shift()},Ib:function(a,b){null===b||10===b?(Aa(t(a.output,0)),a.output=[]):0!=b&&a.output.push(b)},flush:function(a){a.output&&0<a.output.length&&(Aa(t(a.output,0)),a.output=[])}},Bb={Ib:function(a,b){null===b||10===b?(C(t(a.output,0)),a.output=[]):0!=b&&a.output.push(b)},flush:function(a){a.output&&0<a.output.length&&
|
||||
(C(t(a.output,0)),a.output=[])}},M={mb:null,jb:function(){return M.createNode(null,"/",16895,0)},createNode:function(a,b,c,d){if(24576===(c&61440)||4096===(c&61440))throw new K(L.dc);M.mb||(M.mb={dir:{node:{lb:M.ab.lb,hb:M.ab.hb,lookup:M.ab.lookup,vb:M.ab.vb,rename:M.ab.rename,unlink:M.ab.unlink,rmdir:M.ab.rmdir,readdir:M.ab.readdir,symlink:M.ab.symlink},stream:{ob:M.cb.ob}},file:{node:{lb:M.ab.lb,hb:M.ab.hb},stream:{ob:M.cb.ob,read:M.cb.read,write:M.cb.write,Pb:M.cb.Pb,zb:M.cb.zb,Ab:M.cb.Ab}},link:{node:{lb:M.ab.lb,
|
||||
hb:M.ab.hb,readlink:M.ab.readlink},stream:{}},Sb:{node:{lb:M.ab.lb,hb:M.ab.hb},stream:Cb}});c=Db(a,b,c,d);N(c.mode)?(c.ab=M.mb.dir.node,c.cb=M.mb.dir.stream,c.bb={}):32768===(c.mode&61440)?(c.ab=M.mb.file.node,c.cb=M.mb.file.stream,c.gb=0,c.bb=null):40960===(c.mode&61440)?(c.ab=M.mb.link.node,c.cb=M.mb.link.stream):8192===(c.mode&61440)&&(c.ab=M.mb.Sb.node,c.cb=M.mb.Sb.stream);c.timestamp=Date.now();a&&(a.bb[b]=c);return c},ff:function(a){if(a.bb&&a.bb.subarray){for(var b=[],c=0;c<a.gb;++c)b.push(a.bb[c]);
|
||||
return b}return a.bb},gf:function(a){return a.bb?a.bb.subarray?a.bb.subarray(0,a.gb):new Uint8Array(a.bb):new Uint8Array},Tb:function(a,b){var c=a.bb?a.bb.length:0;c>=b||(b=Math.max(b,c*(1048576>c?2:1.125)|0),0!=c&&(b=Math.max(b,256)),c=a.bb,a.bb=new Uint8Array(b),0<a.gb&&a.bb.set(c.subarray(0,a.gb),0))},yc:function(a,b){if(a.gb!=b)if(0==b)a.bb=null,a.gb=0;else{if(!a.bb||a.bb.subarray){var c=a.bb;a.bb=new Uint8Array(new ArrayBuffer(b));c&&a.bb.set(c.subarray(0,Math.min(b,a.gb)))}else if(a.bb||(a.bb=
|
||||
[]),a.bb.length>b)a.bb.length=b;else for(;a.bb.length<b;)a.bb.push(0);a.gb=b}},ab:{lb:function(a){var b={};b.dev=8192===(a.mode&61440)?a.id:1;b.ino=a.id;b.mode=a.mode;b.nlink=1;b.uid=0;b.gid=0;b.rdev=a.rdev;N(a.mode)?b.size=4096:32768===(a.mode&61440)?b.size=a.gb:40960===(a.mode&61440)?b.size=a.link.length:b.size=0;b.atime=new Date(a.timestamp);b.mtime=new Date(a.timestamp);b.ctime=new Date(a.timestamp);b.pb=4096;b.blocks=Math.ceil(b.size/b.pb);return b},hb:function(a,b){void 0!==b.mode&&(a.mode=
|
||||
b.mode);void 0!==b.timestamp&&(a.timestamp=b.timestamp);void 0!==b.size&&M.yc(a,b.size)},lookup:function(){throw Eb[L.bc];},vb:function(a,b,c,d){return M.createNode(a,b,c,d)},rename:function(a,b,c){if(N(a.mode)){try{var d=O(b,c)}catch(g){}if(d)for(var e in d.bb)throw new K(L.Nb);}delete a.parent.bb[a.name];a.name=c;b.bb[c]=a;a.parent=b},unlink:function(a,b){delete a.bb[b]},rmdir:function(a,b){var c=O(a,b),d;for(d in c.bb)throw new K(L.Nb);delete a.bb[b]},readdir:function(a){var b=[".",".."],c;for(c in a.bb)a.bb.hasOwnProperty(c)&&
|
||||
b.push(c);return b},symlink:function(a,b,c){a=M.createNode(a,b,41471,0);a.link=c;return a},readlink:function(a){if(40960!==(a.mode&61440))throw new K(L.ib);return a.link}},cb:{read:function(a,b,c,d,e){var g=a.node.bb;if(e>=a.node.gb)return 0;a=Math.min(a.node.gb-e,d);if(8<a&&g.subarray)b.set(g.subarray(e,e+a),c);else for(d=0;d<a;d++)b[c+d]=g[e+d];return a},write:function(a,b,c,d,e,g){g=!1;if(!d)return 0;a=a.node;a.timestamp=Date.now();if(b.subarray&&(!a.bb||a.bb.subarray)){if(g)return a.bb=b.subarray(c,
|
||||
c+d),a.gb=d;if(0===a.gb&&0===e)return a.bb=new Uint8Array(b.subarray(c,c+d)),a.gb=d;if(e+d<=a.gb)return a.bb.set(b.subarray(c,c+d),e),d}M.Tb(a,e+d);if(a.bb.subarray&&b.subarray)a.bb.set(b.subarray(c,c+d),e);else for(g=0;g<d;g++)a.bb[e+g]=b[c+g];a.gb=Math.max(a.gb,e+d);return d},ob:function(a,b,c){1===c?b+=a.position:2===c&&32768===(a.node.mode&61440)&&(b+=a.node.gb);if(0>b)throw new K(L.ib);return b},Pb:function(a,b,c){M.Tb(a.node,b+c);a.node.gb=Math.max(a.node.gb,b+c)},zb:function(a,b,c,d,e,g,k){if(32768!==
|
||||
(a.node.mode&61440))throw new K(L.Cb);c=a.node.bb;if(k&2||c.buffer!==b&&c.buffer!==b.buffer){if(0<e||e+d<a.node.gb)c.subarray?c=c.subarray(e,e+d):c=Array.prototype.slice.call(c,e,e+d);a=!0;d=Ta(d);if(!d)throw new K(L.Mb);b.set(c,d)}else a=!1,d=c.byteOffset;return{xc:d,Db:a}},Ab:function(a,b,c,d,e){if(32768!==(a.node.mode&61440))throw new K(L.Cb);if(e&2)return 0;M.cb.write(a,b,0,d,c,!1);return 0}}},P={yb:!1,Ac:function(){P.yb=!!process.platform.match(/^win/);var a=process.binding("constants");a.fs&&
|
||||
(a=a.fs);P.Ub={1024:a.O_APPEND,64:a.O_CREAT,128:a.O_EXCL,0:a.O_RDONLY,2:a.O_RDWR,4096:a.O_SYNC,512:a.O_TRUNC,1:a.O_WRONLY}},Rb:function(a){return Buffer.rb?Buffer.from(a):new Buffer(a)},jb:function(a){assert(x);return P.createNode(null,"/",P.Wb(a.Hb.root),0)},createNode:function(a,b,c){if(!N(c)&&32768!==(c&61440)&&40960!==(c&61440))throw new K(L.ib);a=Db(a,b,c);a.ab=P.ab;a.cb=P.cb;return a},Wb:function(a){try{var b=fs.lstatSync(a);P.yb&&(b.mode=b.mode|(b.mode&292)>>2)}catch(c){if(!c.code)throw c;
|
||||
throw new K(L[c.code]);}return b.mode},kb:function(a){for(var b=[];a.parent!==a;)b.push(a.name),a=a.parent;b.push(a.jb.Hb.root);b.reverse();return ub.apply(null,b)},qc:function(a){a&=-2656257;var b=0,c;for(c in P.Ub)a&c&&(b|=P.Ub[c],a^=c);if(a)throw new K(L.ib);return b},ab:{lb:function(a){a=P.kb(a);try{var b=fs.lstatSync(a)}catch(c){if(!c.code)throw c;throw new K(L[c.code]);}P.yb&&!b.pb&&(b.pb=4096);P.yb&&!b.blocks&&(b.blocks=(b.size+b.pb-1)/b.pb|0);return{dev:b.dev,ino:b.ino,mode:b.mode,nlink:b.nlink,
|
||||
uid:b.uid,gid:b.gid,rdev:b.rdev,size:b.size,atime:b.atime,mtime:b.mtime,ctime:b.ctime,pb:b.pb,blocks:b.blocks}},hb:function(a,b){var c=P.kb(a);try{void 0!==b.mode&&(fs.chmodSync(c,b.mode),a.mode=b.mode),void 0!==b.size&&fs.truncateSync(c,b.size)}catch(d){if(!d.code)throw d;throw new K(L[d.code]);}},lookup:function(a,b){var c=n(P.kb(a),b);c=P.Wb(c);return P.createNode(a,b,c)},vb:function(a,b,c,d){a=P.createNode(a,b,c,d);b=P.kb(a);try{N(a.mode)?fs.mkdirSync(b,a.mode):fs.writeFileSync(b,"",{mode:a.mode})}catch(e){if(!e.code)throw e;
|
||||
throw new K(L[e.code]);}return a},rename:function(a,b,c){a=P.kb(a);b=n(P.kb(b),c);try{fs.renameSync(a,b)}catch(d){if(!d.code)throw d;throw new K(L[d.code]);}},unlink:function(a,b){a=n(P.kb(a),b);try{fs.unlinkSync(a)}catch(c){if(!c.code)throw c;throw new K(L[c.code]);}},rmdir:function(a,b){a=n(P.kb(a),b);try{fs.rmdirSync(a)}catch(c){if(!c.code)throw c;throw new K(L[c.code]);}},readdir:function(a){a=P.kb(a);try{return fs.readdirSync(a)}catch(b){if(!b.code)throw b;throw new K(L[b.code]);}},symlink:function(a,
|
||||
b,c){a=n(P.kb(a),b);try{fs.symlinkSync(c,a)}catch(d){if(!d.code)throw d;throw new K(L[d.code]);}},readlink:function(a){var b=P.kb(a);try{return b=fs.readlinkSync(b),b=Fb.relative(Fb.resolve(a.jb.Hb.root),b)}catch(c){if(!c.code)throw c;throw new K(L[c.code]);}}},cb:{open:function(a){var b=P.kb(a.node);try{32768===(a.node.mode&61440)&&(a.wb=fs.openSync(b,P.qc(a.flags)))}catch(c){if(!c.code)throw c;throw new K(L[c.code]);}},close:function(a){try{32768===(a.node.mode&61440)&&a.wb&&fs.closeSync(a.wb)}catch(b){if(!b.code)throw b;
|
||||
throw new K(L[b.code]);}},read:function(a,b,c,d,e){if(0===d)return 0;try{return fs.readSync(a.wb,P.Rb(b.buffer),c,d,e)}catch(g){throw new K(L[g.code]);}},write:function(a,b,c,d,e){try{return fs.writeSync(a.wb,P.Rb(b.buffer),c,d,e)}catch(g){throw new K(L[g.code]);}},ob:function(a,b,c){if(1===c)b+=a.position;else if(2===c&&32768===(a.node.mode&61440))try{b+=fs.fstatSync(a.wb).size}catch(d){throw new K(L[d.code]);}if(0>b)throw new K(L.ib);return b}}},Gb=null,Hb={},Q=[],Ib=1,R=null,Jb=!0,S={},K=null,
|
||||
Eb={};function T(a,b){a=vb("/",a);b=b||{};if(!a)return{path:"",node:null};var c={Vb:!0,Jb:0},d;for(d in c)void 0===b[d]&&(b[d]=c[d]);if(8<b.Jb)throw new K(40);a=qb(a.split("/").filter(function(a){return!!a}),!1);var e=Gb;c="/";for(d=0;d<a.length;d++){var g=d===a.length-1;if(g&&b.parent)break;e=O(e,a[d]);c=n(c,a[d]);e.sb&&(!g||g&&b.Vb)&&(e=e.sb.root);if(!g||b.qb)for(g=0;40960===(e.mode&61440);)if(e=Kb(c),c=vb(sb(c),e),e=T(c,{Jb:b.Jb}).node,40<g++)throw new K(40);}return{path:c,node:e}}
|
||||
function Lb(a){for(var b;;){if(a===a.parent)return a=a.jb.Yb,b?"/"!==a[a.length-1]?a+"/"+b:a+b:a;b=b?a.name+"/"+b:a.name;a=a.parent}}function Mb(a,b){for(var c=0,d=0;d<b.length;d++)c=(c<<5)-c+b.charCodeAt(d)|0;return(a+c>>>0)%R.length}function Nb(a){var b=Mb(a.parent.id,a.name);a.tb=R[b];R[b]=a}function Ob(a){var b=Mb(a.parent.id,a.name);if(R[b]===a)R[b]=a.tb;else for(b=R[b];b;){if(b.tb===a){b.tb=a.tb;break}b=b.tb}}
|
||||
function O(a,b){var c;if(c=(c=Pb(a,"x"))?c:a.ab.lookup?0:13)throw new K(c,a);for(c=R[Mb(a.id,b)];c;c=c.tb){var d=c.name;if(c.parent.id===a.id&&d===b)return c}return a.ab.lookup(a,b)}
|
||||
function Db(a,b,c,d){Qb||(Qb=function(a,b,c,d){a||(a=this);this.parent=a;this.jb=a.jb;this.sb=null;this.id=Ib++;this.name=b;this.mode=c;this.ab={};this.cb={};this.rdev=d},Qb.prototype={},Object.defineProperties(Qb.prototype,{read:{get:function(){return 365===(this.mode&365)},set:function(a){a?this.mode|=365:this.mode&=-366}},write:{get:function(){return 146===(this.mode&146)},set:function(a){a?this.mode|=146:this.mode&=-147}}}));a=new Qb(a,b,c,d);Nb(a);return a}
|
||||
function N(a){return 16384===(a&61440)}var Rb={r:0,rs:1052672,"r+":2,w:577,wx:705,xw:705,"w+":578,"wx+":706,"xw+":706,a:1089,ax:1217,xa:1217,"a+":1090,"ax+":1218,"xa+":1218};function ic(a){var b=["r","w","rw"][a&3];a&512&&(b+="w");return b}function Pb(a,b){if(Jb)return 0;if(-1===b.indexOf("r")||a.mode&292){if(-1!==b.indexOf("w")&&!(a.mode&146)||-1!==b.indexOf("x")&&!(a.mode&73))return 13}else return 13;return 0}function tc(a,b){try{return O(a,b),17}catch(c){}return Pb(a,"wx")}
|
||||
function uc(a,b,c){try{var d=O(a,b)}catch(e){return e.eb}if(a=Pb(a,"wx"))return a;if(c){if(!N(d.mode))return 20;if(d===d.parent||"/"===Lb(d))return 16}else if(N(d.mode))return 21;return 0}function vc(a){var b=4096;for(a=a||0;a<=b;a++)if(!Q[a])return a;throw new K(24);}
|
||||
function wc(a,b){xc||(xc=function(){},xc.prototype={},Object.defineProperties(xc.prototype,{object:{get:function(){return this.node},set:function(a){this.node=a}}}));var c=new xc,d;for(d in a)c[d]=a[d];a=c;b=vc(b);a.fd=b;return Q[b]=a}var Cb={open:function(a){a.cb=Hb[a.node.rdev].cb;a.cb.open&&a.cb.open(a)},ob:function(){throw new K(29);}};function yb(a,b){Hb[a]={cb:b}}
|
||||
function yc(a,b){var c="/"===b,d=!b;if(c&&Gb)throw new K(16);if(!c&&!d){var e=T(b,{Vb:!1});b=e.path;e=e.node;if(e.sb)throw new K(16);if(!N(e.mode))throw new K(20);}b={type:a,Hb:{},Yb:b,wc:[]};a=a.jb(b);a.jb=b;b.root=a;c?Gb=a:e&&(e.sb=b,e.jb&&e.jb.wc.push(b))}function ja(a,b,c){var d=T(a,{parent:!0}).node;a=tb(a);if(!a||"."===a||".."===a)throw new K(22);var e=tc(d,a);if(e)throw new K(e);if(!d.ab.vb)throw new K(1);return d.ab.vb(d,a,b,c)}function U(a,b){ja(a,(void 0!==b?b:511)&1023|16384,0)}
|
||||
function zc(a,b,c){"undefined"===typeof c&&(c=b,b=438);ja(a,b|8192,c)}function Ac(a,b){if(!vb(a))throw new K(2);var c=T(b,{parent:!0}).node;if(!c)throw new K(2);b=tb(b);var d=tc(c,b);if(d)throw new K(d);if(!c.ab.symlink)throw new K(1);c.ab.symlink(c,b,a)}
|
||||
function ta(a){var b=T(a,{parent:!0}).node,c=tb(a),d=O(b,c),e=uc(b,c,!1);if(e)throw new K(e);if(!b.ab.unlink)throw new K(1);if(d.sb)throw new K(16);try{S.willDeletePath&&S.willDeletePath(a)}catch(g){console.log("FS.trackingDelegate['willDeletePath']('"+a+"') threw an exception: "+g.message)}b.ab.unlink(b,c);Ob(d);try{if(S.onDeletePath)S.onDeletePath(a)}catch(g){console.log("FS.trackingDelegate['onDeletePath']('"+a+"') threw an exception: "+g.message)}}
|
||||
function Kb(a){a=T(a).node;if(!a)throw new K(2);if(!a.ab.readlink)throw new K(22);return vb(Lb(a.parent),a.ab.readlink(a))}function ra(a,b){a=T(a,{qb:!b}).node;if(!a)throw new K(2);if(!a.ab.lb)throw new K(1);return a.ab.lb(a)}function Bc(a){return ra(a,!0)}function ka(a,b){var c;"string"===typeof a?c=T(a,{qb:!0}).node:c=a;if(!c.ab.hb)throw new K(1);c.ab.hb(c,{mode:b&4095|c.mode&-4096,timestamp:Date.now()})}
|
||||
function Cc(a){var b;"string"===typeof a?b=T(a,{qb:!0}).node:b=a;if(!b.ab.hb)throw new K(1);b.ab.hb(b,{timestamp:Date.now()})}function Dc(a,b){if(0>b)throw new K(22);var c;"string"===typeof a?c=T(a,{qb:!0}).node:c=a;if(!c.ab.hb)throw new K(1);if(N(c.mode))throw new K(21);if(32768!==(c.mode&61440))throw new K(22);if(a=Pb(c,"w"))throw new K(a);c.ab.hb(c,{size:b,timestamp:Date.now()})}
|
||||
function p(a,b,c,d){if(""===a)throw new K(2);if("string"===typeof b){var e=Rb[b];if("undefined"===typeof e)throw Error("Unknown file open mode: "+b);b=e}c=b&64?("undefined"===typeof c?438:c)&4095|32768:0;if("object"===typeof a)var g=a;else{a=rb(a);try{g=T(a,{qb:!(b&131072)}).node}catch(k){}}e=!1;if(b&64)if(g){if(b&128)throw new K(17);}else g=ja(a,c,0),e=!0;if(!g)throw new K(2);8192===(g.mode&61440)&&(b&=-513);if(b&65536&&!N(g.mode))throw new K(20);if(!e&&(c=g?40960===(g.mode&61440)?40:N(g.mode)&&
|
||||
("r"!==ic(b)||b&512)?21:Pb(g,ic(b)):2))throw new K(c);b&512&&Dc(g,0);b&=-641;d=wc({node:g,path:Lb(g),flags:b,seekable:!0,position:0,cb:g.cb,Bc:[],error:!1},d);d.cb.open&&d.cb.open(d);!f.logReadFiles||b&1||(Ec||(Ec={}),a in Ec||(Ec[a]=1,console.log("FS.trackingDelegate error on read file: "+a)));try{S.onOpenFile&&(g=0,1!==(b&2097155)&&(g|=1),0!==(b&2097155)&&(g|=2),S.onOpenFile(a,g))}catch(k){console.log("FS.trackingDelegate['onOpenFile']('"+a+"', flags) threw an exception: "+k.message)}return d}
|
||||
function ma(a){if(null===a.fd)throw new K(9);a.Gb&&(a.Gb=null);try{a.cb.close&&a.cb.close(a)}catch(b){throw b;}finally{Q[a.fd]=null}a.fd=null}function Fc(a,b,c){if(null===a.fd)throw new K(9);if(!a.seekable||!a.cb.ob)throw new K(29);if(0!=c&&1!=c&&2!=c)throw new K(22);a.position=a.cb.ob(a,b,c);a.Bc=[]}
|
||||
function sa(a,b,c,d,e){if(0>d||0>e)throw new K(22);if(null===a.fd)throw new K(9);if(1===(a.flags&2097155))throw new K(9);if(N(a.node.mode))throw new K(21);if(!a.cb.read)throw new K(22);var g="undefined"!==typeof e;if(!g)e=a.position;else if(!a.seekable)throw new K(29);b=a.cb.read(a,b,c,d,e);g||(a.position+=b);return b}
|
||||
function la(a,b,c,d,e,g){if(0>d||0>e)throw new K(22);if(null===a.fd)throw new K(9);if(0===(a.flags&2097155))throw new K(9);if(N(a.node.mode))throw new K(21);if(!a.cb.write)throw new K(22);a.flags&1024&&Fc(a,0,2);var k="undefined"!==typeof e;if(!k)e=a.position;else if(!a.seekable)throw new K(29);b=a.cb.write(a,b,c,d,e,g);k||(a.position+=b);try{if(a.path&&S.onWriteToFile)S.onWriteToFile(a.path)}catch(m){console.log("FS.trackingDelegate['onWriteToFile']('"+a.path+"') threw an exception: "+m.message)}return b}
|
||||
function Gc(){K||(K=function(a,b){this.node=b;this.zc=function(a){this.eb=a};this.zc(a);this.message="FS error";this.stack&&Object.defineProperty(this,"stack",{value:Error().stack,writable:!0})},K.prototype=Error(),K.prototype.constructor=K,[2].forEach(function(a){Eb[a]=new K(a);Eb[a].stack="<generic error, no stack>"}))}var Hc;function ia(a,b){var c=0;a&&(c|=365);b&&(c|=146);return c}
|
||||
function Ic(a,b,c){a=n("/dev",a);var d=ia(!!b,!!c);Jc||(Jc=64);var e=Jc++<<8|0;yb(e,{open:function(a){a.seekable=!1},close:function(){c&&c.buffer&&c.buffer.length&&c(10)},read:function(a,c,d,e){for(var g=0,k=0;k<e;k++){try{var m=b()}catch(Ia){throw new K(5);}if(void 0===m&&0===g)throw new K(11);if(null===m||void 0===m)break;g++;c[d+k]=m}g&&(a.node.timestamp=Date.now());return g},write:function(a,b,d,e){for(var g=0;g<e;g++)try{c(b[d+g])}catch(fa){throw new K(5);}e&&(a.node.timestamp=Date.now());return g}});
|
||||
zc(a,d,e)}
|
||||
var Jc,V={},Qb,xc,Ec,L={dc:1,bc:2,Ae:3,sd:4,Lb:5,Ob:6,Ic:7,Td:8,Kb:9,Xc:10,ac:11,Ke:11,Mb:12,$b:13,ld:14,ee:15,Vc:16,kd:17,Le:18,Cb:19,cc:20,ud:21,ib:22,Od:23,Gd:24,je:25,He:26,md:27,ae:28,ze:29,ve:30,Hd:31,pe:32,gd:33,ec:34,Xd:42,pd:43,Yc:44,wd:45,xd:46,yd:47,Ed:48,Ie:49,Rd:50,vd:51,cd:35,Ud:37,Oc:52,Rc:53,Me:54,Pd:55,Sc:56,Tc:57,dd:35,Uc:59,ce:60,Sd:61,Ee:62,be:63,Yd:64,Zd:65,ue:66,Vd:67,Lc:68,Be:69,Zc:70,qe:71,Jd:72,hd:73,Qc:74,ke:76,Pc:77,te:78,zd:79,Ad:80,Dd:81,Cd:82,Bd:83,de:38,Nb:39,Kd:36,
|
||||
Fd:40,le:95,oe:96,bd:104,Qd:105,Mc:97,se:91,he:88,$d:92,xe:108,ad:111,Jc:98,$c:103,Nd:101,Ld:100,Fe:110,nd:112,od:113,rd:115,Nc:114,ed:89,Id:90,re:93,ye:94,Kc:99,Md:102,td:106,fe:107,Ge:109,Je:87,jd:122,Ce:116,ie:95,Wd:123,qd:84,me:75,Wc:125,ge:131,ne:130,De:86},Kc={};
|
||||
function Lc(a,b,c){try{var d=a(b)}catch(e){if(e&&e.node&&rb(b)!==rb(Lb(e.node)))return-L.cc;throw e;}D[c>>2]=d.dev;D[c+4>>2]=0;D[c+8>>2]=d.ino;D[c+12>>2]=d.mode;D[c+16>>2]=d.nlink;D[c+20>>2]=d.uid;D[c+24>>2]=d.gid;D[c+28>>2]=d.rdev;D[c+32>>2]=0;D[c+36>>2]=d.size;D[c+40>>2]=4096;D[c+44>>2]=d.blocks;D[c+48>>2]=d.atime.getTime()/1E3|0;D[c+52>>2]=0;D[c+56>>2]=d.mtime.getTime()/1E3|0;D[c+60>>2]=0;D[c+64>>2]=d.ctime.getTime()/1E3|0;D[c+68>>2]=0;D[c+72>>2]=d.ino;return 0}var W=0;
|
||||
function X(){W+=4;return D[W-4>>2]}function Y(){return G(X())}function Z(){var a=Q[X()];if(!a)throw new K(L.Kb);return a}function Da(){return l.length}function Ea(a){if(2147418112<a)return!1;for(var b=Math.max(Da(),16777216);b<a;)536870912>=b?b=Wa(2*b):b=Math.min(Wa((3*b+2147483648)/4),2147418112);a=Wa(b);var c=buffer.byteLength;try{var d=-1!==La.grow((a-c)/65536)?buffer=La.buffer:null}catch(e){d=null}if(!d||d.byteLength!=b)return!1;Xa();return!0}
|
||||
function Mc(a){if(0===a)return 0;a=G(a);if(!J.hasOwnProperty(a))return 0;Mc.rb&&ha(Mc.rb);a=J[a];var b=oa(a)+1,c=Ta(b);c&&r(a,l,c,b);Mc.rb=c;return Mc.rb}r("GMT",F,60272,4);
|
||||
function Nc(){function a(a){return(a=a.toTimeString().match(/\(([A-Za-z ]+)\)$/))?a[1]:"GMT"}if(!Oc){Oc=!0;D[Pc()>>2]=60*(new Date).getTimezoneOffset();var b=new Date(2E3,0,1),c=new Date(2E3,6,1);D[Qc()>>2]=Number(b.getTimezoneOffset()!=c.getTimezoneOffset());var d=a(b),e=a(c);d=ea(ba(d));e=ea(ba(e));c.getTimezoneOffset()<b.getTimezoneOffset()?(D[Rc()>>2]=d,D[Rc()+4>>2]=e):(D[Rc()>>2]=e,D[Rc()+4>>2]=d)}}var Oc;
|
||||
function Sc(a){a/=1E3;if((v||w)&&self.performance&&self.performance.now)for(var b=self.performance.now();self.performance.now()-b<a;);else for(b=Date.now();Date.now()-b<a;);return 0}f._usleep=Sc;Gc();R=Array(4096);yc(M,"/");U("/tmp");U("/home");U("/home/web_user");
|
||||
(function(){U("/dev");yb(259,{read:function(){return 0},write:function(a,b,c,k){return k}});zc("/dev/null",259);xb(1280,Ab);xb(1536,Bb);zc("/dev/tty",1280);zc("/dev/tty1",1536);if("object"===typeof crypto&&"function"===typeof crypto.getRandomValues){var a=new Uint8Array(1);var b=function(){crypto.getRandomValues(a);return a[0]}}else if(x)try{var c=require("crypto");b=function(){return c.randomBytes(1)[0]}}catch(d){}b||(b=function(){B("random_device")});Ic("random",b);Ic("urandom",b);U("/dev/shm");
|
||||
U("/dev/shm/tmp")})();U("/proc");U("/proc/self");U("/proc/self/fd");yc({jb:function(){var a=Db("/proc/self","fd",16895,73);a.ab={lookup:function(a,c){var b=Q[+c];if(!b)throw new K(9);a={parent:null,jb:{Yb:"fake"},ab:{readlink:function(){return b.path}}};return a.parent=a}};return a}},"/proc/self/fd");if(x){var fs=require("fs"),Fb=require("path");P.Ac()}function ba(a,b){var c=Array(oa(a)+1);a=r(a,c,0,c.length);b&&(c.length=a);return c}
|
||||
var Vc=f.asm({},{n:B,l:function(a){return E[a]()},i:function(a,b){return E[a](b)},h:function(a,b,c){return E[a](b,c)},g:function(a,b,c,d){return E[a](b,c,d)},f:function(a,b,c,d,e){return E[a](b,c,d,e)},e:function(a,b,c,d,e,g){return E[a](b,c,d,e,g)},d:function(a,b,c,d,e,g,k){return E[a](b,c,d,e,g,k)},B:function(a,b,c,d,e){return E[a](b,c,d,e)},A:function(a,b,c){return E[a](b,c)},z:function(a,b,c,d){return E[a](b,c,d)},y:function(a,b,c,d,e){return E[a](b,c,d,e)},c:function(a,b){E[a](b)},b:function(a,
|
||||
b,c){E[a](b,c)},k:function(a,b,c,d){E[a](b,c,d)},j:function(a,b,c,d,e){E[a](b,c,d,e)},x:function(a,b,c,d,e,g){E[a](b,c,d,e,g)},w:function(a,b,c,d){E[a](b,c,d)},v:function(a,b,c,d){E[a](b,c,d)},m:function(a,b,c,d){B("Assertion failed: "+G(a)+", at: "+[b?G(b):"unknown filename",c,d?G(d):"unknown function"])},ga:ob,u:pb,fa:function(a,b){W=b;try{var c=Y();ta(c);return 0}catch(d){return"undefined"!==typeof V&&d instanceof K||B(d),-d.eb}},ea:function(a,b){W=b;try{return Z(),0}catch(c){return"undefined"!==
|
||||
typeof V&&c instanceof K||B(c),-c.eb}},da:function(a,b){W=b;try{var c=Z();X();var d=X(),e=X(),g=X();Fc(c,d,g);D[e>>2]=c.position;c.Gb&&0===d&&0===g&&(c.Gb=null);return 0}catch(k){return"undefined"!==typeof V&&k instanceof K||B(k),-k.eb}},ca:function(a,b){W=b;try{var c=Y(),d=X();ka(c,d);return 0}catch(e){return"undefined"!==typeof V&&e instanceof K||B(e),-e.eb}},ba:function(a,b){W=b;try{var c=X(),d=X();if(0===d)return-L.ib;if(d<oa("/")+1)return-L.ec;r("/",F,c,d);return c}catch(e){return"undefined"!==
|
||||
typeof V&&e instanceof K||B(e),-e.eb}},aa:function(a,b){W=b;try{var c=X(),d=X(),e=X(),g=X(),k=X(),m=X();m<<=12;a=!1;if(-1===k){var y=Tc(16384,d);if(!y)return-L.Mb;Uc(y,0,d);a=!0}else{var z=Q[k];if(!z)return-L.Kb;b=F;if(1===(z.flags&2097155))throw new K(13);if(!z.cb.zb)throw new K(19);var fa=z.cb.zb(z,b,c,d,m,e,g);y=fa.xc;a=fa.Db}Kc[y]={vc:y,uc:d,Db:a,fd:k,flags:g};return y}catch(ca){return"undefined"!==typeof V&&ca instanceof K||B(ca),-ca.eb}},$:function(a,b){W=b;try{var c=X();X();var d=X();X();var e=
|
||||
Q[c];if(!e)throw new K(9);if(0===(e.flags&2097155))throw new K(22);Dc(e.node,d);return 0}catch(g){return"undefined"!==typeof V&&g instanceof K||B(g),-g.eb}},t:function(a,b){W=b;try{var c=Y(),d=X();return Lc(ra,c,d)}catch(e){return"undefined"!==typeof V&&e instanceof K||B(e),-e.eb}},_:function(a,b){W=b;try{var c=Y(),d=X();return Lc(Bc,c,d)}catch(e){return"undefined"!==typeof V&&e instanceof K||B(e),-e.eb}},Z:function(a,b){W=b;try{var c=Z(),d=X();return Lc(ra,c.path,d)}catch(e){return"undefined"!==
|
||||
typeof V&&e instanceof K||B(e),-e.eb}},Y:function(a,b){W=b;return 42},X:function(a,b){W=b;return 0},W:function(a,b){W=b;try{var c=X();X();X();var d=Q[c];if(!d)throw new K(9);Cc(d.node);return 0}catch(e){return"undefined"!==typeof V&&e instanceof K||B(e),-e.eb}},V:function(a,b){W=b;try{var c=Y();X();X();Cc(c);return 0}catch(d){return"undefined"!==typeof V&&d instanceof K||B(d),-d.eb}},o:function(a,b){W=b;try{var c=Z();switch(X()){case 0:var d=X();return 0>d?-L.ib:p(c.path,c.flags,0,d).fd;case 1:case 2:return 0;
|
||||
case 3:return c.flags;case 4:return d=X(),c.flags|=d,0;case 12:return d=X(),Ha[d+0>>1]=2,0;case 13:case 14:return 0;case 16:case 8:return-L.ib;case 9:return pb(L.ib),-1;default:return-L.ib}}catch(e){return"undefined"!==typeof V&&e instanceof K||B(e),-e.eb}},U:function(a,b){W=b;try{var c=Z(),d=X(),e=X();return sa(c,l,d,e)}catch(g){return"undefined"!==typeof V&&g instanceof K||B(g),-g.eb}},T:function(a,b){W=b;try{var c=Y();var d=X();if(d&-8)var e=-L.ib;else{var g=T(c,{qb:!0}).node;a="";d&4&&(a+="r");
|
||||
d&2&&(a+="w");d&1&&(a+="x");e=a&&Pb(g,a)?-L.$b:0}return e}catch(k){return"undefined"!==typeof V&&k instanceof K||B(k),-k.eb}},S:function(a,b){W=b;try{var c=Y(),d=X();a=c;a=rb(a);"/"===a[a.length-1]&&(a=a.substr(0,a.length-1));U(a,d);return 0}catch(e){return"undefined"!==typeof V&&e instanceof K||B(e),-e.eb}},R:function(a,b){W=b;try{var c=Z(),d=X(),e=X();return la(c,l,d,e)}catch(g){return"undefined"!==typeof V&&g instanceof K||B(g),-g.eb}},Q:function(a,b){W=b;try{var c=Y(),d=T(c,{parent:!0}).node,
|
||||
e=tb(c),g=O(d,e),k=uc(d,e,!0);if(k)throw new K(k);if(!d.ab.rmdir)throw new K(1);if(g.sb)throw new K(16);try{S.willDeletePath&&S.willDeletePath(c)}catch(m){console.log("FS.trackingDelegate['willDeletePath']('"+c+"') threw an exception: "+m.message)}d.ab.rmdir(d,e);Ob(g);try{if(S.onDeletePath)S.onDeletePath(c)}catch(m){console.log("FS.trackingDelegate['onDeletePath']('"+c+"') threw an exception: "+m.message)}return 0}catch(m){return"undefined"!==typeof V&&m instanceof K||B(m),-m.eb}},P:function(a,b){W=
|
||||
b;try{var c=Y(),d=X(),e=X();return p(c,d,e).fd}catch(g){return"undefined"!==typeof V&&g instanceof K||B(g),-g.eb}},s:function(a,b){W=b;try{var c=Z();ma(c);return 0}catch(d){return"undefined"!==typeof V&&d instanceof K||B(d),-d.eb}},O:function(a,b){W=b;try{var c=Y(),d=X();var e=X();if(0>=e)var g=-L.ib;else{var k=Kb(c),m=Math.min(e,oa(k)),y=l[d+m];r(k,F,d,e+1);l[d+m]=y;g=m}return g}catch(z){return"undefined"!==typeof V&&z instanceof K||B(z),-z.eb}},N:function(a,b){W=b;try{var c=X(),d=X(),e=Kc[c];if(!e)return 0;
|
||||
if(d===e.uc){var g=Q[e.fd],k=e.flags,m=new Uint8Array(F.subarray(c,c+d));g&&g.cb.Ab&&g.cb.Ab(g,m,0,d,k);Kc[c]=null;e.Db&&ha(e.vc)}return 0}catch(y){return"undefined"!==typeof V&&y instanceof K||B(y),-y.eb}},M:function(a,b){W=b;try{var c=X(),d=X(),e=Q[c];if(!e)throw new K(9);ka(e.node,d);return 0}catch(g){return"undefined"!==typeof V&&g instanceof K||B(g),-g.eb}},L:Da,K:function(a,b,c){F.set(F.subarray(b,b+c),a)},J:Ea,r:Mc,q:function(a){var b=Date.now();D[a>>2]=b/1E3|0;D[a+4>>2]=b%1E3*1E3|0;return 0},
|
||||
I:function(a){return Math.log(a)/Math.LN10},p:function(){B("trap!")},H:function(a){Nc();a=new Date(1E3*D[a>>2]);D[15056]=a.getSeconds();D[15057]=a.getMinutes();D[15058]=a.getHours();D[15059]=a.getDate();D[15060]=a.getMonth();D[15061]=a.getFullYear()-1900;D[15062]=a.getDay();var b=new Date(a.getFullYear(),0,1);D[15063]=(a.getTime()-b.getTime())/864E5|0;D[15065]=-(60*a.getTimezoneOffset());var c=(new Date(2E3,6,1)).getTimezoneOffset();b=b.getTimezoneOffset();a=(c!=b&&a.getTimezoneOffset()==Math.min(b,
|
||||
c))|0;D[15064]=a;a=D[Rc()+(a?4:0)>>2];D[15066]=a;return 60224},G:function(a,b){var c=D[a>>2];a=D[a+4>>2];0!==b&&(D[b>>2]=0,D[b+4>>2]=0);return Sc(1E6*c+a/1E3)},F:function(a){switch(a){case 30:return 16384;case 85:return 131068;case 132:case 133:case 12:case 137:case 138:case 15:case 235:case 16:case 17:case 18:case 19:case 20:case 149:case 13:case 10:case 236:case 153:case 9:case 21:case 22:case 159:case 154:case 14:case 77:case 78:case 139:case 80:case 81:case 82:case 68:case 67:case 164:case 11:case 29:case 47:case 48:case 95:case 52:case 51:case 46:return 200809;
|
||||
case 79:return 0;case 27:case 246:case 127:case 128:case 23:case 24:case 160:case 161:case 181:case 182:case 242:case 183:case 184:case 243:case 244:case 245:case 165:case 178:case 179:case 49:case 50:case 168:case 169:case 175:case 170:case 171:case 172:case 97:case 76:case 32:case 173:case 35:return-1;case 176:case 177:case 7:case 155:case 8:case 157:case 125:case 126:case 92:case 93:case 129:case 130:case 131:case 94:case 91:return 1;case 74:case 60:case 69:case 70:case 4:return 1024;case 31:case 42:case 72:return 32;
|
||||
case 87:case 26:case 33:return 2147483647;case 34:case 1:return 47839;case 38:case 36:return 99;case 43:case 37:return 2048;case 0:return 2097152;case 3:return 65536;case 28:return 32768;case 44:return 32767;case 75:return 16384;case 39:return 1E3;case 89:return 700;case 71:return 256;case 40:return 255;case 2:return 100;case 180:return 64;case 25:return 20;case 5:return 16;case 6:return 6;case 73:return 4;case 84:return"object"===typeof navigator?navigator.hardwareConcurrency||1:1}pb(22);return-1},
|
||||
E:function(a){var b=Date.now()/1E3|0;a&&(D[a>>2]=b);return b},D:function(a,b){if(b){var c=1E3*D[b+8>>2];c+=D[b+12>>2]/1E3}else c=Date.now();a=G(a);try{b=c;var d=T(a,{qb:!0}).node;d.ab.hb(d,{timestamp:Math.max(b,c)});return 0}catch(e){a=e;if(!(a instanceof K)){a+=" : ";a:{d=Error();if(!d.stack){try{throw Error(0);}catch(g){d=g}if(!d.stack){d="(no stack trace available)";break a}}d=d.stack.toString()}f.extraStackTrace&&(d+="\n"+f.extraStackTrace());d=Va(d);throw a+d;}pb(a.eb);return-1}},C:function(){B("OOM")},
|
||||
a:Ca},buffer);f.asm=Vc;f._RegisterExtensionFunctions=function(){return f.asm.ha.apply(null,arguments)};var nb=f.___emscripten_environ_constructor=function(){return f.asm.ia.apply(null,arguments)};f.___errno_location=function(){return f.asm.ja.apply(null,arguments)};
|
||||
var Qc=f.__get_daylight=function(){return f.asm.ka.apply(null,arguments)},Pc=f.__get_timezone=function(){return f.asm.la.apply(null,arguments)},Rc=f.__get_tzname=function(){return f.asm.ma.apply(null,arguments)},ha=f._free=function(){return f.asm.na.apply(null,arguments)},Ta=f._malloc=function(){return f.asm.oa.apply(null,arguments)},Tc=f._memalign=function(){return f.asm.pa.apply(null,arguments)},Uc=f._memset=function(){return f.asm.qa.apply(null,arguments)};
|
||||
f._sqlite3_bind_blob=function(){return f.asm.ra.apply(null,arguments)};f._sqlite3_bind_double=function(){return f.asm.sa.apply(null,arguments)};f._sqlite3_bind_int=function(){return f.asm.ta.apply(null,arguments)};f._sqlite3_bind_parameter_index=function(){return f.asm.ua.apply(null,arguments)};f._sqlite3_bind_text=function(){return f.asm.va.apply(null,arguments)};f._sqlite3_changes=function(){return f.asm.wa.apply(null,arguments)};f._sqlite3_clear_bindings=function(){return f.asm.xa.apply(null,arguments)};
|
||||
f._sqlite3_close_v2=function(){return f.asm.ya.apply(null,arguments)};f._sqlite3_column_blob=function(){return f.asm.za.apply(null,arguments)};f._sqlite3_column_bytes=function(){return f.asm.Aa.apply(null,arguments)};f._sqlite3_column_double=function(){return f.asm.Ba.apply(null,arguments)};f._sqlite3_column_name=function(){return f.asm.Ca.apply(null,arguments)};f._sqlite3_column_text=function(){return f.asm.Da.apply(null,arguments)};f._sqlite3_column_type=function(){return f.asm.Ea.apply(null,arguments)};
|
||||
f._sqlite3_create_function_v2=function(){return f.asm.Fa.apply(null,arguments)};f._sqlite3_data_count=function(){return f.asm.Ga.apply(null,arguments)};f._sqlite3_errmsg=function(){return f.asm.Ha.apply(null,arguments)};f._sqlite3_exec=function(){return f.asm.Ia.apply(null,arguments)};f._sqlite3_finalize=function(){return f.asm.Ja.apply(null,arguments)};f._sqlite3_free=function(){return f.asm.Ka.apply(null,arguments)};f._sqlite3_open=function(){return f.asm.La.apply(null,arguments)};
|
||||
f._sqlite3_prepare_v2=function(){return f.asm.Ma.apply(null,arguments)};f._sqlite3_reset=function(){return f.asm.Na.apply(null,arguments)};f._sqlite3_result_double=function(){return f.asm.Oa.apply(null,arguments)};f._sqlite3_result_null=function(){return f.asm.Pa.apply(null,arguments)};f._sqlite3_result_text=function(){return f.asm.Qa.apply(null,arguments)};f._sqlite3_step=function(){return f.asm.Ra.apply(null,arguments)};f._sqlite3_value_blob=function(){return f.asm.Sa.apply(null,arguments)};
|
||||
f._sqlite3_value_bytes=function(){return f.asm.Ta.apply(null,arguments)};f._sqlite3_value_double=function(){return f.asm.Ua.apply(null,arguments)};f._sqlite3_value_int=function(){return f.asm.Va.apply(null,arguments)};f._sqlite3_value_text=function(){return f.asm.Wa.apply(null,arguments)};f._sqlite3_value_type=function(){return f.asm.Xa.apply(null,arguments)};
|
||||
var h=f.stackAlloc=function(){return f.asm.Za.apply(null,arguments)},qa=f.stackRestore=function(){return f.asm._a.apply(null,arguments)},na=f.stackSave=function(){return f.asm.$a.apply(null,arguments)};f.dynCall_vi=function(){return f.asm.Ya.apply(null,arguments)};f.asm=Vc;f.cwrap=function(a,b,c,d){c=c||[];var e=c.every(function(a){return"number"===a});return"string"!==b&&e&&!d?Na(a):function(){return Oa(a,b,c,arguments)}};f.stackSave=na;f.stackRestore=qa;f.stackAlloc=h;
|
||||
function Wc(a){this.name="ExitStatus";this.message="Program terminated with exit("+a+")";this.status=a}Wc.prototype=Error();Wc.prototype.constructor=Wc;gb=function Xc(){f.calledRun||Yc();f.calledRun||(gb=Xc)};
|
||||
function Yc(){function a(){if(!f.calledRun&&(f.calledRun=!0,!Ma)){db||(db=!0,f.noFSInit||Hc||(Hc=!0,Gc(),f.stdin=f.stdin,f.stdout=f.stdout,f.stderr=f.stderr,f.stdin?Ic("stdin",f.stdin):Ac("/dev/tty","/dev/stdin"),f.stdout?Ic("stdout",null,f.stdout):Ac("/dev/tty","/dev/stdout"),f.stderr?Ic("stderr",null,f.stderr):Ac("/dev/tty1","/dev/stderr"),p("/dev/stdin","r"),p("/dev/stdout","w"),p("/dev/stderr","w")),Za(ab));Jb=!1;Za(bb);if(f.onRuntimeInitialized)f.onRuntimeInitialized();if(f.postRun)for("function"==
|
||||
typeof f.postRun&&(f.postRun=[f.postRun]);f.postRun.length;){var a=f.postRun.shift();cb.unshift(a)}Za(cb)}}if(!(0<H)){if(f.preRun)for("function"==typeof f.preRun&&(f.preRun=[f.preRun]);f.preRun.length;)eb();Za($a);0<H||f.calledRun||(f.setStatus?(f.setStatus("Running..."),setTimeout(function(){setTimeout(function(){f.setStatus("")},1);a()},1)):a())}}f.run=Yc;
|
||||
function B(a){if(f.onAbort)f.onAbort(a);void 0!==a?(Aa(a),C(a),a=JSON.stringify(a)):a="";Ma=!0;throw"abort("+a+"). Build with -s ASSERTIONS=1 for more info.";}f.abort=B;if(f.preInit)for("function"==typeof f.preInit&&(f.preInit=[f.preInit]);0<f.preInit.length;)f.preInit.pop()();f.noExitRuntime=!0;Yc();
|
||||
|
||||
|
||||
// The shell-pre.js and emcc-generated code goes above
|
||||
return Module;
|
||||
}); // The end of the promise being returned
|
||||
|
||||
return initSqlJsPromise;
|
||||
} // The end of our initSqlJs function
|
||||
|
||||
// This bit below is copied almost exactly from what you get when you use the MODULARIZE=1 flag with emcc
|
||||
// However, we don't want to use the emcc modularization. See shell-pre.js
|
||||
if (typeof exports === 'object' && typeof module === 'object'){
|
||||
module.exports = initSqlJs;
|
||||
// This will allow the module to be used in ES6 or CommonJS
|
||||
module.exports.default = initSqlJs;
|
||||
}
|
||||
else if (typeof define === 'function' && define['amd']) {
|
||||
define([], function() { return initSqlJs; });
|
||||
}
|
||||
else if (typeof exports === 'object'){
|
||||
exports["Module"] = initSqlJs;
|
||||
}
|
||||
|
Binary file not shown.
|
@ -20,7 +20,7 @@ You'll need to import `package:moor_mysql/moor_mysql.dart`.
|
|||
## Limitations
|
||||
We're currently experimenting with other database engines - Moor was mainly designed for
|
||||
sqlite and supporting advanced features of MySQL is not a priority right now.
|
||||
- No migrations
|
||||
- No migrations - you'll need to create your tables manually
|
||||
- Some statements don't work
|
||||
- Compiled custom queries don't work - we can only parse sqlite. Of course, runtime custom
|
||||
queries with `customSelect` and `customUpdate` will work as expected.
|
|
@ -71,6 +71,9 @@ class _MySqlDelegate extends DatabaseDelegate with _MySqlExecutor {
|
|||
@override
|
||||
TransactionDelegate get transactionDelegate => _TransactionOpener(this);
|
||||
|
||||
@override
|
||||
SqlDialect get dialect => SqlDialect.mysql;
|
||||
|
||||
@override
|
||||
Future<void> open([GeneratedDatabase db]) async {
|
||||
_connection = await MySqlConnection.connect(_settings);
|
||||
|
|
|
@ -1,6 +1,18 @@
|
|||
## unreleased
|
||||
- Support custom columns
|
||||
|
||||
- Support custom columns via type converters. See the [docs](https://moor.simonbinder.eu/type_converters)
|
||||
for details on how to use this feature.
|
||||
- Transactions now roll back when not completed successfully, they also rethrow the exception
|
||||
to make debugging easier.
|
||||
- New `backends` api, making it easier to write database drivers that work with moor. Apart from
|
||||
`moor_flutter`, new experimental backends can be checked out from git:
|
||||
1. `encrypted_moor`: An encrypted moor database: https://github.com/simolus3/moor/tree/develop/extras/encryption
|
||||
2. `moor_mysql`: Work in progress mysql backend for moor. https://github.com/simolus3/moor/tree/develop/extras/mysql
|
||||
- The compiled sql feature is no longer experimental and will stay stable until a major version bump
|
||||
- New, experimental support for `.moor` files! Instead of declaring your tables in Dart, you can
|
||||
choose to declare them with sql by writing the `CREATE TABLE` statement in a `.moor` file.
|
||||
You can then use these tables in the database and with daos by using the `include` parameter
|
||||
on `@UseMoor` and `@UseDao`. Again, please notice that this is an experimental api and there
|
||||
might be some hiccups. Please report any issues you run into.
|
||||
## 1.6.0
|
||||
- Experimental web support! See [the documentation](https://moor.simonbinder.eu/web) for details.
|
||||
- Make transactions easier to use: Thanks to some Dart async magic, you no longer need to run
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
/// with moor.
|
||||
library backends;
|
||||
|
||||
export 'src/runtime/components/component.dart' show SqlDialect;
|
||||
export 'src/runtime/executor/executor.dart';
|
||||
export 'src/runtime/executor/helpers/delegates.dart';
|
||||
export 'src/runtime/executor/helpers/engines.dart';
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
/// A utility library to find an edit script that turns a list into another.
|
||||
/// This is useful when displaying a updating stream of immutable lists in a
|
||||
/// list that can be updated.
|
||||
@Deprecated('Will be removed in moor 2.0')
|
||||
library diff_util;
|
||||
|
||||
import 'package:moor/src/utils/android_diffutils_port.dart' as impl;
|
||||
|
|
|
@ -42,15 +42,24 @@ class UseMoor {
|
|||
/// Moor will generate two methods for you: `userById(int id)` and
|
||||
/// `watchUserById(int id)`.
|
||||
/// {@endtemplate}
|
||||
@experimental
|
||||
final Map<String, String> queries;
|
||||
|
||||
/// {@template moor_include_param}
|
||||
/// Defines the `.moor` files to include when building the table structure for
|
||||
/// this database.
|
||||
///
|
||||
/// Please note that this feature is experimental at the moment.
|
||||
/// {@endtemplate}
|
||||
@experimental
|
||||
final Set<String> include;
|
||||
|
||||
/// Use this class as an annotation to inform moor_generator that a database
|
||||
/// class should be generated using the specified [UseMoor.tables].
|
||||
const UseMoor({
|
||||
@required this.tables,
|
||||
this.daos = const [],
|
||||
@experimental this.queries = const {},
|
||||
this.queries = const {},
|
||||
@experimental this.include = const {},
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -80,8 +89,14 @@ class UseDao {
|
|||
final List<Type> tables;
|
||||
|
||||
/// {@macro moor_compile_queries_param}
|
||||
@experimental
|
||||
final Map<String, String> queries;
|
||||
|
||||
const UseDao({@required this.tables, @experimental this.queries = const {}});
|
||||
/// {@macro moor_include_param}
|
||||
@experimental
|
||||
final Set<String> include;
|
||||
|
||||
const UseDao(
|
||||
{@required this.tables,
|
||||
this.queries = const {},
|
||||
@experimental this.include = const {}});
|
||||
}
|
||||
|
|
|
@ -19,6 +19,10 @@ abstract class Table {
|
|||
@visibleForOverriding
|
||||
String get tableName => null;
|
||||
|
||||
/// Whether to append a `WITHOUT ROWID` clause in the `CREATE TABLE`
|
||||
/// statement.
|
||||
bool get withoutRowId => false;
|
||||
|
||||
/// Override this to specify custom primary keys:
|
||||
/// ```dart
|
||||
/// class IngredientInRecipes extends Table {
|
||||
|
|
|
@ -10,6 +10,11 @@ abstract class Component {
|
|||
void writeInto(GenerationContext context);
|
||||
}
|
||||
|
||||
/// An enumeration of database systems supported by moor. Only
|
||||
/// [SqlDialect.sqlite] is officially supported, all others are in an
|
||||
/// experimental state at the moment.
|
||||
enum SqlDialect { sqlite, mysql }
|
||||
|
||||
/// Contains information about a query while it's being constructed.
|
||||
class GenerationContext {
|
||||
/// Whether the query obtained by this context operates on multiple tables.
|
||||
|
@ -19,6 +24,7 @@ class GenerationContext {
|
|||
bool hasMultipleTables = false;
|
||||
|
||||
final SqlTypeSystem typeSystem;
|
||||
final SqlDialect dialect;
|
||||
final QueryExecutor executor;
|
||||
|
||||
final List<dynamic> _boundVariables = [];
|
||||
|
@ -32,9 +38,11 @@ class GenerationContext {
|
|||
|
||||
GenerationContext.fromDb(QueryEngine database)
|
||||
: typeSystem = database.typeSystem,
|
||||
executor = database.executor;
|
||||
executor = database.executor,
|
||||
dialect = database.executor?.dialect ?? SqlDialect.sqlite;
|
||||
|
||||
GenerationContext(this.typeSystem, this.executor);
|
||||
GenerationContext(this.typeSystem, this.executor,
|
||||
{this.dialect = SqlDialect.sqlite});
|
||||
|
||||
/// Introduces a variable that will be sent to the database engine. Whenever
|
||||
/// this method is called, a question mark should be added to the [buffer] so
|
||||
|
|
|
@ -244,6 +244,7 @@ mixin QueryEngine on DatabaseConnectionUser {
|
|||
success = true;
|
||||
} catch (e) {
|
||||
await transactionExecutor.rollback();
|
||||
|
||||
// pass the exception on to the one who called transaction()
|
||||
rethrow;
|
||||
} finally {
|
||||
|
@ -332,9 +333,8 @@ abstract class GeneratedDatabase extends DatabaseConnectionUser
|
|||
}
|
||||
}
|
||||
|
||||
/// Closes this database instance and released the resources associated with
|
||||
/// it.
|
||||
void close() {
|
||||
executor.close();
|
||||
/// Closes this database and releases associated resources.
|
||||
Future<void> close() async {
|
||||
await executor.close();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
import 'dart:async';
|
||||
|
||||
import 'package:collection/collection.dart';
|
||||
import 'package:moor/src/runtime/components/component.dart';
|
||||
import 'package:moor/src/runtime/database.dart';
|
||||
import 'package:moor/src/utils/hash.dart';
|
||||
|
||||
|
@ -8,11 +9,17 @@ import 'package:moor/src/utils/hash.dart';
|
|||
/// return their results in a raw form.
|
||||
///
|
||||
/// This is an internal api of moor, which can break often. If you want to
|
||||
/// implement custom database backends, consider using a delegate as described
|
||||
/// [here](https://moor.simonbinder.eu/custom_backend)
|
||||
/// implement custom database backends, consider using the new `backends` API.
|
||||
/// The [moor_flutter implementation](https://github.com/simolus3/moor/blob/develop/moor_flutter/lib/moor_flutter.dart)
|
||||
/// might be useful as a reference. If you want to write your own database
|
||||
/// engine to use with moor and run into issues, please consider creating an
|
||||
/// issue.
|
||||
abstract class QueryExecutor {
|
||||
GeneratedDatabase databaseInfo;
|
||||
|
||||
/// The [SqlDialect] to use for this database engine.
|
||||
SqlDialect get dialect => SqlDialect.sqlite;
|
||||
|
||||
/// Performs the async [fn] after this executor is ready, or directly if it's
|
||||
/// already ready.
|
||||
Future<T> doWhenOpened<T>(FutureOr<T> fn(QueryExecutor e)) {
|
||||
|
@ -40,7 +47,7 @@ abstract class QueryExecutor {
|
|||
|
||||
/// Runs a custom SQL statement without any variables. The result of that
|
||||
/// statement will be ignored.
|
||||
Future<void> runCustom(String statement);
|
||||
Future<void> runCustom(String statement, [List<dynamic> args]);
|
||||
|
||||
/// Prepares the [statements] and then executes each one with all of the
|
||||
/// [BatchedStatement.variables].
|
||||
|
@ -49,10 +56,11 @@ abstract class QueryExecutor {
|
|||
/// Starts a [TransactionExecutor].
|
||||
TransactionExecutor beginTransaction();
|
||||
|
||||
/// Closes this database connection. After this future completes, all further
|
||||
/// calls to this executor should fail.
|
||||
Future<void> close() {
|
||||
return Future.value();
|
||||
/// Closes this database connection and releases all resources associated with
|
||||
/// it. Implementations should also handle [close] calls in a state where the
|
||||
/// database isn't open.
|
||||
Future<void> close() async {
|
||||
// no-op per default for backwards compatibility
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import 'dart:typed_data' show Uint8List;
|
||||
import 'package:moor/moor.dart';
|
||||
import 'package:moor/src/runtime/components/component.dart';
|
||||
import 'package:moor/src/runtime/executor/helpers/results.dart';
|
||||
|
||||
/// An interface that supports sending database queries. Used as a backend for
|
||||
|
@ -12,6 +13,12 @@ import 'package:moor/src/runtime/executor/helpers/results.dart';
|
|||
/// - [String]
|
||||
/// - [Uint8List]
|
||||
abstract class DatabaseDelegate implements QueryDelegate {
|
||||
/// Whether the database managed by this delegate is in a transaction at the
|
||||
/// moment. This field is only set when the [transactionDelegate] is a
|
||||
/// [NoTransactionDelegate], because in that case transactions are run on
|
||||
/// this delegate.
|
||||
bool isInTransaction = false;
|
||||
|
||||
/// Returns an appropriate class to resolve the current schema version in
|
||||
/// this database.
|
||||
///
|
||||
|
@ -48,6 +55,9 @@ abstract class DatabaseDelegate implements QueryDelegate {
|
|||
Future<void> close() async {
|
||||
// default no-op implementation
|
||||
}
|
||||
|
||||
/// The [SqlDialect] understood by this database engine.
|
||||
SqlDialect get dialect => SqlDialect.sqlite;
|
||||
}
|
||||
|
||||
/// An interface which can execute sql statements.
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
import 'dart:async';
|
||||
|
||||
import 'package:moor/moor.dart';
|
||||
import 'package:moor/src/runtime/components/component.dart';
|
||||
import 'package:pedantic/pedantic.dart';
|
||||
import 'package:synchronized/synchronized.dart';
|
||||
|
||||
|
@ -63,10 +64,11 @@ mixin _ExecutorWithQueryDelegate on QueryExecutor {
|
|||
}
|
||||
|
||||
@override
|
||||
Future<void> runCustom(String statement) {
|
||||
Future<void> runCustom(String statement, [List<dynamic> args]) {
|
||||
return _synchronized(() {
|
||||
_log(statement, const []);
|
||||
return impl.runCustom(statement, const []);
|
||||
final resolvedArgs = args ?? const [];
|
||||
_log(statement, resolvedArgs);
|
||||
return impl.runCustom(statement, resolvedArgs);
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -101,6 +103,7 @@ class _TransactionExecutor extends TransactionExecutor
|
|||
String _sendOnRollback;
|
||||
|
||||
Future get completed => _sendCalled.future;
|
||||
bool _sendFakeErrorOnRollback = false;
|
||||
|
||||
_TransactionExecutor(this._db);
|
||||
|
||||
|
@ -123,13 +126,15 @@ class _TransactionExecutor extends TransactionExecutor
|
|||
if (transactionManager is NoTransactionDelegate) {
|
||||
assert(
|
||||
_db.isSequential,
|
||||
'When using the default NoTransactionDelegate, the database must be'
|
||||
'When using the default NoTransactionDelegate, the database must be '
|
||||
'sequential.');
|
||||
// run all the commands on the main database, which we block while the
|
||||
// transaction is running.
|
||||
unawaited(_db._synchronized(() async {
|
||||
impl = _db.delegate;
|
||||
await impl.runCustom(transactionManager.start, const []);
|
||||
await runCustom(transactionManager.start, const []);
|
||||
_db.delegate.isInTransaction = true;
|
||||
|
||||
_sendOnCommit = transactionManager.commit;
|
||||
_sendOnRollback = transactionManager.rollback;
|
||||
|
||||
|
@ -141,6 +146,9 @@ class _TransactionExecutor extends TransactionExecutor
|
|||
} else if (transactionManager is SupportedTransactionDelegate) {
|
||||
transactionManager.startTransaction((transaction) async {
|
||||
impl = transaction;
|
||||
// specs say that the db implementation will perform a rollback when
|
||||
// this future completes with an error.
|
||||
_sendFakeErrorOnRollback = true;
|
||||
transactionStarted.complete();
|
||||
|
||||
// this callback must be running as long as the transaction, so we do
|
||||
|
@ -159,7 +167,7 @@ class _TransactionExecutor extends TransactionExecutor
|
|||
@override
|
||||
Future<void> send() async {
|
||||
if (_sendOnCommit != null) {
|
||||
await impl.runCustom(_sendOnCommit, const []);
|
||||
await runCustom(_sendOnCommit, const []);
|
||||
}
|
||||
|
||||
_sendCalled.complete();
|
||||
|
@ -168,11 +176,16 @@ class _TransactionExecutor extends TransactionExecutor
|
|||
@override
|
||||
Future<void> rollback() async {
|
||||
if (_sendOnRollback != null) {
|
||||
await impl.runCustom(_sendOnRollback, const []);
|
||||
await runCustom(_sendOnRollback, const []);
|
||||
_db.delegate.isInTransaction = false;
|
||||
}
|
||||
|
||||
_sendCalled.completeError(
|
||||
Exception('artificial exception to rollback the transaction'));
|
||||
if (_sendFakeErrorOnRollback) {
|
||||
_sendCalled.completeError(
|
||||
Exception('artificial exception to rollback the transaction'));
|
||||
} else {
|
||||
_sendCalled.complete();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -203,6 +216,8 @@ class _BeforeOpeningExecutor extends QueryExecutor
|
|||
}
|
||||
}
|
||||
|
||||
/// A database engine (implements [QueryExecutor]) that delegated the relevant
|
||||
/// work to a [DatabaseDelegate].
|
||||
class DelegatedDatabase extends QueryExecutor with _ExecutorWithQueryDelegate {
|
||||
final DatabaseDelegate delegate;
|
||||
Completer<bool> _openingCompleter;
|
||||
|
@ -215,6 +230,9 @@ class DelegatedDatabase extends QueryExecutor with _ExecutorWithQueryDelegate {
|
|||
@override
|
||||
QueryDelegate get impl => delegate;
|
||||
|
||||
@override
|
||||
SqlDialect get dialect => delegate.dialect;
|
||||
|
||||
DelegatedDatabase(this.delegate,
|
||||
{this.logStatements, this.isSequential = false}) {
|
||||
// not using default value because it's commonly set to null
|
||||
|
|
|
@ -6,8 +6,6 @@ import 'package:moor/src/runtime/expressions/expression.dart';
|
|||
/// The [CustomExpression.content] will be written into the query without any
|
||||
/// modification.
|
||||
///
|
||||
/// When this statement appears in a query
|
||||
///
|
||||
/// See also:
|
||||
/// - [currentDate] and [currentDateAndTime], which use a [CustomExpression]
|
||||
/// internally.
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
export 'bools.dart' show and, or, not;
|
||||
export 'custom.dart';
|
||||
export 'datetimes.dart';
|
||||
export 'in.dart';
|
||||
export 'null_check.dart';
|
||||
|
|
|
@ -45,25 +45,18 @@ class Variable<T, S extends SqlType<T>> extends Expression<T, S> {
|
|||
/// database engine. For instance, a [DateTime] will me mapped to its unix
|
||||
/// timestamp.
|
||||
dynamic mapToSimpleValue(GenerationContext context) {
|
||||
final type = context.typeSystem.forDartType<T>();
|
||||
return type.mapToSqlVariable(value);
|
||||
return _mapToSimpleValue(context, value);
|
||||
}
|
||||
|
||||
@override
|
||||
void writeInto(GenerationContext context) {
|
||||
if (value != null) {
|
||||
context.buffer.write('?');
|
||||
context.introduceVariable(mapToSimpleValue(context));
|
||||
} else {
|
||||
context.buffer.write('NULL');
|
||||
}
|
||||
_writeVariableIntoContext(context, value);
|
||||
}
|
||||
}
|
||||
|
||||
/// An expression that represents the value of a dart object encoded to sql
|
||||
/// by writing them into the sql statements. This is not supported for all types
|
||||
/// yet as it can be vulnerable to SQL-injection attacks. Please use [Variable]
|
||||
/// instead.
|
||||
/// by writing them into the sql statements. For most cases, consider using
|
||||
/// [Variable] instead.
|
||||
class Constant<T, S extends SqlType<T>> extends Expression<T, S> {
|
||||
const Constant(this.value);
|
||||
|
||||
|
@ -74,7 +67,27 @@ class Constant<T, S extends SqlType<T>> extends Expression<T, S> {
|
|||
|
||||
@override
|
||||
void writeInto(GenerationContext context) {
|
||||
final type = context.typeSystem.forDartType<T>();
|
||||
context.buffer.write(type.mapToSqlConstant(value));
|
||||
// Instead of writing string literals (which we don't support because of
|
||||
// possible sql injections), just write the variable.
|
||||
if (value is String) {
|
||||
_writeVariableIntoContext(context, value);
|
||||
} else {
|
||||
final type = context.typeSystem.forDartType<T>();
|
||||
context.buffer.write(type.mapToSqlConstant(value));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void _writeVariableIntoContext<T>(GenerationContext context, T value) {
|
||||
if (value != null) {
|
||||
context.buffer.write('?');
|
||||
context.introduceVariable(_mapToSimpleValue<T>(context, value));
|
||||
} else {
|
||||
context.buffer.write('NULL');
|
||||
}
|
||||
}
|
||||
|
||||
dynamic _mapToSimpleValue<T>(GenerationContext context, T value) {
|
||||
final type = context.typeSystem.forDartType<T>();
|
||||
return type.mapToSqlVariable(value);
|
||||
}
|
||||
|
|
|
@ -57,7 +57,7 @@ class MigrationStrategy {
|
|||
}
|
||||
|
||||
/// A function that executes queries and ignores what they return.
|
||||
typedef Future<void> SqlExecutor(String sql);
|
||||
typedef Future<void> SqlExecutor(String sql, [List<dynamic> args]);
|
||||
|
||||
class Migrator {
|
||||
final GeneratedDatabase _db;
|
||||
|
@ -72,7 +72,9 @@ class Migrator {
|
|||
|
||||
GenerationContext _createContext() {
|
||||
return GenerationContext(
|
||||
_db.typeSystem, _SimpleSqlAsQueryExecutor(_executor));
|
||||
_db.typeSystem,
|
||||
_SimpleSqlAsQueryExecutor(_executor),
|
||||
);
|
||||
}
|
||||
|
||||
/// Creates the given table if it doesn't exist
|
||||
|
@ -107,15 +109,23 @@ class Migrator {
|
|||
context.buffer.write(')');
|
||||
}
|
||||
|
||||
final constraints = table.asDslTable.customConstraints ?? [];
|
||||
final dslTable = table.asDslTable;
|
||||
final constraints = dslTable.customConstraints ?? [];
|
||||
|
||||
for (var i = 0; i < constraints.length; i++) {
|
||||
context.buffer..write(', ')..write(constraints[i]);
|
||||
}
|
||||
|
||||
context.buffer.write(');');
|
||||
context.buffer.write(')');
|
||||
|
||||
return issueCustomQuery(context.sql);
|
||||
// == true because of nullability
|
||||
if (dslTable.withoutRowId == true) {
|
||||
context.buffer.write(' WITHOUT ROWID');
|
||||
}
|
||||
|
||||
context.buffer.write(';');
|
||||
|
||||
return issueCustomQuery(context.sql, context.boundVariables);
|
||||
}
|
||||
|
||||
/// Deletes the table with the given name. Note that this function does not
|
||||
|
@ -136,8 +146,8 @@ class Migrator {
|
|||
}
|
||||
|
||||
/// Executes the custom query.
|
||||
Future<void> issueCustomQuery(String sql) async {
|
||||
return _executor(sql);
|
||||
Future<void> issueCustomQuery(String sql, [List<dynamic> args]) async {
|
||||
return _executor(sql, args);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -181,8 +191,8 @@ class _SimpleSqlAsQueryExecutor extends QueryExecutor {
|
|||
}
|
||||
|
||||
@override
|
||||
Future<void> runCustom(String statement) {
|
||||
return executor(statement);
|
||||
Future<void> runCustom(String statement, [List<dynamic> args]) {
|
||||
return executor(statement, args);
|
||||
}
|
||||
|
||||
@override
|
||||
|
|
|
@ -47,6 +47,7 @@ class DiffInput<T> {
|
|||
}
|
||||
}
|
||||
|
||||
@Deprecated('Will be removed in moor 2.0')
|
||||
List<Snake> calculateDiff(DiffInput input) {
|
||||
final oldSize = input.from.length;
|
||||
final newSize = input.to.length;
|
||||
|
|
|
@ -101,6 +101,10 @@ class SqlJsDatabase {
|
|||
Uint8List export() {
|
||||
return _obj.callMethod('export') as Uint8List;
|
||||
}
|
||||
|
||||
void close() {
|
||||
_obj.callMethod('close');
|
||||
}
|
||||
}
|
||||
|
||||
class PreparedStatement {
|
||||
|
|
|
@ -4,7 +4,8 @@ part of 'package:moor/moor_web.dart';
|
|||
/// include the latest version of `sql.js` in your html.
|
||||
class WebDatabase extends DelegatedDatabase {
|
||||
WebDatabase(String name, {bool logStatements = false})
|
||||
: super(_WebDelegate(name), logStatements: logStatements);
|
||||
: super(_WebDelegate(name),
|
||||
logStatements: logStatements, isSequential: true);
|
||||
}
|
||||
|
||||
class _WebDelegate extends DatabaseDelegate {
|
||||
|
@ -13,8 +14,23 @@ class _WebDelegate extends DatabaseDelegate {
|
|||
|
||||
String get _persistenceKey => 'moor_db_str_$name';
|
||||
|
||||
bool _inTransaction = false;
|
||||
|
||||
_WebDelegate(this.name);
|
||||
|
||||
@override
|
||||
set isInTransaction(bool value) {
|
||||
_inTransaction = value;
|
||||
|
||||
if (!_inTransaction) {
|
||||
// transaction completed, save the database!
|
||||
_storeDb();
|
||||
}
|
||||
}
|
||||
|
||||
@override
|
||||
bool get isInTransaction => _inTransaction;
|
||||
|
||||
@override
|
||||
final TransactionDelegate transactionDelegate = const NoTransactionDelegate();
|
||||
|
||||
|
@ -40,7 +56,9 @@ class _WebDelegate extends DatabaseDelegate {
|
|||
final prepared = _db.prepare(stmt.sql);
|
||||
|
||||
for (var args in stmt.variables) {
|
||||
prepared.executeWith(args);
|
||||
prepared
|
||||
..executeWith(args)
|
||||
..step();
|
||||
}
|
||||
}
|
||||
return _handlePotentialUpdate();
|
||||
|
@ -85,6 +103,13 @@ class _WebDelegate extends DatabaseDelegate {
|
|||
return _handlePotentialUpdate();
|
||||
}
|
||||
|
||||
@override
|
||||
Future<void> close() {
|
||||
_storeDb();
|
||||
_db?.close();
|
||||
return Future.value();
|
||||
}
|
||||
|
||||
/// Saves the database if the last statement changed rows. As a side-effect,
|
||||
/// saving the database resets the `last_insert_id` counter in sqlite.
|
||||
Future<int> _handlePotentialUpdate() {
|
||||
|
@ -104,9 +129,11 @@ class _WebDelegate extends DatabaseDelegate {
|
|||
}
|
||||
|
||||
void _storeDb() {
|
||||
final data = _db.export();
|
||||
final binStr = bin2str.encode(data);
|
||||
window.localStorage[_persistenceKey] = binStr;
|
||||
if (!isInTransaction) {
|
||||
final data = _db.export();
|
||||
final binStr = bin2str.encode(data);
|
||||
window.localStorage[_persistenceKey] = binStr;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -6,4 +6,5 @@ stages:
|
|||
- dartanalyzer: --fatal-infos --fatal-warnings lib/ test/
|
||||
- dartfmt
|
||||
- unit_test:
|
||||
- command: dart tool/coverage.dart
|
||||
- command: pub run build_runner build -v --delete-conflicting-outputs
|
||||
- test
|
|
@ -0,0 +1,11 @@
|
|||
import 'package:moor/moor.dart';
|
||||
|
||||
part 'custom_tables.g.dart';
|
||||
|
||||
@UseMoor(include: {'tables.moor'})
|
||||
class CustomTablesDb extends _$CustomTablesDb {
|
||||
CustomTablesDb(QueryExecutor e) : super(e);
|
||||
|
||||
@override
|
||||
int get schemaVersion => 1;
|
||||
}
|
|
@ -0,0 +1,451 @@
|
|||
// GENERATED CODE - DO NOT MODIFY BY HAND
|
||||
|
||||
part of 'custom_tables.dart';
|
||||
|
||||
// **************************************************************************
|
||||
// MoorGenerator
|
||||
// **************************************************************************
|
||||
|
||||
// ignore_for_file: unnecessary_brace_in_string_interps
|
||||
class NoId extends DataClass implements Insertable<NoId> {
|
||||
final Uint8List payload;
|
||||
NoId({@required this.payload});
|
||||
factory NoId.fromData(Map<String, dynamic> data, GeneratedDatabase db,
|
||||
{String prefix}) {
|
||||
final effectivePrefix = prefix ?? '';
|
||||
final uint8ListType = db.typeSystem.forDartType<Uint8List>();
|
||||
return NoId(
|
||||
payload: uint8ListType
|
||||
.mapFromDatabaseResponse(data['${effectivePrefix}payload']),
|
||||
);
|
||||
}
|
||||
factory NoId.fromJson(Map<String, dynamic> json,
|
||||
{ValueSerializer serializer = const ValueSerializer.defaults()}) {
|
||||
return NoId(
|
||||
payload: serializer.fromJson<Uint8List>(json['payload']),
|
||||
);
|
||||
}
|
||||
@override
|
||||
Map<String, dynamic> toJson(
|
||||
{ValueSerializer serializer = const ValueSerializer.defaults()}) {
|
||||
return {
|
||||
'payload': serializer.toJson<Uint8List>(payload),
|
||||
};
|
||||
}
|
||||
|
||||
@override
|
||||
T createCompanion<T extends UpdateCompanion<NoId>>(bool nullToAbsent) {
|
||||
return NoIdsCompanion(
|
||||
payload: payload == null && nullToAbsent
|
||||
? const Value.absent()
|
||||
: Value(payload),
|
||||
) as T;
|
||||
}
|
||||
|
||||
NoId copyWith({Uint8List payload}) => NoId(
|
||||
payload: payload ?? this.payload,
|
||||
);
|
||||
@override
|
||||
String toString() {
|
||||
return (StringBuffer('NoId(')..write('payload: $payload')..write(')'))
|
||||
.toString();
|
||||
}
|
||||
|
||||
@override
|
||||
int get hashCode => $mrjf(payload.hashCode);
|
||||
@override
|
||||
bool operator ==(other) =>
|
||||
identical(this, other) || (other is NoId && other.payload == payload);
|
||||
}
|
||||
|
||||
class NoIdsCompanion extends UpdateCompanion<NoId> {
|
||||
final Value<Uint8List> payload;
|
||||
const NoIdsCompanion({
|
||||
this.payload = const Value.absent(),
|
||||
});
|
||||
}
|
||||
|
||||
class NoIds extends Table with TableInfo<NoIds, NoId> {
|
||||
final GeneratedDatabase _db;
|
||||
final String _alias;
|
||||
NoIds(this._db, [this._alias]);
|
||||
final VerificationMeta _payloadMeta = const VerificationMeta('payload');
|
||||
GeneratedBlobColumn _payload;
|
||||
GeneratedBlobColumn get payload => _payload ??= _constructPayload();
|
||||
GeneratedBlobColumn _constructPayload() {
|
||||
return GeneratedBlobColumn('payload', $tableName, false,
|
||||
$customConstraints: 'NOT NULL');
|
||||
}
|
||||
|
||||
@override
|
||||
List<GeneratedColumn> get $columns => [payload];
|
||||
@override
|
||||
NoIds get asDslTable => this;
|
||||
@override
|
||||
String get $tableName => _alias ?? 'no_ids';
|
||||
@override
|
||||
final String actualTableName = 'no_ids';
|
||||
@override
|
||||
VerificationContext validateIntegrity(NoIdsCompanion d,
|
||||
{bool isInserting = false}) {
|
||||
final context = VerificationContext();
|
||||
if (d.payload.present) {
|
||||
context.handle(_payloadMeta,
|
||||
payload.isAcceptableValue(d.payload.value, _payloadMeta));
|
||||
} else if (payload.isRequired && isInserting) {
|
||||
context.missing(_payloadMeta);
|
||||
}
|
||||
return context;
|
||||
}
|
||||
|
||||
@override
|
||||
Set<GeneratedColumn> get $primaryKey => <GeneratedColumn>{};
|
||||
@override
|
||||
NoId map(Map<String, dynamic> data, {String tablePrefix}) {
|
||||
final effectivePrefix = tablePrefix != null ? '$tablePrefix.' : null;
|
||||
return NoId.fromData(data, _db, prefix: effectivePrefix);
|
||||
}
|
||||
|
||||
@override
|
||||
Map<String, Variable> entityToSql(NoIdsCompanion d) {
|
||||
final map = <String, Variable>{};
|
||||
if (d.payload.present) {
|
||||
map['payload'] = Variable<Uint8List, BlobType>(d.payload.value);
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
@override
|
||||
NoIds createAlias(String alias) {
|
||||
return NoIds(_db, alias);
|
||||
}
|
||||
|
||||
@override
|
||||
final bool withoutRowId = true;
|
||||
}
|
||||
|
||||
class WithDefault extends DataClass implements Insertable<WithDefault> {
|
||||
final String a;
|
||||
final int b;
|
||||
WithDefault({this.a, this.b});
|
||||
factory WithDefault.fromData(Map<String, dynamic> data, GeneratedDatabase db,
|
||||
{String prefix}) {
|
||||
final effectivePrefix = prefix ?? '';
|
||||
final stringType = db.typeSystem.forDartType<String>();
|
||||
final intType = db.typeSystem.forDartType<int>();
|
||||
return WithDefault(
|
||||
a: stringType.mapFromDatabaseResponse(data['${effectivePrefix}a']),
|
||||
b: intType.mapFromDatabaseResponse(data['${effectivePrefix}b']),
|
||||
);
|
||||
}
|
||||
factory WithDefault.fromJson(Map<String, dynamic> json,
|
||||
{ValueSerializer serializer = const ValueSerializer.defaults()}) {
|
||||
return WithDefault(
|
||||
a: serializer.fromJson<String>(json['a']),
|
||||
b: serializer.fromJson<int>(json['b']),
|
||||
);
|
||||
}
|
||||
@override
|
||||
Map<String, dynamic> toJson(
|
||||
{ValueSerializer serializer = const ValueSerializer.defaults()}) {
|
||||
return {
|
||||
'a': serializer.toJson<String>(a),
|
||||
'b': serializer.toJson<int>(b),
|
||||
};
|
||||
}
|
||||
|
||||
@override
|
||||
T createCompanion<T extends UpdateCompanion<WithDefault>>(bool nullToAbsent) {
|
||||
return WithDefaultsCompanion(
|
||||
a: a == null && nullToAbsent ? const Value.absent() : Value(a),
|
||||
b: b == null && nullToAbsent ? const Value.absent() : Value(b),
|
||||
) as T;
|
||||
}
|
||||
|
||||
WithDefault copyWith({String a, int b}) => WithDefault(
|
||||
a: a ?? this.a,
|
||||
b: b ?? this.b,
|
||||
);
|
||||
@override
|
||||
String toString() {
|
||||
return (StringBuffer('WithDefault(')
|
||||
..write('a: $a, ')
|
||||
..write('b: $b')
|
||||
..write(')'))
|
||||
.toString();
|
||||
}
|
||||
|
||||
@override
|
||||
int get hashCode => $mrjf($mrjc(a.hashCode, b.hashCode));
|
||||
@override
|
||||
bool operator ==(other) =>
|
||||
identical(this, other) ||
|
||||
(other is WithDefault && other.a == a && other.b == b);
|
||||
}
|
||||
|
||||
class WithDefaultsCompanion extends UpdateCompanion<WithDefault> {
|
||||
final Value<String> a;
|
||||
final Value<int> b;
|
||||
const WithDefaultsCompanion({
|
||||
this.a = const Value.absent(),
|
||||
this.b = const Value.absent(),
|
||||
});
|
||||
}
|
||||
|
||||
class WithDefaults extends Table with TableInfo<WithDefaults, WithDefault> {
|
||||
final GeneratedDatabase _db;
|
||||
final String _alias;
|
||||
WithDefaults(this._db, [this._alias]);
|
||||
final VerificationMeta _aMeta = const VerificationMeta('a');
|
||||
GeneratedTextColumn _a;
|
||||
GeneratedTextColumn get a => _a ??= _constructA();
|
||||
GeneratedTextColumn _constructA() {
|
||||
return GeneratedTextColumn('a', $tableName, true,
|
||||
$customConstraints: 'DEFAULT \'something\'',
|
||||
defaultValue:
|
||||
const CustomExpression<String, StringType>('\'something\''));
|
||||
}
|
||||
|
||||
final VerificationMeta _bMeta = const VerificationMeta('b');
|
||||
GeneratedIntColumn _b;
|
||||
GeneratedIntColumn get b => _b ??= _constructB();
|
||||
GeneratedIntColumn _constructB() {
|
||||
return GeneratedIntColumn('b', $tableName, true,
|
||||
$customConstraints: 'UNIQUE');
|
||||
}
|
||||
|
||||
@override
|
||||
List<GeneratedColumn> get $columns => [a, b];
|
||||
@override
|
||||
WithDefaults get asDslTable => this;
|
||||
@override
|
||||
String get $tableName => _alias ?? 'with_defaults';
|
||||
@override
|
||||
final String actualTableName = 'with_defaults';
|
||||
@override
|
||||
VerificationContext validateIntegrity(WithDefaultsCompanion d,
|
||||
{bool isInserting = false}) {
|
||||
final context = VerificationContext();
|
||||
if (d.a.present) {
|
||||
context.handle(_aMeta, a.isAcceptableValue(d.a.value, _aMeta));
|
||||
} else if (a.isRequired && isInserting) {
|
||||
context.missing(_aMeta);
|
||||
}
|
||||
if (d.b.present) {
|
||||
context.handle(_bMeta, b.isAcceptableValue(d.b.value, _bMeta));
|
||||
} else if (b.isRequired && isInserting) {
|
||||
context.missing(_bMeta);
|
||||
}
|
||||
return context;
|
||||
}
|
||||
|
||||
@override
|
||||
Set<GeneratedColumn> get $primaryKey => <GeneratedColumn>{};
|
||||
@override
|
||||
WithDefault map(Map<String, dynamic> data, {String tablePrefix}) {
|
||||
final effectivePrefix = tablePrefix != null ? '$tablePrefix.' : null;
|
||||
return WithDefault.fromData(data, _db, prefix: effectivePrefix);
|
||||
}
|
||||
|
||||
@override
|
||||
Map<String, Variable> entityToSql(WithDefaultsCompanion d) {
|
||||
final map = <String, Variable>{};
|
||||
if (d.a.present) {
|
||||
map['a'] = Variable<String, StringType>(d.a.value);
|
||||
}
|
||||
if (d.b.present) {
|
||||
map['b'] = Variable<int, IntType>(d.b.value);
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
@override
|
||||
WithDefaults createAlias(String alias) {
|
||||
return WithDefaults(_db, alias);
|
||||
}
|
||||
}
|
||||
|
||||
class WithConstraint extends DataClass implements Insertable<WithConstraint> {
|
||||
final String a;
|
||||
final int b;
|
||||
final double c;
|
||||
WithConstraint({this.a, @required this.b, this.c});
|
||||
factory WithConstraint.fromData(
|
||||
Map<String, dynamic> data, GeneratedDatabase db,
|
||||
{String prefix}) {
|
||||
final effectivePrefix = prefix ?? '';
|
||||
final stringType = db.typeSystem.forDartType<String>();
|
||||
final intType = db.typeSystem.forDartType<int>();
|
||||
final doubleType = db.typeSystem.forDartType<double>();
|
||||
return WithConstraint(
|
||||
a: stringType.mapFromDatabaseResponse(data['${effectivePrefix}a']),
|
||||
b: intType.mapFromDatabaseResponse(data['${effectivePrefix}b']),
|
||||
c: doubleType.mapFromDatabaseResponse(data['${effectivePrefix}c']),
|
||||
);
|
||||
}
|
||||
factory WithConstraint.fromJson(Map<String, dynamic> json,
|
||||
{ValueSerializer serializer = const ValueSerializer.defaults()}) {
|
||||
return WithConstraint(
|
||||
a: serializer.fromJson<String>(json['a']),
|
||||
b: serializer.fromJson<int>(json['b']),
|
||||
c: serializer.fromJson<double>(json['c']),
|
||||
);
|
||||
}
|
||||
@override
|
||||
Map<String, dynamic> toJson(
|
||||
{ValueSerializer serializer = const ValueSerializer.defaults()}) {
|
||||
return {
|
||||
'a': serializer.toJson<String>(a),
|
||||
'b': serializer.toJson<int>(b),
|
||||
'c': serializer.toJson<double>(c),
|
||||
};
|
||||
}
|
||||
|
||||
@override
|
||||
T createCompanion<T extends UpdateCompanion<WithConstraint>>(
|
||||
bool nullToAbsent) {
|
||||
return WithConstraintsCompanion(
|
||||
a: a == null && nullToAbsent ? const Value.absent() : Value(a),
|
||||
b: b == null && nullToAbsent ? const Value.absent() : Value(b),
|
||||
c: c == null && nullToAbsent ? const Value.absent() : Value(c),
|
||||
) as T;
|
||||
}
|
||||
|
||||
WithConstraint copyWith({String a, int b, double c}) => WithConstraint(
|
||||
a: a ?? this.a,
|
||||
b: b ?? this.b,
|
||||
c: c ?? this.c,
|
||||
);
|
||||
@override
|
||||
String toString() {
|
||||
return (StringBuffer('WithConstraint(')
|
||||
..write('a: $a, ')
|
||||
..write('b: $b, ')
|
||||
..write('c: $c')
|
||||
..write(')'))
|
||||
.toString();
|
||||
}
|
||||
|
||||
@override
|
||||
int get hashCode => $mrjf($mrjc(a.hashCode, $mrjc(b.hashCode, c.hashCode)));
|
||||
@override
|
||||
bool operator ==(other) =>
|
||||
identical(this, other) ||
|
||||
(other is WithConstraint && other.a == a && other.b == b && other.c == c);
|
||||
}
|
||||
|
||||
class WithConstraintsCompanion extends UpdateCompanion<WithConstraint> {
|
||||
final Value<String> a;
|
||||
final Value<int> b;
|
||||
final Value<double> c;
|
||||
const WithConstraintsCompanion({
|
||||
this.a = const Value.absent(),
|
||||
this.b = const Value.absent(),
|
||||
this.c = const Value.absent(),
|
||||
});
|
||||
}
|
||||
|
||||
class WithConstraints extends Table
|
||||
with TableInfo<WithConstraints, WithConstraint> {
|
||||
final GeneratedDatabase _db;
|
||||
final String _alias;
|
||||
WithConstraints(this._db, [this._alias]);
|
||||
final VerificationMeta _aMeta = const VerificationMeta('a');
|
||||
GeneratedTextColumn _a;
|
||||
GeneratedTextColumn get a => _a ??= _constructA();
|
||||
GeneratedTextColumn _constructA() {
|
||||
return GeneratedTextColumn('a', $tableName, true, $customConstraints: '');
|
||||
}
|
||||
|
||||
final VerificationMeta _bMeta = const VerificationMeta('b');
|
||||
GeneratedIntColumn _b;
|
||||
GeneratedIntColumn get b => _b ??= _constructB();
|
||||
GeneratedIntColumn _constructB() {
|
||||
return GeneratedIntColumn('b', $tableName, false,
|
||||
$customConstraints: 'NOT NULL');
|
||||
}
|
||||
|
||||
final VerificationMeta _cMeta = const VerificationMeta('c');
|
||||
GeneratedRealColumn _c;
|
||||
GeneratedRealColumn get c => _c ??= _constructC();
|
||||
GeneratedRealColumn _constructC() {
|
||||
return GeneratedRealColumn('c', $tableName, true, $customConstraints: '');
|
||||
}
|
||||
|
||||
@override
|
||||
List<GeneratedColumn> get $columns => [a, b, c];
|
||||
@override
|
||||
WithConstraints get asDslTable => this;
|
||||
@override
|
||||
String get $tableName => _alias ?? 'with_constraints';
|
||||
@override
|
||||
final String actualTableName = 'with_constraints';
|
||||
@override
|
||||
VerificationContext validateIntegrity(WithConstraintsCompanion d,
|
||||
{bool isInserting = false}) {
|
||||
final context = VerificationContext();
|
||||
if (d.a.present) {
|
||||
context.handle(_aMeta, a.isAcceptableValue(d.a.value, _aMeta));
|
||||
} else if (a.isRequired && isInserting) {
|
||||
context.missing(_aMeta);
|
||||
}
|
||||
if (d.b.present) {
|
||||
context.handle(_bMeta, b.isAcceptableValue(d.b.value, _bMeta));
|
||||
} else if (b.isRequired && isInserting) {
|
||||
context.missing(_bMeta);
|
||||
}
|
||||
if (d.c.present) {
|
||||
context.handle(_cMeta, c.isAcceptableValue(d.c.value, _cMeta));
|
||||
} else if (c.isRequired && isInserting) {
|
||||
context.missing(_cMeta);
|
||||
}
|
||||
return context;
|
||||
}
|
||||
|
||||
@override
|
||||
Set<GeneratedColumn> get $primaryKey => <GeneratedColumn>{};
|
||||
@override
|
||||
WithConstraint map(Map<String, dynamic> data, {String tablePrefix}) {
|
||||
final effectivePrefix = tablePrefix != null ? '$tablePrefix.' : null;
|
||||
return WithConstraint.fromData(data, _db, prefix: effectivePrefix);
|
||||
}
|
||||
|
||||
@override
|
||||
Map<String, Variable> entityToSql(WithConstraintsCompanion d) {
|
||||
final map = <String, Variable>{};
|
||||
if (d.a.present) {
|
||||
map['a'] = Variable<String, StringType>(d.a.value);
|
||||
}
|
||||
if (d.b.present) {
|
||||
map['b'] = Variable<int, IntType>(d.b.value);
|
||||
}
|
||||
if (d.c.present) {
|
||||
map['c'] = Variable<double, RealType>(d.c.value);
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
@override
|
||||
WithConstraints createAlias(String alias) {
|
||||
return WithConstraints(_db, alias);
|
||||
}
|
||||
|
||||
@override
|
||||
final List<String> customConstraints = const [
|
||||
'FOREIGN KEY (a, b) REFERENCES with_defaults (a, b)'
|
||||
];
|
||||
}
|
||||
|
||||
abstract class _$CustomTablesDb extends GeneratedDatabase {
|
||||
_$CustomTablesDb(QueryExecutor e)
|
||||
: super(const SqlTypeSystem.withDefaults(), e);
|
||||
NoIds _noIds;
|
||||
NoIds get noIds => _noIds ??= NoIds(this);
|
||||
WithDefaults _withDefaults;
|
||||
WithDefaults get withDefaults => _withDefaults ??= WithDefaults(this);
|
||||
WithConstraints _withConstraints;
|
||||
WithConstraints get withConstraints =>
|
||||
_withConstraints ??= WithConstraints(this);
|
||||
@override
|
||||
List<TableInfo> get allTables => [noIds, withDefaults, withConstraints];
|
||||
}
|
|
@ -0,0 +1,16 @@
|
|||
CREATE TABLE no_ids (
|
||||
payload BLOB NOT NULL
|
||||
) WITHOUT ROWID;
|
||||
|
||||
CREATE TABLE with_defaults (
|
||||
a TEXT DEFAULT 'something',
|
||||
b INT UNIQUE
|
||||
)
|
||||
|
||||
CREATE TABLE with_constraints (
|
||||
a TEXT,
|
||||
b INT NOT NULL,
|
||||
c FLOAT(10, 2),
|
||||
|
||||
FOREIGN KEY (a, b) REFERENCES with_defaults (a, b)
|
||||
)
|
|
@ -95,6 +95,7 @@ class CustomConverter extends TypeConverter<MyCustomObject, String> {
|
|||
'withIn': 'SELECT * FROM todos WHERE title = ?2 OR id IN ? OR title = ?1',
|
||||
'search':
|
||||
'SELECT * FROM todos WHERE CASE WHEN -1 = :id THEN 1 ELSE id = :id END',
|
||||
'findCustom': 'SELECT custom FROM table_without_p_k WHERE some_float < 10',
|
||||
},
|
||||
)
|
||||
class TodoDb extends _$TodoDb {
|
||||
|
|
|
@ -855,13 +855,12 @@ class TableWithoutPKData extends DataClass
|
|||
final intType = db.typeSystem.forDartType<int>();
|
||||
final doubleType = db.typeSystem.forDartType<double>();
|
||||
final stringType = db.typeSystem.forDartType<String>();
|
||||
final customConverter = const CustomConverter();
|
||||
return TableWithoutPKData(
|
||||
notReallyAnId: intType
|
||||
.mapFromDatabaseResponse(data['${effectivePrefix}not_really_an_id']),
|
||||
someFloat: doubleType
|
||||
.mapFromDatabaseResponse(data['${effectivePrefix}some_float']),
|
||||
custom: customConverter.mapToDart(
|
||||
custom: $TableWithoutPKTable.$converter0.mapToDart(
|
||||
stringType.mapFromDatabaseResponse(data['${effectivePrefix}custom'])),
|
||||
);
|
||||
}
|
||||
|
@ -1029,7 +1028,7 @@ class $TableWithoutPKTable extends TableWithoutPK
|
|||
map['some_float'] = Variable<double, RealType>(d.someFloat.value);
|
||||
}
|
||||
if (d.custom.present) {
|
||||
final converter = const CustomConverter();
|
||||
final converter = $TableWithoutPKTable.$converter0;
|
||||
map['custom'] =
|
||||
Variable<String, StringType>(converter.mapToSql(d.custom.value));
|
||||
}
|
||||
|
@ -1040,6 +1039,8 @@ class $TableWithoutPKTable extends TableWithoutPK
|
|||
$TableWithoutPKTable createAlias(String alias) {
|
||||
return $TableWithoutPKTable(_db, alias);
|
||||
}
|
||||
|
||||
static CustomConverter $converter0 = const CustomConverter();
|
||||
}
|
||||
|
||||
class PureDefault extends DataClass implements Insertable<PureDefault> {
|
||||
|
@ -1205,6 +1206,13 @@ class AllTodosWithCategoryResult {
|
|||
});
|
||||
}
|
||||
|
||||
class FindCustomResult {
|
||||
final MyCustomObject custom;
|
||||
FindCustomResult({
|
||||
this.custom,
|
||||
});
|
||||
}
|
||||
|
||||
abstract class _$TodoDb extends GeneratedDatabase {
|
||||
_$TodoDb(QueryExecutor e) : super(const SqlTypeSystem.withDefaults(), e);
|
||||
$TodosTableTable _todosTable;
|
||||
|
@ -1329,6 +1337,29 @@ abstract class _$TodoDb extends GeneratedDatabase {
|
|||
}).map((rows) => rows.map(_rowToTodoEntry).toList());
|
||||
}
|
||||
|
||||
FindCustomResult _rowToFindCustomResult(QueryRow row) {
|
||||
return FindCustomResult(
|
||||
custom:
|
||||
$TableWithoutPKTable.$converter0.mapToDart(row.readString('custom')),
|
||||
);
|
||||
}
|
||||
|
||||
Future<List<FindCustomResult>> findCustom(
|
||||
{@Deprecated('No longer needed with Moor 1.6 - see the changelog for details')
|
||||
QueryEngine operateOn}) {
|
||||
return (operateOn ?? this).customSelect(
|
||||
'SELECT custom FROM table_without_p_k WHERE some_float < 10',
|
||||
variables: []).then((rows) => rows.map(_rowToFindCustomResult).toList());
|
||||
}
|
||||
|
||||
Stream<List<FindCustomResult>> watchFindCustom() {
|
||||
return customSelectStream(
|
||||
'SELECT custom FROM table_without_p_k WHERE some_float < 10',
|
||||
variables: [],
|
||||
readsFrom: {tableWithoutPK})
|
||||
.map((rows) => rows.map(_rowToFindCustomResult).toList());
|
||||
}
|
||||
|
||||
@override
|
||||
List<TableInfo> get allTables => [
|
||||
todosTable,
|
||||
|
|
|
@ -47,7 +47,7 @@ class MockStreamQueries extends Mock implements StreamQueryStore {}
|
|||
|
||||
// used so that we can mock the SqlExecutor typedef
|
||||
abstract class SqlExecutorAsClass {
|
||||
Future<void> call(String sql);
|
||||
Future<void> call(String sql, [List<dynamic> args]);
|
||||
}
|
||||
|
||||
class MockQueryExecutor extends Mock implements SqlExecutorAsClass {}
|
||||
|
|
|
@ -20,46 +20,58 @@ void main() {
|
|||
await Migrator(db, mockQueryExecutor).createAllTables();
|
||||
|
||||
// should create todos, categories, users and shared_todos table
|
||||
verify(mockQueryExecutor.call('CREATE TABLE IF NOT EXISTS todos '
|
||||
verify(mockQueryExecutor.call(
|
||||
'CREATE TABLE IF NOT EXISTS todos '
|
||||
'(id INTEGER PRIMARY KEY AUTOINCREMENT, title VARCHAR NULL, '
|
||||
'content VARCHAR NOT NULL, target_date INTEGER NULL, '
|
||||
'category INTEGER NULL);'));
|
||||
'category INTEGER NULL);',
|
||||
[]));
|
||||
|
||||
verify(mockQueryExecutor.call('CREATE TABLE IF NOT EXISTS categories '
|
||||
'(id INTEGER PRIMARY KEY AUTOINCREMENT, `desc` VARCHAR NOT NULL UNIQUE);'));
|
||||
verify(mockQueryExecutor.call(
|
||||
'CREATE TABLE IF NOT EXISTS categories '
|
||||
'(id INTEGER PRIMARY KEY AUTOINCREMENT, `desc` VARCHAR NOT NULL UNIQUE);',
|
||||
[]));
|
||||
|
||||
verify(mockQueryExecutor.call('CREATE TABLE IF NOT EXISTS users '
|
||||
verify(mockQueryExecutor.call(
|
||||
'CREATE TABLE IF NOT EXISTS users '
|
||||
'(id INTEGER PRIMARY KEY AUTOINCREMENT, name VARCHAR NOT NULL, '
|
||||
'is_awesome BOOLEAN NOT NULL DEFAULT 1 CHECK (is_awesome in (0, 1)), '
|
||||
'profile_picture BLOB NOT NULL, '
|
||||
'creation_time INTEGER NOT NULL '
|
||||
"DEFAULT (strftime('%s', CURRENT_TIMESTAMP)));"));
|
||||
"DEFAULT (strftime('%s', CURRENT_TIMESTAMP)));",
|
||||
[]));
|
||||
|
||||
verify(mockQueryExecutor.call('CREATE TABLE IF NOT EXISTS shared_todos ('
|
||||
verify(mockQueryExecutor.call(
|
||||
'CREATE TABLE IF NOT EXISTS shared_todos ('
|
||||
'todo INTEGER NOT NULL, '
|
||||
'user INTEGER NOT NULL, '
|
||||
'PRIMARY KEY (todo, user), '
|
||||
'FOREIGN KEY (todo) REFERENCES todos(id), '
|
||||
'FOREIGN KEY (user) REFERENCES users(id)'
|
||||
');'));
|
||||
');',
|
||||
[]));
|
||||
|
||||
verify(mockQueryExecutor.call('CREATE TABLE IF NOT EXISTS '
|
||||
verify(mockQueryExecutor.call(
|
||||
'CREATE TABLE IF NOT EXISTS '
|
||||
'table_without_p_k ('
|
||||
'not_really_an_id INTEGER NOT NULL, '
|
||||
'some_float REAL NOT NULL, '
|
||||
'custom VARCHAR NOT NULL'
|
||||
');'));
|
||||
');',
|
||||
[]));
|
||||
});
|
||||
|
||||
test('creates individual tables', () async {
|
||||
await Migrator(db, mockQueryExecutor).createTable(db.users);
|
||||
|
||||
verify(mockQueryExecutor.call('CREATE TABLE IF NOT EXISTS users '
|
||||
verify(mockQueryExecutor.call(
|
||||
'CREATE TABLE IF NOT EXISTS users '
|
||||
'(id INTEGER PRIMARY KEY AUTOINCREMENT, name VARCHAR NOT NULL, '
|
||||
'is_awesome BOOLEAN NOT NULL DEFAULT 1 CHECK (is_awesome in (0, 1)), '
|
||||
'profile_picture BLOB NOT NULL, '
|
||||
'creation_time INTEGER NOT NULL '
|
||||
"DEFAULT (strftime('%s', CURRENT_TIMESTAMP)));"));
|
||||
"DEFAULT (strftime('%s', CURRENT_TIMESTAMP)));",
|
||||
[]));
|
||||
});
|
||||
|
||||
test('drops tables', () async {
|
||||
|
|
|
@ -58,7 +58,9 @@ void main() {
|
|||
expect(second, emits(isEmpty));
|
||||
|
||||
await pumpEventQueue(times: 1);
|
||||
verifyZeroInteractions(executor);
|
||||
// calling executor.dialect is ok, it's needed to construct the statement
|
||||
verify(executor.dialect);
|
||||
verifyNoMoreInteractions(executor);
|
||||
});
|
||||
|
||||
test('every stream instance can be listened to', () async {
|
||||
|
|
|
@ -6,8 +6,6 @@ import 'package:grinder/grinder_sdk.dart';
|
|||
import 'package:coverage/coverage.dart';
|
||||
import 'package:path/path.dart';
|
||||
|
||||
import 'format_coverage.dart' as fc;
|
||||
|
||||
Future<void> main(List<String> args) async {
|
||||
// First, generate the build script, see
|
||||
// https://github.com/dart-lang/build/blob/3208cfe94c475ed3e1ec44c227aadaddaeac263d/build_runner/bin/build_runner.dart#L65
|
||||
|
@ -19,7 +17,4 @@ Future<void> main(List<String> args) async {
|
|||
final coverage = await runAndCollect(tests, onExit: true, printOutput: true);
|
||||
|
||||
File('coverage.json').writeAsStringSync(json.encode(coverage));
|
||||
|
||||
print('formatting to .lcov format');
|
||||
await fc.main();
|
||||
}
|
||||
|
|
|
@ -9,7 +9,10 @@ Future main() async {
|
|||
packagesPath: 'moor/.packages',
|
||||
);
|
||||
|
||||
final coverage = await parseCoverage([File('moor/coverage.json')], 1);
|
||||
final coverage = await parseCoverage([
|
||||
File('moor/coverage.json'),
|
||||
File('sqlparser/coverage.json'),
|
||||
], 1);
|
||||
|
||||
// report coverage for the moor and moor_generator package
|
||||
final lcov = await LcovFormatter(
|
||||
|
@ -17,9 +20,10 @@ Future main() async {
|
|||
reportOn: [
|
||||
'moor/lib/',
|
||||
'moor_generator/lib',
|
||||
'sqlparser/lib',
|
||||
],
|
||||
basePath: '.',
|
||||
).format(coverage);
|
||||
|
||||
File('moor/lcov.info').writeAsStringSync(lcov);
|
||||
File('lcov.info').writeAsStringSync(lcov);
|
||||
}
|
||||
|
|
|
@ -5,7 +5,6 @@ import 'package:moor_example/database/database.dart';
|
|||
import 'package:moor_example/main.dart';
|
||||
import 'package:moor_example/widgets/categories_drawer.dart';
|
||||
import 'package:moor_example/widgets/todo_card.dart';
|
||||
import 'package:moor_flutter/moor_flutter.dart';
|
||||
|
||||
// ignore_for_file: prefer_const_constructors
|
||||
|
||||
|
@ -32,39 +31,23 @@ class HomeScreenState extends State<HomeScreen> {
|
|||
drawer: CategoriesDrawer(),
|
||||
// A moorAnimatedList automatically animates incoming and leaving items, we only
|
||||
// have to tell it what data to display and how to turn data into widgets.
|
||||
body: MoorAnimatedList<EntryWithCategory>(
|
||||
// we want to show an updating stream of all relevant entries
|
||||
body: StreamBuilder<List<EntryWithCategory>>(
|
||||
stream: bloc.homeScreenEntries,
|
||||
// consider items equal if their id matches. Otherwise, we'd get an
|
||||
// animation of an old item leaving and another one coming in every time
|
||||
// the content of an item changed!
|
||||
equals: (a, b) => a.entry.id == b.entry.id,
|
||||
itemBuilder: (ctx, item, animation) {
|
||||
// When a new item arrives, it will expand vertically
|
||||
return SizeTransition(
|
||||
key: ObjectKey(item.entry.id),
|
||||
sizeFactor: animation,
|
||||
axis: Axis.vertical,
|
||||
child: TodoCard(item.entry),
|
||||
);
|
||||
},
|
||||
removedItemBuilder: (ctx, item, animation) {
|
||||
// and it will leave the same way after being deleted.
|
||||
return SizeTransition(
|
||||
key: ObjectKey(item.entry.id),
|
||||
sizeFactor: animation,
|
||||
axis: Axis.vertical,
|
||||
child: AnimatedBuilder(
|
||||
animation:
|
||||
CurvedAnimation(parent: animation, curve: Curves.easeOut),
|
||||
child: TodoCard(item.entry),
|
||||
builder: (context, child) {
|
||||
return Opacity(
|
||||
opacity: animation.value,
|
||||
child: child,
|
||||
);
|
||||
},
|
||||
),
|
||||
builder: (context, snapshot) {
|
||||
if (!snapshot.hasData) {
|
||||
return const Align(
|
||||
alignment: Alignment.center,
|
||||
child: CircularProgressIndicator(),
|
||||
);
|
||||
}
|
||||
|
||||
final activeTodos = snapshot.data;
|
||||
|
||||
return ListView.builder(
|
||||
itemCount: activeTodos.length,
|
||||
itemBuilder: (context, index) {
|
||||
return TodoCard(activeTodos[index].entry);
|
||||
},
|
||||
);
|
||||
},
|
||||
),
|
||||
|
|
|
@ -22,7 +22,11 @@ class _SqfliteDelegate extends DatabaseDelegate with _SqfliteExecutor {
|
|||
final bool inDbFolder;
|
||||
final String path;
|
||||
|
||||
_SqfliteDelegate(this.inDbFolder, this.path);
|
||||
bool singleInstance;
|
||||
|
||||
_SqfliteDelegate(this.inDbFolder, this.path, {this.singleInstance}) {
|
||||
singleInstance ??= true;
|
||||
}
|
||||
|
||||
@override
|
||||
DbVersionDelegate get versionDelegate {
|
||||
|
@ -57,8 +61,14 @@ class _SqfliteDelegate extends DatabaseDelegate with _SqfliteExecutor {
|
|||
onUpgrade: (db, from, to) {
|
||||
_loadedSchemaVersion = from;
|
||||
},
|
||||
singleInstance: singleInstance,
|
||||
);
|
||||
}
|
||||
|
||||
@override
|
||||
Future<void> close() {
|
||||
return db.close();
|
||||
}
|
||||
}
|
||||
|
||||
class _SqfliteTransactionDelegate extends SupportedTransactionDelegate {
|
||||
|
@ -71,6 +81,10 @@ class _SqfliteTransactionDelegate extends SupportedTransactionDelegate {
|
|||
delegate.db.transaction((transaction) async {
|
||||
final executor = _SqfliteTransactionExecutor(transaction);
|
||||
await run(executor);
|
||||
}).catchError((_) {
|
||||
// Ignore the errr! We send a fake exception to indicate a rollback.
|
||||
// sqflite will rollback, but the exception will bubble up. Here we stop
|
||||
// the exception.
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@ -122,10 +136,22 @@ mixin _SqfliteExecutor on QueryDelegate {
|
|||
|
||||
/// A query executor that uses sqflite internally.
|
||||
class FlutterQueryExecutor extends DelegatedDatabase {
|
||||
FlutterQueryExecutor({@required String path, bool logStatements})
|
||||
: super(_SqfliteDelegate(false, path), logStatements: logStatements);
|
||||
/// A query executor that will store the database in the file declared by
|
||||
/// [path]. If [logStatements] is true, statements sent to the database will
|
||||
/// be [print]ed, which can be handy for debugging. The [singleInstance]
|
||||
/// parameter sets the corresponding parameter on [s.openDatabase].
|
||||
FlutterQueryExecutor(
|
||||
{@required String path, bool logStatements, bool singleInstance})
|
||||
: super(_SqfliteDelegate(false, path, singleInstance: singleInstance),
|
||||
logStatements: logStatements);
|
||||
|
||||
/// A query executor that will store the database in the file declared by
|
||||
/// [path], which will be resolved relative to [s.getDatabasesPath()].
|
||||
/// If [logStatements] is true, statements sent to the database will
|
||||
/// be [print]ed, which can be handy for debugging. The [singleInstance]
|
||||
/// parameter sets the corresponding parameter on [s.openDatabase].
|
||||
FlutterQueryExecutor.inDatabaseFolder(
|
||||
{@required String path, bool logStatements})
|
||||
: super(_SqfliteDelegate(true, path), logStatements: logStatements);
|
||||
{@required String path, bool logStatements, bool singleInstance})
|
||||
: super(_SqfliteDelegate(true, path, singleInstance: singleInstance),
|
||||
logStatements: logStatements);
|
||||
}
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
import 'dart:async';
|
||||
|
||||
import 'package:flutter/widgets.dart';
|
||||
|
||||
// ignore: deprecated_member_use
|
||||
import 'package:moor/diff_util.dart';
|
||||
|
||||
typedef Widget ItemBuilder<T>(
|
||||
|
@ -9,6 +11,8 @@ typedef Widget RemovedItemBuilder<T>(
|
|||
BuildContext context, T item, Animation<double> anim);
|
||||
|
||||
/// An [AnimatedList] that shows the result of a moor query stream.
|
||||
@Deprecated('Will be removed in moor 2.0. You could use the '
|
||||
'animated_stream_list package as an alternative')
|
||||
class MoorAnimatedList<T> extends StatefulWidget {
|
||||
final Stream<List<T>> stream;
|
||||
final ItemBuilder<T> itemBuilder;
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
import 'package:analyzer/dart/ast/ast.dart';
|
||||
import 'package:analyzer/dart/element/type.dart';
|
||||
import 'package:built_value/built_value.dart';
|
||||
import 'package:moor_generator/src/model/used_type_converter.dart';
|
||||
|
||||
part 'specified_column.g.dart';
|
||||
|
||||
|
@ -61,6 +60,15 @@ const Map<ColumnType, String> createVariable = {
|
|||
ColumnType.real: 'Variable.withReal',
|
||||
};
|
||||
|
||||
const Map<ColumnType, String> sqlTypes = {
|
||||
ColumnType.boolean: 'BoolType',
|
||||
ColumnType.text: 'StringType',
|
||||
ColumnType.integer: 'IntType',
|
||||
ColumnType.datetime: 'DateTimeType',
|
||||
ColumnType.blob: 'BlobType',
|
||||
ColumnType.real: 'RealType',
|
||||
};
|
||||
|
||||
/// A column, as specified by a getter in a table.
|
||||
class SpecifiedColumn {
|
||||
/// The getter name of this column in the table class. It will also be used
|
||||
|
@ -95,29 +103,18 @@ class SpecifiedColumn {
|
|||
/// default ones.
|
||||
final String customConstraints;
|
||||
|
||||
/// If a default expression has been provided as the argument of
|
||||
/// ColumnBuilder.withDefault, contains the Dart code that references that
|
||||
/// expression.
|
||||
final Expression defaultArgument;
|
||||
/// Dart code that generates the default expression for this column, or null
|
||||
/// if there is no default expression.
|
||||
final String defaultArgument;
|
||||
|
||||
/// If a type converter has been specified as the argument of
|
||||
/// ColumnBuilder.map, this contains the Dart code that references that type
|
||||
/// converter.
|
||||
final Expression typeConverter;
|
||||
|
||||
/// If the type of this column has been overridden, contains the actual Dart
|
||||
/// type. Otherwise null.
|
||||
///
|
||||
/// Column types can be overridden with type converters. For instance, if
|
||||
/// `C` was a type converter that converts `D` to `num`s, the column generated
|
||||
/// by `real().map(const C())()` would have type `D` instead of `num`.
|
||||
final DartType overriddenDartType;
|
||||
/// The [UsedTypeConverter], if one has been set on this column.
|
||||
final UsedTypeConverter typeConverter;
|
||||
|
||||
/// The dart type that matches the values of this column. For instance, if a
|
||||
/// table has declared an `IntColumn`, the matching dart type name would be [int].
|
||||
String get dartTypeName {
|
||||
if (overriddenDartType != null) {
|
||||
return overriddenDartType.name;
|
||||
if (typeConverter != null) {
|
||||
return typeConverter.mappedType?.name;
|
||||
}
|
||||
return variableTypeName;
|
||||
}
|
||||
|
@ -152,14 +149,7 @@ class SpecifiedColumn {
|
|||
|
||||
/// The class inside the moor library that represents the same sql type as
|
||||
/// this column.
|
||||
String get sqlTypeName => const {
|
||||
ColumnType.boolean: 'BoolType',
|
||||
ColumnType.text: 'StringType',
|
||||
ColumnType.integer: 'IntType',
|
||||
ColumnType.datetime: 'DateTimeType',
|
||||
ColumnType.blob: 'BlobType',
|
||||
ColumnType.real: 'RealType',
|
||||
}[type];
|
||||
String get sqlTypeName => sqlTypes[type];
|
||||
|
||||
const SpecifiedColumn({
|
||||
this.type,
|
||||
|
@ -172,7 +162,6 @@ class SpecifiedColumn {
|
|||
this.features = const [],
|
||||
this.defaultArgument,
|
||||
this.typeConverter,
|
||||
this.overriddenDartType,
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
@ -1,13 +1,25 @@
|
|||
import 'package:moor_generator/src/model/specified_column.dart';
|
||||
import 'package:analyzer/dart/element/element.dart';
|
||||
import 'package:moor_generator/src/model/used_type_converter.dart';
|
||||
import 'package:recase/recase.dart';
|
||||
|
||||
/// A parsed table, declared in code by extending `Table` and referencing that
|
||||
/// table in `@UseMoor` or `@UseDao`.
|
||||
class SpecifiedTable {
|
||||
/// The [ClassElement] for the class that declares this table.
|
||||
/// The [ClassElement] for the class that declares this table or null if
|
||||
/// the table was inferred from a `CREATE TABLE` statement.
|
||||
final ClassElement fromClass;
|
||||
|
||||
/// If [fromClass] is null, another source to use when determining the name
|
||||
/// of this table in generated Dart code.
|
||||
final String _overriddenName;
|
||||
|
||||
/// Whether this table was created from an `ALTER TABLE` statement instead of
|
||||
/// a Dart class.
|
||||
bool get isFromSql => _overriddenName != null;
|
||||
|
||||
String get _baseName => _overriddenName ?? fromClass.name;
|
||||
|
||||
/// The columns declared in this table.
|
||||
final List<SpecifiedColumn> columns;
|
||||
|
||||
|
@ -17,25 +29,51 @@ class SpecifiedTable {
|
|||
/// The name for the data class associated with this table
|
||||
final String dartTypeName;
|
||||
|
||||
String get tableFieldName => ReCase(fromClass.name).camelCase;
|
||||
String get tableInfoName => tableInfoNameForTableClass(fromClass);
|
||||
String get updateCompanionName => _updateCompanionName(fromClass);
|
||||
String get tableFieldName => _dbFieldName(_baseName);
|
||||
String get tableInfoName {
|
||||
// if this table was parsed from sql, a user might want to refer to it
|
||||
// directly because there is no user defined parent class.
|
||||
// So, turn CREATE TABLE users into something called "Users" instead of
|
||||
// "$UsersTable".
|
||||
if (_overriddenName != null) {
|
||||
return _overriddenName;
|
||||
}
|
||||
return tableInfoNameForTableClass(_baseName);
|
||||
}
|
||||
|
||||
String get updateCompanionName => _updateCompanionName(_baseName);
|
||||
|
||||
/// The set of primary keys, if they have been explicitly defined by
|
||||
/// overriding `primaryKey` in the table class. `null` if the primary key has
|
||||
/// not been defined that way.
|
||||
final Set<SpecifiedColumn> primaryKey;
|
||||
|
||||
/// When non-null, the generated table class will override the `withoutRowId`
|
||||
/// getter on the table class with this value.
|
||||
final bool overrideWithoutRowId;
|
||||
|
||||
/// When non-null, the generated table class will override the
|
||||
/// `customConstraints` getter in the table class with this value.
|
||||
final List<String> overrideTableConstraints;
|
||||
|
||||
const SpecifiedTable(
|
||||
{this.fromClass,
|
||||
this.columns,
|
||||
this.sqlName,
|
||||
this.dartTypeName,
|
||||
this.primaryKey});
|
||||
this.primaryKey,
|
||||
String overriddenName,
|
||||
this.overrideWithoutRowId,
|
||||
this.overrideTableConstraints})
|
||||
: _overriddenName = overriddenName;
|
||||
|
||||
/// Finds all type converters used in this tables.
|
||||
Iterable<UsedTypeConverter> get converters =>
|
||||
columns.map((c) => c.typeConverter).where((t) => t != null);
|
||||
}
|
||||
|
||||
String tableInfoNameForTableClass(ClassElement fromClass) =>
|
||||
'\$${fromClass.name}Table';
|
||||
String _dbFieldName(String className) => ReCase(className).camelCase;
|
||||
|
||||
String _updateCompanionName(ClassElement fromClass) =>
|
||||
'${fromClass.name}Companion';
|
||||
String tableInfoNameForTableClass(String className) => '\$${className}Table';
|
||||
|
||||
String _updateCompanionName(String className) => '${className}Companion';
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import 'package:moor_generator/src/model/specified_column.dart';
|
||||
import 'package:moor_generator/src/model/specified_table.dart';
|
||||
import 'package:moor_generator/src/model/used_type_converter.dart';
|
||||
import 'package:recase/recase.dart';
|
||||
import 'package:sqlparser/sqlparser.dart';
|
||||
|
||||
|
@ -90,7 +91,9 @@ class ResultColumn {
|
|||
final ColumnType type;
|
||||
final bool nullable;
|
||||
|
||||
ResultColumn(this.name, this.type, this.nullable);
|
||||
final UsedTypeConverter converter;
|
||||
|
||||
ResultColumn(this.name, this.type, this.nullable, {this.converter});
|
||||
}
|
||||
|
||||
class FoundVariable {
|
||||
|
|
|
@ -0,0 +1,33 @@
|
|||
import 'package:analyzer/dart/ast/ast.dart';
|
||||
import 'package:analyzer/dart/element/type.dart';
|
||||
import 'package:meta/meta.dart';
|
||||
import 'package:moor_generator/src/model/specified_column.dart';
|
||||
import 'package:moor_generator/src/model/specified_table.dart';
|
||||
|
||||
class UsedTypeConverter {
|
||||
/// Index of this converter in the table in which it has been created.
|
||||
int index;
|
||||
SpecifiedTable table;
|
||||
|
||||
/// The [Expression] that will construct the type converter at runtime. The
|
||||
/// type converter constructed will map a [mappedType] to the [sqlType] and
|
||||
/// vice-versa.
|
||||
final Expression expression;
|
||||
|
||||
/// The type that will be present at runtime.
|
||||
final DartType mappedType;
|
||||
|
||||
/// The type that will be written to the database.
|
||||
final ColumnType sqlType;
|
||||
|
||||
DartType get typeOfConverter => expression.staticType;
|
||||
|
||||
/// Type converters are stored as static fields in the table that created
|
||||
/// them. This will be the field name for this converter.
|
||||
String get fieldName => '\$converter$index';
|
||||
|
||||
UsedTypeConverter(
|
||||
{@required this.expression,
|
||||
@required this.mappedType,
|
||||
@required this.sqlType});
|
||||
}
|
|
@ -1,6 +1,7 @@
|
|||
import 'package:analyzer/dart/ast/ast.dart';
|
||||
import 'package:analyzer/dart/element/element.dart';
|
||||
import 'package:analyzer/dart/element/type.dart';
|
||||
import 'package:moor_generator/src/model/used_type_converter.dart';
|
||||
import 'package:moor_generator/src/state/errors.dart';
|
||||
import 'package:moor_generator/src/model/specified_column.dart';
|
||||
import 'package:moor_generator/src/parser/parser.dart';
|
||||
|
@ -71,8 +72,8 @@ class ColumnParser extends ParserBase {
|
|||
String foundExplicitName;
|
||||
String foundCustomConstraint;
|
||||
Expression foundDefaultExpression;
|
||||
Expression foundTypeConverter;
|
||||
DartType overrideDartType;
|
||||
Expression createdTypeConverter;
|
||||
DartType typeConverterRuntime;
|
||||
var wasDeclaredAsPrimaryKey = false;
|
||||
var nullable = false;
|
||||
|
||||
|
@ -162,8 +163,8 @@ class ColumnParser extends ParserBase {
|
|||
// type of the custom object
|
||||
final type = remainingExpr.typeArgumentTypes.single;
|
||||
|
||||
foundTypeConverter = expression;
|
||||
overrideDartType = type;
|
||||
createdTypeConverter = expression;
|
||||
typeConverterRuntime = type;
|
||||
break;
|
||||
}
|
||||
|
||||
|
@ -179,19 +180,27 @@ class ColumnParser extends ParserBase {
|
|||
name = ColumnName.implicitly(ReCase(getter.name.name).snakeCase);
|
||||
}
|
||||
|
||||
final columnType = _startMethodToColumnType(foundStartMethod);
|
||||
|
||||
UsedTypeConverter converter;
|
||||
if (createdTypeConverter != null && typeConverterRuntime != null) {
|
||||
converter = UsedTypeConverter(
|
||||
expression: createdTypeConverter,
|
||||
mappedType: typeConverterRuntime,
|
||||
sqlType: columnType);
|
||||
}
|
||||
|
||||
return SpecifiedColumn(
|
||||
type: _startMethodToColumnType(foundStartMethod),
|
||||
dartGetterName: getter.name.name,
|
||||
name: name,
|
||||
overriddenJsonName: _readJsonKey(getterElement),
|
||||
declaredAsPrimaryKey: wasDeclaredAsPrimaryKey,
|
||||
customConstraints: foundCustomConstraint,
|
||||
nullable: nullable,
|
||||
features: foundFeatures,
|
||||
defaultArgument: foundDefaultExpression,
|
||||
typeConverter: foundTypeConverter,
|
||||
overriddenDartType: overrideDartType,
|
||||
);
|
||||
type: columnType,
|
||||
dartGetterName: getter.name.name,
|
||||
name: name,
|
||||
overriddenJsonName: _readJsonKey(getterElement),
|
||||
declaredAsPrimaryKey: wasDeclaredAsPrimaryKey,
|
||||
customConstraints: foundCustomConstraint,
|
||||
nullable: nullable,
|
||||
features: foundFeatures,
|
||||
defaultArgument: foundDefaultExpression?.toSource(),
|
||||
typeConverter: converter);
|
||||
}
|
||||
|
||||
ColumnType _startMethodToColumnType(String startMethod) {
|
||||
|
|
|
@ -0,0 +1,63 @@
|
|||
import 'package:moor_generator/src/parser/moor/parsed_moor_file.dart';
|
||||
import 'package:source_span/source_span.dart';
|
||||
import 'package:sqlparser/sqlparser.dart';
|
||||
|
||||
/// Parses and analyzes the experimental `.moor` files containing sql
|
||||
/// statements.
|
||||
class MoorAnalyzer {
|
||||
/// Content of the `.moor` file we're analyzing.
|
||||
final String content;
|
||||
|
||||
MoorAnalyzer(this.content);
|
||||
|
||||
Future<MoorParsingResult> analyze() {
|
||||
final results = SqlEngine().parseMultiple(content);
|
||||
|
||||
final createdTables = <CreateTable>[];
|
||||
final errors = <MoorParsingError>[];
|
||||
|
||||
for (var parsedStmt in results) {
|
||||
if (parsedStmt.rootNode is CreateTableStatement) {
|
||||
createdTables.add(CreateTable(parsedStmt));
|
||||
} else {
|
||||
errors.add(
|
||||
MoorParsingError(
|
||||
parsedStmt.rootNode.span,
|
||||
message:
|
||||
'At the moment, only CREATE TABLE statements are supported in .moor files',
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// all results have the same list of errors
|
||||
final sqlErrors = results.isEmpty ? <ParsingError>[] : results.first.errors;
|
||||
|
||||
for (var error in sqlErrors) {
|
||||
errors.add(MoorParsingError(error.token.span, message: error.message));
|
||||
}
|
||||
|
||||
final parsedFile = ParsedMoorFile(createdTables);
|
||||
|
||||
return Future.value(MoorParsingResult(parsedFile, errors));
|
||||
}
|
||||
}
|
||||
|
||||
class MoorParsingResult {
|
||||
final ParsedMoorFile parsedFile;
|
||||
final List<MoorParsingError> errors;
|
||||
|
||||
MoorParsingResult(this.parsedFile, this.errors);
|
||||
}
|
||||
|
||||
class MoorParsingError {
|
||||
final FileSpan span;
|
||||
final String message;
|
||||
|
||||
MoorParsingError(this.span, {this.message});
|
||||
|
||||
@override
|
||||
String toString() {
|
||||
return span.message(message, color: true);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,108 @@
|
|||
import 'package:moor_generator/src/model/specified_column.dart';
|
||||
import 'package:moor_generator/src/model/specified_table.dart';
|
||||
import 'package:moor_generator/src/parser/sql/type_mapping.dart';
|
||||
import 'package:moor_generator/src/utils/names.dart';
|
||||
import 'package:moor_generator/src/utils/string_escaper.dart';
|
||||
import 'package:recase/recase.dart';
|
||||
import 'package:sqlparser/sqlparser.dart';
|
||||
|
||||
/*
|
||||
We're in the process of defining what a .moor file could actually look like.
|
||||
At the moment, we only support "CREATE TABLE" statements:
|
||||
``` // content of a .moor file
|
||||
CREATE TABLE users (
|
||||
id INTEGER NOT NULL PRIMARY KEY AUTO_INCREMENT,
|
||||
name VARCHAR(100) NOT NULL,
|
||||
)
|
||||
```
|
||||
|
||||
In the future, we'd also like to support
|
||||
- import statements between moor files
|
||||
- import statements from moor files referencing tables declared via the Dart DSL
|
||||
- declaring statements in these files, similar to how compiled statements work
|
||||
with the annotation.
|
||||
*/
|
||||
|
||||
class ParsedMoorFile {
|
||||
final List<CreateTable> declaredTables;
|
||||
|
||||
ParsedMoorFile(this.declaredTables);
|
||||
}
|
||||
|
||||
class CreateTable {
|
||||
/// The AST of this `CREATE TABLE` statement.
|
||||
final ParseResult ast;
|
||||
|
||||
SpecifiedTable extractTable(TypeMapper mapper) {
|
||||
final table =
|
||||
SchemaFromCreateTable().read(ast.rootNode as CreateTableStatement);
|
||||
|
||||
final foundColumns = <String, SpecifiedColumn>{};
|
||||
final primaryKey = <SpecifiedColumn>{};
|
||||
|
||||
for (var column in table.resolvedColumns) {
|
||||
var isPrimaryKey = false;
|
||||
final features = <ColumnFeature>[];
|
||||
final sqlName = column.name;
|
||||
final dartName = ReCase(sqlName).camelCase;
|
||||
final constraintWriter = StringBuffer();
|
||||
final moorType = mapper.resolvedToMoor(column.type);
|
||||
String defaultValue;
|
||||
|
||||
for (var constraint in column.constraints) {
|
||||
if (constraint is PrimaryKeyColumn) {
|
||||
isPrimaryKey = true;
|
||||
if (constraint.autoIncrement) {
|
||||
features.add(AutoIncrement());
|
||||
}
|
||||
}
|
||||
if (constraint is Default) {
|
||||
final dartType = dartTypeNames[moorType];
|
||||
final sqlType = sqlTypes[moorType];
|
||||
final expressionName = 'const CustomExpression<$dartType, $sqlType>';
|
||||
final sqlDefault = constraint.expression.span.text;
|
||||
defaultValue = '$expressionName(${asDartLiteral(sqlDefault)})';
|
||||
}
|
||||
|
||||
if (constraintWriter.isNotEmpty) {
|
||||
constraintWriter.write(' ');
|
||||
}
|
||||
constraintWriter.write(constraint.span.text);
|
||||
}
|
||||
|
||||
final parsed = SpecifiedColumn(
|
||||
type: moorType,
|
||||
nullable: column.type.nullable,
|
||||
dartGetterName: dartName,
|
||||
name: ColumnName.implicitly(sqlName),
|
||||
declaredAsPrimaryKey: isPrimaryKey,
|
||||
features: features,
|
||||
customConstraints: constraintWriter.toString(),
|
||||
defaultArgument: defaultValue,
|
||||
);
|
||||
|
||||
foundColumns[column.name] = parsed;
|
||||
if (isPrimaryKey) {
|
||||
primaryKey.add(parsed);
|
||||
}
|
||||
}
|
||||
|
||||
final tableName = table.name;
|
||||
final dartTableName = ReCase(tableName).pascalCase;
|
||||
|
||||
final constraints = table.tableConstraints.map((c) => c.span.text).toList();
|
||||
|
||||
return SpecifiedTable(
|
||||
fromClass: null,
|
||||
columns: foundColumns.values.toList(),
|
||||
sqlName: table.name,
|
||||
dartTypeName: dataClassNameForClassName(dartTableName),
|
||||
overriddenName: ReCase(tableName).pascalCase,
|
||||
primaryKey: primaryKey,
|
||||
overrideWithoutRowId: table.withoutRowId ? true : null,
|
||||
overrideTableConstraints: constraints.isNotEmpty ? constraints : null,
|
||||
);
|
||||
}
|
||||
|
||||
CreateTable(this.ast);
|
||||
}
|
|
@ -1,5 +1,7 @@
|
|||
import 'package:moor_generator/src/model/sql_query.dart';
|
||||
import 'package:moor_generator/src/model/used_type_converter.dart';
|
||||
import 'package:moor_generator/src/parser/sql/type_mapping.dart';
|
||||
import 'package:moor_generator/src/utils/type_converter_hint.dart';
|
||||
import 'package:sqlparser/sqlparser.dart' hide ResultColumn;
|
||||
|
||||
import 'affected_tables_visitor.dart';
|
||||
|
@ -57,8 +59,13 @@ class QueryHandler {
|
|||
for (var column in rawColumns) {
|
||||
final type = context.typeOf(column).type;
|
||||
final moorType = mapper.resolvedToMoor(type);
|
||||
UsedTypeConverter converter;
|
||||
if (type.hint is TypeConverterHint) {
|
||||
converter = (type.hint as TypeConverterHint).converter;
|
||||
}
|
||||
|
||||
columns.add(ResultColumn(column.name, moorType, type.nullable));
|
||||
columns.add(ResultColumn(column.name, moorType, type.nullable,
|
||||
converter: converter));
|
||||
|
||||
final table = _tableOfColumn(column);
|
||||
candidatesForSingleTable.removeWhere((t) => t != table);
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
import 'package:moor_generator/src/model/specified_column.dart';
|
||||
import 'package:moor_generator/src/model/specified_table.dart';
|
||||
import 'package:moor_generator/src/model/sql_query.dart';
|
||||
import 'package:moor_generator/src/utils/type_converter_hint.dart';
|
||||
import 'package:sqlparser/sqlparser.dart';
|
||||
|
||||
/// Converts tables and types between the moor_generator and the sqlparser
|
||||
|
@ -13,8 +14,11 @@ class TypeMapper {
|
|||
Table extractStructure(SpecifiedTable table) {
|
||||
final columns = <TableColumn>[];
|
||||
for (var specified in table.columns) {
|
||||
final type =
|
||||
resolveForColumnType(specified.type).withNullable(specified.nullable);
|
||||
final hint = specified.typeConverter != null
|
||||
? TypeConverterHint(specified.typeConverter)
|
||||
: null;
|
||||
final type = resolveForColumnType(specified.type, overrideHint: hint)
|
||||
.withNullable(specified.nullable);
|
||||
columns.add(TableColumn(specified.name.name, type));
|
||||
}
|
||||
|
||||
|
@ -23,20 +27,22 @@ class TypeMapper {
|
|||
return engineTable;
|
||||
}
|
||||
|
||||
ResolvedType resolveForColumnType(ColumnType type) {
|
||||
ResolvedType resolveForColumnType(ColumnType type, {TypeHint overrideHint}) {
|
||||
switch (type) {
|
||||
case ColumnType.integer:
|
||||
return const ResolvedType(type: BasicType.int);
|
||||
return ResolvedType(type: BasicType.int, hint: overrideHint);
|
||||
case ColumnType.text:
|
||||
return const ResolvedType(type: BasicType.text);
|
||||
return ResolvedType(type: BasicType.text, hint: overrideHint);
|
||||
case ColumnType.boolean:
|
||||
return const ResolvedType(type: BasicType.int, hint: IsBoolean());
|
||||
return ResolvedType(
|
||||
type: BasicType.int, hint: overrideHint ?? const IsBoolean());
|
||||
case ColumnType.datetime:
|
||||
return const ResolvedType(type: BasicType.int, hint: IsDateTime());
|
||||
return ResolvedType(
|
||||
type: BasicType.int, hint: overrideHint ?? const IsDateTime());
|
||||
case ColumnType.blob:
|
||||
return const ResolvedType(type: BasicType.blob);
|
||||
return ResolvedType(type: BasicType.blob, hint: overrideHint);
|
||||
case ColumnType.real:
|
||||
return const ResolvedType(type: BasicType.real);
|
||||
return ResolvedType(type: BasicType.real, hint: overrideHint);
|
||||
}
|
||||
throw StateError('cant happen');
|
||||
}
|
||||
|
|
|
@ -19,13 +19,22 @@ class TableParser extends ParserBase {
|
|||
|
||||
final columns = await _parseColumns(element);
|
||||
|
||||
return SpecifiedTable(
|
||||
final table = SpecifiedTable(
|
||||
fromClass: element,
|
||||
columns: columns,
|
||||
sqlName: escapeIfNeeded(sqlName),
|
||||
dartTypeName: _readDartTypeName(element),
|
||||
primaryKey: await _readPrimaryKey(element, columns),
|
||||
);
|
||||
|
||||
var index = 0;
|
||||
for (var converter in table.converters) {
|
||||
converter
|
||||
..index = index++
|
||||
..table = table;
|
||||
}
|
||||
|
||||
return table;
|
||||
}
|
||||
|
||||
String _readDartTypeName(ClassElement element) {
|
||||
|
|
|
@ -13,10 +13,20 @@ class UseDaoParser {
|
|||
Future<SpecifiedDao> parseDao(
|
||||
ClassElement element, ConstantReader annotation) async {
|
||||
final tableTypes =
|
||||
annotation.peek('tables').listValue.map((obj) => obj.toTypeValue());
|
||||
annotation.peek('tables')?.listValue?.map((obj) => obj.toTypeValue()) ??
|
||||
[];
|
||||
final queryStrings = annotation.peek('queries')?.mapValue ?? {};
|
||||
|
||||
final includes = annotation
|
||||
.read('include')
|
||||
.objectValue
|
||||
.toSetValue()
|
||||
?.map((e) => e.toStringValue()) ??
|
||||
{};
|
||||
|
||||
final parsedTables = await session.parseTables(tableTypes, element);
|
||||
parsedTables.addAll(await session.resolveIncludes(includes));
|
||||
|
||||
final parsedQueries =
|
||||
await session.parseQueries(queryStrings, parsedTables);
|
||||
|
||||
|
|
|
@ -15,10 +15,19 @@ class UseMoorParser {
|
|||
ClassElement element, ConstantReader annotation) async {
|
||||
// the types declared in UseMoor.tables
|
||||
final tableTypes =
|
||||
annotation.peek('tables').listValue.map((obj) => obj.toTypeValue());
|
||||
annotation.peek('tables')?.listValue?.map((obj) => obj.toTypeValue()) ??
|
||||
[];
|
||||
final queryStrings = annotation.peek('queries')?.mapValue ?? {};
|
||||
final includes = annotation
|
||||
.read('include')
|
||||
.objectValue
|
||||
.toSetValue()
|
||||
?.map((e) => e.toStringValue()) ??
|
||||
{};
|
||||
|
||||
final parsedTables = await session.parseTables(tableTypes, element);
|
||||
parsedTables.addAll(await session.resolveIncludes(includes));
|
||||
|
||||
final parsedQueries =
|
||||
await session.parseQueries(queryStrings, parsedTables);
|
||||
final daoTypes = _readDaoTypes(annotation);
|
||||
|
|
|
@ -10,7 +10,9 @@ import 'package:moor_generator/src/model/specified_database.dart';
|
|||
import 'package:moor_generator/src/model/specified_table.dart';
|
||||
import 'package:moor_generator/src/model/sql_query.dart';
|
||||
import 'package:moor_generator/src/parser/column_parser.dart';
|
||||
import 'package:moor_generator/src/parser/moor/moor_analyzer.dart';
|
||||
import 'package:moor_generator/src/parser/sql/sql_parser.dart';
|
||||
import 'package:moor_generator/src/parser/sql/type_mapping.dart';
|
||||
import 'package:moor_generator/src/parser/table_parser.dart';
|
||||
import 'package:moor_generator/src/parser/use_dao_parser.dart';
|
||||
import 'package:moor_generator/src/parser/use_moor_parser.dart';
|
||||
|
@ -78,7 +80,34 @@ class GeneratorSession {
|
|||
} else {
|
||||
return _tableParser.parse(type.element as ClassElement);
|
||||
}
|
||||
}));
|
||||
})).then((list) => List.from(list)); // make growable
|
||||
}
|
||||
|
||||
Future<List<SpecifiedTable>> resolveIncludes(Iterable<String> paths) async {
|
||||
final mapper = TypeMapper();
|
||||
final foundTables = <SpecifiedTable>[];
|
||||
|
||||
for (var path in paths) {
|
||||
final asset = AssetId.resolve(path, from: step.inputId);
|
||||
String content;
|
||||
try {
|
||||
content = await step.readAsString(asset);
|
||||
} catch (e) {
|
||||
errors.add(MoorError(
|
||||
critical: true,
|
||||
message: 'The included file $path could not be found'));
|
||||
}
|
||||
|
||||
final parsed = await MoorAnalyzer(content).analyze();
|
||||
foundTables.addAll(
|
||||
parsed.parsedFile.declaredTables.map((t) => t.extractTable(mapper)));
|
||||
|
||||
for (var parseError in parsed.errors) {
|
||||
errors.add(MoorError(message: "Can't parse sql in $path: $parseError"));
|
||||
}
|
||||
}
|
||||
|
||||
return foundTables;
|
||||
}
|
||||
|
||||
/// Parses a column from a getter [e] declared inside a table class and its
|
||||
|
|
|
@ -0,0 +1,8 @@
|
|||
import 'package:moor_generator/src/model/used_type_converter.dart';
|
||||
import 'package:sqlparser/sqlparser.dart';
|
||||
|
||||
class TypeConverterHint extends TypeHint {
|
||||
final UsedTypeConverter converter;
|
||||
|
||||
TypeConverterHint(this.converter);
|
||||
}
|
|
@ -1,4 +1,3 @@
|
|||
import 'package:moor_generator/src/model/specified_column.dart';
|
||||
import 'package:moor_generator/src/model/specified_table.dart';
|
||||
import 'package:moor_generator/src/state/session.dart';
|
||||
import 'package:moor_generator/src/writer/utils/hash_code.dart';
|
||||
|
@ -76,7 +75,6 @@ class DataClassWriter {
|
|||
..write("final effectivePrefix = prefix ?? '';");
|
||||
|
||||
final dartTypeToResolver = <String, String>{};
|
||||
final columnToTypeMapper = <SpecifiedColumn, String>{};
|
||||
|
||||
final types = table.columns.map((c) => c.variableTypeName).toSet();
|
||||
for (var usedType in types) {
|
||||
|
@ -88,13 +86,6 @@ class DataClassWriter {
|
|||
.write('final $resolver = db.typeSystem.forDartType<$usedType>();\n');
|
||||
}
|
||||
|
||||
for (var column in table.columns.where((c) => c.typeConverter != null)) {
|
||||
final name = '${column.dartGetterName}Converter';
|
||||
columnToTypeMapper[column] = name;
|
||||
|
||||
buffer.write('final $name = ${column.typeConverter.toSource()};');
|
||||
}
|
||||
|
||||
// finally, the mighty constructor invocation:
|
||||
buffer.write('return $dataClassName(');
|
||||
|
||||
|
@ -106,9 +97,13 @@ class DataClassWriter {
|
|||
|
||||
var loadType = '$resolver.mapFromDatabaseResponse(data[$columnName])';
|
||||
|
||||
if (columnToTypeMapper.containsKey(column)) {
|
||||
final converter = columnToTypeMapper[column];
|
||||
loadType = '$converter.mapToDart($loadType)';
|
||||
// run the loaded expression though the custom converter for the final
|
||||
// result.
|
||||
if (column.typeConverter != null) {
|
||||
// stored as a static field
|
||||
final converter = column.typeConverter;
|
||||
final loaded = '${table.tableInfoName}.${converter.fieldName}';
|
||||
loadType = '$loaded.mapToDart($loadType)';
|
||||
}
|
||||
|
||||
buffer.write('$getter: $loadType,');
|
||||
|
|
|
@ -62,7 +62,18 @@ class QueryWriter {
|
|||
for (var column in _select.resultSet.columns) {
|
||||
final fieldName = _select.resultSet.dartNameFor(column);
|
||||
final readMethod = readFromMethods[column.type];
|
||||
buffer.write("$fieldName: row.$readMethod('${column.name}'),");
|
||||
|
||||
var code = "row.$readMethod('${column.name}')";
|
||||
|
||||
if (column.converter != null) {
|
||||
final converter = column.converter;
|
||||
final infoName = converter.table.tableInfoName;
|
||||
final field = '$infoName.${converter.fieldName}';
|
||||
|
||||
code = '$field.mapToDart($code)';
|
||||
}
|
||||
|
||||
buffer.write('$fieldName: $code,');
|
||||
}
|
||||
|
||||
buffer.write(');\n}\n');
|
||||
|
|
|
@ -13,7 +13,7 @@ class ResultSetWriter {
|
|||
// write fields
|
||||
for (var column in query.resultSet.columns) {
|
||||
final name = query.resultSet.dartNameFor(column);
|
||||
final runtimeType = dartTypeNames[column.type];
|
||||
final runtimeType = _getRuntimeType(column);
|
||||
into.write('final $runtimeType $name\n;');
|
||||
}
|
||||
|
||||
|
@ -24,4 +24,12 @@ class ResultSetWriter {
|
|||
}
|
||||
into.write('});\n}\n');
|
||||
}
|
||||
|
||||
String _getRuntimeType(ResultColumn column) {
|
||||
if (column.converter != null) {
|
||||
return column.converter.mappedType.displayName;
|
||||
} else {
|
||||
return dartTypeNames[column.type];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -24,7 +24,7 @@ class TableWriter {
|
|||
|
||||
void writeTableInfoClass(StringBuffer buffer) {
|
||||
final dataClass = table.dartTypeName;
|
||||
final tableDslName = table.fromClass.name;
|
||||
final tableDslName = table.fromClass?.name ?? 'Table';
|
||||
|
||||
// class UsersTable extends Users implements TableInfo<Users, User> {
|
||||
buffer
|
||||
|
@ -62,10 +62,21 @@ class TableWriter {
|
|||
|
||||
_writeAliasGenerator(buffer);
|
||||
|
||||
_writeConvertersAsStaticFields(buffer);
|
||||
_overrideFieldsIfNeeded(buffer);
|
||||
|
||||
// close class
|
||||
buffer.write('}');
|
||||
}
|
||||
|
||||
void _writeConvertersAsStaticFields(StringBuffer buffer) {
|
||||
for (var converter in table.converters) {
|
||||
final typeName = converter.typeOfConverter.displayName;
|
||||
final code = converter.expression.toSource();
|
||||
buffer..write('static $typeName ${converter.fieldName} = $code;');
|
||||
}
|
||||
}
|
||||
|
||||
void _writeMappingMethod(StringBuffer buffer) {
|
||||
final dataClassName = table.dartTypeName;
|
||||
|
||||
|
@ -93,10 +104,10 @@ class TableWriter {
|
|||
|
||||
if (column.typeConverter != null) {
|
||||
// apply type converter before writing the variable
|
||||
// todo instead of creating the converter every time, can we cache its
|
||||
// instance in the generated table class?
|
||||
final converter = column.typeConverter;
|
||||
final fieldName = '${table.tableInfoName}.${converter.fieldName}';
|
||||
buffer
|
||||
..write('final converter = ${column.typeConverter.toSource()};\n')
|
||||
..write('final converter = $fieldName;\n')
|
||||
..write(mapSetter)
|
||||
..write('(converter.mapToSql(d.${column.dartGetterName}.value));');
|
||||
} else {
|
||||
|
@ -138,7 +149,7 @@ class TableWriter {
|
|||
}
|
||||
|
||||
if (column.defaultArgument != null) {
|
||||
additionalParams['defaultValue'] = column.defaultArgument.toSource();
|
||||
additionalParams['defaultValue'] = column.defaultArgument;
|
||||
}
|
||||
|
||||
expressionBuffer
|
||||
|
@ -164,7 +175,9 @@ class TableWriter {
|
|||
getterName: column.dartGetterName,
|
||||
returnType: column.implColumnTypeName,
|
||||
code: expressionBuffer.toString(),
|
||||
hasOverride: true,
|
||||
// don't override on custom tables because we only override the column
|
||||
// when the base class is user defined
|
||||
hasOverride: !table.isFromSql,
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -247,4 +260,20 @@ class TableWriter {
|
|||
..write('return $typeName(_db, alias);')
|
||||
..write('}');
|
||||
}
|
||||
|
||||
void _overrideFieldsIfNeeded(StringBuffer buffer) {
|
||||
if (table.overrideWithoutRowId != null) {
|
||||
final value = table.overrideWithoutRowId ? 'true' : 'false';
|
||||
buffer..write('@override\n')..write('final bool withoutRowId = $value;');
|
||||
}
|
||||
|
||||
if (table.overrideTableConstraints != null) {
|
||||
final value =
|
||||
table.overrideTableConstraints.map(asDartLiteral).join(', ');
|
||||
|
||||
buffer
|
||||
..write('@override\n')
|
||||
..write('final List<String> customConstraints = const [$value];');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,24 @@
|
|||
import 'package:moor_generator/src/parser/moor/moor_analyzer.dart';
|
||||
import 'package:moor_generator/src/parser/sql/type_mapping.dart';
|
||||
import 'package:test_api/test_api.dart';
|
||||
|
||||
void main() {
|
||||
final content = '''
|
||||
CREATE TABLE users(
|
||||
id INT NOT NULL PRIMARY KEY AUTOINCREMENT,
|
||||
name VARCHAR NOT NULL CHECK(LENGTH(name) BETWEEN 5 AND 30)
|
||||
);
|
||||
''';
|
||||
|
||||
test('extracts table structure from .moor files', () async {
|
||||
final analyzer = MoorAnalyzer(content);
|
||||
final result = await analyzer.analyze();
|
||||
|
||||
expect(result.errors, isEmpty);
|
||||
|
||||
final table =
|
||||
result.parsedFile.declaredTables.single.extractTable(TypeMapper());
|
||||
|
||||
expect(table.sqlName, 'users');
|
||||
});
|
||||
}
|
|
@ -9,3 +9,5 @@ build/
|
|||
|
||||
# Directory created by dartdoc
|
||||
doc/api/
|
||||
|
||||
coverage.json
|
|
@ -65,11 +65,11 @@ package to generate type-safe methods from sql.
|
|||
Most on this list is just not supported yet because I didn't found a use case for
|
||||
them yet. If you need them, just leave an issue and I'll try to implement them soon.
|
||||
|
||||
- For now, only `INSERT` statements are not supported, but they will be soon
|
||||
- For now, `INSERT` statements are not supported, but they will be soon.
|
||||
- Windowing is not supported yet
|
||||
- Compound select statements (`UNION` / `INTERSECT`) are not supported yet
|
||||
- Common table expressions are not supported
|
||||
- Some advanced expressions, like `COLLATE` or `CAST`s aren't supported yet.
|
||||
- Some advanced expressions, like `CAST`s aren't supported yet.
|
||||
|
||||
If you run into parsing errors with what you think is valid sql, please create an issue.
|
||||
|
||||
|
|
|
@ -4,4 +4,5 @@ library sqlparser;
|
|||
export 'src/analysis/analysis.dart';
|
||||
export 'src/ast/ast.dart';
|
||||
export 'src/engine/sql_engine.dart';
|
||||
export 'src/reader/parser/parser.dart' show ParsingError;
|
||||
export 'src/reader/tokenizer/token.dart' show CumulatedTokenizerException;
|
||||
|
|
|
@ -5,6 +5,7 @@ import 'package:sqlparser/sqlparser.dart';
|
|||
import 'package:sqlparser/src/reader/tokenizer/token.dart';
|
||||
|
||||
part 'schema/column.dart';
|
||||
part 'schema/from_create_table.dart';
|
||||
part 'schema/references.dart';
|
||||
part 'schema/table.dart';
|
||||
|
||||
|
|
|
@ -16,10 +16,16 @@ class TableColumn extends Column {
|
|||
/// The type of this column, which is immediately available.
|
||||
final ResolvedType type;
|
||||
|
||||
/// The column constraints set on this column.
|
||||
///
|
||||
/// See also:
|
||||
/// - https://www.sqlite.org/syntax/column-constraint.html
|
||||
final List<ColumnConstraint> constraints;
|
||||
|
||||
/// The table this column belongs to.
|
||||
Table table;
|
||||
|
||||
TableColumn(this.name, this.type);
|
||||
TableColumn(this.name, this.type, {this.constraints = const []});
|
||||
}
|
||||
|
||||
/// A column that is created by an expression. For instance, in the select
|
||||
|
|
|
@ -0,0 +1,52 @@
|
|||
part of '../analysis.dart';
|
||||
|
||||
/// Reads the [Table] definition from a [CreateTableStatement].
|
||||
class SchemaFromCreateTable {
|
||||
Table read(CreateTableStatement stmt) {
|
||||
return Table(
|
||||
name: stmt.tableName,
|
||||
resolvedColumns: [for (var def in stmt.columns) _readColumn(def)],
|
||||
withoutRowId: stmt.withoutRowId,
|
||||
tableConstraints: stmt.tableConstraints,
|
||||
);
|
||||
}
|
||||
|
||||
TableColumn _readColumn(ColumnDefinition definition) {
|
||||
final affinity = columnAffinity(definition.typeName);
|
||||
final nullable = !definition.constraints.any((c) => c is NotNull);
|
||||
|
||||
final resolvedType = ResolvedType(type: affinity, nullable: nullable);
|
||||
|
||||
return TableColumn(
|
||||
definition.columnName,
|
||||
resolvedType,
|
||||
constraints: definition.constraints,
|
||||
);
|
||||
}
|
||||
|
||||
/// Looks up the correct column affinity for a declared type name with the
|
||||
/// rules described here:
|
||||
/// https://www.sqlite.org/datatype3.html#determination_of_column_affinity
|
||||
@visibleForTesting
|
||||
BasicType columnAffinity(String typeName) {
|
||||
if (typeName == null) {
|
||||
return BasicType.blob;
|
||||
}
|
||||
|
||||
final upper = typeName.toUpperCase();
|
||||
if (upper.contains('INT')) {
|
||||
return BasicType.int;
|
||||
}
|
||||
if (upper.contains('CHAR') ||
|
||||
upper.contains('CLOB') ||
|
||||
upper.contains('TEXT')) {
|
||||
return BasicType.text;
|
||||
}
|
||||
|
||||
if (upper.contains('BLOB')) {
|
||||
return BasicType.blob;
|
||||
}
|
||||
|
||||
return BasicType.real;
|
||||
}
|
||||
}
|
|
@ -30,8 +30,18 @@ class Table with ResultSet, VisibleToChildren {
|
|||
@override
|
||||
final List<TableColumn> resolvedColumns;
|
||||
|
||||
/// Whether this table was created with an "WITHOUT ROWID" modifier
|
||||
final bool withoutRowId;
|
||||
|
||||
/// Additional constraints set on this table.
|
||||
final List<TableConstraint> tableConstraints;
|
||||
|
||||
/// Constructs a table from the known [name] and [resolvedColumns].
|
||||
Table({@required this.name, this.resolvedColumns}) {
|
||||
Table(
|
||||
{@required this.name,
|
||||
this.resolvedColumns,
|
||||
this.withoutRowId = false,
|
||||
this.tableConstraints = const []}) {
|
||||
for (var column in resolvedColumns) {
|
||||
column.table = this;
|
||||
}
|
||||
|
|
|
@ -80,14 +80,20 @@ class ColumnResolver extends RecursiveVisitor<void> {
|
|||
}
|
||||
} else if (resultColumn is ExpressionResultColumn) {
|
||||
final name = _nameOfResultColumn(resultColumn);
|
||||
usedColumns.add(
|
||||
ExpressionColumn(name: name, expression: resultColumn.expression),
|
||||
);
|
||||
final column =
|
||||
ExpressionColumn(name: name, expression: resultColumn.expression);
|
||||
|
||||
usedColumns.add(column);
|
||||
|
||||
// make this column available if there is no other with the same name
|
||||
if (!availableColumns.any((c) => c.name == name)) {
|
||||
availableColumns.add(column);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
s.resolvedColumns = usedColumns;
|
||||
s.scope.availableColumns = availableColumns;
|
||||
scope.availableColumns = availableColumns;
|
||||
}
|
||||
|
||||
String _nameOfResultColumn(ExpressionResultColumn c) {
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import 'package:collection/collection.dart';
|
||||
import 'package:meta/meta.dart';
|
||||
import 'package:source_span/source_span.dart';
|
||||
import 'package:sqlparser/src/reader/tokenizer/token.dart';
|
||||
import 'package:sqlparser/src/analysis/analysis.dart';
|
||||
|
||||
|
@ -19,6 +20,10 @@ part 'expressions/subquery.dart';
|
|||
part 'expressions/tuple.dart';
|
||||
part 'expressions/variables.dart';
|
||||
|
||||
part 'schema/column_definition.dart';
|
||||
part 'schema/table_definition.dart';
|
||||
|
||||
part 'statements/create_table.dart';
|
||||
part 'statements/delete.dart';
|
||||
part 'statements/select.dart';
|
||||
part 'statements/statement.dart';
|
||||
|
@ -45,6 +50,8 @@ abstract class AstNode {
|
|||
/// The last position that belongs to node, exclusive. Not set for all nodes.
|
||||
int get lastPosition => last.span.end.offset;
|
||||
|
||||
FileSpan get span => first.span.expand(last.span);
|
||||
|
||||
/// Sets the [AstNode.first] and [AstNode.last] property in one go.
|
||||
void setSpan(Token first, Token last) {
|
||||
this.first = first;
|
||||
|
@ -127,6 +134,7 @@ abstract class AstVisitor<T> {
|
|||
T visitResultColumn(ResultColumn e);
|
||||
T visitDeleteStatement(DeleteStatement e);
|
||||
T visitUpdateStatement(UpdateStatement e);
|
||||
T visitCreateTableStatement(CreateTableStatement e);
|
||||
|
||||
T visitOrderBy(OrderBy e);
|
||||
T visitOrderingTerm(OrderingTerm e);
|
||||
|
@ -137,6 +145,11 @@ abstract class AstVisitor<T> {
|
|||
|
||||
T visitSetComponent(SetComponent e);
|
||||
|
||||
T visitColumnDefinition(ColumnDefinition e);
|
||||
T visitColumnConstraint(ColumnConstraint e);
|
||||
T visitTableConstraint(TableConstraint e);
|
||||
T visitForeignKeyClause(ForeignKeyClause e);
|
||||
|
||||
T visitBinaryExpression(BinaryExpression e);
|
||||
T visitStringComparison(StringComparisonExpression e);
|
||||
T visitUnaryExpression(UnaryExpression e);
|
||||
|
@ -236,9 +249,24 @@ class RecursiveVisitor<T> extends AstVisitor<T> {
|
|||
@override
|
||||
T visitUpdateStatement(UpdateStatement e) => visitChildren(e);
|
||||
|
||||
@override
|
||||
T visitCreateTableStatement(CreateTableStatement e) => visitChildren(e);
|
||||
|
||||
@override
|
||||
T visitUnaryExpression(UnaryExpression e) => visitChildren(e);
|
||||
|
||||
@override
|
||||
T visitColumnDefinition(ColumnDefinition e) => visitChildren(e);
|
||||
|
||||
@override
|
||||
T visitTableConstraint(TableConstraint e) => visitChildren(e);
|
||||
|
||||
@override
|
||||
T visitColumnConstraint(ColumnConstraint e) => visitChildren(e);
|
||||
|
||||
@override
|
||||
T visitForeignKeyClause(ForeignKeyClause e) => visitChildren(e);
|
||||
|
||||
@protected
|
||||
T visitChildren(AstNode e) {
|
||||
for (var child in e.childNodes) {
|
||||
|
|
|
@ -23,7 +23,10 @@ class OrderingTerm extends AstNode {
|
|||
final Expression expression;
|
||||
final OrderingMode orderingMode;
|
||||
|
||||
OrderingTerm({this.expression, this.orderingMode = OrderingMode.ascending});
|
||||
OrderingMode get resolvedOrderingMode =>
|
||||
orderingMode ?? OrderingMode.ascending;
|
||||
|
||||
OrderingTerm({this.expression, this.orderingMode});
|
||||
|
||||
@override
|
||||
T accept<T>(AstVisitor<T> visitor) => visitor.visitOrderingTerm(this);
|
||||
|
|
|
@ -0,0 +1,166 @@
|
|||
part of '../ast.dart';
|
||||
|
||||
/// https://www.sqlite.org/syntax/column-def.html
|
||||
class ColumnDefinition extends AstNode {
|
||||
final String columnName;
|
||||
final String typeName;
|
||||
final List<ColumnConstraint> constraints;
|
||||
|
||||
ColumnDefinition(
|
||||
{@required this.columnName,
|
||||
@required this.typeName,
|
||||
this.constraints = const []});
|
||||
|
||||
@override
|
||||
T accept<T>(AstVisitor<T> visitor) => visitor.visitColumnDefinition(this);
|
||||
|
||||
@override
|
||||
Iterable<AstNode> get childNodes => constraints;
|
||||
|
||||
@override
|
||||
bool contentEquals(ColumnDefinition other) {
|
||||
return other.columnName == columnName && other.typeName == typeName;
|
||||
}
|
||||
}
|
||||
|
||||
/// https://www.sqlite.org/syntax/column-constraint.html
|
||||
abstract class ColumnConstraint extends AstNode {
|
||||
final String name;
|
||||
|
||||
ColumnConstraint(this.name);
|
||||
|
||||
@override
|
||||
T accept<T>(AstVisitor<T> visitor) => visitor.visitColumnConstraint(this);
|
||||
|
||||
T when<T>({
|
||||
T Function(NotNull n) notNull,
|
||||
T Function(PrimaryKeyColumn) primaryKey,
|
||||
T Function(UniqueColumn) unique,
|
||||
T Function(CheckColumn) check,
|
||||
T Function(Default) isDefault,
|
||||
T Function(CollateConstraint) collate,
|
||||
T Function(ForeignKeyColumnConstraint) foreignKey,
|
||||
}) {
|
||||
if (this is NotNull) {
|
||||
return notNull?.call(this as NotNull);
|
||||
} else if (this is PrimaryKeyColumn) {
|
||||
return primaryKey?.call(this as PrimaryKeyColumn);
|
||||
} else if (this is UniqueColumn) {
|
||||
return unique?.call(this as UniqueColumn);
|
||||
} else if (this is CheckColumn) {
|
||||
return check?.call(this as CheckColumn);
|
||||
} else if (this is Default) {
|
||||
return isDefault?.call(this as Default);
|
||||
} else if (this is CollateConstraint) {
|
||||
return collate?.call(this as CollateConstraint);
|
||||
} else if (this is ForeignKeyColumnConstraint) {
|
||||
return foreignKey?.call(this as ForeignKeyColumnConstraint);
|
||||
} else {
|
||||
throw Exception('Did not expect $runtimeType as a ColumnConstraint');
|
||||
}
|
||||
}
|
||||
|
||||
@visibleForOverriding
|
||||
bool _equalToConstraint(covariant ColumnConstraint other);
|
||||
|
||||
@override
|
||||
bool contentEquals(ColumnConstraint other) {
|
||||
return other.name == name && _equalToConstraint(other);
|
||||
}
|
||||
}
|
||||
|
||||
enum ConflictClause { rollback, abort, fail, ignore, replace }
|
||||
|
||||
class NotNull extends ColumnConstraint {
|
||||
final ConflictClause onConflict;
|
||||
|
||||
NotNull(String name, {this.onConflict}) : super(name);
|
||||
|
||||
@override
|
||||
final Iterable<AstNode> childNodes = const [];
|
||||
|
||||
@override
|
||||
bool _equalToConstraint(NotNull other) => onConflict == other.onConflict;
|
||||
}
|
||||
|
||||
class PrimaryKeyColumn extends ColumnConstraint {
|
||||
final bool autoIncrement;
|
||||
final ConflictClause onConflict;
|
||||
final OrderingMode mode;
|
||||
|
||||
PrimaryKeyColumn(String name,
|
||||
{this.autoIncrement = false, this.mode, this.onConflict})
|
||||
: super(name);
|
||||
|
||||
@override
|
||||
Iterable<AstNode> get childNodes => const [];
|
||||
|
||||
@override
|
||||
bool _equalToConstraint(PrimaryKeyColumn other) {
|
||||
return other.autoIncrement == autoIncrement &&
|
||||
other.mode == mode &&
|
||||
other.onConflict == onConflict;
|
||||
}
|
||||
}
|
||||
|
||||
class UniqueColumn extends ColumnConstraint {
|
||||
final ConflictClause onConflict;
|
||||
|
||||
UniqueColumn(String name, this.onConflict) : super(name);
|
||||
|
||||
@override
|
||||
Iterable<AstNode> get childNodes => const [];
|
||||
|
||||
@override
|
||||
bool _equalToConstraint(UniqueColumn other) {
|
||||
return other.onConflict == onConflict;
|
||||
}
|
||||
}
|
||||
|
||||
class CheckColumn extends ColumnConstraint {
|
||||
final Expression expression;
|
||||
|
||||
CheckColumn(String name, this.expression) : super(name);
|
||||
|
||||
@override
|
||||
Iterable<AstNode> get childNodes => [expression];
|
||||
|
||||
@override
|
||||
bool _equalToConstraint(CheckColumn other) => true;
|
||||
}
|
||||
|
||||
class Default extends ColumnConstraint {
|
||||
final Expression expression;
|
||||
|
||||
Default(String name, this.expression) : super(name);
|
||||
|
||||
@override
|
||||
Iterable<AstNode> get childNodes => [expression];
|
||||
|
||||
@override
|
||||
bool _equalToConstraint(Default other) => true;
|
||||
}
|
||||
|
||||
class CollateConstraint extends ColumnConstraint {
|
||||
final String collation;
|
||||
|
||||
CollateConstraint(String name, this.collation) : super(name);
|
||||
|
||||
@override
|
||||
final Iterable<AstNode> childNodes = const [];
|
||||
|
||||
@override
|
||||
bool _equalToConstraint(CollateConstraint other) => true;
|
||||
}
|
||||
|
||||
class ForeignKeyColumnConstraint extends ColumnConstraint {
|
||||
final ForeignKeyClause clause;
|
||||
|
||||
ForeignKeyColumnConstraint(String name, this.clause) : super(name);
|
||||
|
||||
@override
|
||||
bool _equalToConstraint(ForeignKeyColumnConstraint other) => true;
|
||||
|
||||
@override
|
||||
Iterable<AstNode> get childNodes => [clause];
|
||||
}
|
|
@ -0,0 +1,93 @@
|
|||
part of '../ast.dart';
|
||||
|
||||
enum ReferenceAction { setNull, setDefault, cascade, restrict, noAction }
|
||||
|
||||
class ForeignKeyClause extends AstNode {
|
||||
final TableReference foreignTable;
|
||||
final List<Reference> columnNames;
|
||||
final ReferenceAction onDelete;
|
||||
final ReferenceAction onUpdate;
|
||||
|
||||
ForeignKeyClause(
|
||||
{@required this.foreignTable,
|
||||
@required this.columnNames,
|
||||
this.onDelete,
|
||||
this.onUpdate});
|
||||
|
||||
@override
|
||||
T accept<T>(AstVisitor<T> visitor) => visitor.visitForeignKeyClause(this);
|
||||
|
||||
@override
|
||||
Iterable<AstNode> get childNodes => [foreignTable, ...columnNames];
|
||||
|
||||
@override
|
||||
bool contentEquals(ForeignKeyClause other) {
|
||||
return other.onDelete == onDelete && other.onUpdate == onUpdate;
|
||||
}
|
||||
}
|
||||
|
||||
abstract class TableConstraint extends AstNode {
|
||||
final String name;
|
||||
|
||||
TableConstraint(this.name);
|
||||
|
||||
@override
|
||||
T accept<T>(AstVisitor<T> visitor) => visitor.visitTableConstraint(this);
|
||||
|
||||
@override
|
||||
bool contentEquals(TableConstraint other) {
|
||||
return other.name == name && _constraintEquals(other);
|
||||
}
|
||||
|
||||
@visibleForOverriding
|
||||
bool _constraintEquals(covariant TableConstraint other);
|
||||
}
|
||||
|
||||
class KeyClause extends TableConstraint {
|
||||
final bool isPrimaryKey;
|
||||
final List<Reference> indexedColumns;
|
||||
final ConflictClause onConflict;
|
||||
|
||||
bool get isUnique => !isPrimaryKey;
|
||||
|
||||
KeyClause(String name,
|
||||
{@required this.isPrimaryKey,
|
||||
@required this.indexedColumns,
|
||||
this.onConflict})
|
||||
: super(name);
|
||||
|
||||
@override
|
||||
bool _constraintEquals(KeyClause other) {
|
||||
return other.isPrimaryKey == isPrimaryKey && other.onConflict == onConflict;
|
||||
}
|
||||
|
||||
@override
|
||||
Iterable<AstNode> get childNodes => indexedColumns;
|
||||
}
|
||||
|
||||
class CheckTable extends TableConstraint {
|
||||
final Expression expression;
|
||||
|
||||
CheckTable(String name, this.expression) : super(name);
|
||||
|
||||
@override
|
||||
bool _constraintEquals(CheckTable other) => true;
|
||||
|
||||
@override
|
||||
Iterable<AstNode> get childNodes => [expression];
|
||||
}
|
||||
|
||||
class ForeignKeyTableConstraint extends TableConstraint {
|
||||
final List<Reference> columns;
|
||||
final ForeignKeyClause clause;
|
||||
|
||||
ForeignKeyTableConstraint(String name,
|
||||
{@required this.columns, @required this.clause})
|
||||
: super(name);
|
||||
|
||||
@override
|
||||
bool _constraintEquals(ForeignKeyTableConstraint other) => true;
|
||||
|
||||
@override
|
||||
Iterable<AstNode> get childNodes => [...columns, clause];
|
||||
}
|
|
@ -0,0 +1,31 @@
|
|||
part of '../ast.dart';
|
||||
|
||||
/// A "CREATE TABLE" statement, see https://www.sqlite.org/lang_createtable.html
|
||||
/// for the individual components.
|
||||
class CreateTableStatement extends Statement with SchemaStatement {
|
||||
final bool ifNotExists;
|
||||
final String tableName;
|
||||
final List<ColumnDefinition> columns;
|
||||
final List<TableConstraint> tableConstraints;
|
||||
final bool withoutRowId;
|
||||
|
||||
CreateTableStatement(
|
||||
{this.ifNotExists = false,
|
||||
@required this.tableName,
|
||||
this.columns = const [],
|
||||
this.tableConstraints = const [],
|
||||
this.withoutRowId = false});
|
||||
|
||||
@override
|
||||
T accept<T>(AstVisitor<T> visitor) => visitor.visitCreateTableStatement(this);
|
||||
|
||||
@override
|
||||
Iterable<AstNode> get childNodes => [...columns, ...tableConstraints];
|
||||
|
||||
@override
|
||||
bool contentEquals(CreateTableStatement other) {
|
||||
return other.ifNotExists == ifNotExists &&
|
||||
other.tableName == tableName &&
|
||||
other.withoutRowId == withoutRowId;
|
||||
}
|
||||
}
|
|
@ -1,6 +1,6 @@
|
|||
part of '../ast.dart';
|
||||
|
||||
class DeleteStatement extends Statement {
|
||||
class DeleteStatement extends Statement with CrudStatement {
|
||||
final TableReference from;
|
||||
final Expression where;
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
part of '../ast.dart';
|
||||
|
||||
class SelectStatement extends Statement with ResultSet {
|
||||
class SelectStatement extends Statement with CrudStatement, ResultSet {
|
||||
final bool distinct;
|
||||
final List<ResultColumn> columns;
|
||||
final List<Queryable> from;
|
||||
|
|
|
@ -1,3 +1,9 @@
|
|||
part of '../ast.dart';
|
||||
|
||||
abstract class Statement extends AstNode {}
|
||||
|
||||
/// Marker mixin for statements that read from an existing table structure.
|
||||
mixin CrudStatement on Statement {}
|
||||
|
||||
/// Marker mixin for statements that change the table structure.
|
||||
mixin SchemaStatement on Statement {}
|
||||
|
|
|
@ -16,7 +16,7 @@ const Map<TokenType, FailureMode> _tokensToMode = {
|
|||
TokenType.ignore: FailureMode.ignore,
|
||||
};
|
||||
|
||||
class UpdateStatement extends Statement {
|
||||
class UpdateStatement extends Statement with CrudStatement {
|
||||
final FailureMode or;
|
||||
final TableReference table;
|
||||
final List<SetComponent> set;
|
||||
|
|
|
@ -25,42 +25,79 @@ class SqlEngine {
|
|||
return scope;
|
||||
}
|
||||
|
||||
/// Parses the [sql] statement. At the moment, only SELECT statements are
|
||||
/// supported.
|
||||
ParseResult parse(String sql) {
|
||||
final scanner = Scanner(sql);
|
||||
/// Tokenizes the [source] into a list list [Token]s. Each [Token] contains
|
||||
/// information about where it appears in the [source] and a [TokenType].
|
||||
List<Token> tokenize(String source) {
|
||||
final scanner = Scanner(source);
|
||||
final tokens = scanner.scanTokens();
|
||||
|
||||
if (scanner.errors.isNotEmpty) {
|
||||
throw CumulatedTokenizerException(scanner.errors);
|
||||
}
|
||||
|
||||
final parser = Parser(tokens);
|
||||
final stmt = parser.statement();
|
||||
return ParseResult._(stmt, parser.errors);
|
||||
return tokens;
|
||||
}
|
||||
|
||||
/// Parses and analyzes the [sql] statement, which at the moment has to be a
|
||||
/// select statement. The [AnalysisContext] returned contains all information
|
||||
/// about type hints, errors, and the parsed AST.
|
||||
/// Parses the [sql] statement into an AST-representation.
|
||||
ParseResult parse(String sql) {
|
||||
final tokens = tokenize(sql);
|
||||
final parser = Parser(tokens);
|
||||
|
||||
final stmt = parser.statement();
|
||||
return ParseResult._(stmt, parser.errors, sql);
|
||||
}
|
||||
|
||||
/// Parses multiple sql statements, separated by a semicolon. All
|
||||
/// [ParseResult] entries will have the same [ParseResult.errors], but the
|
||||
/// [ParseResult.sql] will only refer to the substring creating a statement.
|
||||
List<ParseResult> parseMultiple(String sql) {
|
||||
final tokens = tokenize(sql);
|
||||
final parser = Parser(tokens);
|
||||
|
||||
final stmts = parser.statements();
|
||||
|
||||
return stmts.map((statement) {
|
||||
final first = statement.firstPosition;
|
||||
final last = statement.lastPosition;
|
||||
|
||||
final source = sql.substring(first, last);
|
||||
return ParseResult._(statement, parser.errors, source);
|
||||
}).toList();
|
||||
}
|
||||
|
||||
/// Parses and analyzes the [sql] statement. The [AnalysisContext] returned
|
||||
/// contains all information about type hints, errors, and the parsed AST.
|
||||
///
|
||||
/// The analyzer needs to know all the available tables to resolve references
|
||||
/// and result columns, so all known tables should be registered using
|
||||
/// [registerTable] before calling this method.
|
||||
AnalysisContext analyze(String sql) {
|
||||
final result = parse(sql);
|
||||
return analyzeParsed(result);
|
||||
}
|
||||
|
||||
/// Analyzes a parsed [result] statement. The [AnalysisContext] returned
|
||||
/// contains all information about type hints, errors, and the parsed AST.
|
||||
///
|
||||
/// The analyzer needs to know all the available tables to resolve references
|
||||
/// and result columns, so all known tables should be registered using
|
||||
/// [registerTable] before calling this method.
|
||||
AnalysisContext analyzeParsed(ParseResult result) {
|
||||
final node = result.rootNode;
|
||||
const SetParentVisitor().startAtRoot(node);
|
||||
|
||||
final context = AnalysisContext(node, sql);
|
||||
final context = AnalysisContext(node, result.sql);
|
||||
final scope = _constructRootScope();
|
||||
|
||||
try {
|
||||
ReferenceFinder(globalScope: scope).start(node);
|
||||
node
|
||||
..accept(ColumnResolver(context))
|
||||
..accept(ReferenceResolver(context))
|
||||
..accept(TypeResolvingVisitor(context));
|
||||
|
||||
if (node is CrudStatement) {
|
||||
node
|
||||
..accept(ColumnResolver(context))
|
||||
..accept(ReferenceResolver(context))
|
||||
..accept(TypeResolvingVisitor(context));
|
||||
}
|
||||
} catch (e) {
|
||||
// todo should we do now? AFAIK, everything that causes an exception
|
||||
// is added as an error contained in the context.
|
||||
|
@ -81,5 +118,8 @@ class ParseResult {
|
|||
/// where the error occurred.
|
||||
final List<ParsingError> errors;
|
||||
|
||||
ParseResult._(this.rootNode, this.errors);
|
||||
/// The sql source that created the AST at [rootNode].
|
||||
final String sql;
|
||||
|
||||
ParseResult._(this.rootNode, this.errors, this.sql);
|
||||
}
|
||||
|
|
|
@ -0,0 +1,351 @@
|
|||
part of 'parser.dart';
|
||||
|
||||
mixin CrudParser on ParserBase {
|
||||
@override
|
||||
SelectStatement select() {
|
||||
if (!_match(const [TokenType.select])) return null;
|
||||
final selectToken = _previous;
|
||||
|
||||
var distinct = false;
|
||||
if (_matchOne(TokenType.distinct)) {
|
||||
distinct = true;
|
||||
} else if (_matchOne(TokenType.all)) {
|
||||
distinct = false;
|
||||
}
|
||||
|
||||
final resultColumns = <ResultColumn>[];
|
||||
do {
|
||||
resultColumns.add(_resultColumn());
|
||||
} while (_match(const [TokenType.comma]));
|
||||
|
||||
final from = _from();
|
||||
|
||||
final where = _where();
|
||||
final groupBy = _groupBy();
|
||||
final orderBy = _orderBy();
|
||||
final limit = _limit();
|
||||
|
||||
return SelectStatement(
|
||||
distinct: distinct,
|
||||
columns: resultColumns,
|
||||
from: from,
|
||||
where: where,
|
||||
groupBy: groupBy,
|
||||
orderBy: orderBy,
|
||||
limit: limit,
|
||||
)..setSpan(selectToken, _previous);
|
||||
}
|
||||
|
||||
/// Parses a [ResultColumn] or throws if none is found.
|
||||
/// https://www.sqlite.org/syntax/result-column.html
|
||||
ResultColumn _resultColumn() {
|
||||
if (_match(const [TokenType.star])) {
|
||||
return StarResultColumn(null)..setSpan(_previous, _previous);
|
||||
}
|
||||
|
||||
final positionBefore = _current;
|
||||
|
||||
if (_match(const [TokenType.identifier])) {
|
||||
// two options. the identifier could be followed by ".*", in which case
|
||||
// we have a star result column. If it's followed by anything else, it can
|
||||
// still refer to a column in a table as part of a expression result column
|
||||
final identifier = _previous;
|
||||
|
||||
if (_match(const [TokenType.dot]) && _match(const [TokenType.star])) {
|
||||
return StarResultColumn((identifier as IdentifierToken).identifier)
|
||||
..setSpan(identifier, _previous);
|
||||
}
|
||||
|
||||
// not a star result column. go back and parse the expression.
|
||||
// todo this is a bit unorthodox. is there a better way to parse the
|
||||
// expression from before?
|
||||
_current = positionBefore;
|
||||
}
|
||||
|
||||
final tokenBefore = _peek;
|
||||
|
||||
final expr = expression();
|
||||
final as = _as();
|
||||
|
||||
return ExpressionResultColumn(expression: expr, as: as?.identifier)
|
||||
..setSpan(tokenBefore, _previous);
|
||||
}
|
||||
|
||||
/// Returns an identifier followed after an optional "AS" token in sql.
|
||||
/// Returns null if there is
|
||||
IdentifierToken _as() {
|
||||
if (_match(const [TokenType.as])) {
|
||||
return _consume(TokenType.identifier, 'Expected an identifier')
|
||||
as IdentifierToken;
|
||||
} else if (_match(const [TokenType.identifier])) {
|
||||
return _previous as IdentifierToken;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
List<Queryable> _from() {
|
||||
if (!_matchOne(TokenType.from)) return [];
|
||||
|
||||
// Can either be a list of <TableOrSubquery> or a join. Joins also start
|
||||
// with a TableOrSubquery, so let's first parse that.
|
||||
final start = _tableOrSubquery();
|
||||
// parse join, if it is one
|
||||
final join = _joinClause(start);
|
||||
if (join != null) {
|
||||
return [join];
|
||||
}
|
||||
|
||||
// not a join. Keep the TableOrSubqueries coming!
|
||||
final queries = [start];
|
||||
while (_matchOne(TokenType.comma)) {
|
||||
queries.add(_tableOrSubquery());
|
||||
}
|
||||
|
||||
return queries;
|
||||
}
|
||||
|
||||
TableOrSubquery _tableOrSubquery() {
|
||||
// this is what we're parsing: https://www.sqlite.org/syntax/table-or-subquery.html
|
||||
// we currently only support regular tables and nested selects
|
||||
final tableRef = _tableReference();
|
||||
if (tableRef != null) {
|
||||
return tableRef;
|
||||
} else if (_matchOne(TokenType.leftParen)) {
|
||||
final innerStmt = select();
|
||||
_consume(TokenType.rightParen,
|
||||
'Expected a right bracket to terminate the inner select');
|
||||
|
||||
final alias = _as();
|
||||
return SelectStatementAsSource(
|
||||
statement: innerStmt, as: alias?.identifier);
|
||||
}
|
||||
|
||||
_error('Expected a table name or a nested select statement');
|
||||
}
|
||||
|
||||
TableReference _tableReference() {
|
||||
if (_matchOne(TokenType.identifier)) {
|
||||
// ignore the schema name, it's not supported. Besides that, we're on the
|
||||
// first branch in the diagram here
|
||||
final tableName = (_previous as IdentifierToken).identifier;
|
||||
final alias = _as();
|
||||
return TableReference(tableName, alias?.identifier);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
JoinClause _joinClause(TableOrSubquery start) {
|
||||
var operator = _parseJoinOperatorNoComma();
|
||||
if (operator == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
final joins = <Join>[];
|
||||
|
||||
while (operator != null) {
|
||||
final subquery = _tableOrSubquery();
|
||||
final constraint = _joinConstraint();
|
||||
JoinOperator resolvedOperator;
|
||||
if (operator.contains(TokenType.left)) {
|
||||
resolvedOperator = operator.contains(TokenType.outer)
|
||||
? JoinOperator.leftOuter
|
||||
: JoinOperator.left;
|
||||
} else if (operator.contains(TokenType.inner)) {
|
||||
resolvedOperator = JoinOperator.inner;
|
||||
} else if (operator.contains(TokenType.cross)) {
|
||||
resolvedOperator = JoinOperator.cross;
|
||||
} else if (operator.contains(TokenType.comma)) {
|
||||
resolvedOperator = JoinOperator.comma;
|
||||
} else {
|
||||
resolvedOperator = JoinOperator.none;
|
||||
}
|
||||
|
||||
joins.add(Join(
|
||||
natural: operator.contains(TokenType.natural),
|
||||
operator: resolvedOperator,
|
||||
query: subquery,
|
||||
constraint: constraint,
|
||||
));
|
||||
|
||||
// parse the next operator, if there is more than one join
|
||||
if (_matchOne(TokenType.comma)) {
|
||||
operator = [TokenType.comma];
|
||||
} else {
|
||||
operator = _parseJoinOperatorNoComma();
|
||||
}
|
||||
}
|
||||
|
||||
return JoinClause(primary: start, joins: joins);
|
||||
}
|
||||
|
||||
/// Parses https://www.sqlite.org/syntax/join-operator.html, minus the comma.
|
||||
List<TokenType> _parseJoinOperatorNoComma() {
|
||||
if (_match(_startOperators)) {
|
||||
final operators = [_previous.type];
|
||||
|
||||
if (_previous.type == TokenType.join) {
|
||||
// just join, without any specific operators
|
||||
return operators;
|
||||
} else {
|
||||
// natural is a prefix, another operator can follow.
|
||||
if (_previous.type == TokenType.natural) {
|
||||
if (_match([TokenType.left, TokenType.inner, TokenType.cross])) {
|
||||
operators.add(_previous.type);
|
||||
}
|
||||
}
|
||||
if (_previous.type == TokenType.left && _matchOne(TokenType.outer)) {
|
||||
operators.add(_previous.type);
|
||||
}
|
||||
|
||||
_consume(TokenType.join, 'Expected to see a join keyword here');
|
||||
return operators;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/// Parses https://www.sqlite.org/syntax/join-constraint.html
|
||||
JoinConstraint _joinConstraint() {
|
||||
if (_matchOne(TokenType.on)) {
|
||||
return OnConstraint(expression: expression());
|
||||
} else if (_matchOne(TokenType.using)) {
|
||||
_consume(TokenType.leftParen, 'Expected an opening paranthesis');
|
||||
|
||||
final columnNames = <String>[];
|
||||
do {
|
||||
final identifier =
|
||||
_consume(TokenType.identifier, 'Expected a column name');
|
||||
columnNames.add((identifier as IdentifierToken).identifier);
|
||||
} while (_matchOne(TokenType.comma));
|
||||
|
||||
_consume(TokenType.rightParen, 'Expected an closing paranthesis');
|
||||
|
||||
return UsingConstraint(columnNames: columnNames);
|
||||
}
|
||||
_error('Expected a constraint with ON or USING');
|
||||
}
|
||||
|
||||
/// Parses a where clause if there is one at the current position
|
||||
Expression _where() {
|
||||
if (_match(const [TokenType.where])) {
|
||||
return expression();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
GroupBy _groupBy() {
|
||||
if (_matchOne(TokenType.group)) {
|
||||
_consume(TokenType.by, 'Expected a "BY"');
|
||||
final by = <Expression>[];
|
||||
Expression having;
|
||||
|
||||
do {
|
||||
by.add(expression());
|
||||
} while (_matchOne(TokenType.comma));
|
||||
|
||||
if (_matchOne(TokenType.having)) {
|
||||
having = expression();
|
||||
}
|
||||
|
||||
return GroupBy(by: by, having: having);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
OrderBy _orderBy() {
|
||||
if (_match(const [TokenType.order])) {
|
||||
_consume(TokenType.by, 'Expected "BY" after "ORDER" token');
|
||||
final terms = <OrderingTerm>[];
|
||||
do {
|
||||
terms.add(_orderingTerm());
|
||||
} while (_matchOne(TokenType.comma));
|
||||
return OrderBy(terms: terms);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
OrderingTerm _orderingTerm() {
|
||||
final expr = expression();
|
||||
|
||||
return OrderingTerm(expression: expr, orderingMode: _orderingModeOrNull());
|
||||
}
|
||||
|
||||
@override
|
||||
OrderingMode _orderingModeOrNull() {
|
||||
if (_match(const [TokenType.asc, TokenType.desc])) {
|
||||
final mode = _previous.type == TokenType.asc
|
||||
? OrderingMode.ascending
|
||||
: OrderingMode.descending;
|
||||
return mode;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/// Parses a [Limit] clause, or returns null if there is no limit token after
|
||||
/// the current position.
|
||||
Limit _limit() {
|
||||
if (!_matchOne(TokenType.limit)) return null;
|
||||
|
||||
// Unintuitive, it's "$amount OFFSET $offset", but "$offset, $amount"
|
||||
// the order changes between the separator tokens.
|
||||
final first = expression();
|
||||
|
||||
if (_matchOne(TokenType.comma)) {
|
||||
final separator = _previous;
|
||||
final count = expression();
|
||||
return Limit(count: count, offsetSeparator: separator, offset: first);
|
||||
} else if (_matchOne(TokenType.offset)) {
|
||||
final separator = _previous;
|
||||
final offset = expression();
|
||||
return Limit(count: first, offsetSeparator: separator, offset: offset);
|
||||
} else {
|
||||
return Limit(count: first);
|
||||
}
|
||||
}
|
||||
|
||||
DeleteStatement _deleteStmt() {
|
||||
if (!_matchOne(TokenType.delete)) return null;
|
||||
_consume(TokenType.from, 'Expected a FROM here');
|
||||
|
||||
final table = _tableReference();
|
||||
Expression where;
|
||||
if (table == null) {
|
||||
_error('Expected a table reference');
|
||||
}
|
||||
|
||||
if (_matchOne(TokenType.where)) {
|
||||
where = expression();
|
||||
}
|
||||
|
||||
return DeleteStatement(from: table, where: where);
|
||||
}
|
||||
|
||||
UpdateStatement _update() {
|
||||
if (!_matchOne(TokenType.update)) return null;
|
||||
FailureMode failureMode;
|
||||
if (_matchOne(TokenType.or)) {
|
||||
failureMode = UpdateStatement.failureModeFromToken(_advance().type);
|
||||
}
|
||||
|
||||
final table = _tableReference();
|
||||
_consume(TokenType.set, 'Expected SET after the table name');
|
||||
|
||||
final set = <SetComponent>[];
|
||||
do {
|
||||
final columnName =
|
||||
_consume(TokenType.identifier, 'Expected a column name to set')
|
||||
as IdentifierToken;
|
||||
final reference = Reference(columnName: columnName.identifier)
|
||||
..setSpan(columnName, columnName);
|
||||
_consume(TokenType.equal, 'Expected = after the column name');
|
||||
final expr = expression();
|
||||
|
||||
set.add(SetComponent(column: reference, expression: expr));
|
||||
} while (_matchOne(TokenType.comma));
|
||||
|
||||
final where = _where();
|
||||
return UpdateStatement(
|
||||
or: failureMode, table: table, set: set, where: where);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,349 @@
|
|||
part of 'parser.dart';
|
||||
|
||||
/// Parses expressions. Expressions have the following precedence:
|
||||
/// - `-`, `+`, `~`, unary not
|
||||
/// - `||` (concatenation)
|
||||
/// - `*`, '/', '%'
|
||||
/// - `+`, `-`
|
||||
/// - `<<`, `>>`, `&`, `|`
|
||||
/// - `<`, `<=`, `>`, `>=`
|
||||
/// - `=`, `==`, `!=`, `<>`, `IS`, `IS NOT`, `IN`, `LIKE`, `GLOB`, `MATCH`,
|
||||
/// `REGEXP`
|
||||
/// - `AND`
|
||||
/// - `OR`
|
||||
/// - Case expressions
|
||||
mixin ExpressionParser on ParserBase {
|
||||
@override
|
||||
Expression expression() {
|
||||
return _case();
|
||||
}
|
||||
|
||||
Expression _case() {
|
||||
if (_matchOne(TokenType.$case)) {
|
||||
final base = _check(TokenType.when) ? null : _or();
|
||||
final whens = <WhenComponent>[];
|
||||
Expression $else;
|
||||
|
||||
while (_matchOne(TokenType.when)) {
|
||||
final whenExpr = _or();
|
||||
_consume(TokenType.then, 'Expected THEN');
|
||||
final then = _or();
|
||||
whens.add(WhenComponent(when: whenExpr, then: then));
|
||||
}
|
||||
|
||||
if (_matchOne(TokenType.$else)) {
|
||||
$else = _or();
|
||||
}
|
||||
|
||||
_consume(TokenType.end, 'Expected END to finish the case operator');
|
||||
return CaseExpression(whens: whens, base: base, elseExpr: $else);
|
||||
}
|
||||
|
||||
return _or();
|
||||
}
|
||||
|
||||
/// Parses an expression of the form a <T> b, where <T> is in [types] and
|
||||
/// both a and b are expressions with a higher precedence parsed from
|
||||
/// [higherPrecedence].
|
||||
Expression _parseSimpleBinary(
|
||||
List<TokenType> types, Expression Function() higherPrecedence) {
|
||||
var expression = higherPrecedence();
|
||||
|
||||
while (_match(types)) {
|
||||
final operator = _previous;
|
||||
final right = higherPrecedence();
|
||||
expression = BinaryExpression(expression, operator, right);
|
||||
}
|
||||
return expression;
|
||||
}
|
||||
|
||||
Expression _or() => _parseSimpleBinary(const [TokenType.or], _and);
|
||||
Expression _and() => _parseSimpleBinary(const [TokenType.and], _in);
|
||||
|
||||
Expression _in() {
|
||||
final left = _equals();
|
||||
|
||||
if (_checkWithNot(TokenType.$in)) {
|
||||
final not = _matchOne(TokenType.not);
|
||||
_matchOne(TokenType.$in);
|
||||
|
||||
var inside = _equals();
|
||||
if (inside is Parentheses) {
|
||||
// if we have something like x IN (3), then (3) is a tuple and not a
|
||||
// parenthesis. We can only know this from the context unfortunately
|
||||
inside = (inside as Parentheses).asTuple;
|
||||
}
|
||||
|
||||
return InExpression(left: left, inside: inside, not: not);
|
||||
}
|
||||
|
||||
return left;
|
||||
}
|
||||
|
||||
/// Parses expressions with the "equals" precedence. This contains
|
||||
/// comparisons, "IS (NOT) IN" expressions, between expressions and "like"
|
||||
/// expressions.
|
||||
Expression _equals() {
|
||||
var expression = _comparison();
|
||||
|
||||
final ops = const [
|
||||
TokenType.equal,
|
||||
TokenType.doubleEqual,
|
||||
TokenType.exclamationEqual,
|
||||
TokenType.lessMore,
|
||||
TokenType.$is,
|
||||
];
|
||||
final stringOps = const [
|
||||
TokenType.like,
|
||||
TokenType.glob,
|
||||
TokenType.match,
|
||||
TokenType.regexp,
|
||||
];
|
||||
|
||||
while (true) {
|
||||
if (_checkWithNot(TokenType.between)) {
|
||||
final not = _matchOne(TokenType.not);
|
||||
_consume(TokenType.between, 'expected a BETWEEN');
|
||||
|
||||
final lower = _comparison();
|
||||
_consume(TokenType.and, 'expected AND');
|
||||
final upper = _comparison();
|
||||
|
||||
expression = BetweenExpression(
|
||||
not: not, check: expression, lower: lower, upper: upper);
|
||||
} else if (_match(ops)) {
|
||||
final operator = _previous;
|
||||
if (operator.type == TokenType.$is) {
|
||||
final not = _match(const [TokenType.not]);
|
||||
// special case: is not expression
|
||||
expression = IsExpression(not, expression, _comparison());
|
||||
} else {
|
||||
expression = BinaryExpression(expression, operator, _comparison());
|
||||
}
|
||||
} else if (_checkAnyWithNot(stringOps)) {
|
||||
final not = _matchOne(TokenType.not);
|
||||
_match(stringOps); // will consume, existence was verified with check
|
||||
final operator = _previous;
|
||||
|
||||
final right = _comparison();
|
||||
Expression escape;
|
||||
if (_matchOne(TokenType.escape)) {
|
||||
escape = _comparison();
|
||||
}
|
||||
|
||||
expression = StringComparisonExpression(
|
||||
not: not,
|
||||
left: expression,
|
||||
operator: operator,
|
||||
right: right,
|
||||
escape: escape);
|
||||
} else {
|
||||
break; // no matching operator with this precedence was found
|
||||
}
|
||||
}
|
||||
|
||||
return expression;
|
||||
}
|
||||
|
||||
Expression _comparison() {
|
||||
return _parseSimpleBinary(_comparisonOperators, _binaryOperation);
|
||||
}
|
||||
|
||||
Expression _binaryOperation() {
|
||||
return _parseSimpleBinary(_binaryOperators, _addition);
|
||||
}
|
||||
|
||||
Expression _addition() {
|
||||
return _parseSimpleBinary(const [
|
||||
TokenType.plus,
|
||||
TokenType.minus,
|
||||
], _multiplication);
|
||||
}
|
||||
|
||||
Expression _multiplication() {
|
||||
return _parseSimpleBinary(const [
|
||||
TokenType.star,
|
||||
TokenType.slash,
|
||||
TokenType.percent,
|
||||
], _concatenation);
|
||||
}
|
||||
|
||||
Expression _concatenation() {
|
||||
return _parseSimpleBinary(const [TokenType.doublePipe], _unary);
|
||||
}
|
||||
|
||||
Expression _unary() {
|
||||
if (_match(const [
|
||||
TokenType.minus,
|
||||
TokenType.plus,
|
||||
TokenType.tilde,
|
||||
TokenType.not,
|
||||
])) {
|
||||
final operator = _previous;
|
||||
final expression = _unary();
|
||||
return UnaryExpression(operator, expression);
|
||||
} else if (_matchOne(TokenType.exists)) {
|
||||
_consume(
|
||||
TokenType.leftParen, 'Expected opening parenthesis after EXISTS');
|
||||
final selectStmt = select();
|
||||
_consume(TokenType.rightParen,
|
||||
'Expected closing paranthesis to finish EXISTS expression');
|
||||
return ExistsExpression(select: selectStmt);
|
||||
}
|
||||
|
||||
return _postfix();
|
||||
}
|
||||
|
||||
Expression _postfix() {
|
||||
// todo parse ISNULL, NOTNULL, NOT NULL, etc.
|
||||
// I don't even know the precedence ¯\_(ツ)_/¯ (probably not higher than
|
||||
// unary)
|
||||
var expression = _primary();
|
||||
|
||||
while (_matchOne(TokenType.collate)) {
|
||||
final collateOp = _previous;
|
||||
final collateFun =
|
||||
_consume(TokenType.identifier, 'Expected a collating sequence')
|
||||
as IdentifierToken;
|
||||
expression = CollateExpression(
|
||||
inner: expression,
|
||||
operator: collateOp,
|
||||
collateFunction: collateFun,
|
||||
)..setSpan(expression.first, collateFun);
|
||||
}
|
||||
|
||||
return expression;
|
||||
}
|
||||
|
||||
@override
|
||||
Literal _literalOrNull() {
|
||||
final token = _peek;
|
||||
|
||||
Literal _parseInner() {
|
||||
if (_matchOne(TokenType.numberLiteral)) {
|
||||
return NumericLiteral(_parseNumber(token.lexeme), token);
|
||||
}
|
||||
if (_matchOne(TokenType.stringLiteral)) {
|
||||
return StringLiteral(token as StringLiteralToken);
|
||||
}
|
||||
if (_matchOne(TokenType.$null)) {
|
||||
return NullLiteral(token);
|
||||
}
|
||||
if (_matchOne(TokenType.$true)) {
|
||||
return BooleanLiteral.withTrue(token);
|
||||
}
|
||||
if (_matchOne(TokenType.$false)) {
|
||||
return BooleanLiteral.withFalse(token);
|
||||
}
|
||||
// todo CURRENT_TIME, CURRENT_DATE, CURRENT_TIMESTAMP
|
||||
return null;
|
||||
}
|
||||
|
||||
final literal = _parseInner();
|
||||
literal?.setSpan(token, token);
|
||||
return literal;
|
||||
}
|
||||
|
||||
Expression _primary() {
|
||||
final literal = _literalOrNull();
|
||||
if (literal != null) return literal;
|
||||
|
||||
final token = _advance();
|
||||
final type = token.type;
|
||||
switch (type) {
|
||||
case TokenType.leftParen:
|
||||
// Opening brackets could be three things: An inner select statement
|
||||
// (SELECT ...), a parenthesised expression, or a tuple of expressions
|
||||
// (a, b, c).
|
||||
final left = token;
|
||||
if (_peek.type == TokenType.select) {
|
||||
final stmt = select();
|
||||
_consume(TokenType.rightParen, 'Expected a closing bracket');
|
||||
return SubQuery(select: stmt);
|
||||
} else {
|
||||
// alright, it's either a tuple or just parenthesis. A tuple can be
|
||||
// empty, so if the next statement is the closing bracket we're done
|
||||
if (_matchOne(TokenType.rightParen)) {
|
||||
return TupleExpression(expressions: [])..setSpan(left, _previous);
|
||||
}
|
||||
|
||||
final expr = expression();
|
||||
|
||||
// Are we witnessing a tuple?
|
||||
if (_check(TokenType.comma)) {
|
||||
// we are, add expressions as long as we see commas
|
||||
final exprs = [expr];
|
||||
while (_matchOne(TokenType.comma)) {
|
||||
exprs.add(expression());
|
||||
}
|
||||
|
||||
_consume(TokenType.rightParen, 'Expected a closing bracket');
|
||||
return TupleExpression(expressions: exprs);
|
||||
} else {
|
||||
// we aren't, so that'll just be parentheses.
|
||||
_consume(TokenType.rightParen, 'Expected a closing bracket');
|
||||
return Parentheses(left, expr, token);
|
||||
}
|
||||
}
|
||||
break;
|
||||
case TokenType.identifier:
|
||||
// could be table.column, function(...) or just column
|
||||
final first = token as IdentifierToken;
|
||||
|
||||
if (_matchOne(TokenType.dot)) {
|
||||
final second =
|
||||
_consume(TokenType.identifier, 'Expected a column name here')
|
||||
as IdentifierToken;
|
||||
return Reference(
|
||||
tableName: first.identifier, columnName: second.identifier)
|
||||
..setSpan(first, second);
|
||||
} else if (_matchOne(TokenType.leftParen)) {
|
||||
final parameters = _functionParameters();
|
||||
final rightParen = _consume(TokenType.rightParen,
|
||||
'Expected closing bracket after argument list');
|
||||
|
||||
return FunctionExpression(
|
||||
name: first.identifier, parameters: parameters)
|
||||
..setSpan(first, rightParen);
|
||||
} else {
|
||||
return Reference(columnName: first.identifier)..setSpan(first, first);
|
||||
}
|
||||
break;
|
||||
case TokenType.questionMark:
|
||||
final mark = token;
|
||||
|
||||
if (_matchOne(TokenType.numberLiteral)) {
|
||||
final number = _previous;
|
||||
return NumberedVariable(mark, _parseNumber(number.lexeme).toInt())
|
||||
..setSpan(mark, number);
|
||||
} else {
|
||||
return NumberedVariable(mark, null)..setSpan(mark, mark);
|
||||
}
|
||||
break;
|
||||
case TokenType.colon:
|
||||
final colon = token;
|
||||
final identifier = _consume(TokenType.identifier,
|
||||
'Expected an identifier for the named variable') as IdentifierToken;
|
||||
final content = identifier.identifier;
|
||||
return ColonNamedVariable(':$content')..setSpan(colon, identifier);
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
// nothing found -> issue error
|
||||
_error('Could not parse this expression');
|
||||
}
|
||||
|
||||
FunctionParameters _functionParameters() {
|
||||
if (_matchOne(TokenType.star)) {
|
||||
return const StarFunctionParameter();
|
||||
}
|
||||
|
||||
final distinct = _matchOne(TokenType.distinct);
|
||||
final parameters = <Expression>[];
|
||||
while (_peek.type != TokenType.rightParen) {
|
||||
parameters.add(expression());
|
||||
}
|
||||
return ExprFunctionParameters(distinct: distinct, parameters: parameters);
|
||||
}
|
||||
}
|
|
@ -2,7 +2,10 @@ import 'package:meta/meta.dart';
|
|||
import 'package:sqlparser/src/ast/ast.dart';
|
||||
import 'package:sqlparser/src/reader/tokenizer/token.dart';
|
||||
|
||||
part 'crud.dart';
|
||||
part 'num_parser.dart';
|
||||
part 'expressions.dart';
|
||||
part 'schema.dart';
|
||||
|
||||
const _comparisonOperators = [
|
||||
TokenType.less,
|
||||
|
@ -37,22 +40,19 @@ class ParsingError implements Exception {
|
|||
}
|
||||
}
|
||||
|
||||
// todo better error handling and synchronisation, like it's done here:
|
||||
// https://craftinginterpreters.com/parsing-expressions.html#synchronizing-a-recursive-descent-parser
|
||||
|
||||
class Parser {
|
||||
abstract class ParserBase {
|
||||
final List<Token> tokens;
|
||||
final List<ParsingError> errors = [];
|
||||
int _current = 0;
|
||||
|
||||
Parser(this.tokens);
|
||||
ParserBase(this.tokens);
|
||||
|
||||
bool get _isAtEnd => _peek.type == TokenType.eof;
|
||||
Token get _peek => tokens[_current];
|
||||
Token get _peekNext => tokens[_current + 1];
|
||||
Token get _previous => tokens[_current - 1];
|
||||
|
||||
bool _match(List<TokenType> types) {
|
||||
bool _match(Iterable<TokenType> types) {
|
||||
for (var type in types) {
|
||||
if (_check(type)) {
|
||||
_advance();
|
||||
|
@ -109,686 +109,51 @@ class Parser {
|
|||
_error(message);
|
||||
}
|
||||
|
||||
Statement statement() {
|
||||
final stmt = select() ?? _deleteStmt() ?? _update();
|
||||
|
||||
_matchOne(TokenType.semicolon);
|
||||
if (!_isAtEnd) {
|
||||
_error('Expected the statement to finish here');
|
||||
}
|
||||
return stmt;
|
||||
IdentifierToken _consumeIdentifier(String message) {
|
||||
return _consume(TokenType.identifier, message) as IdentifierToken;
|
||||
}
|
||||
|
||||
// Common operations that we are referenced very often
|
||||
Expression expression();
|
||||
|
||||
/// Parses a [SelectStatement], or returns null if there is no select token
|
||||
/// after the current position.
|
||||
///
|
||||
/// See also:
|
||||
/// https://www.sqlite.org/lang_select.html
|
||||
SelectStatement select() {
|
||||
if (!_match(const [TokenType.select])) return null;
|
||||
final selectToken = _previous;
|
||||
SelectStatement select();
|
||||
|
||||
var distinct = false;
|
||||
if (_matchOne(TokenType.distinct)) {
|
||||
distinct = true;
|
||||
} else if (_matchOne(TokenType.all)) {
|
||||
distinct = false;
|
||||
Literal _literalOrNull();
|
||||
OrderingMode _orderingModeOrNull();
|
||||
}
|
||||
|
||||
// todo better error handling and synchronisation, like it's done here:
|
||||
// https://craftinginterpreters.com/parsing-expressions.html#synchronizing-a-recursive-descent-parser
|
||||
|
||||
class Parser extends ParserBase
|
||||
with ExpressionParser, SchemaParser, CrudParser {
|
||||
Parser(List<Token> tokens) : super(tokens);
|
||||
|
||||
Statement statement({bool expectEnd = true}) {
|
||||
final first = _peek;
|
||||
final stmt = select() ?? _deleteStmt() ?? _update() ?? _createTable();
|
||||
|
||||
if (stmt == null) {
|
||||
_error('Expected a sql statement to start here');
|
||||
}
|
||||
|
||||
final resultColumns = <ResultColumn>[];
|
||||
do {
|
||||
resultColumns.add(_resultColumn());
|
||||
} while (_match(const [TokenType.comma]));
|
||||
|
||||
final from = _from();
|
||||
|
||||
final where = _where();
|
||||
final groupBy = _groupBy();
|
||||
final orderBy = _orderBy();
|
||||
final limit = _limit();
|
||||
|
||||
return SelectStatement(
|
||||
distinct: distinct,
|
||||
columns: resultColumns,
|
||||
from: from,
|
||||
where: where,
|
||||
groupBy: groupBy,
|
||||
orderBy: orderBy,
|
||||
limit: limit,
|
||||
)..setSpan(selectToken, _previous);
|
||||
}
|
||||
|
||||
/// Parses a [ResultColumn] or throws if none is found.
|
||||
/// https://www.sqlite.org/syntax/result-column.html
|
||||
ResultColumn _resultColumn() {
|
||||
if (_match(const [TokenType.star])) {
|
||||
return StarResultColumn(null)..setSpan(_previous, _previous);
|
||||
_matchOne(TokenType.semicolon);
|
||||
if (!_isAtEnd && expectEnd) {
|
||||
_error('Expected the statement to finish here');
|
||||
}
|
||||
return stmt..setSpan(first, _previous);
|
||||
}
|
||||
|
||||
final positionBefore = _current;
|
||||
|
||||
if (_match(const [TokenType.identifier])) {
|
||||
// two options. the identifier could be followed by ".*", in which case
|
||||
// we have a star result column. If it's followed by anything else, it can
|
||||
// still refer to a column in a table as part of a expression result column
|
||||
final identifier = _previous;
|
||||
|
||||
if (_match(const [TokenType.dot]) && _match(const [TokenType.star])) {
|
||||
return StarResultColumn((identifier as IdentifierToken).identifier)
|
||||
..setSpan(identifier, _previous);
|
||||
}
|
||||
|
||||
// not a star result column. go back and parse the expression.
|
||||
// todo this is a bit unorthodox. is there a better way to parse the
|
||||
// expression from before?
|
||||
_current = positionBefore;
|
||||
List<Statement> statements() {
|
||||
final stmts = <Statement>[];
|
||||
while (!_isAtEnd) {
|
||||
stmts.add(statement(expectEnd: false));
|
||||
}
|
||||
|
||||
final tokenBefore = _peek;
|
||||
|
||||
final expr = expression();
|
||||
final as = _as();
|
||||
|
||||
return ExpressionResultColumn(expression: expr, as: as?.identifier)
|
||||
..setSpan(tokenBefore, _previous);
|
||||
}
|
||||
|
||||
/// Returns an identifier followed after an optional "AS" token in sql.
|
||||
/// Returns null if there is
|
||||
IdentifierToken _as() {
|
||||
if (_match(const [TokenType.as])) {
|
||||
return _consume(TokenType.identifier, 'Expected an identifier')
|
||||
as IdentifierToken;
|
||||
} else if (_match(const [TokenType.identifier])) {
|
||||
return _previous as IdentifierToken;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
List<Queryable> _from() {
|
||||
if (!_matchOne(TokenType.from)) return [];
|
||||
|
||||
// Can either be a list of <TableOrSubquery> or a join. Joins also start
|
||||
// with a TableOrSubquery, so let's first parse that.
|
||||
final start = _tableOrSubquery();
|
||||
// parse join, if it is one
|
||||
final join = _joinClause(start);
|
||||
if (join != null) {
|
||||
return [join];
|
||||
}
|
||||
|
||||
// not a join. Keep the TableOrSubqueries coming!
|
||||
final queries = [start];
|
||||
while (_matchOne(TokenType.comma)) {
|
||||
queries.add(_tableOrSubquery());
|
||||
}
|
||||
|
||||
return queries;
|
||||
}
|
||||
|
||||
TableOrSubquery _tableOrSubquery() {
|
||||
// this is what we're parsing: https://www.sqlite.org/syntax/table-or-subquery.html
|
||||
// we currently only support regular tables and nested selects
|
||||
final tableRef = _tableReference();
|
||||
if (tableRef != null) {
|
||||
return tableRef;
|
||||
} else if (_matchOne(TokenType.leftParen)) {
|
||||
final innerStmt = select();
|
||||
_consume(TokenType.rightParen,
|
||||
'Expected a right bracket to terminate the inner select');
|
||||
|
||||
final alias = _as();
|
||||
return SelectStatementAsSource(
|
||||
statement: innerStmt, as: alias?.identifier);
|
||||
}
|
||||
|
||||
_error('Expected a table name or a nested select statement');
|
||||
}
|
||||
|
||||
TableReference _tableReference() {
|
||||
if (_matchOne(TokenType.identifier)) {
|
||||
// ignore the schema name, it's not supported. Besides that, we're on the
|
||||
// first branch in the diagram here
|
||||
final tableName = (_previous as IdentifierToken).identifier;
|
||||
final alias = _as();
|
||||
return TableReference(tableName, alias?.identifier);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/// Parses a chain of join operations starting from [start], or returns null
/// when the current token doesn't introduce a join operator.
JoinClause _joinClause(TableOrSubquery start) {
  var operator = _parseJoinOperatorNoComma();
  if (operator == null) {
    return null;
  }

  final joins = <Join>[];

  // Each iteration parses one "<operator> <table-or-subquery> <constraint>"
  // part. [operator] is the token-type list returned by
  // _parseJoinOperatorNoComma, or a synthetic [comma] list for `a, b` joins.
  while (operator != null) {
    final subquery = _tableOrSubquery();
    final constraint = _joinConstraint();
    JoinOperator resolvedOperator;
    if (operator.contains(TokenType.left)) {
      resolvedOperator = operator.contains(TokenType.outer)
          ? JoinOperator.leftOuter
          : JoinOperator.left;
    } else if (operator.contains(TokenType.inner)) {
      resolvedOperator = JoinOperator.inner;
    } else if (operator.contains(TokenType.cross)) {
      resolvedOperator = JoinOperator.cross;
    } else if (operator.contains(TokenType.comma)) {
      resolvedOperator = JoinOperator.comma;
    } else {
      // a plain JOIN without any modifier keyword
      resolvedOperator = JoinOperator.none;
    }

    joins.add(Join(
      natural: operator.contains(TokenType.natural),
      operator: resolvedOperator,
      query: subquery,
      constraint: constraint,
    ));

    // parse the next operator, if there is more than one join
    if (_matchOne(TokenType.comma)) {
      operator = [TokenType.comma];
    } else {
      operator = _parseJoinOperatorNoComma();
    }
  }

  return JoinClause(primary: start, joins: joins);
}
|
||||
|
||||
/// Parses https://www.sqlite.org/syntax/join-operator.html, minus the comma.
///
/// Returns the list of token types that made up the operator (e.g.
/// `[natural, left, outer]` for `NATURAL LEFT OUTER JOIN`), or null when the
/// current token doesn't start a join operator.
List<TokenType> _parseJoinOperatorNoComma() {
  if (_match(_startOperators)) {
    final operators = [_previous.type];

    if (_previous.type == TokenType.join) {
      // just join, without any specific operators
      return operators;
    } else {
      // natural is a prefix, another operator can follow.
      if (_previous.type == TokenType.natural) {
        if (_match([TokenType.left, TokenType.inner, TokenType.cross])) {
          operators.add(_previous.type);
        }
      }
      // LEFT may optionally be followed by OUTER
      if (_previous.type == TokenType.left && _matchOne(TokenType.outer)) {
        operators.add(_previous.type);
      }

      _consume(TokenType.join, 'Expected to see a join keyword here');
      return operators;
    }
  }
  return null;
}
|
||||
|
||||
/// Parses https://www.sqlite.org/syntax/join-constraint.html
///
/// Supports both the `ON <expr>` and the `USING (col, ...)` forms; reports
/// an error when neither keyword is present.
JoinConstraint _joinConstraint() {
  if (_matchOne(TokenType.on)) {
    return OnConstraint(expression: expression());
  } else if (_matchOne(TokenType.using)) {
    // fixed typo in message: "paranthesis" -> "parenthesis"
    _consume(TokenType.leftParen, 'Expected an opening parenthesis');

    final columnNames = <String>[];
    do {
      final identifier =
          _consume(TokenType.identifier, 'Expected a column name');
      columnNames.add((identifier as IdentifierToken).identifier);
    } while (_matchOne(TokenType.comma));

    // fixed grammar in message: "an closing paranthesis" -> "a closing parenthesis"
    _consume(TokenType.rightParen, 'Expected a closing parenthesis');

    return UsingConstraint(columnNames: columnNames);
  }
  _error('Expected a constraint with ON or USING');
}
|
||||
|
||||
/// Parses a WHERE clause if one starts at the current position, returning
/// its expression; null otherwise.
Expression _where() {
  return _matchOne(TokenType.where) ? expression() : null;
}
|
||||
|
||||
/// Parses a GROUP BY clause with an optional HAVING part, or returns null
/// when the current token is not the GROUP keyword.
GroupBy _groupBy() {
  if (!_matchOne(TokenType.group)) return null;

  _consume(TokenType.by, 'Expected a "BY"');

  // one or more comma-separated grouping expressions
  final groupers = <Expression>[];
  do {
    groupers.add(expression());
  } while (_matchOne(TokenType.comma));

  Expression havingExpr;
  if (_matchOne(TokenType.having)) {
    havingExpr = expression();
  }

  return GroupBy(by: groupers, having: havingExpr);
}
|
||||
|
||||
/// Parses an ORDER BY clause, or returns null when the current token is not
/// the ORDER keyword.
OrderBy _orderBy() {
  if (!_matchOne(TokenType.order)) return null;

  _consume(TokenType.by, 'Expected "BY" after "ORDER" token');

  // one or more comma-separated ordering terms
  final terms = <OrderingTerm>[];
  do {
    terms.add(_orderingTerm());
  } while (_matchOne(TokenType.comma));

  return OrderBy(terms: terms);
}
|
||||
|
||||
/// Parses a single ordering term: an expression followed by an optional
/// ASC or DESC modifier.
OrderingTerm _orderingTerm() {
  final expr = expression();

  if (!_match(const [TokenType.asc, TokenType.desc])) {
    // no explicit ordering mode given
    return OrderingTerm(expression: expr);
  }

  final mode = _previous.type == TokenType.asc
      ? OrderingMode.ascending
      : OrderingMode.descending;
  return OrderingTerm(expression: expr, orderingMode: mode);
}
|
||||
|
||||
/// Parses a [Limit] clause, or returns null if there is no limit token after
/// the current position.
Limit _limit() {
  if (!_matchOne(TokenType.limit)) return null;

  // Unintuitive: it's "$amount OFFSET $offset" but "$offset, $amount" — the
  // meaning of the first expression depends on the separator token.
  final first = expression();

  if (_matchOne(TokenType.comma)) {
    final separator = _previous;
    final count = expression();
    return Limit(count: count, offsetSeparator: separator, offset: first);
  }

  if (_matchOne(TokenType.offset)) {
    final separator = _previous;
    final offset = expression();
    return Limit(count: first, offsetSeparator: separator, offset: offset);
  }

  // only a count was given
  return Limit(count: first);
}
|
||||
|
||||
/// Parses a DELETE statement, or returns null when the current token isn't
/// the DELETE keyword.
DeleteStatement _deleteStmt() {
  if (!_matchOne(TokenType.delete)) return null;
  _consume(TokenType.from, 'Expected a FROM here');

  final table = _tableReference();
  if (table == null) {
    _error('Expected a table reference');
  }

  // optional WHERE clause
  final where = _matchOne(TokenType.where) ? expression() : null;

  return DeleteStatement(from: table, where: where);
}
|
||||
|
||||
/// Parses an UPDATE statement (optionally with an `OR <failure mode>` part),
/// or returns null when the current token isn't the UPDATE keyword.
UpdateStatement _update() {
  if (!_matchOne(TokenType.update)) return null;

  FailureMode failureMode;
  if (_matchOne(TokenType.or)) {
    failureMode = UpdateStatement.failureModeFromToken(_advance().type);
  }

  final table = _tableReference();
  _consume(TokenType.set, 'Expected SET after the table name');

  // parse the comma-separated "column = expr" assignments
  final set = <SetComponent>[];
  do {
    // the assigned column is parsed as a primary, which must be a reference
    final column = _primary() as Reference;
    _consume(TokenType.equal, 'Expected = after the column name');
    set.add(SetComponent(column: column, expression: expression()));
  } while (_matchOne(TokenType.comma));

  return UpdateStatement(
      or: failureMode, table: table, set: set, where: _where());
}
|
||||
|
||||
/* We parse expressions here.
|
||||
* Operators have the following precedence:
|
||||
* - + ~ NOT (unary)
|
||||
* || (concatenation)
|
||||
* * / %
|
||||
* + -
|
||||
* << >> & |
|
||||
* < <= > >=
|
||||
* = == != <> IS IS NOT IN LIKE GLOB MATCH REGEXP
|
||||
* AND
|
||||
* OR
|
||||
* We also treat expressions in parentheses and literals with the highest
|
||||
* priority. Parsing methods are written in ascending precedence, and each
|
||||
* parsing method calls the next higher precedence if unsuccessful.
|
||||
* https://www.sqlite.org/lang_expr.html
|
||||
* */
|
||||
|
||||
/// Parses an expression, starting at the lowest-precedence construct (CASE).
Expression expression() => _case();
|
||||
|
||||
/// Parses a CASE expression: `CASE [base] WHEN ... THEN ... [ELSE ...] END`.
///
/// Falls through to [_or] when the current token isn't CASE.
Expression _case() {
  if (_matchOne(TokenType.$case)) {
    // when CASE is directly followed by WHEN, there is no base expression
    final base = _check(TokenType.when) ? null : _or();
    final whens = <WhenComponent>[];
    Expression $else;

    while (_matchOne(TokenType.when)) {
      final whenExpr = _or();
      _consume(TokenType.then, 'Expected THEN');
      final then = _or();
      whens.add(WhenComponent(when: whenExpr, then: then));
    }

    if (_matchOne(TokenType.$else)) {
      $else = _or();
    }

    _consume(TokenType.end, 'Expected END to finish the case operator');
    return CaseExpression(whens: whens, base: base, elseExpr: $else);
  }

  return _or();
}
|
||||
|
||||
/// Parses a left-associative chain of binary expressions `a <T> b`, where
/// `<T>` is any token type in [types] and both operands are parsed with
/// [higherPrecedence].
Expression _parseSimpleBinary(
    List<TokenType> types, Expression Function() higherPrecedence) {
  var left = higherPrecedence();

  // fold further operands into [left] while another operator follows;
  // arguments evaluate left-to-right, so _previous is read before the
  // right-hand operand is parsed.
  while (_match(types)) {
    left = BinaryExpression(left, _previous, higherPrecedence());
  }
  return left;
}
|
||||
|
||||
/// Parses OR chains (lowest binary precedence).
Expression _or() {
  return _parseSimpleBinary(const [TokenType.or], _and);
}
|
||||
/// Parses AND chains, which bind tighter than OR.
Expression _and() {
  return _parseSimpleBinary(const [TokenType.and], _in);
}
|
||||
|
||||
/// Parses an (optionally negated) IN expression, e.g. `x IN (1, 2, 3)` or
/// `x NOT IN (SELECT ...)`. Falls through to [_equals] when no IN follows.
Expression _in() {
  final left = _equals();

  if (_checkWithNot(TokenType.$in)) {
    final not = _matchOne(TokenType.not);
    _matchOne(TokenType.$in);

    var inside = _equals();
    if (inside is Parentheses) {
      // if we have something like x IN (3), then (3) is a tuple and not a
      // parenthesis. We can only know this from the context unfortunately
      inside = (inside as Parentheses).asTuple;
    }

    return InExpression(left: left, inside: inside, not: not);
  }

  return left;
}
|
||||
|
||||
/// Parses expressions with the "equals" precedence. This contains
/// comparisons, "IS (NOT) IN" expressions, between expressions and "like"
/// expressions.
Expression _equals() {
  var expression = _comparison();

  // =, ==, !=, <> and IS
  final ops = const [
    TokenType.equal,
    TokenType.doubleEqual,
    TokenType.exclamationEqual,
    TokenType.lessMore,
    TokenType.$is,
  ];
  // operators that compare against a string pattern, may be prefixed by NOT
  final stringOps = const [
    TokenType.like,
    TokenType.glob,
    TokenType.match,
    TokenType.regexp,
  ];

  // fold operators of this precedence into [expression] until none matches
  // anymore (left-associative)
  while (true) {
    if (_checkWithNot(TokenType.between)) {
      final not = _matchOne(TokenType.not);
      _consume(TokenType.between, 'expected a BETWEEN');

      final lower = _comparison();
      _consume(TokenType.and, 'expected AND');
      final upper = _comparison();

      expression = BetweenExpression(
          not: not, check: expression, lower: lower, upper: upper);
    } else if (_match(ops)) {
      final operator = _previous;
      if (operator.type == TokenType.$is) {
        final not = _match(const [TokenType.not]);
        // special case: is not expression
        expression = IsExpression(not, expression, _comparison());
      } else {
        expression = BinaryExpression(expression, operator, _comparison());
      }
    } else if (_checkAnyWithNot(stringOps)) {
      final not = _matchOne(TokenType.not);
      _match(stringOps); // will consume, existence was verified with check
      final operator = _previous;

      final right = _comparison();
      // optional ESCAPE clause after a LIKE/GLOB/MATCH/REGEXP
      Expression escape;
      if (_matchOne(TokenType.escape)) {
        escape = _comparison();
      }

      expression = StringComparisonExpression(
          not: not,
          left: expression,
          operator: operator,
          right: right,
          escape: escape);
    } else {
      break; // no matching operator with this precedence was found
    }
  }

  return expression;
}
|
||||
|
||||
/// Parses comparison operators (per the precedence table: <, <=, >, >=).
Expression _comparison() =>
    _parseSimpleBinary(_comparisonOperators, _binaryOperation);
|
||||
|
||||
/// Parses bitwise operators (per the precedence table: <<, >>, &, |).
Expression _binaryOperation() =>
    _parseSimpleBinary(_binaryOperators, _addition);
|
||||
|
||||
/// Parses additive operators (+ and -).
Expression _addition() => _parseSimpleBinary(
      const [TokenType.plus, TokenType.minus],
      _multiplication,
    );
|
||||
|
||||
/// Parses multiplicative operators (*, / and %).
Expression _multiplication() => _parseSimpleBinary(
      const [TokenType.star, TokenType.slash, TokenType.percent],
      _concatenation,
    );
|
||||
|
||||
/// Parses the string concatenation operator (||).
Expression _concatenation() {
  return _parseSimpleBinary(const [TokenType.doublePipe], _unary);
}
|
||||
|
||||
/// Parses unary prefix operators (-, +, ~, NOT) and EXISTS(...) expressions,
/// falling through to [_postfix] otherwise.
Expression _unary() {
  if (_match(const [
    TokenType.minus,
    TokenType.plus,
    TokenType.tilde,
    TokenType.not,
  ])) {
    final operator = _previous;
    // unary operators are right-associative, so recurse into _unary
    final expression = _unary();
    return UnaryExpression(operator, expression);
  } else if (_matchOne(TokenType.exists)) {
    _consume(
        TokenType.leftParen, 'Expected opening parenthesis after EXISTS');
    final selectStmt = select();
    // fixed typo in message: "paranthesis" -> "parenthesis"
    _consume(TokenType.rightParen,
        'Expected closing parenthesis to finish EXISTS expression');
    return ExistsExpression(select: selectStmt);
  }

  return _postfix();
}
|
||||
|
||||
/// Parses postfix operations on a primary expression. Currently only the
/// COLLATE operator is handled here.
Expression _postfix() {
  // todo parse ISNULL, NOTNULL, NOT NULL, etc.
  // I don't even know the precedence ¯\_(ツ)_/¯ (probably not higher than
  // unary)
  var expression = _primary();

  // a primary may be followed by any number of COLLATE <name> suffixes
  while (_matchOne(TokenType.collate)) {
    final collateOp = _previous;
    final collateFun =
        _consume(TokenType.identifier, 'Expected a collating sequence')
            as IdentifierToken;
    expression = CollateExpression(
      inner: expression,
      operator: collateOp,
      collateFunction: collateFun,
    )..setSpan(expression.first, collateFun);
  }

  return expression;
}
|
||||
|
||||
/// Parses a primary expression: literals, parenthesised expressions or
/// tuples, inner selects, (qualified) column references, function calls and
/// variables. Reports an error when nothing matches.
Expression _primary() {
  final token = _advance();
  final type = token.type;
  switch (type) {
    case TokenType.numberLiteral:
      return NumericLiteral(_parseNumber(token.lexeme), token);
    case TokenType.stringLiteral:
      return StringLiteral(token as StringLiteralToken);
    case TokenType.$null:
      return NullLiteral(token);
    case TokenType.$true:
      return BooleanLiteral.withTrue(token);
    case TokenType.$false:
      return BooleanLiteral.withFalse(token);
    // todo CURRENT_TIME, CURRENT_DATE, CURRENT_TIMESTAMP
    case TokenType.leftParen:
      // Opening brackets could be three things: An inner select statement
      // (SELECT ...), a parenthesised expression, or a tuple of expressions
      // (a, b, c).
      final left = token;
      if (_peek.type == TokenType.select) {
        final stmt = select();
        _consume(TokenType.rightParen, 'Expected a closing bracket');
        return SubQuery(select: stmt);
      } else {
        // alright, it's either a tuple or just parenthesis. A tuple can be
        // empty, so if the next statement is the closing bracket we're done
        if (_matchOne(TokenType.rightParen)) {
          return TupleExpression(expressions: [])..setSpan(left, _previous);
        }

        final expr = expression();

        // Are we witnessing a tuple?
        if (_check(TokenType.comma)) {
          // we are, add expressions as long as we see commas
          final exprs = [expr];
          while (_matchOne(TokenType.comma)) {
            exprs.add(expression());
          }

          _consume(TokenType.rightParen, 'Expected a closing bracket');
          return TupleExpression(expressions: exprs);
        } else {
          // we aren't, so that'll just be parentheses.
          _consume(TokenType.rightParen, 'Expected a closing bracket');
          // NOTE(review): the third argument here is the same token as
          // [left] (the opening paren). If Parentheses expects the closing
          // paren as its last argument, this should probably be _previous —
          // verify against the Parentheses constructor.
          return Parentheses(left, expr, token);
        }
      }
      break;
    case TokenType.identifier:
      // could be table.column, function(...) or just column
      final first = token as IdentifierToken;

      if (_matchOne(TokenType.dot)) {
        final second =
            _consume(TokenType.identifier, 'Expected a column name here')
                as IdentifierToken;
        return Reference(
            tableName: first.identifier, columnName: second.identifier)
          ..setSpan(first, second);
      } else if (_matchOne(TokenType.leftParen)) {
        final parameters = _functionParameters();
        final rightParen = _consume(TokenType.rightParen,
            'Expected closing bracket after argument list');

        return FunctionExpression(
            name: first.identifier, parameters: parameters)
          ..setSpan(first, rightParen);
      } else {
        return Reference(columnName: first.identifier)..setSpan(first, first);
      }
      break;
    case TokenType.questionMark:
      // a "?" variable, optionally followed by an explicit index ("?3")
      final mark = token;

      if (_matchOne(TokenType.numberLiteral)) {
        final number = _previous;
        return NumberedVariable(mark, _parseNumber(number.lexeme).toInt())
          ..setSpan(mark, number);
      } else {
        return NumberedVariable(mark, null)..setSpan(mark, mark);
      }
      break;
    case TokenType.colon:
      // a named variable like ":foo"
      final colon = token;
      final identifier = _consume(TokenType.identifier,
          'Expected an identifier for the named variable') as IdentifierToken;
      final content = identifier.identifier;
      return ColonNamedVariable(':$content')..setSpan(colon, identifier);
    default:
      break;
  }

  // nothing found -> issue error
  _error('Could not parse this expression');
}
|
||||
|
||||
/// Parses the parameters of a function invocation, assuming the opening
/// parenthesis has already been consumed. Does not consume the closing
/// parenthesis.
FunctionParameters _functionParameters() {
  if (_matchOne(TokenType.star)) {
    // e.g. COUNT(*)
    return const StarFunctionParameter();
  }

  final distinct = _matchOne(TokenType.distinct);
  final parameters = <Expression>[];

  // Parse a comma-separated argument list, which may be empty. The original
  // looped on "peek != rightParen" without consuming commas, so calls with
  // more than one argument (f(a, b)) failed while parsing the second one.
  if (_peek.type != TokenType.rightParen) {
    do {
      parameters.add(expression());
    } while (_matchOne(TokenType.comma));
  }

  // note: an unreachable `return stmts;` (undefined identifier, likely merge
  // residue) was removed here.
  return ExprFunctionParameters(distinct: distinct, parameters: parameters);
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,315 @@
|
|||
part of 'parser.dart';
|
||||
|
||||
/// Parses schema-altering statements, currently only CREATE TABLE.
mixin SchemaParser on ParserBase {
  /// Parses a CREATE TABLE statement, or returns null when the current token
  /// is not the CREATE keyword.
  CreateTableStatement _createTable() {
    if (!_matchOne(TokenType.create)) return null;
    final first = _previous;

    _consume(TokenType.table, 'Expected TABLE keyword here');

    var ifNotExists = false;

    if (_matchOne(TokenType.$if)) {
      _consume(TokenType.not, 'Expected IF to be followed by NOT EXISTS');
      _consume(TokenType.exists, 'Expected IF NOT to be followed by EXISTS');
      ifNotExists = true;
    }

    final tableIdentifier = _consumeIdentifier('Expected a table name');

    // we don't currently support CREATE TABLE x AS SELECT ... statements
    _consume(
        TokenType.leftParen, 'Expected opening parenthesis to list columns');

    final columns = <ColumnDefinition>[];
    final tableConstraints = <TableConstraint>[];
    // the columns must come before the table constraints!
    var encounteredTableConstraint = false;

    do {
      final tableConstraint = _tableConstraintOrNull();

      if (tableConstraint != null) {
        encounteredTableConstraint = true;
        tableConstraints.add(tableConstraint);
      } else {
        if (encounteredTableConstraint) {
          // a column definition after a table constraint is a syntax error
          _error('Expected another table constraint');
        } else {
          columns.add(_columnDefinition());
        }
      }
    } while (_matchOne(TokenType.comma));

    _consume(TokenType.rightParen, 'Expected closing parenthesis');

    var withoutRowId = false;
    if (_matchOne(TokenType.without)) {
      _consume(
          TokenType.rowid, 'Expected ROWID to complete the WITHOUT ROWID part');
      withoutRowId = true;
    }

    return CreateTableStatement(
      ifNotExists: ifNotExists,
      tableName: tableIdentifier.identifier,
      withoutRowId: withoutRowId,
      columns: columns,
      tableConstraints: tableConstraints,
    )..setSpan(first, _previous);
  }

  /// Parses a single column definition: name, optional type name and any
  /// number of column constraints.
  ColumnDefinition _columnDefinition() {
    final name = _consume(TokenType.identifier, 'Expected a column name')
        as IdentifierToken;

    final typeName = _typeName();
    final constraints = <ColumnConstraint>[];
    ColumnConstraint constraint;
    while ((constraint = _columnConstraint(orNull: true)) != null) {
      constraints.add(constraint);
    }

    return ColumnDefinition(
      columnName: name.identifier,
      typeName: typeName,
      constraints: constraints,
    )..setSpan(name, _previous);
  }

  /// Parses a type name, or returns null when the current token isn't an
  /// identifier.
  String _typeName() {
    // sqlite doesn't really define what a type name is and has very loose rules
    // at turning them into a type affinity. We support this pattern:
    // typename = identifier [ "(" { identifier | comma | number_literal } ")" ]
    if (!_matchOne(TokenType.identifier)) return null;

    final typeNameBuilder = StringBuffer(_previous.lexeme);

    if (_matchOne(TokenType.leftParen)) {
      typeNameBuilder.write('(');

      const inBrackets = [
        TokenType.identifier,
        TokenType.comma,
        TokenType.numberLiteral
      ];
      while (_match(inBrackets)) {
        typeNameBuilder..write(' ')..write(_previous.lexeme);
      }

      _consume(TokenType.rightParen,
          'Expected closing paranthesis to finish type name');
    }

    return typeNameBuilder.toString();
  }

  /// Parses a single column constraint (primary key, not null, unique, check,
  /// default, collate or a foreign key clause), optionally prefixed with
  /// `CONSTRAINT <name>`.
  ///
  /// When [orNull] is true and no constraint was started (not even with a
  /// CONSTRAINT prefix), returns null instead of reporting an error.
  ColumnConstraint _columnConstraint({bool orNull = false}) {
    final first = _peek;

    final resolvedName = _constraintNameOrNull();

    if (_matchOne(TokenType.primary)) {
      _consume(TokenType.key, 'Expected KEY to complete PRIMARY KEY clause');

      final mode = _orderingModeOrNull();
      final conflict = _conflictClauseOrNull();
      final hasAutoInc = _matchOne(TokenType.autoincrement);

      return PrimaryKeyColumn(resolvedName,
          autoIncrement: hasAutoInc, mode: mode, onConflict: conflict)
        ..setSpan(first, _previous);
    }
    if (_matchOne(TokenType.not)) {
      _consume(TokenType.$null, 'Expected NULL to complete NOT NULL');

      return NotNull(resolvedName, onConflict: _conflictClauseOrNull())
        ..setSpan(first, _previous);
    }
    if (_matchOne(TokenType.unique)) {
      return UniqueColumn(resolvedName, _conflictClauseOrNull())
        ..setSpan(first, _previous);
    }
    if (_matchOne(TokenType.check)) {
      final expr = _expressionInParentheses();
      return CheckColumn(resolvedName, expr)..setSpan(first, _previous);
    }
    if (_matchOne(TokenType.$default)) {
      Expression expr = _literalOrNull();

      // when not a literal, expect an expression in parentheses
      expr ??= _expressionInParentheses();

      return Default(resolvedName, expr)..setSpan(first, _previous);
    }
    if (_matchOne(TokenType.collate)) {
      final collation = _consumeIdentifier('Expected the collation name');

      return CollateConstraint(resolvedName, collation.identifier)
        ..setSpan(first, _previous);
    }
    if (_peek.type == TokenType.references) {
      final clause = _foreignKeyClause();
      return ForeignKeyColumnConstraint(resolvedName, clause)
        ..setSpan(first, _previous);
    }

    // no known column constraint matched. If orNull is set and we're not
    // guaranteed to be in a constraint clause (started with CONSTRAINT), we
    // can return null
    if (orNull && resolvedName == null) {
      return null;
    }
    _error('Expected a constraint (primary key, nullability, etc.)');
  }

  /// Parses a table constraint (UNIQUE / PRIMARY KEY, CHECK or FOREIGN KEY),
  /// or returns null when no constraint starts at the current position.
  TableConstraint _tableConstraintOrNull() {
    final first = _peek;
    final name = _constraintNameOrNull();

    if (_match([TokenType.unique, TokenType.primary])) {
      final isPrimaryKey = _previous.type == TokenType.primary;

      if (isPrimaryKey) {
        _consume(TokenType.key, 'Expected KEY to start PRIMARY KEY clause');
      }

      final columns = _listColumnsInParentheses(allowEmpty: false);
      final conflictClause = _conflictClauseOrNull();

      return KeyClause(name,
          isPrimaryKey: isPrimaryKey,
          indexedColumns: columns,
          onConflict: conflictClause)
        ..setSpan(first, _previous);
    } else if (_matchOne(TokenType.check)) {
      final expr = _expressionInParentheses();
      return CheckTable(name, expr)..setSpan(first, _previous);
    } else if (_matchOne(TokenType.foreign)) {
      _consume(TokenType.key, 'Expected KEY to start FOREIGN KEY clause');
      final columns = _listColumnsInParentheses(allowEmpty: false);
      final clause = _foreignKeyClause();

      return ForeignKeyTableConstraint(name, columns: columns, clause: clause)
        ..setSpan(first, _previous);
    }

    if (name != null) {
      // if a constraint was started with CONSTRAINT <name> but then we didn't
      // find a constraint, that's an syntax error
      _error('Expected a table constraint (e.g. a primary key)');
    }
    return null;
  }

  /// Parses the name from an optional `CONSTRAINT <name>` prefix, or returns
  /// null when the current token isn't the CONSTRAINT keyword.
  String _constraintNameOrNull() {
    if (_matchOne(TokenType.constraint)) {
      final name = _consumeIdentifier('Expect a name for the constraint here');
      return name.identifier;
    }
    return null;
  }

  /// Parses `( <expression> )`, reporting an error if either parenthesis is
  /// missing.
  Expression _expressionInParentheses() {
    _consume(TokenType.leftParen, 'Expected opening parenthesis');
    final expr = expression();
    _consume(TokenType.rightParen, 'Expected closing parenthesis');
    return expr;
  }

  /// Parses an `ON CONFLICT <mode>` clause, or returns null when the current
  /// token isn't the ON keyword.
  ConflictClause _conflictClauseOrNull() {
    if (_matchOne(TokenType.on)) {
      _consume(TokenType.conflict,
          'Expected CONFLICT to complete ON CONFLICT clause');

      // maps the conflict-handler keyword to the resolved clause
      const modes = {
        TokenType.rollback: ConflictClause.rollback,
        TokenType.abort: ConflictClause.abort,
        TokenType.fail: ConflictClause.fail,
        TokenType.ignore: ConflictClause.ignore,
        TokenType.replace: ConflictClause.replace,
      };

      if (_match(modes.keys)) {
        return modes[_previous.type];
      } else {
        _error('Expected a conflict handler (rollback, abort, etc.) here');
      }
    }

    return null;
  }

  /// Parses a REFERENCES clause with its optional column list and
  /// ON DELETE / ON UPDATE actions.
  ForeignKeyClause _foreignKeyClause() {
    // https://www.sqlite.org/syntax/foreign-key-clause.html
    _consume(TokenType.references, 'Expected REFERENCES');
    final firstToken = _previous;

    final foreignTable = _consumeIdentifier('Expected a table name');
    final foreignTableName = TableReference(foreignTable.identifier, null)
      ..setSpan(foreignTable, foreignTable);

    final columnNames = _listColumnsInParentheses(allowEmpty: true);

    ReferenceAction onDelete, onUpdate;

    // any number of ON DELETE / ON UPDATE actions may follow; a later one for
    // the same trigger overwrites an earlier one
    while (_matchOne(TokenType.on)) {
      if (_matchOne(TokenType.delete)) {
        onDelete = _referenceAction();
      } else if (_matchOne(TokenType.update)) {
        onUpdate = _referenceAction();
      } else {
        _error('Expected either DELETE or UPDATE');
      }
    }

    return ForeignKeyClause(
      foreignTable: foreignTableName,
      columnNames: columnNames,
      onUpdate: onUpdate,
      onDelete: onDelete,
    )..setSpan(firstToken, _previous);
  }

  /// Parses the action of an ON DELETE / ON UPDATE clause (CASCADE, RESTRICT,
  /// NO ACTION, SET NULL or SET DEFAULT), reporting an error otherwise.
  ReferenceAction _referenceAction() {
    if (_matchOne(TokenType.cascade)) {
      return ReferenceAction.cascade;
    } else if (_matchOne(TokenType.restrict)) {
      return ReferenceAction.restrict;
    } else if (_matchOne(TokenType.no)) {
      _consume(TokenType.action, 'Expect ACTION to complete NO ACTION clause');
      return ReferenceAction.noAction;
    } else if (_matchOne(TokenType.set)) {
      if (_matchOne(TokenType.$null)) {
        return ReferenceAction.setNull;
      } else if (_matchOne(TokenType.$default)) {
        return ReferenceAction.setDefault;
      } else {
        _error('Expected either NULL or DEFAULT as set action here');
      }
    } else {
      _error('Not a valid action, expected CASCADE, SET NULL, etc..');
    }
  }

  /// Parses a parenthesised, comma-separated list of column references.
  ///
  /// When no opening parenthesis is present, returns an empty list if
  /// [allowEmpty] is set and reports an error otherwise.
  List<Reference> _listColumnsInParentheses({bool allowEmpty = false}) {
    final columnNames = <Reference>[];
    if (_matchOne(TokenType.leftParen)) {
      do {
        final referenceId = _consumeIdentifier('Expected a column name');
        final reference = Reference(columnName: referenceId.identifier)
          ..setSpan(referenceId, referenceId);
        columnNames.add(reference);
      } while (_matchOne(TokenType.comma));

      _consume(TokenType.rightParen,
          'Expected closing paranthesis after column names');
    } else {
      if (!allowEmpty) {
        _error('Expected a list of columns in parantheses');
      }
    }

    return columnNames;
  }
}
|
|
@ -95,6 +95,26 @@ enum TokenType {
|
|||
ignore,
|
||||
set,
|
||||
|
||||
create,
|
||||
table,
|
||||
$if,
|
||||
without,
|
||||
rowid,
|
||||
constraint,
|
||||
autoincrement,
|
||||
primary,
|
||||
foreign,
|
||||
key,
|
||||
unique,
|
||||
check,
|
||||
$default,
|
||||
conflict,
|
||||
references,
|
||||
cascade,
|
||||
restrict,
|
||||
no,
|
||||
action,
|
||||
|
||||
semicolon,
|
||||
eof,
|
||||
}
|
||||
|
@ -154,6 +174,25 @@ const Map<String, TokenType> keywords = {
|
|||
'REPLACE': TokenType.replace,
|
||||
'FAIL': TokenType.fail,
|
||||
'IGNORE': TokenType.ignore,
|
||||
'CREATE': TokenType.create,
|
||||
'TABLE': TokenType.table,
|
||||
'IF': TokenType.$if,
|
||||
'WITHOUT': TokenType.without,
|
||||
'ROWID': TokenType.rowid,
|
||||
'CONSTRAINT': TokenType.constraint,
|
||||
'AUTOINCREMENT': TokenType.autoincrement,
|
||||
'PRIMARY': TokenType.primary,
|
||||
'FOREIGN': TokenType.foreign,
|
||||
'KEY': TokenType.key,
|
||||
'UNIQUE': TokenType.unique,
|
||||
'CHECK': TokenType.check,
|
||||
'DEFAULT': TokenType.$default,
|
||||
'CONFLICT': TokenType.conflict,
|
||||
'REFERENCES': TokenType.references,
|
||||
'CASCADE': TokenType.cascade,
|
||||
'RESTRICT': TokenType.restrict,
|
||||
'NO': TokenType.no,
|
||||
'ACTION': TokenType.action,
|
||||
};
|
||||
|
||||
class Token {
|
||||
|
|
|
@ -0,0 +1,9 @@
|
|||
# mono_repo CI configuration for this package.
dart:
  - stable

stages:
  # static analysis first: analyzer (infos/warnings fatal) and formatting
  - analyze:
      - dartanalyzer: --fatal-infos --fatal-warnings lib/ test/
      - dartfmt
  # then run the unit tests
  - unit_test:
      - test
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue