Fix autocomplete and suggestions in the analyzer plugin

Simon Binder 2019-09-24 20:50:23 +02:00
parent 3ef0d0f61a
commit ba21a594af
GPG Key ID: 7891917E4147B8C0
12 changed files with 118 additions and 36 deletions

View File

@@ -4,7 +4,9 @@ Currently, we support
- showing errors in moor files
- outline
- (kinda) folding
- folding
- (very, very limited) autocomplete
- some quickfixes to make columns nullable or non-null
## Setup
To use this plugin, you'll need to perform these steps once. It is assumed that you

View File

@@ -1,5 +1,5 @@
include: package:pedantic/analysis_options.yaml
#analyzer:
# plugins:
# - moor
analyzer:
plugins:
- moor

View File

@@ -80,9 +80,15 @@ class MoorDriver implements AnalysisDriverGeneric {
}
final backendTask = _createTask(mostImportantFile.file.uri);
final task = session.startTask(backendTask);
await task.runTask();
_tracker.handleTaskCompleted(task);
try {
final task = session.startTask(backendTask);
await task.runTask();
_tracker.handleTaskCompleted(task);
} catch (e, s) {
Logger.root.warning(
'Error while working on ${mostImportantFile.file.uri}', e, s);
_tracker.removePending(mostImportantFile);
}
} finally {
_isWorking = false;
}
@@ -141,11 +147,16 @@ class MoorDriver implements AnalysisDriverGeneric {
/// Waits for the file at [path] to be parsed.
Future<FoundFile> waitFileParsed(String path) {
_scheduler.notify(this);
final found = pathToFoundFile(path);
return completedFiles()
.firstWhere((file) => file == found && file.isParsed);
if (found.isParsed) {
return Future.value(found);
} else {
_scheduler.notify(this);
final found = pathToFoundFile(path);
return completedFiles()
.firstWhere((file) => file == found && file.isParsed);
}
}
}

View File

@@ -76,6 +76,13 @@ class FileTracker {
}
}
/// Manually remove the [file] from the backlog. As the plugin is still very
/// unstable, we use this on unexpected errors so that we just move on to the
/// next file if there is a problem.
void removePending(TrackedFile file) {
_pendingWork.remove(file);
}
void dispose() {
_computations.close();
}

View File

@@ -37,6 +37,12 @@ abstract class _AssistOnNodeContributor<T extends AstNode> {
const _AssistOnNodeContributor();
void contribute(AssistCollector collector, T node, String path);
SourceEdit replaceNode(AstNode node, String text) {
final start = node.firstPosition;
final length = node.lastPosition - start;
return SourceEdit(start, length, text);
}
}
class AssistId {

View File

@@ -9,9 +9,9 @@ class ColumnNullability extends _AssistOnNodeContributor<ColumnDefinition> {
final notNull = node.findConstraint<NotNull>();
if (notNull == null) {
// there is no not-null constraint on this column, suggest to add one at
// the end of the definition
final end = node.lastPosition;
// there is no not-null constraint on this column, suggest to add one
// after the type name
final end = node.typeNames.last.span.end.offset;
final id = AssistId.makeNotNull;
collector.addAssist(PrioritizedSourceChange(
@@ -34,9 +34,7 @@ class ColumnNullability extends _AssistOnNodeContributor<ColumnDefinition> {
collector.addAssist(PrioritizedSourceChange(
id.priority,
SourceChange('Make this column nullable', id: id.id, edits: [
SourceFileEdit(path, -1, edits: [
SourceEdit(notNull.firstPosition, notNull.lastPosition, '')
])
SourceFileEdit(path, -1, edits: [replaceNode(notNull, '')])
]),
));
}

View File

@@ -6,6 +6,9 @@ class ColumnDefinition extends AstNode {
final String typeName;
final List<ColumnConstraint> constraints;
/// The tokens that were involved in defining the type of this column.
List<Token> typeNames;
ColumnDefinition(
{@required this.columnName,
@required this.typeName,

View File

@@ -21,11 +21,19 @@ class AutoCompleteEngine {
final List<Hint> _hints = [];
UnmodifiableListView<Hint> _hintsView;
final List<Token> _tokens;
void addHint(Hint hint) {
_hints.insert(_lastHintBefore(hint.offset), hint);
if (_hints.isEmpty) {
_hints.add(hint);
} else {
// ensure that the hints list stays sorted by offset
final position = _lastHintBefore(hint.offset);
_hints.insert(position + 1, hint);
}
}
AutoCompleteEngine() {
AutoCompleteEngine(this._tokens) {
_hintsView = UnmodifiableListView(_hints);
}
@@ -40,13 +48,23 @@ class AutoCompleteEngine {
final hint = foundHints[_lastHintBefore(offset)];
final suggestions = hint.description.suggest(CalculationRequest()).toList();
return ComputedSuggestions(hint.offset, offset - hint.offset, suggestions);
// when calculating the offset, respect whitespace that comes after the
// last hint.
final lastToken = hint.before;
final nextToken =
lastToken != null ? _tokens[lastToken.index + 1] : _tokens.first;
final hintOffset = nextToken.span.start.offset;
final length = offset - hintOffset;
return ComputedSuggestions(hintOffset, length, suggestions);
}
/// find the last hint that appears before [offset]
int _lastHintBefore(int offset) {
// find the last hint that appears before offset
var min = 0;
var max = foundHints.length;
var max = foundHints.length - 1;
while (min < max) {
final mid = min + ((max - min) >> 1);
@@ -56,10 +74,18 @@ class AutoCompleteEngine {
if (offsetOfMid == offset) {
return mid;
} else if (offsetOfMid < offset) {
min = mid + 1;
} else {
max = mid - 1;
final offsetOfNext = _hints[mid + 1].offset;
if (offsetOfMid < offset) {
if (offsetOfNext > offset) {
// next one is too late, so this must be the correct one
return mid;
}
min = mid + 1;
} else {
max = mid - 1;
}
}
}
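
The two pieces above go together: addHint keeps the hint list sorted by offset, and _lastHintBefore binary-searches for the last hint at or before the cursor. The same idea as a minimal standalone sketch, with made-up names (FakeHint and SortedHints are illustrative only, not the plugin's classes):

class FakeHint {
  final int offset;
  final String label;
  FakeHint(this.offset, this.label);
}

class SortedHints {
  final List<FakeHint> _hints = [];

  void add(FakeHint hint) {
    if (_hints.isEmpty) {
      _hints.add(hint);
    } else {
      // insert after the last hint whose offset is <= hint.offset, so the
      // list stays sorted by offset
      _hints.insert(_lastBefore(hint.offset) + 1, hint);
    }
  }

  // index of the last hint whose offset is <= offset (returns 0 when every
  // hint lies after offset, mirroring the plugin code)
  int _lastBefore(int offset) {
    var min = 0;
    var max = _hints.length - 1;
    while (min < max) {
      final mid = min + ((max - min) >> 1);
      final offsetOfMid = _hints[mid].offset;
      if (offsetOfMid == offset) return mid;

      if (offsetOfMid < offset) {
        if (_hints[mid + 1].offset > offset) {
          // the next hint is already past the cursor, so mid is the match
          return mid;
        }
        min = mid + 1;
      } else {
        max = mid - 1;
      }
    }
    return min;
  }

  FakeHint lastAtOrBefore(int offset) => _hints[_lastBefore(offset)];
}

void main() {
  final hints = SortedHints()
    ..add(FakeHint(0, 'CREATE'))
    ..add(FakeHint(20, 'NOT NULL'))
    ..add(FakeHint(12, 'PRIMARY KEY'));
  print(hints.lastAtOrBefore(15).label); // PRIMARY KEY
}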

View File

@@ -66,8 +66,9 @@ class SqlEngine {
ParseResult parseMoorFile(String content) {
assert(useMoorExtensions);
final autoComplete = AutoCompleteEngine();
final tokens = tokenize(content);
final autoComplete = AutoCompleteEngine(tokens);
final tokensForParser = tokens.where((t) => !t.invisibleToParser).toList();
final parser =
Parser(tokensForParser, useMoor: true, autoComplete: autoComplete);
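
For context, a rough usage sketch of the engine after this change, showing where the token list ends up. The constructor parameter and the names used on the result (autoCompleteEngine, suggestCompletions, suggestions) are taken from this codebase from memory and may not match the published sqlparser API exactly; treat them as placeholders rather than documented contract.

import 'package:sqlparser/sqlparser.dart';

void main() {
  // the engine now hands the full token list (including tokens that are
  // invisible to the parser) to the autocomplete engine so suggestions can
  // be anchored at the right offset
  final engine = SqlEngine(useMoorExtensions: true);
  final result = engine.parseMoorFile('''
CREATE TABLE users (
  id INT NOT NULL PRIMARY KEY AUTOINCREMENT,
  name VARCHAR
);
''');

  // assumed field/method names, see the note above
  final completions = result.autoCompleteEngine.suggestCompletions(30);
  print(completions.suggestions);
}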

View File

@@ -80,7 +80,14 @@ mixin SchemaParser on ParserBase {
final name = _consume(TokenType.identifier, 'Expected a column name')
as IdentifierToken;
final typeName = _typeName();
final typeNames = _typeName();
if (typeNames == null) {
_error('Expected a type name here');
}
final typeSpan = typeNames.first.span.expand(typeNames.last.span);
final typeName = typeSpan.text;
final constraints = <ColumnConstraint>[];
ColumnConstraint constraint;
while ((constraint = _columnConstraint(orNull: true)) != null) {
@@ -91,19 +98,21 @@ mixin SchemaParser on ParserBase {
columnName: name.identifier,
typeName: typeName,
constraints: constraints,
)..setSpan(name, _previous);
)
..setSpan(name, _previous)
..typeNames = typeNames;
}
String _typeName() {
List<Token> _typeName() {
// sqlite doesn't really define what a type name is and has very loose rules
// at turning them into a type affinity. We support this pattern:
// typename = identifier [ "(" { identifier | comma | number_literal } ")" ]
if (!_matchOne(TokenType.identifier)) return null;
final typeNameBuilder = StringBuffer(_previous.lexeme);
final typeNames = [_previous];
if (_matchOne(TokenType.leftParen)) {
typeNameBuilder.write('(');
typeNames.add(_previous);
const inBrackets = [
TokenType.identifier,
@@ -111,14 +120,15 @@ mixin SchemaParser on ParserBase {
TokenType.numberLiteral
];
while (_match(inBrackets)) {
typeNameBuilder..write(' ')..write(_previous.lexeme);
typeNames.add(_previous);
}
_consume(TokenType.rightParen,
'Expected closing parenthesis to finish type name');
typeNames.add(_previous);
}
return typeNameBuilder.toString();
return typeNames;
}
ColumnConstraint _columnConstraint({bool orNull = false}) {
@@ -127,10 +137,13 @@ mixin SchemaParser on ParserBase {
final resolvedName = _constraintNameOrNull();
if (_matchOne(TokenType.primary)) {
_suggestHint(HintDescription.token(TokenType.key));
_consume(TokenType.key, 'Expected KEY to complete PRIMARY KEY clause');
final mode = _orderingModeOrNull();
final conflict = _conflictClauseOrNull();
_suggestHint(HintDescription.token(TokenType.autoincrement));
final hasAutoInc = _matchOne(TokenType.autoincrement);
return PrimaryKeyColumn(resolvedName,
@@ -138,6 +151,8 @@ mixin SchemaParser on ParserBase {
..setSpan(first, _previous);
}
if (_matchOne(TokenType.not)) {
_suggestHint(HintDescription.token(TokenType.$null));
final notToken = _previous;
final nullToken =
_consume(TokenType.$null, 'Expected NULL to complete NOT NULL');
@@ -249,6 +264,7 @@ mixin SchemaParser on ParserBase {
}
ConflictClause _conflictClauseOrNull() {
_suggestHint(HintDescription.token(TokenType.on));
if (_matchOne(TokenType.on)) {
_consume(TokenType.conflict,
'Expected CONFLICT to complete ON CONFLICT clause');
@@ -260,6 +276,7 @@ mixin SchemaParser on ParserBase {
TokenType.ignore: ConflictClause.ignore,
TokenType.replace: ConflictClause.replace,
};
_suggestHint(HintDescription.tokens(modes.keys.toList()));
if (_match(modes.keys)) {
return modes[_previous.type];
@@ -284,7 +301,10 @@ mixin SchemaParser on ParserBase {
ReferenceAction onDelete, onUpdate;
_suggestHint(HintDescription.token(TokenType.on));
while (_matchOne(TokenType.on)) {
_suggestHint(
const HintDescription.tokens([TokenType.delete, TokenType.update]));
if (_matchOne(TokenType.delete)) {
onDelete = _referenceAction();
} else if (_matchOne(TokenType.update)) {
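
The reworked _typeName above now returns the raw token list, and the column definition recovers its display string by expanding the span from the first to the last type token instead of rebuilding it with a StringBuffer (which produced artifacts like "DECIMAL( 10 , 2"). A small sketch using package:source_span directly (not the parser's Token class) shows what that expansion yields for a column declared as "foo DECIMAL(10, 2) NOT NULL":

import 'package:source_span/source_span.dart';

void main() {
  final file = SourceFile.fromString('foo DECIMAL(10, 2) NOT NULL');

  // spans of the first and last tokens of the type name: DECIMAL ... )
  final first = file.span(4, 11); // DECIMAL
  final last = file.span(17, 18); // )

  // expanding the spans reproduces the exact source text of the type
  final typeSpan = first.expand(last);
  print(typeSpan.text); // DECIMAL(10, 2)
}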

View File

@@ -35,6 +35,11 @@ class Scanner {
final endSpan = _file.span(source.length);
tokens.add(Token(TokenType.eof, endSpan));
for (var i = 0; i < tokens.length; i++) {
tokens[i].index = i;
}
return tokens;
}

View File

@@ -260,7 +260,10 @@ class Token {
final FileSpan span;
String get lexeme => span.text;
const Token(this.type, this.span);
/// The index of this [Token] in the list of tokens scanned.
int index;
Token(this.type, this.span);
@override
String toString() {
@@ -274,7 +277,7 @@ class StringLiteralToken extends Token {
/// sqlite allows binary strings (x'literal') which are interpreted as blobs.
final bool binary;
const StringLiteralToken(this.value, FileSpan span, {this.binary = false})
StringLiteralToken(this.value, FileSpan span, {this.binary = false})
: super(TokenType.stringLiteral, span);
}
@@ -295,7 +298,7 @@ class IdentifierToken extends Token {
}
}
const IdentifierToken(this.escaped, FileSpan span, {this.synthetic = false})
IdentifierToken(this.escaped, FileSpan span, {this.synthetic = false})
: super(TokenType.identifier, span);
}