Merge pull request #8217 from bojidar-bg/gdscript-fix-keyword-call

Make GDScript allow some keywords as identifiers
This commit is contained in:
Rémi Verschelde 2017-07-25 08:18:10 +02:00 committed by GitHub
commit e00630bfca
3 changed files with 263 additions and 141 deletions

View File

@ -185,8 +185,8 @@ void GDParser::_make_completable_call(int p_arg) {
bool GDParser::_get_completable_identifier(CompletionType p_type, StringName &identifier) { bool GDParser::_get_completable_identifier(CompletionType p_type, StringName &identifier) {
identifier = StringName(); identifier = StringName();
if (tokenizer->get_token() == GDTokenizer::TK_IDENTIFIER) { if (tokenizer->is_token_literal()) {
identifier = tokenizer->get_token_identifier(); identifier = tokenizer->get_token_literal();
tokenizer->advance(); tokenizer->advance();
} }
if (tokenizer->get_token() == GDTokenizer::TK_CURSOR) { if (tokenizer->get_token() == GDTokenizer::TK_CURSOR) {
@ -201,8 +201,8 @@ bool GDParser::_get_completable_identifier(CompletionType p_type, StringName &id
completion_ident_is_call = false; completion_ident_is_call = false;
tokenizer->advance(); tokenizer->advance();
if (tokenizer->get_token() == GDTokenizer::TK_IDENTIFIER) { if (tokenizer->is_token_literal()) {
identifier = identifier.operator String() + tokenizer->get_token_identifier().operator String(); identifier = identifier.operator String() + tokenizer->get_token_literal().operator String();
tokenizer->advance(); tokenizer->advance();
} }
@ -295,17 +295,6 @@ GDParser::Node *GDParser::_parse_expression(Node *p_parent, bool p_static, bool
tokenizer->advance(); tokenizer->advance();
need_identifier = false; need_identifier = false;
} break;
case GDTokenizer::TK_IDENTIFIER: {
if (!need_identifier) {
done = true;
break;
}
path += String(tokenizer->get_token_identifier());
tokenizer->advance();
need_identifier = false;
} break; } break;
case GDTokenizer::TK_OP_DIV: { case GDTokenizer::TK_OP_DIV: {
@ -320,6 +309,13 @@ GDParser::Node *GDParser::_parse_expression(Node *p_parent, bool p_static, bool
} break; } break;
default: { default: {
// Instead of checking for TK_IDENTIFIER, we check with is_token_literal, as this allows us to use match/sync/etc. as a name
if (need_identifier && tokenizer->is_token_literal()) {
path += String(tokenizer->get_token_literal());
tokenizer->advance();
need_identifier = false;
}
done = true; done = true;
break; break;
} }
@ -585,7 +581,8 @@ GDParser::Node *GDParser::_parse_expression(Node *p_parent, bool p_static, bool
cn->value = Variant::get_numeric_constant_value(bi_type, identifier); cn->value = Variant::get_numeric_constant_value(bi_type, identifier);
expr = cn; expr = cn;
} else if (tokenizer->get_token(1) == GDTokenizer::TK_PARENTHESIS_OPEN && (tokenizer->get_token() == GDTokenizer::TK_BUILT_IN_TYPE || tokenizer->get_token() == GDTokenizer::TK_IDENTIFIER || tokenizer->get_token() == GDTokenizer::TK_BUILT_IN_FUNC)) { } else if (tokenizer->get_token(1) == GDTokenizer::TK_PARENTHESIS_OPEN && tokenizer->is_token_literal()) {
// We check with is_token_literal, as this allows us to use match/sync/etc. as a name
//function or constructor //function or constructor
OperatorNode *op = alloc_node<OperatorNode>(); OperatorNode *op = alloc_node<OperatorNode>();
@ -627,7 +624,8 @@ GDParser::Node *GDParser::_parse_expression(Node *p_parent, bool p_static, bool
expr = op; expr = op;
} else if (tokenizer->get_token() == GDTokenizer::TK_IDENTIFIER) { } else if (tokenizer->is_token_literal(0, true)) {
// We check with is_token_literal, as this allows us to use match/sync/etc. as a name
//identifier (reference) //identifier (reference)
const ClassNode *cln = current_class; const ClassNode *cln = current_class;
@ -827,10 +825,11 @@ GDParser::Node *GDParser::_parse_expression(Node *p_parent, bool p_static, bool
if (expecting == DICT_EXPECT_KEY) { if (expecting == DICT_EXPECT_KEY) {
if (tokenizer->get_token() == GDTokenizer::TK_IDENTIFIER && tokenizer->get_token(1) == GDTokenizer::TK_OP_ASSIGN) { if (tokenizer->is_token_literal() && tokenizer->get_token(1) == GDTokenizer::TK_OP_ASSIGN) {
// We check with is_token_literal, as this allows us to use match/sync/etc. as a name
//lua style identifier, easier to write //lua style identifier, easier to write
ConstantNode *cn = alloc_node<ConstantNode>(); ConstantNode *cn = alloc_node<ConstantNode>();
cn->value = tokenizer->get_token_identifier(); cn->value = tokenizer->get_token_literal();
key = cn; key = cn;
tokenizer->advance(2); tokenizer->advance(2);
expecting = DICT_EXPECT_VALUE; expecting = DICT_EXPECT_VALUE;
@ -870,7 +869,8 @@ GDParser::Node *GDParser::_parse_expression(Node *p_parent, bool p_static, bool
expr = dict; expr = dict;
} else if (tokenizer->get_token() == GDTokenizer::TK_PERIOD && (tokenizer->get_token(1) == GDTokenizer::TK_IDENTIFIER || tokenizer->get_token(1) == GDTokenizer::TK_CURSOR) && tokenizer->get_token(2) == GDTokenizer::TK_PARENTHESIS_OPEN) { } else if (tokenizer->get_token() == GDTokenizer::TK_PERIOD && (tokenizer->is_token_literal(1) || tokenizer->get_token(1) == GDTokenizer::TK_CURSOR) && tokenizer->get_token(2) == GDTokenizer::TK_PARENTHESIS_OPEN) {
// We check with is_token_literal, as this allows us to use match/sync/etc. as a name
// parent call // parent call
tokenizer->advance(); //goto identifier tokenizer->advance(); //goto identifier
@ -922,7 +922,8 @@ GDParser::Node *GDParser::_parse_expression(Node *p_parent, bool p_static, bool
//indexing using "." //indexing using "."
if (tokenizer->get_token(1) != GDTokenizer::TK_CURSOR && tokenizer->get_token(1) != GDTokenizer::TK_IDENTIFIER && tokenizer->get_token(1) != GDTokenizer::TK_BUILT_IN_FUNC) { if (tokenizer->get_token(1) != GDTokenizer::TK_CURSOR && !tokenizer->is_token_literal(1)) {
// We check with is_token_literal, as this allows us to use match/sync/etc. as a name
_set_error("Expected identifier as member"); _set_error("Expected identifier as member");
return NULL; return NULL;
} else if (tokenizer->get_token(2) == GDTokenizer::TK_PARENTHESIS_OPEN) { } else if (tokenizer->get_token(2) == GDTokenizer::TK_PARENTHESIS_OPEN) {
@ -2341,12 +2342,12 @@ void GDParser::_parse_block(BlockNode *p_block, bool p_static) {
//variable declaration and (eventual) initialization //variable declaration and (eventual) initialization
tokenizer->advance(); tokenizer->advance();
if (tokenizer->get_token() != GDTokenizer::TK_IDENTIFIER) { if (!tokenizer->is_token_literal(0, true)) {
_set_error("Expected identifier for local variable name."); _set_error("Expected identifier for local variable name.");
return; return;
} }
StringName n = tokenizer->get_token_identifier(); StringName n = tokenizer->get_token_literal();
tokenizer->advance(); tokenizer->advance();
if (current_function) { if (current_function) {
for (int i = 0; i < current_function->arguments.size(); i++) { for (int i = 0; i < current_function->arguments.size(); i++) {
@ -2571,7 +2572,7 @@ void GDParser::_parse_block(BlockNode *p_block, bool p_static) {
tokenizer->advance(); tokenizer->advance();
if (tokenizer->get_token() != GDTokenizer::TK_IDENTIFIER) { if (!tokenizer->is_token_literal(0, true)) {
_set_error("identifier expected after 'for'"); _set_error("identifier expected after 'for'");
} }
@ -3108,7 +3109,7 @@ void GDParser::_parse_class(ClassNode *p_class) {
tokenizer->advance(); //var before the identifier is allowed tokenizer->advance(); //var before the identifier is allowed
} }
if (tokenizer->get_token() != GDTokenizer::TK_IDENTIFIER) { if (!tokenizer->is_token_literal(0, true)) {
_set_error("Expected identifier for argument."); _set_error("Expected identifier for argument.");
return; return;
@ -3260,7 +3261,7 @@ void GDParser::_parse_class(ClassNode *p_class) {
case GDTokenizer::TK_PR_SIGNAL: { case GDTokenizer::TK_PR_SIGNAL: {
tokenizer->advance(); tokenizer->advance();
if (tokenizer->get_token() != GDTokenizer::TK_IDENTIFIER) { if (!tokenizer->is_token_literal()) {
_set_error("Expected identifier after 'signal'."); _set_error("Expected identifier after 'signal'.");
return; return;
} }
@ -3282,7 +3283,7 @@ void GDParser::_parse_class(ClassNode *p_class) {
break; break;
} }
if (tokenizer->get_token() != GDTokenizer::TK_IDENTIFIER) { if (!tokenizer->is_token_literal(0, true)) {
_set_error("Expected identifier in signal argument."); _set_error("Expected identifier in signal argument.");
return; return;
} }
@ -3847,13 +3848,13 @@ void GDParser::_parse_class(ClassNode *p_class) {
bool onready = tokenizer->get_token(-1) == GDTokenizer::TK_PR_ONREADY; bool onready = tokenizer->get_token(-1) == GDTokenizer::TK_PR_ONREADY;
tokenizer->advance(); tokenizer->advance();
if (tokenizer->get_token() != GDTokenizer::TK_IDENTIFIER) { if (!tokenizer->is_token_literal(0, true)) {
_set_error("Expected identifier for member variable name."); _set_error("Expected identifier for member variable name.");
return; return;
} }
member.identifier = tokenizer->get_token_identifier(); member.identifier = tokenizer->get_token_literal();
member.expression = NULL; member.expression = NULL;
member._export.name = member.identifier; member._export.name = member.identifier;
member.line = tokenizer->get_token_line(); member.line = tokenizer->get_token_line();
@ -3979,11 +3980,11 @@ void GDParser::_parse_class(ClassNode *p_class) {
if (tokenizer->get_token() != GDTokenizer::TK_COMMA) { if (tokenizer->get_token() != GDTokenizer::TK_COMMA) {
//just comma means using only getter //just comma means using only getter
if (tokenizer->get_token() != GDTokenizer::TK_IDENTIFIER) { if (!tokenizer->is_token_literal()) {
_set_error("Expected identifier for setter function after 'notify'."); _set_error("Expected identifier for setter function after 'setget'.");
} }
member.setter = tokenizer->get_token_identifier(); member.setter = tokenizer->get_token_literal();
tokenizer->advance(); tokenizer->advance();
} }
@ -3992,11 +3993,11 @@ void GDParser::_parse_class(ClassNode *p_class) {
//there is a getter //there is a getter
tokenizer->advance(); tokenizer->advance();
if (tokenizer->get_token() != GDTokenizer::TK_IDENTIFIER) { if (!tokenizer->is_token_literal()) {
_set_error("Expected identifier for getter function after ','."); _set_error("Expected identifier for getter function after ','.");
} }
member.getter = tokenizer->get_token_identifier(); member.getter = tokenizer->get_token_literal();
tokenizer->advance(); tokenizer->advance();
} }
} }
@ -4014,13 +4015,13 @@ void GDParser::_parse_class(ClassNode *p_class) {
ClassNode::Constant constant; ClassNode::Constant constant;
tokenizer->advance(); tokenizer->advance();
if (tokenizer->get_token() != GDTokenizer::TK_IDENTIFIER) { if (!tokenizer->is_token_literal(0, true)) {
_set_error("Expected name (identifier) for constant."); _set_error("Expected name (identifier) for constant.");
return; return;
} }
constant.identifier = tokenizer->get_token_identifier(); constant.identifier = tokenizer->get_token_literal();
tokenizer->advance(); tokenizer->advance();
if (tokenizer->get_token() != GDTokenizer::TK_OP_ASSIGN) { if (tokenizer->get_token() != GDTokenizer::TK_OP_ASSIGN) {
@ -4061,8 +4062,8 @@ void GDParser::_parse_class(ClassNode *p_class) {
Dictionary enum_dict; Dictionary enum_dict;
tokenizer->advance(); tokenizer->advance();
if (tokenizer->get_token() == GDTokenizer::TK_IDENTIFIER) { if (tokenizer->is_token_literal(0, true)) {
enum_name = tokenizer->get_token_identifier(); enum_name = tokenizer->get_token_literal();
tokenizer->advance(); tokenizer->advance();
} }
if (tokenizer->get_token() != GDTokenizer::TK_CURLY_BRACKET_OPEN) { if (tokenizer->get_token() != GDTokenizer::TK_CURLY_BRACKET_OPEN) {
@ -4079,7 +4080,7 @@ void GDParser::_parse_class(ClassNode *p_class) {
tokenizer->advance(); tokenizer->advance();
break; // End of enum break; // End of enum
} else if (tokenizer->get_token() != GDTokenizer::TK_IDENTIFIER) { } else if (!tokenizer->is_token_literal(0, true)) {
if (tokenizer->get_token() == GDTokenizer::TK_EOF) { if (tokenizer->get_token() == GDTokenizer::TK_EOF) {
_set_error("Unexpected end of file."); _set_error("Unexpected end of file.");
@ -4088,10 +4089,10 @@ void GDParser::_parse_class(ClassNode *p_class) {
} }
return; return;
} else { // tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER } else { // tokenizer->is_token_literal(0, true)
ClassNode::Constant constant; ClassNode::Constant constant;
constant.identifier = tokenizer->get_token_identifier(); constant.identifier = tokenizer->get_token_literal();
tokenizer->advance(); tokenizer->advance();

View File

@ -130,12 +130,222 @@ const char *GDTokenizer::token_names[TK_MAX] = {
"Cursor" "Cursor"
}; };
// Maps a GDScript built-in type name to its Variant::Type.
struct _bit {
	Variant::Type type;
	const char *text;
};
//built in types
// File-scope table shared by the tokenizer (text -> type) and
// get_token_literal (type -> text). Terminated by a VARIANT_MAX/NULL entry.
static const _bit _type_list[] = {
	//types
	{ Variant::BOOL, "bool" },
	{ Variant::INT, "int" },
	{ Variant::REAL, "float" },
	{ Variant::STRING, "String" },
	{ Variant::VECTOR2, "Vector2" },
	{ Variant::RECT2, "Rect2" },
	{ Variant::TRANSFORM2D, "Transform2D" },
	{ Variant::VECTOR3, "Vector3" },
	{ Variant::RECT3, "Rect3" },
	{ Variant::PLANE, "Plane" },
	{ Variant::QUAT, "Quat" },
	{ Variant::BASIS, "Basis" },
	{ Variant::TRANSFORM, "Transform" },
	{ Variant::COLOR, "Color" },
	{ Variant::_RID, "RID" },
	{ Variant::OBJECT, "Object" },
	{ Variant::NODE_PATH, "NodePath" },
	{ Variant::DICTIONARY, "Dictionary" },
	{ Variant::ARRAY, "Array" },
	{ Variant::POOL_BYTE_ARRAY, "PoolByteArray" },
	{ Variant::POOL_INT_ARRAY, "PoolIntArray" },
	{ Variant::POOL_REAL_ARRAY, "PoolFloatArray" },
	{ Variant::POOL_STRING_ARRAY, "PoolStringArray" },
	{ Variant::POOL_VECTOR2_ARRAY, "PoolVector2Array" },
	{ Variant::POOL_VECTOR3_ARRAY, "PoolVector3Array" },
	{ Variant::POOL_COLOR_ARRAY, "PoolColorArray" },
	{ Variant::VARIANT_MAX, NULL }, // sentinel: text == NULL ends iteration
};
// Maps a GDScript keyword spelling to its token.
struct _kws {
	GDTokenizer::Token token;
	const char *text;
};
// File-scope table shared by the tokenizer (text -> token) and
// get_token_literal (token -> text). Terminated by a TK_ERROR/NULL entry.
static const _kws _keyword_list[] = {
	//ops
	{ GDTokenizer::TK_OP_IN, "in" },
	{ GDTokenizer::TK_OP_NOT, "not" },
	{ GDTokenizer::TK_OP_OR, "or" },
	{ GDTokenizer::TK_OP_AND, "and" },
	//func
	{ GDTokenizer::TK_PR_FUNCTION, "func" },
	{ GDTokenizer::TK_PR_CLASS, "class" },
	{ GDTokenizer::TK_PR_EXTENDS, "extends" },
	{ GDTokenizer::TK_PR_IS, "is" },
	{ GDTokenizer::TK_PR_ONREADY, "onready" },
	{ GDTokenizer::TK_PR_TOOL, "tool" },
	{ GDTokenizer::TK_PR_STATIC, "static" },
	{ GDTokenizer::TK_PR_EXPORT, "export" },
	{ GDTokenizer::TK_PR_SETGET, "setget" },
	{ GDTokenizer::TK_PR_VAR, "var" },
	{ GDTokenizer::TK_PR_PRELOAD, "preload" },
	{ GDTokenizer::TK_PR_ASSERT, "assert" },
	{ GDTokenizer::TK_PR_YIELD, "yield" },
	{ GDTokenizer::TK_PR_SIGNAL, "signal" },
	{ GDTokenizer::TK_PR_BREAKPOINT, "breakpoint" },
	{ GDTokenizer::TK_PR_REMOTE, "remote" },
	{ GDTokenizer::TK_PR_MASTER, "master" },
	{ GDTokenizer::TK_PR_SLAVE, "slave" },
	{ GDTokenizer::TK_PR_SYNC, "sync" },
	{ GDTokenizer::TK_PR_CONST, "const" },
	{ GDTokenizer::TK_PR_ENUM, "enum" },
	//controlflow
	{ GDTokenizer::TK_CF_IF, "if" },
	{ GDTokenizer::TK_CF_ELIF, "elif" },
	{ GDTokenizer::TK_CF_ELSE, "else" },
	{ GDTokenizer::TK_CF_FOR, "for" },
	{ GDTokenizer::TK_CF_WHILE, "while" },
	{ GDTokenizer::TK_CF_DO, "do" },
	{ GDTokenizer::TK_CF_SWITCH, "switch" },
	{ GDTokenizer::TK_CF_CASE, "case" },
	{ GDTokenizer::TK_CF_BREAK, "break" },
	{ GDTokenizer::TK_CF_CONTINUE, "continue" },
	{ GDTokenizer::TK_CF_RETURN, "return" },
	{ GDTokenizer::TK_CF_MATCH, "match" },
	{ GDTokenizer::TK_CF_PASS, "pass" },
	{ GDTokenizer::TK_SELF, "self" },
	{ GDTokenizer::TK_CONST_PI, "PI" },
	{ GDTokenizer::TK_WILDCARD, "_" },
	{ GDTokenizer::TK_CONST_INF, "INF" },
	{ GDTokenizer::TK_CONST_NAN, "NAN" },
	{ GDTokenizer::TK_ERROR, NULL } // sentinel: text == NULL ends iteration
};
const char *GDTokenizer::get_token_name(Token p_token) { const char *GDTokenizer::get_token_name(Token p_token) {
ERR_FAIL_INDEX_V(p_token, TK_MAX, "<error>"); ERR_FAIL_INDEX_V(p_token, TK_MAX, "<error>");
return token_names[p_token]; return token_names[p_token];
} }
bool GDTokenizer::is_token_literal(int p_offset, bool variable_safe) const {
switch (get_token(p_offset)) {
// Can always be literal:
case TK_IDENTIFIER:
case TK_PR_ONREADY:
case TK_PR_TOOL:
case TK_PR_STATIC:
case TK_PR_EXPORT:
case TK_PR_SETGET:
case TK_PR_SIGNAL:
case TK_PR_REMOTE:
case TK_PR_MASTER:
case TK_PR_SLAVE:
case TK_PR_SYNC:
return true;
// Literal for non-variables only:
case TK_BUILT_IN_TYPE:
case TK_BUILT_IN_FUNC:
case TK_OP_IN:
case TK_OP_NOT:
//case TK_OP_OR:
//case TK_OP_AND:
case TK_PR_CLASS:
case TK_PR_CONST:
case TK_PR_ENUM:
case TK_PR_PRELOAD:
case TK_PR_FUNCTION:
case TK_PR_EXTENDS:
case TK_PR_ASSERT:
case TK_PR_YIELD:
case TK_PR_VAR:
case TK_CF_IF:
case TK_CF_ELIF:
case TK_CF_ELSE:
case TK_CF_FOR:
case TK_CF_WHILE:
case TK_CF_DO:
case TK_CF_SWITCH:
case TK_CF_CASE:
case TK_CF_BREAK:
case TK_CF_CONTINUE:
case TK_CF_RETURN:
case TK_CF_MATCH:
case TK_CF_PASS:
case TK_SELF:
case TK_CONST_PI:
case TK_WILDCARD:
case TK_CONST_INF:
case TK_CONST_NAN:
case TK_ERROR:
return !variable_safe;
case TK_CONSTANT: {
switch (get_token_constant(p_offset).get_type()) {
case Variant::NIL:
case Variant::BOOL:
return true;
default:
return false;
}
}
default:
return false;
}
}
// Returns the textual (StringName) spelling of the token at p_offset, for
// tokens that can act as identifiers (see is_token_literal). Anything else
// reaches the error path at the bottom and yields an empty name.
StringName GDTokenizer::get_token_literal(int p_offset) const {
	Token token = get_token(p_offset);
	switch (token) {
		case TK_IDENTIFIER:
			return get_token_identifier(p_offset);
		case TK_BUILT_IN_TYPE: {
			// Map the Variant type back to its GDScript spelling via _type_list.
			Variant::Type type = get_token_type(p_offset);
			int idx = 0;
			while (_type_list[idx].text) {
				if (type == _type_list[idx].type) {
					return _type_list[idx].text;
				}
				idx++;
			}
		} break; // Shouldn't get here, stuff happens
		case TK_BUILT_IN_FUNC:
			return GDFunctions::get_func_name(get_token_built_in_func(p_offset));
		case TK_CONSTANT: {
			const Variant value = get_token_constant(p_offset);
			switch (value.get_type()) {
				case Variant::NIL:
					return "null";
				case Variant::BOOL:
					return value ? "true" : "false";
				default: {}
			}
			// FALLTHROUGH (intentional): a non nil/bool constant has no
			// literal spelling, so it drops into the cases below and then
			// out to the error path.
		}
		case TK_OP_AND:
		case TK_OP_OR:
			break; // Don't get into default, since they can be non-literal
		default: {
			// Every remaining keyword has a fixed spelling in _keyword_list.
			int idx = 0;
			while (_keyword_list[idx].text) {
				if (token == _keyword_list[idx].token) {
					return _keyword_list[idx].text;
				}
				idx++;
			}
		}
	}
	ERR_EXPLAIN("Failed to get token literal");
	ERR_FAIL_V("");
}
static bool _is_text_char(CharType c) { static bool _is_text_char(CharType c) {
return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || c == '_'; return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || c == '_';
@ -779,51 +989,14 @@ void GDTokenizerText::_advance() {
bool found = false; bool found = false;
struct _bit {
Variant::Type type;
const char *text;
};
//built in types
static const _bit type_list[] = {
//types
{ Variant::BOOL, "bool" },
{ Variant::INT, "int" },
{ Variant::REAL, "float" },
{ Variant::STRING, "String" },
{ Variant::VECTOR2, "Vector2" },
{ Variant::RECT2, "Rect2" },
{ Variant::TRANSFORM2D, "Transform2D" },
{ Variant::VECTOR3, "Vector3" },
{ Variant::RECT3, "Rect3" },
{ Variant::PLANE, "Plane" },
{ Variant::QUAT, "Quat" },
{ Variant::BASIS, "Basis" },
{ Variant::TRANSFORM, "Transform" },
{ Variant::COLOR, "Color" },
{ Variant::_RID, "RID" },
{ Variant::OBJECT, "Object" },
{ Variant::NODE_PATH, "NodePath" },
{ Variant::DICTIONARY, "Dictionary" },
{ Variant::ARRAY, "Array" },
{ Variant::POOL_BYTE_ARRAY, "PoolByteArray" },
{ Variant::POOL_INT_ARRAY, "PoolIntArray" },
{ Variant::POOL_REAL_ARRAY, "PoolFloatArray" },
{ Variant::POOL_STRING_ARRAY, "PoolStringArray" },
{ Variant::POOL_VECTOR2_ARRAY, "PoolVector2Array" },
{ Variant::POOL_VECTOR3_ARRAY, "PoolVector3Array" },
{ Variant::POOL_COLOR_ARRAY, "PoolColorArray" },
{ Variant::VARIANT_MAX, NULL },
};
{ {
int idx = 0; int idx = 0;
while (type_list[idx].text) { while (_type_list[idx].text) {
if (str == type_list[idx].text) { if (str == _type_list[idx].text) {
_make_type(type_list[idx].type); _make_type(_type_list[idx].type);
found = true; found = true;
break; break;
} }
@ -844,74 +1017,18 @@ void GDTokenizerText::_advance() {
break; break;
} }
} }
//keyword
} }
if (!found) { if (!found) {
//keyword
struct _kws {
Token token;
const char *text;
};
static const _kws keyword_list[] = {
//ops
{ TK_OP_IN, "in" },
{ TK_OP_NOT, "not" },
{ TK_OP_OR, "or" },
{ TK_OP_AND, "and" },
//func
{ TK_PR_FUNCTION, "func" },
{ TK_PR_CLASS, "class" },
{ TK_PR_EXTENDS, "extends" },
{ TK_PR_IS, "is" },
{ TK_PR_ONREADY, "onready" },
{ TK_PR_TOOL, "tool" },
{ TK_PR_STATIC, "static" },
{ TK_PR_EXPORT, "export" },
{ TK_PR_SETGET, "setget" },
{ TK_PR_VAR, "var" },
{ TK_PR_PRELOAD, "preload" },
{ TK_PR_ASSERT, "assert" },
{ TK_PR_YIELD, "yield" },
{ TK_PR_SIGNAL, "signal" },
{ TK_PR_BREAKPOINT, "breakpoint" },
{ TK_PR_REMOTE, "remote" },
{ TK_PR_MASTER, "master" },
{ TK_PR_SLAVE, "slave" },
{ TK_PR_SYNC, "sync" },
{ TK_PR_CONST, "const" },
{ TK_PR_ENUM, "enum" },
//controlflow
{ TK_CF_IF, "if" },
{ TK_CF_ELIF, "elif" },
{ TK_CF_ELSE, "else" },
{ TK_CF_FOR, "for" },
{ TK_CF_WHILE, "while" },
{ TK_CF_DO, "do" },
{ TK_CF_SWITCH, "switch" },
{ TK_CF_CASE, "case" },
{ TK_CF_BREAK, "break" },
{ TK_CF_CONTINUE, "continue" },
{ TK_CF_RETURN, "return" },
{ TK_CF_MATCH, "match" },
{ TK_CF_PASS, "pass" },
{ TK_SELF, "self" },
{ TK_CONST_PI, "PI" },
{ TK_WILDCARD, "_" },
{ TK_CONST_INF, "INF" },
{ TK_CONST_NAN, "NAN" },
{ TK_ERROR, NULL }
};
int idx = 0; int idx = 0;
found = false; found = false;
while (keyword_list[idx].text) { while (_keyword_list[idx].text) {
if (str == keyword_list[idx].text) { if (str == _keyword_list[idx].text) {
_make_token(keyword_list[idx].token); _make_token(_keyword_list[idx].token);
found = true; found = true;
break; break;
} }
@ -992,6 +1109,7 @@ const Variant &GDTokenizerText::get_token_constant(int p_offset) const {
ERR_FAIL_COND_V(tk_rb[ofs].type != TK_CONSTANT, tk_rb[0].constant); ERR_FAIL_COND_V(tk_rb[ofs].type != TK_CONSTANT, tk_rb[0].constant);
return tk_rb[ofs].constant; return tk_rb[ofs].constant;
} }
StringName GDTokenizerText::get_token_identifier(int p_offset) const { StringName GDTokenizerText::get_token_identifier(int p_offset) const {
ERR_FAIL_COND_V(p_offset <= -MAX_LOOKAHEAD, StringName()); ERR_FAIL_COND_V(p_offset <= -MAX_LOOKAHEAD, StringName());

View File

@ -149,6 +149,9 @@ protected:
public: public:
static const char *get_token_name(Token p_token); static const char *get_token_name(Token p_token);
bool is_token_literal(int p_offset = 0, bool variable_safe = false) const;
StringName get_token_literal(int p_offset = 0) const;
virtual const Variant &get_token_constant(int p_offset = 0) const = 0; virtual const Variant &get_token_constant(int p_offset = 0) const = 0;
virtual Token get_token(int p_offset = 0) const = 0; virtual Token get_token(int p_offset = 0) const = 0;
virtual StringName get_token_identifier(int p_offset = 0) const = 0; virtual StringName get_token_identifier(int p_offset = 0) const = 0;