From 0b4b8074cf647d3df13983c07eea9a051feb900a Mon Sep 17 00:00:00 2001
From: Draguve
Date: Tue, 30 Jan 2024 14:17:21 -0800
Subject: [PATCH 1/3] Fix package imports and parser field node names

---
 .gitignore          | 1 +
 __init__.py         | 0
 pico8/lua/parser.py | 4 ++--
 3 files changed, 3 insertions(+), 2 deletions(-)
 create mode 100644 __init__.py

diff --git a/.gitignore b/.gitignore
index 9009615..aa1a390 100644
--- a/.gitignore
+++ b/.gitignore
@@ -15,3 +15,4 @@
 docs/_build/
 .pytest_cache/
 .DS_Store
+build/
\ No newline at end of file
diff --git a/__init__.py b/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pico8/lua/parser.py b/pico8/lua/parser.py
index b1d08b2..4868051 100644
--- a/pico8/lua/parser.py
+++ b/pico8/lua/parser.py
@@ -47,8 +47,8 @@
     'Function',
     'FunctionBody',
     'TableConstructor',
-    'FieldOtherThing',
-    'FieldNamed',
+    'FieldExpKey',
+    'FieldNamedKey',
     'FieldExp',
 ]

From 6622c97950ffb47bebe508c9e9aa8cc513465635 Mon Sep 17 00:00:00 2001
From: Draguve
Date: Tue, 30 Jan 2024 15:47:18 -0800
Subject: [PATCH 2/3] Added remaining assignment operators

---
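Notes (editorial, in the region git am ignores): PICO-8 extends Lua with
compound assignment operators (a += 1, a ..= "s", a >>>= 2, and so on);
this patch adds the forms the lexer and parser were still missing. Since
the lexer takes the first pattern that matches, every multi-character
operator has to be listed before any of its prefixes. A minimal sketch of
checking that ordering; first_symbol_match is not picotool code, and it
reaches into the module-internal _TOKEN_MATCHERS list, so treat it as
illustrative only:

    from pico8.lua.lexer import _TOKEN_MATCHERS, TokSymbol

    def first_symbol_match(src):
        # Scan the ordered TokSymbol patterns the way the lexer does and
        # return the text matched by the first one that hits.
        for pattern, tok_class in _TOKEN_MATCHERS:
            if tok_class is not TokSymbol:
                continue
            m = pattern.match(src)
            if m:
                return m.group(0)
        return None

    assert first_symbol_match(b'>>>= 1') == b'>>>='  # not b'>>>' or b'>>'
    assert first_symbol_match(b'..= "s"') == b'..='  # not b'..'
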
 pico8/lua/lexer.py  | 21 +++++++++++----------
 pico8/lua/parser.py | 35 ++++++++++++++++++++++-------------
 2 files changed, 33 insertions(+), 23 deletions(-)

diff --git a/pico8/lua/lexer.py b/pico8/lua/lexer.py
index e188753..f6aaaa0 100644
--- a/pico8/lua/lexer.py
+++ b/pico8/lua/lexer.py
@@ -4,7 +4,6 @@
 
 from .. import util
 
-
 __all__ = [
     'LexerError',
     'Token',
@@ -200,15 +199,15 @@ def value(self):
         if b'x' in self._data:
             if b'.' in self._data:
                 integer, frac = self._data.split(b'.')
                 return (
-                    float(int(integer, 16)) +
-                    float(int(frac, 16))/(16**len(frac)))
+                    float(int(integer, 16)) +
+                    float(int(frac, 16)) / (16 ** len(frac)))
             return float(int(self._data, 16))
         if b'b' in self._data:
             if b'.' in self._data:
                 integer, frac = self._data.split(b'.')
                 return (
-                    float(int(integer, 2)) +
-                    float(int(frac, 2))/(2**len(frac)))
+                    float(int(integer, 2)) +
+                    float(int(frac, 2)) / (2 ** len(frac)))
             return float(int(self._data, 2))
         return float(self._data)
@@ -271,14 +270,17 @@ class TokSymbol(Token):
     (re.compile(br'::[a-zA-Z_\x80-\xff][a-zA-Z0-9_\x80-\xff]*::'), TokLabel),
 ])
 _TOKEN_MATCHERS.extend([
-    (re.compile(br'\b'+keyword+br'\b'), TokKeyword)
+    (re.compile(br'\b' + keyword + br'\b'), TokKeyword)
     for keyword in LUA_KEYWORDS])
 # REMINDER: token patterns are ordered! The lexer stops at the first matching
 # pattern. This is especially tricky for the symbols because you have to make
 # sure longer symbols come before their prefixes. (Otherwise a symbol like
 # b'>>>' would never match.)
 _TOKEN_MATCHERS.extend([
     (re.compile(symbol), TokSymbol) for symbol in [
+        br'\\=', br'\+=', b'-=', br'\*=', b'/=', b'%=', br'\.\.=',
+        br'\^=', br'\|=', b'&=', br'\^\^=', b'<<=',
+        b'>>=', b'>>>=', b'<<>=', b'>><=',
         b'==', b'~=', b'!=', b'<=', b'>=',
         b'&', br'\|', br'\^\^', b'~', b'<<>',
         b'>>>', b'>><', b'<<', b'>>', br'\\',
@@ -311,7 +313,6 @@ def __init__(self, version):
         self._tokens = []
         self._cur_lineno = 0
         self._cur_charno = 0
-
         # If inside a string literal (else None):
         # * the pos of the start of the string
         self._in_string_lineno = None
@@ -360,7 +361,7 @@ def _process_token(self, s):
         if self._in_string is not None:
             # Continue string literal.
             while i < len(s):
-                c = s[i:i+1]
+                c = s[i:i + 1]
 
                 if c == self._in_string_delim:
                     # End string literal.
@@ -378,12 +379,12 @@ def _process_token(self, s):
 
                 if c == b'\\':
                     # Escape character.
-                    num_m = re.match(br'\d{1,3}', s[i+1:])
+                    num_m = re.match(br'\d{1,3}', s[i + 1:])
                     if num_m:
                         c = bytes([int(num_m.group(0))])
                         i += len(num_m.group(0))
                     else:
-                        next_c = s[i+1:i+2]
+                        next_c = s[i + 1:i + 2]
                         if next_c in _STRING_ESCAPES:
                             c = _STRING_ESCAPES[next_c]
                             i += 1
diff --git a/pico8/lua/parser.py b/pico8/lua/parser.py
index 4868051..d8278dc 100644
--- a/pico8/lua/parser.py
+++ b/pico8/lua/parser.py
@@ -9,7 +9,6 @@
 from .. import util
 from . import lexer
 
-
 __all__ = [  # noqa: F822
     'Parser',
     'ParserError',
@@ -96,7 +95,7 @@ def _add_token_group(self, fieldname, fieldvalue, tokenlist, pos):
                 pos = self._add_token_group(
                     (fieldname, inner_i), inner, tokenlist, pos)
             else:
-                self._token_groups.append(tokenlist[pos:pos+1])
+                self._token_groups.append(tokenlist[pos:pos + 1])
                 pos += 1
         return pos
@@ -226,13 +225,13 @@ def node_init(self, *args, **kwargs):
         for k in kwargs:
             setattr(self, k, kwargs[k])
 
+
     cls = type(name, (Node,), {'__init__': node_init,
                                '_name': name,
                                '_fields': fields,
                                '_children': None})
     globals()[name] = cls
-
 
 # (!= is PICO-8 specific.)
 BINOP_PATS = (tuple([lexer.TokSymbol(sym) for sym in [
     b'&', b'|', b'^^', b'<<', b'>>', b'>>>', b'<<>', b'>><', b'\\',
@@ -305,15 +304,15 @@ def _accept(self, tok_pattern):
         while True:
             cur_tok = self._peek()
             if (cur_tok is None or
-                cur_tok.matches(tok_pattern) or
-                (not isinstance(cur_tok, lexer.TokSpace) and
-                 not isinstance(cur_tok, lexer.TokNewline) and
-                 not isinstance(cur_tok, lexer.TokComment))):
+                    cur_tok.matches(tok_pattern) or
+                    (not isinstance(cur_tok, lexer.TokSpace) and
+                     not isinstance(cur_tok, lexer.TokNewline) and
+                     not isinstance(cur_tok, lexer.TokComment))):
                 break
             self._pos += 1
 
         if (cur_tok is not None and
-            cur_tok.matches(tok_pattern) and
+                cur_tok.matches(tok_pattern) and
                 (self._max_pos is None or self._pos < self._max_pos)):
             self._pos += 1
             return cur_tok
@@ -435,8 +434,18 @@ def _stat(self):
                          self._accept(lexer.TokSymbol(b'-=')) or
                          self._accept(lexer.TokSymbol(b'*=')) or
                          self._accept(lexer.TokSymbol(b'/=')) or
+                         self._accept(lexer.TokSymbol(b'\\=')) or
                          self._accept(lexer.TokSymbol(b'%=')) or
-                         self._accept(lexer.TokSymbol(b'..=')))
+                         self._accept(lexer.TokSymbol(b'^=')) or
+                         self._accept(lexer.TokSymbol(b'..=')) or
+                         self._accept(lexer.TokSymbol(b'|=')) or
+                         self._accept(lexer.TokSymbol(b'&=')) or
+                         self._accept(lexer.TokSymbol(b'^^=')) or
+                         self._accept(lexer.TokSymbol(b'<<=')) or
+                         self._accept(lexer.TokSymbol(b'>>=')) or
+                         self._accept(lexer.TokSymbol(b'>>>=')) or
+                         self._accept(lexer.TokSymbol(b'<<>=')) or
+                         self._accept(lexer.TokSymbol(b'>><=')))
             if assign_op is not None:
                 explist = self._assert(self._explist(),
                                        'Expected expression in assignment')
@@ -475,7 +484,7 @@ def _stat(self):
 
             then_pos = self._pos
             if (self._accept(lexer.TokKeyword(b'then')) is None and
-                self._accept(lexer.TokKeyword(b'do')) is None and
+                    self._accept(lexer.TokKeyword(b'do')) is None and
                 (self._tokens[exp._end_token_pos - 1] == lexer.TokSymbol(b')'))):
                 # Check for PICO-8 short form.
@@ -668,7 +677,7 @@ def _var(self):
         """
         exp_prefix = self._prefixexp()
         if (isinstance(exp_prefix, VarName) or
-            isinstance(exp_prefix, VarAttribute) or
+                isinstance(exp_prefix, VarAttribute) or
                 isinstance(exp_prefix, VarIndex)):
             return exp_prefix
         return None
@@ -920,8 +929,8 @@ def _functioncall(self):
         full_exp = self._prefixexp()
 
         if (full_exp is None or
-            (not isinstance(full_exp, FunctionCall) and
-             not isinstance(full_exp, FunctionCallMethod))):
+                (not isinstance(full_exp, FunctionCall) and
+                 not isinstance(full_exp, FunctionCallMethod))):
             self._pos = pos
             return None
         return full_exp

From 6c86991b0f6d1c1730429df80f9d6ea0d8ded817 Mon Sep 17 00:00:00 2001
From: Draguve
Date: Wed, 31 Jan 2024 23:38:18 -0800
Subject: [PATCH 3/3] Fix parsing of the PICO-8 while shorthand

---
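Notes (editorial, in the region git am ignores): PICO-8 accepts a one-line
while with a parenthesized condition and no do/end, e.g.
while (x < 3) x += 1, alongside the standard
while x < 3 do x += 1 end. The hunk below mirrors the existing short-if
handling: if do is missing and the condition ends in ')', the body is
taken to be the rest of the line. A minimal sketch of driving both forms
through the lexer and parser; process_lines, process_tokens, tokens, and
root are assumed entry points based on how picotool wires the two classes
together elsewhere, and version 8 is an arbitrary choice:

    from pico8.lua import lexer, parser

    def parse_chunk(src):
        # Lex one PICO-8 source string, then hand the tokens to the parser.
        lx = lexer.Lexer(version=8)
        lx.process_lines([src])       # assumed API
        p = parser.Parser(version=8)
        p.process_tokens(lx.tokens)   # assumed API
        return p.root                 # assumed: root node of the parse tree

    parse_chunk(b'while (x < 3) x += 1\n')       # short form
    parse_chunk(b'while x < 3 do x += 1 end\n')  # block form
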
 pico8/lua/parser.py | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/pico8/lua/parser.py b/pico8/lua/parser.py
index d8278dc..d322e8b 100644
--- a/pico8/lua/parser.py
+++ b/pico8/lua/parser.py
@@ -465,6 +465,22 @@ def _stat(self):
 
         if self._accept(lexer.TokKeyword(b'while')) is not None:
             exp = self._assert(self._exp(), 'exp in while')
+            do_pos = self._pos
+            if (self._accept(lexer.TokKeyword(b'do')) is None and
+                    (self._tokens[exp._end_token_pos - 1] == lexer.TokSymbol(b')'))):
+                # Check for PICO-8 short form.
+                do_end_pos = exp._end_token_pos
+                while (do_end_pos < len(self._tokens) and
+                       not self._tokens[do_end_pos].matches(lexer.TokNewline)):
+                    do_end_pos += 1
+                try:
+                    self._max_pos = do_end_pos
+                    block = self._assert(self._chunk(),
+                                         'valid chunk in short-while')
+                finally:
+                    self._max_pos = None
+                return StatWhile(exp, block, start=pos, end=self._pos, short_while=True)
+            self._pos = do_pos
             self._expect(lexer.TokKeyword(b'do'))
             block = self._assert(self._chunk(), 'block in while')
             self._expect(lexer.TokKeyword(b'end'))
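
Notes (editorial): both short forms lean on the parser's _max_pos fence.
_accept refuses to consume a matching token once _pos reaches _max_pos, so
setting the fence to the first newline after the condition keeps the
one-line body from swallowing tokens on later lines. A distilled,
self-contained illustration of that design; TinyCursor is a hypothetical
stand-in, not picotool code:

    class TinyCursor:
        def __init__(self, toks):
            self.toks, self.pos, self.max_pos = toks, 0, None

        def accept(self, want):
            # Mirrors Parser._accept: only succeed below the max_pos fence.
            if self.pos < len(self.toks) and self.toks[self.pos] == want:
                if self.max_pos is None or self.pos < self.max_pos:
                    self.pos += 1
                    return want
            return None

    c = TinyCursor(['x', '+=', '1', '\n', 'y'])
    c.max_pos = 3                  # fence sits at the newline token
    assert c.accept('x') and c.accept('+=') and c.accept('1')
    assert c.accept('\n') is None  # fenced off: the body ends at the line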