restructure so preprocessor expansion is a chainable state-modifier

master
cynic 3 years ago
parent 1bddcf8fd3
commit 07ec7e31bf
1 changed file:
    main.py (11 lines changed)

@@ -6,9 +6,8 @@ def isnum(n):
     except:
         return False
     return True
-reps = ["(", ")", "\"", ","]
-f = open(sys.argv[1]).read()
-def expand(file, path):
-    working_directory = pathlib.Path(path).parent.resolve()
-    tmp = file.replace("\\\n", "").split("\n")
+def preprocess(filename):
+    def expand(file, path):
+        working_directory = pathlib.Path(path).parent.resolve()
+        tmp = file.replace("\\\n", "").split("\n")
@@ -41,12 +40,12 @@ def expand(file, path):
-        #print(i, "\nsep")
-    return (defs, "\n".join(tmp))
-f = expand(f, sys.argv[1])[1]
-f = f.replace(" ", " RDANEELOLIVAW ") # spacer
-for r in reps: f = f.replace(r, " "+r+" ")
-f = f.split()
+            #print(i, "\nsep")
+        return (defs, "\n".join(tmp))
+    f = open(filename).read()
+    f = expand(f, sys.argv[1])[1]
+    f = f.replace(" ", " RDANEELOLIVAW ") # spacer
+    for r in ["(", ")", "\"", ","]: f = f.replace(r, " "+r+" ")
+    return f.split()
 def tokenize(f):
     toks = []
@@ -125,7 +124,7 @@ def structurize(program):
 #print(tokenize(f))
 #print(lex(tokenize(f)), "\n")
-program = structurize(lex(tokenize(f)))
+program = structurize(lex(tokenize(preprocess(sys.argv[1]))))
 #lib.execute(structurize(lex(tokenize(f))))
 #print(program)
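After this commit, preprocessing reads as a single chainable function. Below is a minimal sketch of the resulting shape, assuming expand stays nested and keeps the (defs, text) return value seen in the diff; expand's full body is not visible here, so it is stubbed with a pass-through:

    import sys
    import pathlib

    def preprocess(filename):
        def expand(file, path):
            # Stub: the real body (not fully shown in the diff) collects
            # definitions into defs while rewriting the source lines.
            working_directory = pathlib.Path(path).parent.resolve()  # unused in this stub
            tmp = file.replace("\\\n", "").split("\n")  # join backslash-continued lines
            defs = {}
            return (defs, "\n".join(tmp))

        f = open(filename).read()
        f = expand(f, sys.argv[1])[1]          # note: still reads sys.argv[1], not filename
        f = f.replace(" ", " RDANEELOLIVAW ")  # spacer sentinel preserving literal spaces
        for r in ["(", ")", "\"", ","]:
            f = f.replace(r, " " + r + " ")    # pad punctuation so split() isolates it
        return f.split()                       # token stream, ready for tokenize()

The return value chains directly, as the last hunk shows: structurize(lex(tokenize(preprocess(sys.argv[1])))). One wrinkle worth noting: the expand call still hardcodes sys.argv[1] as the include path, so preprocess is only truly chainable for the file named on the command line.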
