nearly completely usable

master
cynic 3 years ago
commit 32875732b5
  1. 85
      lib.py
  2. 133
      main.py
  3. 15
      test.ags

@ -0,0 +1,85 @@
# MISSNG UR LEYEBRARY?!!?!!
# BET UR MAD BAKADESU HAHAHAHHAHAHAHAH
# t. cirno

# Global interpreter state.  Keys are token tuples such as
# ("identifier", "a"); values are token tuples or nested token lists.
idspace = {}    # variable bindings created by (def ...)
funcspace = {}  # function bodies created by (defun ...)


def execute(ctx):
    """Evaluate one parsed expression tree and return its value.

    `ctx` is either a leaf token tuple -- ("number", 1.0),
    ("string", "s"), ("identifier", "x") -- or a list whose head token
    names the operation.  Built-ins: id, miracle (calls into an already
    imported Python module), def, +, defun; any other head is looked up
    in `funcspace`.

    Bug fix: the result of `_execute` used to be computed and silently
    discarded; it is now returned (previous callers ignored the None
    return, so this is backward compatible).
    """
    import sys

    def _ident(name):
        # Build an identifier token.
        return ("identifier", name)

    def _destr(t):
        # Unwrap a string token; any other token kind collapses to "".
        return t[1] if t[0] == "string" else ""

    def _fixarr(a):
        # Replace identifier tokens in an argument list with their
        # idspace bindings.  Mutates and returns `a`.
        # Bug fix: the old `a[a.index(i)] = ...` always hit the FIRST
        # occurrence, so duplicate identifiers resolved incorrectly.
        for idx, tok in enumerate(a):
            if tok[0] == "identifier":
                a[idx] = idspace[tok]
        return a

    def _recursereplace(node, fr, to):
        # Structurally substitute `fr` -> `to` throughout a token tree.
        if isinstance(node, list):
            return [_recursereplace(child, fr, to) for child in node]
        return to if node == fr else node

    def _execute(ctx):
        if isinstance(ctx, list) and ctx[0][0] == "identifier":
            # NOTE: every element -- including bodies handed to defun --
            # is evaluated eagerly, matching the original design.
            subs = [_execute(a) for a in ctx]
            if ctx[0][1] == "id":
                # (id x) unwraps a single value; (id x y ...) keeps a list.
                return subs[1] if len(subs[1:]) == 1 else subs[1:]
            elif ctx[0] == _ident("miracle"):
                # FFI escape hatch: (miracle "module" "attr" (args...))
                # calls module.attr(*values); the module must already be
                # present in sys.modules.
                return getattr(sys.modules[_destr(subs[1])], _destr(subs[2]))(
                    *[i[1] for i in _fixarr(subs[3])]
                )
            elif ctx[0] == _ident("def"):
                idspace[subs[1]] = subs[2]
                return idspace[subs[1]]
            elif ctx[0] == _ident("+"):
                # The result keeps the type tag of the first operand.
                return (subs[1][0], subs[1][1] + subs[2][1])
            elif ctx[0] == _ident("defun"):
                funcspace[subs[1]] = subs[2]
                return funcspace[subs[1]]
            elif ctx[0] in funcspace:
                # User function: substitute $1, $2, ... with the already
                # evaluated call arguments, then evaluate the body.
                prototype = funcspace[ctx[0]]
                for idx, arg in enumerate(subs[1:], start=1):
                    prototype = _recursereplace(
                        prototype, ("identifier", f"${idx}"), arg
                    )
                return _execute(prototype)
            else:
                print("no such function", ctx[0])
                return None
        else:
            # Leaf tokens evaluate to themselves; bare lists evaluate
            # element-wise.
            if isinstance(ctx, list):
                return [_execute(a) for a in ctx]
            return ctx

    return _execute(ctx)
def oldexecute(ctx):
    """Legacy evaluator kept for reference; superseded by execute().

    Handles only the miracle / def / eval heads and always returns None.
    """
    import sys

    def _as_str(tok):
        # Unwrap a string token; non-strings collapse to "".
        if tok[0] == "string":
            return tok[1]
        return ""

    def _resolve(args):
        # Swap identifier tokens for their idspace bindings, in place.
        # NOTE(review): uses list.index, so a duplicated identifier only
        # resolves its first occurrence -- preserved from the original.
        for tok in args:
            if tok[0] == "identifier":
                args[args.index(tok)] = idspace[tok]
        return args

    head = ctx[0]
    if head == ("identifier", "miracle"):
        # (miracle "module" "attr" (args...)) -> module.attr(*values)
        module = sys.modules[_as_str(ctx[1])]
        target = getattr(module, _as_str(ctx[2]))
        target(*[tok[1] for tok in _resolve(ctx[3])])
    elif head == ("identifier", "def"):
        idspace[ctx[1]] = ctx[2]
    elif head == ("identifier", "eval"):
        # Delegates to the current evaluator.
        execute(ctx[1])

@ -0,0 +1,133 @@
import lib, sys
def isnum(n):
    """Return True if `n` parses as a float (e.g. "3", "-1.5", "1e3")."""
    try:
        float(n)
    except (TypeError, ValueError):
        # Bug fix: the bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; only conversion failures mean "not a number".
        return False
    return True
# Characters that must become standalone tokens.
reps = ["(", ")", "\"", ","]
# Read the source program named on the command line.
# Bug fix: the file handle used to be leaked; `with` closes it.
with open(sys.argv[1]) as _src:
    f = _src.read()
# Protect real spaces with a sentinel word so the spaces inside string
# literals survive the split() below.
f = f.replace(" ", " RDANEELOLIVAW ") # spacer
# Pad the special characters so split() isolates each as its own word.
for r in reps:
    f = f.replace(r, " " + r + " ")
f = f.split()
def tokenize(f):
    """Collapse the padded word list `f` into raw tokens.

    String literals (between double-quote words) are re-joined into one
    token with their spaces restored; numeric words become floats; the
    RDANEELOLIVAW spacer words are dropped; everything else (parens,
    commas, identifiers) passes through unchanged.
    """
    toks = []
    pos = 0
    while pos < len(f):
        word = f[pos]
        if word == "\"":
            # Re-assemble the whole literal up to the closing quote and
            # turn the spacer sentinels back into spaces.
            closing = f.index("\"", pos + 1)
            literal = "".join(f[pos:closing + 1])
            toks.append(literal.replace("RDANEELOLIVAW", " "))
            pos = closing + 1
        elif word == "RDANEELOLIVAW":
            # Spacer outside a string: discard.
            pos += 1
        elif isnum(word):
            toks.append(float(word))
            pos += 1
        else:
            # Parens, commas and identifiers fall through verbatim.
            toks.append(word)
            pos += 1
    return toks
def lex(toks):
    """Tag raw tokens with their kind.

    Produces (kind, value) pairs: floats -> ("number", x); quoted text
    -> ("string", body); parens -> ("open", depth) / ("close", depth)
    where depth is the nesting level; commas are discarded; anything
    left becomes an ("identifier", name) token.
    """
    out = []
    depth = 0
    for tok in toks:
        if isinstance(tok, float):
            out.append(("number", tok))
        elif tok == "(":
            out.append(("open", depth))
            depth += 1
        elif tok == ")":
            depth -= 1
            out.append(("close", depth))
        elif tok == ",":
            # Separators carry no meaning past this point.
            continue
        elif tok[0] == "\"":
            # Strip the surrounding quotes.
            out.append(("string", tok[1:-1]))
        else:
            out.append(("identifier", tok))
    return out
def shit_structurize(chunk):
    """Abandoned structurizer; superseded by structurize() below.

    NOTE(review): appears unfinished and is never called -- the inner
    loop re-tests chunk[0][0] (always true inside this branch) instead
    of the loop variable `i`, it splits on ("sep", None) tokens that
    lex() no longer emits, and it leaves debug prints in.  Kept verbatim.
    """
    def splitlist(l, delim):
        # Split `l` on `delim`, dropping empty runs; a single remaining
        # run is returned unwrapped rather than as [run].
        res = []
        tmp = []
        for i in l:
            if i == delim:
                res.append(tmp)
                tmp = []
            else:
                tmp.append(i)
        res.append(tmp)
        res = list(filter(lambda a: a != [], res))
        if type(res) == type([]) and len(res) == 1:
            return res[0]
        return res
    if chunk[0][0] == "open":
        # Find the matching close for the leading open paren.
        end = chunk.index(("close", chunk[0][1]))
        cunny = []
        cunny.append(chunk[1])
        jc = splitlist(chunk[2:end], ("sep", None))
        for i in jc:
            if chunk[0][0] == "open":  # NOTE(review): likely meant i[0]
                #chunk.append(structurize(jc))
                #break
                print(chunk)
                cunny.append(structurize(i))
        print(cunny, len(cunny), jc)
        return cunny
    else:
        # Not a parenthesized chunk: pass through unchanged.
        return chunk
def structurize(program):
    """Fold a flat lexed token stream into nested lists.

    Matching ("open", d) / ("close", d) pairs are collapsed deepest
    level first, so each parenthesized group becomes one sub-list of
    the result.  The input list is modified in place and returned.
    """
    def _flat(toks):
        # True once no ("open", ...) markers remain at the top level.
        for tok in toks:
            if tok[0] == "open":
                return False
        return True

    def _deepest(toks):
        # Depth of the most deeply nested open paren (-9001 if none).
        best = -9001
        for tok in toks:
            if tok[0] == "open" and tok[1] > best:
                best = tok[1]
        return best

    level = _deepest(program)
    tree = program
    while not _flat(tree):
        # Collapse every group at the current (deepest) level, scanning
        # left to right; the slice assignment shrinks the list in place,
        # and already-collapsed sub-lists never match the "open" test.
        pos = 0
        while pos < len(tree):
            tok = tree[pos]
            if tok[0] == "open" and tok[1] == level:
                end = tree.index(("close", level), pos)
                tree[pos:end + 1] = [tree[pos + 1:end]]
            pos += 1
        level -= 1
    return tree
#print(tokenize(f))
#print(lex(tokenize(f)), "\n")
# Full pipeline: padded words -> raw tokens -> tagged tokens -> trees.
program = structurize(lex(tokenize(f)))
#lib.execute(structurize(lex(tokenize(f))))
#print(program)
# Evaluate each top-level form in the source file independently.
for strand in program:
    lib.execute(strand)

@ -0,0 +1,15 @@
(defun spit
(id miracle
"builtins" "print" (id $1)
)
)
(defun thesis
(id spit
("this language belongs to makise kurisu. there are many like it, but this one is hers.")
)
)
(def a (id (id "desu")))
(spit
("lol swej" (+ 9000 1) a)
)
(thesis)
Loading…
Cancel
Save