Astar, fizzbuzz oneline, gerlang

trollhase committed on 2013-09-26 21:59:25 +02:00
commit 35f9682162 (parent a40d6cb81c)
3 changed files with 306 additions and 0 deletions

astar.py (new file, 108 lines)

@@ -0,0 +1,108 @@
from math import sqrt


class PriorityQueue:
    def __init__(self):
        self.nlist = []
        self.plookup = {}

    def put(self, node, priority):
        # insert the node if it is new, then (re)set its priority and keep the list sorted
        if node not in self.nlist:
            self.nlist.append(node)
        self.plookup[node] = priority
        self.__sort()

    def __sort(self):
        self.nlist.sort(key=lambda node: self.plookup[node])

    def __contains__(self, node):
        return self.nlist.__contains__(node)

    def __getitem__(self, key):
        return self.nlist.__getitem__(key)

    def get(self):
        # pop the node with the lowest priority
        m = self.nlist[0]
        del self.nlist[0]
        del self.plookup[m]
        return m

    def empty(self):
        return len(self.nlist) == 0


class AStar:
    def solve(self, begin, end):
        # set up data structures for search
        glookup = {}    # cheapest known cost from begin to each node
        pdlookup = {}   # predecessor of each node on that cheapest path
        openlist = PriorityQueue()
        closedlist = []
        openlist.put(begin, 0)
        glookup[begin] = 0
        while not openlist.empty():
            currentNode = openlist.get()
            if currentNode == end:
                return self.__path(pdlookup, begin, end)
            closedlist.append(currentNode)
            self.__expandNode(currentNode, glookup, pdlookup, openlist, closedlist, end)
        return None

    def __path(self, pdlookup, begin, end):
        # follow the predecessor chain back from end to begin
        route = [end]
        node = end
        while node != begin:
            node = pdlookup[node]
            route.append(node)
        return list(reversed(route))

    def __expandNode(self, node, glookup, pdlookup, openlist, closedlist, end):
        for successor in self.get_successors(node):
            if successor in closedlist:
                continue
            tentative_g = glookup[node] + self.cost(node, successor)
            if successor in openlist and glookup[successor] <= tentative_g:
                continue
            pdlookup[successor] = node
            glookup[successor] = tentative_g
            openlist.put(successor, tentative_g + self.esteemed_cost(successor, end))

    def cost(self, node1, node2):
        raise NotImplementedError("cost must be overwritten")

    def esteemed_cost(self, node, end):
        raise NotImplementedError("esteemed_cost must be overwritten")

    def get_successors(self, node):
        raise NotImplementedError("get_successors must be overwritten")


if __name__ == '__main__':
    # Testing: uniform step cost on an unbounded grid with 8-way movement
    def addnodes(*nodes):
        result = (0, 0)
        for node in nodes:
            result = (result[0] + node[0], result[1] + node[1])
        return result

    class Test(AStar):
        def cost(self, node1, node2):
            return 1

        def esteemed_cost(self, node, end):
            return sqrt((node[0] - end[0]) ** 2 + (node[1] - end[1]) ** 2)

        def get_successors(self, node):
            return map(lambda offset: addnodes(offset, node),
                       [(1, 0), (0, 1), (-1, 0), (0, -1), (1, 1), (1, -1), (-1, -1), (-1, 1)])

    test = Test()
    print test.solve((1, 1), (3, 5))
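
A note on the queue: PriorityQueue keeps a plain list and re-sorts it on every put, which is fine for the small test grid but costs O(n log n) per insertion. A heapq-backed queue with lazy deletion of stale entries is a common alternative; the sketch below is illustrative (the name HeapPriorityQueue and the lazy-deletion scheme are not part of the file above) and keeps the put/get/empty/in interface that AStar relies on.

import heapq

class HeapPriorityQueue:
    # Sketch of a heap-backed replacement for PriorityQueue (same interface AStar uses).
    # Assumes nodes are hashable; outdated heap entries are skipped lazily in get().
    def __init__(self):
        self.heap = []
        self.plookup = {}          # current best priority per queued node

    def put(self, node, priority):
        self.plookup[node] = priority
        heapq.heappush(self.heap, (priority, node))

    def get(self):
        while True:
            priority, node = heapq.heappop(self.heap)
            if self.plookup.get(node) == priority:   # skip entries superseded by a later put()
                del self.plookup[node]
                return node

    def __contains__(self, node):
        return node in self.plookup

    def empty(self):
        return len(self.plookup) == 0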

fizzbuzz-oneline.py (new file, 1 line)

@@ -0,0 +1 @@
print reduce(lambda x,y:x+"\n"+y, [ (((str(i),"Fizz")[i%3==0],"Buzz")[i%5==0],"FizzBuzz")[i%15==0] for i in range(1,101) ], "")[1:]
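
The one-liner works through the tuple-indexing trick: (a, b)[cond] evaluates to b when cond is true, since True indexes as 1. The nested tuples successively replace str(i) with "Fizz", "Buzz" and finally "FizzBuzz"; reduce joins the 100 results with newlines, and the trailing [1:] drops the newline contributed by the empty seed string. A readable expansion of the same logic, kept in Python 2 to match the file:

# expanded, line-by-line version of the one-liner above
lines = []
for i in range(1, 101):
    word = str(i)
    if i % 3 == 0: word = "Fizz"
    if i % 5 == 0: word = "Buzz"
    if i % 15 == 0: word = "FizzBuzz"
    lines.append(word)
print "\n".join(lines)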

gerlang.py (new file, 197 lines)

@@ -0,0 +1,197 @@
class Analyzer:
    # cursor over a sequence: next() peeks, take() consumes
    def __init__(self, itr):
        self.itr = itr

    def next(self, amount=1):
        if len(self.itr) == 0: return None
        if amount == 1: return self.itr[0]
        return self.itr[:amount]

    def take(self, amount=1):
        m, self.itr = self.next(amount), self.itr[amount:]
        return m

    def __len__(self):
        return len(self.itr)


class StringAnalyzer(Analyzer):
    def nextOrd(self):
        m = self.next()
        if m is None: return None
        return ord(m)

    def between(self, cmin, cmax):
        # True if the next character lies in the inclusive range [cmin, cmax]
        c = self.nextOrd()
        if c is None: return False
        return c >= ord(cmin) and c <= ord(cmax)

    def is_a(self, c):
        return self.next() == c


class TokenListAnalyzer(Analyzer):
    def takeUntilType(self, end):
        # consume tokens up to, but not including, the first token of type `end`
        t = []
        while self.next() is not None and self.next()[0] != end:
            t.append(self.take())
        return t
class Lexer:
    keywords = ["setze", "auf", "durch", "schreibe"]
    operators = ["plus", "minus", "mal", "geteilt"]
    IDENT = 0
    KEYWORD = 1
    INT = 2
    FLOAT = 3
    OP = 4
    BRACE_OPEN = 5
    BRACE_CLOSE = 6
    NEWLINE = 7

    def lex(self, source):
        tokens = []
        sa = StringAnalyzer(source)
        braces = 0
        while len(sa) != 0:
            if sa.between('a', 'z') or sa.between('A', 'Z'):  # identifier or keyword
                ident = ""
                while sa.between('a', 'z') or sa.between('A', 'Z') or sa.between('0', '9') or sa.is_a('_'):
                    ident += sa.take()
                if ident.lower() in self.keywords:
                    tokens.append((self.KEYWORD, ident.lower()))
                elif ident.lower() in self.operators:
                    tokens.append((self.OP, ident.lower()))
                else:
                    tokens.append((self.IDENT, ident))
            elif sa.between('0', '9'):  # number, with ',' as the decimal separator
                num = ""
                t = (self.INT, int)
                while sa.between('0', '9'):
                    num += sa.take()
                if sa.is_a(','):
                    t = (self.FLOAT, float)
                    sa.take()
                    num += "."
                    while sa.between('0', '9'):
                        num += sa.take()
                tokens.append((t[0], t[1](num)))
            elif sa.is_a('('):
                tokens.append((self.BRACE_OPEN, braces))
                braces += 1
                sa.take()
            elif sa.is_a(')'):
                braces -= 1
                tokens.append((self.BRACE_CLOSE, braces))
                sa.take()
            elif sa.is_a('\n'):
                tokens.append((self.NEWLINE,))
                sa.take()
            elif sa.is_a(' ') or sa.is_a('\t') or sa.is_a('\r'):
                sa.take()
            else:
                raise ParserException("WTF is %s" % sa.take())
        return tokens
class ParserException(Exception):
    pass


class Parser:
    def parse(self, tokens):
        block = BlockTerm()
        ta = TokenListAnalyzer(tokens)
        while len(ta) > 0:
            if ta.next()[0] == Lexer.KEYWORD:
                if ta.next()[1] == "setze":
                    # "setze <ident> auf <term>" -- assignment
                    ta.take()
                    if ta.next()[0] != Lexer.IDENT:
                        raise ParserException("missing identifier after setze")
                    ident = ta.take()[1]
                    if ta.next()[0] != Lexer.KEYWORD or ta.next()[1] != "auf":
                        raise ParserException("missing auf after identifier")
                    ta.take()
                    term = self.__parseTerm(ta.takeUntilType(Lexer.NEWLINE))
                    ta.take()
                    block.append(AssignmentTerm(ident, term))
                elif ta.next()[1] == "schreibe":
                    # "schreibe <term>" -- print
                    ta.take()
                    term = self.__parseTerm(ta.takeUntilType(Lexer.NEWLINE))
                    block.append(PrintTerm(term))
            elif ta.next()[0] == Lexer.NEWLINE:
                ta.take()
        return block

    def __parseTerm(self, tokens):
        t = tokens[0]
        if t[0] == Lexer.IDENT:
            return IdentifierTerm(t[1])
        elif t[0] == Lexer.INT or t[0] == Lexer.FLOAT:
            return ValueTerm(t[1])
        else:
            raise ParserException("Unexpected token %s" % t)
class Context():
    pass


class SubContext(Context):
    pass


class Term:
    def run(self, context):
        raise NotImplementedError("run must be overwritten")


class IdentifierTerm(Term):
    def __init__(self, ident):
        self.ident = ident

    def run(self, context):
        return context[self.ident]


class ValueTerm(Term):
    def __init__(self, value):
        self.value = value

    def run(self, context):
        return self.value


class AssignmentTerm(Term):
    def __init__(self, ident, term):
        self.ident = ident
        self.term = term

    def run(self, context):
        context[self.ident] = self.term.run(context)
        return None


class PrintTerm(Term):
    def __init__(self, term):
        self.term = term

    def run(self, context):
        print self.term.run(context)
        return None


class BlockTerm(Term):
    def __init__(self):
        self.terms = []

    def append(self, item):
        self.terms.append(item)

    def run(self, context):
        result = None
        for term in self.terms:
            result = term.run(context)
        return result
def main():
    # tiny REPL: lex, parse and run one line of gerlang at a time, sharing one context
    context = {}
    while True:
        code = raw_input(">>> ")
        try:
            tokens = Lexer().lex(code)
            term = Parser().parse(tokens)
            term.run(context)
        except Exception, e:
            print e


if __name__ == '__main__':
    main()
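
A minimal usage sketch of the pipeline outside the REPL: the parser currently accepts two statement forms, assignment ("setze <name> auf <value>") and output ("schreibe <name or literal>"), and __parseTerm only looks at the first token, so the operator keywords are lexed but never evaluated.

# usage sketch: lex and parse gerlang source directly, sharing one context dict
context = {}
Parser().parse(Lexer().lex("setze x auf 5")).run(context)   # binds x = 5 in context
Parser().parse(Lexer().lex("schreibe x")).run(context)      # prints 5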