tokenizer working

git-svn-id: http://galileo.dmi.unict.it/svn/relational/trunk@109 014f5005-505e-4b48-8d0a-63407b615a7c
master
LtWorf 2009-03-16 16:30:41 +07:00
parent 4c03931fe5
commit 012607f876
1 changed file with 52 additions and 38 deletions

@@ -154,12 +154,15 @@ def tokenize(expression):
4 previous stuff was a binary operator
'''
while len(expression)>0:
print "Expression", expression
print "Items" ,items
if expression.startswith('('): #Parenthesis state
state=2
par_count=0 #Count of parenthesis
end=0
for i in len(expression):
for i in range(len(expression)):
if expression[i]=='(':
par_count+=1
elif expression[i]==')':
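Note on the hunk above: in the old line, `for i in len(expression)` raises `TypeError: 'int' object is not iterable`; the commit wraps the length in `range()`. A minimal standalone sketch of the same balanced-parenthesis scan, assuming the expression starts with '(' (the helper name is hypothetical, not part of the commit):

# Hypothetical helper mirroring the scan above: return the index of the ')'
# that closes the '(' at position 0, or -1 if the parentheses never balance.
def matching_paren(expression):
    par_count=0
    for i in range(len(expression)):
        if expression[i]=='(':
            par_count+=1
        elif expression[i]==')':
            par_count-=1
            if par_count==0:
                return i
    return -1

print matching_paren("((a - b) - c) - d") #prints 12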
@@ -168,21 +171,29 @@ def tokenize(expression):
end=i
break
items.append(tokenize(expression[1:end]))
epression=expression[end+1:].strip()
expression=expression[end+1:].strip()
elif expression.startswith("σ"): or expression.startswith("π") or expression.startswith("ρ"): #Unary
elif expression.startswith("σ") or expression.startswith("π") or expression.startswith("ρ"): #Unary
items.append(expression[0:2]) #Adding operator in the top of the list
expression=expression[2:].strip() #Removing operator from the expression
par=expression.find('(')
items.append(expression[:par]) #Inserting parameter of the operator
expression=expression[par:].strip() #Removing parameter from the expression
elif expression.startswith("*") or expression.startswith("-"):
items.append(expression[0])
expression=expression[1:].strip() #1 char from the expression
state=4
elif expression.startswith("") or expression.startswith(""): #Binary short
items.append(expression[0:3]) #Adding operator in the top of the list
expression=expression[3:].strip() #Removing operator from the expression
state=4
elif expression.startswith(""): #Binary long
i=expression.find("")
items.append(expression[:i+3])
expression=expression[i+3:].strip()
state=4
else: #Relation (hopefully)
if state==1: #Previous was a relation, appending to the last token
@@ -200,5 +211,8 @@ def tokenize(expression):
if __name__=="__main__":
#n=node(u"((a b) - c d) - b")
#n=node(u"((((((((((((2)))))))))))) - (3 * 5) - 2")
n=node(u"π a,b (d-a*b)")
print n.__str__()
#n=node(u"π a,b (d-a*b)")
#print n.__str__()
print tokenize("((a b) - c d) ᐅRIGHTᐊ a * (π a,b (a))")
#print tokenize("(a)")