From edc99f02cac53e9db7b3f26a841909893ae8d098 Mon Sep 17 00:00:00 2001
From: Gabriel Fredes <85911230+gabsfredes@users.noreply.github.com>
Date: Sun, 7 Jul 2024 21:46:16 -0300
Subject: [PATCH] LISPtoPOSTFIX

This example is detailed at https://github.com/gabsfredes/LISPtoPOSFIX.
It takes a LISP expression as input and generates an AST (abstract syntax
tree); the conversion to a postfix expression is then driven by that AST.
---
 example/LISPtoPOSFIX/LICENSE                  |  21 ++
 .../__pycache__/ast.cpython-311.pyc           | Bin 0 -> 3953 bytes
 .../__pycache__/parsetab.cpython-311.pyc      | Bin 0 -> 2803 bytes
 .../__pycache__/tokenizer.cpython-311.pyc     | Bin 0 -> 1585 bytes
 .../__pycache__/trabalho.cpython-311.pyc      | Bin 0 -> 1584 bytes
 example/LISPtoPOSFIX/input.txt                |   1 +
 example/LISPtoPOSFIX/parser.out               | 286 ++++++++++++++++++
 example/LISPtoPOSFIX/parser_lf.py             | 158 ++++++++++
 example/LISPtoPOSFIX/parsetab.py              |  36 +++
 9 files changed, 502 insertions(+)
 create mode 100644 example/LISPtoPOSFIX/LICENSE
 create mode 100644 example/LISPtoPOSFIX/__pycache__/ast.cpython-311.pyc
 create mode 100644 example/LISPtoPOSFIX/__pycache__/parsetab.cpython-311.pyc
 create mode 100644 example/LISPtoPOSFIX/__pycache__/tokenizer.cpython-311.pyc
 create mode 100644 example/LISPtoPOSFIX/__pycache__/trabalho.cpython-311.pyc
 create mode 100644 example/LISPtoPOSFIX/input.txt
 create mode 100644 example/LISPtoPOSFIX/parser.out
 create mode 100644 example/LISPtoPOSFIX/parser_lf.py
 create mode 100644 example/LISPtoPOSFIX/parsetab.py

diff --git a/example/LISPtoPOSFIX/LICENSE b/example/LISPtoPOSFIX/LICENSE
new file mode 100644
index 0000000..bc16ae5
--- /dev/null
+++ b/example/LISPtoPOSFIX/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2024 Gabriel Fredes
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
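For reference, this is roughly what the example should print for the input.txt shipped in this patch, based on the print statements in parser_lf.py further below. It is a sketch only: it assumes the script is run from the example/ directory so the hard-coded relative path LISPtoPOSFIX/input.txt resolves, the tree drawing and alignment are approximate, and PLY may additionally emit table-generation notices on a first run.

    $ python LISPtoPOSFIX/parser_lf.py
    Input: (/ 10 (* (+ 10 -5) 2))
    Tree:
    ├─/
    │ ├─10
    │ └─*
    │   ├─+
    │   │ ├─10
    │   │ └─-5
    │   └─2
    Postfix: 10 10 -5 + 2 * /

The postfix string comes from tradutor_toposfix, which for every BinaryOpNode emits the left operand, then the right operand, then the operator.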
diff --git a/example/LISPtoPOSFIX/__pycache__/ast.cpython-311.pyc b/example/LISPtoPOSFIX/__pycache__/ast.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..addc9ff825a68337492dec0a5c95f85b332f8e24 GIT binary patch literal 3953 zcmd5;-ESMm5#K#7k0+i!tPktcj?T@8Nhyl##BEwZEuflmtU{(gWETa&LbzxzDrJhK z_D-rrsKB`pP`MCL6$lWfRtgp{Tm)|aiah0w4ER7C;HMxDek0&FJ!NK(lt@_>j1~yG zyq%r>8t(jdW_JGB(Gez4zWMv+CtrsM`4&5k609>Xrzs(y5l%QwlU16a)aRAeKq3IV zl3+ZL2y&KCc`y;;YC_|oL>t!J;XIsKfE}I{VRPtuViiy;Qm5gPV)>o~JleLrA%%$}-H@TIZ z$(YIcxrJL!_SVgXYjeLznzr*^Zqw78MIQsv15yW}+WkOW@?B^5-i1#uK1%PV-I?u= zs(QP7{Uywhe86Z6x9E$ODfQg|rd3}~n>FUj5K55cG2wwP0>^I%Rx6Da#Y4Mlj9|;z z@jV>T3dZw2S509qIhL@DRN7iL)5b-C!Vc)g7YJFDdBL0jFhwWyykOdTUZKd$Lg|+?P7uWXl!1`aAO;p%KiA_{laQmY_ z&Hr(JulwQ6?VGOriC);-f*AO=kJxo!jw8mEEx_goDd@4I-ctr11Jy4u7n}x`tAIf$ z|6}VxPFS{`%4Uoy!#7+5ZQE#?XkvYn7AWq3j=w;_UtmY#76)r%@hq2Cmup6+JGXcw zWHFasx5b;VD+1Mi8wgmB-nFL|0)^WE)!pLd;_W9_%6g=tN8B0Ld>C`#_rsy`lcU2P zp(1_#bXd@?1qNhs9?2U>(4gXC6xjE2bUBNip z99^G+ojyZbl?CI8j1mkZWxlrshPWUyGCGX@Nt6KJCp@ojmfY!L_d)XTqU z>xIBIC~-I{zPHhUr{+unFF8CJuQgKpS316eslcZ+hChw)Z~DLJ+toky1cw7!u> zD25?W-MlhoNQa239&WoLf(MK($*)CpNZ7+?;N;GmVp-&lZy>2?Ww5@ z2g|fko&j<(w-A^~NOBDr`)@#8Qtjy8n|+jYuT?|(_U7L8Z$jrvp>xl*j=la;=lg}e z{UK=JEAl5RK3CR0sAwOQ8sB%Xv-G*vvv=jwpB1d4`FX0WMJigPq(%JEsls}3?eopD z7OiN}Qsa}BlMhA4jmyUS(Y%T&vc5_aGVb0!TFMZJJ9<9+OmcviGfw?WxY9Owu7U^V4)-{6rm6*m|lCTs>I_ zuAbXqT8mk3YB>X^mHC`>JDIV__gTJ2GC<2vCNM&X%Sh0<63tzU>6Fom(f{wvU0fKpl|XG^VLl?;?xznZ3n6<4kGlW>=-R>Pfc zs0v)Ds&~0sRXl(;%uF0 z3LV34oxIu|$Jsi8Sp&OuqEVD`NHh}e-RavMdNi^-0*6;Ay^OPUqK5E4T1h_%cGdz) zm*VO*jdTpwNDyTMabs0|U}w6(%KG_=ejd84&7Iwu+`Fdo6KDwEzGB literal 0 HcmV?d00001 diff --git a/example/LISPtoPOSFIX/__pycache__/parsetab.cpython-311.pyc b/example/LISPtoPOSFIX/__pycache__/parsetab.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..edbfad44ab2ce52dfc40ebe535c2a1ab2c817c44 GIT binary patch literal 2803 zcmcgs&rcgi6rTNUukqT(5D0;2YZ0gqP22R)3Pn+%Ucx3|5?dja>(m%_aZJEAV?)46 z6Csq6Lk&3p6SH*dyo z{FB=)BJ!|jzMGpD5c(aA#^b6F-uy-(^gYs03~5x1(&!kSLK^doBFH|Y<_;|Gkj2+6 z!HRT%r8!fSCZ-xA*Z!z&f8>S{QI@PMkCo+p;*2qA6k~PfDc2~knV%Em<-Q|?Zhji4 z5sL9KfkwJR7c}3={aIqQA6U;x+)MJcACh^}%5=s=-Kn!H6z0-MXLaX!5}=DPI>^;| z@ILB!^x2n;<|j3GST%P-&1ov;A{DpkuK%^-wtB^F`xTQ>%Zgp3-V@KLm|N?LqF{HC z{p9S&9~tcU@OY&7$M8r*Nn9O{C}BCIPN^X^B9AJcj}6y-YDk_KR-^KS5}u68;p-Dh zWKy0|VoFqzhvcSYD@u`-=4?@JJogTTT3W}8wU#2P$I)%n|5h{J698QR-2{jj?5j!4GLHeR!~~Q4Gt}#`BqT6B0A2t% znxKH;Oq*70p=2Ko95#l-#&Fme4m&8?y9IUA4FF4+u!LzX(P~ApX|WdbxJ3a$ry(Bn ztwF0HgSQ)_$oA_Q&0wLG4bJL#gR>gY5bC{UxSB49y<50)2Bid@w%KB57}^L!8)0Z8 z44Wh2kl1Pp_rSy{fL?$;0zo0j7lr7c{J{lTDbmqFD?1zx6L-G?-h(aK7xt}7fCoSd ziZh(Kgp!y^XR^87wpNl5D$X%`vlAf?@(xYi(4v` zY=yzMYk0@qZmF?uYY3Kx8nR=opNWU9r!~9SZ|pFa6=rupQ>X}`ndLGNXrZdPR?iva@6hnL;)nA5G3KFK9_kj}K>(@o_bJwUE6!868!>h+ADic14#@@CZ?cOP~LR{KM54ilS;F@^qG5J8pkTtg*;9#+KYW0kvYkmva^SeQ&9T z&>A&OAE6|Di6PFzv`USEGs+Z?(p@IWlDWeaVP?R2n|*MUE^<>GnLAAL>R1K~4S|?eLRMYe2~Z JAAFHC?!R2$j8Omp literal 0 HcmV?d00001 diff --git a/example/LISPtoPOSFIX/__pycache__/tokenizer.cpython-311.pyc b/example/LISPtoPOSFIX/__pycache__/tokenizer.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0e3f8a6d7b6062e27fc17cf4c2c097c71a7fa97c GIT binary patch literal 1585 zcmaJ>&2Jk;6rb5$?^o>DhBOf$LWY8Z6Jp>7G(s}9(;6jqWVaPswpuNoY2rrsR9Xc=*?9Shn#q`Yr77SI_vq(o0;c%^M3EW z`6ZLl5w!LndpG_~A@moYOqwtjj$IHA5kUkyXcrq8BO(|A5e<<@hD1_EipYjc6hk4Z zp%QI?42|f3I+-)(NE)Ox$pB`^Jm9>5D87j&X6}RVa56zgmf#jDW}otvdQpvpS|yU} 
zSGTIoMkH;OYpp1?RjoH*xK=f)tw^cVuGK2FW~7%tYgVo8aM%v5whrX@zxeQ?uf=d%M&PJm1`~+dYTagqmg7 zHaBan?ZDe^v^HvAn1OeTx}82{rS6?b4XlI{_A;FP26TTBAVh!9=kLAw!}49{zzNHP zOT)Qzc=;H1&?$%cg}BqlGQc6)NAdL+zqHr-l8)`w7^_7HQ1CPj{Zn({FD2Oo*} z|Nju4D^k7xR-@3incWU3qlJz`cWkGy{Eoj|xZ!mQ-JW9;kMYBw@?7JDbs3aJ!qIE6 z6iNQAPWKF5N?=jOJjTw0&IRZ`XDmcdB<)Bq{hEKY^jrSXtB>^ykEQkBrS&6eeaLD2 zZu8sCd--n~gGLyCF=@qIq;#D-B?uJ{&|W0RHRQu+#V-jMOWqO?lGg;HHF-}U17}{8 zOkmBtDRI@s`Ohn{v*UV<#z`8-IKDgMPA0j{OCqkD6Lo#pBRz+H%1W@tnel%H7>O9; zk&N)-5jr>f8>yNug!1Thq-R5UsLzMW5F}-o&V|}AyAY;_xpQG=q>4B*LQ_Z!d^!ak qrSqXWk_D{s)iLN2mPcp`3j$u?yJOIimit~jQ0{66TA1QrOzD67uuNP4 literal 0 HcmV?d00001 diff --git a/example/LISPtoPOSFIX/__pycache__/trabalho.cpython-311.pyc b/example/LISPtoPOSFIX/__pycache__/trabalho.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6c3acd0d22e8f41728ec0d7bf94b7542bee4cbef GIT binary patch literal 1584 zcmaJ>&2Jk;6rb5$?^o>DrfDKRgbW1*Cyjv{& z?ea#oQICX;vet}J8`WAJhAUOQ+Ki-%c15dbjYutj)u@`AGq_w^fDaxz)UEir{L&!q5JayA$m5Ozx&=Vi+7woCoK0b z3}({d#Y5OZ$KqxFu$T`;fPJ)!V(;g_u~vHJ72Ui#5vAWq>p8Rfb1C^M<=z zD+*D{v3G15iB8+KT`%Gt+of4J=0qS$V7m7AOpUTMeG1(#0svn^s(&G@^=rSj2C}jz zz2Lb8@m~b+0&yvk?S$~XCe8rPIbN*KBKcTUhH1*T1@N9h^RIS1WmjD_eQK{-%Mf8`%8{GEUJ)+6=YBVqNiuzDb@4j7F; zt^c@wH~&MuUk~FiCasu@q>gjD1gT;P+KI%#yJ@>VjAs0hfU)E$0U>!zAY0?-1S)Xq zLCFN>)RPk5y14$?O>A$u9<}2ljdL9PPFaviZL=#8-FW!^}|Tab}1nkmlHQ t0xC-9LwP82SYfLpPz5Xw(FEo=JjZrNpduyrGruR@QTCKD#lD!*{{X}1OO^lt literal 0 HcmV?d00001 diff --git a/example/LISPtoPOSFIX/input.txt b/example/LISPtoPOSFIX/input.txt new file mode 100644 index 0000000..518f4cc --- /dev/null +++ b/example/LISPtoPOSFIX/input.txt @@ -0,0 +1 @@ +(/ 10 (* (+ 10 -5) 2)) \ No newline at end of file diff --git a/example/LISPtoPOSFIX/parser.out b/example/LISPtoPOSFIX/parser.out new file mode 100644 index 0000000..9de2d8a --- /dev/null +++ b/example/LISPtoPOSFIX/parser.out @@ -0,0 +1,286 @@ +Created by PLY version 3.11 (http://www.dabeaz.com/ply) + +Grammar + +Rule 0 S' -> E +Rule 1 E -> ABRE_PAREN MAIS E E FECHA_PAREN +Rule 2 E -> ABRE_PAREN MENOS E E FECHA_PAREN +Rule 3 E -> ABRE_PAREN VEZES E E FECHA_PAREN +Rule 4 E -> ABRE_PAREN DIVIDIR E E FECHA_PAREN +Rule 5 E -> ID +Rule 6 E -> NUMERO + +Terminals, with rules where they appear + +ABRE_PAREN : 1 2 3 4 +DIVIDIR : 4 +FECHA_PAREN : 1 2 3 4 +ID : 5 +MAIS : 1 +MENOS : 2 +NUMERO : 6 +VEZES : 3 +error : + +Nonterminals, with rules where they appear + +E : 1 1 2 2 3 3 4 4 0 + +Parsing method: LALR + +state 0 + + (0) S' -> . E + (1) E -> . ABRE_PAREN MAIS E E FECHA_PAREN + (2) E -> . ABRE_PAREN MENOS E E FECHA_PAREN + (3) E -> . ABRE_PAREN VEZES E E FECHA_PAREN + (4) E -> . ABRE_PAREN DIVIDIR E E FECHA_PAREN + (5) E -> . ID + (6) E -> . NUMERO + + ABRE_PAREN shift and go to state 2 + ID shift and go to state 3 + NUMERO shift and go to state 4 + + E shift and go to state 1 + +state 1 + + (0) S' -> E . + + + +state 2 + + (1) E -> ABRE_PAREN . MAIS E E FECHA_PAREN + (2) E -> ABRE_PAREN . MENOS E E FECHA_PAREN + (3) E -> ABRE_PAREN . VEZES E E FECHA_PAREN + (4) E -> ABRE_PAREN . DIVIDIR E E FECHA_PAREN + + MAIS shift and go to state 5 + MENOS shift and go to state 6 + VEZES shift and go to state 7 + DIVIDIR shift and go to state 8 + + +state 3 + + (5) E -> ID . + + $end reduce using rule 5 (E -> ID .) + ABRE_PAREN reduce using rule 5 (E -> ID .) + ID reduce using rule 5 (E -> ID .) + NUMERO reduce using rule 5 (E -> ID .) + FECHA_PAREN reduce using rule 5 (E -> ID .) + + +state 4 + + (6) E -> NUMERO . + + $end reduce using rule 6 (E -> NUMERO .) 
+    ABRE_PAREN      reduce using rule 6 (E -> NUMERO .)
+    ID              reduce using rule 6 (E -> NUMERO .)
+    NUMERO          reduce using rule 6 (E -> NUMERO .)
+    FECHA_PAREN     reduce using rule 6 (E -> NUMERO .)
+
+
+state 5
+
+    (1) E -> ABRE_PAREN MAIS . E E FECHA_PAREN
+    (1) E -> . ABRE_PAREN MAIS E E FECHA_PAREN
+    (2) E -> . ABRE_PAREN MENOS E E FECHA_PAREN
+    (3) E -> . ABRE_PAREN VEZES E E FECHA_PAREN
+    (4) E -> . ABRE_PAREN DIVIDIR E E FECHA_PAREN
+    (5) E -> . ID
+    (6) E -> . NUMERO
+
+    ABRE_PAREN      shift and go to state 2
+    ID              shift and go to state 3
+    NUMERO          shift and go to state 4
+
+    E               shift and go to state 9
+
+state 6
+
+    (2) E -> ABRE_PAREN MENOS . E E FECHA_PAREN
+    (1) E -> . ABRE_PAREN MAIS E E FECHA_PAREN
+    (2) E -> . ABRE_PAREN MENOS E E FECHA_PAREN
+    (3) E -> . ABRE_PAREN VEZES E E FECHA_PAREN
+    (4) E -> . ABRE_PAREN DIVIDIR E E FECHA_PAREN
+    (5) E -> . ID
+    (6) E -> . NUMERO
+
+    ABRE_PAREN      shift and go to state 2
+    ID              shift and go to state 3
+    NUMERO          shift and go to state 4
+
+    E               shift and go to state 10
+
+state 7
+
+    (3) E -> ABRE_PAREN VEZES . E E FECHA_PAREN
+    (1) E -> . ABRE_PAREN MAIS E E FECHA_PAREN
+    (2) E -> . ABRE_PAREN MENOS E E FECHA_PAREN
+    (3) E -> . ABRE_PAREN VEZES E E FECHA_PAREN
+    (4) E -> . ABRE_PAREN DIVIDIR E E FECHA_PAREN
+    (5) E -> . ID
+    (6) E -> . NUMERO
+
+    ABRE_PAREN      shift and go to state 2
+    ID              shift and go to state 3
+    NUMERO          shift and go to state 4
+
+    E               shift and go to state 11
+
+state 8
+
+    (4) E -> ABRE_PAREN DIVIDIR . E E FECHA_PAREN
+    (1) E -> . ABRE_PAREN MAIS E E FECHA_PAREN
+    (2) E -> . ABRE_PAREN MENOS E E FECHA_PAREN
+    (3) E -> . ABRE_PAREN VEZES E E FECHA_PAREN
+    (4) E -> . ABRE_PAREN DIVIDIR E E FECHA_PAREN
+    (5) E -> . ID
+    (6) E -> . NUMERO
+
+    ABRE_PAREN      shift and go to state 2
+    ID              shift and go to state 3
+    NUMERO          shift and go to state 4
+
+    E               shift and go to state 12
+
+state 9
+
+    (1) E -> ABRE_PAREN MAIS E . E FECHA_PAREN
+    (1) E -> . ABRE_PAREN MAIS E E FECHA_PAREN
+    (2) E -> . ABRE_PAREN MENOS E E FECHA_PAREN
+    (3) E -> . ABRE_PAREN VEZES E E FECHA_PAREN
+    (4) E -> . ABRE_PAREN DIVIDIR E E FECHA_PAREN
+    (5) E -> . ID
+    (6) E -> . NUMERO
+
+    ABRE_PAREN      shift and go to state 2
+    ID              shift and go to state 3
+    NUMERO          shift and go to state 4
+
+    E               shift and go to state 13
+
+state 10
+
+    (2) E -> ABRE_PAREN MENOS E . E FECHA_PAREN
+    (1) E -> . ABRE_PAREN MAIS E E FECHA_PAREN
+    (2) E -> . ABRE_PAREN MENOS E E FECHA_PAREN
+    (3) E -> . ABRE_PAREN VEZES E E FECHA_PAREN
+    (4) E -> . ABRE_PAREN DIVIDIR E E FECHA_PAREN
+    (5) E -> . ID
+    (6) E -> . NUMERO
+
+    ABRE_PAREN      shift and go to state 2
+    ID              shift and go to state 3
+    NUMERO          shift and go to state 4
+
+    E               shift and go to state 14
+
+state 11
+
+    (3) E -> ABRE_PAREN VEZES E . E FECHA_PAREN
+    (1) E -> . ABRE_PAREN MAIS E E FECHA_PAREN
+    (2) E -> . ABRE_PAREN MENOS E E FECHA_PAREN
+    (3) E -> . ABRE_PAREN VEZES E E FECHA_PAREN
+    (4) E -> . ABRE_PAREN DIVIDIR E E FECHA_PAREN
+    (5) E -> . ID
+    (6) E -> . NUMERO
+
+    ABRE_PAREN      shift and go to state 2
+    ID              shift and go to state 3
+    NUMERO          shift and go to state 4
+
+    E               shift and go to state 15
+
+state 12
+
+    (4) E -> ABRE_PAREN DIVIDIR E . E FECHA_PAREN
+    (1) E -> . ABRE_PAREN MAIS E E FECHA_PAREN
+    (2) E -> . ABRE_PAREN MENOS E E FECHA_PAREN
+    (3) E -> . ABRE_PAREN VEZES E E FECHA_PAREN
+    (4) E -> . ABRE_PAREN DIVIDIR E E FECHA_PAREN
+    (5) E -> . ID
+    (6) E -> . NUMERO
+
+    ABRE_PAREN      shift and go to state 2
+    ID              shift and go to state 3
+    NUMERO          shift and go to state 4
+
+    E               shift and go to state 16
+
+state 13
+
+    (1) E -> ABRE_PAREN MAIS E E . FECHA_PAREN
+
+    FECHA_PAREN     shift and go to state 17
+
+
+state 14
+
+    (2) E -> ABRE_PAREN MENOS E E . FECHA_PAREN
+
+    FECHA_PAREN     shift and go to state 18
+
+
+state 15
+
+    (3) E -> ABRE_PAREN VEZES E E . FECHA_PAREN
+
+    FECHA_PAREN     shift and go to state 19
+
+
+state 16
+
+    (4) E -> ABRE_PAREN DIVIDIR E E . FECHA_PAREN
+
+    FECHA_PAREN     shift and go to state 20
+
+
+state 17
+
+    (1) E -> ABRE_PAREN MAIS E E FECHA_PAREN .
+
+    $end            reduce using rule 1 (E -> ABRE_PAREN MAIS E E FECHA_PAREN .)
+    ABRE_PAREN      reduce using rule 1 (E -> ABRE_PAREN MAIS E E FECHA_PAREN .)
+    ID              reduce using rule 1 (E -> ABRE_PAREN MAIS E E FECHA_PAREN .)
+    NUMERO          reduce using rule 1 (E -> ABRE_PAREN MAIS E E FECHA_PAREN .)
+    FECHA_PAREN     reduce using rule 1 (E -> ABRE_PAREN MAIS E E FECHA_PAREN .)
+
+
+state 18
+
+    (2) E -> ABRE_PAREN MENOS E E FECHA_PAREN .
+
+    $end            reduce using rule 2 (E -> ABRE_PAREN MENOS E E FECHA_PAREN .)
+    ABRE_PAREN      reduce using rule 2 (E -> ABRE_PAREN MENOS E E FECHA_PAREN .)
+    ID              reduce using rule 2 (E -> ABRE_PAREN MENOS E E FECHA_PAREN .)
+    NUMERO          reduce using rule 2 (E -> ABRE_PAREN MENOS E E FECHA_PAREN .)
+    FECHA_PAREN     reduce using rule 2 (E -> ABRE_PAREN MENOS E E FECHA_PAREN .)
+
+
+state 19
+
+    (3) E -> ABRE_PAREN VEZES E E FECHA_PAREN .
+
+    $end            reduce using rule 3 (E -> ABRE_PAREN VEZES E E FECHA_PAREN .)
+    ABRE_PAREN      reduce using rule 3 (E -> ABRE_PAREN VEZES E E FECHA_PAREN .)
+    ID              reduce using rule 3 (E -> ABRE_PAREN VEZES E E FECHA_PAREN .)
+    NUMERO          reduce using rule 3 (E -> ABRE_PAREN VEZES E E FECHA_PAREN .)
+    FECHA_PAREN     reduce using rule 3 (E -> ABRE_PAREN VEZES E E FECHA_PAREN .)
+
+
+state 20
+
+    (4) E -> ABRE_PAREN DIVIDIR E E FECHA_PAREN .
+
+    $end            reduce using rule 4 (E -> ABRE_PAREN DIVIDIR E E FECHA_PAREN .)
+    ABRE_PAREN      reduce using rule 4 (E -> ABRE_PAREN DIVIDIR E E FECHA_PAREN .)
+    ID              reduce using rule 4 (E -> ABRE_PAREN DIVIDIR E E FECHA_PAREN .)
+    NUMERO          reduce using rule 4 (E -> ABRE_PAREN DIVIDIR E E FECHA_PAREN .)
+    FECHA_PAREN     reduce using rule 4 (E -> ABRE_PAREN DIVIDIR E E FECHA_PAREN .)
+
diff --git a/example/LISPtoPOSFIX/parser_lf.py b/example/LISPtoPOSFIX/parser_lf.py
new file mode 100644
index 0000000..d465a41
--- /dev/null
+++ b/example/LISPtoPOSFIX/parser_lf.py
@@ -0,0 +1,158 @@
+import ply.lex as lex
+import ply.yacc as yacc
+
+# Tokenizer
+# List with the token names
+tokens = (
+    "ID",
+    "NUMERO",
+    "MAIS",
+    "MENOS",
+    "VEZES",
+    "DIVIDIR",
+    "ABRE_PAREN",
+    "FECHA_PAREN",
+)
+
+# Regular expressions for the tokens
+# t_ is a special (reserved) prefix that tells PLY the name defines a token
+
+t_MAIS = r"\+"
+t_MENOS = r"-"  # The minus sign is a separate token to avoid ambiguity
+t_VEZES = r"\*"
+t_DIVIDIR = r"/"  # The division sign is a separate token to avoid ambiguity
+t_ABRE_PAREN = r"\("
+t_FECHA_PAREN = r"\)"
+
+
+# Function that handles numbers
+def t_NUMERO(t):
+    r'-?\d+(\.\d+)?'
+    # Accepts positive or negative values; keep floats as float, everything else as int
+    t.value = float(t.value) if "." in t.value else int(t.value)
+    return t
+
+# Function that handles IDs
+def t_ID(t):
+    r"-?[a-zA-Z]+"  # ASCII letters only (an optional leading minus sign is accepted)
+    return t
+
+# Ignore whitespace
+t_ignore = " \t"
+
+# Ignore comments
+def t_COMENTARIO(t):
+    r'\#.*'
+    pass
+
+# Line numbering
+def t_newline(t):
+    r"\n+"
+    t.lexer.lineno += len(
+        t.value
+    )  # Increment the line number for every newline found
+    # lexer is a global object that holds the state of the lexical analyzer
+    # lineno is the lexer attribute that stores the current line number
+
+
+# Error handling
+def t_error(t):
+    print("The illegal character '%s' was skipped" % t.value[0])
+    t.lexer.skip(1)  # Skip the illegal character
+
+# Build the lexer
+lexer = lex.lex()
+
+####################
+#   Tree classes   #
+####################
+####################
+# These don't take #
+# part in parsing; #
+# only in the tree #
+####################
+class ASTNode:
+    pass
+
+class BinaryOpNode(ASTNode):
+    def __init__(self, op, left, right):
+        self.op = op
+        self.left = left
+        self.right = right
+
+    def __repr__(self):
+        return f"({self.op} {self.left} {self.right})"
+
+class IdNode(ASTNode):
+    def __init__(self, name):
+        self.name = name
+
+    def __repr__(self):
+        return self.name
+
+class NumberNode(ASTNode):
+    def __init__(self, value):
+        self.value = value
+
+    def __repr__(self):
+        return str(self.value)
+
+def print_tree(node, indent="", is_right=False):
+    if isinstance(node, BinaryOpNode):
+        op_str = f"{indent}{'└─' if is_right else '├─'}{node.op}"
+        print(op_str)
+        new_indent = indent + ("  " if is_right else "│ ")
+        print_tree(node.left, new_indent, False)
+        print_tree(node.right, new_indent, True)
+    elif isinstance(node, IdNode) or isinstance(node, NumberNode):
+        node_str = f"{indent}{'└─' if is_right else '├─'}{node}"
+        print(node_str)
+
+def tradutor_toposfix(node):
+    if isinstance(node, BinaryOpNode):
+        left_expr = tradutor_toposfix(node.left)
+        right_expr = tradutor_toposfix(node.right)
+        return f"{left_expr} {right_expr} {node.op}"
+    elif isinstance(node, IdNode):
+        return node.name
+    elif isinstance(node, NumberNode):
+        return str(node.value)
+
+##################
+# --- Parser --- #
+##################
+
+# Production rules
+def p_E_binop(p):
+    '''E : ABRE_PAREN MAIS E E FECHA_PAREN
+         | ABRE_PAREN MENOS E E FECHA_PAREN
+         | ABRE_PAREN VEZES E E FECHA_PAREN
+         | ABRE_PAREN DIVIDIR E E FECHA_PAREN'''
+    p[0] = BinaryOpNode(p[2], p[3], p[4])
+
+def p_E_id(p):
+    'E : ID'
+    p[0] = IdNode(p[1])
+
+def p_E_numero(p):
+    'E : NUMERO'
+    p[0] = NumberNode(p[1])
+
+def p_error(p):
+    if p is None:
+        print("Syntax error: unexpected end of input")
+    else:
+        print(f"Syntax error: {p.value!r}")
+
+# Build the parser
+parser = yacc.yacc()
+
+# Read the input from the file input.txt
+with open('LISPtoPOSFIX/input.txt', 'r') as arquivo:
+    entrada = arquivo.read()
+
+# Run the parser on the input that was read
+ast = parser.parse(entrada)
+print("Input: %s\nTree:" % entrada)
+# Print the tree
+print_tree(ast)
+# Print the postfix expression
+print("Postfix: %s" % tradutor_toposfix(ast))
+
diff --git a/example/LISPtoPOSFIX/parsetab.py b/example/LISPtoPOSFIX/parsetab.py
new file mode 100644
index 0000000..24a2607
--- /dev/null
+++ b/example/LISPtoPOSFIX/parsetab.py
@@ -0,0 +1,36 @@
+
+# parsetab.py
+# This file is automatically generated. Do not edit.
+# pylint: disable=W,C,R
+_tabversion = '3.10'
+
+_lr_method = 'LALR'
+
+_lr_signature = 'ABRE_PAREN DIVIDIR FECHA_PAREN ID MAIS MENOS NUMERO VEZESE : ABRE_PAREN MAIS E E FECHA_PAREN\n | ABRE_PAREN MENOS E E FECHA_PAREN\n | ABRE_PAREN VEZES E E FECHA_PAREN\n | ABRE_PAREN DIVIDIR E E FECHA_PARENE : IDE : NUMERO'
+
+_lr_action_items = {'ABRE_PAREN':([0,3,4,5,6,7,8,9,10,11,12,17,18,19,20,],[2,-5,-6,2,2,2,2,2,2,2,2,-1,-2,-3,-4,]),'ID':([0,3,4,5,6,7,8,9,10,11,12,17,18,19,20,],[3,-5,-6,3,3,3,3,3,3,3,3,-1,-2,-3,-4,]),'NUMERO':([0,3,4,5,6,7,8,9,10,11,12,17,18,19,20,],[4,-5,-6,4,4,4,4,4,4,4,4,-1,-2,-3,-4,]),'$end':([1,3,4,17,18,19,20,],[0,-5,-6,-1,-2,-3,-4,]),'MAIS':([2,],[5,]),'MENOS':([2,],[6,]),'VEZES':([2,],[7,]),'DIVIDIR':([2,],[8,]),'FECHA_PAREN':([3,4,13,14,15,16,17,18,19,20,],[-5,-6,17,18,19,20,-1,-2,-3,-4,]),}
+
+_lr_action = {}
+for _k, _v in _lr_action_items.items():
+   for _x,_y in zip(_v[0],_v[1]):
+      if not _x in _lr_action: _lr_action[_x] = {}
+      _lr_action[_x][_k] = _y
+del _lr_action_items
+
+_lr_goto_items = {'E':([0,5,6,7,8,9,10,11,12,],[1,9,10,11,12,13,14,15,16,]),}
+
+_lr_goto = {}
+for _k, _v in _lr_goto_items.items():
+   for _x, _y in zip(_v[0], _v[1]):
+       if not _x in _lr_goto: _lr_goto[_x] = {}
+       _lr_goto[_x][_k] = _y
+del _lr_goto_items
+_lr_productions = [
+  ("S' -> E","S'",1,None,None,None),
+  ('E -> ABRE_PAREN MAIS E E FECHA_PAREN','E',5,'p_E_binop','parser_lf.py',126),
+  ('E -> ABRE_PAREN MENOS E E FECHA_PAREN','E',5,'p_E_binop','parser_lf.py',127),
+  ('E -> ABRE_PAREN VEZES E E FECHA_PAREN','E',5,'p_E_binop','parser_lf.py',128),
+  ('E -> ABRE_PAREN DIVIDIR E E FECHA_PAREN','E',5,'p_E_binop','parser_lf.py',129),
+  ('E -> ID','E',1,'p_E_id','parser_lf.py',133),
+  ('E -> NUMERO','E',1,'p_E_numero','parser_lf.py',137),
+]