28 changes: 28 additions & 0 deletions labs/04/ac-lexical-analyzer-main/Makefile
@@ -0,0 +1,28 @@
# Makefile for the 'ac' lexical analyzer
# (recipe lines below must be indented with a TAB, as required by make)

CC=gcc
LEX=lex
LEXFLAGS=
OUT=lex_analaizer

all: $(OUT)

$(OUT): lex_analaizer.l
	$(LEX) $(LEXFLAGS) lex_analaizer.l
	$(CC) lex.yy.c -o $(OUT)

run: $(OUT)
	@python code_generator.py > example.ac
	@echo '=== INPUT (example.ac) ==='
	@cat example.ac
	@printf '\n=== OUTPUT (Tokens) ===\n'
	@./$(OUT) example.ac

clean:
	rm -f lex.yy.c $(OUT) example.ac

testcase: $(OUT)
	@echo '=== INPUT (test_case.ac) ==='
	@cat test_case.ac
	@printf '\n=== OUTPUT (Tokens) ===\n'
	@./$(OUT) test_case.ac
54 changes: 54 additions & 0 deletions labs/04/ac-lexical-analyzer-main/README.md
@@ -0,0 +1,54 @@
# AC Lexical Analyzer

This project contains a lexical analyzer for the "ac" programming language, implemented using Lex.

## 📚 Language Description

The language has the following lexical rules:
- `f` → float declaration → `floatdcl`
- `i` → int declaration → `intdcl`
- `p` → print statement → `print`
- Any other single lowercase letter (i.e. not `f`, `i`, or `p`) → identifier → `id`
- Integers → one or more digits → `inum`
- Floats → digits followed by a dot and 1 to 5 digits → `fnum`
- `=` → assignment → `assign`
- `+` → addition → `plus`
- `//` followed by the rest of the line → comment → `COMMENT`
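
For example (an illustrative snippet, not a file in this repo), the input

```
// set and print a float
f x
x = 3.5
p x
```

produces the token stream `COMMENT`, `floatdcl`, `id`, `id`, `assign`, `fnum`, `print`, `id`, one token name per line.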

## 🛠️ How to Compile and Run

1. Compile the scanner:
```
make
```

2. Run using the provided code generator:
```
make run
```

3. Clean all generated files:
```
make clean
```
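
If you prefer to run the steps by hand, these are essentially the commands the Makefile invokes (assuming `lex`/`flex` and `gcc` are on your PATH):

```
lex lex_analaizer.l              # generates lex.yy.c
gcc lex.yy.c -o lex_analaizer    # builds the scanner
./lex_analaizer test_case.ac     # tokenize a file (reads stdin if no file is given)
```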

## 🧰 Makefile Commands
You can use the following commands from the terminal:

- `make` → Compile the lexical analyzer from `lex_analaizer.l`
- `make run` → Generate random input using `code_generator.py`, print it, and show the corresponding tokens
- `make testcase` → Use the provided `test_case.ac` file, show it as input, and display the resulting tokens
- `make clean` → Delete generated files (`lex.yy.c`, the compiled binary, and `example.ac`)
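
As a concrete example, `make testcase` first echoes the contents of `test_case.ac` and then prints one token name per line. Based on the rules in `lex_analaizer.l`, the token output for the provided file should begin roughly like this:

```
=== OUTPUT (Tokens) ===
COMMENT
COMMENT
floatdcl
id
COMMENT
intdcl
id
...
```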

## 📁 File Structure
### Existing Source Files:
- `lex_analaizer.l` → Lexical analyzer definition
- `Makefile` → Build and test automation
- `code_generator.py` → Script to generate random AC language code
- `test_case.ac` → A static test case used for validation
- `README.md` → This documentation

### Generated Files:
- `lex.yy.c` → C code generated by `lex`
- `lex_analaizer` → Compiled executable from `gcc`
- `example.ac` → Random input file generated by `make run`
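
For reference, a randomly generated `example.ac` produced by `code_generator.py` looks something like this (yours will differ):

```
//ZK3Q9X1LBM
f q
i t
m = 42
k = w * 17
p r
```

Note that `-`, `*`, and `/` are not in the lexer's token set, so lines using them are reported as `UNKNOWN TOKEN`.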
78 changes: 78 additions & 0 deletions labs/04/ac-lexical-analyzer-main/code_generator.py
@@ -0,0 +1,78 @@
import string
import random
import argparse

def id_generator(size=10, chars=string.ascii_uppercase + string.digits):
    """Return a random alphanumeric string, used here as comment text."""
    return ''.join(random.choice(chars) for _ in range(size))

# NOTE: the lexer only recognizes '+'; the other operators are still
# generated here and will show up as UNKNOWN TOKEN in the scanner output.
operators = ["+", "-", "*", "/"]

def get_comment_line():
    return "//%s" % id_generator()

def get_float_line():
    return "f %s" % random.choice(string.ascii_lowercase)

def get_integer_line():
    return "i %s" % random.choice(string.ascii_lowercase)

def get_assignment_line():
    # e.g. "x = 42"
    return "%s = %s" % (random.choice(string.ascii_lowercase),
                        random.randint(0, 100))

def get_assignment_line_2():
    # e.g. "x = y + 7"
    return "%s = %s %s %s" % (random.choice(string.ascii_lowercase),
                              random.choice(string.ascii_lowercase),
                              random.choice(operators),
                              random.randint(0, 100))

def get_print_line():
    return "p %s" % random.choice(string.ascii_lowercase)


parser = argparse.ArgumentParser(description='Generate random AC code')
parser.add_argument('--stress', dest='stress', action='store_true',
                    help='generate HUGE code to stress the lab')
args = parser.parse_args()

if args.stress:
    # Stress mode: write a large file instead of printing to stdout.
    with open("random_code.ac", "w+") as f:
        for _ in range(100000):
            f.write(get_comment_line() + "\n")
            f.write(get_float_line() + "\n")
            f.write(get_integer_line() + "\n")
            f.write(get_assignment_line() + "\n")
            f.write(get_assignment_line_2() + "\n")
            f.write(get_print_line() + "\n")
else:
    # Default mode: print one statement of each kind to stdout
    # ('make run' redirects this into example.ac).
    print(get_comment_line())
    print(get_float_line())
    print(get_integer_line())
    print(get_assignment_line())
    print(get_assignment_line_2())
    print(get_print_line())

42 changes: 42 additions & 0 deletions labs/04/ac-lexical-analyzer-main/lex_analaizer.l
@@ -0,0 +1,42 @@
%{
#include <stdio.h>
#include <string.h>

extern FILE *yyin;
%}

/* Definitions */
ID [a-eg-hj-oq-z]
DIGIT [0-9]
INT {DIGIT}+
FLOAT {DIGIT}+"."{DIGIT}{1,5}

%%
"//".* { printf("COMMENT\n"); }
"f" { printf("floatdcl\n"); }
"i" { printf("intdcl\n"); }
"p" { printf("print\n"); }
"=" { printf("assign\n"); }
"+" { printf("plus\n"); }
{FLOAT} { printf("fnum\n"); }
{INT} { printf("inum\n"); }
{ID} { printf("id\n"); }
[ \t\r\n]+ { /* Ignore whitespace */ }
. { printf("UNKNOWN TOKEN: %s\n", yytext); }
%%

int main(int argc, char **argv) {
    /* If a file name is given, scan it; otherwise read from stdin. */
    if (argc > 1) {
        yyin = fopen(argv[1], "r");
        if (!yyin) {
            perror("Error opening input file");
            return 1;
        }
    }
    yylex();
    return 0;
}

/* Return 1 so the scanner stops at end of input. */
int yywrap() {
    return 1;
}
16 changes: 16 additions & 0 deletions labs/04/ac-lexical-analyzer-main/test_case.ac
@@ -0,0 +1,16 @@
// basic code

//float b
f b

// integer a
i a

// a = 5
a = 5

// b = a + 3.2
b = a + 3.2

// print b
p b