LCOV - code coverage report
Current view:  top level - test/lexicalAnalysis - test_tokenStream.cc (source / functions)
Test:          coverage.info
Test Date:     2025-04-18 15:53:49

               Coverage   Total   Hit
Lines:          100.0 %      99    99
Functions:      100.0 %       2     2

            Line data    Source code
       1              : #include "../../inc/lexicalAnalysis/token.h"
       2              : #include "../../inc/lexicalAnalysis/tokenStream.h"
       3              : #include "../../inc/lexicalAnalysis/type.h"
       4              : #include <catch2/catch_test_macros.hpp>
                      : #include <limits>
       5              : #include <string>
       6              : #include <vector>
       7              : 
       8           63 : TEST_CASE("TokenStream operations", "[TokenStream]") {
       9           63 :   using namespace nicole;
      10              : 
      11              :   // Values of Location are not relevant here
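                      :   // Note: the accessors exercised below (eat, current, lookAhead,
                      :   // isCurrentTokenType, insertAfter) are checked through
                      :   // has_value()/error(), i.e. they appear to return an expected-like
                      :   // result instead of throwing on failure.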
      12              : 
      13           63 :   std::vector<Token> tokens = {
      14           63 :       Token(TokenType::ID, "var", Location{"", 0, 0}),
      15           63 :       Token(TokenType::ASSIGNMENT, "=", Location{"", 0, 0}),
      16           63 :       Token(TokenType::NUMBER_INT, "42", Location{"", 0, 0}),
      17           63 :       Token(TokenType::SEMICOLON, ";", Location{"", 0, 0})};
      18              : 
      19           63 :   TokenStream tokenStream(tokens);
      20              : 
      21           63 :   SECTION("Initial state") {
      22            9 :     REQUIRE(tokenStream.size() == tokens.size());
      23            9 :     REQUIRE(tokenStream.currentPos() == 0);
      24            9 :     REQUIRE_FALSE(tokenStream.isEnd());
      25            9 :   }
      26              : 
      27           63 :   SECTION("Eating tokens") {
      28            9 :     REQUIRE(tokenStream.eat().has_value());
      29            9 :     REQUIRE(tokenStream.currentPos() == 1);
      30            9 :     REQUIRE(tokenStream.eat().has_value());
      31            9 :     REQUIRE(tokenStream.currentPos() == 2);
      32            9 :   }
      33              : 
      34           63 :   SECTION("Current token") {
      35            9 :     auto currentToken = tokenStream.current();
      36            9 :     REQUIRE(currentToken.has_value());
      37            9 :     REQUIRE(currentToken->type() == TokenType::ID);
      38            9 :     REQUIRE(currentToken->raw() == "var");
      39            9 :   }
      40              : 
      41           63 :   SECTION("Look ahead") {
      42            9 :     auto lookaheadToken = tokenStream.lookAhead(2);
      43            9 :     REQUIRE(lookaheadToken.has_value());
      44            9 :     REQUIRE(lookaheadToken->type() == TokenType::NUMBER_INT);
      45            9 :     REQUIRE(lookaheadToken->raw() == "42");
      46            9 :   }
      47              : 
      48           63 :   SECTION("Is current token type") {
      49            9 :     auto isCurrentId = tokenStream.isCurrentTokenType(TokenType::ID);
      50            9 :     REQUIRE(isCurrentId.has_value());
      51            9 :     REQUIRE(isCurrentId.value() == true);
      52            9 :   }
      53              : 
      54           63 :   SECTION("Is token ahead before semicolon") {
      55            9 :     REQUIRE(tokenStream.isTokenAheadBeforeSemicolon(TokenType::NUMBER_INT));
      56            9 :     REQUIRE_FALSE(tokenStream.isTokenAheadBeforeSemicolon(TokenType::STRING));
      57            9 :   }
      58              : 
      59           63 :   SECTION("Insert after") {
      60            9 :     std::vector<Token> newTokens = {
      61            9 :         Token(TokenType::PRINT, "print", Location{"", 0, 0}),
      62            9 :         Token(TokenType::STRING, "\"Hello World\"", Location{"", 0, 0})};
      63            9 :     TokenStream newTokenStream(newTokens);
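                      :     // Assuming insertAfter splices the new tokens in directly after
                      :     // index 2, the stream should now read: var = 42 print "Hello World" ;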
      64            9 :     REQUIRE(tokenStream.insertAfter(newTokenStream, 2).has_value());
      65            9 :     REQUIRE(tokenStream.size() == tokens.size() + newTokens.size());
      66            9 :   }
      67           63 : }
      68              : 
      69           45 : TEST_CASE("TokenStream: Error handling", "[TokenStream]") {
      70           45 :   using namespace nicole;
      71              :   // Sample tokens for the tests
      72           45 :   std::vector<Token> tokens = {
      73           45 :       Token(TokenType::NUMBER_INT, "1", Location{"", 0, 0}),
      74           45 :       Token(TokenType::OPERATOR_ADD, "+", Location{"", 0, 0}),
      75           45 :       Token(TokenType::NUMBER_INT, "2", Location{"", 0, 0}),
      76           45 :       Token(TokenType::SEMICOLON, ";", Location{"", 0, 0})};
      77              : 
      78           45 :   TokenStream tokenStream(tokens);
      79              : 
      80           45 :   SECTION("Error in 'eat' when advancing past the end") {
      81              :     // Advance to the end of the stream
      82           45 :     while (!tokenStream.isEnd()) {
      83           36 :       auto result = tokenStream.eat();
      84           36 :       REQUIRE(result.has_value());
      85           36 :     }
      86              :     // Try to advance past the end
      87            9 :     auto result = tokenStream.eat();
      88            9 :     REQUIRE_FALSE(result.has_value());
      89            9 :     REQUIRE(result.error().type() == ERROR_TYPE::EAT);
      90            9 :   }
      91              : 
      92           45 :   SECTION("Error in 'current' when accessing out of bounds") {
      93              :     // Advance to the end of the stream
      94           45 :     while (!tokenStream.isEnd()) {
      95           36 :       auto result = tokenStream.eat();
      96           36 :       REQUIRE(result.has_value());
      97           36 :     }
      98              :     // Try to get the current token past the end
      99            9 :     auto result = tokenStream.current();
     100            9 :     REQUIRE_FALSE(result.has_value());
     101            9 :     REQUIRE(result.error().type() == ERROR_TYPE::CURRENT);
     102            9 :   }
     103              : 
     104           45 :   SECTION("Error in 'lookAhead' with an invalid position") {
     105              :     // Try to look past the end of the stream
     106            9 :     auto result = tokenStream.lookAhead(tokens.size());
     107            9 :     REQUIRE_FALSE(result.has_value());
     108            9 :     REQUIRE(result.error().type() == ERROR_TYPE::LOOK_AHEAD);
     109            9 :   }
     110              : 
     111           45 :   SECTION(
     112            9 :       "Error in 'isCurrentTokenType' when checking the type out of bounds") {
     113              :     // Advance to the end of the stream
     114           45 :     while (!tokenStream.isEnd()) {
     115           36 :       auto result = tokenStream.eat();
     116           36 :       REQUIRE(result.has_value());
     117           36 :     }
     118              :     // Try to check the current token's type past the end
     119            9 :     auto result = tokenStream.isCurrentTokenType(TokenType::NUMBER_INT);
     120            9 :     REQUIRE_FALSE(result.has_value());
     121            9 :     REQUIRE(result.error().type() == ERROR_TYPE::IS_CURRENT_TOKEN_TYPE);
     122            9 :   }
     123              : 
     124           45 :   SECTION("Error in 'insertAfter' with an invalid position") {
     125            9 :     TokenStream newTokens(
     126            9 :         {Token(TokenType::NUMBER_INT, "3", Location{"", 0, 0})});
     127              :     // Try to insert at a position that is guaranteed to be out of range
     128            9 :     auto result = tokenStream.insertAfter(
     129            9 :         newTokens, std::numeric_limits<std::size_t>::max());
     130            9 :     REQUIRE_FALSE(result.has_value());
     131            9 :     REQUIRE(result.error().type() == ERROR_TYPE::INSERT_AFTER);
     132            9 :   }
     133           45 : }
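                      : 
                      : // Typical caller pattern for this API (a sketch, assuming the
                      : // expected-like return type suggested by the tests above):
                      : //
                      : //   while (!stream.isEnd()) {
                      : //     auto token = stream.current();
                      : //     if (!token) { /* report token.error() */ break; }
                      : //     // ... consume *token ...
                      : //     if (!stream.eat()) break;
                      : //   }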
        

Generated by: LCOV version 2.0-1