Thanks to visit codestin.com
Credit goes to www.scribd.com

0% found this document useful (0 votes)
132 views5 pages

CC

The document is a C++ program that implements a lexical analyzer to identify different types of tokens such as identifiers, keywords, literals, operators, and comments from a given source code file. It defines various functions to check the characteristics of tokens and processes the input file to categorize each token accordingly. The program also includes functionality to read and ignore comments from a separate 'Readme.txt' file.
Copyright
© All Rights Reserved
We take content rights seriously. If you suspect this is your content, claim it here.
Available Formats
Download as TXT or PDF, or read online on Scribd
0% found this document useful (0 votes)
132 views5 pages

CC

The document is a C++ program that implements a lexical analyzer to identify different types of tokens such as identifiers, keywords, literals, operators, and comments from a given source code file. It defines various functions to check the characteristics of tokens and processes the input file to categorize each token accordingly. The program also includes functionality to read and ignore comments from a separate 'Readme.txt' file.
Copyright
© All Rights Reserved
We take content rights seriously. If you suspect this is your content, claim it here.
Available Formats
Download as TXT or PDF, or read online on Scribd
You are on page 1/ 5

#include <algorithm>
#include <cstdlib>
#include <ctype.h>
#include <fstream>
#include <iostream>
#include <string>
#include <vector>

using namespace std;

/*
 * Returns true when `str` is a valid C-style identifier: non-empty,
 * not starting with a digit, and containing only alphanumerics and
 * underscores.
 *
 * Fixes: the original indexed str[0] on an empty string (UB), allowed
 * an underscore only as the very first character (so "a_b" was wrongly
 * rejected), passed possibly-negative chars to isdigit/isalnum (UB),
 * and compared a signed counter against str.size().
 */
bool is_My_Identifier(const string &str)
{
    if (str.empty())
        return false; // guards the first-character checks below
    if (isdigit(static_cast<unsigned char>(str[0])))
        return false; // identifiers may not start with a digit

    for (char c : str)
        if (!isalnum(static_cast<unsigned char>(c)) && c != '_')
            return false;

    return true;
}

// A token opens a comment when it is exactly "/*" (block) or "//" (line).
bool is_My_Comment(const string &str)
{
    if (str == "/*")
        return true;
    return str == "//";
}

/*
 * Returns true when `str` is a non-empty run of decimal digits.
 *
 * Fixes: the original returned true for "" (all_of over an empty range
 * is vacuously true) and passed a raw char to ::isdigit, which is UB
 * for negative values on platforms where char is signed.
 */
bool is_My_Digit(const string &str)
{
    if (str.empty())
        return false;
    return all_of(str.begin(), str.end(),
                  [](unsigned char c) { return isdigit(c) != 0; });
}

/*
 * Returns true when `str` is a double-quoted string literal: at least
 * two characters, starting and ending with '"'.
 *
 * Fixes: the original indexed str[0] on an empty string (UB) and
 * accepted the single character "\"" because position 0 and
 * position size()-1 were the same character.
 */
bool is_My_String(const string &str)
{
    return str.size() >= 2 && str.front() == '"' && str.back() == '"';
}

// Boolean literals are exactly the two keywords "true" and "false".
bool is_My_Bool(const string &str)
{
    if (str == "false")
        return true;
    return str == "true";
}

// A literal is a number, a quoted string, or a boolean keyword.
bool is_My_Literal(const string &str)
{
    if (is_My_Digit(str))
        return true;
    if (is_My_String(str))
        return true;
    return is_My_Bool(str);
}

/*
 * Returns true when `str` is one of the recognised language keywords.
 *
 * Improvements: the keyword table is `static const` so it is built
 * once instead of on every call, and the hand-written loop is replaced
 * by std::find.
 */
bool is_My_Keyword(const string &str)
{
    static const vector<string> keywords{ "int", "float", "auto", "double",
                                          "do", "switch", "return" };
    return find(keywords.begin(), keywords.end(), str) != keywords.end();
}
/*
 * Returns true when `str` is a loop-statement keyword ("for"/"while").
 *
 * Improvements: the table is `static const` so it is built once, and
 * lookup uses std::find (consistent with is_My_Keyword).
 */
bool is_My_Statement(const string &str)
{
    static const vector<string> statements{ "for", "while" };
    return find(statements.begin(), statements.end(), str) != statements.end();
}

/*
 * Returns true when `str` is a recognised operator token.
 *
 * Fixes: the original table contained an empty string "" — which made
 * EVERY empty token classify as an operator — almost certainly a typo
 * for "*", which was otherwise missing; it also listed "=" twice.
 * The table is now `static const` (built once) and lookup uses
 * std::find.
 */
bool is_My_Operator(const string &str)
{
    static const vector<string> operators{ "<", ">", "<=", ">=", "*", "+",
                                           "-", "/", "=", "-=", "+=", "/=",
                                           "++", "--", "==" };
    return find(operators.begin(), operators.end(), str) != operators.end();
}

// Single-character separator tokens recognised by the analyzer.
bool is_My_Separator(const string &str)
{
    const string separator_set[] = { "{", "}", ",", "(", ")", ";" };
    for (const string &sep : separator_set)
    {
        if (str == sep)
            return true;
    }
    return false;
}

/*
 * Returns true for whitespace that only separates tokens and is never
 * itself a token.
 *
 * Fix: the original recognised only space and newline, so a tab in the
 * input was glued into the surrounding token; tabs are now treated as
 * separators too.
 */
bool is_Illegal(const string &str)
{
    return str == " " || str == "\n" || str == "\t";
}

// Classify `token` and print "(<category>, <token>)" on its own line.
// The predicates are tried in priority order — operator, separator,
// keyword, statement, literal, identifier, comment — and the first
// match wins; an unrecognised token prints a blank line.
void My_Role_Of_Token(const string& token)
{
    struct Rule { bool (*match)(const string &); const char *label; };
    const Rule rules[] = {
        { is_My_Operator,   "operator"   },
        { is_My_Separator,  "separator"  },
        { is_My_Keyword,    "keyword"    },
        { is_My_Statement,  "statement"  },
        { is_My_Literal,    "literal"    },
        { is_My_Identifier, "identifier" },
        { is_My_Comment,    "comment"    },
    };
    for (const Rule &rule : rules)
    {
        if (rule.match(token))
        {
            cout << "(" << rule.label << ", " << token << ")" << endl;
            return;
        }
    }
    cout << " " << endl;
}

/*
 * Lexical-analyzer driver.
 *
 * Pass 1: reads `nameOfFile` character by character, groups characters
 * into tokens and prints each token's category via My_Role_Of_Token().
 * A "//" comment is skipped to end of line and a block comment to its
 * closing marker; the two-character opener itself is reported once.
 *
 * Pass 2: re-reads "Readme.txt" (the original hard-codes this name),
 * strips comments and layout whitespace, and echoes the remaining
 * text, dropping the final character as the original did.
 *
 * Fixes relative to the original:
 *  - removed the dead `while (file.eof())` pre-read loop (eof() is
 *    false right after a successful open, so it never ran — and had it
 *    been negated it would have consumed the whole file);
 *  - EOF was detected with `ch == EOF` after extracting into a char,
 *    which is unreliable (a 0xFF byte compares equal on signed-char
 *    platforms); the stream state is tested instead;
 *  - the trailing token was silently dropped when the file did not end
 *    in whitespace; it is now flushed after the loop;
 *  - pass 2 used the `while (!file.eof())` anti-pattern (reprocesses
 *    the last read on failure), ended a block comment on any lone '*'
 *    rather than on the "*" "/" pair, and called substr(0, length()-1)
 *    on a possibly-empty string (undefined behaviour).
 */
void My_lexical_Analyzer(const string &nameOfFile)
{
    char ch;
    string buffer;
    ifstream file(nameOfFile);

    if (file.is_open()) {
        cout << "Lexical Analyzer" << endl;
    }
    else {
        cout << "error while opening the file\n";
        exit(0);
    }

    bool multiComment = false, singleComment = false;
    while (file >> noskipws >> ch)
    {
        if (singleComment || multiComment)
        {
            // Inside a comment: discard characters until it ends.
            if (singleComment && ch == '\n')
                singleComment = false;

            if (multiComment && ch == '*')
            {
                if (!(file >> ch))
                    break;               // unterminated block comment
                if (ch == '/')
                    multiComment = false;
            }
            continue;
        }

        if (ch == '/')
        {
            // Possible comment opener: look at the next character.
            string comm(1, ch);
            if (!(file >> ch))
            {
                My_Role_Of_Token(comm);  // lone '/' at end of file
                break;
            }

            if (ch == '*')
            {
                multiComment = true;
                comm += ch;
            }
            else if (ch == '/')
            {
                singleComment = true;
                comm += ch;
            }
            if (multiComment || singleComment)
            {
                My_Role_Of_Token(comm);  // report "/*" or "//" once
                continue;
            }
        }

        if (is_Illegal(string(1, ch)))
        {
            // Whitespace terminates the token being accumulated.
            if (!buffer.empty())
            {
                My_Role_Of_Token(buffer);
                buffer = "";
            }
            continue;
        }

        if (is_My_Operator(string(1, ch)))
        {
            // An operator character terminates a non-operator token.
            if (!buffer.empty() && !is_My_Operator(buffer))
            {
                My_Role_Of_Token(buffer);
                buffer = "";
            }
        }

        if (is_My_Separator(string(1, ch)))
        {
            // Separators terminate the current token and are tokens
            // in their own right.
            if (!buffer.empty())
            {
                My_Role_Of_Token(buffer);
                buffer = "";
            }
            My_Role_Of_Token(string(1, ch));
            continue;
        }
        buffer += ch;
    }
    if (!buffer.empty())
        My_Role_Of_Token(buffer);        // flush the final token
    file.close();

    string Read_and_write_string;
    char c;

    file.clear();                        // reset eof/fail bits before reuse
    file.open("Readme.txt");

    if (file.is_open())
    {
        while (file.get(c))
        {
            if (c == ' ' || c == '\n' || c == '\t')
                continue;                // drop layout whitespace
            if (c == '/')
            {
                // Strip comments from the echoed text.
                char next;
                if (!file.get(next))
                    break;               // '/' was the last character
                if (next == '/')
                {
                    // Line comment: skip to end of line.
                    while (file.get(c) && c != '\n') {}
                    continue;
                }
                else if (next == '*')
                {
                    // Block comment: skip to the closing marker.
                    char prev = '\0';
                    while (file.get(c))
                    {
                        if (prev == '*' && c == '/')
                            break;
                        prev = c;
                    }
                    continue;
                }
                else
                {
                    Read_and_write_string += c;  // keep the bare '/'
                    c = next;            // and fall through with the next char
                }
            }
            if (c != '\n' && c != '\t')
                Read_and_write_string += c;
        }
        // Original behaviour: drop the final accumulated character —
        // but only when there is one (the original called substr on an
        // empty string, which is UB).
        if (!Read_and_write_string.empty())
            Read_and_write_string.erase(Read_and_write_string.size() - 1);
        cout << Read_and_write_string << "\n\n";
    }
}

// Entry point: run the lexical analyzer over "Readme.txt", then pause
// so the console window stays open before exiting.
int main()
{
    const string input_path = "Readme.txt";
    My_lexical_Analyzer(input_path);
    system("pause"); // NOTE(review): Windows-only; no-op elsewhere — confirm target platform
    return 0;
}

You might also like