Browse Source

adding cs236

master
Derek McQuay 3 years ago
parent
commit
8af25fbb22
56 changed files with 2313 additions and 0 deletions
  1. +63
    -0
      cs236/Makefile
  2. +0
    -0
      cs236/bin/.hgdir
  3. +19
    -0
      cs236/labs/lab01.cpp
  4. +30
    -0
      cs236/labs/lab02.cpp
  5. +32
    -0
      cs236/labs/lab03.cpp
  6. +361
    -0
      cs236/lexer/lexi.cpp
  7. +44
    -0
      cs236/lexer/lexi.h
  8. +11
    -0
      cs236/lexer/token.cpp
  9. +16
    -0
      cs236/lexer/token.h
  10. +56
    -0
      cs236/lexer/util.h
  11. +14
    -0
      cs236/parser/fact.h
  12. +13
    -0
      cs236/parser/parameter.h
  13. +200
    -0
      cs236/parser/parser.cpp
  14. +49
    -0
      cs236/parser/parser.h
  15. +83
    -0
      cs236/parser/predicate.h
  16. +14
    -0
      cs236/parser/query.h
  17. +27
    -0
      cs236/parser/rule.h
  18. +14
    -0
      cs236/parser/scheme.h
  19. +10
    -0
      cs236/rdbms/Tuple.h
  20. +45
    -0
      cs236/rdbms/db.h
  21. +15
    -0
      cs236/rdbms/relation.h
  22. +11
    -0
      cs236/rdbms/schema.h
  23. +14
    -0
      cs236/submission/lab02/fact.h
  24. +30
    -0
      cs236/submission/lab02/lab02.cpp
  25. +361
    -0
      cs236/submission/lab02/lexi.cpp
  26. +44
    -0
      cs236/submission/lab02/lexi.h
  27. +13
    -0
      cs236/submission/lab02/parameter.h
  28. +200
    -0
      cs236/submission/lab02/parser.cpp
  29. +49
    -0
      cs236/submission/lab02/parser.h
  30. +83
    -0
      cs236/submission/lab02/predicate.h
  31. +14
    -0
      cs236/submission/lab02/query.h
  32. +27
    -0
      cs236/submission/lab02/rule.h
  33. +14
    -0
      cs236/submission/lab02/scheme.h
  34. +11
    -0
      cs236/submission/lab02/token.cpp
  35. +16
    -0
      cs236/submission/lab02/token.h
  36. +56
    -0
      cs236/submission/lab02/util.h
  37. +21
    -0
      cs236/tests/lab01/input.txt
  38. +21
    -0
      cs236/tests/lab01/test1.txt
  39. +17
    -0
      cs236/tests/lab02/in/in21.txt
  40. +16
    -0
      cs236/tests/lab02/in/in22.txt
  41. +19
    -0
      cs236/tests/lab02/in/in23.txt
  42. +19
    -0
      cs236/tests/lab02/in/in24.txt
  43. +18
    -0
      cs236/tests/lab02/in/in25.txt
  44. +16
    -0
      cs236/tests/lab02/in/in26.txt
  45. +14
    -0
      cs236/tests/lab02/in/in27.txt
  46. +21
    -0
      cs236/tests/lab02/in/in28.txt
  47. +19
    -0
      cs236/tests/lab02/out/out21.txt
  48. +3
    -0
      cs236/tests/lab02/out/out22.txt
  49. +3
    -0
      cs236/tests/lab02/out/out23.txt
  50. +3
    -0
      cs236/tests/lab02/out/out24.txt
  51. +3
    -0
      cs236/tests/lab02/out/out25.txt
  52. +9
    -0
      cs236/tests/lab02/out/out26.txt
  53. +3
    -0
      cs236/tests/lab02/out/out27.txt
  54. +3
    -0
      cs236/tests/lab02/out/out28.txt
  55. +15
    -0
      cs236/tests/lab03/in/in40.txt
  56. +11
    -0
      cs236/tests/lab03/out/out40.txt

+ 63
- 0
cs236/Makefile View File

@@ -0,0 +1,63 @@
# Build the three lab executables.  -I . lets sources include project
# headers as "lexer/...", "parser/...", etc.
CXXFLAGS= -Wall -g -std=c++0x -I .

lexor_objs=labs/lab01.o \
	lexer/lexi.o \
	lexer/token.o

# FIX: the original left a trailing backslash after parser/parser.o,
# continuing the variable definition onto the following blank line.
parser_objs=labs/lab02.o \
	lexer/lexi.o \
	lexer/token.o \
	parser/parser.o

rdbms_objs=labs/lab03.o \
	lexer/lexi.o \
	lexer/token.o \
	parser/parser.o

lab01=bin/lab01
lab02=bin/lab02
lab03=bin/lab03

all: $(lab01) $(lab02) $(lab03)

$(lab01): $(lexor_objs)
	$(CXX) $(CXXFLAGS) $^ -o $@

$(lab02): $(parser_objs)
	$(CXX) $(CXXFLAGS) $^ -o $@

$(lab03): $(rdbms_objs)
	$(CXX) $(CXXFLAGS) $^ -o $@

# Header dependencies only; objects are built by make's implicit
# %.o: %.cpp rule.  (parser/parser.o removed from prerequisite lists:
# an object file is not a compile-time dependency of another object.)
labs/lab01.o: labs/lab01.cpp lexer/util.h lexer/lexi.h lexer/token.h
lexer/lexi.o: lexer/lexi.cpp lexer/lexi.h
lexer/token.o: lexer/token.h lexer/token.cpp

labs/lab02.o: labs/lab02.cpp lexer/util.h lexer/lexi.h lexer/token.h \
	parser/scheme.h parser/fact.h parser/rule.h parser/query.h \
	parser/predicate.h parser/parameter.h

# BUG FIX: this rule previously listed labs/lab02.cpp, so lab03 did
# not rebuild when its own source changed.
labs/lab03.o: labs/lab03.cpp lexer/util.h lexer/lexi.h lexer/token.h \
	parser/scheme.h parser/fact.h parser/rule.h parser/query.h \
	parser/predicate.h parser/parameter.h rdbms/db.h \
	rdbms/relation.h rdbms/schema.h rdbms/Tuple.h
parser/parser.o: parser/parser.h parser/parser.cpp

clean:
	@rm -vf **/*.o
	@rm -vf $(EXE)
	@rm -vf **/*.1
	@rm -vf **/*.0
	@rm -vf test
	@rm -rvf **/*.dSYM
	@rm -vf output.txt
	@rm -vf bin/*

drun: main
	gdb ./main

valgrind: $(EXE)
	valgrind --tool=memcheck --leak-check=yes ./$(EXE) input.txt output.txt

pmc: **/*.h **/*.cpp
	pmccabe **/*.h **/*.cpp

+ 0
- 0
cs236/bin/.hgdir View File


+ 19
- 0
cs236/labs/lab01.cpp View File

@@ -0,0 +1,19 @@
#include <vector>
#include "lexer/lexi.h"
#include "lexer/util.h"
#include "lexer/token.h"

// usage banner printed when the argument count is wrong
const string usage = "usage: app <input> <output>";

/// Lab 1 driver: tokenize the file named by argv[1] and write the
/// token listing (or a lexical-error message) to argv[2].
int main(int argc, char* argv[]) {
    if(argc != 3) {
        cerr << usage << endl;
        return 1;
    }
    // the original discarded this result and lexed a missing file;
    // bail out early when the input cannot be opened
    if(!get_file_name(argv[1])) {
        return 1;
    }
    vector<string> data = open_file(argv[1]);
    lexi l;
    string temp = argv[2];
    l.lexical_analyzer(data, temp);
    // (debug "getting called here in lab 1" print removed)
}

+ 30
- 0
cs236/labs/lab02.cpp View File

@@ -0,0 +1,30 @@
#include <vector>
#include "lexer/lexi.h"
#include "lexer/util.h"
#include "lexer/token.h"
#include "parser/parser.h"

// usage banner printed when the argument count is wrong
const string usage = "usage: app <input> <output>";

/// Lab 2 driver: tokenize argv[1], parse the token stream, and write
/// either the "Success!" report or a "Failure!" message to argv[2].
int main(int argc, char* argv[]) {
    if(argc != 3) {
        cerr << usage << endl;
        return 1;
    }
    // the original discarded this result and lexed a missing file
    if(!get_file_name(argv[1])) {
        return 1;
    }
    vector<string> data = open_file(argv[1]);
    lexi l;
    string temp = argv[2];
    vector<token> s = l.lexical_analyzer(data, temp);
    parser p;
    p.tokens = s;
    try {
        p.check_datalog();
        string out = p.out();
        write_file(out, argv[2]);
    } catch(string str) {
        // parser throws a string describing the offending token
        stringstream s;
        s << "Failure!\n " << str;
        write_file(s.str(), argv[2]);
    }
}

+ 32
- 0
cs236/labs/lab03.cpp View File

@@ -0,0 +1,32 @@
#include <vector>
#include "lexer/lexi.h"
#include "lexer/util.h"
#include "lexer/token.h"
#include "parser/parser.h"
#include "rdbms/db.h"

// usage banner printed when the argument count is wrong
const string usage = "usage: app <input> <output>";

/// Lab 3 driver: tokenize and parse argv[1], write the parse report
/// to argv[2], then build the in-memory database from the program.
int main(int argc, char* argv[]) {
    if(argc != 3) {
        cerr << usage << endl;
        return 1;
    }
    // the original discarded this result and lexed a missing file
    if(!get_file_name(argv[1])) {
        return 1;
    }
    vector<string> data = open_file(argv[1]);
    lexi l;
    string temp = argv[2];
    vector<token> s = l.lexical_analyzer(data, temp);
    parser p;
    p.tokens = s;
    try {
        p.check_datalog();
        string out = p.out();
        write_file(out, argv[2]);
        // build the database only from a successfully parsed program;
        // the original constructed it even after a parse failure,
        // from a half-populated parser
        db database(p);
    } catch(string str) {
        stringstream s;
        s << "Failure!\n " << str;
        write_file(s.str(), argv[2]);
    }
}

+ 361
- 0
cs236/lexer/lexi.cpp View File

@@ -0,0 +1,361 @@
#include "lexi.h"

// Scan 'data' (one string per input line) into this->tokens.
// On a lexical error the partial token list is kept and an error
// message is written to 'file_name'; otherwise the full token
// listing is written there.  Returns the accumulated tokens.
// Note: scanning mutates the local copy of 'data' -- helpers blank
// out consumed characters so they are not rescanned.
vector<token> lexi::lexical_analyzer(vector<string> data, string file_name) {
string cur_string;
string next_character;
for(unsigned int i = 0; i < data.size(); i++) {
for(unsigned int j = 0; j < data[i].size(); j ++) {
// current character (as a 1-char string) plus one char of lookahead
cur_string = data[i].at(j);
if(j < data[i].size() - 1) {
next_character = data[i].at(j + 1);
}
else {
next_character = "";
}
// classify: "simple" punctuation, keyword prefix, id, string,
// comment, "fine" (whitespace) or "error"
string state = determiner(cur_string, next_character);
// each simple_* helper consumes its token kind when 'state' matches
simple_state(data, state, cur_string, next_character, i, j);
simple_state_string(data, state, cur_string, next_character, i, j);
if(state == "start of string") {
string token_symbol = string_finder(data, i, j);
if(token_symbol != "error") {
// drop the opening quote; line numbers are 1-based
token_symbol.erase(0,1);
token t("STRING", token_symbol, i + 1);
tokens.push_back(t);
}
else {
// unterminated string literal
write_to_file(file_name, i + 1);
return tokens;
}
}
simple_comment(data, state, cur_string, next_character, i, j);
simple_id(data, state, cur_string, next_character, i, j);
if(state == "error") {
write_to_file(file_name, i + 1);
return tokens;
}
}
}
write_to_file(file_name);
return tokens;
}

// Report a lexical error for the given 1-based line number,
// overwriting any previous contents of the output file.
void lexi::write_to_file(string file_name, int line) {
    ofstream out(file_name.c_str());
    out << "Error on line " << line << endl;
}

// Write every token (one per line) followed by the total count.
// The original's else branch was unreachable ('i < tokens.size()'
// is always true inside the loop), so every token was followed by
// endl; that reachable behavior is kept and the dead code removed.
void lexi::write_to_file(string file_name) {
    ofstream out(file_name.c_str());
    for(unsigned int i = 0; i < tokens.size(); i++) {
        out << tokens[i] << endl;
    }
    out << "Total Tokens = " << tokens.size() << endl;
}


// When 'state' is "comment", consume the rest of the line as a
// comment (comments produce no token).  Always returns true.
bool lexi::simple_comment(vector<string> & data, string state, string cur_string, string next_character, int i, int j) {
    if(state != "comment") {
        return true;
    }
    comment_finder(data, i, j);
    return true;
}

// When 'state' is "id", consume an identifier starting at (i, j)
// and append an ID token.  Always returns true.
bool lexi::simple_id(vector<string> & data, string state, string cur_string, string next_character, int i, int j) {
    if(state != "id") {
        return true;
    }
    const string symbol = id_finder(data, i, j);
    if(symbol != "error") {
        tokens.push_back(token("ID", symbol, i + 1));
    }
    return true;
}

// When 'state' is "simple_string", try to consume one of the section
// keywords (Schemes/Facts/Rules/Queries) starting at (i, j); if the
// text turns out not to be a keyword, fall back to scanning it as an
// identifier.  Always returns true.
bool lexi::simple_state_string(vector<string> & data, string state, string cur_string, string next_character, int i, int j) {
if(state == "simple_string") {
string token_symbol = det_type_simple_string(data, i, j);
if(token_symbol != "wrong") {
// keyword token type is the upper-case keyword name
string token_type = type_simple_string(token_symbol);
token t(token_type, token_symbol, i + 1);
tokens.push_back(t);
}
else {
// not a keyword after all (e.g. "FactX") -- scan as identifier
string token_symbol = id_finder(data, i, j);
if(token_symbol != "error") {
token t("ID", token_symbol, i + 1);
tokens.push_back(t);
}
}
}
return true;
}

// When 'state' is "simple", emit a punctuation token (',', '.', '?',
// '(', ')', ':' or ':-') and blank the consumed character(s) in the
// input line so they are not scanned again.  Always returns true.
bool lexi::simple_state(vector<string> & data, string state, string cur_string, string next_character, int i, int j) {
if(state == "simple") {
string token_symbol = type_simple(cur_string, next_character);
// ":-" is two characters wide; everything else is one.
// NOTE(review): replacing two chars with a single space shortens
// the line by one, shifting later columns left -- verify ':-'.
if(next_character == "-") {
data[i].replace(j,2, " ");
}
else {
data[i].replace(j,1, " ");
}
string token_id = type_simple_caps(cur_string, next_character);
token t(token_id, token_symbol, i + 1);
tokens.push_back(t);
}
return true;
}

// Classify the character at the scan position given one character of
// lookahead.  Returns one of: "simple", "simple_string", "id",
// "start of string", "comment", "fine", "error", or " ".
string lexi::determiner(string cur_string, string next_character) {
    if(det_help_simple(cur_string, next_character)) {
        return "simple";
    }
    if(det_help_simple_string(cur_string, next_character)) {
        return "simple_string";
    }
    if(det_help_id(cur_string)) {
        return "id";
    }
    if(cur_string == "'") {
        return "start of string";
    }
    if(cur_string == "#") {
        return "comment";
    }
    // whitespace / junk classification
    return incorrect(cur_string);
}

// True when the character begins an identifier (an ASCII letter).
bool lexi::det_help_id(string cur_string) {
    const char c = cur_string[0];
    return ('A' <= c && c <= 'Z') || ('a' <= c && c <= 'z');
}

// True for the "Sc" prefix of the "Schemes" keyword.
bool lexi::quick_help(string a, string b) {
    return a == "S" && b == "c";
}

// True when cur/next begin one of the section keywords:
// "Sc"hemes, "Qu"eries, "Ru"les, "Fa"cts.
bool lexi::det_help_simple_string(string cur_string, string next_character) {
    if(quick_help(cur_string, next_character)) {
        return true;
    }
    const bool qu = (cur_string == "Q" && next_character == "u");
    const bool ru = (cur_string == "R" && next_character == "u");
    const bool fa = (cur_string == "F" && next_character == "a");
    return qu || ru || fa;
}

// True when the character is stand-alone punctuation (',', '.', '?',
// '(', ')', ':').  The original also called type_simple() here and
// discarded the result; that call had no effect and was removed.
bool lexi::det_help_simple(string cur_string, string next_character) {
    return cur_string == "," || cur_string == "." || cur_string == "?" ||
           cur_string == "(" || cur_string == ")" || cur_string == ":";
}

// Classify a character that matched no token class: whitespace is
// "fine", a non-letter is "error", and a letter falls through to " ".
string lexi::incorrect(string cur_string) {
    if(cur_string == " " || cur_string == "\t") {
        return "fine";
    }
    const char c = cur_string[0];
    const bool is_letter = ('A' <= c && c <= 'Z') || ('a' <= c && c <= 'z');
    if(!is_letter) {
        return "error";
    }
    return " ";
}

// Scan an identifier starting at column b of line a: keep appending
// characters until the lookahead cannot extend an identifier, then
// blank the consumed span in 'data' and return the identifier text.
// Returns " " if the line ends without terminating the scan.
// NOTE(review): callers compare the result against "error", which
// this function never returns -- verify the intended sentinel.
string lexi::id_finder(vector<string> & data, int a, int b) {
string cur_string;
string next_character;
for(unsigned int j = b; j < data[a].size(); j++) {
cur_string += data[a].at(j);
if(j < data[a].size() - 1) {
next_character = data[a].at(j + 1);
}
else {
// sentinel marking end of line
next_character = "!";
}
// is_char_valid() is true for NON-alphanumeric lookahead,
// i.e. the identifier ends at column j
if(is_char_valid(next_character[0]) || next_character == "!") {
data[a].replace(data[a].begin() + b, data[a].begin() + j + 1, " ");
return cur_string;
}
}
return " ";
}

// A '#' comment runs to the end of the line: blank it out of 'data'
// (so it is never rescanned) and return its text.  The original
// condition '(j > data[i].size()) - 1' subtracted 1 from a bool,
// making the termination test meaningless (and a comment in the last
// column returned "error"); the intent -- consume the remainder of
// the line -- is now implemented directly.  Callers discard the
// returned text.
string lexi::comment_finder(vector<string> & data, int i, int b) {
    string comment = data[i].substr(b);
    data[i].replace(data[i].begin() + b, data[i].end(), " ");
    return comment;
}

// Scan a quoted STRING literal: locate the opening quote, accumulate
// characters until the lookahead is the closing quote, blank the
// consumed span (plus the closing quote) in 'data', and return the
// text with its opening quote still attached (the caller strips it).
// Returns "error" for an unterminated string.
string lexi::string_finder(vector<string> & data, int a, int b) {
string cur_string;
string next_character;
// NOTE(review): find() searches from the start of the line, so an
// earlier quote position could be found instead of b -- verify
// behavior on lines with multiple string literals.
b = data[a].find('\'');
for(unsigned int j = b; j < data[a].size(); j++) {
cur_string += data[a].at(j);
if(j < data[a].size() - 1) {
next_character = data[a].at(j + 1);
}
// NOTE(review): at the last column next_character keeps its value
// from the previous iteration
if(next_character == "'") {
// blank the literal plus its closing quote, then re-insert one
// space so the line length stays consistent for later columns
data[a].replace(data[a].begin() + b, data[a].begin() + j + 2, " ");
data[a].insert(data[a].begin() + b, ' ');
return cur_string;
}
}
return "error";
}

// Map a punctuation character to its token-type name; ':' followed
// by '-' is COLON_DASH.  Unknown input yields "".
string lexi::type_simple_caps(string symbol, string next_symbol) {
    if(symbol == ":") {
        return next_symbol == "-" ? "COLON_DASH" : "COLON";
    }
    if(symbol == ",") return "COMMA";
    if(symbol == ".") return "PERIOD";
    if(symbol == "?") return "Q_MARK";
    if(symbol == "(") return "LEFT_PAREN";
    if(symbol == ")") return "RIGHT_PAREN";
    return "";
}

// Return the literal text of a punctuation token: ':' followed by
// '-' yields ":-"; the other recognized symbols are returned
// unchanged; anything else yields "".
string lexi::type_simple(string symbol, string next_symbol) {
    if(symbol == ":" && next_symbol == "-") {
        return ":-";
    }
    if(symbol == "," || symbol == "." || symbol == "?" ||
       symbol == "(" || symbol == ")" || symbol == ":") {
        return symbol;
    }
    return "";
}

// Try to read one of the section keywords starting at column b.
// 'special_case' is the character preceding the keyword (if any):
// the keyword only counts when it is not embedded in a larger word.
// On success the keyword is blanked out of 'data' and returned;
// otherwise returns "wrong".
string lexi::det_type_simple_string(vector<string> & data, int i, int b) {
string cur_string;
string next_character;
string special_case;
if(b > 0) {
special_case = data[i].at(b -1);
}
for(unsigned int j = b; j < data[i].size(); j++) {
cur_string += data[i].at(j);
if(j < data[i].size() - 1) {
next_character = data[i].at(j + 1);
}
else {
// sentinel: end of line
next_character = "!";
}
// keyword complete AND both neighbours are non-alphanumeric.
// NOTE(review): special_case[0] on an empty string reads the
// terminating '\0' -- confirm that is the intended "no neighbour"
// behavior.
if((is_simple_string(cur_string)) && (is_char_valid(next_character.at(0))) && (is_char_valid(special_case[0]))) {
data[i].replace(data[i].begin() + b, data[i].begin() + j + 1, " ");
return cur_string;
}
}
return "wrong";
}

// Despite the name, returns true when 'next_character' CANNOT extend
// an identifier: anything that is not an ASCII letter or digit, plus
// the quote character.
bool lexi::is_char_valid(char next_character) {
    const bool alnum = ('A' <= next_character && next_character <= 'Z') ||
                       ('a' <= next_character && next_character <= 'z') ||
                       ('0' <= next_character && next_character <= '9');
    return !alnum || next_character == '\'';
}

// True when the accumulated text is exactly one of the four Datalog
// section keywords.
bool lexi::is_simple_string(string simple_com) {
    return simple_com == "Schemes" || simple_com == "Facts" ||
           simple_com == "Rules" || simple_com == "Queries";
}

// Map a section keyword to its upper-case token type ("" if unknown).
string lexi::type_simple_string(string simple_com) {
    if(simple_com == "Schemes") return "SCHEMES";
    if(simple_com == "Facts") return "FACTS";
    if(simple_com == "Rules") return "RULES";
    if(simple_com == "Queries") return "QUERIES";
    return "";
}

+ 44
- 0
cs236/lexer/lexi.h View File

@@ -0,0 +1,44 @@
#ifndef __LEXI_H__
#define __LEXI_H__
#include <iostream>
#include <fstream>
#include <vector>
#include "token.h"
#include <cctype>
#include <string>
#include <sstream>

using namespace std;
// Hand-rolled lexical analyzer for the Datalog grammar.  Scans raw
// input lines into a token stream (see token.h); scanning mutates
// its working copy of the input by blanking consumed characters.
class lexi {
public:
lexi(){}
// tokens produced so far, in source order
vector<token> tokens;
// classifies the character at the scan position (one char lookahead)
string determiner(string, string);
// main entry point: scan all lines, write tokens/errors to the file
vector<token> lexical_analyzer(vector<string>, string);
string type_simple(string, string);
string type_simple_string(string);
bool is_simple_string(string);
string det_type_simple_string(vector<string>&, int, int);
bool is_char_valid(char);
// scanners for string literals, comments and identifiers
string string_finder(vector<string>&, int, int);
string comment_finder(vector<string>&, int, int);
string id_finder(vector<string>&, int, int);
string incorrect(string);
bool det_help_simple(string, string);
bool det_help_simple_string(string, string);
bool quick_help(string, string);
bool det_help_id(string);
string type_simple_caps(string, string);
// per-state consumers driven by lexical_analyzer
bool simple_state(vector<string>&, string, string, string, int, int);
bool simple_state_string(vector<string>&, string, string, string, int, int);
bool simple_id(vector<string>&, string, string, string, int, int);
bool simple_comment(vector<string>&, string, string, string, int, int);
// success listing / error-report writers
void write_to_file(string);
void write_to_file(string, int);
};
#endif


+ 11
- 0
cs236/lexer/token.cpp View File

@@ -0,0 +1,11 @@
#include "token.h"

// Construct a token from its type name, matched text, and 1-based
// source line number.
token::token(string type, string character, int line_num) :
type(type), character(character), line_num(line_num) {}

// Stream a token in the grader's format: (TYPE,"text",line).
// NOTE(review): takes the token by value (matches the friend
// declaration in token.h); const& would avoid a copy per print.
ostream & operator<<(ostream & os, token tk) {
os << "(" << tk.type
<< ",\"" << tk.character
<< "\"," << tk.line_num << ")";
return os;
}

+ 16
- 0
cs236/lexer/token.h View File

@@ -0,0 +1,16 @@
#ifndef __TOKEN_H__
#define __TOKEN_H__

#include <iostream>

using namespace std;
// A single lexical token: its type name (e.g. "ID", "STRING",
// "COMMA"), the matched text, and the 1-based source line number.
class token {
public:
token(string, string, int);
string type;
string character;
int line_num;
// prints as (TYPE,"text",line) -- defined in token.cpp
friend ostream & operator<<(ostream & os, token tk);
};
#endif

+ 56
- 0
cs236/lexer/util.h View File

@@ -0,0 +1,56 @@
#ifndef __UTIL_H__
#define __UTIL_H__
#include <vector>
#include <iostream>
#include <fstream>

// Read every line of 'file_name' into a vector (newlines stripped).
// The original looped on !eof(), which pushed a spurious empty line
// after the final newline and spun forever when the open failed
// (eofbit is never set on a stream that was never opened); testing
// getline() itself avoids both problems.
vector<string> open_file(string file_name) {
    ifstream myfile(file_name.c_str());
    vector<string> data;
    string temp;
    while(getline(myfile, temp)) {
        data.push_back(temp);
    }
    return data;
}

// Return true when 'input' names a readable file; otherwise print a
// diagnostic to stderr and return false.  The original wrapped this
// in a while loop that could only run once and re-read the entire
// file via open_file() only to discard the result; both removed.
// Callers should check the result (the labs' main() currently
// ignores it).
bool get_file_name(string input) {
    ifstream inputs(input.c_str());
    if(inputs.good()) {
        return true;
    }
    cerr << "incorrect file name" << endl;
    return false;
}

// Overwrite 'file_name' with 'output' followed by a newline.
void write_file(string output, string file_name) {
    ofstream out(file_name.c_str());
    out << output << "\n";
}

// Write the lines joined by "\n" (no trailing newline).  The
// original wrote every non-final element twice -- once with and once
// without its newline -- because the unconditional second write was
// not inside an else branch.
void write_file(vector<string> output, string file_name) {
    ofstream myfile(file_name.c_str());
    for(unsigned int i = 0; i < output.size(); i++) {
        myfile << output[i];
        if(i + 1 < output.size()) {
            myfile << "\n";
        }
    }
}

#endif

+ 14
- 0
cs236/parser/fact.h View File

@@ -0,0 +1,14 @@
#ifndef __FACT_H__
#define __FACT_H__

#include "predicate.h"
#include <vector>
#include <sstream>

using namespace std;

// Fact-list wrapper.  NOTE(review): parser.cpp stores facts directly
// in vector<predicate>; this class appears unused -- confirm.
class fact {
public:
vector<predicate> pred_facts;
};
#endif

+ 13
- 0
cs236/parser/parameter.h View File

@@ -0,0 +1,13 @@
#ifndef __PARAMETER_H__
#define __PARAMETER_H__

#include <iostream>

using namespace std;

// One predicate parameter: its text and its token type
// ("STRING" or "ID").
class parameter {
public:
string param;
string type;
};
#endif

+ 200
- 0
cs236/parser/parser.cpp View File

@@ -0,0 +1,200 @@
#include "parser.h"

// Peek at the type of the next unconsumed token.
// NOTE(review): indexes tokens[0] without an empty check -- callers
// must guarantee tokens is non-empty before calling.
string parser::get_token() {
string type = tokens[0].type;
return type;
}

// Validate the whole token stream against the Datalog grammar:
//   Schemes: <schemes> Facts: <facts> Rules: <rules> Queries: <queries>
// Populates schemelist/factlist/rulelist/querylist/domain as it
// goes; throws a string describing the offending token on error.
void parser::check_datalog() {
    match("SCHEMES");
    match("COLON");
    // the Schemes section must contain at least one scheme
    if(get_token() == "FACTS") {
        error();
    }
    check_schemelist(get_token());
    match("FACTS");
    match("COLON");
    check_factlist(get_token());
    match("RULES");
    match("COLON");
    check_rulelist(get_token());
    match("QUERIES");
    match("COLON");
    check_querylist(get_token());
    // the original called out() here and threw the result away;
    // callers invoke out() themselves, so the redundant pass is gone
}

// Render the parse results in the expected "Success!" report format:
// each section header with its item count, the items, then the
// sorted string domain.
string parser::out() {
stringstream s;
s << "Success!" << endl;
s << "Schemes(" << schemelist.size() << "):" << endl;
for(unsigned int i = 0; i < schemelist.size(); i++) {
s << " " << schemelist[i].toString();
}
s << "Facts(" << factlist.size() << "):" << endl;
for(unsigned int i = 0; i < factlist.size(); i++) {
// bool overload of predicate::toString prints the fact form
s << " " << factlist[i].toString(false);
}
s << "Rules(" << rulelist.size() << "):" << endl;
for(unsigned int i = 0; i < rulelist.size(); i++) {
s << " " << rulelist[i].toString();
}
s << "Queries(" << querylist.size() << "):" << endl;
// double overload of predicate::toString selects the query form
double a = 0;
for(unsigned int i = 0; i < querylist.size(); i++) {
s << " " << querylist[i].toString(a);
}
s << "Domain(" << domain.size() << "):" << endl;
// std::set iteration yields the domain strings in sorted order
for (auto it=domain.cbegin(); it != domain.cend(); ++it) {
s << " '" << *it << "'" << endl;
}
return s.str();
}

// schemelist -> scheme schemelist | (empty, when FACTS is reached)
void parser::check_schemelist(string type) {
    if(type != "FACTS") {
        check_scheme(type);
        check_schemelist(get_token());
    }
}

// scheme -> predicate; record it in schemelist.
void parser::check_scheme(string type) {
schemelist.push_back(check_predicate(type));
}

// factlist -> fact factlist | (empty, when RULES is reached)
void parser::check_factlist(string type) {
    if(type == "RULES") {
        return;
    }
    check_fact(type);
    check_factlist(get_token());
}

// fact -> predicate '.'; record it in factlist.
void parser::check_fact(string type) {
factlist.push_back(check_predicate(type));
match("PERIOD");
}

// rulelist -> rule rulelist | (empty, when QUERIES is reached)
void parser::check_rulelist(string type) {
    if(type != "QUERIES") {
        check_rule(type);
        check_rulelist(get_token());
    }
}

// rule -> head-predicate ':-' predicate-list '.'
void parser::check_rule(string type) {
rule r;
r.head = check_predicate(type);
match("COLON_DASH");
check_predicate_list(get_token(), r);
match("PERIOD");
rulelist.push_back(r);
}

// querylist -> query querylist | (empty, at end of input)
// At least one query is required: check_query runs before the
// empty-check, so an empty Queries section is a syntax error.
void parser::check_querylist(string type) {
check_query(type);
if(tokens.empty()) {
return;
}
else {
check_querylist(get_token());
}
}

// query -> predicate '?'; record it in querylist.
void parser::check_query(string type) {
querylist.push_back(check_predicate(type));
match("Q_MARK");
}

// predicate-list -> predicate (',' predicate)*
// Iterative form of the original tail recursion.
void parser::check_predicate_list(string type, rule& r) {
    r.pred_rule.push_back(check_predicate(type));
    while(get_token() == "COMMA") {
        match("COMMA");
        r.pred_rule.push_back(check_predicate(get_token()));
    }
}

// predicate -> ID '(' parameter-list ')'
// An empty parameter list is a syntax error.
predicate parser::check_predicate(string type) {
predicate pred;
// capture the identifier text before match() consumes the token
pred.id = tokens[0].character;
match("ID");
match("LEFT_PAREN");
if(get_token() == "RIGHT_PAREN") {
error();
}
check_parameterlist(get_token(), pred);
match("RIGHT_PAREN");
return pred;
}

// parameter-list -> parameter (',' parameter)*
// A trailing comma (COMMA immediately followed by RIGHT_PAREN) is a
// syntax error.
void parser::check_parameterlist(string type, predicate& pred) {
if(type == "RIGHT_PAREN") {
return;
}
else {
check_parameter(type, pred);
if(get_token() == "COMMA") {
match("COMMA");
if(get_token() == "RIGHT_PAREN") {
error();
}
check_parameterlist(get_token(), pred);
}
else {
return;
}
}
}

// parameter -> STRING | ID.  String constants also enter the value
// domain; anything else is a syntax error.
void parser::check_parameter(string type, predicate& pred) {
    if(type != "STRING" && type != "ID") {
        error();
        return;
    }
    if(type == "STRING") {
        domain.insert(tokens[0].character);
    }
    parameter para;
    para.param = tokens[0].character;
    para.type = tokens[0].type;
    pred.pred_list.push_back(para);
    match(type);
}

// Consume the next token iff its type equals 'type'; otherwise
// report a syntax error.  The original peeked at tokens[0] before
// checking for emptiness, which was undefined behaviour at end of
// input; the empty check now comes first and throws a string, like
// error() does, so existing catch(string) handlers still work.
void parser::match(string type) {
    if(tokens.empty()) {
        throw string("unexpected end of input\n");
    }
    if(get_token() != type) {
        error();
    }
    tokens.erase(tokens.begin());
}

// Abort parsing by throwing a string naming the offending token.
// Guarded against an empty token list (the original dereferenced
// tokens[0] unconditionally, which was undefined behaviour when
// called at end of input).
void parser::error() {
    stringstream oss;
    if(tokens.empty()) {
        oss << "unexpected end of input" << endl;
    }
    else {
        oss << tokens[0] << endl;
    }
    throw oss.str();
}

+ 49
- 0
cs236/parser/parser.h View File

@@ -0,0 +1,49 @@
#ifndef __PARSER_H__
#define __PARSER_H__

#include <iostream>
#include <sstream>
#include <vector>
#include <set>

#include "../lexer/token.h"
#include "scheme.h"
#include "fact.h"
#include "rule.h"
#include "query.h"
#include "predicate.h"
#include "parameter.h"

using namespace std;

// Recursive-descent parser for the Datalog grammar.  Consumes the
// token stream produced by lexi (tokens are erased as they match)
// and accumulates the parsed program in the public lists below.
// Syntax errors are reported by throwing a string.
class parser {
public:
parser() {}
// unconsumed tokens; assigned by the caller before check_datalog()
vector<token> tokens;
// all distinct STRING constants seen in parameters (kept sorted)
set<string> domain;
vector<predicate> schemelist;
vector<predicate> factlist;
vector<predicate> querylist;
// NOTE(review): predlist appears unused by parser.cpp -- confirm
vector<predicate> predlist;
vector<rule> rulelist;

// peek at the next token's type (requires non-empty tokens)
string get_token();
// entry point: validate the whole program
void check_datalog();
void check_schemelist(string);
void check_scheme(string);
void check_factlist(string);
void check_fact(string);
void check_rulelist(string);
void check_rule(string);
void check_querylist(string);
void check_query(string);
void check_predicate_list(string, rule&);
predicate check_predicate(string);
void check_parameterlist(string type, predicate&);
void check_parameter(string, predicate&);
// consume one token of the given type or report an error
void match(string);
// throw a string describing the offending token
void error();
// render the "Success!" report
string out();

};
#endif

+ 83
- 0
cs236/parser/predicate.h View File

@@ -0,0 +1,83 @@
#ifndef __PREDICATE_H__
#define __PREDICATE_H__

#include "parameter.h"
#include <vector>
#include <iostream>

using namespace std;

// A predicate: an identifier plus its parameter list.  Used for
// schemes, facts, queries and rule components alike; the four
// toString overloads select the per-section output format via an
// otherwise-unused argument type.
// NOTE(review): uses stringstream but includes only <iostream>;
// relies on <sstream> being pulled in by an earlier header.
class predicate {
public:
string id;
vector<parameter> pred_list;
// scheme form: id(A,B) followed by a newline
string toString() {
//schemes
stringstream s;
s << id << "(";
for(unsigned int i = 0; i < pred_list.size(); i++) {
s << pred_list[i].param;
if(i < pred_list.size()-1) {
s << ",";
}
}
s << ")\n";
return s.str();
}
// fact form: id('a','b'). followed by a newline
string toString(bool a) {
//facts
stringstream s;
s << id << "(";
for(unsigned int i = 0; i < pred_list.size(); i++) {
s << "'" << pred_list[i].param << "'";
if(i < pred_list.size()-1) {
s << ",";
}
}
s << ").\n";
return s.str();
}
// query form: id(X,'a')? -- strings quoted, ids bare
string toString(double a) {
//query
stringstream s;
s << id << "(";
for(unsigned int i = 0; i < pred_list.size(); i++) {
if(pred_list[i].type == "STRING") {
s << "'" << pred_list[i].param << "'";
if(i < pred_list.size()-1) {
s << ",";
}
}
if(pred_list[i].type == "ID") {
s << pred_list[i].param;
if(i < pred_list.size()-1) {
s << ",";
}
}
}
s << ")?\n";
return s.str();
}
// rule-component form: id(X,'a') with no trailing punctuation
string toString(int a) {
//rules
stringstream s;
s << id << "(";
for(unsigned int i = 0; i < pred_list.size(); i++) {
if(pred_list[i].type == "STRING") {
s << "'" << pred_list[i].param << "'";
if(i < pred_list.size()-1) {
s << ",";
}
}
if(pred_list[i].type == "ID") {
s << pred_list[i].param;
if(i < pred_list.size()-1) {
s << ",";
}
}
}
s << ")";
return s.str();
}
};
#endif

+ 14
- 0
cs236/parser/query.h View File

@@ -0,0 +1,14 @@
#ifndef __QUERY_H__
#define __QUERY_H__

#include "predicate.h"
#include <vector>
#include <iostream>

using namespace std;

// Query-list wrapper.  NOTE(review): parser.cpp stores queries
// directly in vector<predicate>; this class appears unused.
class query {
public:
vector<predicate> pred_queries;
};
#endif

+ 27
- 0
cs236/parser/rule.h View File

@@ -0,0 +1,27 @@
#ifndef __RULE_H__
#define __RULE_H__

#include "predicate.h"
#include <vector>
#include <iostream>

using namespace std;

// A Datalog rule: head :- body-predicate, ..., body-predicate.
class rule {
public:
predicate head;
vector<predicate> pred_rule;
// Render as "head(...) :- body1(...),body2(...).\n" using the
// rule-component (int) overload of predicate::toString.
string toString() {
stringstream s;
s << head.toString(1) << " :- ";
for(unsigned int i = 0; i < pred_rule.size(); i++) {
s << pred_rule[i].toString(1);
if(i < pred_rule.size()-1) {
s << ",";
}
}
s << ".\n";
return s.str();
}
};
#endif

+ 14
- 0
cs236/parser/scheme.h View File

@@ -0,0 +1,14 @@
#ifndef __SCHEME_H__
#define __SCHEME_H__

#include "predicate.h"
#include <vector>
#include <iostream>

using namespace std;

// Scheme-list wrapper.  NOTE(review): parser.cpp stores schemes
// directly in vector<predicate>; this class appears unused.
class scheme {
public:
vector<predicate> pred_schemes;
};
#endif

+ 10
- 0
cs236/rdbms/Tuple.h View File

@@ -0,0 +1,10 @@
#ifndef __TUPLE_H__
#define __TUPLE_H__

#include <vector>
#include <string>

// A database tuple: an ordered list of attribute values.
// FIX: the header includes <vector>/<string> but used unqualified
// names with no using-directive, so it only compiled when included
// after a header with 'using namespace std'; the names are now
// std-qualified, making the header self-contained.
// NOTE(review): inheriting from std::vector (no virtual destructor)
// is fragile; kept for compatibility with existing callers.
class Tuple: public std::vector<std::string> {
};

#endif

+ 45
- 0
cs236/rdbms/db.h View File

@@ -0,0 +1,45 @@
#ifndef __DB_H__
#define __DB_H__

#include "parser/parser.h"
#include "relation.h"

class db {
public:
db(parser incoming) {
p = incoming;
setup();
}
parser p;
vector<relation> relations;


void setup() {
for(unsigned int i = 0; i < p.schemelist.size(); i++) {
relation r;
r.name = p.schemelist[i].id;
for(unsigned int j = 0; j < p.schemelist[i].pred_list.size(); j++) {
r.schemas.s.push_back(p.schemelist[i].pred_list[j].param);
}
for(unsigned int k = 0; k < p.factlist.size(); k++) {
if(r.name == p.factlist[k].id) {
Tuple t;
for(unsigned int l = 0; l < p.factlist[k].pred_list.size(); k++) {
t.push_back(p.factlist[k].pred_list[l].param);
}
r.tuples.insert(t);
}
}
}
}

//set<string> domain;
//vector<predicate> schemelist;
//vector<predicate> factlist;
//vector<predicate> querylist;
//vector<predicate> predlist;
//vector<rule> rulelist;

};

#endif

+ 15
- 0
cs236/rdbms/relation.h View File

@@ -0,0 +1,15 @@
#ifndef __RELATION_H__
#define __RELATION_H__

#include "Tuple.h"
#include "schema.h"
#include <set>

// A named relation: its schema (column names) plus a sorted set of
// tuples (rows).
class relation {
public:
string name;
schema schemas;
set<Tuple> tuples;
};

#endif

+ 11
- 0
cs236/rdbms/schema.h View File

@@ -0,0 +1,11 @@
#ifndef __SCHEMA_H__
#define __SCHEMA_H__

#include "Tuple.h"

// Column-name list for a relation, stored as a Tuple of attribute
// names.
class schema {
public:
Tuple s;
};

#endif

+ 14
- 0
cs236/submission/lab02/fact.h View File

@@ -0,0 +1,14 @@
#ifndef __FACT_H__
#define __FACT_H__

#include "predicate.h"
#include <vector>
#include <sstream>

using namespace std;

// Fact-list wrapper (submission copy).  NOTE(review): parser.cpp
// stores facts directly in vector<predicate>; appears unused.
class fact {
public:
vector<predicate> pred_facts;
};
#endif

+ 30
- 0
cs236/submission/lab02/lab02.cpp View File

@@ -0,0 +1,30 @@
#include <vector>
#include "lexi.h"
#include "util.h"
#include "token.h"
#include "parser.h"

// usage banner printed when the argument count is wrong
const string usage = "usage: app <input> <output>";

/// Lab 2 driver (submission copy): tokenize argv[1], parse the token
/// stream, and write either the "Success!" report or a "Failure!"
/// message to argv[2].
int main(int argc, char* argv[]) {
    if(argc != 3) {
        cerr << usage << endl;
        return 1;
    }
    // the original discarded this result and lexed a missing file
    if(!get_file_name(argv[1])) {
        return 1;
    }
    vector<string> data = open_file(argv[1]);
    lexi l;
    string temp = argv[2];
    vector<token> s = l.lexical_analyzer(data, temp);
    parser p;
    p.tokens = s;
    try {
        p.check_datalog();
        string out = p.out();
        write_file(out, argv[2]);
    } catch(string str) {
        // parser throws a string describing the offending token
        stringstream s;
        s << "Failure!\n " << str;
        write_file(s.str(), argv[2]);
    }
}

+ 361
- 0
cs236/submission/lab02/lexi.cpp View File

@@ -0,0 +1,361 @@
#include "lexi.h"

// Scan 'data' (one string per input line) into this->tokens.
// On a lexical error the partial token list is kept and an error
// message is written to 'file_name'; otherwise the full token
// listing is written there.  Returns the accumulated tokens.
// Note: scanning mutates the local copy of 'data' -- helpers blank
// out consumed characters so they are not rescanned.
vector<token> lexi::lexical_analyzer(vector<string> data, string file_name) {
string cur_string;
string next_character;
for(unsigned int i = 0; i < data.size(); i++) {
for(unsigned int j = 0; j < data[i].size(); j ++) {
// current character (as a 1-char string) plus one char of lookahead
cur_string = data[i].at(j);
if(j < data[i].size() - 1) {
next_character = data[i].at(j + 1);
}
else {
next_character = "";
}
// classify: "simple" punctuation, keyword prefix, id, string,
// comment, "fine" (whitespace) or "error"
string state = determiner(cur_string, next_character);
// each simple_* helper consumes its token kind when 'state' matches
simple_state(data, state, cur_string, next_character, i, j);
simple_state_string(data, state, cur_string, next_character, i, j);
if(state == "start of string") {
string token_symbol = string_finder(data, i, j);
if(token_symbol != "error") {
// drop the opening quote; line numbers are 1-based
token_symbol.erase(0,1);
token t("STRING", token_symbol, i + 1);
tokens.push_back(t);
}
else {
// unterminated string literal
write_to_file(file_name, i + 1);
return tokens;
}
}
simple_comment(data, state, cur_string, next_character, i, j);
simple_id(data, state, cur_string, next_character, i, j);
if(state == "error") {
write_to_file(file_name, i + 1);
return tokens;
}
}
}
write_to_file(file_name);
return tokens;
}

// Report a lexical error for the given 1-based line number,
// overwriting any previous contents of the output file.
void lexi::write_to_file(string file_name, int line) {
    ofstream out(file_name.c_str());
    out << "Error on line " << line << endl;
}

// Write every token (one per line) followed by the total count.
// The original's else branch was unreachable ('i < tokens.size()'
// is always true inside the loop), so every token was followed by
// endl; that reachable behavior is kept and the dead code removed.
void lexi::write_to_file(string file_name) {
    ofstream out(file_name.c_str());
    for(unsigned int i = 0; i < tokens.size(); i++) {
        out << tokens[i] << endl;
    }
    out << "Total Tokens = " << tokens.size() << endl;
}


// When 'state' is "comment", consume the rest of the line as a
// comment (comments produce no token).  Always returns true.
bool lexi::simple_comment(vector<string> & data, string state, string cur_string, string next_character, int i, int j) {
    if(state != "comment") {
        return true;
    }
    comment_finder(data, i, j);
    return true;
}

// When 'state' is "id", consume an identifier starting at (i, j)
// and append an ID token.  Always returns true.
bool lexi::simple_id(vector<string> & data, string state, string cur_string, string next_character, int i, int j) {
    if(state != "id") {
        return true;
    }
    const string symbol = id_finder(data, i, j);
    if(symbol != "error") {
        tokens.push_back(token("ID", symbol, i + 1));
    }
    return true;
}

// When 'state' is "simple_string", try to consume one of the section
// keywords (Schemes/Facts/Rules/Queries) starting at (i, j); if the
// text turns out not to be a keyword, fall back to scanning it as an
// identifier.  Always returns true.
bool lexi::simple_state_string(vector<string> & data, string state, string cur_string, string next_character, int i, int j) {
if(state == "simple_string") {
string token_symbol = det_type_simple_string(data, i, j);
if(token_symbol != "wrong") {
// keyword token type is the upper-case keyword name
string token_type = type_simple_string(token_symbol);
token t(token_type, token_symbol, i + 1);
tokens.push_back(t);
}
else {
// not a keyword after all (e.g. "FactX") -- scan as identifier
string token_symbol = id_finder(data, i, j);
if(token_symbol != "error") {
token t("ID", token_symbol, i + 1);
tokens.push_back(t);
}
}
}
return true;
}

// When 'state' is "simple", emit a punctuation token (',', '.', '?',
// '(', ')', ':' or ':-') and blank the consumed character(s) in the
// input line so they are not scanned again.  Always returns true.
bool lexi::simple_state(vector<string> & data, string state, string cur_string, string next_character, int i, int j) {
if(state == "simple") {
string token_symbol = type_simple(cur_string, next_character);
// ":-" is two characters wide; everything else is one.
// NOTE(review): replacing two chars with a single space shortens
// the line by one, shifting later columns left -- verify ':-'.
if(next_character == "-") {
data[i].replace(j,2, " ");
}
else {
data[i].replace(j,1, " ");
}
string token_id = type_simple_caps(cur_string, next_character);
token t(token_id, token_symbol, i + 1);
tokens.push_back(t);
}
return true;
}

// Classify the character at the scan position given one character of
// lookahead.  Returns one of: "simple", "simple_string", "id",
// "start of string", "comment", "fine", "error", or " ".
string lexi::determiner(string cur_string, string next_character) {
    if(det_help_simple(cur_string, next_character)) {
        return "simple";
    }
    if(det_help_simple_string(cur_string, next_character)) {
        return "simple_string";
    }
    if(det_help_id(cur_string)) {
        return "id";
    }
    if(cur_string == "'") {
        return "start of string";
    }
    if(cur_string == "#") {
        return "comment";
    }
    // whitespace / junk classification
    return incorrect(cur_string);
}

// True when the character begins an identifier (an ASCII letter).
bool lexi::det_help_id(string cur_string) {
    const char c = cur_string[0];
    return ('A' <= c && c <= 'Z') || ('a' <= c && c <= 'z');
}

// True for the "Sc" prefix of the "Schemes" keyword.
bool lexi::quick_help(string a, string b) {
    return a == "S" && b == "c";
}

// True when cur/next begin one of the section keywords:
// "Sc"hemes, "Qu"eries, "Ru"les, "Fa"cts.
bool lexi::det_help_simple_string(string cur_string, string next_character) {
    if(quick_help(cur_string, next_character)) {
        return true;
    }
    const bool qu = (cur_string == "Q" && next_character == "u");
    const bool ru = (cur_string == "R" && next_character == "u");
    const bool fa = (cur_string == "F" && next_character == "a");
    return qu || ru || fa;
}

// True when the character is one of the single/double-character punctuation
// tokens: , . ? ( ) : (and by extension ":-").
//
// FIX: the original also called type_simple() here and discarded its return
// value; type_simple() only computes a string and has no side effects, so
// the call was dead work and has been removed.
bool lexi::det_help_simple(string cur_string, string next_character) {
    return cur_string == "," || cur_string == "." || cur_string == "?" ||
           cur_string == "(" || cur_string == ")" || cur_string == ":";
}

// Fallback classification for characters no other state claimed:
// whitespace is "fine" (skippable), a letter yields " " (no state change),
// anything else is a lexical "error".
string lexi::incorrect(string cur_string) {
    if(cur_string == " " || cur_string == "\t") {
        return "fine";
    }
    char c = cur_string[0];
    bool alphabetic = ('A' <= c && c <= 'Z') || ('a' <= c && c <= 'z');
    return alphabetic ? " " : "error";
}

// Scans an identifier starting at column b of line a.  Characters are
// accumulated until the lookahead is a non-identifier character or the end
// of the line; the consumed span is then blanked out of the input line and
// the identifier text returned.  Returns " " if the loop ends without the
// terminator test firing (NOTE(review): since the "!" end-of-line sentinel
// is also treated as a terminator, this fallback looks unreachable --
// confirm).
string lexi::id_finder(vector<string> & data, int a, int b) {
string cur_string;
string next_character;
for(unsigned int j = b; j < data[a].size(); j++) {
// accumulate the current character into the identifier
cur_string += data[a].at(j);
if(j < data[a].size() - 1) {
next_character = data[a].at(j + 1);
}
else {
// "!" is a sentinel meaning "end of line"
next_character = "!";
}
// is_char_valid() is true for NON-identifier characters, so this fires
// when the identifier can no longer be extended.
if(is_char_valid(next_character[0]) || next_character == "!") {
// blank out data[a][b..j] so later passes do not rescan it
data[a].replace(data[a].begin() + b, data[a].begin() + j + 1, " ");
return cur_string;
}
}
return " ";
}

// Consumes a '#' comment that starts at column b of line i: a comment runs
// to the end of the line, so the remainder of the line is captured,
// blanked out of the input, and returned.  Returns "error" when b is out
// of range for the line.
//
// BUG FIX: the original loop condition was
//     if((j > data[i].size()) - 1 && next_character != "!")
// where the misplaced parenthesis makes it evaluate (bool - 1).  As a
// result it returned after the very first character of the comment, and
// returned "error" (leaving the '#' in the input) whenever '#' was the
// last character on the line.
string lexi::comment_finder(vector<string> & data, int i, int b) {
    if(b < 0 || (size_t)b >= data[i].size()) {
        return "error";
    }
    string comment_text = data[i].substr(b);
    // blank the comment so later scanning passes skip it
    data[i].replace(data[i].begin() + b, data[i].end(), " ");
    return comment_text;
}

// Extracts a quoted string literal from line a.  NOTE(review): the passed-in
// start column b is overwritten with the position of the first "'" on the
// line -- presumably b always points at that quote anyway; confirm against
// the caller.  The returned text includes the opening quote but not the
// closing one; the consumed span (closing quote included) is blanked out of
// the line, with one space re-inserted to preserve later columns.
// Returns "error" when no closing quote is found on the line.
string lexi::string_finder(vector<string> & data, int a, int b) {
string cur_string;
string next_character;
b = data[a].find('\'');
for(unsigned int j = b; j < data[a].size(); j++) {
cur_string += data[a].at(j);
if(j < data[a].size() - 1) {
next_character = data[a].at(j + 1);
}
// NOTE(review): on the final column next_character keeps the value from
// the previous iteration -- relies on that stale lookahead; confirm.
if(next_character == "'") {
// erase through the closing quote (j + 2), then re-insert one space
data[a].replace(data[a].begin() + b, data[a].begin() + j + 2, " ");
data[a].insert(data[a].begin() + b, ' ');
return cur_string;
}
}
return "error";
}

// Maps a punctuation symbol to its upper-case token-type name; ":" becomes
// COLON_DASH when followed by "-".  Unknown symbols yield "".
string lexi::type_simple_caps(string symbol, string next_symbol) {
    if(symbol == ",") return "COMMA";
    if(symbol == ".") return "PERIOD";
    if(symbol == "?") return "Q_MARK";
    if(symbol == "(") return "LEFT_PAREN";
    if(symbol == ")") return "RIGHT_PAREN";
    if(symbol == ":") return (next_symbol == "-") ? "COLON_DASH" : "COLON";
    return "";
}

// Returns the source spelling of a punctuation token; ":" followed by "-"
// yields ":-".  Unknown symbols yield "".
string lexi::type_simple(string symbol, string next_symbol) {
    if(symbol == ":") {
        return (next_symbol == "-") ? ":-" : ":";
    }
    if(symbol == "," || symbol == "." || symbol == "?" ||
       symbol == "(" || symbol == ")") {
        // these tokens are their own spelling
        return symbol;
    }
    return "";
}

// Tries to recognise one of the section keywords ("Schemes", "Facts",
// "Rules", "Queries") starting at column b of line i.  The keyword must not
// be embedded in a longer identifier on either side: special_case is the
// character *before* column b and next_character the one after the match,
// and both must satisfy is_char_valid() (true for NON-identifier
// characters).  On success the keyword is blanked out of the line and
// returned; otherwise returns "wrong".
string lexi::det_type_simple_string(vector<string> & data, int i, int b) {
string cur_string;
string next_character;
string special_case;
// NOTE(review): when b == 0 special_case stays empty and special_case[0]
// reads the terminating '\0'; is_char_valid('\0') is true, so a keyword at
// column 0 is accepted -- confirm this reliance is intended.
if(b > 0) {
special_case = data[i].at(b -1);
}
for(unsigned int j = b; j < data[i].size(); j++) {
cur_string += data[i].at(j);
if(j < data[i].size() - 1) {
next_character = data[i].at(j + 1);
}
else {
// "!" sentinel: end of line (is_char_valid('!') is also true)
next_character = "!";
}
if((is_simple_string(cur_string)) && (is_char_valid(next_character.at(0))) && (is_char_valid(special_case[0]))) {
// blank out the keyword so later passes do not rescan it
data[i].replace(data[i].begin() + b, data[i].begin() + j + 1, " ");
return cur_string;
}
}
return "wrong";
}

// True when the character CANNOT continue an identifier: anything that is
// not an ASCII letter or digit, plus the quote character.  (The name is
// historical; this is really an "identifier terminator" test.)
bool lexi::is_char_valid(char next_character) {
    bool alnum = ('A' <= next_character && next_character <= 'Z') ||
                 ('a' <= next_character && next_character <= 'z') ||
                 ('0' <= next_character && next_character <= '9');
    return !alnum || next_character == '\'';
}

// True when the text is exactly one of the four Datalog section keywords.
bool lexi::is_simple_string(string simple_com) {
    return simple_com == "Schemes" || simple_com == "Facts" ||
           simple_com == "Rules"   || simple_com == "Queries";
}

// Maps a section keyword to its upper-case token-type name ("" if unknown).
string lexi::type_simple_string(string simple_com) {
    if(simple_com == "Schemes") return "SCHEMES";
    if(simple_com == "Facts")   return "FACTS";
    if(simple_com == "Rules")   return "RULES";
    if(simple_com == "Queries") return "QUERIES";
    return "";
}

+ 44
- 0
cs236/submission/lab02/lexi.h View File

@@ -0,0 +1,44 @@
#ifndef __LEXI_H__
#define __LEXI_H__
#include <iostream>
#include <fstream>
#include <vector>
#include "token.h"
#include <cctype>
#include <string>
#include <sstream>

using namespace std;
// Hand-rolled lexical analyzer for the CS236 Datalog labs.
// lexical_analyzer() drives the scan; the remaining methods classify
// characters, recognise section keywords / identifiers / string literals /
// comments, and append the resulting tokens to `tokens`.
class lexi {
public:
lexi(){}
// every token recognised so far, in source order
vector<token> tokens;
// maps the current character (+ one lookahead) to a scanner state name
string determiner(string, string);
vector<token> lexical_analyzer(vector<string>, string);
// punctuation helpers: token text / CAPS token type for , . ? ( ) : :-
string type_simple(string, string);
string type_simple_string(string);
bool is_simple_string(string);
string det_type_simple_string(vector<string>&, int, int);
// true for characters that CANNOT continue an identifier
bool is_char_valid(char);
// scanners that consume a lexeme from a line and blank it out in place
string string_finder(vector<string>&, int, int);
string comment_finder(vector<string>&, int, int);
string id_finder(vector<string>&, int, int);
string incorrect(string);
// state predicates used by determiner()
bool det_help_simple(string, string);
bool det_help_simple_string(string, string);
bool quick_help(string, string);
bool det_help_id(string);
string type_simple_caps(string, string);
// per-state token emitters
bool simple_state(vector<string>&, string, string, string, int, int);
bool simple_state_string(vector<string>&, string, string, string, int, int);
bool simple_id(vector<string>&, string, string, string, int, int);
bool simple_comment(vector<string>&, string, string, string, int, int);
void write_to_file(string);
void write_to_file(string, int);
};
#endif


+ 13
- 0
cs236/submission/lab02/parameter.h View File

@@ -0,0 +1,13 @@
#ifndef __PARAMETER_H__
#define __PARAMETER_H__

#include <iostream>

using namespace std;

// A single predicate parameter: its source text and the token type it came
// from ("STRING" or "ID" -- see parser::check_parameter).
class parameter {
public:
string param;
string type;
};
#endif

+ 200
- 0
cs236/submission/lab02/parser.cpp View File

@@ -0,0 +1,200 @@
#include "parser.h"

// Returns the type of the lookahead (front) token, or "" when the token
// stream is exhausted.
//
// BUG FIX: the original indexed tokens[0] unconditionally, which is
// undefined behaviour once the stream is empty (e.g. on truncated input).
// An empty string can never equal a real token type, so callers that
// compare the result against a type name now fail cleanly instead.
string parser::get_token() {
    if(tokens.empty()) {
        return "";
    }
    return tokens[0].type;
}

// Entry point: parses a whole Datalog program.
// Grammar: SCHEMES COLON <schemelist> FACTS COLON <factlist>
//          RULES COLON <rulelist> QUERIES COLON <querylist>
//
// FIX: the original ended with a bare `out();` whose return value was
// discarded; out() only builds and returns a string (no side effects), so
// the call did nothing and has been removed.  Presumably the caller prints
// out() itself -- confirm at the call site.
void parser::check_datalog() {
    match("SCHEMES");
    match("COLON");
    // at least one scheme is required before the FACTS section
    if(get_token() == "FACTS") {
        error();
    }
    check_schemelist(get_token());
    match("FACTS");
    match("COLON");
    check_factlist(get_token());
    match("RULES");
    match("COLON");
    check_rulelist(get_token());
    match("QUERIES");
    match("COLON");
    check_querylist(get_token());
}

// Formats the parse results (schemes, facts, rules, queries, and the
// string domain) into the lab's "Success!" report and returns it.
string parser::out() {
    stringstream s;
    s << "Success!" << endl;
    s << "Schemes(" << schemelist.size() << "):" << endl;
    for(auto & sch : schemelist) {
        s << " " << sch.toString();
    }
    s << "Facts(" << factlist.size() << "):" << endl;
    for(auto & f : factlist) {
        s << " " << f.toString(false);
    }
    s << "Rules(" << rulelist.size() << "):" << endl;
    for(auto & r : rulelist) {
        s << " " << r.toString();
    }
    s << "Queries(" << querylist.size() << "):" << endl;
    for(auto & q : querylist) {
        // the double argument selects the query-flavoured toString overload
        s << " " << q.toString(0.0);
    }
    s << "Domain(" << domain.size() << "):" << endl;
    for(const string & d : domain) {
        s << " '" << d << "'" << endl;
    }
    return s.str();
}

// <schemelist> -> <scheme> <schemelist> | (empty; terminated by FACTS)
void parser::check_schemelist(string type) {
    if(type != "FACTS") {
        check_scheme(type);
        check_schemelist(get_token());
    }
}

// <scheme> -> <predicate>; the parsed predicate is appended to schemelist.
void parser::check_scheme(string type) {
schemelist.push_back(check_predicate(type));
}

// <factlist> -> <fact> <factlist> | (empty; terminated by RULES)
void parser::check_factlist(string type) {
    if(type != "RULES") {
        check_fact(type);
        check_factlist(get_token());
    }
}

// <fact> -> <predicate> PERIOD; the parsed predicate is appended to factlist.
void parser::check_fact(string type) {
factlist.push_back(check_predicate(type));
match("PERIOD");
}

// <rulelist> -> <rule> <rulelist> | (empty; terminated by QUERIES)
void parser::check_rulelist(string type) {
    if(type != "QUERIES") {
        check_rule(type);
        check_rulelist(get_token());
    }
}

// <rule> -> <predicate> COLON_DASH <predicatelist> PERIOD
// The head predicate and its body list are collected into a rule object,
// which is appended to rulelist.
void parser::check_rule(string type) {
rule r;
r.head = check_predicate(type);
match("COLON_DASH");
check_predicate_list(get_token(), r);
match("PERIOD");
rulelist.push_back(r);
}

// <querylist> -> <query> <querylist> | <query>  (at least one query; the
// list ends when the token stream is exhausted)
void parser::check_querylist(string type) {
    check_query(type);
    if(!tokens.empty()) {
        check_querylist(get_token());
    }
}

// <query> -> <predicate> Q_MARK; the parsed predicate is appended to querylist.
void parser::check_query(string type) {
querylist.push_back(check_predicate(type));
match("Q_MARK");
}

// <predicatelist> -> <predicate> (COMMA <predicate>)*
// Each parsed predicate is appended to the rule body r.pred_rule.
// (Iterative form of the original tail recursion; same call sequence.)
void parser::check_predicate_list(string type, rule& r) {
    r.pred_rule.push_back(check_predicate(type));
    while(get_token() == "COMMA") {
        match("COMMA");
        r.pred_rule.push_back(check_predicate(get_token()));
    }
}

// <predicate> -> ID LEFT_PAREN <parameterlist> RIGHT_PAREN
// Builds and returns the predicate; an empty parameter list "()" is a
// syntax error.
predicate parser::check_predicate(string type) {
predicate pred;
pred.id = tokens[0].character; // capture the identifier text before match() consumes it
match("ID");
match("LEFT_PAREN");
if(get_token() == "RIGHT_PAREN") {
error();
}
check_parameterlist(get_token(), pred);
match("RIGHT_PAREN");
return pred;
}

// <parameterlist> -> <parameter> (COMMA <parameter>)* | (empty)
// A trailing comma (COMMA immediately followed by RIGHT_PAREN) is an error.
void parser::check_parameterlist(string type, predicate& pred) {
    if(type == "RIGHT_PAREN") {
        return;
    }
    check_parameter(type, pred);
    if(get_token() != "COMMA") {
        return;
    }
    match("COMMA");
    if(get_token() == "RIGHT_PAREN") {
        error();
    }
    check_parameterlist(get_token(), pred);
}

// <parameter> -> STRING | ID; anything else is a syntax error.
// The token's text/type are copied into a parameter on pred's list;
// STRING literals additionally join the program's domain.
void parser::check_parameter(string type, predicate& pred) {
    if(type != "STRING" && type != "ID") {
        error();
        return;  // unreachable: error() throws
    }
    if(type == "STRING") {
        domain.insert(tokens[0].character);
    }
    parameter para;
    para.param = tokens[0].character;
    para.type = tokens[0].type;
    pred.pred_list.push_back(para);
    match(type);
}

// Consumes the front token when its type equals `type`; otherwise reports
// a syntax error via error() (which throws).
//
// BUG FIX: the original called get_token() -- which read tokens[0] --
// BEFORE checking whether the stream was empty, so matching against an
// exhausted token stream was undefined behaviour; its inner
// `if(tokens.empty())` check could never fire.  The empty check now comes
// first.
void parser::match(string type) {
    if(tokens.empty()) {
        error();
    }
    else if(get_token() == type) {
        tokens.erase(tokens.begin());
    }
    else {
        error();
    }
}

// Aborts the parse by throwing a string describing the offending token
// (formatted by token's operator<< as (TYPE,"text",line)).
//
// BUG FIX: the original streamed tokens[0] unconditionally, which is
// undefined behaviour when the token stream is already empty (premature
// end of input); that case now throws a dedicated message instead.
void parser::error() {
    stringstream oss;
    if(tokens.empty()) {
        oss << "unexpected end of input" << endl;
    }
    else {
        oss << tokens[0] << endl;
    }
    throw oss.str();
}

+ 49
- 0
cs236/submission/lab02/parser.h View File

@@ -0,0 +1,49 @@
#ifndef __PARSER_H__
#define __PARSER_H__

#include <iostream>
#include <sstream>
#include <vector>
#include <set>

#include "token.h"
#include "scheme.h"
#include "fact.h"
#include "rule.h"
#include "query.h"
#include "predicate.h"
#include "parameter.h"

using namespace std;

// Recursive-descent parser for the CS236 Datalog grammar.  The token
// stream produced by the lexer is consumed front-to-back via match();
// check_datalog() is the entry point and the check_* methods mirror the
// grammar productions.  Syntax errors are reported by error(), which
// throws the offending token's string form.
class parser {
public:
parser() {}
// unconsumed tokens; the front of the vector is the lookahead
vector<token> tokens;
// all STRING literals seen in parameters (set: sorted, deduplicated)
set<string> domain;
vector<predicate> schemelist;
vector<predicate> factlist;
vector<predicate> querylist;
// NOTE(review): predlist appears unused by the parser.cpp shown -- confirm
vector<predicate> predlist;
vector<rule> rulelist;

string get_token();
void check_datalog();
void check_schemelist(string);
void check_scheme(string);
void check_factlist(string);
void check_fact(string);
void check_rulelist(string);
void check_rule(string);
void check_querylist(string);
void check_query(string);
void check_predicate_list(string, rule&);
predicate check_predicate(string);
void check_parameterlist(string type, predicate&);
void check_parameter(string, predicate&);
void match(string);
void error();
// formats the "Success!" report from the collected lists
string out();

};
#endif

+ 83
- 0
cs236/submission/lab02/predicate.h View File

@@ -0,0 +1,83 @@
#ifndef __PREDICATE_H__
#define __PREDICATE_H__

#include <iostream>
#include <sstream>
#include <vector>

#include "parameter.h"

using namespace std;

// A parsed predicate: an identifier plus its parameter list.  Used for
// schemes, facts, queries, and rule heads/bodies alike; the four toString
// overloads (selected by dummy-argument type, as in the original) differ
// only in how parameters are quoted and how the text is terminated.
//
// FIX: the four overloads each duplicated the same render loop; the loop
// now lives in one private helper.  Output is unchanged for STRING/ID
// parameters (the only types check_parameter admits).
class predicate {
public:
    string id;                     // predicate name (the ID token text)
    vector<parameter> pred_list;   // parameters in source order

    // Schemes: parameters bare, newline-terminated -> "id(A,B)\n"
    string toString() {
        return render(BARE) + ")\n";
    }
    // Facts: every parameter quoted, period-terminated -> "id('a','b').\n"
    string toString(bool a) {
        return render(QUOTED) + ").\n";
    }
    // Queries: STRING params quoted, ID params bare -> "id('a',B)?\n"
    string toString(double a) {
        return render(BY_TYPE) + ")?\n";
    }
    // Rules (head/body): same style as queries, no terminator -> "id('a',B)"
    string toString(int a) {
        return render(BY_TYPE) + ")";
    }
private:
    enum style { BARE, QUOTED, BY_TYPE };
    // Renders `id(` followed by the comma-separated parameters (closing
    // paren and terminator are added by the caller).
    string render(style st) {
        stringstream s;
        s << id << "(";
        for(unsigned int i = 0; i < pred_list.size(); i++) {
            bool quote = (st == QUOTED) ||
                         (st == BY_TYPE && pred_list[i].type == "STRING");
            if(quote) {
                s << "'" << pred_list[i].param << "'";
            }
            else {
                s << pred_list[i].param;
            }
            if(i < pred_list.size()-1) {
                s << ",";
            }
        }
        return s.str();
    }
};
#endif

+ 14
- 0
cs236/submission/lab02/query.h View File

@@ -0,0 +1,14 @@
#ifndef __QUERY_H__
#define __QUERY_H__

#include "predicate.h"
#include <vector>
#include <iostream>

using namespace std;

// Grouping type for parsed queries.  NOTE(review): parser.h stores queries
// in vector<predicate> directly; this wrapper appears unused in the code
// shown -- confirm before removing.
class query {
public:
vector<predicate> pred_queries;
};
#endif

+ 27
- 0
cs236/submission/lab02/rule.h View File

@@ -0,0 +1,27 @@
#ifndef __RULE_H__
#define __RULE_H__

#include "predicate.h"
#include <vector>
#include <iostream>

using namespace std;

// A Datalog rule: head :- pred_rule[0], ..., pred_rule[n-1].
// NOTE(review): toString() uses stringstream but this header only includes
// <iostream>; it compiles via include-order luck -- <sstream> should be
// added.
class rule {
public:
predicate head;
vector<predicate> pred_rule;
// Formats the rule as "head(...) :- p1(...),p2(...).\n".  The int
// argument to toString(1) selects predicate's rule-flavoured overload
// (strings quoted, IDs bare, no terminator).
string toString() {
stringstream s;
s << head.toString(1) << " :- ";
for(unsigned int i = 0; i < pred_rule.size(); i++) {
s << pred_rule[i].toString(1);
if(i < pred_rule.size()-1) {
s << ",";
}
}
s << ".\n";
return s.str();
}
};
#endif

+ 14
- 0
cs236/submission/lab02/scheme.h View File

@@ -0,0 +1,14 @@
#ifndef __SCHEME_H__
#define __SCHEME_H__

#include "predicate.h"
#include <vector>
#include <iostream>

using namespace std;

// Grouping type for parsed schemes.  NOTE(review): parser.h stores schemes
// in vector<predicate> directly; this wrapper appears unused in the code
// shown -- confirm before removing.
class scheme {
public:
vector<predicate> pred_schemes;
};
#endif

+ 11
- 0
cs236/submission/lab02/token.cpp View File

@@ -0,0 +1,11 @@
#include "token.h"

// Constructs a token from its type name (e.g. "ID"), its source text, and
// the 1-based line number it was found on.
token::token(string type, string character, int line_num) :
type(type), character(character), line_num(line_num) {}

// Streams a token in the form (TYPE,"text",line).
ostream & operator<<(ostream & os, token tk) {
    os << "(";
    os << tk.type;
    os << ",\"";
    os << tk.character;
    os << "\",";
    os << tk.line_num;
    os << ")";
    return os;
}

+ 16
- 0
cs236/submission/lab02/token.h View File

@@ -0,0 +1,16 @@
#ifndef __TOKEN_H__
#define __TOKEN_H__

#include <iostream>

using namespace std;
// A single lexical token: its type name (e.g. "ID", "COMMA"), the source
// text it was built from, and the 1-based line it appeared on.
class token {
public:
token(string, string, int);
string type;
string character;
int line_num;
// prints as (TYPE,"text",line)
friend ostream & operator<<(ostream & os, token tk);
};
#endif

+ 56
- 0
cs236/submission/lab02/util.h View File

@@ -0,0 +1,56 @@
#ifndef __UTIL_H__
#define __UTIL_H__
#include <vector>
#include <iostream>
#include <fstream>

// Reads `file_name` and returns its lines (newline-stripped), in order.
// A missing/unreadable file yields an empty vector.
//
// BUG FIX: the original looped on `while(!myfile.eof())`, which tests EOF
// *before* the read; after the last line the body ran once more and pushed
// a spurious empty string (and a missing file produced one empty line
// instead of none).  Looping on getline() itself avoids both problems.
// Names are std-qualified so the header no longer depends on an includer's
// `using namespace std;`.
std::vector<std::string> open_file(std::string file_name) {
    std::vector<std::string> data;
    std::ifstream myfile(file_name.c_str());
    std::string temp;
    while(std::getline(myfile, temp)) {
        data.push_back(temp);
    }
    return data;
}

// Returns true when `input` names a readable file, false otherwise
// (printing a diagnostic to stderr on failure).
//
// FIX: the original wrapped a single-pass check in a while loop that could
// never iterate a second time, and called open_file() only to discard the
// result -- reading the whole file for nothing.  Both are removed;
// observable behaviour (return value and stderr output) is unchanged.
// Names are std-qualified so the header no longer depends on an includer's
// `using namespace std;`.
bool get_file_name(std::string input) {
    std::ifstream inputs(input.c_str());
    if(inputs.good()) {
        return true;
    }
    std::cerr << "incorrect file name" << std::endl;
    return false;
}

// Writes `output` followed by a trailing newline to `file_name`,
// truncating any existing contents.  (The stream closes via RAII.)
//
// FIX: names are std-qualified so the header no longer depends on an
// includer's `using namespace std;` (the original compiled only by
// include-order luck).  Behaviour is unchanged.
void write_file(std::string output, std::string file_name) {
    std::ofstream myfile(file_name.c_str());
    myfile << output << "\n";
}

// Writes the strings in `output` to `file_name`, one per line, with no
// trailing newline after the final element.
//
// BUG FIX: the original wrote every non-final element TWICE -- once inside
// `if(i != output.size()-1) myfile << output[i] << "\n";` and once again
// unconditionally on the next statement -- producing duplicated lines.
// Each element is now written exactly once, separated by newlines.
// Names are std-qualified so the header no longer depends on an includer's
// `using namespace std;`.
void write_file(std::vector<std::string> output, std::string file_name) {
    std::ofstream myfile(file_name.c_str());
    for(unsigned int i = 0; i < output.size(); i++) {
        if(i != 0) {
            myfile << "\n";
        }
        myfile << output[i];
    }
}

#endif