adding cs236

This commit is contained in:
Derek McQuay 2016-04-06 20:46:32 -07:00
parent cf99ec6565
commit 8af25fbb22
56 changed files with 2313 additions and 0 deletions

63
cs236/Makefile Normal file
View File

@ -0,0 +1,63 @@
# Build settings for the CS236 datalog labs.
CXXFLAGS = -Wall -g -std=c++0x -I .

# Object lists for each lab executable.
# BUG FIX: the parser_objs and rdbms_objs lists previously ended with a
# trailing '\', which spliced the NEXT variable definition into the list
# (rdbms_objs became part of parser_objs, lab01 part of rdbms_objs).
lexor_objs = labs/lab01.o \
	lexer/lexi.o \
	lexer/token.o
parser_objs = labs/lab02.o \
	lexer/lexi.o \
	lexer/token.o \
	parser/parser.o
rdbms_objs = labs/lab03.o \
	lexer/lexi.o \
	lexer/token.o \
	parser/parser.o

lab01 = bin/lab01
lab02 = bin/lab02
lab03 = bin/lab03

all: $(lab01) $(lab02) $(lab03)

$(lab01): $(lexor_objs)
	$(CXX) $(CXXFLAGS) $^ -o $@

$(lab02): $(parser_objs)
	$(CXX) $(CXXFLAGS) $^ -o $@

$(lab03): $(rdbms_objs)
	$(CXX) $(CXXFLAGS) $^ -o $@

# Header dependencies (objects are built by make's implicit %.o rule).
labs/lab01.o: labs/lab01.cpp lexer/util.h lexer/lexi.h lexer/token.h
lexer/lexi.o: lexer/lexi.cpp lexer/lexi.h
lexer/token.o: lexer/token.h lexer/token.cpp
labs/lab02.o: labs/lab02.cpp lexer/util.h lexer/lexi.h lexer/token.h \
	parser/scheme.h parser/fact.h parser/rule.h parser/query.h \
	parser/predicate.h parser/parameter.h
# BUG FIX: lab03.o previously depended on labs/lab02.cpp, so editing
# lab03.cpp never triggered a rebuild.
labs/lab03.o: labs/lab03.cpp lexer/util.h lexer/lexi.h lexer/token.h \
	parser/scheme.h parser/fact.h parser/rule.h parser/query.h \
	parser/predicate.h parser/parameter.h rdbms/db.h \
	rdbms/relation.h rdbms/schema.h rdbms/Tuple.h
parser/parser.o: parser/parser.h parser/parser.cpp

clean:
	@rm -vf **/*.o
	@rm -vf $(EXE)
	@rm -vf **/*.1
	@rm -vf **/*.0
	@rm -vf test
	@rm -rvf **/*.dSYM
	@rm -vf output.txt
	@rm -vf bin/*

drun: main
	gdb ./main

valgrind: $(EXE)
	valgrind --tool=memcheck --leak-check=yes ./$(EXE) input.txt output.txt

pmc: **/*.h **/*.cpp
	pmccabe **/*.h **/*.cpp

0
cs236/bin/.hgdir Normal file
View File

19
cs236/labs/lab01.cpp Normal file
View File

@ -0,0 +1,19 @@
#include <vector>
#include "lexer/lexi.h"
#include "lexer/util.h"
#include "lexer/token.h"
const string usage = "usage: app <input> <output>";
int main(int argc, char* argv[]) {
if(argc != 3) {
cerr << usage << endl;
return 1;
}
get_file_name(argv[1]);
vector<string> data = open_file(argv[1]);
lexi l;
string temp = argv[2];
l.lexical_analyzer(data, temp);
cout << "getting called here in lab 1" << endl;
}

30
cs236/labs/lab02.cpp Normal file
View File

@ -0,0 +1,30 @@
#include <vector>
#include "lexer/lexi.h"
#include "lexer/util.h"
#include "lexer/token.h"
#include "parser/parser.h"
const string usage = "usage: app <input> <output>";
int main(int argc, char* argv[]) {
if(argc != 3) {
cerr << usage << endl;
return 1;
}
get_file_name(argv[1]);
vector<string> data = open_file(argv[1]);
lexi l;
string temp = argv[2];
vector<token> s = l.lexical_analyzer(data, temp);
parser p;
p.tokens = s;
try {
p.check_datalog();
string out = p.out();
write_file(out, argv[2]);
} catch(string str) {
stringstream s;
s << "Failure!\n " << str;
write_file(s.str(), argv[2]);
}
}

32
cs236/labs/lab03.cpp Normal file
View File

@ -0,0 +1,32 @@
#include <vector>
#include "lexer/lexi.h"
#include "lexer/util.h"
#include "lexer/token.h"
#include "parser/parser.h"
#include "rdbms/db.h"
const string usage = "usage: app <input> <output>";
int main(int argc, char* argv[]) {
if(argc != 3) {
cerr << usage << endl;
return 1;
}
get_file_name(argv[1]);
vector<string> data = open_file(argv[1]);
lexi l;
string temp = argv[2];
vector<token> s = l.lexical_analyzer(data, temp);
parser p;
p.tokens = s;
try {
p.check_datalog();
string out = p.out();
write_file(out, argv[2]);
} catch(string str) {
stringstream s;
s << "Failure!\n " << str;
write_file(s.str(), argv[2]);
}
db database(p);
}

361
cs236/lexer/lexi.cpp Normal file
View File

@ -0,0 +1,361 @@
#include "lexi.h"
// Tokenizes every line of `data`, appending to this->tokens.  The helper
// routines blank out the characters they consume in `data` so the same
// position is not re-scanned.  On success the token listing is written to
// file_name; on the first lexical error an error message is written
// instead and scanning stops, returning the tokens found so far.
vector<token> lexi::lexical_analyzer(vector<string> data, string file_name) {
string cur_string;
string next_character;
for(unsigned int i = 0; i < data.size(); i++) {
for(unsigned int j = 0; j < data[i].size(); j ++) {
// Current character plus one character of lookahead ("" at end of line).
cur_string = data[i].at(j);
if(j < data[i].size() - 1) {
next_character = data[i].at(j + 1);
}
else {
next_character = "";
}
// Classify the position, then let each state handler decide whether it
// applies; the handlers are no-ops for states they do not own.
string state = determiner(cur_string, next_character);
simple_state(data, state, cur_string, next_character, i, j);
simple_state_string(data, state, cur_string, next_character, i, j);
if(state == "start of string") {
string token_symbol = string_finder(data, i, j);
if(token_symbol != "error") {
// Strip the opening quote; line numbers are 1-based.
token_symbol.erase(0,1);
token t("STRING", token_symbol, i + 1);
tokens.push_back(t);
}
else {
// Unterminated string literal: report the line and stop.
write_to_file(file_name, i + 1);
return tokens;
}
}
simple_comment(data, state, cur_string, next_character, i, j);
simple_id(data, state, cur_string, next_character, i, j);
if(state == "error") {
write_to_file(file_name, i + 1);
return tokens;
}
}
}
write_to_file(file_name);
return tokens;
}
// Overwrites file_name with a single lexer-error message naming `line`.
void lexi::write_to_file(string file_name, int line) {
    ofstream out(file_name.c_str());
    out << "Error on line " << line << endl;
    out.close();
}
// Writes every token (one per line) followed by a total count to
// file_name, replacing any existing contents.
void lexi::write_to_file(string file_name) {
    ofstream myfile;
    myfile.open(file_name.c_str());
    // BUG FIX: the old loop guarded each write with `i < tokens.size()`,
    // which is always true inside the loop, so its no-newline else branch
    // was unreachable dead code.  Every token gets its own line.
    for(unsigned int i = 0; i < tokens.size(); i++) {
        myfile << tokens[i] << endl;
    }
    myfile << "Total Tokens = " << tokens.size();
    myfile << endl;
    myfile.close();
}
// When the scanner is in the "comment" state, consume the '#' comment via
// comment_finder (which edits `data` in place).  The comment text is
// deliberately discarded -- comments produce no tokens.  Always returns true.
bool lexi::simple_comment(vector<string> & data, string state, string cur_string, string next_character, int i, int j) {
if(state == "comment") {
string token_symbol = comment_finder(data, i, j);
}
return true;
}
// When the scanner is in the "id" state, consume an identifier starting at
// data[i][j] and append an ID token for it.  Always returns true.
bool lexi::simple_id(vector<string> & data, string state, string cur_string, string next_character, int i, int j) {
    if(state != "id") {
        return true;
    }
    const string symbol = id_finder(data, i, j);
    if(symbol != "error") {
        tokens.push_back(token("ID", symbol, i + 1));
    }
    return true;
}
// When the scanner sees a possible section keyword ("simple_string" state),
// try to read one of Schemes/Facts/Rules/Queries; if that fails, fall back
// to reading an ordinary identifier.  Always returns true.
bool lexi::simple_state_string(vector<string> & data, string state, string cur_string, string next_character, int i, int j) {
if(state == "simple_string") {
string token_symbol = det_type_simple_string(data, i, j);
if(token_symbol != "wrong") {
string token_type = type_simple_string(token_symbol);
token t(token_type, token_symbol, i + 1);
tokens.push_back(t);
}
else {
// Not a stand-alone keyword after all (e.g. "Facts123"): lex as an ID.
string token_symbol = id_finder(data, i, j);
if(token_symbol != "error") {
token t("ID", token_symbol, i + 1);
tokens.push_back(t);
}
}
}
return true;
}
// When the scanner is on punctuation ("simple" state), emit the matching
// token and blank the consumed characters -- two for ":-", one otherwise.
// Always returns true.
bool lexi::simple_state(vector<string> & data, string state, string cur_string, string next_character, int i, int j) {
if(state == "simple") {
string token_symbol = type_simple(cur_string, next_character);
if(next_character == "-") {
// ":-" consumes both characters.
data[i].replace(j,2, " ");
}
else {
data[i].replace(j,1, " ");
}
string token_id = type_simple_caps(cur_string, next_character);
token t(token_id, token_symbol, i + 1);
tokens.push_back(t);
}
return true;
}
// Classifies the character at the scan position into one of the scanner
// states: "simple" (punctuation), "simple_string" (possible section
// keyword), "id", "start of string", "comment", or whatever incorrect()
// reports for everything else ("fine", "error", or " ").
string lexi::determiner(string cur_string, string next_character) {
    if(det_help_simple(cur_string, next_character)) {
        return "simple";
    }
    if(det_help_simple_string(cur_string, next_character)) {
        return "simple_string";
    }
    if(det_help_id(cur_string)) {
        return "id";
    }
    if(cur_string == "'") {
        return "start of string";
    }
    if(cur_string == "#") {
        return "comment";
    }
    return incorrect(cur_string);
}
// True when the first character is an ASCII letter (identifier start).
bool lexi::det_help_id(string cur_string) {
    const char c = cur_string[0];
    return ('A' <= c && c <= 'Z') || ('a' <= c && c <= 'z');
}
// True for the "Sc" prefix of the "Schemes" keyword.
bool lexi::quick_help(string a, string b) {
    return a == "S" && b == "c";
}
// True when the current character and the lookahead form the two-letter
// prefix of a section keyword: "Sc"hemes, "Qu"eries, "Ru"les, or "Fa"cts.
bool lexi::det_help_simple_string(string cur_string, string next_character) {
    if(quick_help(cur_string, next_character)) {
        return true;
    }
    const string prefix = cur_string + next_character;
    return prefix == "Qu" || prefix == "Ru" || prefix == "Fa";
}
// True when cur_string is one of the single-character punctuation tokens.
bool lexi::det_help_simple(string cur_string, string next_character) {
    // BUG FIX: the old code also called type_simple() here and threw the
    // result away; classification needs no side effects.
    return cur_string == "," || cur_string == "." || cur_string == "?" ||
           cur_string == "(" || cur_string == ")" || cur_string == ":";
}
// Classifies a character no other state claimed: whitespace is "fine",
// any non-letter is an "error", and a remaining letter falls through to
// " " (which the caller treats as nothing to do).
string lexi::incorrect(string cur_string) {
if(cur_string == " " || cur_string == "\t") {
return "fine";
}
else if(!(('A' <= cur_string[0] && cur_string[0] <= 'Z') ||
('a' <= cur_string[0] && cur_string[0] <= 'z'))) {
return "error";
}
return " ";
}
// Reads an identifier starting at data[a][b]: accumulates characters until
// the lookahead is a delimiter (note is_char_valid() is true for
// NON-alphanumerics) or the line ends.  The consumed span is blanked in
// `data` so it is not re-scanned.  Returns the identifier text, or " " if
// the loop never runs (scan position already at end of line).
string lexi::id_finder(vector<string> & data, int a, int b) {
string cur_string;
string next_character;
for(unsigned int j = b; j < data[a].size(); j++) {
cur_string += data[a].at(j);
if(j < data[a].size() - 1) {
next_character = data[a].at(j + 1);
}
else {
// "!" is a sentinel meaning "end of line".
next_character = "!";
}
if(is_char_valid(next_character[0]) || next_character == "!") {
// Replace the consumed characters with a single space.
data[a].replace(data[a].begin() + b, data[a].begin() + j + 1, " ");
return cur_string;
}
}
return " ";
}
// Consumes a '#' comment: blanks the rest of the line starting at column b
// and returns the comment text (the caller discards it -- comments emit no
// tokens).
string lexi::comment_finder(vector<string> & data, int i, int b) {
    // BUG FIX: the old loop's condition `(j > data[i].size()) - 1 && ...`
    // mixed a bool into arithmetic, so a comment that ran to the end of
    // the line was never blanked out and its characters were re-scanned as
    // spurious tokens.  A '#' comment always extends to end of line, so we
    // can consume the whole tail directly.
    string comment_text = data[i].substr(b);
    data[i].replace(data[i].begin() + b, data[i].end(), " ");
    return comment_text;
}
// Reads a quoted string literal.  NOTE(review): b is overwritten with the
// position of the FIRST quote on the line, which assumes that quote is the
// one at the scan position -- confirm behavior for lines containing more
// than one string.  On a closing quote the literal (opening quote
// included; the caller strips it) is returned and the consumed span is
// blanked; if the line ends without a closing quote, "error" is returned
// (unterminated string).
string lexi::string_finder(vector<string> & data, int a, int b) {
string cur_string;
string next_character;
b = data[a].find('\'');
for(unsigned int j = b; j < data[a].size(); j++) {
cur_string += data[a].at(j);
if(j < data[a].size() - 1) {
next_character = data[a].at(j + 1);
}
if(next_character == "'") {
// Blank the literal plus its closing quote; the extra inserted space
// keeps subsequent indices in range.
data[a].replace(data[a].begin() + b, data[a].begin() + j + 2, " ");
data[a].insert(data[a].begin() + b, ' ');
return cur_string;
}
}
return "error";
}
// Maps a punctuation character to its token-type name; ':' followed by
// '-' becomes COLON_DASH.  Unknown input yields "".
string lexi::type_simple_caps(string symbol, string next_symbol) {
    if(symbol == ":") {
        return next_symbol == "-" ? "COLON_DASH" : "COLON";
    }
    if(symbol == ",") return "COMMA";
    if(symbol == ".") return "PERIOD";
    if(symbol == "?") return "Q_MARK";
    if(symbol == "(") return "LEFT_PAREN";
    if(symbol == ")") return "RIGHT_PAREN";
    return "";
}
// Returns the literal text of a punctuation token (":-" when ':' is
// followed by '-'); empty string for anything unrecognized.
string lexi::type_simple(string symbol, string next_symbol) {
    if(symbol == ":") {
        return next_symbol == "-" ? ":-" : ":";
    }
    if(symbol == "," || symbol == "." || symbol == "?" ||
       symbol == "(" || symbol == ")") {
        return symbol;
    }
    return "";
}
// Tries to read a section keyword (Schemes/Facts/Rules/Queries) starting
// at data[i][b].  Succeeds only when the accumulated text is a keyword AND
// both the following character and the preceding one are delimiters, i.e.
// the keyword stands alone.  Consumed characters are blanked.  Returns the
// keyword, or "wrong" on failure (caller then retries as an identifier).
string lexi::det_type_simple_string(vector<string> & data, int i, int b) {
string cur_string;
string next_character;
string special_case;
if(b > 0) {
// Character immediately before the candidate keyword.  When b == 0,
// special_case stays empty and special_case[0] reads '\0', which
// is_char_valid() treats as a delimiter.
special_case = data[i].at(b -1);
}
for(unsigned int j = b; j < data[i].size(); j++) {
cur_string += data[i].at(j);
if(j < data[i].size() - 1) {
next_character = data[i].at(j + 1);
}
else {
// "!" is a sentinel meaning "end of line".
next_character = "!";
}
if((is_simple_string(cur_string)) && (is_char_valid(next_character.at(0))) && (is_char_valid(special_case[0]))) {
data[i].replace(data[i].begin() + b, data[i].begin() + j + 1, " ");
return cur_string;
}
}
return "wrong";
}
// Despite the name, returns true when next_character is NOT alphanumeric
// (or is a single quote) -- i.e. it is a delimiter that ends the current
// token.  Callers use it to decide where identifiers and keywords stop.
bool lexi::is_char_valid(char next_character) {
if(!(('A' <= next_character && next_character <= 'Z') ||
('a' <= next_character && next_character <= 'z') ||
('0' <= next_character && next_character <= '9')) || (next_character == '\'')) {
return true;
}
return false;
}
// True when simple_com is one of the four datalog section keywords.
bool lexi::is_simple_string(string simple_com) {
    return simple_com == "Schemes" || simple_com == "Facts" ||
           simple_com == "Rules" || simple_com == "Queries";
}
// Maps a section keyword to its token-type name ("" if not a keyword).
string lexi::type_simple_string(string simple_com) {
    if(simple_com == "Schemes") return "SCHEMES";
    if(simple_com == "Facts")   return "FACTS";
    if(simple_com == "Rules")   return "RULES";
    if(simple_com == "Queries") return "QUERIES";
    return "";
}

44
cs236/lexer/lexi.h Normal file
View File

@ -0,0 +1,44 @@
#ifndef __LEXI_H__
#define __LEXI_H__
#include <iostream>
#include <fstream>
#include <vector>
#include "token.h"
#include <cctype>
#include <string>
#include <sstream>
using namespace std;
// Hand-written lexer for the datalog grammar.  lexical_analyzer() fills
// `tokens` and writes either the token listing or an error message to the
// output file; the remaining members are internal helpers.
class lexi {
public:
lexi(){}
// Tokens produced so far, in source order.
vector<token> tokens;
// Classifies the current character into a scanner state string.
string determiner(string, string);
// Main entry point: tokenizes the input lines and writes the result file.
vector<token> lexical_analyzer(vector<string>, string);
// Punctuation / keyword text and token-type helpers.
string type_simple(string, string);
string type_simple_string(string);
bool is_simple_string(string);
string det_type_simple_string(vector<string>&, int, int);
// True when the char is a token-ending delimiter (NON-alphanumeric).
bool is_char_valid(char);
// Token readers; each blanks what it consumes from the input lines.
string string_finder(vector<string>&, int, int);
string comment_finder(vector<string>&, int, int);
string id_finder(vector<string>&, int, int);
string incorrect(string);
// State-classification helpers used by determiner().
bool det_help_simple(string, string);
bool det_help_simple_string(string, string);
bool quick_help(string, string);
bool det_help_id(string);
string type_simple_caps(string, string);
// Per-state handlers invoked from lexical_analyzer().
bool simple_state(vector<string>&, string, string, string, int, int);
bool simple_state_string(vector<string>&, string, string, string, int, int);
bool simple_id(vector<string>&, string, string, string, int, int);
bool simple_comment(vector<string>&, string, string, string, int, int);
// Write the token listing / an error message to the named file.
void write_to_file(string);
void write_to_file(string, int);
};
#endif

11
cs236/lexer/token.cpp Normal file
View File

@ -0,0 +1,11 @@
#include "token.h"
// Builds a token from its type name, source text, and 1-based line number.
token::token(string type, string character, int line_num) :
type(type), character(character), line_num(line_num) {}
// Formats a token as (TYPE,"text",line) for the lexer's output file.
ostream & operator<<(ostream & os, token tk) {
    os << "(" << tk.type << ",\"" << tk.character << "\"," << tk.line_num << ")";
    return os;
}

16
cs236/lexer/token.h Normal file
View File

@ -0,0 +1,16 @@
#ifndef __TOKEN_H__
#define __TOKEN_H__
#include <iostream>
using namespace std;
// A single lexed token: its type name, its source text, and the 1-based
// line it came from.
class token {
public:
token(string, string, int);
// Token-type name, e.g. "COMMA", "ID", "STRING", "SCHEMES".
string type;
// The literal text of the token (quotes stripped for strings).
string character;
// 1-based source line number.
int line_num;
// Prints as (TYPE,"text",line).
friend ostream & operator<<(ostream & os, token tk);
};
#endif

56
cs236/lexer/util.h Normal file
View File

@ -0,0 +1,56 @@
#ifndef __UTIL_H__
#define __UTIL_H__
#include <vector>
#include <iostream>
#include <fstream>
// Reads file_name and returns its lines in order (an unreadable file
// yields an empty vector).
// BUG FIX: the old `while(!myfile.eof())` loop pushed one spurious empty
// line after the final newline, and never terminated when the open failed
// (a failed stream sets failbit, not eofbit).  Looping on getline() reads
// exactly the lines present.  Names are std::-qualified because this
// header declares no using-directive of its own.
inline std::vector<std::string> open_file(std::string file_name) {
    std::vector<std::string> data;
    std::ifstream myfile(file_name.c_str());
    std::string temp;
    while(std::getline(myfile, temp)) {
        data.push_back(temp);
    }
    return data;
}
// Returns true when `input` names a readable file; otherwise prints a
// diagnostic to stderr and returns false.
// Simplified: the old version wrapped this in a while loop that could
// never iterate twice, and on success called open_file() only to discard
// the result.  Names are std::-qualified because this header declares no
// using-directive of its own.
inline bool get_file_name(std::string input) {
    std::ifstream inputs(input.c_str());
    if(inputs.good()) {
        return true;
    }
    std::cerr << "incorrect file name" << std::endl;
    return false;
}
// Overwrites file_name with `output` followed by a newline.
// Names are std::-qualified because this header declares no
// using-directive of its own (it previously compiled only when included
// after a header providing `using namespace std;`).
inline void write_file(std::string output, std::string file_name) {
    std::ofstream myfile(file_name.c_str());
    myfile << output << "\n";
}
// Writes the lines of `output` to file_name, newline-separated.
// BUG FIX: in the old loop the unconditional write sat OUTSIDE the else
// branch, so every element except the last was written twice.
inline void write_file(std::vector<std::string> output, std::string file_name) {
    std::ofstream myfile(file_name.c_str());
    for(unsigned int i = 0; i < output.size(); i++) {
        myfile << output[i];
        if(i != output.size() - 1) {
            myfile << "\n";
        }
    }
}
#endif

14
cs236/parser/fact.h Normal file
View File

@ -0,0 +1,14 @@
#ifndef __FACT_H__
#define __FACT_H__
#include "predicate.h"
#include <vector>
#include <sstream>
using namespace std;
// Holds the predicates of a datalog Facts section.
// NOTE(review): the parser stores facts in parser::factlist rather than
// through this class -- confirm whether it is still needed.
class fact {
public:
// One predicate per fact, in source order.
vector<predicate> pred_facts;
};
#endif

13
cs236/parser/parameter.h Normal file
View File

@ -0,0 +1,13 @@
#ifndef __PARAMETER_H__
#define __PARAMETER_H__
#include <iostream>
using namespace std;
// One predicate argument: its text and its token type ("STRING" or "ID").
class parameter {
public:
// Literal text of the argument (quotes already stripped for strings).
string param;
// Token type recorded by the parser: "STRING" or "ID".
string type;
};
#endif

200
cs236/parser/parser.cpp Normal file
View File

@ -0,0 +1,200 @@
#include "parser.h"
// Returns the type of the token at the front of the stream.
// BUG FIX: guard against an empty token vector -- the old code indexed
// tokens[0] unconditionally, which is undefined behavior at end of input.
// Throwing a string matches the error convention callers already catch.
string parser::get_token() {
    if(tokens.empty()) {
        throw string("unexpected end of input");
    }
    return tokens[0].type;
}
// Checks the full datalog grammar: Schemes, Facts, Rules, then Queries.
// Throws a string describing the offending token on a syntax error.
void parser::check_datalog() {
    match("SCHEMES");
    match("COLON");
    // The scheme list must contain at least one scheme.
    if(get_token() == "FACTS") {
        error();
    }
    check_schemelist(get_token());
    match("FACTS");
    match("COLON");
    check_factlist(get_token());
    match("RULES");
    match("COLON");
    check_rulelist(get_token());
    match("QUERIES");
    match("COLON");
    check_querylist(get_token());
    // BUG FIX: the old code called out() here and discarded the result;
    // callers invoke out() themselves when they want the report.
}
// Renders the parsed program: counts and listings for schemes, facts,
// rules, and queries, followed by the set of distinct string constants
// (the domain, sorted because it is a std::set).
string parser::out() {
stringstream s;
s << "Success!" << endl;
s << "Schemes(" << schemelist.size() << "):" << endl;
for(unsigned int i = 0; i < schemelist.size(); i++) {
s << " " << schemelist[i].toString();
}
s << "Facts(" << factlist.size() << "):" << endl;
for(unsigned int i = 0; i < factlist.size(); i++) {
// The bool overload of toString renders fact syntax: name('a','b').
s << " " << factlist[i].toString(false);
}
s << "Rules(" << rulelist.size() << "):" << endl;
for(unsigned int i = 0; i < rulelist.size(); i++) {
s << " " << rulelist[i].toString();
}
s << "Queries(" << querylist.size() << "):" << endl;
// The double overload of predicate::toString renders query syntax.
double a = 0;
for(unsigned int i = 0; i < querylist.size(); i++) {
s << " " << querylist[i].toString(a);
}
s << "Domain(" << domain.size() << "):" << endl;
for (auto it=domain.cbegin(); it != domain.cend(); ++it) {
s << " '" << *it << "'" << endl;
}
return s.str();
}
// Consumes schemes until the FACTS keyword begins the next section.
void parser::check_schemelist(string type) {
    if(type == "FACTS") {
        return;
    }
    check_scheme(type);
    check_schemelist(get_token());
}
// A scheme is a single predicate; record it in schemelist.
void parser::check_scheme(string type) {
schemelist.push_back(check_predicate(type));
}
// Consumes facts until the RULES keyword begins the next section
// (the fact list may be empty).
void parser::check_factlist(string type) {
    if(type == "RULES") {
        return;
    }
    check_fact(type);
    check_factlist(get_token());
}
// A fact is a predicate terminated by a period; record it in factlist.
void parser::check_fact(string type) {
factlist.push_back(check_predicate(type));
match("PERIOD");
}
// Consumes rules until the QUERIES keyword begins the next section
// (the rule list may be empty).
void parser::check_rulelist(string type) {
    if(type == "QUERIES") {
        return;
    }
    check_rule(type);
    check_rulelist(get_token());
}
// rule -> predicate :- predicate-list .
void parser::check_rule(string type) {
rule r;
r.head = check_predicate(type);
match("COLON_DASH");
check_predicate_list(get_token(), r);
match("PERIOD");
rulelist.push_back(r);
}
// Consumes one query, then recurses until the token stream is exhausted
// (queries are the final section, and at least one query is required).
void parser::check_querylist(string type) {
check_query(type);
if(tokens.empty()) {
return;
}
else {
check_querylist(get_token());
}
}
// A query is a predicate terminated by a question mark.
void parser::check_query(string type) {
querylist.push_back(check_predicate(type));
match("Q_MARK");
}
// Parses a comma-separated list of body predicates into r.pred_rule.
// (Iterative form of the original tail recursion.)
void parser::check_predicate_list(string type, rule& r) {
    r.pred_rule.push_back(check_predicate(type));
    while(get_token() == "COMMA") {
        match("COMMA");
        r.pred_rule.push_back(check_predicate(get_token()));
    }
}
// predicate -> ID ( parameter-list ) -- an empty parameter list is a
// syntax error.  Returns the parsed predicate.
predicate parser::check_predicate(string type) {
predicate pred;
// Capture the name before match() consumes the ID token.
pred.id = tokens[0].character;
match("ID");
match("LEFT_PAREN");
if(get_token() == "RIGHT_PAREN") {
error();
}
check_parameterlist(get_token(), pred);
match("RIGHT_PAREN");
return pred;
}
// Parses a comma-separated parameter list into pred; a comma directly
// followed by ')' (trailing comma) is a syntax error.
void parser::check_parameterlist(string type, predicate& pred) {
if(type == "RIGHT_PAREN") {
return;
}
else {
check_parameter(type, pred);
if(get_token() == "COMMA") {
match("COMMA");
if(get_token() == "RIGHT_PAREN") {
error();
}
check_parameterlist(get_token(), pred);
}
else {
return;
}
}
}
// Parses one parameter (STRING or ID) into pred.  String constants are
// also added to the parser's value domain.  Anything else is a syntax
// error (error() throws).
void parser::check_parameter(string type, predicate& pred) {
    if(type != "STRING" && type != "ID") {
        error();
        return;
    }
    parameter para;
    para.param = tokens[0].character;
    para.type = tokens[0].type;
    if(type == "STRING") {
        domain.insert(tokens[0].character);
    }
    pred.pred_list.push_back(para);
    match(type);
}
// Consumes the front token when it has the expected type; otherwise
// reports a syntax error.
// BUG FIX: check for end of input BEFORE reading tokens[0].  The old code
// called get_token() (which indexes tokens[0]) first, so running out of
// tokens was undefined behavior, and its subsequent empty-check could
// never fire usefully.
void parser::match(string type) {
    if(tokens.empty()) {
        throw string("unexpected end of input");
    }
    if(tokens[0].type == type) {
        tokens.erase(tokens.begin());
    }
    else {
        error();
    }
}
// Throws a string describing the offending token (callers catch string).
// BUG FIX: guard against an empty token vector before reading tokens[0].
void parser::error() {
    stringstream oss;
    if(tokens.empty()) {
        oss << "unexpected end of input" << endl;
    }
    else {
        oss << tokens[0] << endl;
    }
    throw oss.str();
}

49
cs236/parser/parser.h Normal file
View File

@ -0,0 +1,49 @@
#ifndef __PARSER_H__
#define __PARSER_H__
#include <iostream>
#include <sstream>
#include <vector>
#include <set>
#include "../lexer/token.h"
#include "scheme.h"
#include "fact.h"
#include "rule.h"
#include "query.h"
#include "predicate.h"
#include "parameter.h"
using namespace std;
// Recursive-descent parser for the datalog grammar.  The caller assigns
// `tokens` and invokes check_datalog(), which throws a string on a syntax
// error; out() renders the parsed program as text.
class parser {
public:
parser() {}
// Token stream; consumed from the front by match().
vector<token> tokens;
// All distinct string constants seen while parsing parameters.
set<string> domain;
// Parsed sections, in source order.
vector<predicate> schemelist;
vector<predicate> factlist;
vector<predicate> querylist;
vector<predicate> predlist;
vector<rule> rulelist;
// Type name of the front token.
string get_token();
// Entry point; throws a string describing the offending token.
void check_datalog();
// One check_* member per grammar production.
void check_schemelist(string);
void check_scheme(string);
void check_factlist(string);
void check_fact(string);
void check_rulelist(string);
void check_rule(string);
void check_querylist(string);
void check_query(string);
void check_predicate_list(string, rule&);
predicate check_predicate(string);
void check_parameterlist(string type, predicate&);
void check_parameter(string, predicate&);
// Consumes a token of the given type or reports a syntax error.
void match(string);
// Throws a string naming the offending token.
void error();
// Renders the parsed program plus the string domain.
string out();
};
#endif

83
cs236/parser/predicate.h Normal file
View File

@ -0,0 +1,83 @@
#ifndef __PREDICATE_H__
#define __PREDICATE_H__
#include "parameter.h"
#include <vector>
#include <iostream>
using namespace std;
// A parsed predicate: a name and its parameter list.  The four toString
// overloads render the same data in the four section syntaxes; the dummy
// parameter type (none/bool/double/int) selects the format.
// NOTE(review): these methods use stringstream but this header does not
// include <sstream>; it compiles only because another header (e.g. fact.h
// or parser.h) is included first -- confirm and add the include.
class predicate {
public:
// Predicate name (the ID token's text).
string id;
// Arguments in source order.
vector<parameter> pred_list;
string toString() {
//schemes
stringstream s;
s << id << "(";
for(unsigned int i = 0; i < pred_list.size(); i++) {
s << pred_list[i].param;
if(i < pred_list.size()-1) {
s << ",";
}
}
s << ")\n";
return s.str();
}
string toString(bool a) {
//facts
stringstream s;
s << id << "(";
for(unsigned int i = 0; i < pred_list.size(); i++) {
// Fact arguments are string constants; re-add the quotes.
s << "'" << pred_list[i].param << "'";
if(i < pred_list.size()-1) {
s << ",";
}
}
s << ").\n";
return s.str();
}
string toString(double a) {
//query
stringstream s;
s << id << "(";
for(unsigned int i = 0; i < pred_list.size(); i++) {
// Quote string constants; identifiers are printed bare.
if(pred_list[i].type == "STRING") {
s << "'" << pred_list[i].param << "'";
if(i < pred_list.size()-1) {
s << ",";
}
}
if(pred_list[i].type == "ID") {
s << pred_list[i].param;
if(i < pred_list.size()-1) {
s << ",";
}
}
}
s << ")?\n";
return s.str();
}
string toString(int a) {
//rules
stringstream s;
s << id << "(";
for(unsigned int i = 0; i < pred_list.size(); i++) {
if(pred_list[i].type == "STRING") {
s << "'" << pred_list[i].param << "'";
if(i < pred_list.size()-1) {
s << ",";
}
}
if(pred_list[i].type == "ID") {
s << pred_list[i].param;
if(i < pred_list.size()-1) {
s << ",";
}
}
}
// No terminator here: rule::toString composes head and body pieces.
s << ")";
return s.str();
}
};
#endif

14
cs236/parser/query.h Normal file
View File

@ -0,0 +1,14 @@
#ifndef __QUERY_H__
#define __QUERY_H__
#include "predicate.h"
#include <vector>
#include <iostream>
using namespace std;
// Holds the predicates of a datalog Queries section.
// NOTE(review): the parser stores queries in parser::querylist rather
// than through this class -- confirm whether it is still needed.
class query {
public:
// One predicate per query, in source order.
vector<predicate> pred_queries;
};
#endif

27
cs236/parser/rule.h Normal file
View File

@ -0,0 +1,27 @@
#ifndef __RULE_H__
#define __RULE_H__
#include "predicate.h"
#include <vector>
#include <iostream>
using namespace std;
// A datalog rule: a head predicate and the comma-separated body
// predicates it is derived from.
// NOTE(review): toString() uses stringstream but this header does not
// include <sstream>; it compiles only via another header's include --
// confirm and add the include.
class rule {
public:
// The derived predicate (left of ":-").
predicate head;
// Body predicates (right of ":-"), in source order.
vector<predicate> pred_rule;
// Renders the rule as head(args) :- body1(args),body2(args).
string toString() {
stringstream s;
s << head.toString(1) << " :- ";
for(unsigned int i = 0; i < pred_rule.size(); i++) {
s << pred_rule[i].toString(1);
if(i < pred_rule.size()-1) {
s << ",";
}
}
s << ".\n";
return s.str();
}
};
#endif

14
cs236/parser/scheme.h Normal file
View File

@ -0,0 +1,14 @@
#ifndef __SCHEME_H__
#define __SCHEME_H__
#include "predicate.h"
#include <vector>
#include <iostream>
using namespace std;
// Holds the predicates of a datalog Schemes section.
// NOTE(review): the parser stores schemes in parser::schemelist rather
// than through this class -- confirm whether it is still needed.
class scheme {
public:
// One predicate per scheme, in source order.
vector<predicate> pred_schemes;
};
#endif

10
cs236/rdbms/Tuple.h Normal file
View File

@ -0,0 +1,10 @@
#ifndef __TUPLE_H__
#define __TUPLE_H__
#include <vector>
#include <string>
// A database tuple: an ordered list of attribute values, one per column.
// BUG FIX: qualified the std:: names -- this header declares no
// `using namespace std;` and previously compiled only when included after
// headers that happened to provide one.
class Tuple: public std::vector<std::string> {
};
#endif

45
cs236/rdbms/db.h Normal file
View File

@ -0,0 +1,45 @@
#ifndef __DB_H__
#define __DB_H__
#include "parser/parser.h"
#include "relation.h"
class db {
public:
db(parser incoming) {
p = incoming;
setup();
}
parser p;
vector<relation> relations;
void setup() {
for(unsigned int i = 0; i < p.schemelist.size(); i++) {
relation r;
r.name = p.schemelist[i].id;
for(unsigned int j = 0; j < p.schemelist[i].pred_list.size(); j++) {
r.schemas.s.push_back(p.schemelist[i].pred_list[j].param);
}
for(unsigned int k = 0; k < p.factlist.size(); k++) {
if(r.name == p.factlist[k].id) {
Tuple t;
for(unsigned int l = 0; l < p.factlist[k].pred_list.size(); k++) {
t.push_back(p.factlist[k].pred_list[l].param);
}
r.tuples.insert(t);
}
}
}
}
//set<string> domain;
//vector<predicate> schemelist;
//vector<predicate> factlist;
//vector<predicate> querylist;
//vector<predicate> predlist;
//vector<rule> rulelist;
};
#endif

15
cs236/rdbms/relation.h Normal file
View File

@ -0,0 +1,15 @@
#ifndef __RELATION_H__
#define __RELATION_H__
#include "Tuple.h"
#include "schema.h"
#include <set>
// A named relation: its schema (attribute names) and its set of tuples.
class relation {
public:
// Relation name (the scheme's predicate name).
string name;
// Attribute names, in scheme order.
schema schemas;
// Distinct rows; set ordering comes from vector<string> comparison.
set<Tuple> tuples;
};
#endif

11
cs236/rdbms/schema.h Normal file
View File

@ -0,0 +1,11 @@
#ifndef __SCHEMA_H__
#define __SCHEMA_H__
#include "Tuple.h"
// A relation's schema: the ordered list of attribute names, reusing the
// Tuple (vector-of-string) representation.
class schema {
public:
// Attribute names, in scheme order.
Tuple s;
};
#endif

View File

@ -0,0 +1,14 @@
#ifndef __FACT_H__
#define __FACT_H__
#include "predicate.h"
#include <vector>
#include <sstream>
using namespace std;
// Holds the predicates of a datalog Facts section.
// NOTE(review): the parser stores facts in parser::factlist rather than
// through this class -- confirm whether it is still needed.
class fact {
public:
// One predicate per fact, in source order.
vector<predicate> pred_facts;
};
#endif

View File

@ -0,0 +1,30 @@
#include <vector>
#include "lexi.h"
#include "util.h"
#include "token.h"
#include "parser.h"
const string usage = "usage: app <input> <output>";
int main(int argc, char* argv[]) {
if(argc != 3) {
cerr << usage << endl;
return 1;
}
get_file_name(argv[1]);
vector<string> data = open_file(argv[1]);
lexi l;
string temp = argv[2];
vector<token> s = l.lexical_analyzer(data, temp);
parser p;
p.tokens = s;
try {
p.check_datalog();
string out = p.out();
write_file(out, argv[2]);
} catch(string str) {
stringstream s;
s << "Failure!\n " << str;
write_file(s.str(), argv[2]);
}
}

View File

@ -0,0 +1,361 @@
#include "lexi.h"
vector<token> lexi::lexical_analyzer(vector<string> data, string file_name) {
string cur_string;
string next_character;
for(unsigned int i = 0; i < data.size(); i++) {
for(unsigned int j = 0; j < data[i].size(); j ++) {
cur_string = data[i].at(j);
if(j < data[i].size() - 1) {
next_character = data[i].at(j + 1);
}
else {
next_character = "";
}
string state = determiner(cur_string, next_character);
simple_state(data, state, cur_string, next_character, i, j);
simple_state_string(data, state, cur_string, next_character, i, j);
if(state == "start of string") {
string token_symbol = string_finder(data, i, j);
if(token_symbol != "error") {
token_symbol.erase(0,1);
token t("STRING", token_symbol, i + 1);
tokens.push_back(t);
}
else {
write_to_file(file_name, i + 1);
return tokens;
}
}
simple_comment(data, state, cur_string, next_character, i, j);
simple_id(data, state, cur_string, next_character, i, j);
if(state == "error") {
write_to_file(file_name, i + 1);
return tokens;
}
}
}
write_to_file(file_name);
return tokens;
}
void lexi::write_to_file(string file_name, int line) {
ofstream myfile;
myfile.open(file_name.c_str());
myfile << "Error on line " << line << endl;
myfile.close();
}
// Writes every token (one per line) followed by a total count to
// file_name, replacing any existing contents.
void lexi::write_to_file(string file_name) {
    ofstream myfile;
    myfile.open(file_name.c_str());
    // BUG FIX: the old loop guarded each write with `i < tokens.size()`,
    // which is always true inside the loop, so its no-newline else branch
    // was unreachable dead code.  Every token gets its own line.
    for(unsigned int i = 0; i < tokens.size(); i++) {
        myfile << tokens[i] << endl;
    }
    myfile << "Total Tokens = " << tokens.size();
    myfile << endl;
    myfile.close();
}
bool lexi::simple_comment(vector<string> & data, string state, string cur_string, string next_character, int i, int j) {
if(state == "comment") {
string token_symbol = comment_finder(data, i, j);
}
return true;
}
bool lexi::simple_id(vector<string> & data, string state, string cur_string, string next_character, int i, int j) {
if(state == "id") {
string token_symbol = id_finder(data, i, j);
if(token_symbol != "error") {
token t("ID", token_symbol, i + 1);
tokens.push_back(t);
}
}
return true;
}
bool lexi::simple_state_string(vector<string> & data, string state, string cur_string, string next_character, int i, int j) {
if(state == "simple_string") {
string token_symbol = det_type_simple_string(data, i, j);
if(token_symbol != "wrong") {
string token_type = type_simple_string(token_symbol);
token t(token_type, token_symbol, i + 1);
tokens.push_back(t);
}
else {
string token_symbol = id_finder(data, i, j);
if(token_symbol != "error") {
token t("ID", token_symbol, i + 1);
tokens.push_back(t);
}
}
}
return true;
}
bool lexi::simple_state(vector<string> & data, string state, string cur_string, string next_character, int i, int j) {
if(state == "simple") {
string token_symbol = type_simple(cur_string, next_character);
if(next_character == "-") {
data[i].replace(j,2, " ");
}
else {
data[i].replace(j,1, " ");
}
string token_id = type_simple_caps(cur_string, next_character);
token t(token_id, token_symbol, i + 1);
tokens.push_back(t);
}
return true;
}
string lexi::determiner(string cur_string, string next_character) {
if(det_help_simple(cur_string, next_character)) {
return "simple";
}
else if(det_help_simple_string(cur_string, next_character)) {
return "simple_string";
}
else if(det_help_id(cur_string)) {
return "id";
}
else if(cur_string == "'") {
return "start of string";
}
else if(cur_string == "#") {
return "comment";
}
else {
string temp = incorrect(cur_string);
return temp;
}
return "";
}
bool lexi::det_help_id(string cur_string) {
if(('A' <= cur_string[0] && cur_string[0] <= 'Z') ||
('a' <= cur_string[0] && cur_string[0] <= 'z')) {
return true;
}
return false;
}
bool lexi::quick_help(string a, string b) {
if(a == "S" && b == "c") {
return true;
}
return false;
}
bool lexi::det_help_simple_string(string cur_string, string next_character) {
if(quick_help(cur_string, next_character)) {
return true;
}
else if((cur_string == "Q" && next_character == "u")) {
return true;
}
else if((cur_string == "R" && next_character == "u")) {
return true;
}
else if((cur_string == "F" && next_character == "a")) {
return true;
}
return false;
}
// True when cur_string is one of the single-character punctuation tokens.
bool lexi::det_help_simple(string cur_string, string next_character) {
    // BUG FIX: the old code also called type_simple() here and threw the
    // result away; classification needs no side effects.
    return cur_string == "," || cur_string == "." || cur_string == "?" ||
           cur_string == "(" || cur_string == ")" || cur_string == ":";
}
string lexi::incorrect(string cur_string) {
if(cur_string == " " || cur_string == "\t") {
return "fine";
}
else if(!(('A' <= cur_string[0] && cur_string[0] <= 'Z') ||
('a' <= cur_string[0] && cur_string[0] <= 'z'))) {
return "error";
}
return " ";
}
string lexi::id_finder(vector<string> & data, int a, int b) {
string cur_string;
string next_character;
for(unsigned int j = b; j < data[a].size(); j++) {
cur_string += data[a].at(j);
if(j < data[a].size() - 1) {
next_character = data[a].at(j + 1);
}
else {
next_character = "!";
}
if(is_char_valid(next_character[0]) || next_character == "!") {
data[a].replace(data[a].begin() + b, data[a].begin() + j + 1, " ");
return cur_string;
}
}
return " ";
}
// Consumes a '#' comment: blanks the rest of the line starting at column b
// and returns the comment text (the caller discards it -- comments emit no
// tokens).
string lexi::comment_finder(vector<string> & data, int i, int b) {
    // BUG FIX: the old loop's condition `(j > data[i].size()) - 1 && ...`
    // mixed a bool into arithmetic, so a comment that ran to the end of
    // the line was never blanked out and its characters were re-scanned as
    // spurious tokens.  A '#' comment always extends to end of line, so we
    // can consume the whole tail directly.
    string comment_text = data[i].substr(b);
    data[i].replace(data[i].begin() + b, data[i].end(), " ");
    return comment_text;
}
}
string lexi::string_finder(vector<string> & data, int a, int b) {
string cur_string;
string next_character;
b = data[a].find('\'');
for(unsigned int j = b; j < data[a].size(); j++) {
cur_string += data[a].at(j);
if(j < data[a].size() - 1) {
next_character = data[a].at(j + 1);
}
if(next_character == "'") {
data[a].replace(data[a].begin() + b, data[a].begin() + j + 2, " ");
data[a].insert(data[a].begin() + b, ' ');
return cur_string;
}
}
return "error";
}
string lexi::type_simple_caps(string symbol, string next_symbol) {
if(symbol == ",") {
return "COMMA";
}
else if(symbol == ".") {
return "PERIOD";
}
else if(symbol == "?") {
return "Q_MARK";
}
else if(symbol == "(") {
return "LEFT_PAREN";
}
else if(symbol == ")") {
return "RIGHT_PAREN";
}
else if(symbol == ":") {
if(next_symbol == "-") {
return "COLON_DASH";
}
return "COLON";
}
return "";
}
string lexi::type_simple(string symbol, string next_symbol) {
if(symbol == ",") {
return ",";
}
else if(symbol == ".") {
return ".";
}
else if(symbol == "?") {
return "?";
}
else if(symbol == "(") {
return "(";
}
else if(symbol == ")") {
return ")";
}
else if(symbol == ":") {
if(next_symbol == "-") {
return ":-";
}
return ":";
}
return "";
}
string lexi::det_type_simple_string(vector<string> & data, int i, int b) {
string cur_string;
string next_character;
string special_case;
if(b > 0) {
special_case = data[i].at(b -1);
}
for(unsigned int j = b; j < data[i].size(); j++) {
cur_string += data[i].at(j);
if(j < data[i].size() - 1) {
next_character = data[i].at(j + 1);
}
else {
next_character = "!";
}
if((is_simple_string(cur_string)) && (is_char_valid(next_character.at(0))) && (is_char_valid(special_case[0]))) {
data[i].replace(data[i].begin() + b, data[i].begin() + j + 1, " ");
return cur_string;
}
}
return "wrong";
}
// Returns true when `next_character` is NOT an identifier character
// (ASCII letter or digit), i.e. it may legally delimit a keyword or
// identifier.  The old form also OR-ed in a special case for '\'' --
// redundant, since a quote is already non-alphanumeric, so this is the
// same predicate written without the dead clause.
bool lexi::is_char_valid(char next_character) {
	bool is_ident_char = ('A' <= next_character && next_character <= 'Z') ||
	                     ('a' <= next_character && next_character <= 'z') ||
	                     ('0' <= next_character && next_character <= '9');
	return !is_ident_char;
}
// True iff `simple_com` is one of the four Datalog section keywords.
bool lexi::is_simple_string(string simple_com) {
	return simple_com == "Schemes"
	    || simple_com == "Facts"
	    || simple_com == "Rules"
	    || simple_com == "Queries";
}
// Maps a section keyword to its token-type name; "" for anything else.
string lexi::type_simple_string(string simple_com) {
	if(simple_com == "Schemes") {
		return "SCHEMES";
	}
	if(simple_com == "Facts") {
		return "FACTS";
	}
	if(simple_com == "Rules") {
		return "RULES";
	}
	if(simple_com == "Queries") {
		return "QUERIES";
	}
	return "";
}

View File

@ -0,0 +1,44 @@
#ifndef __LEXI_H__
#define __LEXI_H__
#include <iostream>
#include <fstream>
#include <vector>
#include "token.h"
#include <cctype>
#include <string>
#include <sstream>
using namespace std;
// Hand-rolled lexical analyzer for the Datalog grammar.  The *_finder /
// det_* helpers consume characters directly out of the caller's vector of
// input lines, blanking out what they recognize as they go; recognized
// lexemes accumulate in `tokens`.
class lexi {
public:
lexi(){}
// Tokens produced so far, in source order.
vector<token> tokens;
// Classifies a lexeme given the following character (lookahead).
string determiner(string, string);
// Main entry point: scans the input lines and returns the token stream.
vector<token> lexical_analyzer(vector<string>, string);
// Punctuation helpers: literal spelling vs UPPER_CASE token-type name.
string type_simple(string, string);
string type_simple_string(string);
bool is_simple_string(string);
string det_type_simple_string(vector<string>&, int, int);
// True when a char may delimit an identifier (i.e. is not alphanumeric).
bool is_char_valid(char);
// Extractors: each consumes its lexeme out of the input line in place.
string string_finder(vector<string>&, int, int);
string comment_finder(vector<string>&, int, int);
string id_finder(vector<string>&, int, int);
string incorrect(string);
// State-machine helpers used by the analyzer's dispatch loop.
bool det_help_simple(string, string);
bool det_help_simple_string(string, string);
bool quick_help(string, string);
bool det_help_id(string);
string type_simple_caps(string, string);
bool simple_state(vector<string>&, string, string, string, int, int);
bool simple_state_string(vector<string>&, string, string, string, int, int);
bool simple_id(vector<string>&, string, string, string, int, int);
bool simple_comment(vector<string>&, string, string, string, int, int);
// Output: dumps the token stream to a file (overload adds token count).
void write_to_file(string);
void write_to_file(string, int);
};
#endif

View File

@ -0,0 +1,13 @@
#ifndef __PARAMETER_H__
#define __PARAMETER_H__
#include <iostream>
using namespace std;
// A single argument of a predicate: either a quoted string constant or an
// identifier (variable).
class parameter {
public:
// Literal text of the parameter (without surrounding quotes).
string param;
// Token type of the parameter: "STRING" or "ID".
string type;
};
#endif

View File

@ -0,0 +1,200 @@
#include "parser.h"
// Peeks at the type of the token at the front of the stream (does not
// pop).  Precondition: `tokens` is non-empty.
string parser::get_token() {
	return tokens.front().type;
}
// Entry point of the recursive-descent parser:
//   <datalog> -> Schemes: <scheme-list> Facts: <fact-list>
//                Rules: <rule-list> Queries: <query-list>
// Throws (via error()/match()) on the first token that violates the grammar.
void parser::check_datalog() {
match("SCHEMES");
match("COLON");
// The scheme list may not be empty: seeing FACTS right away is an error.
if(get_token() == "FACTS") {
error();
}
check_schemelist(get_token());
match("FACTS");
match("COLON");
check_factlist(get_token());
match("RULES");
match("COLON");
check_rulelist(get_token());
match("QUERIES");
match("COLON");
check_querylist(get_token());
// NOTE(review): out()'s return value is discarded here -- presumably the
// caller invokes out() itself for the report; confirm this call is needed.
out();
}
// Formats the successful-parse report: counts and listings of schemes,
// facts, rules, queries, and the sorted domain of string constants.
// The layout (two-space indent, section headers) must match the course's
// expected-output files exactly.
string parser::out() {
stringstream s;
s << "Success!" << endl;
s << "Schemes(" << schemelist.size() << "):" << endl;
for(unsigned int i = 0; i < schemelist.size(); i++) {
s << "  " << schemelist[i].toString();
}
s << "Facts(" << factlist.size() << "):" << endl;
for(unsigned int i = 0; i < factlist.size(); i++) {
// bool argument selects predicate's "fact" formatting overload.
s << "  " << factlist[i].toString(false);
}
s << "Rules(" << rulelist.size() << "):" << endl;
for(unsigned int i = 0; i < rulelist.size(); i++) {
s << "  " << rulelist[i].toString();
}
s << "Queries(" << querylist.size() << "):" << endl;
// Dummy double: selects predicate's "query" formatting overload.
double a = 0;
for(unsigned int i = 0; i < querylist.size(); i++) {
s << "  " << querylist[i].toString(a);
}
s << "Domain(" << domain.size() << "):" << endl;
// std::set iterates in sorted order, which the expected output requires.
for (auto it=domain.cbegin(); it != domain.cend(); ++it) {
s << "  '" << *it << "'" << endl;
}
return s.str();
}
// <scheme-list> -> <scheme> <scheme-list> | lambda
// The FACTS keyword terminates the list.
void parser::check_schemelist(string type) {
	if(type != "FACTS") {
		check_scheme(type);
		check_schemelist(get_token());
	}
}
void parser::check_scheme(string type) {
schemelist.push_back(check_predicate(type));
}
// <fact-list> -> <fact> <fact-list> | lambda
// The RULES keyword terminates the list.
void parser::check_factlist(string type) {
	if(type != "RULES") {
		check_fact(type);
		check_factlist(get_token());
	}
}
void parser::check_fact(string type) {
factlist.push_back(check_predicate(type));
match("PERIOD");
}
// <rule-list> -> <rule> <rule-list> | lambda
// The QUERIES keyword terminates the list.
void parser::check_rulelist(string type) {
	if(type != "QUERIES") {
		check_rule(type);
		check_rulelist(get_token());
	}
}
void parser::check_rule(string type) {
rule r;
r.head = check_predicate(type);
match("COLON_DASH");
check_predicate_list(get_token(), r);
match("PERIOD");
rulelist.push_back(r);
}
// <query-list> -> <query> <query-list> | <query>
// At least one query is required; the list runs until the token stream
// is exhausted.
void parser::check_querylist(string type) {
	check_query(type);
	if(!tokens.empty()) {
		check_querylist(get_token());
	}
}
void parser::check_query(string type) {
querylist.push_back(check_predicate(type));
match("Q_MARK");
}
// <predicate-list> -> <predicate> | <predicate> , <predicate-list>
// Each parsed predicate is appended to the rule body r.pred_rule.
void parser::check_predicate_list(string type, rule& r) {
	r.pred_rule.push_back(check_predicate(type));
	if(get_token() != "COMMA") {
		return;
	}
	match("COMMA");
	check_predicate_list(get_token(), r);
}
// <predicate> -> ID ( <parameter-list> )
// The parameter list may not be empty: "id()" is rejected.
predicate parser::check_predicate(string type) {
	predicate parsed;
	parsed.id = tokens[0].character;
	match("ID");
	match("LEFT_PAREN");
	if(get_token() == "RIGHT_PAREN") {
		error();
	}
	check_parameterlist(get_token(), parsed);
	match("RIGHT_PAREN");
	return parsed;
}
// <parameter-list> -> <parameter> | <parameter> , <parameter-list> | lambda
// A comma must be followed by another parameter (no trailing comma).
void parser::check_parameterlist(string type, predicate& pred) {
	if(type == "RIGHT_PAREN") {
		return;
	}
	check_parameter(type, pred);
	if(get_token() != "COMMA") {
		return;
	}
	match("COMMA");
	if(get_token() == "RIGHT_PAREN") {
		error();
	}
	check_parameterlist(get_token(), pred);
}
// <parameter> -> STRING | ID
// STRING constants are additionally collected into the domain set.
void parser::check_parameter(string type, predicate& pred) {
	if(type != "STRING" && type != "ID") {
		error();
		return;
	}
	parameter para;
	para.param = tokens[0].character;
	para.type = tokens[0].type;
	if(type == "STRING") {
		domain.insert(para.param);
	}
	pred.pred_list.push_back(para);
	match(type);
}
// Consumes the front token when its type equals `type`; otherwise reports
// a syntax error.  Throws a string describing the failure.
// Fix: the old code called get_token() -- which indexes tokens[0] --
// BEFORE checking for an empty stream, which is undefined behavior when
// the input runs out mid-production (the emptiness check it did have sat
// unreachable inside the matched branch).
void parser::match(string type) {
	if(tokens.empty()) {
		// Can't use error() here: it formats tokens[0].
		throw string("unexpected end of input\n");
	}
	if(get_token() != type) {
		error();
	}
	tokens.erase(tokens.begin());
}
// Aborts the parse by throwing a string rendering of the offending token
// (callers print it after a "Failure!" header).
// Fix: guard the tokens[0] access -- the old version indexed an empty
// vector (undefined behavior) when the error fired after the last token.
void parser::error() {
	stringstream oss;
	if(tokens.empty()) {
		oss << "unexpected end of input" << endl;
	}
	else {
		oss << tokens[0] << endl;
	}
	throw oss.str();
}

View File

@ -0,0 +1,49 @@
#ifndef __PARSER_H__
#define __PARSER_H__
#include <iostream>
#include <sstream>
#include <vector>
#include <set>
#include "token.h"
#include "scheme.h"
#include "fact.h"
#include "rule.h"
#include "query.h"
#include "predicate.h"
#include "parameter.h"
using namespace std;
// Recursive-descent parser for the Datalog grammar.  Consumes `tokens`
// front-to-back (match() pops), filling the scheme/fact/rule/query lists
// and the domain of string constants; errors are thrown as strings.
class parser {
public:
parser() {}
// Input token stream; the parse destroys it as it goes.
vector<token> tokens;
// Sorted, de-duplicated set of every STRING constant seen in a fact/query.
set<string> domain;
vector<predicate> schemelist;
vector<predicate> factlist;
vector<predicate> querylist;
// NOTE(review): predlist appears unused by the visible .cpp -- verify.
vector<predicate> predlist;
vector<rule> rulelist;
// Peek at the type of the front token (precondition: non-empty stream).
string get_token();
// Entry point; one check_* method per grammar production.
void check_datalog();
void check_schemelist(string);
void check_scheme(string);
void check_factlist(string);
void check_fact(string);
void check_rulelist(string);
void check_rule(string);
void check_querylist(string);
void check_query(string);
void check_predicate_list(string, rule&);
predicate check_predicate(string);
void check_parameterlist(string type, predicate&);
void check_parameter(string, predicate&);
// Pops the front token if its type matches, else throws via error().
void match(string);
// Throws a string rendering of the offending token.
void error();
// Formats the successful-parse report.
string out();
};
#endif

View File

@ -0,0 +1,83 @@
#ifndef __PREDICATE_H__
#define __PREDICATE_H__
#include "parameter.h"
#include <vector>
#include <iostream>
using namespace std;
// A predicate: an identifier plus a parameter list.  The same class is
// reused for schemes, facts, rules and queries; the dummy bool/double/int
// arguments on toString exist only to select the surface form.
// Fix: toString(double) and toString(int) shared a verbatim-duplicated
// body; it is now factored into the private helper typed_body().
class predicate {
public:
	string id;
	vector<parameter> pred_list;

	// Scheme form: id(A,B)\n -- parameters printed bare.
	string toString() {
		stringstream s;
		s << id << "(";
		for(unsigned int i = 0; i < pred_list.size(); i++) {
			s << pred_list[i].param;
			if(i < pred_list.size()-1) {
				s << ",";
			}
		}
		s << ")\n";
		return s.str();
	}
	// Fact form: id('a','b').\n -- every parameter quoted.
	string toString(bool a) {
		stringstream s;
		s << id << "(";
		for(unsigned int i = 0; i < pred_list.size(); i++) {
			s << "'" << pred_list[i].param << "'";
			if(i < pred_list.size()-1) {
				s << ",";
			}
		}
		s << ").\n";
		return s.str();
	}
	// Query form: id('a',B)?\n -- STRINGs quoted, IDs bare.
	string toString(double a) {
		return typed_body() + ")?\n";
	}
	// Rule-component form: id('a',B) -- no terminator, no newline.
	string toString(int a) {
		return typed_body() + ")";
	}
private:
	// Shared query/rule body: "id(" plus the parameters, quoting STRING
	// parameters and printing ID parameters bare.  A parameter of any
	// other type prints nothing -- not even its comma -- exactly as the
	// duplicated originals behaved.
	string typed_body() {
		stringstream s;
		s << id << "(";
		for(unsigned int i = 0; i < pred_list.size(); i++) {
			if(pred_list[i].type == "STRING") {
				s << "'" << pred_list[i].param << "'";
			}
			else if(pred_list[i].type == "ID") {
				s << pred_list[i].param;
			}
			else {
				continue;
			}
			if(i < pred_list.size()-1) {
				s << ",";
			}
		}
		return s.str();
	}
};
#endif

View File

@ -0,0 +1,14 @@
#ifndef __QUERY_H__
#define __QUERY_H__
#include "predicate.h"
#include <vector>
#include <iostream>
using namespace std;
// Container for parsed queries.
// NOTE(review): the visible parser code stores queries in
// parser::querylist (vector<predicate>) instead -- this type may be
// vestigial; verify before extending it.
class query {
public:
vector<predicate> pred_queries;
};
#endif

View File

@ -0,0 +1,27 @@
#ifndef __RULE_H__
#define __RULE_H__
#include "predicate.h"
#include <vector>
#include <iostream>
using namespace std;
// A Datalog rule: a head predicate and the list of body predicates.
class rule {
public:
	predicate head;
	vector<predicate> pred_rule;
	// Renders as: head :- p1,p2,...,pn.\n
	// (each predicate in rule form -- the int overload -- no terminator).
	string toString() {
		stringstream out;
		out << head.toString(1) << " :- ";
		for(unsigned int i = 0; i < pred_rule.size(); i++) {
			if(i != 0) {
				out << ",";
			}
			out << pred_rule[i].toString(1);
		}
		out << ".\n";
		return out.str();
	}
};
#endif

View File

@ -0,0 +1,14 @@
#ifndef __SCHEME_H__
#define __SCHEME_H__
#include "predicate.h"
#include <vector>
#include <iostream>
using namespace std;
// Container for parsed schemes.
// NOTE(review): the visible parser code stores schemes in
// parser::schemelist (vector<predicate>) instead -- this type may be
// vestigial; verify before extending it.
class scheme {
public:
vector<predicate> pred_schemes;
};
#endif

View File

@ -0,0 +1,11 @@
#include "token.h"
// Builds a token from its type name (e.g. "ID"), the matched source text,
// and the line it was found on.
token::token(string type, string character, int line_num) :
type(type), character(character), line_num(line_num) {}
// Prints a token as (TYPE,"text",line) -- the format the expected-output
// files use for failure reports.
ostream & operator<<(ostream & os, token tk) {
	os << "(" << tk.type << ",\"" << tk.character << "\"," << tk.line_num << ")";
	return os;
}

View File

@ -0,0 +1,16 @@
#ifndef __TOKEN_H__
#define __TOKEN_H__
#include <iostream>
using namespace std;
// One lexeme produced by the lexer: its type name, the matched source
// text, and the line it came from.
class token {
public:
// (type name, matched text, line number)
token(string, string, int);
// Token-type name, e.g. "ID", "STRING", "COLON_DASH".
string type;
// The matched source text.
string character;
// Line the token was found on.
int line_num;
// Streams as (TYPE,"text",line).
friend ostream & operator<<(ostream & os, token tk);
};
#endif

View File

@ -0,0 +1,56 @@
#ifndef __UTIL_H__
#define __UTIL_H__
#include <vector>
#include <iostream>
#include <fstream>
// Reads every line of `file_name` into a vector (newlines stripped).
// Returns an empty vector when the file cannot be opened.
// Fix: loop on getline's success instead of testing eof() -- the old
// `while(!myfile.eof())` form pushed one spurious empty string after the
// final successful read, and yielded {""} instead of {} for a file that
// failed to open.
vector<string> open_file(string file_name) {
	ifstream myfile(file_name.c_str());
	vector<string> data;
	string temp;
	while(getline(myfile, temp)) {
		data.push_back(temp);
	}
	return data;
}
// Returns true when `input` names a readable file; otherwise prints an
// error to stderr and returns false.
// Fixes: the old version wrapped a single-pass check in a while loop that
// could never run a second iteration, and read the entire file through
// open_file() only to discard the result.
bool get_file_name(string input) {
	ifstream inputs(input.c_str());
	if(inputs.good()) {
		return true;
	}
	cerr << "incorrect file name" << endl;
	return false;
}
// Writes `output` to `file_name` (overwriting any previous contents),
// followed by a single trailing newline.  The stream is flushed and
// closed by its destructor.
void write_file(string output, string file_name) {
	ofstream myfile(file_name.c_str());
	myfile << output << "\n";
}
// Writes each element of `output` on its own line: "\n" BETWEEN lines,
// no trailing newline.
// Fix: the old loop wrote every element except the last twice -- it
// streamed `output[i] << "\n"` and then unconditionally streamed
// output[i] again (a missing `else`), producing e.g. "a\nab" for {a,b}.
void write_file(vector<string> output, string file_name) {
	ofstream myfile(file_name.c_str());
	for(unsigned int i = 0; i < output.size(); i++) {
		if(i != output.size() - 1) {
			myfile << output[i] << "\n";
		}
		else {
			myfile << output[i];
		}
	}
}
#endif

View File

@ -0,0 +1,21 @@
Schemes:
student(Name, ID, Address, Major)
Facts:
Student('North', '51', '10 Main', 'CS').
# |student('Reagan', '52', '11 Maple', 'CS').|
student('Reagan', '52', '11 Maple', 'CS').
student('Clinton', '53', '12 Ashton', 'MATH').
Rules:
major(Name,Major):-student(Name,ID,Address,Major).
Queries:
major('North',Major)?
major(Name,'MATH')?
#| major(Name,'MATH')?
# major(Name,'MATH')? |#
# |major(Name,'MATH')?|

View File

@ -0,0 +1,21 @@
Schemes:
student(Name, ID, Address, Major)
Facts:
Student('North', '51', '10 Main', 'CS').
# |student('Reagan', '52', '11 Maple', 'CS').|
student('Reagan', '52', '11 Maple', 'CS').
student('Clinton', '53', '12 Ashton', 'MATH').
Rules:
major(Name,Major):-student(Name,ID,Address,Major).
Queries:
major('North',Major)?
major(Name,'MATH')?
#| major(Name,'MATH')?
# major(Name,'MATH')? |#
# |major(Name,'MATH')?|

View File

@ -0,0 +1,17 @@
Schemes:
snap(S,N,A,P)
HasSameAddress(X,Y)
Facts:
snap('12345','C. Brown','12 Apple','555-1234').
snap('33333','Snoopy','12 Apple','555-1234').
Rules:
HasSameAddress(X,'Y') :- snap(A,X,B,C),snap(D,Y,'B',E).
Queries:
HasSameAddress('Snoopy',Who)?

View File

@ -0,0 +1,16 @@
Schemes:
snap(S,N,A,P)
NameHasID(N,S)
Facts:
snap('12345','C. Brown','12 Apple','555-1234').
snap('67890','L. Van Pelt','34 Pear','555-5678').
Rules:
NameHasID(N,S) :- snap(S,N,A,P)?
Queries:
NameHasID('Snoopy',Id)?

View File

@ -0,0 +1,19 @@
Schemes:
bob(A,B)
Facts:
bob('4','2').
Rules:
bob(A,B) :- bob(B,A).
Queries:
bob(X,Y)?
# extra tokens at end of file
????

View File

@ -0,0 +1,19 @@
Schemes:
bob(A,B)
Facts:
bob('4','2').
Rules:
bob(A,B) :- bob(B,A).
# wrong punctuation for query
Queries:
bob(X,Y).

View File

@ -0,0 +1,18 @@
Schemes:
bob(A,B)
# missing punctuation on fact
Facts:
bob('4','2')
Rules:
bob(A,B) :- bob(B,A).
Queries:
bob(X,Y)?

View File

@ -0,0 +1,16 @@
Schemes:
bob(A,B)
# fact/rule lists should be optional
Facts:
Rules:
Queries:
bob(X,Y)?

View File

@ -0,0 +1,14 @@
# scheme/query lists should be required
Schemes:
Facts:
Rules:
Queries:

View File

@ -0,0 +1,21 @@
Schemes:
bob(A,B)
Facts:
bob('4','2').
# rules section should exist
# queries should come after rules
Queries:
bob(X,Y)?
# facts can't be repeated
Facts:
bob('4','4').

View File

@ -0,0 +1,19 @@
Success!
Schemes(2):
snap(S,N,A,P)
HasSameAddress(X,Y)
Facts(2):
snap('12345','C. Brown','12 Apple','555-1234').
snap('33333','Snoopy','12 Apple','555-1234').
Rules(1):
HasSameAddress(X,Y) :- snap(A,X,B,C),snap(D,Y,B,E).
Queries(1):
HasSameAddress('Snoopy',Who)?
Domain(6):
'12 Apple'
'12345'
'33333'
'555-1234'
'C. Brown'
'Snoopy'

View File

@ -0,0 +1,3 @@
Failure!
(Q_MARK,"?",10)

View File

@ -0,0 +1,3 @@
Failure!
(Q_MARK,"?",18)

View File

@ -0,0 +1,3 @@
Failure!
(PERIOD,".",16)

View File

@ -0,0 +1,3 @@
Failure!
(RULES,"Rules",11)

View File

@ -0,0 +1,9 @@
Success!
Schemes(1):
bob(A,B)
Facts(0):
Rules(0):
Queries(1):
bob(X,Y)?
Domain(0):

View File

@ -0,0 +1,3 @@
Failure!
(FACTS,"Facts",7)

View File

@ -0,0 +1,3 @@
Failure!
(QUERIES,"Queries",12)

View File

@ -0,0 +1,15 @@
Schemes:
SK(A,B)
Facts:
SK('a','c').
SK('b','c').
SK('b','b').
SK('b','c').
Rules:
DoNothing(Z) :- Stuff(Z).
Queries:
SK(A,'c')?
SK('b','c')?
SK(X,X)?
SK(A,B)?

View File

@ -0,0 +1,11 @@
SK(A,'c')? Yes(2)
A='a'
A='b'
SK('b','c')? Yes(1)
SK(X,X)? Yes(1)
X='b'
SK(A,B)? Yes(3)
A='a', B='c'
A='b', B='b'
A='b', B='c'