src/lexer/statements/create.ts

Statements: 100% (16/16)
Branches:   100% (6/6)
Functions:  100% (4/4)
Lines:      100% (16/16)
import { Query } from "../../reader/query";
import { ILexer } from "../interface";
import { cleanUnquotedIdentifier } from "../lexer";
import { Keyword, Types } from "../tokens";
 
/**
 * Lexer for CREATE statements: tags the CREATE keyword and the word
 * that immediately follows it (TABLE, INDEX, VIEW, ...) as tokens.
 */
class Create implements ILexer {
  public options: string[] = [
    "algorithm",
    "database",
    "definer",
    "event",
    "function",
    "index",
    "procedure",
    "server",
    "table",
    "tablespace",
    "temporary",
    "trigger",
    "user",
    "view"
  ];
 
  public tokenise(query: Query): Query {
    let lastToken = "";

    query.lines.forEach(line => {
      line.content.split(" ").forEach(word => {
        let item = word.toLowerCase().trim();
        if (item === Keyword.Create) {
          // Tag the CREATE keyword itself.
          line.tokens.push([Types.Keyword, item]);
        } else if (lastToken === Keyword.Create) {
          // The word immediately after CREATE names what is being created
          // (TABLE, INDEX, ...); strip unquoted-identifier noise before tagging.
          item = cleanUnquotedIdentifier(item);

          if (item.length > 0) {
            line.tokens.push([Types.Option, item]);
          }
        }
        lastToken = item;
      });
    });

    return query;
  }
}
 
export { Create };
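
For reference, a minimal usage sketch (not part of create.ts): the Query value below is a hand-built stand-in containing only the fields tokenise reads and writes, since the real object is produced by the reader module and may be constructed differently.

import { Query } from "../../reader/query";
import { Create } from "./create";

// Stand-in with only the fields tokenise touches: each line carries its
// raw content and an array that receives [type, value] token pairs.
const query = {
  lines: [{ content: "create table users", tokens: [] }]
} as unknown as Query;

new Create().tokenise(query);
// query.lines[0].tokens should now contain a Keyword entry for "create"
// followed by an Option entry for "table".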