All files / src/lexer/statements / update.ts

Statements: 100% (15/15)
Branches: 100% (6/6)
Functions: 100% (3/3)
Lines: 100% (15/15)
import { Query } from "../../reader/query";
import { ILexer } from "../interface";
import { cleanUnquotedIdentifier } from "../lexer";
import { Keyword, Types } from "../tokens";
 
class Update implements ILexer {
  public tokenise(query: Query): Query {
    // Track the previous word so the table name following UPDATE can be identified.
    let lastToken = "";
    query.lines.forEach(line => {
      line.content.split(" ").forEach(word => {
        let item = word.toLowerCase().trim();
 
        if (item === Keyword.Update) {
          // Tokenise the UPDATE keyword itself.
          line.tokens.push([Types.Keyword, item]);
        } else if (lastToken === Keyword.Update) {
          // The word immediately after UPDATE names the target table;
          // strip unquoted-identifier noise before recording it.
          item = cleanUnquotedIdentifier(item);

          if (item.length > 0) {
            line.tokens.push([Types.TableReference, item]);
          }
        }
        lastToken = item;
      });
    });
 
    return query;
  }
}
 
export { Update };
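
For reference, a minimal sketch of how this lexer might be driven. The Query constructor used here (building a query from raw SQL text) is an assumption; only the lines/tokens shape read by tokenise above is taken from the code itself.

import { Query } from "../../reader/query";
import { Update } from "./update";

// Assumption: Query can be constructed from a raw SQL string and exposes
// the per-line tokens array that tokenise populates.
const query = new Update().tokenise(new Query("UPDATE users SET name = 'alice';"));

// Expected tokens for the first line:
//   [Types.Keyword, "update"], [Types.TableReference, "users"]
console.log(query.lines[0].tokens);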