Commit db36c916 authored by Kamil Trzciński

Merge branch 'feature/gb/pipeline-variable-expressions' into 'master'

Pipeline variables expressions

See merge request gitlab-org/gitlab-ce!17261
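
The changes below add a small expression engine for CI pipeline statements: lexeme classes, a lexer, a parser, and a statement wrapper, plus their specs. As a rough orientation, here is a minimal usage sketch; the `fake_pipeline` stand-in is illustrative only, since `Statement#initialize` below only needs an object whose #variables return key/value pairs.

# Illustrative stand-ins; a real caller would pass a Ci::Pipeline.
fake_variable = Struct.new(:key, :value).new('CI_COMMIT_REF_NAME', 'master')
fake_pipeline = Struct.new(:variables).new([fake_variable])

statement = Gitlab::Ci::Pipeline::Expression::Statement.new(
  '$CI_COMMIT_REF_NAME == "master"', fake_pipeline
)

statement.evaluate # => true
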
module Gitlab
module Ci
module Pipeline
module Expression
module Lexeme
class Base
def evaluate(**variables)
raise NotImplementedError
end
def self.build(token)
raise NotImplementedError
end
def self.scan(scanner)
if scanner.scan(self::PATTERN)
Expression::Token.new(scanner.matched, self)
end
end
end
end
end
end
end
end
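
For illustration, this is roughly how `Lexeme::Base.scan` is driven, assuming the `Variable` lexeme defined further below: a successful match wraps the matched text in a token and consumes input from the `StringScanner`, which is what lets the lexer walk a statement token by token.

require 'strscan'

scanner = StringScanner.new('$VARIABLE == "text"')

# Variable::PATTERN matches at the current scan position, so scan
# returns an Expression::Token and advances the scanner past the match.
token = Gitlab::Ci::Pipeline::Expression::Lexeme::Variable.scan(scanner)

token.value  # => "$VARIABLE"
scanner.rest # => ' == "text"'
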
module Gitlab
module Ci
module Pipeline
module Expression
module Lexeme
class Equals < Lexeme::Operator
PATTERN = /==/.freeze
def initialize(left, right)
@left = left
@right = right
end
def evaluate(variables = {})
@left.evaluate(variables) == @right.evaluate(variables)
end
def self.build(_value, behind, ahead)
new(behind, ahead)
end
end
end
end
end
end
end
module Gitlab
module Ci
module Pipeline
module Expression
module Lexeme
class Null < Lexeme::Value
PATTERN = /null/.freeze
def initialize(value = nil)
@value = nil
end
def evaluate(variables = {})
nil
end
def self.build(_value)
self.new
end
end
end
end
end
end
end
module Gitlab
module Ci
module Pipeline
module Expression
module Lexeme
class Operator < Lexeme::Base
def self.type
:operator
end
end
end
end
end
end
end
module Gitlab
module Ci
module Pipeline
module Expression
module Lexeme
class String < Lexeme::Value
PATTERN = /("(?<string>.+?)")|('(?<string>.+?)')/.freeze
def initialize(value)
@value = value
end
def evaluate(variables = {})
@value.to_s
end
def self.build(string)
new(string.match(PATTERN)[:string])
end
end
end
end
end
end
end
module Gitlab
module Ci
module Pipeline
module Expression
module Lexeme
class Value < Lexeme::Base
def self.type
:value
end
end
end
end
end
end
end
module Gitlab
module Ci
module Pipeline
module Expression
module Lexeme
class Variable < Lexeme::Value
PATTERN = /\$(?<name>\w+)/.freeze
def initialize(name)
@name = name
end
def evaluate(variables = {})
HashWithIndifferentAccess.new(variables).fetch(@name, nil)
end
def self.build(string)
new(string.match(PATTERN)[:name])
end
end
end
end
end
end
end
module Gitlab
module Ci
module Pipeline
module Expression
class Lexer
include ::Gitlab::Utils::StrongMemoize
LEXEMES = [
Expression::Lexeme::Variable,
Expression::Lexeme::String,
Expression::Lexeme::Null,
Expression::Lexeme::Equals
].freeze
SyntaxError = Class.new(Statement::StatementError)
MAX_TOKENS = 100
def initialize(statement, max_tokens: MAX_TOKENS)
@scanner = StringScanner.new(statement)
@max_tokens = max_tokens
end
def tokens
strong_memoize(:tokens) { tokenize }
end
def lexemes
tokens.map(&:to_lexeme)
end
private
def tokenize
tokens = []
@max_tokens.times do
@scanner.skip(/\s+/) # ignore whitespace
return tokens if @scanner.eos?
lexeme = LEXEMES.find do |type|
type.scan(@scanner).tap do |token|
tokens.push(token) if token.present?
end
end
unless lexeme.present?
raise Lexer::SyntaxError, 'Unknown lexeme found!'
end
end
raise Lexer::SyntaxError, 'Too many tokens!'
end
end
end
end
end
end
module Gitlab
module Ci
module Pipeline
module Expression
class Parser
def initialize(tokens)
@tokens = tokens.to_enum
@nodes = []
end
##
# This produces a recursive descent parse tree.
#
# It currently does not support precedence of operators.
#
def tree
while token = @tokens.next
case token.type
when :operator
token.build(@nodes.pop, tree).tap do |node|
@nodes.push(node)
end
when :value
token.build.tap do |leaf|
@nodes.push(leaf)
end
end
end
rescue StopIteration
@nodes.last || Lexeme::Null.new
end
def self.seed(statement)
new(Expression::Lexer.new(statement).tokens)
end
end
end
end
end
end
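
A sketch of the tree this parser builds for a simple equality, using the lexer above; the root is an `Equals` node whose operands are the `Variable` and `String` leaves:

tokens = Gitlab::Ci::Pipeline::Expression::Lexer.new('$VARIABLE == "text"').tokens

tree = Gitlab::Ci::Pipeline::Expression::Parser.new(tokens).tree

tree                                # => #<Gitlab::Ci::Pipeline::Expression::Lexeme::Equals ...>
tree.evaluate('VARIABLE' => 'text') # => true
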
module Gitlab
module Ci
module Pipeline
module Expression
class Statement
StatementError = Class.new(StandardError)
GRAMMAR = [
%w[variable equals string],
%w[variable equals variable],
%w[variable equals null],
%w[string equals variable],
%w[null equals variable],
%w[variable]
].freeze
def initialize(statement, pipeline)
@lexer = Expression::Lexer.new(statement)
@variables = pipeline.variables.map do |variable|
[variable.key, variable.value]
end
end
def parse_tree
raise StatementError if @lexer.lexemes.empty?
unless GRAMMAR.find { |syntax| syntax == @lexer.lexemes }
raise StatementError, 'Unknown pipeline expression!'
end
Expression::Parser.new(@lexer.tokens).tree
end
def evaluate
parse_tree.evaluate(@variables.to_h)
end
end
end
end
end
end
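
The `GRAMMAR` whitelist above means only a single comparison (or a lone variable) is accepted; chained operators are rejected before parsing. A sketch, reusing an empty stand-in pipeline:

fake_pipeline = Struct.new(:variables).new([])

# '$VAR == $OTHER == "x"' flattens to
# %w[variable equals variable equals string], which is not in GRAMMAR,
# so parse_tree raises 'Unknown pipeline expression!'.
statement = Gitlab::Ci::Pipeline::Expression::Statement.new(
  '$VAR == $OTHER == "x"', fake_pipeline
)

statement.parse_tree # raises Gitlab::Ci::Pipeline::Expression::Statement::StatementError
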
module Gitlab
module Ci
module Pipeline
module Expression
class Token
attr_reader :value, :lexeme
def initialize(value, lexeme)
@value = value
@lexeme = lexeme
end
def build(*args)
@lexeme.build(@value, *args)
end
def type
@lexeme.type
end
def to_lexeme
@lexeme.name.demodulize.downcase
end
end
end
end
end
end
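
For reference, `Token#to_lexeme` is what ties tokens back to the lowercase names used in `Statement::GRAMMAR`: it demodulizes the lexeme class name and downcases it, for example:

token = Gitlab::Ci::Pipeline::Expression::Token.new(
  '==', Gitlab::Ci::Pipeline::Expression::Lexeme::Equals
)

token.type      # => :operator
token.to_lexeme # => "equals"
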
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Expression::Lexeme::Equals do
let(:left) { double('left') }
let(:right) { double('right') }
describe '.build' do
it 'creates a new instance of the token' do
expect(described_class.build('==', left, right))
.to be_a(described_class)
end
end
describe '.type' do
it 'is an operator' do
expect(described_class.type).to eq :operator
end
end
describe '#evaluate' do
it 'returns false when left and right are not equal' do
allow(left).to receive(:evaluate).and_return(1)
allow(right).to receive(:evaluate).and_return(2)
operator = described_class.new(left, right)
expect(operator.evaluate(VARIABLE: 3)).to eq false
end
it 'returns true when left and right are equal' do
allow(left).to receive(:evaluate).and_return(1)
allow(right).to receive(:evaluate).and_return(1)
operator = described_class.new(left, right)
expect(operator.evaluate(VARIABLE: 3)).to eq true
end
end
end
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Expression::Lexeme::Null do
describe '.build' do
it 'creates a new instance of the token' do
expect(described_class.build('null'))
.to be_a(described_class)
end
end
describe '.type' do
it 'is a value lexeme' do
expect(described_class.type).to eq :value
end
end
describe '#evaluate' do
it 'always evaluates to `nil`' do
expect(described_class.new('null').evaluate).to be_nil
end
end
end
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Expression::Lexeme::String do
describe '.build' do
it 'creates a new instance of the token' do
expect(described_class.build('"my string"'))
.to be_a(described_class)
end
end
describe '.type' do
it 'is a value lexeme' do
expect(described_class.type).to eq :value
end
end
describe '.scan' do
context 'when using double quotes' do
it 'correctly identifies a string token' do
scanner = StringScanner.new('"some string"')
token = described_class.scan(scanner)
expect(token).not_to be_nil
expect(token.build.evaluate).to eq 'some string'
end
end
context 'when using single quotes' do
it 'correctly identifies a string token' do
scanner = StringScanner.new("'some string 2'")
token = described_class.scan(scanner)
expect(token).not_to be_nil
expect(token.build.evaluate).to eq 'some string 2'
end
end
context 'when there are mixed quotes in the string' do
it 'scans only the first double-quoted string' do
scanner = StringScanner.new('"some string" "and another one"')
token = described_class.scan(scanner)
expect(token).not_to be_nil
expect(token.build.evaluate).to eq 'some string'
end
it 'scans only the first single-quoted string' do
scanner = StringScanner.new("'some string' 'and another one'")
token = described_class.scan(scanner)
expect(token).not_to be_nil
expect(token.build.evaluate).to eq 'some string'
end
it 'allows using single quotes inside double quotes' do
scanner = StringScanner.new(%("some ' string"))
token = described_class.scan(scanner)
expect(token).not_to be_nil
expect(token.build.evaluate).to eq "some ' string"
end
it 'allows using double quotes inside single quotes' do
scanner = StringScanner.new(%('some " string'))
token = described_class.scan(scanner)
expect(token).not_to be_nil
expect(token.build.evaluate).to eq 'some " string'
end
end
end
describe '#evaluate' do
it 'returns the string value if it is present' do
string = described_class.new('my string')
expect(string.evaluate).to eq 'my string'
end
it 'returns an empty string if it is empty' do
string = described_class.new('')
expect(string.evaluate).to eq ''
end
end
end
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Expression::Lexeme::Variable do
describe '.build' do
it 'creates a new instance of the token' do
expect(described_class.build('$VARIABLE'))
.to be_a(described_class)
end
end
describe '.type' do
it 'is a value lexeme' do
expect(described_class.type).to eq :value
end
end
describe '#evaluate' do
it 'returns variable value if it is defined' do
variable = described_class.new('VARIABLE')
expect(variable.evaluate(VARIABLE: 'my variable'))
.to eq 'my variable'
end
it 'allows using a string as a variable key too' do
variable = described_class.new('VARIABLE')
expect(variable.evaluate('VARIABLE' => 'my variable'))
.to eq 'my variable'
end
it 'returns nil if it is not defined' do
variable = described_class.new('VARIABLE')
expect(variable.evaluate(OTHER: 'variable')).to be_nil
end
it 'returns an empty string if it is empty' do
variable = described_class.new('VARIABLE')
expect(variable.evaluate(VARIABLE: '')).to eq ''
end
end
end
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Expression::Lexer do
let(:token_class) do
Gitlab::Ci::Pipeline::Expression::Token
end
describe '#tokens' do
it 'tokenizes a single value' do
tokens = described_class.new('$VARIABLE').tokens
expect(tokens).to be_one
expect(tokens).to all(be_an_instance_of(token_class))
end
it 'ignores whitespace characters' do
tokens = described_class.new("\t$VARIABLE ").tokens
expect(tokens).to be_one
expect(tokens).to all(be_an_instance_of(token_class))
end
it 'tokenizes multiple values of the same token' do
tokens = described_class.new("$VARIABLE1 $VARIABLE2").tokens
expect(tokens.size).to eq 2
expect(tokens).to all(be_an_instance_of(token_class))
end
it 'tokenizes multiple values with different tokens' do
tokens = described_class.new('$VARIABLE "text" "value"').tokens
expect(tokens.size).to eq 3
expect(tokens.first.value).to eq '$VARIABLE'
expect(tokens.second.value).to eq '"text"'
expect(tokens.third.value).to eq '"value"'
end
it 'tokenizes values and operators' do
tokens = described_class.new('$VARIABLE == "text"').tokens
expect(tokens.size).to eq 3
expect(tokens.first.value).to eq '$VARIABLE'
expect(tokens.second.value).to eq '=='
expect(tokens.third.value).to eq '"text"'
end
it 'limits the statement to the specified number of tokens' do
lexer = described_class.new("$V1 $V2 $V3 $V4", max_tokens: 3)
expect { lexer.tokens }
.to raise_error described_class::SyntaxError
end