From 048f9297a151bb7ff292da3f3f3e4f910b5065aa Mon Sep 17 00:00:00 2001
From: gal salomon
Date: Tue, 31 Mar 2020 14:35:51 +0300
Subject: [PATCH] adding test for detection of cyclic reference to alias

(cherry picked from commit d543619e7162aaedbff58b8f30f4d08da14c5493)
---
 s3tests_boto3/functional/test_s3select.py | 28 ++++++++++++++++++-----
 1 file changed, 22 insertions(+), 6 deletions(-)

diff --git a/s3tests_boto3/functional/test_s3select.py b/s3tests_boto3/functional/test_s3select.py
index 3ed776e..18306ed 100644
--- a/s3tests_boto3/functional/test_s3select.py
+++ b/s3tests_boto3/functional/test_s3select.py
@@ -73,7 +73,6 @@ from . import (
 import boto
 import boto.s3.connection
 import sys
-#import urlparse
 import random
 from botocore.client import Config
 
@@ -216,11 +215,12 @@ def test_column_sum_min_max():
 
 
 def test_alias():
-    # purpose: test is comparing result of exact queries , one with alias the other without.
-    # this test is settign alias on 3 projections, the third projection is using other projection alias, also the where clause is using aliases
-    # the test validate that where-cluase and projections are executing aliases correctlly, bare in mind that each alias has its own cache,
-    # and that cache need to invalidate time.
-
+
+    # purpose: the test compares the results of two identical queries, one using aliases and the other without.
+    # the test sets aliases on 3 projections; the third projection uses another projection's alias, and the where clause uses aliases as well.
+    # it validates that the where clause and the projections resolve aliases correctly; bear in mind that each alias has its own cache,
+    # and that cache needs to be invalidated for every new row.
+
     csv_obj = create_random_csv_object(10000,10)
 
     csv_obj_name = "csv_10000x10"
@@ -234,3 +234,19 @@ def test_alias():
 
     assert res_s3select_alias == res_s3select_no_alias
 
+def test_alias_cyclic_refernce():
+
+    # purpose of the test is to validate that the s3select engine is able to detect a cyclic reference to an alias.
+
+    csv_obj = create_random_csv_object(10000,10)
+
+    csv_obj_name = "csv_10000x10"
+    bucket_name = "test"
+    upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+
+    res_s3select_alias = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select int(_1) as a1,int(_2) as a2, a1+a4 as a3, a5+a1 as a4, int(_3)+a3 as a5 from stdin;") )
+
+    find_res = res_s3select_alias.find("number of calls exceed maximum size, probably a cyclic reference to alias")
+
+    assert find_res >= 0
+
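
For reference, here is a minimal sketch of why the query used in test_alias_cyclic_refernce can never be resolved: a3 is defined from a4, a4 from a5, and a5 from a3, so evaluating any of them recurses indefinitely, and the engine aborts once the number of alias-evaluation calls exceeds its limit (the error message the test searches for). The dependency map and depth-first cycle check below are written by hand for illustration only; they are not the s3select engine's implementation, and the names alias_deps and has_cycle are made up.

# Illustrative only: alias dependencies transcribed by hand from the test query
#   select int(_1) as a1, int(_2) as a2, a1+a4 as a3, a5+a1 as a4, int(_3)+a3 as a5 ...
# This is NOT how the s3select engine detects the cycle; the engine caps the number
# of alias-evaluation calls per row and reports the error asserted on in the test.
alias_deps = {
    "a1": [],            # int(_1)      -> no alias dependency
    "a2": [],            # int(_2)      -> no alias dependency
    "a3": ["a1", "a4"],  # a1 + a4
    "a4": ["a5", "a1"],  # a5 + a1
    "a5": ["a3"],        # int(_3) + a3
}

def has_cycle(deps):
    # Plain depth-first search; a node found while still on the recursion stack
    # (the "visiting" set) means a back-edge, i.e. a cyclic alias reference.
    visiting, done = set(), set()

    def visit(node):
        if node in done:
            return False
        if node in visiting:
            return True
        visiting.add(node)
        cyclic = any(visit(d) for d in deps.get(node, []))
        visiting.discard(node)
        done.add(node)
        return cyclic

    return any(visit(n) for n in deps)

assert has_cycle(alias_deps)     # a3 -> a4 -> a5 -> a3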