From a4d282c1dbec34be9637a3d8d629d937d7b46ad5 Mon Sep 17 00:00:00 2001
From: gal salomon
Date: Wed, 12 Jan 2022 19:01:06 +0200
Subject: [PATCH] revert the arrow installation(causing failure on some
 distro"s, such as fedora)

Signed-off-by: gal salomon
---
 bootstrap                                 |  2 +-
 requirements.txt                          |  2 --
 s3tests_boto3/functional/test_s3select.py | 43 -----------------------
 3 files changed, 1 insertion(+), 46 deletions(-)

diff --git a/bootstrap b/bootstrap
index 6e6d51e..36a5c5b 100755
--- a/bootstrap
+++ b/bootstrap
@@ -22,7 +22,7 @@ case "$ID" in
         ;;
     centos|fedora|rhel|ol|virtuozzo)
-        packages=(which python3-virtualenv python36-devel libevent-devel libffi-devel libxml2-devel libxslt-devel zlib-devel arrow-devel parquet-devel)
+        packages=(which python3-virtualenv python36-devel libevent-devel libffi-devel libxml2-devel libxslt-devel zlib-devel)
         for package in ${packages[@]}; do
             # When the package is python36-devel we change it to python3-devel on Fedora
             if [[ ${package} == "python36-devel" && -f /etc/fedora-release ]]; then
diff --git a/requirements.txt b/requirements.txt
index df4c059..88e34a5 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -10,5 +10,3 @@ requests >=2.23.0
 pytz >=2011k
 httplib2
 lxml
-pyarrow
-pandas
diff --git a/s3tests_boto3/functional/test_s3select.py b/s3tests_boto3/functional/test_s3select.py
index 50eaf86..2b31342 100644
--- a/s3tests_boto3/functional/test_s3select.py
+++ b/s3tests_boto3/functional/test_s3select.py
@@ -15,11 +15,6 @@ from . import (
 import logging
 logging.basicConfig(level=logging.INFO)
 
-#import numpy as np
-import pandas as pd
-import pyarrow as pa
-import pyarrow.parquet as pq
-
 region_name = ''
 
 # recurssion function for generating arithmetical expression 
@@ -223,38 +218,6 @@ def upload_csv_object(bucket_name,new_key,obj):
     response = c2.get_object(Bucket=bucket_name, Key=new_key)
     eq(response['Body'].read().decode('utf-8'), obj, 's3select error[ downloaded object not equal to uploaded objecy')
 
-def parquet_generator():
-
-    parquet_size = 1000000
-    a=[]
-    for i in range(parquet_size):
-        a.append(int(random.randint(1,10000)))
-
-    b=[]
-    for i in range(parquet_size):
-        b.append(int(random.randint(1,10000)))
-
-    c=[]
-    for i in range(parquet_size):
-        c.append(int(random.randint(1,10000)))
-
-    d=[]
-    for i in range(parquet_size):
-        d.append(int(random.randint(1,10000)))
-
-    df3 = pd.DataFrame({'a': a,
-                    'b': b,
-                    'c': c,
-                    'd': d}
-                   )
-
-
-    table = pa.Table.from_pandas(df3,preserve_index=False)
-
-    print (table)
-
-    pq.write_table(table,version='1.0',where='/tmp/3col_int_10k.parquet')
-
 def run_s3select(bucket,key,query,column_delim=",",row_delim="\n",quot_char='"',esc_char='\\',csv_header_info="NONE", progress = False):
 
     s3 = get_client()
@@ -1311,9 +1274,3 @@ def test_output_serial_expressions():
 
     s3select_assert_result( res_s3select_quot, res_s3select_final )
 
-@attr('s3select')
-def test_parqueet():
-
-    parquet_generator()
-
-