Merge pull request #426 from galsalomon66/revert_install_arrow

revert the arrow installation (causing failure on some distro's, such …
commit 540b28fa20
Ali Maredia 2022-01-13 16:31:30 -05:00, committed by GitHub
GPG key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)

3 changed files with 1 addition and 46 deletions


@@ -22,7 +22,7 @@ case "$ID" in
         ;;
     centos|fedora|rhel|ol|virtuozzo)
-        packages=(which python3-virtualenv python36-devel libevent-devel libffi-devel libxml2-devel libxslt-devel zlib-devel arrow-devel parquet-devel)
+        packages=(which python3-virtualenv python36-devel libevent-devel libffi-devel libxml2-devel libxslt-devel zlib-devel)
         for package in ${packages[@]}; do
             # When the package is python36-devel we change it to python3-devel on Fedora
             if [[ ${package} == "python36-devel" && -f /etc/fedora-release ]]; then


@@ -10,5 +10,3 @@ requests >=2.23.0
 pytz >=2011k
 httplib2
 lxml
-pyarrow
-pandas
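
With pyarrow and pandas dropped from the hard requirements, one way to keep any future parquet coverage optional rather than mandatory would be a lazy import guard in the test module. This is only a sketch of a common pattern, not something this PR adds; the HAVE_PARQUET flag is a hypothetical name.

    # Hypothetical guard, not part of this PR: import the optional parquet
    # dependencies lazily so the rest of the suite runs without them.
    try:
        import pandas as pd
        import pyarrow as pa
        import pyarrow.parquet as pq
        HAVE_PARQUET = True
    except ImportError:
        HAVE_PARQUET = False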


@@ -15,11 +15,6 @@ from . import (
 import logging
 logging.basicConfig(level=logging.INFO)

-#import numpy as np
-import pandas as pd
-import pyarrow as pa
-import pyarrow.parquet as pq
-
 region_name = ''

 # recurssion function for generating arithmetical expression
@@ -223,38 +218,6 @@ def upload_csv_object(bucket_name,new_key,obj):
     response = c2.get_object(Bucket=bucket_name, Key=new_key)
     eq(response['Body'].read().decode('utf-8'), obj, 's3select error[ downloaded object not equal to uploaded objecy')

-def parquet_generator():
-
-    parquet_size = 1000000
-
-    a=[]
-    for i in range(parquet_size):
-        a.append(int(random.randint(1,10000)))
-
-    b=[]
-    for i in range(parquet_size):
-        b.append(int(random.randint(1,10000)))
-
-    c=[]
-    for i in range(parquet_size):
-        c.append(int(random.randint(1,10000)))
-
-    d=[]
-    for i in range(parquet_size):
-        d.append(int(random.randint(1,10000)))
-
-    df3 = pd.DataFrame({'a': a,
-                        'b': b,
-                        'c': c,
-                        'd': d}
-                       )
-
-    table = pa.Table.from_pandas(df3,preserve_index=False)
-
-    print (table)
-
-    pq.write_table(table,version='1.0',where='/tmp/3col_int_10k.parquet')
-
 def run_s3select(bucket,key,query,column_delim=",",row_delim="\n",quot_char='"',esc_char='\\',csv_header_info="NONE", progress = False):

     s3 = get_client()
@@ -1311,9 +1274,3 @@ def test_output_serial_expressions():
     s3select_assert_result( res_s3select_quot, res_s3select_final )

-@attr('s3select')
-def test_parqueet():
-
-    parquet_generator()
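
For anyone who still needs the parquet fixture locally after this revert, the deleted helper can be reproduced as a standalone script. This is a minimal sketch based on the removed code above (output path, column names, and sizes are taken from the diff); it assumes pandas and pyarrow are installed separately and is not part of the test suite.

    # Standalone reconstruction of the removed parquet_generator(), for local use only.
    # Assumes pandas and pyarrow are available in the environment.
    import random

    import pandas as pd
    import pyarrow as pa
    import pyarrow.parquet as pq

    def parquet_generator(parquet_size=1000000, where='/tmp/3col_int_10k.parquet'):
        # Four columns of random integers in [1, 10000], as in the deleted helper.
        columns = {name: [random.randint(1, 10000) for _ in range(parquet_size)]
                   for name in ('a', 'b', 'c', 'd')}
        table = pa.Table.from_pandas(pd.DataFrame(columns), preserve_index=False)
        pq.write_table(table, version='1.0', where=where)

    if __name__ == '__main__':
        parquet_generator()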