forked from TrueCloudLab/s3-tests
Compare commits
148 commits
Commits (author and date columns were empty in this view):

e110951125  c6594c8080  ab3d6f4b7d  d4e76db4cf  5642cf4b7b  e0dea7f74b
5ae14d0ef9  7ac92fbd11  9f2d994eb8  c94eed2e9f  91b8a58dcf  f060572730
99690b71c5  a8fd8d430e  968d0841da  acec6abe78  8a51654721  bb4158dec6
3d8fa8179d  b4bb9afb52  8186dd7561  5396b04f1b  e8d56603a3  b6dcf111fa
60cc478d70  c749452fd0  d528efdda9  c12216d0cc  def95c2968  5eadd6d08c
89b10274dd  02ee15041a  698f19d621  9ea2dfb393  2227b61eee  c70849d527
d3e1ccda9c  963359db88  6dbdf39037  001b7f3f55  9f1b9d02a5  54699553d5
358547142a  e0b27409b9  78ce27ae27  73f458da05  2aa4a22c5a  e60dda8e1d
3c52fa8bb9  23cd521b5f  e5617b783f  1af1880b7a  0b8e554dd1  70b928269f
cfa805efe9  090fbfbead  4143ec30f3  41006d68c2  ae981dd3a8  5e0cea1c07
d44879544d  7f21baaea6  3c1de4acc3  de683cd665  6f9becf5b9  f4c19c9389
89363e8fef  d1a38df407  e95809024c  93a6d89681  907331d90c  6e426d3291
80763520dc  27364bf5f6  0fa29f6c8e  47fff36c36  95154bf0ce  e00e3a77b0
864902a3ad  ec530d049c  f004493dcc  9eacf29594  8090ea4629  df426ea041
057432b9f5  e22a689a44  f344fd6ca7  97fb5a7ee3  0fef1637ae  a81ad3515e
0d7111ffc2  2ad7f81917  72957ece35  8423389033  aea3f6b4c3  6f74f2af07
6b412b509b  87993f147d  ecc4cbc5c4  c4635f9463  287acbc6e7  68f1939942
a0aa55d4ae  e3d31ef6eb  3fe80dc877  a5108a7d69  3be10d722f  adad16121f
dd163877d4  86bc2a191f  3698d093bf  65b067486e  0e36699571  3dc4ff5da8
c9792cb975  b930f194e4  253b63aa11  6bd75be1d6  61804bcf91  ea9f07a2bf
7d14452035  6ea6cb6467  10c801a2e0  9d670846a3  bb801b8625  652619f46f
b1ddeee6eb  ca9cb5cc2c  e010c4cfec  edea887e9c  cd4f7e1a7a  048f9297a1
8bd6158054  aca68a9d39  537431c686  8ca96c4519  34040769ff  8ebb504159
9092d1ac61  7b3df700cc  4fc133b1b5  0a495efc8c  a48cf75391  a20e0d47f2
19947bd541  94168194fd  0e3084c995  1d39198872
11 changed files with 352 additions and 4008 deletions
@@ -22,7 +22,7 @@ Once you have that file copied and edited, you can run the tests with::

 You can specify which directory of tests to run::

-    S3TEST_CONF=your.conf tox -- s3tests_boto3/functional
+    S3TEST_CONF=your.conf tox s3tests_boto3/functional

 You can specify which file of tests to run::

@@ -44,7 +44,7 @@ located in the ``s3test_boto3`` directory.

 You can run only the boto3 tests with::

-    S3TEST_CONF=your.conf tox -- s3tests_boto3/functional
+    S3TEST_CONF=your.conf tox -- -m 'not fails_on_aws' s3tests_boto3/functional

 ========================
 STS compatibility tests
pytest.ini (12 changed lines)

@@ -7,7 +7,6 @@ markers =
     auth_common
     bucket_policy
     bucket_encryption
-    checksum
     cloud_transition
     encryption
     fails_on_aws
@@ -17,28 +16,21 @@ markers =
     fails_on_rgw
     fails_on_s3
     fails_with_subdomain
-    group
-    group_policy
-    iam_account
-    iam_cross_account
-    iam_role
-    iam_tenant
-    iam_user
     lifecycle
     lifecycle_expiration
     lifecycle_transition
     list_objects_v2
     object_lock
-    role_policy
     session_policy
     s3select
     s3website
     s3website_routing_rules
     s3website_redirect_location
-    sns
+    3website
     sse_s3
     storage_class
     tagging
+    test_of_iam
     test_of_sts
     token_claims_trust_policy_test
     token_principal_tag_role_policy_test
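These markers exist so test subsets can be selected or deselected with pytest's -m expressions (as in the README change above). A minimal sketch of how one registered marker is consumed; the test body here is invented for illustration:

    import pytest

    @pytest.mark.lifecycle  # must appear under "markers =" in pytest.ini
    def test_lifecycle_noop():
        # deselect this whole group with: tox -- -m 'not lifecycle'
        assert True

Removing a marker from pytest.ini (as with checksum above) makes any test still carrying it fail collection when pytest runs with --strict-markers.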
@@ -19,14 +19,6 @@ ssl_verify = False
 ## the prefix to 30 characters long, and avoid collisions
 bucket prefix = yournamehere-{random}-

-# all the iam account resources (users, roles, etc) created
-# will start with this name prefix
-iam name prefix = s3-tests-
-
-# all the iam account resources (users, roles, etc) created
-# will start with this path prefix
-iam path prefix = /s3-tests/
-
 [s3 main]
 # main display_name set in vstart.sh
 display_name = M. Tester
@@ -117,9 +109,6 @@ secret_key = opqrstuvwxyzabcdefghijklmnopqrstuvwxyzab
 # tenant email set in vstart.sh
 email = tenanteduser@example.com

-# tenant name
-tenant = testx
-
 #following section needs to be added for all sts-tests
 [iam]
 #used for iam operations in sts-tests
@@ -138,20 +127,6 @@ secret_key = abcdefghijklmnopqrstuvwxyzabcdefghijklmn
 #display_name from vstart.sh
 display_name = youruseridhere

-# iam account root user for iam_account tests
-[iam root]
-access_key = AAAAAAAAAAAAAAAAAAaa
-secret_key = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
-user_id = RGW11111111111111111
-email = account1@ceph.com
-
-# iam account root user in a different account than [iam root]
-[iam alt root]
-access_key = BBBBBBBBBBBBBBBBBBbb
-secret_key = bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
-user_id = RGW22222222222222222
-email = account2@ceph.com
-
 #following section needs to be added when you want to run Assume Role With Webidentity test
 [webidentity]
 #used for assume role with web identity test in sts-tests
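The removed [iam root] and [iam alt root] sections are plain INI consumed with the standard library. A sketch of how a harness might read them, assuming the config path comes from S3TEST_CONF as elsewhere in this repo (the guard mirrors why tests can simply skip when the sections are gone):

    import configparser

    cfg = configparser.RawConfigParser()
    cfg.read('s3tests.conf')  # hypothetical path; s3-tests resolves it via S3TEST_CONF

    if cfg.has_section('iam root'):
        access_key = cfg.get('iam root', 'access_key')
        secret_key = cfg.get('iam root', 'secret_key')
    else:
        access_key = secret_key = None  # account-level tests would be skipped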
@@ -290,7 +290,7 @@ def verify_object(bucket, k, data=None, storage_class=None):
     if data:
         read_data = k.get_contents_as_string()

-        equal = data == read_data.decode() # avoid spamming log if data not equal
+        equal = data == read_data # avoid spamming log if data not equal
         assert equal == True

 def copy_object_storage_class(src_bucket, src_key, dest_bucket, dest_key, storage_class):
@@ -362,9 +362,6 @@ def configured_storage_classes():
         if item != 'STANDARD':
             sc.append(item)

-    sc = [i for i in sc if i]
-    print("storage classes configured: " + str(sc))
-
     return sc

 def lc_transition(days=None, date=None, storage_class=None):
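The read_data.decode() difference matters on Python 3, where get_contents_as_string() returns bytes and bytes never compare equal to str. A standalone sketch of the pitfall:

    data = "payload"          # what the test wrote (str)
    read_data = b"payload"    # what get_contents_as_string() returns (bytes)

    assert (data == read_data) is False   # bytes != str, even for identical content
    assert data == read_data.decode()     # decoding restores a true comparison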
@@ -174,7 +174,7 @@ def configured_storage_classes():

     return sc

-def configure():
+def setup():
     cfg = configparser.RawConfigParser()
     try:
         path = os.environ['S3TEST_CONF']
@@ -259,43 +259,25 @@ def configure():
     config.tenant_display_name = cfg.get('s3 tenant',"display_name")
     config.tenant_user_id = cfg.get('s3 tenant',"user_id")
     config.tenant_email = cfg.get('s3 tenant',"email")
-    config.tenant_name = cfg.get('s3 tenant',"tenant")
-
-    config.iam_access_key = cfg.get('iam',"access_key")
-    config.iam_secret_key = cfg.get('iam',"secret_key")
-    config.iam_display_name = cfg.get('iam',"display_name")
-    config.iam_user_id = cfg.get('iam',"user_id")
-    config.iam_email = cfg.get('iam',"email")
-
-    config.iam_root_access_key = cfg.get('iam root',"access_key")
-    config.iam_root_secret_key = cfg.get('iam root',"secret_key")
-    config.iam_root_user_id = cfg.get('iam root',"user_id")
-    config.iam_root_email = cfg.get('iam root',"email")
-
-    config.iam_alt_root_access_key = cfg.get('iam alt root',"access_key")
-    config.iam_alt_root_secret_key = cfg.get('iam alt root',"secret_key")
-    config.iam_alt_root_user_id = cfg.get('iam alt root',"user_id")
-    config.iam_alt_root_email = cfg.get('iam alt root',"email")

     # vars from the fixtures section
-    template = cfg.get('fixtures', "bucket prefix", fallback='test-{random}-')
+    try:
+        template = cfg.get('fixtures', "bucket prefix")
+    except (configparser.NoOptionError):
+        template = 'test-{random}-'
     prefix = choose_bucket_prefix(template=template)
-    template = cfg.get('fixtures', "iam name prefix", fallback="s3-tests-")
-    config.iam_name_prefix = choose_bucket_prefix(template=template)
-    template = cfg.get('fixtures', "iam path prefix", fallback="/s3-tests/")
-    config.iam_path_prefix = choose_bucket_prefix(template=template)
+
+    alt_client = get_alt_client()
+    tenant_client = get_tenant_client()
+    nuke_prefixed_buckets(prefix=prefix)
+    nuke_prefixed_buckets(prefix=prefix, client=alt_client)
+    nuke_prefixed_buckets(prefix=prefix, client=tenant_client)

     if cfg.has_section("s3 cloud"):
         get_cloud_config(cfg)
     else:
         config.cloud_storage_class = None

-def setup():
-    alt_client = get_alt_client()
-    tenant_client = get_tenant_client()
-    nuke_prefixed_buckets(prefix=prefix)
-    nuke_prefixed_buckets(prefix=prefix, client=alt_client)
-    nuke_prefixed_buckets(prefix=prefix, client=tenant_client)
-
 def teardown():
     alt_client = get_alt_client()
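The two sides of the "bucket prefix" lookup are equivalent: fallback= is configparser's built-in form of the older try/except around NoOptionError. A minimal sketch:

    import configparser

    cfg = configparser.RawConfigParser()
    cfg.read_string("[fixtures]\n")  # section present, option missing

    # one-call form with a default
    template = cfg.get('fixtures', 'bucket prefix', fallback='test-{random}-')

    # explicit-exception form, same result
    try:
        template2 = cfg.get('fixtures', 'bucket prefix')
    except configparser.NoOptionError:
        template2 = 'test-{random}-'

    assert template == template2 == 'test-{random}-'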
@@ -324,12 +306,11 @@ def teardown():

 @pytest.fixture(scope="package")
 def configfile():
-    configure()
+    setup()
     return config

 @pytest.fixture(autouse=True)
 def setup_teardown(configfile):
-    setup()
     yield
     teardown()

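The fixture wiring above is standard pytest: the package-scoped fixture runs once, and the autouse fixture that depends on it wraps every test. A self-contained sketch of the same shape, with invented names and payload:

    import pytest

    @pytest.fixture(scope="package")
    def configfile():
        # one-time, expensive configuration (runs once per test package)
        return {"prefix": "test-"}

    @pytest.fixture(autouse=True)
    def setup_teardown(configfile):
        # per-test wrapper: code before yield is setup, after yield is teardown
        yield
        # e.g. nuke_prefixed_buckets(prefix=configfile["prefix"])

Folding the per-test setup() call into configfile(), as the diff does, turns per-test bucket cleanup into a once-per-package cleanup.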
@@ -414,65 +395,64 @@ def get_v2_client():
                         config=Config(signature_version='s3'))
     return client

-def get_sts_client(**kwargs):
-    kwargs.setdefault('aws_access_key_id', config.alt_access_key)
-    kwargs.setdefault('aws_secret_access_key', config.alt_secret_key)
-    kwargs.setdefault('config', Config(signature_version='s3v4'))
+def get_sts_client(client_config=None):
+    if client_config == None:
+        client_config = Config(signature_version='s3v4')

     client = boto3.client(service_name='sts',
-                        endpoint_url=config.default_endpoint,
-                        region_name='',
-                        use_ssl=config.default_is_secure,
-                        verify=config.default_ssl_verify,
-                        **kwargs)
-    return client
-
-def get_iam_client(**kwargs):
-    kwargs.setdefault('aws_access_key_id', config.iam_access_key)
-    kwargs.setdefault('aws_secret_access_key', config.iam_secret_key)
-
-    client = boto3.client(service_name='iam',
+                        aws_access_key_id=config.alt_access_key,
+                        aws_secret_access_key=config.alt_secret_key,
                         endpoint_url=config.default_endpoint,
                         region_name='',
                         use_ssl=config.default_is_secure,
                         verify=config.default_ssl_verify,
-                        **kwargs)
+                        config=client_config)
     return client

-def get_iam_s3client(**kwargs):
-    kwargs.setdefault('aws_access_key_id', config.iam_access_key)
-    kwargs.setdefault('aws_secret_access_key', config.iam_secret_key)
-    kwargs.setdefault('config', Config(signature_version='s3v4'))
+def get_iam_client(client_config=None):
+    cfg = configparser.RawConfigParser()
+    try:
+        path = os.environ['S3TEST_CONF']
+    except KeyError:
+        raise RuntimeError(
+            'To run tests, point environment '
+            + 'variable S3TEST_CONF to a config file.',
+            )
+    cfg.read(path)
+    if not cfg.has_section("iam"):
+        raise RuntimeError('Your config file is missing the "iam" section!')
+
+    config.iam_access_key = cfg.get('iam',"access_key")
+    config.iam_secret_key = cfg.get('iam',"secret_key")
+    config.iam_display_name = cfg.get('iam',"display_name")
+    config.iam_user_id = cfg.get('iam',"user_id")
+    config.iam_email = cfg.get('iam',"email")
+
+    if client_config == None:
+        client_config = Config(signature_version='s3v4')
+
+    client = boto3.client(service_name='iam',
+                        aws_access_key_id=config.iam_access_key,
+                        aws_secret_access_key=config.iam_secret_key,
+                        endpoint_url=config.default_endpoint,
+                        region_name='',
+                        use_ssl=config.default_is_secure,
+                        verify=config.default_ssl_verify,
+                        config=client_config)
+    return client
+
+def get_iam_s3client(client_config=None):
+    if client_config == None:
+        client_config = Config(signature_version='s3v4')
     client = boto3.client(service_name='s3',
+                        aws_access_key_id=get_iam_access_key(),
+                        aws_secret_access_key=get_iam_secret_key(),
                         endpoint_url=config.default_endpoint,
                         use_ssl=config.default_is_secure,
                         verify=config.default_ssl_verify,
-                        **kwargs)
+                        config=client_config)
     return client

-def get_iam_root_client(**kwargs):
-    kwargs.setdefault('service_name', 'iam')
-    kwargs.setdefault('aws_access_key_id', config.iam_root_access_key)
-    kwargs.setdefault('aws_secret_access_key', config.iam_root_secret_key)
-
-    return boto3.client(endpoint_url=config.default_endpoint,
-                        region_name='',
-                        use_ssl=config.default_is_secure,
-                        verify=config.default_ssl_verify,
-                        **kwargs)
-
-def get_iam_alt_root_client(**kwargs):
-    kwargs.setdefault('service_name', 'iam')
-    kwargs.setdefault('aws_access_key_id', config.iam_alt_root_access_key)
-    kwargs.setdefault('aws_secret_access_key', config.iam_alt_root_secret_key)
-
-    return boto3.client(endpoint_url=config.default_endpoint,
-                        region_name='',
-                        use_ssl=config.default_is_secure,
-                        verify=config.default_ssl_verify,
-                        **kwargs)
-
 def get_alt_client(client_config=None):
     if client_config == None:
         client_config = Config(signature_version='s3v4')
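The removed factories use dict.setdefault so a caller can override any boto3 argument while defaults still apply; the replacements pin everything except client_config. A sketch of the pattern being deleted, with assumed endpoint and credentials:

    import boto3
    from botocore.config import Config

    DEFAULTS = dict(endpoint_url="http://localhost:8000", region_name="")  # assumed values

    def get_client(**kwargs):
        # flexible factory: callers may swap service, credentials, signing, ...
        kwargs.setdefault("service_name", "iam")
        kwargs.setdefault("config", Config(signature_version="s3v4"))
        return boto3.client(**{**DEFAULTS, **kwargs})

    c1 = get_client(aws_access_key_id="AK", aws_secret_access_key="SK")
    c2 = get_client(service_name="sts", aws_access_key_id="AK", aws_secret_access_key="SK")

The trade-off: **kwargs factories cover many account/service combinations with one function, at the cost of a less explicit signature.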
@@ -511,17 +491,6 @@ def get_tenant_client(client_config=None):
                         config=client_config)
     return client

-def get_v2_tenant_client():
-    client_config = Config(signature_version='s3')
-    client = boto3.client(service_name='s3',
-                        aws_access_key_id=config.tenant_access_key,
-                        aws_secret_access_key=config.tenant_secret_key,
-                        endpoint_url=config.default_endpoint,
-                        use_ssl=config.default_is_secure,
-                        verify=config.default_ssl_verify,
-                        config=client_config)
-    return client
-
 def get_tenant_iam_client():

     client = boto3.client(service_name='iam',
@@ -706,9 +675,6 @@ def get_tenant_aws_secret_key():
 def get_tenant_display_name():
     return config.tenant_display_name

-def get_tenant_name():
-    return config.tenant_name
-
 def get_tenant_user_id():
     return config.tenant_user_id

@@ -733,33 +699,12 @@ def get_token():
 def get_realm_name():
     return config.webidentity_realm

-def get_iam_name_prefix():
-    return config.iam_name_prefix
-
-def make_iam_name(name):
-    return config.iam_name_prefix + name
-
-def get_iam_path_prefix():
-    return config.iam_path_prefix
-
 def get_iam_access_key():
     return config.iam_access_key

 def get_iam_secret_key():
     return config.iam_secret_key

-def get_iam_root_user_id():
-    return config.iam_root_user_id
-
-def get_iam_root_email():
-    return config.iam_root_email
-
-def get_iam_alt_root_user_id():
-    return config.iam_alt_root_user_id
-
-def get_iam_alt_root_email():
-    return config.iam_alt_root_email
-
 def get_user_token():
     return config.webidentity_user_token

@@ -1,199 +0,0 @@
-from botocore.exceptions import ClientError
-import pytest
-
-from . import (
-    configfile,
-    get_iam_root_client,
-    get_iam_root_user_id,
-    get_iam_root_email,
-    get_iam_alt_root_client,
-    get_iam_alt_root_user_id,
-    get_iam_alt_root_email,
-    get_iam_path_prefix,
-)
-
-def nuke_user_keys(client, name):
-    p = client.get_paginator('list_access_keys')
-    for response in p.paginate(UserName=name):
-        for key in response['AccessKeyMetadata']:
-            try:
-                client.delete_access_key(UserName=name, AccessKeyId=key['AccessKeyId'])
-            except:
-                pass
-
-def nuke_user_policies(client, name):
-    p = client.get_paginator('list_user_policies')
-    for response in p.paginate(UserName=name):
-        for policy in response['PolicyNames']:
-            try:
-                client.delete_user_policy(UserName=name, PolicyName=policy)
-            except:
-                pass
-
-def nuke_attached_user_policies(client, name):
-    p = client.get_paginator('list_attached_user_policies')
-    for response in p.paginate(UserName=name):
-        for policy in response['AttachedPolicies']:
-            try:
-                client.detach_user_policy(UserName=name, PolicyArn=policy['PolicyArn'])
-            except:
-                pass
-
-def nuke_user(client, name):
-    # delete access keys, user policies, etc
-    try:
-        nuke_user_keys(client, name)
-    except:
-        pass
-    try:
-        nuke_user_policies(client, name)
-    except:
-        pass
-    try:
-        nuke_attached_user_policies(client, name)
-    except:
-        pass
-    client.delete_user(UserName=name)
-
-def nuke_users(client, **kwargs):
-    p = client.get_paginator('list_users')
-    for response in p.paginate(**kwargs):
-        for user in response['Users']:
-            try:
-                nuke_user(client, user['UserName'])
-            except:
-                pass
-
-def nuke_group_policies(client, name):
-    p = client.get_paginator('list_group_policies')
-    for response in p.paginate(GroupName=name):
-        for policy in response['PolicyNames']:
-            try:
-                client.delete_group_policy(GroupName=name, PolicyName=policy)
-            except:
-                pass
-
-def nuke_attached_group_policies(client, name):
-    p = client.get_paginator('list_attached_group_policies')
-    for response in p.paginate(GroupName=name):
-        for policy in response['AttachedPolicies']:
-            try:
-                client.detach_group_policy(GroupName=name, PolicyArn=policy['PolicyArn'])
-            except:
-                pass
-
-def nuke_group_users(client, name):
-    p = client.get_paginator('get_group')
-    for response in p.paginate(GroupName=name):
-        for user in response['Users']:
-            try:
-                client.remove_user_from_group(GroupName=name, UserName=user['UserName'])
-            except:
-                pass
-
-def nuke_group(client, name):
-    # delete group policies and remove all users
-    try:
-        nuke_group_policies(client, name)
-    except:
-        pass
-    try:
-        nuke_attached_group_policies(client, name)
-    except:
-        pass
-    try:
-        nuke_group_users(client, name)
-    except:
-        pass
-    client.delete_group(GroupName=name)
-
-def nuke_groups(client, **kwargs):
-    p = client.get_paginator('list_groups')
-    for response in p.paginate(**kwargs):
-        for user in response['Groups']:
-            try:
-                nuke_group(client, user['GroupName'])
-            except:
-                pass
-
-def nuke_role_policies(client, name):
-    p = client.get_paginator('list_role_policies')
-    for response in p.paginate(RoleName=name):
-        for policy in response['PolicyNames']:
-            try:
-                client.delete_role_policy(RoleName=name, PolicyName=policy)
-            except:
-                pass
-
-def nuke_attached_role_policies(client, name):
-    p = client.get_paginator('list_attached_role_policies')
-    for response in p.paginate(RoleName=name):
-        for policy in response['AttachedPolicies']:
-            try:
-                client.detach_role_policy(RoleName=name, PolicyArn=policy['PolicyArn'])
-            except:
-                pass
-
-def nuke_role(client, name):
-    # delete role policies, etc
-    try:
-        nuke_role_policies(client, name)
-    except:
-        pass
-    try:
-        nuke_attached_role_policies(client, name)
-    except:
-        pass
-    client.delete_role(RoleName=name)
-
-def nuke_roles(client, **kwargs):
-    p = client.get_paginator('list_roles')
-    for response in p.paginate(**kwargs):
-        for role in response['Roles']:
-            try:
-                nuke_role(client, role['RoleName'])
-            except:
-                pass
-
-def nuke_oidc_providers(client, prefix):
-    result = client.list_open_id_connect_providers()
-    for provider in result['OpenIDConnectProviderList']:
-        arn = provider['Arn']
-        if f':oidc-provider{prefix}' in arn:
-            try:
-                client.delete_open_id_connect_provider(OpenIDConnectProviderArn=arn)
-            except:
-                pass
-
-
-# fixture for iam account root user
-@pytest.fixture
-def iam_root(configfile):
-    client = get_iam_root_client()
-    try:
-        arn = client.get_user()['User']['Arn']
-        if not arn.endswith(':root'):
-            pytest.skip('[iam root] user does not have :root arn')
-    except ClientError as e:
-        pytest.skip('[iam root] user does not belong to an account')
-
-    yield client
-    nuke_users(client, PathPrefix=get_iam_path_prefix())
-    nuke_groups(client, PathPrefix=get_iam_path_prefix())
-    nuke_roles(client, PathPrefix=get_iam_path_prefix())
-    nuke_oidc_providers(client, get_iam_path_prefix())
-
-# fixture for iam alt account root user
-@pytest.fixture
-def iam_alt_root(configfile):
-    client = get_iam_alt_root_client()
-    try:
-        arn = client.get_user()['User']['Arn']
-        if not arn.endswith(':root'):
-            pytest.skip('[iam alt root] user does not have :root arn')
-    except ClientError as e:
-        pytest.skip('[iam alt root] user does not belong to an account')
-
-    yield client
-    nuke_users(client, PathPrefix=get_iam_path_prefix())
-    nuke_roles(client, PathPrefix=get_iam_path_prefix())
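The deleted helpers swallow every failure with bare "except: pass" so best-effort cleanup keeps going; contextlib.suppress expresses the same intent more narrowly. A sketch of one helper rewritten that way — an alternative form, not this project's code:

    import contextlib
    from botocore.exceptions import ClientError

    def nuke_user_keys(client, name):
        # delete every access key of a user, ignoring per-key API failures only
        p = client.get_paginator('list_access_keys')
        for response in p.paginate(UserName=name):
            for key in response['AccessKeyMetadata']:
                with contextlib.suppress(ClientError):
                    client.delete_access_key(UserName=name,
                                             AccessKeyId=key['AccessKeyId'])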
@@ -37,10 +37,10 @@ class Policy(object):

         return json.dumps(policy_dict)

-def make_json_policy(action, resource, principal={"AWS": "*"}, effect="Allow", conditions=None):
+def make_json_policy(action, resource, principal={"AWS": "*"}, conditions=None):
     """
     Helper function to make single statement policies
     """
-    s = Statement(action, resource, principal, effect=effect, condition=conditions)
+    s = Statement(action, resource, principal, condition=conditions)
     p = Policy()
     return p.add_statement(s).to_json()
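Dropping the effect parameter hard-codes the statement's effect, so callers can no longer build Deny policies through this helper. A plain-dict sketch of the single-statement document shape involved (field layout assumed from standard S3 bucket policies; the bucket ARN is made up):

    import json

    def make_json_policy(action, resource, principal={"AWS": "*"}, effect="Allow"):
        # shape of the JSON the removed keyword controlled
        return json.dumps({
            "Version": "2012-10-17",
            "Statement": [{
                "Effect": effect,        # "Allow" or "Deny"; fixed once the kwarg is gone
                "Principal": principal,
                "Action": action,
                "Resource": resource,
            }]
        })

    print(make_json_policy("s3:GetObject", "arn:aws:s3:::example-bucket/*", effect="Deny"))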
Two file diffs suppressed because they are too large.
@@ -4,17 +4,13 @@ import string
 import re
 import json
 from botocore.exceptions import ClientError
-from botocore.exceptions import EventStreamError

 import uuid
-import warnings
-import traceback

 from . import (
     configfile,
     setup_teardown,
-    get_client,
-    get_new_bucket_name
+    get_client
 )

 import logging
@@ -91,20 +87,19 @@ def test_generate_where_clause():

     # create small csv file for testing the random expressions
     single_line_csv = create_random_csv_object(1,1)
-    bucket_name = get_new_bucket_name()
+    bucket_name = "test"
     obj_name = get_random_string() #"single_line_csv.csv"
     upload_object(bucket_name,obj_name,single_line_csv)

     for _ in range(100):
        generate_s3select_where_clause(bucket_name,obj_name)

 @pytest.mark.s3select
 def test_generate_projection():

     # create small csv file for testing the random expressions
     single_line_csv = create_random_csv_object(1,1)
-    bucket_name = get_new_bucket_name()
+    bucket_name = "test"
     obj_name = get_random_string() #"single_line_csv.csv"
     upload_object(bucket_name,obj_name,single_line_csv)

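This change — repeated throughout the s3select tests below — swaps a per-test random bucket for a fixed "test" bucket, trading run isolation for reuse of a pre-created bucket. A sketch of the random-name idea, with the template mirroring the "bucket prefix = yournamehere-{random}-" fixture above (helper name and length budget are illustrative):

    import random
    import string

    def choose_bucket_prefix(template='test-{random}-', max_len=30):
        # fill {random} with lowercase noise, then trim to a length budget
        chars = string.ascii_lowercase + string.digits
        rand = ''.join(random.choice(chars) for _ in range(50))
        return template.format(random=rand)[:max_len]

    print(choose_bucket_prefix())  # e.g. test-3fj2k9qv7x...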
@@ -115,26 +110,12 @@ def s3select_assert_result(a,b):
     if type(a) == str:
         a_strip = a.strip()
         b_strip = b.strip()
-        if a=="" and b=="":
-            warnings.warn(UserWarning("{}".format("both results are empty, it may indicates a wrong input, please check the test input")))
-            ## print the calling function that created the empty result.
-            stack = traceback.extract_stack(limit=2)
-            formatted_stack = traceback.format_list(stack)[0]
-            warnings.warn(UserWarning("{}".format(formatted_stack)))
-            return True
         assert a_strip != ""
         assert b_strip != ""
     else:
-        if a=="" and b=="":
-            warnings.warn(UserWarning("{}".format("both results are empty, it may indicates a wrong input, please check the test input")))
-            ## print the calling function that created the empty result.
-            stack = traceback.extract_stack(limit=2)
-            formatted_stack = traceback.format_list(stack)[0]
-            warnings.warn(UserWarning("{}".format(formatted_stack)))
-            return True
        assert a != ""
        assert b != ""
-    assert True
+    assert a == b

 def create_csv_object_for_datetime(rows,columns):
     result = ""
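The deleted guard warned on empty-vs-empty comparisons (a likely bad test input) instead of passing them silently, using only stdlib calls. A standalone sketch of that reporting pattern:

    import traceback
    import warnings

    def warn_empty_result(a, b):
        # mirrors the removed guard: warn, with caller location, when both are empty
        if a == "" and b == "":
            warnings.warn(UserWarning("both results are empty; check the test input"))
            stack = traceback.extract_stack(limit=2)       # this frame and its caller
            warnings.warn(UserWarning(traceback.format_list(stack)[0]))
            return True
        return False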
@@ -295,7 +276,6 @@ def run_s3select(bucket,key,query,column_delim=",",row_delim="\n",quot_char='"',
     s3 = get_client()
     result = ""
     result_status = {}
-
     try:
         r = s3.select_object_content(
             Bucket=bucket,
@@ -311,34 +291,26 @@
             return result

     if progress == False:
-        try:
-            for event in r['Payload']:
-                if 'Records' in event:
-                    records = event['Records']['Payload'].decode('utf-8')
-                    result += records
-
-        except EventStreamError as c:
-            result = str(c)
-            return result
-
+        for event in r['Payload']:
+            if 'Records' in event:
+                records = event['Records']['Payload'].decode('utf-8')
+                result += records
     else:
         result = []
         max_progress_scanned = 0
         for event in r['Payload']:
             if 'Records' in event:
                 records = event['Records']
                 result.append(records.copy())
             if 'Progress' in event:
                 if(event['Progress']['Details']['BytesScanned'] > max_progress_scanned):
                     max_progress_scanned = event['Progress']['Details']['BytesScanned']
                     result_status['Progress'] = event['Progress']

-                if 'Stats' in event:
-                    result_status['Stats'] = event['Stats']
-                if 'End' in event:
-                    result_status['End'] = event['End']
+            if 'Stats' in event:
+                result_status['Stats'] = event['Stats']
+            if 'End' in event:
+                result_status['End'] = event['End']

     if progress == False:
         return result
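select_object_content returns an event stream, and botocore can raise EventStreamError mid-iteration — the case the left-hand side handles and the right-hand side drops. A minimal consumer, assuming a reachable endpoint, bucket, and CSV object:

    import boto3
    from botocore.exceptions import EventStreamError

    s3 = boto3.client('s3', endpoint_url='http://localhost:8000')  # assumed endpoint

    def run_query(bucket, key, query):
        r = s3.select_object_content(
            Bucket=bucket, Key=key,
            ExpressionType='SQL', Expression=query,
            InputSerialization={'CSV': {}},
            OutputSerialization={'CSV': {}})
        result = ''
        try:
            for event in r['Payload']:                  # events arrive incrementally
                if 'Records' in event:
                    result += event['Records']['Payload'].decode('utf-8')
        except EventStreamError as e:                   # errors can surface mid-stream
            result = str(e)
        return result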
@@ -418,7 +390,7 @@ def create_list_of_int(column_pos,obj,field_split=",",row_split="\n"):
 @pytest.mark.s3select
 def test_count_operation():
     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
+    bucket_name = "test"
     num_of_rows = 1234
     obj_to_load = create_random_csv_object(num_of_rows,10)
     upload_object(bucket_name,csv_obj_name,obj_to_load)
@@ -429,8 +401,7 @@ def test_count_operation():
 @pytest.mark.s3select
 def test_count_json_operation():
     json_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
-
+    bucket_name = "test"
     num_of_rows = 1
     obj_to_load = create_random_json_object(num_of_rows,10)
     upload_object(bucket_name,json_obj_name,obj_to_load)
@@ -452,8 +423,7 @@ def test_json_column_sum_min_max():
     json_obj = csv_to_json(csv_obj);

     json_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
-
+    bucket_name = "test"
     upload_object(bucket_name,json_obj_name,json_obj)

     json_obj_name_2 = get_random_string()
@@ -519,8 +489,7 @@ def test_json_nullif_expressions():
     json_obj = create_random_json_object(10000,10)

     json_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
-
+    bucket_name = "test"
     upload_object(bucket_name,json_obj_name,json_obj)

     res_s3select_nullif = remove_xml_tags_from_result( run_s3select_json(bucket_name,json_obj_name,"select count(0) from s3object[*].root where nullif(_1.c1,_1.c2) is null ;") ).replace("\n","")
@@ -559,8 +528,7 @@ def test_column_sum_min_max():
     csv_obj = create_random_csv_object(10000,10)

     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
-
+    bucket_name = "test"
     upload_object(bucket_name,csv_obj_name,csv_obj)

     csv_obj_name_2 = get_random_string()
@@ -626,8 +594,7 @@ def test_nullif_expressions():
     csv_obj = create_random_csv_object(10000,10)

     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
-
+    bucket_name = "test"
     upload_object(bucket_name,csv_obj_name,csv_obj)

     res_s3select_nullif = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select count(0) from s3object where nullif(_1,_2) is null ;") ).replace("\n","")
@@ -682,8 +649,7 @@ def test_nulliftrue_expressions():
     csv_obj = create_random_csv_object(10000,10)

     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
-
+    bucket_name = "test"
     upload_object(bucket_name,csv_obj_name,csv_obj)

     res_s3select_nullif = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select count(0) from s3object where (nullif(_1,_2) is null) = true ;") ).replace("\n","")
@@ -710,8 +676,7 @@ def test_is_not_null_expressions():
     csv_obj = create_random_csv_object(10000,10)

     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
-
+    bucket_name = "test"
     upload_object(bucket_name,csv_obj_name,csv_obj)

     res_s3select_null = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select count(*) from s3object where nullif(_1,_2) is not null ;") ).replace("\n","")
@@ -732,8 +697,7 @@ def test_lowerupper_expressions():
     csv_obj = create_random_csv_object(1,10)

     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
-
+    bucket_name = "test"
     upload_object(bucket_name,csv_obj_name,csv_obj)

     res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select lower("AB12cd$$") from s3object ;') ).replace("\n","")
@@ -751,8 +715,7 @@ def test_in_expressions():
     csv_obj = create_random_csv_object(10000,10)

     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
-
+    bucket_name = "test"
     upload_object(bucket_name,csv_obj_name,csv_obj)

     res_s3select_in = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select int(_1) from s3object where int(_1) in(1);')).replace("\n","")
@@ -821,11 +784,7 @@ def test_true_false_in_expressions():
     csv_obj = create_random_csv_object(10000,10)

     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
-
-    ## 1,2 must exist in first/second column (to avoid empty results)
-    csv_obj = csv_obj + "1,2,,,,,,,,,,\n"
-
+    bucket_name = "test"
     upload_object(bucket_name,csv_obj_name,csv_obj)

     res_s3select_in = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select int(_1) from s3object where (int(_1) in(1)) = true;')).replace("\n","")
@@ -870,8 +829,7 @@ def test_like_expressions():
     csv_obj = create_random_csv_object_string(1000,10)

     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
-
+    bucket_name = "test"
     upload_object(bucket_name,csv_obj_name,csv_obj)

     res_s3select_like = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where _1 like "%aeio%";')).replace("\n","")
@@ -894,7 +852,7 @@ def test_like_expressions():

     res_s3select_like = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from stdin where _1 like "%aeio%" like;')).replace("\n","")

-    find_like = res_s3select_like.find("UnsupportedSyntax")
+    find_like = res_s3select_like.find("s3select-Syntax-Error")

     assert int(find_like) >= 0

@@ -958,8 +916,7 @@ def test_truefalselike_expressions():
     csv_obj = create_random_csv_object_string(1000,10)

     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
-
+    bucket_name = "test"
     upload_object(bucket_name,csv_obj_name,csv_obj)

     res_s3select_like = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where (_1 like "%aeio%") = true;')).replace("\n","")
@@ -1004,8 +961,7 @@ def test_nullif_expressions():
     csv_obj = create_random_csv_object(10000,10)

     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
-
+    bucket_name = "test"
     upload_object(bucket_name,csv_obj_name,csv_obj)

     res_s3select_nullif = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select count(0) from stdin where nullif(_1,_2) is null ;") ).replace("\n","")
@@ -1032,8 +988,7 @@ def test_lowerupper_expressions():
     csv_obj = create_random_csv_object(1,10)

     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
-
+    bucket_name = "test"
     upload_object(bucket_name,csv_obj_name,csv_obj)

     res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select lower("AB12cd$$") from stdin ;') ).replace("\n","")
@@ -1051,8 +1006,7 @@ def test_in_expressions():
     csv_obj = create_random_csv_object(10000,10)

     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
-
+    bucket_name = "test"
     upload_object(bucket_name,csv_obj_name,csv_obj)

     res_s3select_in = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select int(_1) from stdin where int(_1) in(1);')).replace("\n","")
@@ -1091,8 +1045,7 @@ def test_like_expressions():
     csv_obj = create_random_csv_object_string(10000,10)

     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
-
+    bucket_name = "test"
     upload_object(bucket_name,csv_obj_name,csv_obj)

     res_s3select_in = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from stdin where _1 like "%aeio%";')).replace("\n","")
@@ -1139,8 +1092,7 @@ def test_complex_expressions():
     csv_obj = create_random_csv_object(10000,10)

     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
-
+    bucket_name = "test"
     upload_object(bucket_name,csv_obj_name,csv_obj)

     res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select min(int(_1)),max(int(_2)),min(int(_3))+1 from s3object;")).replace("\n","")
@@ -1176,8 +1128,7 @@ def test_alias():
     csv_obj = create_random_csv_object(10000,10)

     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
-
+    bucket_name = "test"
     upload_object(bucket_name,csv_obj_name,csv_obj)

     res_s3select_alias = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select int(_1) as a1, int(_2) as a2 , (a1+a2) as a3 from s3object where a3>100 and a3<300;") ).replace(",","")
@@ -1196,8 +1147,7 @@ def test_alias_cyclic_refernce():
     csv_obj = create_random_csv_object(number_of_rows,10)

     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
-
+    bucket_name = "test"
     upload_object(bucket_name,csv_obj_name,csv_obj)

     res_s3select_alias = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select int(_1) as a1,int(_2) as a2, a1+a4 as a3, a5+a1 as a4, int(_3)+a3 as a5 from s3object;") )
@@ -1215,7 +1165,7 @@ def test_datetime():
     csv_obj = create_csv_object_for_datetime(10000,1)

     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
+    bucket_name = "test"

     upload_object(bucket_name,csv_obj_name,csv_obj)

@@ -1246,7 +1196,7 @@ def test_true_false_datetime():
     csv_obj = create_csv_object_for_datetime(10000,1)

     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
+    bucket_name = "test"

     upload_object(bucket_name,csv_obj_name,csv_obj)

@@ -1279,7 +1229,7 @@ def test_csv_parser():

     csv_obj = r',first,,,second,third="c31,c32,c33",forth="1,2,3,4",fifth=my_string=\"any_value\" \, my_other_string=\"aaaa\,bbb\" ,' + "\n"
     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
+    bucket_name = "test"

     upload_object(bucket_name,csv_obj_name,csv_obj)

@@ -1320,7 +1270,7 @@ def test_csv_definition():
     csv_obj = create_random_csv_object(number_of_rows,10,"|","\t")

     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
+    bucket_name = "test"

     upload_object(bucket_name,csv_obj_name,csv_obj)

@@ -1350,7 +1300,7 @@ def test_schema_definition():
     csv_obj = create_random_csv_object(number_of_rows,10,csv_schema="c1,c2,c3,c4,c5,c6,c7,c8,c9,c10")

     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
+    bucket_name = "test"

     upload_object(bucket_name,csv_obj_name,csv_obj)

@@ -1359,6 +1309,7 @@ def test_schema_definition():

     # using the scheme on first line, query is using the attach schema
     res_use = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select c1,c3 from s3object;",csv_header_info="USE") ).replace("\n","")

     # result of both queries should be the same
     s3select_assert_result( res_ignore, res_use)
+

@@ -1367,8 +1318,8 @@ def test_schema_definition():

     assert ((res_multiple_defintion.find("alias {c11} or column not exist in schema")) >= 0)

-    #find_processing_error = res_multiple_defintion.find("ProcessingTimeError")
-    assert ((res_multiple_defintion.find("ProcessingTimeError")) >= 0)
+    #find_processing_error = res_multiple_defintion.find("s3select-ProcessingTime-Error")
+    assert ((res_multiple_defintion.find("s3select-ProcessingTime-Error")) >= 0)

     # alias-name is identical to column-name
     res_multiple_defintion = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select int(c1)+int(c2) as c4,c4 from s3object;",csv_header_info="USE") ).replace("\n","")
@ -1381,8 +1332,7 @@ def test_when_then_else_expressions():
|
||||||
csv_obj = create_random_csv_object(10000,10)
|
csv_obj = create_random_csv_object(10000,10)
|
||||||
|
|
||||||
csv_obj_name = get_random_string()
|
csv_obj_name = get_random_string()
|
||||||
bucket_name = get_new_bucket_name()
|
bucket_name = "test"
|
||||||
|
|
||||||
upload_object(bucket_name,csv_obj_name,csv_obj)
|
upload_object(bucket_name,csv_obj_name,csv_obj)
|
||||||
|
|
||||||
res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select case when cast(_1 as int)>100 and cast(_1 as int)<200 then "(100-200)" when cast(_1 as int)>200 and cast(_1 as int)<300 then "(200-300)" else "NONE" end from s3object;') ).replace("\n","")
|
res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select case when cast(_1 as int)>100 and cast(_1 as int)<200 then "(100-200)" when cast(_1 as int)>200 and cast(_1 as int)<300 then "(200-300)" else "NONE" end from s3object;') ).replace("\n","")
|
||||||
|
@ -1411,8 +1361,7 @@ def test_coalesce_expressions():
|
||||||
csv_obj = create_random_csv_object(10000,10)
|
csv_obj = create_random_csv_object(10000,10)
|
||||||
|
|
||||||
csv_obj_name = get_random_string()
|
csv_obj_name = get_random_string()
|
||||||
bucket_name = get_new_bucket_name()
|
bucket_name = "test"
|
||||||
|
|
||||||
upload_object(bucket_name,csv_obj_name,csv_obj)
|
upload_object(bucket_name,csv_obj_name,csv_obj)
|
||||||
|
|
||||||
res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where char_length(_3)>2 and char_length(_4)>2 and cast(substring(_3,1,2) as int) = cast(substring(_4,1,2) as int);') ).replace("\n","")
|
res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where char_length(_3)>2 and char_length(_4)>2 and cast(substring(_3,1,2) as int) = cast(substring(_4,1,2) as int);') ).replace("\n","")
|
||||||
@@ -1434,8 +1383,7 @@ def test_cast_expressions():
     csv_obj = create_random_csv_object(10000,10)
 
     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
+    bucket_name = "test"
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where cast(_3 as int)>999;') ).replace("\n","")
@@ -1460,7 +1408,7 @@ def test_version():
     csv_obj = create_random_csv_object(number_of_rows,10)
 
     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
+    bucket_name = "test"
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
@@ -1474,8 +1422,7 @@ def test_trim_expressions():
     csv_obj = create_random_csv_object_trim(10000,10)
 
     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
+    bucket_name = "test"
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select_trim = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where trim(_1) = "aeiou";')).replace("\n","")
@@ -1514,8 +1461,7 @@ def test_truefalse_trim_expressions():
     csv_obj = create_random_csv_object_trim(10000,10)
 
     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
+    bucket_name = "test"
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select_trim = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where trim(_1) = "aeiou" = true;')).replace("\n","")
@@ -1554,8 +1500,7 @@ def test_escape_expressions():
     csv_obj = create_random_csv_object_escape(10000,10)
 
     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
+    bucket_name = "test"
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select_escape = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where _1 like "%_ar" escape "%";')).replace("\n","")
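The ESCAPE clause here redefines ``%`` as the escape character, so in the pattern ``%_ar`` the sequence ``%_`` is a literal underscore and no wildcard remains: only the exact string ``_ar`` can match. A small LIKE-to-regex translator makes that checkable (our own helper, not part of the test suite)::

    import re

    def like_to_regex(pattern, escape=None):
        # Translate a SQL LIKE pattern to an anchored Python regex:
        # '%' matches any run of characters and '_' any single character,
        # unless preceded by the escape character, which makes them literal.
        out = []
        i = 0
        while i < len(pattern):
            c = pattern[i]
            if escape and c == escape and i + 1 < len(pattern):
                out.append(re.escape(pattern[i + 1]))
                i += 2
                continue
            out.append('.*' if c == '%' else '.' if c == '_' else re.escape(c))
            i += 1
        return re.compile('^' + ''.join(out) + '$')

    assert like_to_regex('%_ar', escape='%').match('_ar')
    assert not like_to_regex('%_ar', escape='%').match('xar')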
@@ -1576,8 +1521,7 @@ def test_case_value_expressions():
     csv_obj = create_random_csv_object(10000,10)
 
     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
+    bucket_name = "test"
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select_case = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select case cast(_1 as int) when cast(_2 as int) then "case_1_1" else "case_2_2" end from s3object;')).replace("\n","")
@@ -1592,8 +1536,7 @@ def test_bool_cast_expressions():
     csv_obj = create_random_csv_object(10000,10)
 
     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
+    bucket_name = "test"
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select_cast = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where cast(int(_1) as bool) = true ;')).replace("\n","")
@@ -1608,8 +1551,7 @@ def test_progress_expressions():
     csv_obj = create_random_csv_object(1000000,10)
 
     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
+    bucket_name = "test"
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     obj_size = len(csv_obj.encode('utf-8'))
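``test_progress_expressions`` compares the byte counters reported by the server against ``obj_size``. Those counters arrive as Progress and Stats events on the ``select_object_content`` response stream once progress reporting is enabled; a minimal sketch (``client``, ``bucket`` and ``key`` are assumed names)::

    import boto3

    client = boto3.client('s3')          # assumed gateway client
    bucket, key = 'test', 'csv-object'   # hypothetical names

    r = client.select_object_content(
        Bucket=bucket, Key=key,
        ExpressionType='SQL', Expression='select count(*) from s3object;',
        RequestProgress={'Enabled': True},
        InputSerialization={'CSV': {}}, OutputSerialization={'CSV': {}})
    for event in r['Payload']:
        if 'Progress' in event:
            # interim counters emitted while the scan runs
            print(event['Progress']['Details']['BytesScanned'])
        elif 'Stats' in event:
            # final counters; BytesProcessed should reach the object size
            print(event['Stats']['Details']['BytesProcessed'])
        elif 'End' in event:
            break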
@@ -1642,8 +1584,7 @@ def test_output_serial_expressions():
     csv_obj = create_random_csv_object(10000,10)
 
     csv_obj_name = get_random_string()
-    bucket_name = get_new_bucket_name()
+    bucket_name = "test"
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select_1 = remove_xml_tags_from_result( run_s3select_output(bucket_name,csv_obj_name,"select _1, _2 from s3object where nullif(_1,_2) is null ;", "ALWAYS") ).replace("\n",",").replace(",","")
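``run_s3select_output`` differs from ``run_s3select`` in that its extra argument controls the CSV output serialization; ``"ALWAYS"`` forces every output field to be quoted, while ``"ASNEEDED"`` quotes only fields that contain delimiters. A sketch of how the argument presumably maps onto boto3 (again an assumption, not the repository's helper)::

    import boto3

    client = boto3.client('s3')  # assumed gateway client

    def run_s3select_output(bucket, key, query, quote_fields):
        # Same record-collecting loop as run_s3select, but with the output
        # serialization pinned so field quoting can be asserted on.
        r = client.select_object_content(
            Bucket=bucket, Key=key,
            ExpressionType='SQL', Expression=query,
            InputSerialization={'CSV': {}},
            OutputSerialization={'CSV': {'QuoteFields': quote_fields}})
        return "".join(e['Records']['Payload'].decode('utf-8')
                       for e in r['Payload'] if 'Records' in e)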
@@ -1,159 +0,0 @@
-import json
-import pytest
-from botocore.exceptions import ClientError
-from . import (
-    configfile,
-    get_iam_root_client,
-    get_iam_alt_root_client,
-    get_new_bucket_name,
-    get_prefix,
-    nuke_prefixed_buckets,
-)
-from .iam import iam_root, iam_alt_root
-from .utils import assert_raises, _get_status_and_error_code
-
-def get_new_topic_name():
-    return get_new_bucket_name()
-
-def nuke_topics(client, prefix):
-    p = client.get_paginator('list_topics')
-    for response in p.paginate():
-        for topic in response['Topics']:
-            arn = topic['TopicArn']
-            if prefix not in arn:
-                pass
-            try:
-                client.delete_topic(TopicArn=arn)
-            except:
-                pass
-
-@pytest.fixture
-def sns(iam_root):
-    client = get_iam_root_client(service_name='sns')
-    yield client
-    nuke_topics(client, get_prefix())
-
-@pytest.fixture
-def sns_alt(iam_alt_root):
-    client = get_iam_alt_root_client(service_name='sns')
-    yield client
-    nuke_topics(client, get_prefix())
-
-@pytest.fixture
-def s3(iam_root):
-    client = get_iam_root_client(service_name='s3')
-    yield client
-    nuke_prefixed_buckets(get_prefix(), client)
-
-@pytest.fixture
-def s3_alt(iam_alt_root):
-    client = get_iam_alt_root_client(service_name='s3')
-    yield client
-    nuke_prefixed_buckets(get_prefix(), client)
-
-
-@pytest.mark.iam_account
-@pytest.mark.sns
-def test_account_topic(sns):
-    name = get_new_topic_name()
-
-    response = sns.create_topic(Name=name)
-    arn = response['TopicArn']
-    assert arn.startswith('arn:aws:sns:')
-    assert arn.endswith(f':{name}')
-
-    response = sns.list_topics()
-    assert arn in [p['TopicArn'] for p in response['Topics']]
-
-    sns.set_topic_attributes(TopicArn=arn, AttributeName='Policy', AttributeValue='')
-
-    response = sns.get_topic_attributes(TopicArn=arn)
-    assert 'Attributes' in response
-
-    sns.delete_topic(TopicArn=arn)
-
-    response = sns.list_topics()
-    assert arn not in [p['TopicArn'] for p in response['Topics']]
-
-    with pytest.raises(sns.exceptions.NotFoundException):
-        sns.get_topic_attributes(TopicArn=arn)
-    sns.delete_topic(TopicArn=arn)
-
-@pytest.mark.iam_account
-@pytest.mark.sns
-def test_cross_account_topic(sns, sns_alt):
-    name = get_new_topic_name()
-    arn = sns.create_topic(Name=name)['TopicArn']
-
-    # not visible to any alt user apis
-    with pytest.raises(sns.exceptions.NotFoundException):
-        sns_alt.get_topic_attributes(TopicArn=arn)
-    with pytest.raises(sns.exceptions.NotFoundException):
-        sns_alt.set_topic_attributes(TopicArn=arn, AttributeName='Policy', AttributeValue='')
-
-    # delete returns success
-    sns_alt.delete_topic(TopicArn=arn)
-
-    response = sns_alt.list_topics()
-    assert arn not in [p['TopicArn'] for p in response['Topics']]
-
-@pytest.mark.iam_account
-@pytest.mark.sns
-def test_account_topic_publish(sns, s3):
-    name = get_new_topic_name()
-
-    response = sns.create_topic(Name=name)
-    topic_arn = response['TopicArn']
-
-    bucket = get_new_bucket_name()
-    s3.create_bucket(Bucket=bucket)
-
-    config = {'TopicConfigurations': [{
-        'Id': 'id',
-        'TopicArn': topic_arn,
-        'Events': [ 's3:ObjectCreated:*' ],
-    }]}
-    s3.put_bucket_notification_configuration(
-        Bucket=bucket, NotificationConfiguration=config)
-
-@pytest.mark.iam_account
-@pytest.mark.iam_cross_account
-@pytest.mark.sns
-def test_cross_account_topic_publish(sns, s3_alt, iam_alt_root):
-    name = get_new_topic_name()
-
-    response = sns.create_topic(Name=name)
-    topic_arn = response['TopicArn']
-
-    bucket = get_new_bucket_name()
-    s3_alt.create_bucket(Bucket=bucket)
-
-    config = {'TopicConfigurations': [{
-        'Id': 'id',
-        'TopicArn': topic_arn,
-        'Events': [ 's3:ObjectCreated:*' ],
-    }]}
-
-    # expect AccessDenied because no resource policy allows cross-account access
-    e = assert_raises(ClientError, s3_alt.put_bucket_notification_configuration,
-                      Bucket=bucket, NotificationConfiguration=config)
-    status, error_code = _get_status_and_error_code(e.response)
-    assert status == 403
-    assert error_code == 'AccessDenied'
-
-    # add topic policy to allow the alt user
-    alt_principal = iam_alt_root.get_user()['User']['Arn']
-    policy = json.dumps({
-        'Version': '2012-10-17',
-        'Statement': [{
-            'Effect': 'Allow',
-            'Principal': {'AWS': alt_principal},
-            'Action': 'sns:Publish',
-            'Resource': topic_arn
-        }]
-    })
-    sns.set_topic_attributes(TopicArn=topic_arn, AttributeName='Policy',
-                             AttributeValue=policy)
-
-    s3_alt.put_bucket_notification_configuration(
-        Bucket=bucket, NotificationConfiguration=config)
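Since ``GetTopicAttributes`` returns the attached policy document as a JSON string, the owning account can verify that the grant above took effect. This check is our addition, not part of the deleted test; it reuses ``sns``, ``topic_arn`` and ``alt_principal`` from the function body::

    attrs = sns.get_topic_attributes(TopicArn=topic_arn)['Attributes']
    statement = json.loads(attrs['Policy'])['Statement'][0]
    assert statement['Action'] == 'sns:Publish'
    assert statement['Principal'] == {'AWS': alt_principal}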