diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/README.md b/hadoop-ozone/dist/src/main/smoketest/s3/README.md new file mode 100644 index 0000000000..884ba2e2c7 --- /dev/null +++ b/hadoop-ozone/dist/src/main/smoketest/s3/README.md @@ -0,0 +1,27 @@ + + +## Ozone S3 Gateway Acceptance Tests + +Note: the aws cli based acceptance tests can be cross-checked with the original AWS s3 endpoint. + +You need to + + 1. Create a bucket + 2. Configure your local aws cli + 3. Set bucket/endpointurl during the robot test execution + +``` +robot -v bucket:ozonetest -v OZONE_S3_SET_CREDENTIALS:false -v ENDPOINT_URL:https://s3.us-east-2.amazonaws.com smoketest/s3 +``` diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/__init__.robot b/hadoop-ozone/dist/src/main/smoketest/s3/__init__.robot new file mode 100644 index 0000000000..f1bbea933b --- /dev/null +++ b/hadoop-ozone/dist/src/main/smoketest/s3/__init__.robot @@ -0,0 +1,21 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +*** Settings *** +Library OperatingSystem +Library String +Resource ../commonlib.robot +Resource ./commonawslib.robot +Test Setup Setup s3 tests \ No newline at end of file diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/awscli.robot b/hadoop-ozone/dist/src/main/smoketest/s3/awss3.robot similarity index 60% rename from hadoop-ozone/dist/src/main/smoketest/s3/awscli.robot rename to hadoop-ozone/dist/src/main/smoketest/s3/awss3.robot index b26ad91d44..79db6880dc 100644 --- a/hadoop-ozone/dist/src/main/smoketest/s3/awscli.robot +++ b/hadoop-ozone/dist/src/main/smoketest/s3/awss3.robot @@ -18,46 +18,30 @@ Documentation S3 gateway test with aws cli Library OperatingSystem Library String Resource ../commonlib.robot +Resource ./commonawslib.robot +Suite Setup Setup s3 tests *** Variables *** ${ENDPOINT_URL} http://s3g:9878 - -*** Keywords *** -Execute AWSCli - [Arguments] ${command} - ${output} = Execute aws s3 --endpoint-url ${ENDPOINT_URL}/${VOLUME} ${command} - [return] ${output} +${BUCKET} generated *** Test Cases *** -Create volume and bucket for the tests - ${postfix} = Generate Random String 5 [NUMBERS] - Set Suite Variable ${BUCKET} bucket-${postfix} - Set Suite Variable ${VOLUME} vol-${postfix} - Log Testing s3 commands in /${VOLUME}/${BUCKET} - ${result} = Execute ozone sh volume create /${VOLUME} --user hadoop - ${result} = Execute ozone sh bucket create /${VOLUME}/${BUCKET} - -Install aws s3 cli - Execute sudo apt-get install -y awscli - Set Environment Variable AWS_ACCESS_KEY_ID ANYID - Set Environment Variable AWS_SECRET_ACCESS_KEY ANYKEY - File upload and directory list Execute date > /tmp/testfile - ${result} = Execute AWSCli cp /tmp/testfile s3://${BUCKET} + ${result} = Execute AWSS3Cli cp /tmp/testfile s3://${BUCKET} Should contain ${result} upload - ${result} = Execute AWSCli cp /tmp/testfile s3://${BUCKET}/dir1/dir2/file + ${result} = Execute AWSS3Cli cp /tmp/testfile s3://${BUCKET}/dir1/dir2/file Should contain ${result} upload - ${result} = 
Execute AWSCli ls s3://${BUCKET} + ${result} = Execute AWSS3Cli ls s3://${BUCKET} Should contain ${result} testfile Should contain ${result} dir1 Should not contain ${result} dir2 - ${result} = Execute AWSCli ls s3://${BUCKET}/dir1/ + ${result} = Execute AWSS3Cli ls s3://${BUCKET}/dir1/ Should not contain ${result} testfile Should not contain ${result} dir1 Should contain ${result} dir2 - ${result} = Execute AWSCli ls s3://${BUCKET}/dir1/dir2/ + ${result} = Execute AWSS3Cli ls s3://${BUCKET}/dir1/dir2/ Should not contain ${result} testfile Should not contain ${result} dir1 Should contain ${result} file diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/buckethead.robot b/hadoop-ozone/dist/src/main/smoketest/s3/buckethead.robot new file mode 100644 index 0000000000..2ce5002a10 --- /dev/null +++ b/hadoop-ozone/dist/src/main/smoketest/s3/buckethead.robot @@ -0,0 +1,34 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +*** Settings *** +Documentation S3 gateway test with aws cli +Library OperatingSystem +Library String +Resource ../commonlib.robot +Resource commonawslib.robot +Test Setup Setup s3 tests + +*** Variables *** +${ENDPOINT_URL} http://s3g:9878 +${BUCKET} generated + +*** Test Cases *** + +Head Bucket not existent + ${result} = Execute AWSS3APICli head-bucket --bucket ${BUCKET} + ${result} = Execute AWSS3APICli and checkrc head-bucket --bucket ozonenosuchbucketqqweqwe 255 + Should contain ${result} Bad Request + Should contain ${result} 400 diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/bucketlist.robot b/hadoop-ozone/dist/src/main/smoketest/s3/bucketlist.robot new file mode 100644 index 0000000000..4fe9b6507d --- /dev/null +++ b/hadoop-ozone/dist/src/main/smoketest/s3/bucketlist.robot @@ -0,0 +1,32 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +*** Settings *** +Documentation S3 gateway test with aws cli +Library OperatingSystem +Library String +Resource ../commonlib.robot +Resource commonawslib.robot +Test Setup Setup s3 tests + +*** Variables *** +${ENDPOINT_URL} http://s3g:9878 +${BUCKET} generated + +*** Test Cases *** + +List buckets + ${result} = Execute AWSS3APICli list-buckets | jq -r '.Buckets[].Name' + Should contain ${result} ${BUCKET} \ No newline at end of file diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/bucketv2.robot b/hadoop-ozone/dist/src/main/smoketest/s3/bucketv2.robot deleted file mode 100644 index f17189b41e..0000000000 --- a/hadoop-ozone/dist/src/main/smoketest/s3/bucketv2.robot +++ /dev/null @@ -1,66 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -*** Settings *** -Documentation S3 gateway test with aws cli for bucket operations -Library String -Library OperatingSystem -Resource commonawslib.robot - -*** Variables *** -${ENDPOINT_URL} http://s3g:9878 -${OZONE_TEST} true -${BUCKET} generated -${NONEXIST-BUCKET} generated1 -*** Keywords *** - -Install aws s3 cli - Execute sudo apt-get install -y awscli - Set Environment Variable AWS_ACCESS_KEY_ID default - Set Environment Variable AWS_SECRET_ACCESS_KEY defaultsecret - ${postfix1} = Generate Random String 5 [NUMBERS] - Set Suite Variable ${BUCKET} bucket-${postfix1} - -Check Volume - # as we know bucket to volume map. Volume name bucket mapped is s3 + AWS_ACCESS_KEY_ID - ${result} = Execute ozone sh volume info /s3default - Should contain ${result} s3default - Should not contain ${result} VOLUME_NOT_FOUND - -*** Test Cases *** - -Setup s3 Tests - Run Keyword if '${OZONE_TEST}' == 'true' Install aws s3 cli - -Create Bucket - ${result} = Execute AWSS3APICli create-bucket --bucket ${BUCKET} - Should contain ${result} ${BUCKET} - Should contain ${result} Location - # create an already existing bucket - ${result} = Execute AWSS3APICli create-bucket --bucket ${BUCKET} - Should contain ${result} ${BUCKET} - Should contain ${result} Location - - Run Keyword if '${OZONE_TEST}' == 'true' Check Volume - -Head Bucket - ${result} = Execute AWSS3APICli head-bucket --bucket ${BUCKET} - ${result} = Execute AWSS3APICli and checkrc head-bucket --bucket ${NONEXIST-BUCKET} 255 - Should contain ${result} Not Found - Should contain ${result} 404 -Delete Bucket - ${result} = Execute AWSS3APICli head-bucket --bucket ${BUCKET} - ${result} = Execute AWSS3APICli and checkrc delete-bucket --bucket ${NONEXIST-BUCKET} 255 - Should contain ${result} NoSuchBucket \ No newline at end of file diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/bucketv4.robot b/hadoop-ozone/dist/src/main/smoketest/s3/bucketv4.robot deleted file mode 100644 index 1a93690da4..0000000000 --- 
a/hadoop-ozone/dist/src/main/smoketest/s3/bucketv4.robot +++ /dev/null @@ -1,71 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -*** Settings *** -Documentation S3 gateway test with aws cli for bucket operations -Library String -Library OperatingSystem -Resource commonawslib.robot - -*** Variables *** -${ENDPOINT_URL} http://s3g:9878 -${OZONE_TEST} true -${BUCKET} generated -${NONEXIST-BUCKET} generated1 - -*** Keywords *** - -Install aws s3 cli - Execute sudo apt-get install -y awscli - Remove Environment Variable AWS_ACCESS_KEY_ID - Remove Environment Variable AWS_SECRET_ACCESS_KEY - Execute aws configure set default.s3.signature_version s3v4 - Execute aws configure set aws_access_key_id default1 - Execute aws configure set aws_secret_access_key defaultsecret - Execute aws configure set region us-west-1 - ${postfix1} = Generate Random String 5 [NUMBERS] - Set Suite Variable ${BUCKET} bucket-${postfix1} - -Check Volume - # as we know bucket to volume map. 
Volume name bucket mapped is s3 + AWS_ACCESS_KEY_ID - ${result} = Execute ozone sh volume info /s3default1 - Should contain ${result} s3default1 - Should not contain ${result} VOLUME_NOT_FOUND - -*** Test Cases *** - -Setup s3 Tests - Run Keyword if '${OZONE_TEST}' == 'true' Install aws s3 cli - -Create Bucket - ${result} = Execute AWSS3APICli create-bucket --bucket ${BUCKET} - Should contain ${result} ${BUCKET} - Should contain ${result} Location - # create an already existing bucket - ${result} = Execute AWSS3APICli create-bucket --bucket ${BUCKET} - Should contain ${result} ${BUCKET} - Should contain ${result} Location - - Run Keyword if '${OZONE_TEST}' == 'true' Check Volume - -Head Bucket - ${result} = Execute AWSS3APICli head-bucket --bucket ${BUCKET} - ${result} = Execute AWSS3APICli and checkrc head-bucket --bucket ${NONEXIST-BUCKET} 255 - Should contain ${result} Not Found - Should contain ${result} 404 -Delete Bucket - ${result} = Execute AWSS3APICli head-bucket --bucket ${BUCKET} - ${result} = Execute AWSS3APICli and checkrc delete-bucket --bucket ${NONEXIST-BUCKET} 255 - Should contain ${result} NoSuchBucket \ No newline at end of file diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/commonawslib.robot b/hadoop-ozone/dist/src/main/smoketest/s3/commonawslib.robot index 07fa66790e..f4261455c8 100644 --- a/hadoop-ozone/dist/src/main/smoketest/s3/commonawslib.robot +++ b/hadoop-ozone/dist/src/main/smoketest/s3/commonawslib.robot @@ -16,6 +16,10 @@ *** Settings *** Resource ../commonlib.robot +*** Variables *** +${OZONE_S3_HEADER_VERSION} v2 +${OZONE_S3_SET_CREDENTIALS} true + *** Keywords *** Execute AWSS3APICli [Arguments] ${command} @@ -23,6 +27,41 @@ Execute AWSS3APICli [return] ${output} Execute AWSS3APICli and checkrc - [Arguments] ${command} ${expected_error_code} + [Arguments] ${command} ${expected_error_code} ${output} = Execute and checkrc aws s3api --endpoint-url ${ENDPOINT_URL} ${command} ${expected_error_code} - [return] ${output} \ No 
newline at end of file + [return] ${output} + +Execute AWSS3Cli + [Arguments] ${command} + ${output} = Execute aws s3 --endpoint-url ${ENDPOINT_URL} ${command} + [return] ${output} + +Install aws cli + ${rc} ${output} = Run And Return Rc And Output which apt-get + Run Keyword if '${rc}' == '0' Install aws cli s3 debian + +Install aws cli s3 debian + Execute sudo apt-get install -y awscli + +Setup v2 headers + Set Environment Variable AWS_ACCESS_KEY_ID ANYID + Set Environment Variable AWS_SECRET_ACCESS_KEY ANYKEY + +Setup v4 headers + Execute aws configure set default.s3.signature_version s3v4 + Execute aws configure set aws_access_key_id default1 + Execute aws configure set aws_secret_access_key defaultsecret + Execute aws configure set region us-west-1 +Create bucket + ${postfix} = Generate Random String 5 [NUMBERS] + Set Suite Variable ${BUCKET} bucket-${postfix} + Execute AWSS3APICli create-bucket --bucket ${BUCKET} + +Setup credentials + Run Keyword if '${OZONE_S3_HEADER_VERSION}' == 'v4' Setup v4 headers + Run Keyword if '${OZONE_S3_HEADER_VERSION}' != 'v4' Setup v2 headers + +Setup s3 tests + Run Keyword Install aws cli + Run Keyword if '${OZONE_S3_SET_CREDENTIALS}' == 'true' Setup credentials + Run Keyword if '${BUCKET}' == 'generated' Create bucket diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/objectdelete.robot b/hadoop-ozone/dist/src/main/smoketest/s3/objectdelete.robot new file mode 100644 index 0000000000..9e57d5003a --- /dev/null +++ b/hadoop-ozone/dist/src/main/smoketest/s3/objectdelete.robot @@ -0,0 +1,72 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +*** Settings *** +Documentation S3 gateway test with aws cli +Library OperatingSystem +Library String +Resource ../commonlib.robot +Resource commonawslib.robot +Test Setup Setup s3 tests + +*** Variables *** +${ENDPOINT_URL} http://s3g:9878 +${BUCKET} generated + +*** Test Cases *** +Delete file with s3api + Execute date > /tmp/testfile + ${result} = Execute AWSS3ApiCli put-object --bucket ${BUCKET} --key deletetestapi/f1 --body /tmp/testfile + ${result} = Execute AWSS3ApiCli list-objects --bucket ${BUCKET} --prefix deletetestapi/ + Should contain ${result} f1 + ${result} = Execute AWSS3APICli delete-object --bucket ${BUCKET} --key deletetestapi/f1 + ${result} = Execute AWSS3ApiCli list-objects --bucket ${BUCKET} --prefix deletetestapi/ + Should not contain ${result} f1 +#In case of HTTP 500, the error code is printed out to the console. 
+ Should not contain ${result} 500 + +Delete file with s3api, file doesn't exist + ${result} = Execute AWSS3Cli ls s3://${BUCKET}/ + Should not contain ${result} thereisnosuchfile + ${result} = Execute AWSS3APICli delete-object --bucket ${BUCKET} --key thereisnosuchfile + ${result} = Execute AWSS3Cli ls s3://${BUCKET}/ + Should not contain ${result} thereisnosuchfile + +Delete dir with s3api + Execute date > /tmp/testfile + ${result} = Execute AWSS3Cli cp /tmp/testfile s3://${BUCKET}/deletetestapidir/f1 + ${result} = Execute AWSS3Cli ls s3://${BUCKET}/deletetestapidir/ + Should contain ${result} f1 + ${result} = Execute AWSS3APICli delete-object --bucket ${BUCKET} --key deletetestapidir/ + ${result} = Execute AWSS3Cli ls s3://${BUCKET}/deletetestapidir/ + Should contain ${result} f1 + ${result} = Execute AWSS3APICli delete-object --bucket ${BUCKET} --key deletetestapidir/f1 + + +Delete file with s3api, file doesn't exist, prefix of a real file + Execute date > /tmp/testfile + ${result} = Execute AWSS3Cli cp /tmp/testfile s3://${BUCKET}/deletetestapiprefix/filefile + ${result} = Execute AWSS3Cli ls s3://${BUCKET}/deletetestapiprefix/ + Should contain ${result} filefile + ${result} = Execute AWSS3APICli delete-object --bucket ${BUCKET} --key deletetestapiprefix/file + ${result} = Execute AWSS3Cli ls s3://${BUCKET}/deletetestapiprefix/ + Should contain ${result} filefile + ${result} = Execute AWSS3APICli delete-object --bucket ${BUCKET} --key deletetestapiprefix/filefile + + + +Delete file with s3api, bucket doesn't exist + ${result} = Execute AWSS3APICli and checkrc delete-object --bucket ${BUCKET}-nosuchbucket --key f1 255 + Should contain ${result} NoSuchBucket diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/objectputget.robot b/hadoop-ozone/dist/src/main/smoketest/s3/objectputget.robot new file mode 100644 index 0000000000..858e472de7 --- /dev/null +++ b/hadoop-ozone/dist/src/main/smoketest/s3/objectputget.robot @@ -0,0 +1,42 @@ +# Licensed to the Apache 
Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +*** Settings *** +Documentation S3 gateway test with aws cli +Library OperatingSystem +Library String +Resource ../commonlib.robot +Resource commonawslib.robot +Test Setup Setup s3 tests + +*** Variables *** +${ENDPOINT_URL} http://s3g:9878 +${OZONE_TEST} true +${BUCKET} generated + +*** Test Cases *** + +Put object to s3 + Execute date > /tmp/testfile + ${result} = Execute AWSS3ApiCli put-object --bucket ${BUCKET} --key putobject/f1 --body /tmp/testfile + ${result} = Execute AWSS3ApiCli list-objects --bucket ${BUCKET} --prefix putobject/ + Should contain ${result} f1 + +#This test depends on the previous test case. 
Can't be executes alone +Get object from s3 + ${result} = Execute AWSS3ApiCli get-object --bucket ${BUCKET} --key putobject/f1 /tmp/testfile.result + ${checksumbefore} = Execute md5sum /tmp/testfile | awk '{print $1}' + ${checksumafter} = Execute md5sum /tmp/testfile.result | awk '{print $1}' + Should Be Equal ${checksumbefore} ${checksumafter} diff --git a/hadoop-ozone/dist/src/main/smoketest/test.sh b/hadoop-ozone/dist/src/main/smoketest/test.sh index a6dc1f1fe4..59903c6c8e 100755 --- a/hadoop-ozone/dist/src/main/smoketest/test.sh +++ b/hadoop-ozone/dist/src/main/smoketest/test.sh @@ -43,9 +43,10 @@ execute_tests(){ for TEST in "${TESTS[@]}"; do TITLE="Ozone $TEST tests with $COMPOSE_DIR cluster" set +e - docker-compose -f "$COMPOSE_FILE" exec datanode python -m robot --log NONE --report NONE "${OZONE_ROBOT_OPTS[@]}" --output "smoketest/$RESULT_DIR/robot-$COMPOSE_DIR-${TEST//\//_/}.xml" --logtitle "$TITLE" --reporttitle "$TITLE" "smoketest/$TEST" + OUTPUT_NAME="$COMPOSE_DIR-${TEST//\//_}" + docker-compose -f "$COMPOSE_FILE" exec datanode python -m robot --log NONE --report NONE "${OZONE_ROBOT_OPTS[@]}" --output "smoketest/$RESULT_DIR/robot-$OUTPUT_NAME.xml" --logtitle "$TITLE" --reporttitle "$TITLE" "smoketest/$TEST" set -e - docker-compose -f "$COMPOSE_FILE" logs > "$DIR/$RESULT_DIR/docker-$COMPOSE_DIR-${TEST//\//_/}.log" + docker-compose -f "$COMPOSE_FILE" logs > "$DIR/$RESULT_DIR/docker-$OUTPUT_NAME.log" done if [ "$KEEP_RUNNING" = false ]; then docker-compose -f "$COMPOSE_FILE" down diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/DeleteBucket.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/DeleteBucket.java deleted file mode 100644 index 42885e21ac..0000000000 --- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/DeleteBucket.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.ozone.s3.bucket; - -import javax.ws.rs.DELETE; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import java.io.IOException; - -import org.apache.hadoop.ozone.s3.EndpointBase; -import org.apache.hadoop.ozone.s3.exception.OS3Exception; -import org.apache.hadoop.ozone.s3.exception.S3ErrorTable; - -import org.apache.http.HttpStatus; - -/** - * Delete a bucket. 
- */ -@Path("/{bucket}") -public class DeleteBucket extends EndpointBase { - - @DELETE - @Produces(MediaType.APPLICATION_XML) - public Response delete(@PathParam("bucket") String bucketName) - throws IOException, OS3Exception { - - try { - deleteS3Bucket(bucketName); - } catch (IOException ex) { - if (ex.getMessage().contains("BUCKET_NOT_EMPTY")) { - OS3Exception os3Exception = S3ErrorTable.newError(S3ErrorTable - .BUCKET_NOT_EMPTY, S3ErrorTable.Resource.BUCKET); - throw os3Exception; - } else if (ex.getMessage().contains("BUCKET_NOT_FOUND")) { - OS3Exception os3Exception = S3ErrorTable.newError(S3ErrorTable - .NO_SUCH_BUCKET, S3ErrorTable.Resource.BUCKET); - throw os3Exception; - } else { - throw ex; - } - } - - return Response - .status(HttpStatus.SC_NO_CONTENT) - .build(); - - } -} diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/HeadBucket.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/HeadBucket.java deleted file mode 100644 index 5ddc78cb25..0000000000 --- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/HeadBucket.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.ozone.s3.bucket; - -import javax.ws.rs.HEAD; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.core.Response; -import java.io.IOException; - -import org.apache.hadoop.ozone.s3.EndpointBase; -import org.apache.hadoop.ozone.s3.exception.OS3Exception; -import org.apache.hadoop.ozone.s3.exception.S3ErrorTable; -import org.apache.hadoop.ozone.s3.exception.S3ErrorTable.Resource; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Finds the bucket exists or not. - */ -@Path("/{bucket}") -public class HeadBucket extends EndpointBase { - - private static final Logger LOG = - LoggerFactory.getLogger(HeadBucket.class); - - @HEAD - public Response head(@PathParam("bucket") String bucketName) - throws Exception { - try { - getVolume(getOzoneVolumeName(bucketName)).getBucket(bucketName); - } catch (IOException ex) { - LOG.error("Exception occurred in headBucket", ex); - if (ex.getMessage().contains("NOT_FOUND")) { - OS3Exception os3Exception = S3ErrorTable.newError(S3ErrorTable - .NO_SUCH_BUCKET, Resource.BUCKET); - throw os3Exception; - } else { - throw ex; - } - } - return Response.ok().build(); - } -} diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/PutBucket.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/PutBucket.java deleted file mode 100644 index bdb3c59c3f..0000000000 --- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/PutBucket.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.ozone.s3.bucket; - -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.HttpHeaders; -import javax.ws.rs.core.Response; -import java.io.IOException; - -import org.apache.hadoop.ozone.s3.EndpointBase; -import org.apache.hadoop.ozone.s3.exception.OS3Exception; -import org.apache.hadoop.ozone.s3.exception.S3ErrorTable; -import org.apache.hadoop.ozone.s3.header.AuthorizationHeaderV2; -import org.apache.hadoop.ozone.s3.header.AuthorizationHeaderV4; -import org.apache.http.HttpStatus; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - - -/** - * Create new bucket. 
- */ -@Path("/{bucket}") -public class PutBucket extends EndpointBase { - - private static final Logger LOG = - LoggerFactory.getLogger(PutBucket.class); - - @PUT - public Response put(@PathParam("bucket") String bucketName, @Context - HttpHeaders httpHeaders) throws IOException, OS3Exception { - - String auth = httpHeaders.getHeaderString("Authorization"); - LOG.info("Auth header string {}", auth); - - if (auth == null) { - throw S3ErrorTable.newError(S3ErrorTable.MALFORMED_HEADER, S3ErrorTable - .Resource.HEADER); - } - - String userName; - if (auth.startsWith("AWS4")) { - LOG.info("V4 Header {}", auth); - AuthorizationHeaderV4 authorizationHeader = new AuthorizationHeaderV4( - auth); - userName = authorizationHeader.getAccessKeyID().toLowerCase(); - } else { - LOG.info("V2 Header {}", auth); - AuthorizationHeaderV2 authorizationHeader = new AuthorizationHeaderV2( - auth); - userName = authorizationHeader.getAccessKeyID().toLowerCase(); - } - - String location = createS3Bucket(userName, bucketName); - - LOG.info("Location is {}", location); - return Response.status(HttpStatus.SC_OK).header("Location", location) - .build(); - - } -} diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/package-info.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/package-info.java deleted file mode 100644 index c099c69fe7..0000000000 --- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/package-info.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * Rest endpoint implementation for the bucket specific methods. - */ -@javax.xml.bind.annotation.XmlSchema( - namespace = "http://s3.amazonaws" - + ".com/doc/2006-03-01/", elementFormDefault = - javax.xml.bind.annotation.XmlNsForm.QUALIFIED, - xmlns = { - @javax.xml.bind.annotation.XmlNs(namespaceURI = "http://s3.amazonaws" - + ".com/doc/2006-03-01/", prefix = "")}) - -package org.apache.hadoop.ozone.s3.bucket; \ No newline at end of file diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/commontypes/IsoDateAdapter.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/commontypes/IsoDateAdapter.java index 281e00b4e3..cb04870e23 100644 --- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/commontypes/IsoDateAdapter.java +++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/commontypes/IsoDateAdapter.java @@ -31,7 +31,7 @@ public class IsoDateAdapter extends XmlAdapter { public IsoDateAdapter() { iso8861Formatter = - DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mmX") + DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSX") .withZone(ZoneOffset.UTC); } diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/object/ListObject.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/BucketEndpoint.java similarity index 53% rename from hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/object/ListObject.java rename to hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/BucketEndpoint.java 
index a7bd7ad6f3..7a7c92d261 100644 --- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/object/ListObject.java +++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/BucketEndpoint.java @@ -1,4 +1,4 @@ -/* +/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -6,50 +6,61 @@ * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

* Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.ozone.s3.object; +package org.apache.hadoop.ozone.s3.endpoint; +import javax.ws.rs.DELETE; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; +import javax.ws.rs.HEAD; +import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.HttpHeaders; -import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import javax.ws.rs.core.Response.Status; import java.io.IOException; import java.time.Instant; import java.util.Iterator; import org.apache.hadoop.ozone.client.OzoneBucket; import org.apache.hadoop.ozone.client.OzoneKey; -import org.apache.hadoop.ozone.client.OzoneVolume; -import org.apache.hadoop.ozone.s3.EndpointBase; import org.apache.hadoop.ozone.s3.commontypes.KeyMetadata; +import org.apache.hadoop.ozone.s3.exception.OS3Exception; +import org.apache.hadoop.ozone.s3.exception.S3ErrorTable; import org.apache.commons.lang3.StringUtils; -import org.apache.hadoop.ozone.s3.exception.OS3Exception; +import org.apache.http.HttpStatus; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** - * List Object Rest endpoint. + * Bucket level rest endpoints. */ -@Path("/{volume}/{bucket}") -public class ListObject extends EndpointBase { +@Path("/{bucket}") +public class BucketEndpoint extends EndpointBase { + private static final Logger LOG = + LoggerFactory.getLogger(BucketEndpoint.class); + /** + * Rest endpoint to list objects in a specific bucket. + *

+ * See: https://docs.aws.amazon.com/AmazonS3/latest/API/v2-RESTBucketGET.html + * for more details. + */ @GET - @Produces(MediaType.APPLICATION_XML) - public ListObjectResponse get( - @PathParam("volume") String volumeName, + public ListObjectResponse list( @PathParam("bucket") String bucketName, @QueryParam("delimiter") String delimiter, @QueryParam("encoding-type") String encodingType, @@ -65,8 +76,7 @@ public ListObjectResponse get( prefix = ""; } - OzoneVolume volume = getVolume(volumeName); - OzoneBucket bucket = getBucket(volume, bucketName); + OzoneBucket bucket = getBucket(bucketName); Iterator ozoneKeyIterator = bucket.listKeys(prefix); @@ -113,7 +123,77 @@ public ListObjectResponse get( response.addKey(keyMetadata); } } + response.setKeyCount( + response.getCommonPrefixes().size() + response.getContents().size()); return response; } + @PUT + public Response put(@PathParam("bucket") String bucketName, @Context + HttpHeaders httpHeaders) throws IOException, OS3Exception { + + String userName = parseUsername(httpHeaders); + + String location = createS3Bucket(userName, bucketName); + + LOG.info("Location is {}", location); + return Response.status(HttpStatus.SC_OK).header("Location", location) + .build(); + + } + + /** + * Rest endpoint to check the existence of a bucket. + *

+ * See: https://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketHEAD.html + * for more details. + */ + @HEAD + public Response head(@PathParam("bucket") String bucketName) + throws OS3Exception, IOException { + try { + getBucket(bucketName); + } catch (OS3Exception ex) { + LOG.error("Exception occurred in headBucket", ex); + //TODO: use a subclass fo OS3Exception and catch it here. + if (ex.getCode().contains("NoSuchBucket")) { + return Response.status(Status.BAD_REQUEST).build(); + } else { + throw ex; + } + } + return Response.ok().build(); + } + + /** + * Rest endpoint to delete specific bucket. + *

+ * See: https://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketDELETE.html + * for more details. + */ + @DELETE + public Response delete(@PathParam("bucket") String bucketName) + throws IOException, OS3Exception { + + try { + deleteS3Bucket(bucketName); + } catch (IOException ex) { + if (ex.getMessage().contains("BUCKET_NOT_EMPTY")) { + OS3Exception os3Exception = S3ErrorTable.newError(S3ErrorTable + .BUCKET_NOT_EMPTY, S3ErrorTable.Resource.BUCKET); + throw os3Exception; + } else if (ex.getMessage().contains("BUCKET_NOT_FOUND")) { + OS3Exception os3Exception = S3ErrorTable.newError(S3ErrorTable + .NO_SUCH_BUCKET, S3ErrorTable.Resource.BUCKET); + throw os3Exception; + } else { + throw ex; + } + } + + return Response + .status(HttpStatus.SC_NO_CONTENT) + .build(); + + } } diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/EndpointBase.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/EndpointBase.java similarity index 74% rename from hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/EndpointBase.java rename to hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/EndpointBase.java index daa75a9c1a..61f066c6fc 100644 --- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/EndpointBase.java +++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/EndpointBase.java @@ -15,10 +15,12 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.hadoop.ozone.s3; +package org.apache.hadoop.ozone.s3.endpoint; import javax.inject.Inject; import javax.ws.rs.NotFoundException; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.HttpHeaders; import java.io.IOException; import org.apache.hadoop.ozone.client.OzoneBucket; @@ -27,6 +29,8 @@ import org.apache.hadoop.ozone.s3.exception.OS3Exception; import org.apache.hadoop.ozone.s3.exception.S3ErrorTable; import org.apache.hadoop.ozone.s3.exception.S3ErrorTable.Resource; +import org.apache.hadoop.ozone.s3.header.AuthorizationHeaderV2; +import org.apache.hadoop.ozone.s3.header.AuthorizationHeaderV4; import com.google.common.annotations.VisibleForTesting; import org.slf4j.Logger; @@ -39,6 +43,7 @@ public class EndpointBase { private static final Logger LOG = LoggerFactory.getLogger(EndpointBase.class); + @Inject private OzoneClient client; @@ -65,6 +70,25 @@ protected OzoneBucket getBucket(OzoneVolume volume, String bucketName) return bucket; } + protected OzoneBucket getBucket(String bucketName) + throws OS3Exception, IOException { + OzoneBucket bucket; + try { + OzoneVolume volume = getVolume(getOzoneVolumeName(bucketName)); + bucket = volume.getBucket(bucketName); + } catch (IOException ex) { + LOG.error("Error occurred is {}", ex); + if (ex.getMessage().contains("NOT_FOUND")) { + OS3Exception oex = + S3ErrorTable.newError(S3ErrorTable.NO_SUCH_BUCKET, Resource.BUCKET); + throw oex; + } else { + throw ex; + } + } + return bucket; + } + protected OzoneVolume getVolume(String volumeName) throws IOException { OzoneVolume volume = null; try { @@ -149,6 +173,37 @@ public String getOzoneBucketName(String s3BucketName) throws IOException { return client.getObjectStore().getOzoneBucketName(s3BucketName); } + /** + * Retrieve the username based on the authorization header. 
+ * + * @param httpHeaders + * @return Identified username + * @throws OS3Exception + */ + public String parseUsername( + @Context HttpHeaders httpHeaders) throws OS3Exception { + String auth = httpHeaders.getHeaderString("Authorization"); + LOG.info("Auth header string {}", auth); + + if (auth == null) { + throw S3ErrorTable + .newError(S3ErrorTable.MALFORMED_HEADER, Resource.HEADER); + } + + String userName; + if (auth.startsWith("AWS4")) { + LOG.info("V4 Header {}", auth); + AuthorizationHeaderV4 authorizationHeader = new AuthorizationHeaderV4( + auth); + userName = authorizationHeader.getAccessKeyID().toLowerCase(); + } else { + LOG.info("V2 Header {}", auth); + AuthorizationHeaderV2 authorizationHeader = new AuthorizationHeaderV2( + auth); + userName = authorizationHeader.getAccessKeyID().toLowerCase(); + } + return userName; + } @VisibleForTesting public void setClient(OzoneClient ozoneClient) { diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/ListBucketResponse.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ListBucketResponse.java similarity index 97% rename from hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/ListBucketResponse.java rename to hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ListBucketResponse.java index 1f117ddd08..b9f87026f7 100644 --- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/ListBucketResponse.java +++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ListBucketResponse.java @@ -16,7 +16,7 @@ * limitations under the License. 
*/ -package org.apache.hadoop.ozone.s3.bucket; +package org.apache.hadoop.ozone.s3.endpoint; import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.ozone.s3.commontypes.BucketMetadata; diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/object/ListObjectResponse.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ListObjectResponse.java similarity index 93% rename from hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/object/ListObjectResponse.java rename to hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ListObjectResponse.java index a32fb93d06..b9ab977514 100644 --- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/object/ListObjectResponse.java +++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ListObjectResponse.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.ozone.s3.object; +package org.apache.hadoop.ozone.s3.endpoint; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; @@ -48,6 +48,9 @@ public class ListObjectResponse { @XmlElement(name = "MaxKeys") private int maxKeys; + @XmlElement(name = "KeyCount") + private int keyCount; + @XmlElement(name = "Delimiter") private String delimiter = "/"; @@ -144,4 +147,12 @@ public void addKey(KeyMetadata keyMetadata) { public void addPrefix(String relativeKeyName) { commonPrefixes.add(new CommonPrefix(relativeKeyName)); } + + public int getKeyCount() { + return keyCount; + } + + public void setKeyCount(int keyCount) { + this.keyCount = keyCount; + } } diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java new file mode 100644 index 0000000000..d32bc9ff88 --- /dev/null +++ 
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java @@ -0,0 +1,222 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.ozone.s3.endpoint; + +import javax.ws.rs.DELETE; +import javax.ws.rs.DefaultValue; +import javax.ws.rs.GET; +import javax.ws.rs.HEAD; +import javax.ws.rs.HeaderParam; +import javax.ws.rs.PUT; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.QueryParam; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.HttpHeaders; +import javax.ws.rs.core.Response; +import javax.ws.rs.core.Response.ResponseBuilder; +import javax.ws.rs.core.Response.Status; +import javax.ws.rs.core.StreamingOutput; +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hdds.client.ReplicationFactor; +import org.apache.hadoop.hdds.client.ReplicationType; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; +import org.apache.hadoop.ozone.client.OzoneBucket; +import org.apache.hadoop.ozone.client.OzoneKeyDetails; +import org.apache.hadoop.ozone.client.io.OzoneInputStream; +import org.apache.hadoop.ozone.client.io.OzoneOutputStream; +import org.apache.hadoop.ozone.s3.exception.OS3Exception; +import org.apache.hadoop.ozone.s3.exception.S3ErrorTable; + +import org.apache.commons.io.IOUtils; +import org.apache.http.HttpStatus; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Key level rest endpoints. 
+ */ +@Path("/{bucket}/{path:.+}") +public class ObjectEndpoint extends EndpointBase { + + private static final Logger LOG = + LoggerFactory.getLogger(ObjectEndpoint.class); + + private List customizableGetHeaders = new ArrayList<>(); + + public ObjectEndpoint() { + customizableGetHeaders.add("Content-Type"); + customizableGetHeaders.add("Content-Language"); + customizableGetHeaders.add("Expires"); + customizableGetHeaders.add("Cache-Control"); + customizableGetHeaders.add("Content-Disposition"); + customizableGetHeaders.add("Content-Encoding"); + } + + /** + * Rest endpoint to upload object to a bucket. + *

+ * See: https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html for + * more details. + */ + @PUT + public Response put( + @Context HttpHeaders headers, + @PathParam("bucket") String bucketName, + @PathParam("path") String keyPath, + @DefaultValue("STAND_ALONE") @QueryParam("replicationType") + ReplicationType replicationType, + @DefaultValue("ONE") @QueryParam("replicationFactor") + ReplicationFactor replicationFactor, + @DefaultValue("32 * 1024 * 1024") @QueryParam("chunkSize") + String chunkSize, + @HeaderParam("Content-Length") long length, + InputStream body) throws IOException, OS3Exception { + + try { + Configuration config = new OzoneConfiguration(); + config.set(ScmConfigKeys.OZONE_SCM_CHUNK_SIZE_KEY, chunkSize); + + OzoneBucket bucket = getBucket(bucketName); + OzoneOutputStream output = bucket + .createKey(keyPath, length, replicationType, replicationFactor); + + IOUtils.copy(body, output); + output.close(); + + return Response.ok().status(HttpStatus.SC_OK) + .build(); + } catch (IOException ex) { + LOG.error("Exception occurred in PutObject", ex); + throw ex; + } + } + + /** + * Rest endpoint to download object from a bucket. + *

+ * See: https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectGET.html for + * more details. + */ + @GET + public Response get( + @Context HttpHeaders headers, + @PathParam("bucket") String bucketName, + @PathParam("path") String keyPath, + InputStream body) throws IOException, OS3Exception { + + try { + OzoneBucket bucket = getBucket(bucketName); + + OzoneInputStream key = bucket + .readKey(keyPath); + + StreamingOutput output = dest -> IOUtils.copy(key, dest); + ResponseBuilder responseBuilder = Response.ok(output); + + for (String responseHeader : customizableGetHeaders) { + String headerValue = headers.getHeaderString(responseHeader); + if (headerValue != null) { + responseBuilder.header(responseHeader, headerValue); + } + } + + return responseBuilder.build(); + } catch (IOException ex) { + if (ex.getMessage().contains("NOT_FOUND")) { + OS3Exception os3Exception = S3ErrorTable.newError(S3ErrorTable + .NO_SUCH_OBJECT, S3ErrorTable.Resource.OBJECT); + throw os3Exception; + } else { + throw ex; + } + } + } + + /** + * Rest endpoint to check existence of an object in a bucket. + *

+ * See: https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectHEAD.html + * for more details. + */ + @HEAD + public Response head( + @PathParam("bucket") String bucketName, + @PathParam("path") String keyPath) throws Exception { + OzoneKeyDetails key; + + try { + key = getBucket(bucketName).getKey(keyPath); + // TODO: return the specified range bytes of this object. + } catch (IOException ex) { + LOG.error("Exception occurred in HeadObject", ex); + if (ex.getMessage().contains("KEY_NOT_FOUND")) { + OS3Exception os3Exception = S3ErrorTable.newError(S3ErrorTable + .NO_SUCH_OBJECT, S3ErrorTable.Resource.OBJECT); + throw os3Exception; + } else { + throw ex; + } + } + + return Response.ok().status(HttpStatus.SC_OK) + .header("Last-Modified", key.getModificationTime()) + .header("ETag", "" + key.getModificationTime()) + .header("Content-Length", key.getDataSize()) + .header("Content-Type", "binary/octet-stream") + .build(); + } + + /** + * Delete a specific object from a bucket. + *

+ * See: https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectDELETE.html + * for more details. + */ + @DELETE + public Response delete( + @PathParam("bucket") String bucketName, + @PathParam("path") String keyPath) throws IOException, OS3Exception { + + try { + OzoneBucket bucket = getBucket(bucketName); + bucket.getKey(keyPath); + bucket.deleteKey(keyPath); + } catch (IOException ex) { + if (ex.getMessage().contains("BUCKET_NOT_FOUND")) { + throw S3ErrorTable.newError(S3ErrorTable + .NO_SUCH_BUCKET, S3ErrorTable.Resource.BUCKET); + } else if (!ex.getMessage().contains("NOT_FOUND")) { + throw ex; + } + //NOT_FOUND is not a problem, AWS doesn't throw exception for missing + // keys. Just return 204. + } + return Response + .status(Status.NO_CONTENT) + .build(); + + } + +} diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/ListBucket.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/RootEndpoint.java similarity index 67% rename from hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/ListBucket.java rename to hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/RootEndpoint.java index 962b8a6b29..4de26f3878 100644 --- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/ListBucket.java +++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/RootEndpoint.java @@ -1,4 +1,4 @@ -/* +/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -6,61 +6,69 @@ * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

* Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.ozone.s3.bucket; +package org.apache.hadoop.ozone.s3.endpoint; -import org.apache.hadoop.ozone.client.OzoneBucket; -import org.apache.hadoop.ozone.client.OzoneVolume; -import org.apache.hadoop.ozone.s3.EndpointBase; -import org.apache.hadoop.ozone.s3.commontypes.BucketMetadata; -import org.apache.hadoop.ozone.s3.exception.OS3Exception; -import org.apache.hadoop.ozone.s3.exception.S3ErrorTable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.ws.rs.*; -import javax.ws.rs.core.MediaType; +import javax.ws.rs.GET; +import javax.ws.rs.NotFoundException; +import javax.ws.rs.Path; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.HttpHeaders; import java.io.IOException; import java.time.Instant; import java.util.Iterator; -/** - * List Object Rest endpoint. - */ -@Path("/{volume}") -public class ListBucket extends EndpointBase { - private static final Logger LOG = - LoggerFactory.getLogger(ListBucket.class); +import org.apache.hadoop.ozone.client.OzoneBucket; +import org.apache.hadoop.ozone.client.OzoneVolume; +import org.apache.hadoop.ozone.s3.commontypes.BucketMetadata; +import org.apache.hadoop.ozone.s3.exception.OS3Exception; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Top level rest endpoint. + */ +@Path("/") +public class RootEndpoint extends EndpointBase { + + private static final Logger LOG = + LoggerFactory.getLogger(RootEndpoint.class); + + /** + * Rest endpoint to list all the buckets of the current user. + * + * See https://docs.aws.amazon.com/AmazonS3/latest/API/RESTServiceGET.html + * for more details. 
+ */ @GET - @Produces(MediaType.APPLICATION_XML) - public ListBucketResponse get(@PathParam("volume") String volumeName) + public ListBucketResponse get(@Context HttpHeaders headers) throws OS3Exception, IOException { OzoneVolume volume; + ListBucketResponse response = new ListBucketResponse(); + + String volumeName = "s3" + parseUsername(headers).toLowerCase(); try { + //TODO: we need a specific s3bucketlist endpoint instead + // of reimplement the naming convention here volume = getVolume(volumeName); } catch (NotFoundException ex) { - LOG.error("Exception occurred in ListBucket: volume {} not found.", - volumeName, ex); - OS3Exception os3Exception = S3ErrorTable.newError(S3ErrorTable - .NO_SUCH_VOLUME, S3ErrorTable.Resource.VOLUME); - throw os3Exception; + return response; } catch (IOException e) { throw e; } Iterator volABucketIter = volume.listBuckets(null); - ListBucketResponse response = new ListBucketResponse(); - while(volABucketIter.hasNext()) { + while (volABucketIter.hasNext()) { OzoneBucket next = volABucketIter.next(); BucketMetadata bucketMetadata = new BucketMetadata(); bucketMetadata.setName(next.getName()); diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/object/package-info.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/package-info.java similarity index 91% rename from hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/object/package-info.java rename to hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/package-info.java index e255991d58..c55cdf4ecd 100644 --- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/object/package-info.java +++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/package-info.java @@ -17,7 +17,7 @@ */ /** - * Rest endpoint implementation for the Object specific methods. + * Rest endpoint implementation for the s3 gateway. 
*/ @javax.xml.bind.annotation.XmlSchema( namespace = "http://s3.amazonaws" @@ -26,4 +26,5 @@ xmlns = { @javax.xml.bind.annotation.XmlNs(namespaceURI = "http://s3.amazonaws" + ".com/doc/2006-03-01/", prefix = "")}) -package org.apache.hadoop.ozone.s3.object; \ No newline at end of file + +package org.apache.hadoop.ozone.s3.endpoint; diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/object/DeleteObject.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/object/DeleteObject.java deleted file mode 100644 index d5ef70e324..0000000000 --- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/object/DeleteObject.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.ozone.s3.object; - -import javax.ws.rs.DELETE; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import java.io.IOException; - -import org.apache.hadoop.ozone.client.OzoneBucket; -import org.apache.hadoop.ozone.s3.EndpointBase; - -/** - * Delete Object rest endpoint. 
- */ -@Path("/{volume}/{bucket}/{path:.+}") -public class DeleteObject extends EndpointBase { - - @DELETE - @Produces(MediaType.APPLICATION_XML) - public Response delete( - @PathParam("volume") String volumeName, - @PathParam("bucket") String bucketName, - @PathParam("path") String keyPath) throws IOException { - - OzoneBucket bucket = getBucket(volumeName, bucketName); - bucket.deleteKey(keyPath); - return Response. - ok() - .build(); - - } -} diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/object/HeadObject.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/object/HeadObject.java deleted file mode 100644 index 8bbdf7679b..0000000000 --- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/object/HeadObject.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.ozone.s3.object; - -import javax.ws.rs.HEAD; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import java.io.IOException; - -import org.apache.hadoop.ozone.client.OzoneKeyDetails; -import org.apache.hadoop.ozone.s3.EndpointBase; -import org.apache.hadoop.ozone.s3.exception.OS3Exception; -import org.apache.hadoop.ozone.s3.exception.S3ErrorTable; - -import org.apache.http.HttpStatus; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Get object info rest endpoint. - */ -@Path("/{volume}/{bucket}/{path:.+}") -public class HeadObject extends EndpointBase { - private static final Logger LOG = - LoggerFactory.getLogger(HeadObject.class); - - @HEAD - @Produces(MediaType.APPLICATION_XML) - public Response head( - @PathParam("volume") String volumeName, - @PathParam("bucket") String bucketName, - @PathParam("path") String keyPath) throws Exception { - OzoneKeyDetails key; - - try { - key = getVolume(volumeName).getBucket(bucketName).getKey(keyPath); - // TODO: return the specified range bytes of this object. 
- } catch (IOException ex) { - LOG.error("Exception occurred in HeadObject", ex); - if (ex.getMessage().contains("KEY_NOT_FOUND")) { - OS3Exception os3Exception = S3ErrorTable.newError(S3ErrorTable - .NO_SUCH_OBJECT, S3ErrorTable.Resource.OBJECT); - throw os3Exception; - } else { - throw ex; - } - } - - return Response.ok().status(HttpStatus.SC_OK) - .header("Last-Modified", key.getModificationTime()) - .header("ETag", "" + key.getModificationTime()) - .header("Content-Length", key.getDataSize()) - .header("Content-Type", "binary/octet-stream") - .build(); - } -} diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/object/PutObject.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/object/PutObject.java deleted file mode 100644 index 84c25eb71e..0000000000 --- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/object/PutObject.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.ozone.s3.object; - -import javax.ws.rs.DefaultValue; -import javax.ws.rs.HeaderParam; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.HttpHeaders; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import java.io.IOException; -import java.io.InputStream; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdds.client.ReplicationFactor; -import org.apache.hadoop.hdds.client.ReplicationType; -import org.apache.hadoop.hdds.conf.OzoneConfiguration; -import org.apache.hadoop.hdds.scm.ScmConfigKeys; -import org.apache.hadoop.ozone.client.OzoneBucket; -import org.apache.hadoop.ozone.client.io.OzoneOutputStream; -import org.apache.hadoop.ozone.s3.EndpointBase; - -import org.apache.commons.io.IOUtils; -import org.apache.http.HttpStatus; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * File upload. 
- */ -@Path("/{bucket}/{path:.+}") -public class PutObject extends EndpointBase { - - private static final Logger LOG = - LoggerFactory.getLogger(PutObject.class); - - @PUT - @Produces(MediaType.APPLICATION_XML) - public Response put( - @Context HttpHeaders headers, - @PathParam("bucket") String bucketName, - @PathParam("path") String keyPath, - @DefaultValue("STAND_ALONE" ) @QueryParam("replicationType") - ReplicationType replicationType, - @DefaultValue("ONE") @QueryParam("replicationFactor") - ReplicationFactor replicationFactor, - @DefaultValue("32 * 1024 * 1024") @QueryParam("chunkSize") - String chunkSize, - @HeaderParam("Content-Length") long length, - InputStream body) throws IOException { - - try { - Configuration config = new OzoneConfiguration(); - config.set(ScmConfigKeys.OZONE_SCM_CHUNK_SIZE_KEY, chunkSize); - - OzoneBucket bucket = getVolume(getOzoneVolumeName(bucketName)) - .getBucket(bucketName); - OzoneOutputStream output = bucket - .createKey(keyPath, length, replicationType, replicationFactor); - - IOUtils.copy(body, output); - output.close(); - - return Response.ok().status(HttpStatus.SC_OK) - .header("Content-Length", length) - .build(); - } catch (IOException ex) { - LOG.error("Exception occurred in PutObject", ex); - throw ex; - } - } -} \ No newline at end of file diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/client/OzoneVolumeStub.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/client/OzoneVolumeStub.java index c0c5e1101b..dd8701f557 100644 --- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/client/OzoneVolumeStub.java +++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/client/OzoneVolumeStub.java @@ -82,7 +82,8 @@ public Iterator listBuckets(String bucketPrefix) { return bucket.getName().startsWith(bucketPrefix); } else { return true; - }}) + } + }) .collect(Collectors.toList()) .iterator(); } diff --git 
a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/bucket/package-info.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/bucket/package-info.java deleted file mode 100644 index de09daef55..0000000000 --- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/bucket/package-info.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * Unit tests for the bucket related rest endpoints. 
- */ -package org.apache.hadoop.ozone.s3.bucket; \ No newline at end of file diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/bucket/TestDeleteBucket.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestBucketDelete.java similarity index 87% rename from hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/bucket/TestDeleteBucket.java rename to hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestBucketDelete.java index 513c33e7cc..5114a471eb 100644 --- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/bucket/TestDeleteBucket.java +++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestBucketDelete.java @@ -18,30 +18,31 @@ * */ -package org.apache.hadoop.ozone.s3.bucket; +package org.apache.hadoop.ozone.s3.endpoint; + +import javax.ws.rs.core.Response; import org.apache.hadoop.ozone.client.ObjectStore; import org.apache.hadoop.ozone.client.ObjectStoreStub; import org.apache.hadoop.ozone.client.OzoneClientStub; import org.apache.hadoop.ozone.s3.exception.OS3Exception; import org.apache.hadoop.ozone.s3.exception.S3ErrorTable; + import org.apache.http.HttpStatus; -import org.junit.Before; -import org.junit.Test; - -import javax.ws.rs.core.Response; - import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; +import org.junit.Before; +import org.junit.Test; /** * This class tests delete bucket functionality. 
*/ -public class TestDeleteBucket { +public class TestBucketDelete { + private String bucketName = "myBucket"; private OzoneClientStub clientStub; private ObjectStore objectStoreStub; - private DeleteBucket deleteBucket; + private BucketEndpoint bucketEndpoint; @Before public void setup() throws Exception { @@ -53,15 +54,15 @@ public void setup() throws Exception { objectStoreStub.createS3Bucket("ozone", bucketName); // Create HeadBucket and setClient to OzoneClientStub - deleteBucket = new DeleteBucket(); - deleteBucket.setClient(clientStub); + bucketEndpoint = new BucketEndpoint(); + bucketEndpoint.setClient(clientStub); } @Test - public void testDeleteBucket() throws Exception { - Response response = deleteBucket.delete(bucketName); + public void testBucketEndpoint() throws Exception { + Response response = bucketEndpoint.delete(bucketName); assertEquals(response.getStatus(), HttpStatus.SC_NO_CONTENT); } @@ -69,7 +70,7 @@ public void testDeleteBucket() throws Exception { @Test public void testDeleteWithNoSuchBucket() throws Exception { try { - deleteBucket.delete("unknownbucket"); + bucketEndpoint.delete("unknownbucket"); } catch (OS3Exception ex) { assertEquals(S3ErrorTable.NO_SUCH_BUCKET.getCode(), ex.getCode()); assertEquals(S3ErrorTable.NO_SUCH_BUCKET.getErrorMessage(), @@ -87,7 +88,7 @@ public void testDeleteWithBucketNotEmpty() throws Exception { objectStoreStub.createS3Bucket("ozone1", bucket); ObjectStoreStub stub = (ObjectStoreStub) objectStoreStub; stub.setBucketEmptyStatus(bucket, false); - deleteBucket.delete(bucket); + bucketEndpoint.delete(bucket); } catch (OS3Exception ex) { assertEquals(S3ErrorTable.BUCKET_NOT_EMPTY.getCode(), ex.getCode()); assertEquals(S3ErrorTable.BUCKET_NOT_EMPTY.getErrorMessage(), diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/bucket/TestGetBucket.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestBucketGet.java similarity index 80% rename from 
hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/bucket/TestGetBucket.java rename to hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestBucketGet.java index 123dd7946d..41778b2a2a 100644 --- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/bucket/TestGetBucket.java +++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestBucketGet.java @@ -1,4 +1,4 @@ -/* +/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -17,7 +17,7 @@ * under the License. * */ -package org.apache.hadoop.ozone.s3.bucket; +package org.apache.hadoop.ozone.s3.endpoint; import java.io.IOException; @@ -25,8 +25,6 @@ import org.apache.hadoop.ozone.client.OzoneClient; import org.apache.hadoop.ozone.client.OzoneClientStub; import org.apache.hadoop.ozone.s3.exception.OS3Exception; -import org.apache.hadoop.ozone.s3.object.ListObject; -import org.apache.hadoop.ozone.s3.object.ListObjectResponse; import org.junit.Assert; import org.junit.Test; @@ -34,19 +32,19 @@ /** * Testing basic object list browsing. 
*/ -public class TestGetBucket { +public class TestBucketGet { @Test public void listRoot() throws OS3Exception, IOException { - ListObject getBucket = new ListObject(); + BucketEndpoint getBucket = new BucketEndpoint(); OzoneClient client = createClientWithKeys("file1", "dir1/file2"); getBucket.setClient(client); ListObjectResponse getBucketResponse = - getBucket.get("vol1", "b1", "/", null, null, 100, "", null); + getBucket.list("b1", "/", null, null, 100, "", null); Assert.assertEquals(1, getBucketResponse.getCommonPrefixes().size()); Assert.assertEquals("dir1/", @@ -61,14 +59,14 @@ public void listRoot() throws OS3Exception, IOException { @Test public void listDir() throws OS3Exception, IOException { - ListObject getBucket = new ListObject(); + BucketEndpoint getBucket = new BucketEndpoint(); OzoneClient client = createClientWithKeys("dir1/file2", "dir1/dir2/file2"); getBucket.setClient(client); ListObjectResponse getBucketResponse = - getBucket.get("vol1", "b1", "/", null, null, 100, "dir1", null); + getBucket.list("b1", "/", null, null, 100, "dir1", null); Assert.assertEquals(1, getBucketResponse.getCommonPrefixes().size()); Assert.assertEquals("dir1/", @@ -81,14 +79,15 @@ public void listDir() throws OS3Exception, IOException { @Test public void listSubDir() throws OS3Exception, IOException { - ListObject getBucket = new ListObject(); + BucketEndpoint getBucket = new BucketEndpoint(); + OzoneClient ozoneClient = createClientWithKeys("dir1/file2", "dir1/dir2/file2"); getBucket.setClient(ozoneClient); ListObjectResponse getBucketResponse = - getBucket.get("vol1", "b1", "/", null, null, 100, "dir1/", null); + getBucket.list("b1", "/", null, null, 100, "dir1/", null); Assert.assertEquals(1, getBucketResponse.getCommonPrefixes().size()); Assert.assertEquals("dir1/dir2/", @@ -102,10 +101,12 @@ public void listSubDir() throws OS3Exception, IOException { private OzoneClient createClientWithKeys(String... 
keys) throws IOException { OzoneClient client = new OzoneClientStub(); - client.getObjectStore().createVolume("vol1"); - client.getObjectStore().getVolume("vol1").createBucket("b1"); + + client.getObjectStore().createS3Bucket("bilbo", "b1"); + String volume = client.getObjectStore().getOzoneVolumeName("b1"); + client.getObjectStore().getVolume(volume).createBucket("b1"); OzoneBucket bucket = - client.getObjectStore().getVolume("vol1").getBucket("b1"); + client.getObjectStore().getVolume(volume).getBucket("b1"); for (String key : keys) { bucket.createKey(key, 0).close(); } diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/bucket/TestHeadBucket.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestBucketHead.java similarity index 66% rename from hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/bucket/TestHeadBucket.java rename to hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestBucketHead.java index c392ac0e73..f06da703c2 100644 --- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/bucket/TestHeadBucket.java +++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestBucketHead.java @@ -18,30 +18,28 @@ * */ -package org.apache.hadoop.ozone.s3.bucket; - -import org.apache.hadoop.ozone.client.ObjectStore; -import org.apache.hadoop.ozone.client.OzoneClientStub; -import org.apache.hadoop.ozone.s3.exception.OS3Exception; -import org.apache.hadoop.ozone.s3.exception.S3ErrorTable; -import org.junit.Before; -import org.junit.Test; +package org.apache.hadoop.ozone.s3.endpoint; import javax.ws.rs.core.Response; +import org.apache.hadoop.ozone.client.ObjectStore; +import org.apache.hadoop.ozone.client.OzoneClientStub; + +import org.junit.Assert; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import org.junit.Before; +import org.junit.Test; /** * This class test HeadBucket functionality. 
*/ -public class TestHeadBucket { +public class TestBucketHead { private String bucketName = "myBucket"; private String userName = "ozone"; private OzoneClientStub clientStub; private ObjectStore objectStoreStub; - private HeadBucket headBucket; + private BucketEndpoint bucketEndpoint; @Before public void setup() throws Exception { @@ -53,33 +51,21 @@ public void setup() throws Exception { objectStoreStub.createS3Bucket(userName, bucketName); // Create HeadBucket and setClient to OzoneClientStub - headBucket = new HeadBucket(); - headBucket.setClient(clientStub); + bucketEndpoint = new BucketEndpoint(); + bucketEndpoint.setClient(clientStub); } @Test public void testHeadBucket() throws Exception { - Response response = headBucket.head(bucketName); + Response response = bucketEndpoint.head(bucketName); assertEquals(200, response.getStatus()); } @Test - public void testHeadFail() { - try { - headBucket.head("unknownbucket"); - } catch (Exception ex) { - if (ex instanceof OS3Exception) { - assertEquals(S3ErrorTable.NO_SUCH_BUCKET.getCode(), - ((OS3Exception) ex).getCode()); - assertEquals(S3ErrorTable.NO_SUCH_BUCKET.getErrorMessage(), ( - (OS3Exception) ex).getErrorMessage()); - } else { - fail("testHeadFail failed"); - } - return; - } - fail("testHeadFail failed"); + public void testHeadFail() throws Exception { + Response response = bucketEndpoint.head("unknownbucket"); + Assert.assertEquals(400, response.getStatus()); } } diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/bucket/TestBucketResponse.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestBucketResponse.java similarity index 91% rename from hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/bucket/TestBucketResponse.java rename to hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestBucketResponse.java index efc69bea8d..7c5bfadad7 100644 --- 
a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/bucket/TestBucketResponse.java +++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestBucketResponse.java @@ -17,13 +17,11 @@ * under the License. * */ -package org.apache.hadoop.ozone.s3.bucket; +package org.apache.hadoop.ozone.s3.endpoint; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; -import org.apache.hadoop.ozone.s3.object.ListObjectResponse; - import org.junit.Test; /** diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/object/TestDeleteObject.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectDelete.java similarity index 74% rename from hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/object/TestDeleteObject.java rename to hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectDelete.java index 6c06a76742..395acebe2c 100644 --- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/object/TestDeleteObject.java +++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectDelete.java @@ -17,13 +17,14 @@ * under the License. * */ -package org.apache.hadoop.ozone.s3.object; +package org.apache.hadoop.ozone.s3.endpoint; import java.io.IOException; import org.apache.hadoop.ozone.client.OzoneBucket; import org.apache.hadoop.ozone.client.OzoneClient; import org.apache.hadoop.ozone.client.OzoneClientStub; +import org.apache.hadoop.ozone.s3.exception.OS3Exception; import org.junit.Assert; import org.junit.Test; @@ -31,23 +32,26 @@ /** * Test delete object. 
*/ -public class TestDeleteObject { +public class TestObjectDelete { @Test - public void delete() throws IOException { + public void delete() throws IOException, OS3Exception { //GIVEN OzoneClient client = new OzoneClientStub(); - client.getObjectStore().createVolume("vol1"); - client.getObjectStore().getVolume("vol1").createBucket("b1"); + client.getObjectStore().createS3Bucket("bilbo", "b1"); + + String volumeName = client.getObjectStore().getOzoneVolumeName("b1"); + OzoneBucket bucket = - client.getObjectStore().getVolume("vol1").getBucket("b1"); + client.getObjectStore().getVolume(volumeName).getBucket("b1"); + bucket.createKey("key1", 0).close(); - DeleteObject rest = new DeleteObject(); + ObjectEndpoint rest = new ObjectEndpoint(); rest.setClient(client); //WHEN - rest.delete("vol1", "b1", "key1"); + rest.delete("b1", "key1"); //THEN Assert.assertFalse("Bucket Should not contain any key after delete", diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectGet.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectGet.java new file mode 100644 index 0000000000..65abb1d57e --- /dev/null +++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectGet.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * + */ + +package org.apache.hadoop.ozone.s3.endpoint; + +import javax.ws.rs.core.HttpHeaders; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.nio.charset.Charset; + +import org.apache.hadoop.ozone.client.OzoneBucket; +import org.apache.hadoop.ozone.client.OzoneClientStub; +import org.apache.hadoop.ozone.client.OzoneVolume; +import org.apache.hadoop.ozone.client.io.OzoneInputStream; +import org.apache.hadoop.ozone.client.io.OzoneOutputStream; +import org.apache.hadoop.ozone.s3.exception.OS3Exception; + +import org.apache.commons.io.IOUtils; +import org.junit.Assert; +import org.junit.Test; +import org.mockito.Mockito; + +/** + * Test get object. 
+ */ +public class TestObjectGet { + + public static final String CONTENT = "0123456789"; + + @Test + public void get() throws IOException, OS3Exception { + //GIVEN + OzoneClientStub client = new OzoneClientStub(); + client.getObjectStore().createS3Bucket("bilbo", "b1"); + String volumeName = client.getObjectStore().getOzoneVolumeName("b1"); + OzoneVolume volume = client.getObjectStore().getVolume(volumeName); + volume.createBucket("b1"); + OzoneBucket bucket = + volume.getBucket("b1"); + OzoneOutputStream keyStream = + bucket.createKey("key1", CONTENT.getBytes().length); + keyStream.write(CONTENT.getBytes()); + keyStream.close(); + + ObjectEndpoint rest = new ObjectEndpoint(); + rest.setClient(client); + HttpHeaders headers = Mockito.mock(HttpHeaders.class); + + ByteArrayInputStream body = new ByteArrayInputStream(CONTENT.getBytes()); + + //WHEN + rest.get(headers, "b1", "key1", body); + + //THEN + OzoneInputStream ozoneInputStream = + volume.getBucket("b1") + .readKey("key1"); + String keyContent = + IOUtils.toString(ozoneInputStream, Charset.forName("UTF-8")); + + Assert.assertEquals(CONTENT, keyContent); + } +} \ No newline at end of file diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/object/TestHeadObject.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectHead.java similarity index 82% rename from hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/object/TestHeadObject.java rename to hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectHead.java index 5b65dac159..446c2c91a1 100644 --- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/object/TestHeadObject.java +++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectHead.java @@ -17,32 +17,33 @@ * under the License. 
* */ -package org.apache.hadoop.ozone.s3.object; +package org.apache.hadoop.ozone.s3.endpoint; + +import javax.ws.rs.core.Response; +import java.io.IOException; -import org.apache.commons.lang3.RandomStringUtils; import org.apache.hadoop.hdds.client.ReplicationFactor; import org.apache.hadoop.hdds.client.ReplicationType; -import org.apache.hadoop.ozone.client.*; +import org.apache.hadoop.ozone.client.ObjectStore; +import org.apache.hadoop.ozone.client.OzoneBucket; +import org.apache.hadoop.ozone.client.OzoneClientStub; import org.apache.hadoop.ozone.client.io.OzoneOutputStream; +import org.apache.hadoop.ozone.s3.exception.OS3Exception; + +import static java.net.HttpURLConnection.HTTP_NOT_FOUND; +import org.apache.commons.lang3.RandomStringUtils; import org.junit.Assert; import org.junit.Before; import org.junit.Test; -import javax.ws.rs.core.Response; -import org.apache.hadoop.ozone.s3.exception.OS3Exception; -import java.io.IOException; - -import static java.net.HttpURLConnection.HTTP_NOT_FOUND; - /** * Test head object. 
*/ -public class TestHeadObject { - private String volName = "vol1"; +public class TestObjectHead { private String bucketName = "b1"; private OzoneClientStub clientStub; private ObjectStore objectStoreStub; - private HeadObject headObject; + private ObjectEndpoint keyEndpoint; private OzoneBucket bucket; @Before @@ -52,14 +53,14 @@ public void setup() throws IOException { objectStoreStub = clientStub.getObjectStore(); // Create volume and bucket - objectStoreStub.createVolume(volName); - OzoneVolume volumeStub = objectStoreStub.getVolume(volName); - volumeStub.createBucket(bucketName); + objectStoreStub.createS3Bucket("bilbo", bucketName); + String volName = objectStoreStub.getOzoneVolumeName(bucketName); + bucket = objectStoreStub.getVolume(volName).getBucket(bucketName); // Create HeadBucket and setClient to OzoneClientStub - headObject = new HeadObject(); - headObject.setClient(clientStub); + keyEndpoint = new ObjectEndpoint(); + keyEndpoint.setClient(clientStub); } @Test @@ -73,7 +74,7 @@ public void testHeadObject() throws Exception { out.close(); //WHEN - Response response = headObject.head(volName, bucketName, "key1"); + Response response = keyEndpoint.head(bucketName, "key1"); //THEN Assert.assertEquals(200, response.getStatus()); @@ -85,7 +86,7 @@ public void testHeadObject() throws Exception { public void testHeadFailByBadName() throws Exception { //Head an object that doesn't exist. 
try { - headObject.head(volName, bucketName, "badKeyName"); + keyEndpoint.head(bucketName, "badKeyName"); } catch (OS3Exception ex) { Assert.assertTrue(ex.getCode().contains("NoSuchObject")); Assert.assertTrue(ex.getErrorMessage().contains("object does not exist")); diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/object/TestPutObject.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestPutObject.java similarity index 85% rename from hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/object/TestPutObject.java rename to hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestPutObject.java index a8f0648641..c3607dab4b 100644 --- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/object/TestPutObject.java +++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestPutObject.java @@ -18,7 +18,7 @@ * */ -package org.apache.hadoop.ozone.s3.object; +package org.apache.hadoop.ozone.s3.endpoint; import javax.ws.rs.core.HttpHeaders; import javax.ws.rs.core.Response; @@ -26,11 +26,12 @@ import java.io.IOException; import java.nio.charset.Charset; +import org.apache.hadoop.hdds.client.ReplicationFactor; +import org.apache.hadoop.hdds.client.ReplicationType; import org.apache.hadoop.ozone.client.ObjectStore; import org.apache.hadoop.ozone.client.OzoneClientStub; import org.apache.hadoop.ozone.client.io.OzoneInputStream; -import org.apache.hadoop.hdds.client.ReplicationFactor; -import org.apache.hadoop.hdds.client.ReplicationType; +import org.apache.hadoop.ozone.s3.exception.OS3Exception; import org.apache.commons.io.IOUtils; import org.junit.Assert; @@ -48,7 +49,7 @@ public class TestPutObject { private String keyName = "key1"; private OzoneClientStub clientStub; private ObjectStore objectStoreStub; - private PutObject putObject; + private ObjectEndpoint objectEndpoint; @Before public void setup() throws IOException { @@ -60,23 +61,24 @@ public 
void setup() throws IOException { objectStoreStub.createS3Bucket(userName, bucketName); // Create PutObject and setClient to OzoneClientStub - putObject = new PutObject(); - putObject.setClient(clientStub); + objectEndpoint = new ObjectEndpoint(); + objectEndpoint.setClient(clientStub); } @Test - public void testPutObject() throws IOException { + public void testPutObject() throws IOException, OS3Exception { //GIVEN HttpHeaders headers = Mockito.mock(HttpHeaders.class); ByteArrayInputStream body = new ByteArrayInputStream(CONTENT.getBytes()); //WHEN - Response response = putObject.put(headers, bucketName, keyName, + Response response = objectEndpoint.put(headers, bucketName, keyName, ReplicationType.STAND_ALONE, ReplicationFactor.ONE, "32 * 1024 * 1024", CONTENT.length(), body); //THEN - String volumeName = clientStub.getObjectStore().getOzoneVolumeName(bucketName); + String volumeName = clientStub.getObjectStore() + .getOzoneVolumeName(bucketName); OzoneInputStream ozoneInputStream = clientStub.getObjectStore().getVolume(volumeName).getBucket(bucketName) .readKey(keyName); diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/bucket/TestListBucket.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestRootList.java similarity index 62% rename from hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/bucket/TestListBucket.java rename to hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestRootList.java index d0db81524e..65fdf4e0cd 100644 --- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/bucket/TestListBucket.java +++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestRootList.java @@ -18,32 +18,31 @@ * */ -package org.apache.hadoop.ozone.s3.bucket; +package org.apache.hadoop.ozone.s3.endpoint; + +import javax.ws.rs.core.HttpHeaders; -import org.apache.commons.lang3.RandomStringUtils; import 
org.apache.hadoop.ozone.client.ObjectStore; import org.apache.hadoop.ozone.client.OzoneClientStub; import org.apache.hadoop.ozone.client.OzoneVolume; -import org.apache.hadoop.ozone.s3.exception.OS3Exception; -import org.apache.hadoop.ozone.s3.exception.S3ErrorTable; + +import org.apache.commons.lang3.RandomStringUtils; +import static org.junit.Assert.assertEquals; import org.junit.Before; import org.junit.Test; - -import javax.ws.rs.core.Response; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import org.mockito.Mockito; +import static org.mockito.Mockito.when; /** * This class test HeadBucket functionality. */ -public class TestListBucket { +public class TestRootList { private String volumeName = "vol1"; private OzoneClientStub clientStub; private ObjectStore objectStoreStub; - OzoneVolume volumeStub; - private ListBucket listBucket; + private OzoneVolume volumeStub; + private RootEndpoint rootEndpoint; @Before public void setup() throws Exception { @@ -51,22 +50,21 @@ public void setup() throws Exception { //Create client stub and object store stub. clientStub = new OzoneClientStub(); objectStoreStub = clientStub.getObjectStore(); - - // Create volume and bucket - objectStoreStub.createVolume(volumeName); - - volumeStub = objectStoreStub.getVolume(volumeName); - //volumeStub.createBucket(bucketName); + objectStoreStub.createVolume("s3key"); + volumeStub = objectStoreStub.getVolume("s3key"); // Create HeadBucket and setClient to OzoneClientStub - listBucket = new ListBucket(); - listBucket.setClient(clientStub); + rootEndpoint = new RootEndpoint(); + rootEndpoint.setClient(clientStub); } @Test public void testListBucket() throws Exception { + HttpHeaders headers = Mockito.mock(HttpHeaders.class); + when(headers.getHeaderString("Authorization")).thenReturn("AWS key:secret"); + // List operation should success even there is no bucket. 
- ListBucketResponse response = listBucket.get(volumeName); + ListBucketResponse response = rootEndpoint.get(headers); assertEquals(0, response.getBucketsNum()); String bucketBaseName = "bucket-"; @@ -74,24 +72,8 @@ public void testListBucket() throws Exception { volumeStub.createBucket( bucketBaseName + RandomStringUtils.randomNumeric(3)); } - response = listBucket.get(volumeName); + response = rootEndpoint.get(headers); assertEquals(10, response.getBucketsNum()); } - @Test - public void testListBucketFail() { - try { - listBucket.get("badVolumeName"); - } catch (Exception ex) { - if (ex instanceof OS3Exception) { - assertEquals(S3ErrorTable.NO_SUCH_VOLUME.getCode(), - ((OS3Exception) ex).getCode()); - assertEquals(S3ErrorTable.NO_SUCH_VOLUME.getErrorMessage(), ( - (OS3Exception) ex).getErrorMessage()); - } else { - fail("testHeadFail failed"); - } - return; - } - } } diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/object/package-info.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/package-info.java similarity index 89% rename from hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/object/package-info.java rename to hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/package-info.java index 897e2095a6..d320041e5d 100644 --- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/object/package-info.java +++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/package-info.java @@ -16,6 +16,6 @@ * limitations under the License. */ /** - * Unit tests for the object related rest endpoints. + * Unit tests for the rest endpoint implementations. */ -package org.apache.hadoop.ozone.s3.object; \ No newline at end of file +package org.apache.hadoop.ozone.s3.endpoint; \ No newline at end of file