implement S3 native tagging #8811


Merged · 1 commit · Aug 12, 2023
Changes from all commits
15 changes: 15 additions & 0 deletions localstack/aws/api/s3/__init__.py
@@ -831,6 +831,21 @@ class InvalidPart(ServiceException):
PartNumber: Optional[PartNumber]


class NoSuchTagSet(ServiceException):
code: str = "NoSuchTagSet"
sender_fault: bool = False
status_code: int = 404
BucketName: Optional[BucketName]


class InvalidTag(ServiceException):
code: str = "InvalidTag"
sender_fault: bool = False
status_code: int = 400
TagKey: Optional[ObjectKey]
TagValue: Optional[Value]


AbortDate = datetime


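The two new exception classes model the S3 tagging error responses: `NoSuchTagSet` (HTTP 404) and `InvalidTag` (HTTP 400). A hedged sketch of how a provider could raise them — the provider-side changes are not part of this excerpt, and the messages mirror AWS's wording:

```python
from typing import Optional

from localstack.aws.api.s3 import InvalidTag, NoSuchTagSet


def get_bucket_tagging_sketch(bucket_name: str, tags: Optional[dict]) -> dict:
    # GetBucketTagging on a bucket without a tag set fails with a 404 "NoSuchTagSet" error
    if not tags:
        raise NoSuchTagSet(
            "The TagSet does not exist",
            BucketName=bucket_name,
        )
    return {"TagSet": [{"Key": k, "Value": v} for k, v in tags.items()]}


def put_reserved_tag_sketch(key: str, value: str) -> None:
    # tag keys using the reserved "aws:" prefix are rejected with a 400 "InvalidTag" error
    if key.startswith("aws:"):
        raise InvalidTag(
            "System tags cannot be added/updated by requester",
            TagKey=key,
            TagValue=value,
        )
```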
39 changes: 39 additions & 0 deletions localstack/aws/spec-patches.json
@@ -992,6 +992,45 @@
"documentation": "<p>One or more of the specified parts could not be found. The part might not have been uploaded, or the specified entity tag might not have matched the part's entity tag.</p>",
"exception": true
}
},
{
"op": "add",
"path": "/shapes/NoSuchTagSet",
"value": {
"type": "structure",
"members": {
"BucketName": {
"shape": "BucketName"
}
},
"error": {
"httpStatusCode": 404
},
"documentation": "<p>There is no tag set associated with the bucket.</p>",
"exception": true
}
},
{
"op": "add",
"path": "/operations/PutBucketTagging/http/responseCode",
"value": 204
},
{
"op": "add",
"path": "/shapes/InvalidTag",
"value": {
"type": "structure",
"members": {
"TagKey": {
"shape": "ObjectKey"
},
"TagValue": {
"shape": "Value"
}
},
"documentation": "<p>The tag provided was not a valid tag. This error can occur if the tag did not pass input validation.</p>",
"exception": true
}
}
]
}
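The patches above are RFC 6902-style `add` operations against the botocore service definition for S3: they register the two new error shapes and make `PutBucketTagging` respond with HTTP 204. A minimal sketch of what such an operation does, run against a toy spec with the third-party `jsonpatch` package (whether LocalStack applies its spec patches with this library or its own applier is not shown in this excerpt):

```python
import jsonpatch  # illustrative only: pip install jsonpatch

# tiny stand-in for the loaded S3 service definition (the real one comes from botocore)
spec = {
    "operations": {"PutBucketTagging": {"http": {"method": "PUT", "requestUri": "/{Bucket}?tagging"}}},
    "shapes": {"BucketName": {"type": "string"}},
}

ops = [
    {"op": "add", "path": "/operations/PutBucketTagging/http/responseCode", "value": 204},
    {
        "op": "add",
        "path": "/shapes/NoSuchTagSet",
        "value": {
            "type": "structure",
            "members": {"BucketName": {"shape": "BucketName"}},
            "error": {"httpStatusCode": 404},
            "exception": True,
        },
    },
]

patched = jsonpatch.apply_patch(spec, ops)
assert patched["operations"]["PutBucketTagging"]["http"]["responseCode"] == 204
assert patched["shapes"]["NoSuchTagSet"]["error"]["httpStatusCode"] == 404
```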
77 changes: 72 additions & 5 deletions localstack/services/s3/utils.py
@@ -11,7 +11,7 @@
import moto.s3.models as moto_s3_models
from botocore.exceptions import ClientError
from botocore.utils import InvalidArnException
-from moto.s3.exceptions import MissingBucket
+from moto.s3.exceptions import MalformedXML, MissingBucket
from moto.s3.models import FakeBucket, FakeDeleteMarker, FakeKey
from moto.s3.utils import clean_key_name

@@ -23,6 +23,7 @@
CopySource,
InvalidArgument,
InvalidRange,
InvalidTag,
LifecycleExpiration,
LifecycleRule,
LifecycleRules,
@@ -34,6 +35,8 @@
ObjectVersionId,
Owner,
SSEKMSKeyId,
TaggingHeader,
TagSet,
)
from localstack.aws.connect import connect_to
from localstack.services.s3.constants import (
@@ -62,6 +65,8 @@
REGION_REGEX = r"[a-z]{2}-[a-z]+-[0-9]{1,}"
PORT_REGEX = r"(:[\d]{0,6})?"

TAG_REGEX = re.compile(r"^[\w\s.:/=+\-@]*$")

S3_VIRTUAL_HOSTNAME_REGEX = ( # path based refs have at least valid bucket expression (separated by .) followed by .s3
r"^(http(s)?://)?((?!s3\.)[^\./]+)\." # the negative lookahead part is for considering buckets
r"(((s3(-website)?\.({}\.)?)localhost(\.localstack\.cloud)?)|(localhost\.localstack\.cloud)|"
@@ -72,10 +77,6 @@
)
_s3_virtual_host_regex = re.compile(S3_VIRTUAL_HOSTNAME_REGEX)

-PATTERN_UUID = re.compile(
-    r"[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}"
-)


RFC1123 = "%a, %d %b %Y %H:%M:%S GMT"

@@ -690,3 +691,69 @@ def validate_dict_fields(data: dict, required_fields: set, optional_fields: set)
return (set_fields := set(data)) >= required_fields and set_fields <= (
required_fields | optional_fields
)


def parse_tagging_header(tagging_header: TaggingHeader) -> dict:
try:
parsed_tags = urlparser.parse_qs(tagging_header, keep_blank_values=True)
tags: dict[str, str] = {}
for key, val in parsed_tags.items():
if len(val) != 1 or not TAG_REGEX.match(key) or not TAG_REGEX.match(val[0]):
raise InvalidArgument(
"The header 'x-amz-tagging' shall be encoded as UTF-8 then URLEncoded URL query parameters without tag name duplicates.",
ArgumentName="x-amz-tagging",
ArgumentValue=tagging_header,
)
elif key.startswith("aws:"):
raise
tags[key] = val[0]
return tags

except ValueError:
raise InvalidArgument(
"The header 'x-amz-tagging' shall be encoded as UTF-8 then URLEncoded URL query parameters without tag name duplicates.",
ArgumentName="x-amz-tagging",
ArgumentValue=tagging_header,
)
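
# Illustrative behavior of parse_tagging_header (header values below are made up):
#
#   parse_tagging_header("project=localstack&stage=test")
#       -> {"project": "localstack", "stage": "test"}
#   parse_tagging_header("stage=test&stage=prod")
#       -> raises InvalidArgument (duplicate tag name in the "x-amz-tagging" header)
#   parse_tagging_header("stage=te,st")
#       -> raises InvalidArgument ("," is not matched by TAG_REGEX)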


def validate_tag_set(tag_set: TagSet, type_set: Literal["bucket", "object"] = "bucket"):
keys = set()
for tag in tag_set:
if set(tag) != {"Key", "Value"}:
raise MalformedXML()

key = tag["Key"]
if key in keys:
raise InvalidTag(
"Cannot provide multiple Tags with the same key",
TagKey=key,
)

if key.startswith("aws:"):
if type_set == "bucket":
message = "System tags cannot be added/updated by requester"
else:
message = "Your TagKey cannot be prefixed with aws:"
raise InvalidTag(
message,
TagKey=key,
)

if not TAG_REGEX.match(key):
raise InvalidTag(
"The TagKey you have provided is invalid",
TagKey=key,
)
elif not TAG_REGEX.match(tag["Value"]):
raise InvalidTag(
"The TagValue you have provided is invalid", TagKey=key, TagValue=tag["Value"]
)

keys.add(key)


def get_unique_key_id(
bucket: BucketName, object_key: ObjectKey, version_id: ObjectVersionId
) -> str:
return f"{bucket}/{object_key}/{version_id or 'null'}"
17 changes: 13 additions & 4 deletions localstack/services/s3/v3/models.py
@@ -68,6 +68,8 @@
CrossRegionAttribute,
LocalAttribute,
)
from localstack.utils.aws import arns
from localstack.utils.tagging import TaggingService

# TODO: beware of timestamp data, we need the snapshot to be more precise for S3, with the different types
# moto had a lot of issue with it? not sure about our parser/serializer
@@ -127,19 +129,23 @@ def __init__(

# see https://docs.aws.amazon.com/AmazonS3/latest/API/API_Owner.html
self.owner = get_owner_for_account_id(account_id)
self.bucket_arn = arns.s3_bucket_arn(self.name)

def get_object(
self,
key: ObjectKey,
version_id: ObjectVersionId = None,
-        http_method: Literal["GET", "PUT", "HEAD"] = "GET",
-    ) -> "S3Object":
+        http_method: Literal["GET", "PUT", "HEAD", "DELETE"] = "GET",
+        raise_for_delete_marker: bool = True,
+    ) -> Union["S3Object", "S3DeleteMarker"]:
"""
:param key: the Object Key
:param version_id: optional, the versionId of the object
:param http_method: the HTTP method of the original call. This is necessary for the exception if the bucket is
versioned or suspended
see: https://docs.aws.amazon.com/AmazonS3/latest/userguide/DeleteMarker.html
        :param raise_for_delete_marker: optional, indicates if the method should raise an exception if the found
         object is an S3DeleteMarker. If False, it can return an S3DeleteMarker
:return:
:raises NoSuchKey if the object key does not exist at all, or if the object is a DeleteMarker
:raises MethodNotAllowed if the object is a DeleteMarker and the operation is not allowed against it
@@ -168,7 +174,7 @@ def get_object(
Key=key,
VersionId=version_id,
)
-            elif isinstance(s3_object_version, S3DeleteMarker):
+            elif raise_for_delete_marker and isinstance(s3_object_version, S3DeleteMarker):
raise MethodNotAllowed(
"The specified method is not allowed against this resource.",
Method=http_method,
@@ -184,7 +190,7 @@
if not s3_object:
raise NoSuchKey("The specified key does not exist.", Key=key)

-        elif isinstance(s3_object, S3DeleteMarker):
+        elif raise_for_delete_marker and isinstance(s3_object, S3DeleteMarker):
raise NoSuchKey(
"The specified key does not exist.",
Key=key,
@@ -573,6 +579,9 @@ class S3Store(BaseStore):
global_bucket_map: dict[BucketName, AccountId] = CrossAccountAttribute(default=dict)
aws_managed_kms_key_id: SSEKMSKeyId = LocalAttribute(default=str)

# static tagging service instance
TAGS: TaggingService = CrossAccountAttribute(default=TaggingService)


class BucketCorsIndex:
def __init__(self):
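The store now holds a cross-account `TaggingService` instance and each `S3Bucket` carries its ARN, which suggests bucket tags are keyed by the bucket ARN and object tags by the identifier from `get_unique_key_id()`. A hedged sketch of that convention — the provider code is not in this excerpt, and the `TaggingService` method names below are assumed from LocalStack's shared tagging helper rather than confirmed by this diff:

```python
from localstack.services.s3.utils import get_unique_key_id
from localstack.utils.tagging import TaggingService

tagging = TaggingService()  # S3Store.TAGS is a shared, cross-account instance of this class

# assumed keying convention: bucket tags under the bucket ARN,
# object tags under "<bucket>/<key>/<version-or-null>"
bucket_arn = "arn:aws:s3:::my-bucket"
object_id = get_unique_key_id("my-bucket", "my-key.txt", None)

# assumed TaggingService API (tag_resource / list_tags_for_resource); not shown in this diff
tagging.tag_resource(bucket_arn, [{"Key": "team", "Value": "storage"}])
tagging.tag_resource(object_id, [{"Key": "stage", "Value": "test"}])

print(tagging.list_tags_for_resource(bucket_arn))  # {"Tags": [{"Key": "team", "Value": "storage"}]}
print(tagging.list_tags_for_resource(object_id))   # {"Tags": [{"Key": "stage", "Value": "test"}]}
```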