def get_s3_client():
    """Build a boto3 S3 client from Django-style settings.

    The endpoint URL is taken from ``settings.AWS_S3_ENDPOINT_URL`` when
    defined, falling back to the LocalStack default so local development
    keeps working unchanged.  Signature version ``s3v4`` is required for
    presigned URLs against most modern S3 backends (including LocalStack).

    Returns:
        A configured ``botocore`` S3 client.
    """
    # NOTE(review): previously the LocalStack endpoint was hard-coded here,
    # which silently pointed production at localhost:4566 too.  Reading it
    # from settings (with the old value as the default) is backward-compatible.
    endpoint_url = getattr(settings, "AWS_S3_ENDPOINT_URL", "http://localhost:4566")
    return boto3.client(
        service_name="s3",
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
        region_name=settings.AWS_S3_REGION_NAME,
        endpoint_url=endpoint_url,
        config=boto3.session.Config(signature_version="s3v4"),
    )
def get_s3_get_presigned_url(file_path: str, file_size: int, max_parts: int, file_type: str) -> dict:
    """Start a multipart upload and presign one URL per part.

    Args:
        file_path: Object key under which the file will be stored.
        file_size: Total size of the file in bytes; used to compute the
            number of parts.
        max_parts: Upper bound on the number of parts (passed through to
            ``calculate_number_of_parts``).
        file_type: MIME type recorded as the object's ``ContentType``.

    Returns:
        A dict with ``presigned_urls`` (list of ``{"url", "part_number"}``
        dicts, part numbers starting at 1), ``upload_id``, and ``file_path``.

    NOTE(review): the caller must still upload each part via PUT to its
    presigned URL and then call ``complete_multipart_upload`` — creating
    the upload alone never makes the object appear in the bucket.
    """
    expires_in = settings.AWS_S3_PRESIGNED_EXPIRY
    s3_client = get_s3_client()

    # Do NOT URL-encode the key here: boto3 encodes keys itself, and the
    # previous "encoding" step was a no-op with a misleading comment.
    multipart_upload = s3_client.create_multipart_upload(
        Bucket=settings.AWS_STORAGE_BUCKET_NAME,
        Key=file_path,
        ContentType=file_type,
    )
    upload_id = multipart_upload["UploadId"]

    presigned_urls = []
    number_of_parts = calculate_number_of_parts(file_size, max_parts)
    # S3 part numbers are 1-based.
    for part_number in range(1, number_of_parts + 1):
        presigned_url = s3_client.generate_presigned_url(
            "upload_part",
            Params={
                "Bucket": settings.AWS_STORAGE_BUCKET_NAME,
                "Key": file_path,
                "UploadId": upload_id,
                "PartNumber": part_number,
            },
            ExpiresIn=expires_in,
        )
        presigned_urls.append({"url": presigned_url, "part_number": part_number})

    return {
        "presigned_urls": presigned_urls,
        "upload_id": upload_id,
        "file_path": file_path,
    }
I successfully created the AWS access key, secret key, and bucket locally. When I hit the URL, it responds with the presigned URLs for the file parts successfully, but when I check the bucket, the file was not uploaded.
AWS_STORAGE_BUCKET_NAME=data-storage-bucket
AWS_ACCESS_KEY_ID=test
AWS_SECRET_ACCESS_KEY=test
AWS_S3_REGION_NAME=ca-central-1
I used these commands:
aws configure
docker run --rm -it -p 4566:4566 -p 4571:4571 localstack/localstack
awslocal s3 mb s3://my-bucket