Initial Commit

This commit is contained in:
Brett Woodruff
2024-06-10 12:24:37 -04:00
commit 106024bcb4
2223 changed files with 241071 additions and 0 deletions

View File

@@ -0,0 +1,111 @@
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import logging
from boto3.compat import _warn_deprecated_python
from boto3.session import Session
__author__ = 'Amazon Web Services'
__version__ = '1.34.39'
# The default Boto3 session; autoloaded when needed.
DEFAULT_SESSION = None
def setup_default_session(**kwargs):
    """
    Create the module-level default session.

    All keyword arguments are forwarded unchanged to the
    :py:class:`~boto3.session.Session` constructor. Calling this is only
    necessary when custom session parameters are desired; otherwise a
    default session is created automatically on first use.
    """
    global DEFAULT_SESSION
    session = Session(**kwargs)
    DEFAULT_SESSION = session
def set_stream_logger(name='boto3', level=logging.DEBUG, format_string=None):
    """
    Add a stream handler for the given name and level to the logging module.
    By default, this logs all boto3 messages to ``stdout``.
        >>> import boto3
        >>> boto3.set_stream_logger('boto3.resources', logging.INFO)

    For debugging purposes a good choice is to set the stream logger to ``''``
    which is equivalent to saying "log everything".

    .. WARNING::
       Be aware that when logging anything from ``'botocore'`` the full wire
       trace will appear in your logs. If your payloads contain sensitive data
       this should not be used in production.

    :type name: string
    :param name: Log name
    :type level: int
    :param level: Logging level, e.g. ``logging.INFO``
    :type format_string: str
    :param format_string: Log message format
    """
    # An explicit ``is None`` check so an empty-string format is honored.
    if format_string is None:
        format_string = "%(asctime)s %(name)s [%(levelname)s] %(message)s"
    target_logger = logging.getLogger(name)
    target_logger.setLevel(level)
    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(level)
    stream_handler.setFormatter(logging.Formatter(format_string))
    target_logger.addHandler(stream_handler)
def _get_default_session():
    """
    Return the default session, lazily creating it on first use.

    Also emits the Python-version deprecation warning (if applicable)
    on every call.

    :rtype: :py:class:`~boto3.session.Session`
    :return: The default session
    """
    session = DEFAULT_SESSION
    if session is None:
        setup_default_session()
        session = DEFAULT_SESSION
    _warn_deprecated_python()
    return session
def client(*args, **kwargs):
    """
    Create a low-level service client by name using the default session.

    See :py:meth:`boto3.session.Session.client`.
    """
    session = _get_default_session()
    return session.client(*args, **kwargs)
def resource(*args, **kwargs):
    """
    Create a resource service client by name using the default session.

    See :py:meth:`boto3.session.Session.resource`.
    """
    session = _get_default_session()
    return session.resource(*args, **kwargs)
# Set up logging to ``/dev/null`` like a library is supposed to.
# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library
class NullHandler(logging.Handler):
    """A logging handler that silently discards every record.

    Installed on the ``'boto3'`` logger so the library emits nothing
    unless the application configures its own handlers.
    """

    def emit(self, record):
        # Deliberately drop the record.
        pass


logging.getLogger('boto3').addHandler(NullHandler())

View File

@@ -0,0 +1,82 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import sys
import os
import errno
import socket
import warnings
from boto3.exceptions import PythonDeprecationWarning
# In python3, socket.error is OSError, which is too general
# for what we want (i.e FileNotFoundError is a subclass of OSError).
# In py3 all the socket related errors are in a newly created
# ConnectionError
SOCKET_ERROR = ConnectionError
import collections.abc as collections_abc
if sys.platform.startswith('win'):
    def rename_file(current_filename, new_filename):
        """Atomically move ``current_filename`` over ``new_filename``.

        The original implementation removed an existing destination and
        then called ``os.rename``, which is non-atomic and leaves a window
        (TOCTOU race) where another process could recreate the destination
        and make the rename fail, or observe the destination missing.
        ``os.replace`` overwrites the destination atomically on Windows as
        well as POSIX, eliminating both the race and the ENOENT
        special-casing.
        """
        os.replace(current_filename, new_filename)
else:
    # On POSIX, ``os.rename`` already silently replaces an existing
    # destination; keep it to preserve the original behavior exactly.
    rename_file = os.rename
def filter_python_deprecation_warnings():
    """
    Invoking this filter acknowledges your runtime will soon be deprecated
    at which time you will stop receiving all updates to your client.
    """
    # Match only the boto3 deprecation warning emitted from this module.
    filter_kwargs = {
        'message': ".*Boto3 will no longer support Python.*",
        'category': PythonDeprecationWarning,
        'module': r".*boto3\.compat",
    }
    warnings.filterwarnings('ignore', **filter_kwargs)
def _warn_deprecated_python():
"""Use this template for future deprecation campaigns as needed."""
py_37_params = {
'date': 'December 13, 2023',
'blog_link': (
'https://aws.amazon.com/blogs/developer/'
'python-support-policy-updates-for-aws-sdks-and-tools/'
)
}
deprecated_versions = {
# Example template for future deprecations
(3, 7): py_37_params,
}
py_version = sys.version_info[:2]
if py_version in deprecated_versions:
params = deprecated_versions[py_version]
warning = (
"Boto3 will no longer support Python {}.{} "
"starting {}. To continue receiving service updates, "
"bug fixes, and security updates please upgrade to Python 3.8 or "
"later. More information can be found here: {}"
).format(py_version[0], py_version[1], params['date'], params['blog_link'])
warnings.warn(warning, PythonDeprecationWarning)

View File

@@ -0,0 +1,167 @@
# Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""
This file contains private functionality for interacting with the AWS
Common Runtime library (awscrt) in boto3.
All code contained within this file is for internal usage within this
project and is not intended for external consumption. All interfaces
contained within are subject to abrupt breaking changes.
"""
import threading
import botocore.exceptions
from botocore.session import Session
from s3transfer.crt import (
BotocoreCRTCredentialsWrapper,
BotocoreCRTRequestSerializer,
CRTTransferManager,
acquire_crt_s3_process_lock,
create_s3_crt_client,
)
# Singletons for CRT-backed transfers
CRT_S3_CLIENT = None
BOTOCORE_CRT_SERIALIZER = None
CLIENT_CREATION_LOCK = threading.Lock()
PROCESS_LOCK_NAME = 'boto3'
def _create_crt_client(session, config, region_name, cred_provider):
    """Create a CRT S3 Client for file transfer.

    Instantiating many of these may lead to degraded performance or
    system resource exhaustion.
    """
    # ``session`` and ``config`` are accepted for interface parity but are
    # not consumed here.
    return create_s3_crt_client(
        region=region_name,
        use_ssl=True,
        crt_credentials_provider=cred_provider,
    )
def _create_crt_request_serializer(session, region_name):
    """Build a CRT request serializer bound to *region_name* with no
    endpoint override."""
    serializer_config = {'region_name': region_name, 'endpoint_url': None}
    return BotocoreCRTRequestSerializer(session, serializer_config)
def _create_crt_s3_client(
    session, config, region_name, credentials, lock, **kwargs
):
    """Create boto3 wrapper class to manage crt lock reference and S3 client."""
    wrapped_creds = BotocoreCRTCredentialsWrapper(credentials)
    provider = wrapped_creds.to_crt_credentials_provider()
    crt_client = _create_crt_client(session, config, region_name, provider)
    return CRTS3Client(crt_client, lock, region_name, wrapped_creds)
def _initialize_crt_transfer_primatives(client, config):
    """Acquire the CRT process lock and build the (serializer, client) pair.

    Returns ``(None, None)`` when the process lock cannot be acquired, in
    which case this process must fall back to the classic s3transfer
    manager.
    """
    # NOTE(review): the misspelling "primatives" is preserved so existing
    # callers of this private helper are not broken.
    process_lock = acquire_crt_s3_process_lock(PROCESS_LOCK_NAME)
    if process_lock is None:
        return None, None
    botocore_session = Session()
    region = client.meta.region_name
    creds = client._get_credentials()
    serializer = _create_crt_request_serializer(botocore_session, region)
    crt_s3_client = _create_crt_s3_client(
        botocore_session, config, region, creds, process_lock
    )
    return serializer, crt_s3_client
def get_crt_s3_client(client, config):
    """Return the process-wide CRT S3 client singleton, creating it once.

    The first caller initializes both module-level singletons under a lock;
    every later call returns the cached client (which may be ``None`` if
    initialization could not acquire the CRT process lock).
    """
    global CRT_S3_CLIENT
    global BOTOCORE_CRT_SERIALIZER
    with CLIENT_CREATION_LOCK:
        if CRT_S3_CLIENT is None:
            BOTOCORE_CRT_SERIALIZER, CRT_S3_CLIENT = (
                _initialize_crt_transfer_primatives(client, config)
            )
    return CRT_S3_CLIENT
class CRTS3Client:
    """
    This wrapper keeps track of our underlying CRT client, the lock used to
    acquire it and the region we've used to instantiate the client.

    Due to limitations in the existing CRT interfaces, we can only make calls
    in a single region and does not support redirects. We track the region to
    ensure we don't use the CRT client when a successful request cannot be made.
    """

    def __init__(self, crt_client, process_lock, region, cred_provider):
        # Pure value holder: the four collaborators are stored as-is.
        self.region = region
        self.crt_client = crt_client
        self.cred_provider = cred_provider
        self.process_lock = process_lock
def is_crt_compatible_request(client, crt_s3_client):
    """
    Boto3 client must use same signing region and credentials
    as the CRT_S3_CLIENT singleton. Otherwise fallback to classic.
    """
    # No CRT client was ever created (e.g. the process lock was taken).
    if crt_s3_client is None:
        return False
    boto3_creds = client._get_credentials()
    if boto3_creds is None:
        return False
    # Both comparisons are evaluated (no short-circuit on region) to keep
    # the credential-provider resolution behavior identical either way.
    frozen = boto3_creds.get_frozen_credentials()
    same_identity = compare_identity(frozen, crt_s3_client.cred_provider)
    same_region = client.meta.region_name == crt_s3_client.region
    return same_region and same_identity
def compare_identity(boto3_creds, crt_s3_creds):
    """Return True when the CRT provider resolves the same credential
    triplet (access key, secret key, session token) as *boto3_creds*.

    ``crt_s3_creds`` is a callable provider; if resolution fails with
    ``NoCredentialsError`` the identities are treated as different.
    """
    try:
        resolved = crt_s3_creds()
    except botocore.exceptions.NoCredentialsError:
        return False
    return (
        boto3_creds.access_key == resolved.access_key_id
        and boto3_creds.secret_key == resolved.secret_access_key
        and boto3_creds.token == resolved.session_token
    )
def create_crt_transfer_manager(client, config):
    """Create a CRTTransferManager for optimized data transfer.

    Returns ``None`` when the request is not CRT-compatible, signalling
    the caller to fall back to the classic transfer manager.
    """
    singleton = get_crt_s3_client(client, config)
    if not is_crt_compatible_request(client, singleton):
        return None
    return CRTTransferManager(singleton.crt_client, BOTOCORE_CRT_SERIALIZER)

View File

@@ -0,0 +1,195 @@
{
"service": {
"actions": {
"CreateStack": {
"request": { "operation": "CreateStack" },
"resource": {
"type": "Stack",
"identifiers": [
{ "target": "Name", "source": "requestParameter", "path": "StackName" }
]
}
}
},
"has": {
"Event": {
"resource": {
"type": "Event",
"identifiers": [
{ "target": "Id", "source": "input" }
]
}
},
"Stack": {
"resource": {
"type": "Stack",
"identifiers": [
{ "target": "Name", "source": "input" }
]
}
}
},
"hasMany": {
"Stacks": {
"request": { "operation": "DescribeStacks" },
"resource": {
"type": "Stack",
"identifiers": [
{ "target": "Name", "source": "response", "path": "Stacks[].StackName" }
],
"path": "Stacks[]"
}
}
}
},
"resources": {
"Event": {
"identifiers": [
{
"name": "Id",
"memberName": "EventId"
}
],
"shape": "StackEvent"
},
"Stack": {
"identifiers": [
{
"name": "Name",
"memberName": "StackName"
}
],
"shape": "Stack",
"load": {
"request": {
"operation": "DescribeStacks",
"params": [
{ "target": "StackName", "source": "identifier", "name": "Name" }
]
},
"path": "Stacks[0]"
},
"actions": {
"CancelUpdate": {
"request": {
"operation": "CancelUpdateStack",
"params": [
{ "target": "StackName", "source": "identifier", "name": "Name" }
]
}
},
"Delete": {
"request": {
"operation": "DeleteStack",
"params": [
{ "target": "StackName", "source": "identifier", "name": "Name" }
]
}
},
"Update": {
"request": {
"operation": "UpdateStack",
"params": [
{ "target": "StackName", "source": "identifier", "name": "Name" }
]
}
}
},
"has": {
"Resource": {
"resource": {
"type": "StackResource",
"identifiers": [
{ "target": "StackName", "source": "identifier", "name": "Name" },
{ "target": "LogicalId", "source": "input" }
]
}
}
},
"hasMany": {
"Events": {
"request": {
"operation": "DescribeStackEvents",
"params": [
{ "target": "StackName", "source": "identifier", "name": "Name" }
]
},
"resource": {
"type": "Event",
"identifiers": [
{ "target": "Id", "source": "response", "path": "StackEvents[].EventId" }
],
"path": "StackEvents[]"
}
},
"ResourceSummaries": {
"request": {
"operation": "ListStackResources",
"params": [
{ "target": "StackName", "source": "identifier", "name": "Name" }
]
},
"resource": {
"type": "StackResourceSummary",
"identifiers": [
{ "target": "LogicalId", "source": "response", "path": "StackResourceSummaries[].LogicalResourceId" },
{ "target": "StackName", "source": "requestParameter", "path": "StackName" }
],
"path": "StackResourceSummaries[]"
}
}
}
},
"StackResource": {
"identifiers": [
{ "name": "StackName" },
{
"name": "LogicalId",
"memberName": "LogicalResourceId"
}
],
"shape": "StackResourceDetail",
"load": {
"request": {
"operation": "DescribeStackResource",
"params": [
{ "target": "LogicalResourceId", "source": "identifier", "name": "LogicalId" },
{ "target": "StackName", "source": "identifier", "name": "StackName" }
]
},
"path": "StackResourceDetail"
},
"has": {
"Stack": {
"resource": {
"type": "Stack",
"identifiers": [
{ "target": "Name", "source": "identifier", "name": "StackName" }
]
}
}
}
},
"StackResourceSummary": {
"identifiers": [
{ "name": "StackName" },
{
"name": "LogicalId",
"memberName": "LogicalResourceId"
}
],
"shape": "StackResourceSummary",
"has": {
"Resource": {
"resource": {
"type": "StackResource",
"identifiers": [
{ "target": "LogicalId", "source": "identifier", "name": "LogicalId" },
{ "target": "StackName", "source": "identifier", "name": "StackName" }
]
}
}
}
}
}
}

View File

@@ -0,0 +1,334 @@
{
"service": {
"has": {
"Alarm": {
"resource": {
"type": "Alarm",
"identifiers": [
{
"target": "Name",
"source": "input"
}
]
}
},
"Metric": {
"resource": {
"type": "Metric",
"identifiers": [
{
"target": "Namespace",
"source": "input"
},
{
"target": "Name",
"source": "input"
}
]
}
}
},
"hasMany": {
"Alarms": {
"request": { "operation": "DescribeAlarms" },
"resource": {
"type": "Alarm",
"identifiers": [
{
"target": "Name",
"source": "response",
"path": "MetricAlarms[].AlarmName"
}
],
"path": "MetricAlarms[]"
}
},
"Metrics": {
"request": { "operation": "ListMetrics" },
"resource": {
"type": "Metric",
"identifiers": [
{
"target": "Namespace",
"source": "response",
"path": "Metrics[].Namespace"
},
{
"target": "Name",
"source": "response",
"path": "Metrics[].MetricName"
}
],
"path": "Metrics[]"
}
}
}
},
"resources": {
"Alarm": {
"identifiers": [
{
"name": "Name",
"memberName": "AlarmName"
}
],
"shape": "MetricAlarm",
"load": {
"request": {
"operation": "DescribeAlarms",
"params": [
{
"target": "AlarmNames[0]",
"source": "identifier",
"name": "Name"
}
]
},
"path": "MetricAlarms[0]"
},
"actions": {
"Delete": {
"request": {
"operation": "DeleteAlarms",
"params": [
{
"target": "AlarmNames[0]",
"source": "identifier",
"name": "Name"
}
]
}
},
"DescribeHistory": {
"request": {
"operation": "DescribeAlarmHistory",
"params": [
{
"target": "AlarmName",
"source": "identifier",
"name": "Name"
}
]
}
},
"DisableActions": {
"request": {
"operation": "DisableAlarmActions",
"params": [
{
"target": "AlarmNames[0]",
"source": "identifier",
"name": "Name"
}
]
}
},
"EnableActions": {
"request": {
"operation": "EnableAlarmActions",
"params": [
{
"target": "AlarmNames[0]",
"source": "identifier",
"name": "Name"
}
]
}
},
"SetState": {
"request": {
"operation": "SetAlarmState",
"params": [
{
"target": "AlarmName",
"source": "identifier",
"name": "Name"
}
]
}
}
},
"batchActions": {
"Delete": {
"request": {
"operation": "DeleteAlarms",
"params": [
{
"target": "AlarmNames[]",
"source": "identifier",
"name": "Name"
}
]
}
},
"DisableActions": {
"request": {
"operation": "DisableAlarmActions",
"params": [
{
"target": "AlarmNames[]",
"source": "identifier",
"name": "Name"
}
]
}
},
"EnableActions": {
"request": {
"operation": "EnableAlarmActions",
"params": [
{
"target": "AlarmNames[]",
"source": "identifier",
"name": "Name"
}
]
}
}
},
"has": {
"Metric": {
"resource": {
"type": "Metric",
"identifiers": [
{
"target": "Namespace",
"source": "data",
"path": "Namespace"
},
{
"target": "Name",
"source": "data",
"path": "MetricName"
}
]
}
}
}
},
"Metric": {
"identifiers": [
{
"name": "Namespace",
"memberName": "Namespace"
},
{
"name": "Name",
"memberName": "MetricName"
}
],
"shape": "Metric",
"load": {
"request": {
"operation": "ListMetrics",
"params": [
{
"target": "MetricName",
"source": "identifier",
"name": "Name"
},
{
"target": "Namespace",
"source": "identifier",
"name": "Namespace"
}
]
},
"path": "Metrics[0]"
},
"actions": {
"GetStatistics": {
"request": {
"operation": "GetMetricStatistics",
"params": [
{
"target": "Namespace",
"source": "identifier",
"name": "Namespace"
},
{
"target": "MetricName",
"source": "identifier",
"name": "Name"
}
]
}
},
"PutAlarm": {
"request": {
"operation": "PutMetricAlarm",
"params": [
{
"target": "Namespace",
"source": "identifier",
"name": "Namespace"
},
{
"target": "MetricName",
"source": "identifier",
"name": "Name"
}
]
},
"resource": {
"type": "Alarm",
"identifiers": [
{
"target": "Name",
"source": "requestParameter",
"path": "AlarmName"
}
]
}
},
"PutData": {
"request": {
"operation": "PutMetricData",
"params": [
{
"target": "Namespace",
"source": "identifier",
"name": "Namespace"
},
{
"target": "MetricData[].MetricName",
"source": "identifier",
"name": "Name"
}
]
}
}
},
"hasMany": {
"Alarms": {
"request": {
"operation": "DescribeAlarmsForMetric",
"params": [
{
"target": "Namespace",
"source": "identifier",
"name": "Namespace"
},
{
"target": "MetricName",
"source": "identifier",
"name": "Name"
}
]
},
"resource": {
"type": "Alarm",
"identifiers": [
{
"target": "Name",
"source": "response",
"path": "MetricAlarms[].AlarmName"
}
],
"path": "MetricAlarms[]"
}
}
}
}
}
}

View File

@@ -0,0 +1,150 @@
{
"service": {
"actions": {
"BatchGetItem": {
"request": { "operation": "BatchGetItem" }
},
"BatchWriteItem": {
"request": { "operation": "BatchWriteItem" }
},
"CreateTable": {
"request": { "operation": "CreateTable" },
"resource": {
"type": "Table",
"identifiers": [
{ "target": "Name", "source": "response", "path": "TableDescription.TableName" }
],
"path": "TableDescription"
}
}
},
"has": {
"Table": {
"resource": {
"type": "Table",
"identifiers": [
{ "target": "Name", "source": "input" }
]
}
}
},
"hasMany": {
"Tables": {
"request": { "operation": "ListTables" },
"resource": {
"type": "Table",
"identifiers": [
{ "target": "Name", "source": "response", "path": "TableNames[]" }
]
}
}
}
},
"resources": {
"Table": {
"identifiers": [
{
"name": "Name",
"memberName": "TableName"
}
],
"shape": "TableDescription",
"load": {
"request": {
"operation": "DescribeTable",
"params": [
{ "target": "TableName", "source": "identifier", "name": "Name" }
]
},
"path": "Table"
},
"actions": {
"Delete": {
"request": {
"operation": "DeleteTable",
"params": [
{ "target": "TableName", "source": "identifier", "name": "Name" }
]
}
},
"DeleteItem": {
"request": {
"operation": "DeleteItem",
"params": [
{ "target": "TableName", "source": "identifier", "name": "Name" }
]
}
},
"GetItem": {
"request": {
"operation": "GetItem",
"params": [
{ "target": "TableName", "source": "identifier", "name": "Name" }
]
}
},
"PutItem": {
"request": {
"operation": "PutItem",
"params": [
{ "target": "TableName", "source": "identifier", "name": "Name" }
]
}
},
"Query": {
"request": {
"operation": "Query",
"params": [
{ "target": "TableName", "source": "identifier", "name": "Name" }
]
}
},
"Scan": {
"request": {
"operation": "Scan",
"params": [
{ "target": "TableName", "source": "identifier", "name": "Name" }
]
}
},
"Update": {
"request": {
"operation": "UpdateTable",
"params": [
{ "target": "TableName", "source": "identifier", "name": "Name" }
]
},
"resource": {
"type": "Table",
"identifiers": [
{ "target": "Name", "source": "identifier", "name": "Name" }
],
"path": "TableDescription"
}
},
"UpdateItem": {
"request": {
"operation": "UpdateItem",
"params": [
{ "target": "TableName", "source": "identifier", "name": "Name" }
]
}
}
},
"waiters":{
"Exists": {
"waiterName": "TableExists",
"params": [
{ "target": "TableName", "source": "identifier", "name": "Name" }
]
},
"NotExists": {
"waiterName": "TableNotExists",
"params": [
{ "target": "TableName", "source": "identifier", "name": "Name" }
]
}
}
}
}
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,581 @@
{
"service": {
"actions": {
"CreateVault": {
"request": {
"operation": "CreateVault",
"params": [
{ "target": "accountId", "source": "string", "value": "-" }
]
},
"resource": {
"type": "Vault",
"identifiers": [
{ "target": "AccountId", "source": "requestParameter", "path": "accountId" },
{ "target": "Name", "source": "requestParameter", "path": "vaultName" }
]
}
}
},
"has": {
"Account": {
"resource": {
"type": "Account",
"identifiers": [
{ "target": "Id", "source": "input" }
]
}
}
},
"hasMany": {
"Vaults": {
"request": {
"operation": "ListVaults",
"params": [
{ "target": "accountId", "source": "string", "value": "-" }
]
},
"resource": {
"type": "Vault",
"identifiers": [
{ "target": "AccountId", "source": "requestParameter", "path": "accountId" },
{ "target": "Name", "source": "response", "path": "VaultList[].VaultName" }
],
"path": "VaultList[]"
}
}
}
},
"resources": {
"Account": {
"identifiers": [
{ "name": "Id" }
],
"actions": {
"CreateVault": {
"request": {
"operation": "CreateVault",
"params": [
{ "target": "accountId", "source": "identifier", "name": "Id" }
]
},
"resource": {
"type": "Vault",
"identifiers": [
{ "target": "AccountId", "source": "identifier", "name": "Id" },
{ "target": "Name", "source": "requestParameter", "path": "vaultName" }
]
}
}
},
"has": {
"Vault": {
"resource": {
"type": "Vault",
"identifiers": [
{ "target": "AccountId", "source": "identifier", "name": "Id" },
{ "target": "Name", "source": "input" }
]
}
}
},
"hasMany": {
"Vaults": {
"request": {
"operation": "ListVaults",
"params": [
{ "target": "accountId", "source": "identifier", "name": "Id" }
]
},
"resource": {
"type": "Vault",
"identifiers": [
{ "target": "AccountId", "source": "identifier", "name": "Id" },
{ "target": "Name", "source": "response", "path": "VaultList[].VaultName" }
],
"path": "VaultList[]"
}
}
}
},
"Archive": {
"identifiers": [
{ "name": "AccountId" },
{ "name": "VaultName" },
{ "name": "Id" }
],
"actions": {
"Delete": {
"request": {
"operation": "DeleteArchive",
"params": [
{ "target": "accountId", "source": "identifier", "name": "AccountId" },
{ "target": "vaultName", "source": "identifier", "name": "VaultName" },
{ "target": "archiveId", "source": "identifier", "name": "Id" }
]
}
},
"InitiateArchiveRetrieval": {
"request": {
"operation": "InitiateJob",
"params": [
{ "target": "vaultName", "source": "identifier", "name": "VaultName" },
{ "target": "accountId", "source": "identifier", "name": "AccountId" },
{ "target": "jobParameters.Type", "source": "string", "value": "archive-retrieval" },
{ "target": "jobParameters.ArchiveId", "source": "identifier", "name": "Id" }
]
},
"resource": {
"type": "Job",
"identifiers": [
{ "target": "Id", "source": "response", "path": "jobId" },
{ "target": "AccountId", "source": "identifier", "name": "AccountId" },
{ "target": "VaultName", "source": "identifier", "name": "VaultName" }
]
}
}
},
"has": {
"Vault": {
"resource": {
"type": "Vault",
"identifiers": [
{ "target": "AccountId", "source": "identifier", "name": "AccountId" },
{ "target": "Name", "source": "identifier", "name": "VaultName" }
]
}
}
}
},
"Job": {
"identifiers": [
{ "name": "AccountId" },
{ "name": "VaultName" },
{
"name": "Id",
"memberName": "JobId"
}
],
"shape": "GlacierJobDescription",
"load": {
"request": {
"operation": "DescribeJob",
"params": [
{ "target": "accountId", "source": "identifier", "name": "AccountId" },
{ "target": "vaultName", "source": "identifier", "name": "VaultName" },
{ "target": "jobId", "source": "identifier", "name": "Id" }
]
},
"path": "@"
},
"actions": {
"GetOutput": {
"request": {
"operation": "GetJobOutput",
"params": [
{ "target": "accountId", "source": "identifier", "name": "AccountId" },
{ "target": "vaultName", "source": "identifier", "name": "VaultName" },
{ "target": "jobId", "source": "identifier", "name": "Id" }
]
}
}
},
"has": {
"Vault": {
"resource": {
"type": "Vault",
"identifiers": [
{ "target": "AccountId", "source": "identifier", "name": "AccountId" },
{ "target": "Name", "source": "identifier", "name": "VaultName" }
]
}
}
}
},
"MultipartUpload": {
"identifiers": [
{ "name": "AccountId" },
{ "name": "VaultName" },
{
"name": "Id",
"memberName": "MultipartUploadId"
}
],
"shape": "UploadListElement",
"actions": {
"Abort": {
"request": {
"operation": "AbortMultipartUpload",
"params": [
{ "target": "accountId", "source": "identifier", "name": "AccountId" },
{ "target": "vaultName", "source": "identifier", "name": "VaultName" },
{ "target": "uploadId", "source": "identifier", "name": "Id" }
]
}
},
"Complete": {
"request": {
"operation": "CompleteMultipartUpload",
"params": [
{ "target": "accountId", "source": "identifier", "name": "AccountId" },
{ "target": "vaultName", "source": "identifier", "name": "VaultName" },
{ "target": "uploadId", "source": "identifier", "name": "Id" }
]
}
},
"Parts": {
"request": {
"operation": "ListParts",
"params": [
{ "target": "accountId", "source": "identifier", "name": "AccountId" },
{ "target": "vaultName", "source": "identifier", "name": "VaultName" },
{ "target": "uploadId", "source": "identifier", "name": "Id" }
]
}
},
"UploadPart": {
"request": {
"operation": "UploadMultipartPart",
"params": [
{ "target": "accountId", "source": "identifier", "name": "AccountId" },
{ "target": "vaultName", "source": "identifier", "name": "VaultName" },
{ "target": "uploadId", "source": "identifier", "name": "Id" }
]
}
}
},
"has": {
"Vault": {
"resource": {
"type": "Vault",
"identifiers": [
{ "target": "AccountId", "source": "identifier", "name": "AccountId" },
{ "target": "Name", "source": "identifier", "name": "VaultName" }
]
}
}
}
},
"Notification": {
"identifiers": [
{ "name": "AccountId" },
{ "name": "VaultName" }
],
"shape": "VaultNotificationConfig",
"load": {
"request": {
"operation": "GetVaultNotifications",
"params": [
{ "target": "accountId", "source": "identifier", "name": "AccountId" },
{ "target": "vaultName", "source": "identifier", "name": "VaultName" }
]
},
"path": "vaultNotificationConfig"
},
"actions": {
"Delete": {
"request": {
"operation": "DeleteVaultNotifications",
"params": [
{ "target": "accountId", "source": "identifier", "name": "AccountId" },
{ "target": "vaultName", "source": "identifier", "name": "VaultName" }
]
}
},
"Set": {
"request": {
"operation": "SetVaultNotifications",
"params": [
{ "target": "accountId", "source": "identifier", "name": "AccountId" },
{ "target": "vaultName", "source": "identifier", "name": "VaultName" }
]
}
}
},
"has": {
"Vault": {
"resource": {
"type": "Vault",
"identifiers": [
{ "target": "AccountId", "source": "identifier", "name": "AccountId" },
{ "target": "Name", "source": "identifier", "name": "VaultName" }
]
}
}
}
},
"Vault": {
"identifiers": [
{ "name": "AccountId" },
{
"name": "Name",
"memberName": "VaultName"
}
],
"shape": "DescribeVaultOutput",
"load": {
"request": {
"operation": "DescribeVault",
"params": [
{ "target": "vaultName", "source": "identifier", "name": "Name" },
{ "target": "accountId", "source": "identifier", "name": "AccountId" }
]
},
"path": "@"
},
"actions": {
"Create": {
"request": {
"operation": "CreateVault",
"params": [
{ "target": "vaultName", "source": "identifier", "name": "Name" },
{ "target": "accountId", "source": "identifier", "name": "AccountId" }
]
}
},
"Delete": {
"request": {
"operation": "DeleteVault",
"params": [
{ "target": "vaultName", "source": "identifier", "name": "Name" },
{ "target": "accountId", "source": "identifier", "name": "AccountId" }
]
}
},
"InitiateInventoryRetrieval": {
"request": {
"operation": "InitiateJob",
"params": [
{ "target": "vaultName", "source": "identifier", "name": "Name" },
{ "target": "accountId", "source": "identifier", "name": "AccountId" },
{ "target": "jobParameters.Type", "source": "string", "value": "inventory-retrieval" }
]
},
"resource": {
"type": "Job",
"identifiers": [
{ "target": "Id", "source": "response", "path": "jobId" },
{ "target": "AccountId", "source": "identifier", "name": "AccountId" },
{ "target": "VaultName", "source": "identifier", "name": "Name" }
]
}
},
"InitiateMultipartUpload": {
"request": {
"operation": "InitiateMultipartUpload",
"params": [
{ "target": "vaultName", "source": "identifier", "name": "Name" },
{ "target": "accountId", "source": "identifier", "name": "AccountId" }
]
},
"resource": {
"type": "MultipartUpload",
"identifiers": [
{ "target": "Id", "source": "response", "path": "uploadId" },
{ "target": "AccountId", "source": "identifier", "name": "AccountId" },
{ "target": "VaultName", "source": "identifier", "name": "Name" }
]
}
},
"UploadArchive": {
"request": {
"operation": "UploadArchive",
"params": [
{ "target": "vaultName", "source": "identifier", "name": "Name" },
{ "target": "accountId", "source": "identifier", "name": "AccountId" }
]
},
"resource": {
"type": "Archive",
"identifiers": [
{ "target": "Id", "source": "response", "path": "archiveId" },
{ "target": "AccountId", "source": "identifier", "name": "AccountId" },
{ "target": "VaultName", "source": "identifier", "name": "Name" }
]
}
}
},
"has": {
"Account": {
"resource": {
"type": "Account",
"identifiers": [
{ "target": "Id", "source": "identifier", "name": "AccountId" }
]
}
},
"Archive": {
"resource": {
"type": "Archive",
"identifiers": [
{ "target": "AccountId", "source": "identifier", "name": "AccountId" },
{ "target": "VaultName", "source": "identifier", "name": "Name" },
{ "target": "Id", "source": "input" }
]
}
},
"Job": {
"resource": {
"type": "Job",
"identifiers": [
{ "target": "AccountId", "source": "identifier", "name": "AccountId" },
{ "target": "VaultName", "source": "identifier", "name": "Name" },
{ "target": "Id", "source": "input" }
]
}
},
"MultipartUpload": {
"resource": {
"type": "MultipartUpload",
"identifiers": [
{ "target": "AccountId", "source": "identifier", "name": "AccountId" },
{ "target": "VaultName", "source": "identifier", "name": "Name" },
{ "target": "Id", "source": "input" }
]
}
},
"Notification": {
"resource": {
"type": "Notification",
"identifiers": [
{ "target": "AccountId", "source": "identifier", "name": "AccountId" },
{ "target": "VaultName", "source": "identifier", "name": "Name" }
]
}
}
},
"hasMany": {
"CompletedJobs": {
"request": {
"operation": "ListJobs",
"params": [
{ "target": "accountId", "source": "identifier", "name": "AccountId" },
{ "target": "vaultName", "source": "identifier", "name": "Name" },
{ "target": "completed", "source": "string", "value": "true" }
]
},
"resource": {
"type": "Job",
"identifiers": [
{ "target": "AccountId", "source": "identifier", "name": "AccountId" },
{ "target": "VaultName", "source": "identifier", "name": "Name" },
{ "target": "Id", "source": "response", "path": "JobList[].JobId" }
],
"path": "JobList[]"
}
},
"FailedJobs": {
"request": {
"operation": "ListJobs",
"params": [
{ "target": "accountId", "source": "identifier", "name": "AccountId" },
{ "target": "vaultName", "source": "identifier", "name": "Name" },
{ "target": "statuscode", "source": "string", "value": "Failed" }
]
},
"resource": {
"type": "Job",
"identifiers": [
{ "target": "AccountId", "source": "identifier", "name": "AccountId" },
{ "target": "VaultName", "source": "identifier", "name": "Name" },
{ "target": "Id", "source": "response", "path": "JobList[].JobId" }
],
"path": "JobList[]"
}
},
"Jobs": {
"request": {
"operation": "ListJobs",
"params": [
{ "target": "accountId", "source": "identifier", "name": "AccountId" },
{ "target": "vaultName", "source": "identifier", "name": "Name" }
]
},
"resource": {
"type": "Job",
"identifiers": [
{ "target": "AccountId", "source": "identifier", "name": "AccountId" },
{ "target": "VaultName", "source": "identifier", "name": "Name" },
{ "target": "Id", "source": "response", "path": "JobList[].JobId" }
],
"path": "JobList[]"
}
},
"JobsInProgress": {
"request": {
"operation": "ListJobs",
"params": [
{ "target": "accountId", "source": "identifier", "name": "AccountId" },
{ "target": "vaultName", "source": "identifier", "name": "Name" },
{ "target": "statuscode", "source": "string", "value": "InProgress" }
]
},
"resource": {
"type": "Job",
"identifiers": [
{ "target": "AccountId", "source": "identifier", "name": "AccountId" },
{ "target": "VaultName", "source": "identifier", "name": "Name" },
{ "target": "Id", "source": "response", "path": "JobList[].JobId" }
],
"path": "JobList[]"
}
},
"MultipartUplaods": {
"request": {
"operation": "ListMultipartUploads",
"params": [
{ "target": "vaultName", "source": "identifier", "name": "Name" },
{ "target": "accountId", "source": "identifier", "name": "AccountId" }
]
},
"resource": {
"type": "MultipartUpload",
"identifiers": [
{ "target": "AccountId", "source": "identifier", "name": "AccountId" },
{ "target": "VaultName", "source": "identifier", "name": "Name" },
{ "target": "Id", "source": "response", "path": "UploadsList[].MultipartUploadId" }
],
"path": "UploadsList[]"
}
},
"MultipartUploads": {
"request": {
"operation": "ListMultipartUploads",
"params": [
{ "target": "vaultName", "source": "identifier", "name": "Name" },
{ "target": "accountId", "source": "identifier", "name": "AccountId" }
]
},
"resource": {
"type": "MultipartUpload",
"identifiers": [
{ "target": "AccountId", "source": "identifier", "name": "AccountId" },
{ "target": "VaultName", "source": "identifier", "name": "Name" },
{ "target": "Id", "source": "response", "path": "UploadsList[].MultipartUploadId" }
],
"path": "UploadsList[]"
}
},
"SucceededJobs": {
"request": {
"operation": "ListJobs",
"params": [
{ "target": "accountId", "source": "identifier", "name": "AccountId" },
{ "target": "vaultName", "source": "identifier", "name": "Name" },
{ "target": "statuscode", "source": "string", "value": "Succeeded" }
]
},
"resource": {
"type": "Job",
"identifiers": [
{ "target": "AccountId", "source": "identifier", "name": "AccountId" },
{ "target": "VaultName", "source": "identifier", "name": "Name" },
{ "target": "Id", "source": "response", "path": "JobList[].JobId" }
],
"path": "JobList[]"
}
}
}
}
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,173 @@
{
"service": {
"actions": {
"CreateStack": {
"request": { "operation": "CreateStack" },
"resource": {
"type": "Stack",
"identifiers": [
{ "target": "Id", "source": "response", "path": "StackId" }
]
}
}
},
"has": {
"Layer": {
"resource": {
"type": "Layer",
"identifiers": [
{ "target": "Id", "source": "input" }
]
}
},
"Stack": {
"resource": {
"type": "Stack",
"identifiers": [
{ "target": "Id", "source": "input" }
]
}
}
},
"hasMany": {
"Stacks": {
"request": { "operation": "DescribeStacks" },
"resource": {
"type": "Stack",
"identifiers": [
{ "target": "Id", "source": "response", "path": "Stacks[].StackId" }
],
"path": "Stacks[]"
}
}
}
},
"resources": {
"Layer": {
"identifiers": [
{ "name": "Id" }
],
"shape": "Layer",
"load": {
"request": {
"operation": "DescribeLayers",
"params": [
{ "target": "LayerIds[]", "source": "identifier", "name": "Id" }
]
},
"path": "Layers[0]"
},
"actions": {
"Delete": {
"request": {
"operation": "DeleteLayer",
"params": [
{ "target": "LayerId", "source": "identifier", "name": "Id" }
]
}
}
},
"has": {
"Stack": {
"resource": {
"type": "Stack",
"identifiers": [
{ "target": "Id", "source": "data", "path": "StackId" }
]
}
}
}
},
"Stack": {
"identifiers": [
{ "name": "Id" }
],
"shape": "Stack",
"load": {
"request": {
"operation": "DescribeStacks",
"params": [
{ "target": "StackIds[]", "source": "identifier", "name": "Id" }
]
},
"path": "Stacks[0]"
},
"actions": {
"CreateLayer": {
"request": {
"operation": "CreateLayer",
"params": [
{ "target": "StackId", "source": "identifier", "name": "Id" }
]
},
"resource": {
"type": "Layer",
"identifiers": [
{ "target": "Id", "source": "response", "path": "LayerId" }
]
}
},
"Delete": {
"request": {
"operation": "DeleteStack",
"params": [
{ "target": "StackId", "source": "identifier", "name": "Id" }
]
}
}
},
"has": {
"Summary": {
"resource": {
"type": "StackSummary",
"identifiers": [
{ "target": "StackId", "source": "identifier", "name": "Id" }
]
}
}
},
"hasMany": {
"Layers": {
"request": {
"operation": "DescribeLayers",
"params": [
{ "target": "StackId", "source": "identifier", "name": "Id" }
]
},
"resource": {
"type": "Layer",
"identifiers": [
{ "target": "Id", "source": "response", "path": "Layers[].LayerId" }
],
"path": "Layers[]"
}
}
}
},
"StackSummary": {
"identifiers": [
{ "name": "StackId" }
],
"shape": "StackSummary",
"load": {
"request": {
"operation": "DescribeStackSummary",
"params": [
{ "target": "StackId", "source": "identifier", "name": "StackId" }
]
},
"path": "StackSummary"
},
"has": {
"Stack": {
"resource": {
"type": "Stack",
"identifiers": [
{ "target": "Id", "source": "identifier", "name": "StackId" }
]
}
}
}
}
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,327 @@
{
"service": {
"actions": {
"CreatePlatformApplication": {
"request": { "operation": "CreatePlatformApplication" },
"resource": {
"type": "PlatformApplication",
"identifiers": [
{ "target": "Arn", "source": "response", "path": "PlatformApplicationArn" }
]
}
},
"CreateTopic": {
"request": { "operation": "CreateTopic" },
"resource": {
"type": "Topic",
"identifiers": [
{ "target": "Arn", "source": "response", "path": "TopicArn" }
]
}
}
},
"has": {
"PlatformApplication": {
"resource": {
"type": "PlatformApplication",
"identifiers": [
{ "target": "Arn", "source": "input" }
]
}
},
"PlatformEndpoint": {
"resource": {
"type": "PlatformEndpoint",
"identifiers": [
{ "target": "Arn", "source": "input" }
]
}
},
"Subscription": {
"resource": {
"type": "Subscription",
"identifiers": [
{ "target": "Arn", "source": "input" }
]
}
},
"Topic": {
"resource": {
"type": "Topic",
"identifiers": [
{ "target": "Arn", "source": "input" }
]
}
}
},
"hasMany": {
"PlatformApplications": {
"request": { "operation": "ListPlatformApplications" },
"resource": {
"type": "PlatformApplication",
"identifiers": [
{ "target": "Arn", "source": "response", "path": "PlatformApplications[].PlatformApplicationArn" }
]
}
},
"Subscriptions": {
"request": { "operation": "ListSubscriptions" },
"resource": {
"type": "Subscription",
"identifiers": [
{ "target": "Arn", "source": "response", "path": "Subscriptions[].SubscriptionArn" }
]
}
},
"Topics": {
"request": { "operation": "ListTopics" },
"resource": {
"type": "Topic",
"identifiers": [
{ "target": "Arn", "source": "response", "path": "Topics[].TopicArn" }
]
}
}
}
},
"resources": {
"PlatformApplication": {
"identifiers": [
{ "name": "Arn" }
],
"shape": "GetPlatformApplicationAttributesResponse",
"load": {
"request": {
"operation": "GetPlatformApplicationAttributes",
"params": [
{ "target": "PlatformApplicationArn", "source": "identifier", "name": "Arn" }
]
},
"path": "@"
},
"actions": {
"CreatePlatformEndpoint": {
"request": {
"operation": "CreatePlatformEndpoint",
"params": [
{ "target": "PlatformApplicationArn", "source": "identifier", "name": "Arn" }
]
},
"resource": {
"type": "PlatformEndpoint",
"identifiers": [
{ "target": "Arn", "source": "response", "path": "EndpointArn" }
]
}
},
"Delete": {
"request": {
"operation": "DeletePlatformApplication",
"params": [
{ "target": "PlatformApplicationArn", "source": "identifier", "name": "Arn" }
]
}
},
"SetAttributes": {
"request": {
"operation": "SetPlatformApplicationAttributes",
"params": [
{ "target": "PlatformApplicationArn", "source": "identifier", "name": "Arn" }
]
}
}
},
"hasMany": {
"Endpoints": {
"request": {
"operation": "ListEndpointsByPlatformApplication",
"params": [
{ "target": "PlatformApplicationArn", "source": "identifier", "name": "Arn" }
]
},
"resource": {
"type": "PlatformEndpoint",
"identifiers": [
{ "target": "Arn", "source": "response", "path": "Endpoints[].EndpointArn" }
]
}
}
}
},
"PlatformEndpoint": {
"identifiers": [
{ "name": "Arn" }
],
"shape": "GetEndpointAttributesResponse",
"load": {
"request": {
"operation": "GetEndpointAttributes",
"params": [
{ "target": "EndpointArn", "source": "identifier", "name": "Arn" }
]
},
"path": "@"
},
"actions": {
"Delete": {
"request": {
"operation": "DeleteEndpoint",
"params": [
{ "target": "EndpointArn", "source": "identifier", "name": "Arn" }
]
}
},
"Publish": {
"request": {
"operation": "Publish",
"params": [
{ "target": "TargetArn", "source": "identifier", "name": "Arn" }
]
}
},
"SetAttributes": {
"request": {
"operation": "SetEndpointAttributes",
"params": [
{ "target": "EndpointArn", "source": "identifier", "name": "Arn" }
]
}
}
}
},
"Subscription": {
"identifiers": [
{ "name": "Arn" }
],
"shape": "GetSubscriptionAttributesResponse",
"load": {
"request": {
"operation": "GetSubscriptionAttributes",
"params": [
{ "target": "SubscriptionArn", "source": "identifier", "name": "Arn" }
]
},
"path": "@"
},
"actions": {
"Delete": {
"request": {
"operation": "Unsubscribe",
"params": [
{ "target": "SubscriptionArn", "source": "identifier", "name": "Arn" }
]
}
},
"SetAttributes": {
"request": {
"operation": "SetSubscriptionAttributes",
"params": [
{ "target": "SubscriptionArn", "source": "identifier", "name": "Arn" }
]
}
}
}
},
"Topic": {
"identifiers": [
{ "name": "Arn" }
],
"shape": "GetTopicAttributesResponse",
"load": {
"request": {
"operation": "GetTopicAttributes",
"params": [
{ "target": "TopicArn", "source": "identifier", "name": "Arn" }
]
},
"path": "@"
},
"actions": {
"AddPermission": {
"request": {
"operation": "AddPermission",
"params": [
{ "target": "TopicArn", "source": "identifier", "name": "Arn" }
]
}
},
"ConfirmSubscription": {
"request": {
"operation": "ConfirmSubscription",
"params": [
{ "target": "TopicArn", "source": "identifier", "name": "Arn" }
]
},
"resource": {
"type": "Subscription",
"identifiers": [
{ "target": "Arn", "source": "response", "path": "SubscriptionArn" }
]
}
},
"Delete": {
"request": {
"operation": "DeleteTopic",
"params": [
{ "target": "TopicArn", "source": "identifier", "name": "Arn" }
]
}
},
"Publish": {
"request": {
"operation": "Publish",
"params": [
{ "target": "TopicArn", "source": "identifier", "name": "Arn" }
]
}
},
"RemovePermission": {
"request": {
"operation": "RemovePermission",
"params": [
{ "target": "TopicArn", "source": "identifier", "name": "Arn" }
]
}
},
"SetAttributes": {
"request": {
"operation": "SetTopicAttributes",
"params": [
{ "target": "TopicArn", "source": "identifier", "name": "Arn" }
]
}
},
"Subscribe": {
"request": {
"operation": "Subscribe",
"params": [
{ "target": "TopicArn", "source": "identifier", "name": "Arn" }
]
},
"resource": {
"type": "Subscription",
"identifiers": [
{ "target": "Arn", "source": "response", "path": "SubscriptionArn" }
]
}
}
},
"hasMany": {
"Subscriptions": {
"request": {
"operation": "ListSubscriptionsByTopic",
"params": [
{ "target": "TopicArn", "source": "identifier", "name": "Arn" }
]
},
"resource": {
"type": "Subscription",
"identifiers": [
{ "target": "Arn", "source": "response", "path": "Subscriptions[].SubscriptionArn" }
]
}
}
}
}
}
}

View File

@@ -0,0 +1,232 @@
{
"service": {
"actions": {
"CreateQueue": {
"request": { "operation": "CreateQueue" },
"resource": {
"type": "Queue",
"identifiers": [
{ "target": "Url", "source": "response", "path": "QueueUrl" }
]
}
},
"GetQueueByName": {
"request": { "operation": "GetQueueUrl" },
"resource": {
"type": "Queue",
"identifiers": [
{ "target": "Url", "source": "response", "path": "QueueUrl" }
]
}
}
},
"has": {
"Queue": {
"resource": {
"type": "Queue",
"identifiers": [
{ "target": "Url", "source": "input" }
]
}
}
},
"hasMany": {
"Queues": {
"request": { "operation": "ListQueues" },
"resource": {
"type": "Queue",
"identifiers": [
{ "target": "Url", "source": "response", "path": "QueueUrls[]" }
]
}
}
}
},
"resources": {
"Message": {
"identifiers": [
{ "name": "QueueUrl" },
{
"name": "ReceiptHandle",
"memberName": "ReceiptHandle"
}
],
"shape": "Message",
"actions": {
"ChangeVisibility": {
"request": {
"operation": "ChangeMessageVisibility",
"params": [
{ "target": "QueueUrl", "source": "identifier", "name": "QueueUrl" },
{ "target": "ReceiptHandle", "source": "identifier", "name": "ReceiptHandle" }
]
}
},
"Delete": {
"request": {
"operation": "DeleteMessage",
"params": [
{ "target": "QueueUrl", "source": "identifier", "name": "QueueUrl" },
{ "target": "ReceiptHandle", "source": "identifier", "name": "ReceiptHandle" }
]
}
}
},
"batchActions": {
"Delete": {
"request": {
"operation": "DeleteMessageBatch",
"params": [
{ "target": "QueueUrl", "source": "identifier", "name": "QueueUrl" },
{ "target": "Entries[*].Id", "source": "data", "path": "MessageId" },
{ "target": "Entries[*].ReceiptHandle", "source": "identifier", "name": "ReceiptHandle" }
]
}
}
},
"has": {
"Queue": {
"resource": {
"type": "Queue",
"identifiers": [
{ "target": "Url", "source": "identifier", "name": "QueueUrl" }
]
}
}
}
},
"Queue": {
"identifiers": [
{ "name": "Url" }
],
"shape": "GetQueueAttributesResult",
"load": {
"request": {
"operation": "GetQueueAttributes",
"params": [
{ "target": "QueueUrl", "source": "identifier", "name": "Url" },
{ "target": "AttributeNames[]", "source": "string", "value": "All" }
]
},
"path": "@"
},
"actions": {
"AddPermission": {
"request": {
"operation": "AddPermission",
"params": [
{ "target": "QueueUrl", "source": "identifier", "name": "Url" }
]
}
},
"ChangeMessageVisibilityBatch": {
"request": {
"operation": "ChangeMessageVisibilityBatch",
"params": [
{ "target": "QueueUrl", "source": "identifier", "name": "Url" }
]
}
},
"Delete": {
"request": {
"operation": "DeleteQueue",
"params": [
{ "target": "QueueUrl", "source": "identifier", "name": "Url" }
]
}
},
"DeleteMessages": {
"request": {
"operation": "DeleteMessageBatch",
"params": [
{ "target": "QueueUrl", "source": "identifier", "name": "Url" }
]
}
},
"Purge": {
"request": {
"operation": "PurgeQueue",
"params": [
{ "target": "QueueUrl", "source": "identifier", "name": "Url" }
]
}
},
"ReceiveMessages": {
"request": {
"operation": "ReceiveMessage",
"params": [
{ "target": "QueueUrl", "source": "identifier", "name": "Url" }
]
},
"resource": {
"type": "Message",
"identifiers": [
{ "target": "QueueUrl", "source": "identifier", "name": "Url" },
{ "target": "ReceiptHandle", "source": "response", "path": "Messages[].ReceiptHandle" }
],
"path": "Messages[]"
}
},
"RemovePermission": {
"request": {
"operation": "RemovePermission",
"params": [
{ "target": "QueueUrl", "source": "identifier", "name": "Url" }
]
}
},
"SendMessage": {
"request": {
"operation": "SendMessage",
"params": [
{ "target": "QueueUrl", "source": "identifier", "name": "Url" }
]
}
},
"SendMessages": {
"request": {
"operation": "SendMessageBatch",
"params": [
{ "target": "QueueUrl", "source": "identifier", "name": "Url" }
]
}
},
"SetAttributes": {
"request": {
"operation": "SetQueueAttributes",
"params": [
{ "target": "QueueUrl", "source": "identifier", "name": "Url" }
]
}
}
},
"has": {
"Message": {
"resource": {
"type": "Message",
"identifiers": [
{ "target": "QueueUrl", "source": "identifier", "name": "Url" },
{ "target": "ReceiptHandle", "source": "input" }
]
}
}
},
"hasMany": {
"DeadLetterSourceQueues": {
"request": {
"operation": "ListDeadLetterSourceQueues",
"params": [
{ "target": "QueueUrl", "source": "identifier", "name": "Url" }
]
},
"resource": {
"type": "Queue",
"identifiers": [
{ "target": "Url", "source": "response", "path": "queueUrls[]" }
]
}
}
}
}
}
}

View File

@@ -0,0 +1,51 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
from botocore.docs import DEPRECATED_SERVICE_NAMES
from boto3.docs.service import ServiceDocumenter
def generate_docs(root_dir, session):
    """Generates the reference documentation for boto3.

    This will go through every available AWS service and output ReSTructured
    text files documenting each service.

    :param root_dir: The directory to write the reference files to. Each
        service's reference documentation is located at
        root_dir/reference/services/service-name.rst

    :param session: The boto3 session
    """
    services_doc_path = os.path.join(root_dir, 'reference', 'services')
    if not os.path.exists(services_doc_path):
        os.makedirs(services_doc_path)

    # Prevents deprecated service names from being generated in docs.
    available_services = [
        service
        for service in session.get_available_services()
        if service not in DEPRECATED_SERVICE_NAMES
    ]
    for service_name in available_services:
        # document_service() returns the rendered bytes for one service.
        docs = ServiceDocumenter(
            service_name, session, services_doc_path
        ).document_service()
        service_doc_path = os.path.join(
            services_doc_path, service_name + '.rst'
        )
        # Written in binary mode because the documenter emits encoded bytes.
        with open(service_doc_path, 'wb') as f:
            f.write(docs)

View File

@@ -0,0 +1,217 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
from botocore import xform_name
from botocore.docs.bcdoc.restdoc import DocumentStructure
from botocore.docs.method import (
document_custom_method,
document_model_driven_method,
)
from botocore.model import OperationModel
from botocore.utils import get_service_module_name
from boto3.docs.base import NestedDocumenter
from boto3.docs.method import document_model_driven_resource_method
from boto3.docs.utils import (
add_resource_type_overview,
get_resource_ignore_params,
get_resource_public_actions,
)
PUT_DATA_WARNING_MESSAGE = """
.. warning::
It is recommended to use the :py:meth:`put_metric_data`
:doc:`client method <../../cloudwatch/client/put_metric_data>`
instead. If you would still like to use this resource method,
please make sure that ``MetricData[].MetricName`` is equal to
the metric resource's ``name`` attribute.
"""
WARNING_MESSAGES = {
"Metric": {"put_data": PUT_DATA_WARNING_MESSAGE},
}
IGNORE_PARAMS = {"Metric": {"put_data": ["Namespace"]}}
class ActionDocumenter(NestedDocumenter):
    """Documents every public action of a resource.

    Each action is rendered into its own nested ``.rst`` file under the
    service's documentation directory.
    """

    def document_actions(self, section):
        """Document all actions of the resource into *section* and
        per-action files.

        :param section: The section to write the actions overview to
        """
        # Index the modeled (JSON-defined) actions by name so they can be
        # told apart from custom (hand-written) methods below.
        modeled_actions_list = self._resource_model.actions
        modeled_actions = {}
        for modeled_action in modeled_actions_list:
            modeled_actions[modeled_action.name] = modeled_action
        resource_actions = get_resource_public_actions(
            self._resource.__class__
        )
        self.member_map['actions'] = sorted(resource_actions)
        add_resource_type_overview(
            section=section,
            resource_type='Actions',
            description=(
                'Actions call operations on resources. They may '
                'automatically handle the passing in of arguments set '
                'from identifiers and some attributes.'
            ),
            intro_link='actions_intro',
        )
        # Per-resource warnings (e.g. Metric.put_data) are surfaced above
        # the action's documentation.
        resource_warnings = WARNING_MESSAGES.get(self._resource_name, {})
        for action_name in sorted(resource_actions):
            # Create a new DocumentStructure for each action and add contents.
            action_doc = DocumentStructure(action_name, target='html')
            breadcrumb_section = action_doc.add_new_section('breadcrumb')
            breadcrumb_section.style.ref(self._resource_class_name, 'index')
            breadcrumb_section.write(f' / Action / {action_name}')
            action_doc.add_title_section(action_name)
            warning_message = resource_warnings.get(action_name)
            if warning_message is not None:
                action_doc.add_new_section("warning").write(warning_message)
            action_section = action_doc.add_new_section(
                action_name,
                context={'qualifier': f'{self.class_name}.'},
            )
            # Dispatch on the kind of action: load/reload (backed by the
            # resource's load model), modeled actions, or custom methods.
            if action_name in ['load', 'reload'] and self._resource_model.load:
                document_load_reload_action(
                    section=action_section,
                    action_name=action_name,
                    resource_name=self._resource_name,
                    event_emitter=self._resource.meta.client.meta.events,
                    load_model=self._resource_model.load,
                    service_model=self._service_model,
                )
            elif action_name in modeled_actions:
                document_action(
                    section=action_section,
                    resource_name=self._resource_name,
                    event_emitter=self._resource.meta.client.meta.events,
                    action_model=modeled_actions[action_name],
                    service_model=self._service_model,
                )
            else:
                document_custom_method(
                    action_section, action_name, resource_actions[action_name]
                )
            # Write actions in individual/nested files.
            # Path: <root>/reference/services/<service>/<resource_name>/<action_name>.rst
            actions_dir_path = os.path.join(
                self._root_docs_path,
                f'{self._service_name}',
                f'{self._resource_sub_path}',
            )
            action_doc.write_to_file(actions_dir_path, action_name)
def document_action(
    section,
    resource_name,
    event_emitter,
    action_model,
    service_model,
    include_signature=True,
):
    """Documents a resource action.

    :param section: The section to write to
    :param resource_name: The name of the resource
    :param event_emitter: The event emitter to use to emit events
    :param action_model: The model of the action
    :param service_model: The model of the service
    :param include_signature: Whether or not to include the signature.
        It is useful for generating docstrings.
    """
    operation_model = service_model.operation_model(
        action_model.request.operation
    )
    # Hand-maintained overrides (IGNORE_PARAMS) win over the params that
    # the action's request model would exclude by default.
    default_ignore = get_resource_ignore_params(action_model.request.params)
    ignore_params = IGNORE_PARAMS.get(resource_name, {}).get(
        action_model.name, default_ignore
    )
    if action_model.resource:
        example_return_value = xform_name(action_model.resource.type)
    else:
        example_return_value = 'response'
    # Service resources keep their service name verbatim in examples.
    example_resource_name = (
        resource_name
        if service_model.service_name == resource_name
        else xform_name(resource_name)
    )
    example_prefix = (
        f'{example_return_value} = {example_resource_name}.{action_model.name}'
    )
    qualifier = section.context.get('qualifier', '')
    document_model_driven_resource_method(
        section=section,
        method_name=qualifier + action_model.name,
        operation_model=operation_model,
        event_emitter=event_emitter,
        method_description=operation_model.documentation,
        example_prefix=example_prefix,
        exclude_input=ignore_params,
        resource_action_model=action_model,
        include_signature=include_signature,
    )
def document_load_reload_action(
    section,
    action_name,
    resource_name,
    event_emitter,
    load_model,
    service_model,
    include_signature=True,
):
    """Documents the resource load action.

    :param section: The section to write to
    :param action_name: The name of the loading action, should be load or reload
    :param resource_name: The name of the resource
    :param event_emitter: The event emitter to use to emit events
    :param load_model: The model of the load action
    :param service_model: The model of the service
    :param include_signature: Whether or not to include the signature.
        It is useful for generating docstrings.
    """
    client_class = get_service_module_name(service_model)
    operation_name = xform_name(load_model.request.operation)
    description = (
        f'Calls :py:meth:`{client_class}.Client.{operation_name}` to update '
        f'the attributes of the {resource_name} resource. Note that the load '
        f'and reload methods are the same method and can be used '
        f'interchangeably.'
    )
    # Service resources keep their service name verbatim in examples.
    if service_model.service_name == resource_name:
        example_resource_name = resource_name
    else:
        example_resource_name = xform_name(resource_name)
    qualifier = section.context.get('qualifier', '')
    document_model_driven_method(
        section=section,
        method_name=qualifier + action_name,
        # load/reload take no input, so document against an empty operation.
        operation_model=OperationModel({}, service_model),
        event_emitter=event_emitter,
        method_description=description,
        example_prefix=f'{example_resource_name}.{action_name}',
        include_signature=include_signature,
    )

View File

@@ -0,0 +1,72 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from botocore.docs.params import ResponseParamsDocumenter
from boto3.docs.utils import get_identifier_description
class ResourceShapeDocumenter(ResponseParamsDocumenter):
    """Documents the members of a resource's shape.

    Reuses the response-parameter documenter; only the event name under
    which documentation customizations are emitted differs.
    """

    EVENT_NAME = 'resource-shape'
def document_attribute(
    section,
    service_name,
    resource_name,
    attr_name,
    event_emitter,
    attr_model,
    include_signature=True,
):
    """Documents a resource data attribute backed by the resource's shape.

    :param section: The section to write to
    :param service_name: The name of the service
    :param resource_name: The name of the resource
    :param attr_name: The name of the attribute
    :param event_emitter: The event emitter to use to emit events
    :param attr_model: The shape model of the attribute
    :param include_signature: Whether or not to include the signature.
        It is useful for generating docstrings.
    """
    if include_signature:
        qualifier = section.context.get('qualifier', '')
        section.style.start_sphinx_py_attr(qualifier + attr_name)
    # An attribute may be backed by one, many, or no operations, so the
    # operation_name is simply set to the resource name; this still allows
    # hooking in to customize a particular attribute if ever needed.
    documenter = ResourceShapeDocumenter(
        service_name=service_name,
        operation_name=resource_name,
        event_emitter=event_emitter,
    )
    documenter.document_params(section=section, shape=attr_model)
def document_identifier(
    section,
    resource_name,
    identifier_model,
    include_signature=True,
):
    """Documents a resource identifier attribute.

    :param section: The section to write to
    :param resource_name: The name of the resource
    :param identifier_model: The model of the identifier
    :param include_signature: Whether or not to include the signature.
        It is useful for generating docstrings.
    """
    if include_signature:
        qualifier = section.context.get('qualifier', '')
        section.style.start_sphinx_py_attr(qualifier + identifier_model.name)
    description = get_identifier_description(
        resource_name, identifier_model.name
    )
    section.write(f'*(string)* {description}')
def document_reference(section, reference_model, include_signature=True):
    """Documents a reference attribute pointing at a related resource.

    :param section: The section to write to
    :param reference_model: The model of the reference
    :param include_signature: Whether or not to include the signature.
        It is useful for generating docstrings.
    """
    if include_signature:
        qualifier = section.context.get('qualifier', '')
        section.style.start_sphinx_py_attr(qualifier + reference_model.name)
    section.write(f'(:py:class:`{reference_model.resource.type}`) ')
    section.include_doc_string(
        f'The related {reference_model.name} if set, otherwise ``None``.'
    )

View File

@@ -0,0 +1,51 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from botocore.compat import OrderedDict
class BaseDocumenter:
    """Holds the shared state that resource documentation generators need.

    Pulls the client, resource model, service model, and naming metadata
    out of a boto3 resource object so subclasses can render documentation
    without re-deriving them.
    """

    def __init__(self, resource):
        meta = resource.meta
        client = meta.client
        self._resource = resource
        self._client = client
        self._resource_model = meta.resource_model
        self._service_model = client.meta.service_model
        self._resource_name = meta.resource_model.name
        self._service_name = self._service_model.service_name
        self._service_docs_name = type(client).__name__
        self.member_map = OrderedDict()
        # A resource named after its service is the service resource itself.
        self.represents_service_resource = (
            self._resource_name == self._service_name
        )
        if self.represents_service_resource:
            self._resource_class_name = 'ServiceResource'
        else:
            self._resource_class_name = self._resource_name

    @property
    def class_name(self):
        """The fully qualified documentation name of this resource class."""
        return '.'.join((self._service_docs_name, self._resource_name))
class NestedDocumenter(BaseDocumenter):
    """BaseDocumenter that writes into a nested per-resource directory.

    Adds the root output path and the per-resource sub-path used when
    documentation is split across individual files.
    """

    def __init__(self, resource, root_docs_path):
        super().__init__(resource)
        self._root_docs_path = root_docs_path
        # The service resource gets a fixed sub-path; everything else is
        # filed under its lower-cased resource name.
        if self.represents_service_resource:
            self._resource_sub_path = 'service-resource'
        else:
            self._resource_sub_path = self._resource_name.lower()

    @property
    def class_name(self):
        """Documentation class name, using 'ServiceResource' for the service."""
        if self.represents_service_resource:
            return f'{self._service_docs_name}.ServiceResource'
        return f'{self._service_docs_name}.{self._resource_name}'

View File

@@ -0,0 +1,28 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from botocore.docs.client import ClientDocumenter
class Boto3ClientDocumenter(ClientDocumenter):
    """ClientDocumenter that renders a boto3-style client creation example."""

    def _add_client_creation_example(self, section):
        # Emits:
        #   import boto3
        #
        #   client = boto3.client('<service>')
        style = section.style
        style.start_codeblock()
        style.new_line()
        section.write('import boto3')
        style.new_line()
        style.new_line()
        section.write(f"client = boto3.client('{self._service_name}')")
        style.end_codeblock()

View File

@@ -0,0 +1,312 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
from botocore import xform_name
from botocore.docs.bcdoc.restdoc import DocumentStructure
from botocore.docs.method import get_instance_public_methods
from botocore.docs.utils import DocumentedShape
from boto3.docs.base import NestedDocumenter
from boto3.docs.method import document_model_driven_resource_method
from boto3.docs.utils import (
add_resource_type_overview,
get_resource_ignore_params,
)
class CollectionDocumenter(NestedDocumenter):
    """Documents every collection exposed by a resource.

    Each collection is rendered into its own nested ``.rst`` file under
    the service's documentation directory.
    """

    def document_collections(self, section):
        """Document all collections of the resource into *section* and
        per-collection files.

        :param section: The section to write the collections overview to
        """
        collections = self._resource.meta.resource_model.collections
        collections_list = []
        add_resource_type_overview(
            section=section,
            resource_type='Collections',
            description=(
                'Collections provide an interface to iterate over and '
                'manipulate groups of resources. '
            ),
            intro_link='guide_collections',
        )
        self.member_map['collections'] = collections_list
        for collection in collections:
            collections_list.append(collection.name)
            # Create a new DocumentStructure for each collection and add contents.
            collection_doc = DocumentStructure(collection.name, target='html')
            breadcrumb_section = collection_doc.add_new_section('breadcrumb')
            breadcrumb_section.style.ref(self._resource_class_name, 'index')
            breadcrumb_section.write(f' / Collection / {collection.name}')
            collection_doc.add_title_section(collection.name)
            collection_section = collection_doc.add_new_section(
                collection.name,
                context={'qualifier': f'{self.class_name}.'},
            )
            self._document_collection(collection_section, collection)
            # Write collections in individual/nested files.
            # Path: <root>/reference/services/<service>/<resource_name>/<collection_name>.rst
            collections_dir_path = os.path.join(
                self._root_docs_path,
                f'{self._service_name}',
                f'{self._resource_sub_path}',
            )
            collection_doc.write_to_file(collections_dir_path, collection.name)

    def _document_collection(self, section, collection):
        """Document one collection: its object plus every public method.

        Modeled batch actions (e.g. delete) are documented from their
        operation models; everything else is documented as a generic
        collection method (all, filter, limit, page_size, ...).
        """
        methods = get_instance_public_methods(
            getattr(self._resource, collection.name)
        )
        document_collection_object(section, collection)
        # Index the modeled batch actions by name for the dispatch below.
        batch_actions = {}
        for batch_action in collection.batch_actions:
            batch_actions[batch_action.name] = batch_action
        for method in sorted(methods):
            method_section = section.add_new_section(method)
            if method in batch_actions:
                document_batch_action(
                    section=method_section,
                    resource_name=self._resource_name,
                    event_emitter=self._resource.meta.client.meta.events,
                    batch_action_model=batch_actions[method],
                    collection_model=collection,
                    service_model=self._resource.meta.client.meta.service_model,
                )
            else:
                document_collection_method(
                    section=method_section,
                    resource_name=self._resource_name,
                    action_name=method,
                    event_emitter=self._resource.meta.client.meta.events,
                    collection_model=collection,
                    service_model=self._resource.meta.client.meta.service_model,
                )
def document_collection_object(
    section,
    collection_model,
    include_signature=True,
):
    """Documents a collection resource object.

    :param section: The section to write to
    :param collection_model: The model of the collection
    :param include_signature: Whether or not to include the signature.
        It is useful for generating docstrings.
    """
    if include_signature:
        qualifier = section.context.get('qualifier', '')
        section.style.start_sphinx_py_attr(qualifier + collection_model.name)
    resource_type = collection_model.resource.type
    section.include_doc_string(
        f'A collection of {resource_type} resources.'
    )
    section.include_doc_string(
        f'A {resource_type} Collection will include all '
        f'resources by default, and extreme caution should be taken when '
        f'performing actions on all resources.'
    )
def document_batch_action(
    section,
    resource_name,
    event_emitter,
    batch_action_model,
    service_model,
    collection_model,
    include_signature=True,
):
    """Documents a collection's batch action.

    :param section: The section to write to
    :param resource_name: The name of the resource
    :param event_emitter: The event emitter to use to emit events
    :param batch_action_model: The model of the batch action
    :param service_model: The model of the service
    :param collection_model: The model of the collection
    :param include_signature: Whether or not to include the signature.
        It is useful for generating docstrings.
    """
    request = batch_action_model.request
    operation_model = service_model.operation_model(request.operation)
    ignore_params = get_resource_ignore_params(request.params)
    if batch_action_model.resource:
        example_return_value = xform_name(batch_action_model.resource.type)
    else:
        example_return_value = 'response'
    # Service resources keep their service name verbatim in examples.
    if service_model.service_name == resource_name:
        example_resource_name = resource_name
    else:
        example_resource_name = xform_name(resource_name)
    example_prefix = (
        f'{example_return_value} = {example_resource_name}'
        f'.{collection_model.name}.{batch_action_model.name}'
    )
    document_model_driven_resource_method(
        section=section,
        method_name=batch_action_model.name,
        operation_model=operation_model,
        event_emitter=event_emitter,
        method_description=operation_model.documentation,
        example_prefix=example_prefix,
        exclude_input=ignore_params,
        resource_action_model=batch_action_model,
        include_signature=include_signature,
    )
def document_collection_method(
    section,
    resource_name,
    action_name,
    event_emitter,
    collection_model,
    service_model,
    include_signature=True,
):
    """Documents a collection method

    :param section: The section to write to
    :param resource_name: The name of the resource
    :param action_name: The name of collection action. Currently only
        can be all, filter, limit, or page_size
    :param event_emitter: The event emitter to use to emit events
    :param collection_model: The model of the collection
    :param service_model: The model of the service
    :param include_signature: Whether or not to include the signature.
        It is useful for generating docstrings.
    """
    operation_model = service_model.operation_model(
        collection_model.request.operation
    )
    # Members of the underlying API operation's input shape; used below to
    # suppress API parameters from actions that take none (all/limit/page_size).
    underlying_operation_members = []
    if operation_model.input_shape:
        underlying_operation_members = operation_model.input_shape.members
    example_resource_name = xform_name(resource_name)
    if service_model.service_name == resource_name:
        example_resource_name = resource_name
    # Per-action documentation overrides: description, example prefix, and
    # which input members to include/exclude for each collection action.
    custom_action_info_dict = {
        'all': {
            'method_description': (
                f'Creates an iterable of all {collection_model.resource.type} '
                f'resources in the collection.'
            ),
            'example_prefix': '{}_iterator = {}.{}.all'.format(
                xform_name(collection_model.resource.type),
                example_resource_name,
                collection_model.name,
            ),
            'exclude_input': underlying_operation_members,
        },
        'filter': {
            'method_description': (
                f'Creates an iterable of all {collection_model.resource.type} '
                f'resources in the collection filtered by kwargs passed to '
                f'method. A {collection_model.resource.type} collection will '
                f'include all resources by default if no filters are provided, '
                f'and extreme caution should be taken when performing actions '
                f'on all resources.'
            ),
            'example_prefix': '{}_iterator = {}.{}.filter'.format(
                xform_name(collection_model.resource.type),
                example_resource_name,
                collection_model.name,
            ),
            # filter() accepts the operation's parameters, minus the ones the
            # resource supplies automatically.
            'exclude_input': get_resource_ignore_params(
                collection_model.request.params
            ),
        },
        'limit': {
            'method_description': (
                f'Creates an iterable up to a specified amount of '
                f'{collection_model.resource.type} resources in the collection.'
            ),
            'example_prefix': '{}_iterator = {}.{}.limit'.format(
                xform_name(collection_model.resource.type),
                example_resource_name,
                collection_model.name,
            ),
            # limit() takes only a synthetic 'count' argument, not API params.
            'include_input': [
                DocumentedShape(
                    name='count',
                    type_name='integer',
                    documentation=(
                        'The limit to the number of resources '
                        'in the iterable.'
                    ),
                )
            ],
            'exclude_input': underlying_operation_members,
        },
        'page_size': {
            'method_description': (
                f'Creates an iterable of all {collection_model.resource.type} '
                f'resources in the collection, but limits the number of '
                f'items returned by each service call by the specified amount.'
            ),
            'example_prefix': '{}_iterator = {}.{}.page_size'.format(
                xform_name(collection_model.resource.type),
                example_resource_name,
                collection_model.name,
            ),
            # page_size() also takes only a synthetic 'count' argument.
            'include_input': [
                DocumentedShape(
                    name='count',
                    type_name='integer',
                    documentation=(
                        'The number of items returned by each ' 'service call'
                    ),
                )
            ],
            'exclude_input': underlying_operation_members,
        },
    }
    # Unknown action names are silently skipped.
    if action_name in custom_action_info_dict:
        action_info = custom_action_info_dict[action_name]
        document_model_driven_resource_method(
            section=section,
            method_name=action_name,
            operation_model=operation_model,
            event_emitter=event_emitter,
            resource_action_model=collection_model,
            include_signature=include_signature,
            **action_info,
        )

View File

@@ -0,0 +1,77 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from botocore.docs.docstring import LazyLoadedDocstring
from boto3.docs.action import document_action, document_load_reload_action
from boto3.docs.attr import (
document_attribute,
document_identifier,
document_reference,
)
from boto3.docs.collection import (
document_batch_action,
document_collection_method,
document_collection_object,
)
from boto3.docs.subresource import document_sub_resource
from boto3.docs.waiter import document_resource_waiter
class ActionDocstring(LazyLoadedDocstring):
    """Lazily rendered docstring for a resource action."""
    def _write_docstring(self, *args, **kwargs):
        document_action(*args, **kwargs)
class LoadReloadDocstring(LazyLoadedDocstring):
    """Lazily rendered docstring for a resource's load/reload action."""
    def _write_docstring(self, *args, **kwargs):
        document_load_reload_action(*args, **kwargs)
class SubResourceDocstring(LazyLoadedDocstring):
    """Lazily rendered docstring for a sub-resource factory method."""
    def _write_docstring(self, *args, **kwargs):
        document_sub_resource(*args, **kwargs)
class AttributeDocstring(LazyLoadedDocstring):
    """Lazily rendered docstring for a resource attribute."""
    def _write_docstring(self, *args, **kwargs):
        document_attribute(*args, **kwargs)
class IdentifierDocstring(LazyLoadedDocstring):
    """Lazily rendered docstring for a resource identifier."""
    def _write_docstring(self, *args, **kwargs):
        document_identifier(*args, **kwargs)
class ReferenceDocstring(LazyLoadedDocstring):
    """Lazily rendered docstring for a resource reference."""
    def _write_docstring(self, *args, **kwargs):
        document_reference(*args, **kwargs)
class CollectionDocstring(LazyLoadedDocstring):
    """Lazily rendered docstring for a collection object."""
    def _write_docstring(self, *args, **kwargs):
        document_collection_object(*args, **kwargs)
class CollectionMethodDocstring(LazyLoadedDocstring):
    """Lazily rendered docstring for a collection method (all/filter/limit/page_size)."""
    def _write_docstring(self, *args, **kwargs):
        document_collection_method(*args, **kwargs)
class BatchActionDocstring(LazyLoadedDocstring):
    """Lazily rendered docstring for a collection batch action."""
    def _write_docstring(self, *args, **kwargs):
        document_batch_action(*args, **kwargs)
class ResourceWaiterDocstring(LazyLoadedDocstring):
    """Lazily rendered docstring for a resource waiter (``wait_until_*``)."""
    def _write_docstring(self, *args, **kwargs):
        document_resource_waiter(*args, **kwargs)

View File

@@ -0,0 +1,77 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from botocore.docs.method import document_model_driven_method
def document_model_driven_resource_method(
    section,
    method_name,
    operation_model,
    event_emitter,
    method_description=None,
    example_prefix=None,
    include_input=None,
    include_output=None,
    exclude_input=None,
    exclude_output=None,
    document_output=True,
    resource_action_model=None,
    include_signature=True,
):
    """Documents a resource method backed by an API operation model.

    Delegates to botocore's ``document_model_driven_method`` and then, when
    the action returns a resource, replaces the auto-generated return
    section with one describing the resource (or list of resources).

    :param section: The section to write to
    :param method_name: The name of the method being documented
    :param operation_model: The model of the underlying API operation
    :param event_emitter: The event emitter to use to emit events
    :param method_description: Optional override for the method description
    :param example_prefix: The prefix to use for the usage example
    :param include_input: Extra input parameters to document
    :param include_output: Extra output members to document
    :param exclude_input: Input parameters to omit from the docs
    :param exclude_output: Output members to omit from the docs
    :param document_output: Whether or not to document the output
    :param resource_action_model: The resource action model, if any; used
        to rewrite the return section when the action returns a resource
    :param include_signature: Whether or not to include the signature.
        It is useful for generating docstrings.
    """
    document_model_driven_method(
        section=section,
        method_name=method_name,
        operation_model=operation_model,
        event_emitter=event_emitter,
        method_description=method_description,
        example_prefix=example_prefix,
        include_input=include_input,
        include_output=include_output,
        exclude_input=exclude_input,
        exclude_output=exclude_output,
        document_output=document_output,
        include_signature=include_signature,
    )
    # If this action returns a resource modify the return example to
    # appropriately reflect that. Guard against the default of None so a
    # caller that omits resource_action_model does not hit AttributeError.
    if resource_action_model is not None and resource_action_model.resource:
        if 'return' in section.available_sections:
            section.delete_section('return')
        resource_type = resource_action_model.resource.type
        new_return_section = section.add_new_section('return')
        return_resource_type = '{}.{}'.format(
            operation_model.service_model.service_name, resource_type
        )
        return_type = f':py:class:`{return_resource_type}`'
        return_description = f'{resource_type} resource'
        # Identifiers resolved via a JMESPath list projection mean the
        # action returns a list of resources instead of a single one.
        if _method_returns_resource_list(resource_action_model.resource):
            return_type = f'list({return_type})'
            return_description = f'A list of {resource_type} resources'
        new_return_section.style.new_line()
        new_return_section.write(f':rtype: {return_type}')
        new_return_section.style.new_line()
        new_return_section.write(f':returns: {return_description}')
        new_return_section.style.new_line()
def _method_returns_resource_list(resource):
for identifier in resource.identifiers:
if identifier.path and '[]' in identifier.path:
return True
return False

View File

@@ -0,0 +1,364 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
from botocore import xform_name
from botocore.docs.bcdoc.restdoc import DocumentStructure
from botocore.docs.utils import get_official_service_name
from boto3.docs.action import ActionDocumenter
from boto3.docs.attr import (
document_attribute,
document_identifier,
document_reference,
)
from boto3.docs.base import BaseDocumenter
from boto3.docs.collection import CollectionDocumenter
from boto3.docs.subresource import SubResourceDocumenter
from boto3.docs.utils import (
add_resource_type_overview,
get_identifier_args_for_signature,
get_identifier_description,
get_identifier_values_for_example,
)
from boto3.docs.waiter import WaiterResourceDocumenter
class ResourceDocumenter(BaseDocumenter):
    """Generates reStructuredText documentation for a boto3 resource class.

    Writes a title, intro (signature, description, example, params), and a
    section per member type (identifiers, attributes, references, actions,
    sub-resources, collections, waiters); each member is also written to
    its own nested ``.rst`` file under ``root_docs_path``.
    """
    def __init__(self, resource, botocore_session, root_docs_path):
        super().__init__(resource)
        self._botocore_session = botocore_session
        self._root_docs_path = root_docs_path
        # Sub-directory for the resource's per-member pages; the service
        # resource itself gets the fixed 'service-resource' directory.
        self._resource_sub_path = self._resource_name.lower()
        if self._resource_name == self._service_name:
            self._resource_sub_path = 'service-resource'
    def document_resource(self, section):
        """Document the resource: title, note, intro, then every member type."""
        self._add_title(section)
        self._add_resource_note(section)
        self._add_intro(section)
        self._add_identifiers(section)
        self._add_attributes(section)
        self._add_references(section)
        self._add_actions(section)
        self._add_sub_resources(section)
        self._add_collections(section)
        self._add_waiters(section)
    def _add_title(self, section):
        """Write the resource name as the page heading."""
        title_section = section.add_new_section('title')
        title_section.style.h2(self._resource_name)
    def _add_intro(self, section):
        """Write the class signature, description, example, and parameters."""
        identifier_names = []
        if self._resource_model.identifiers:
            for identifier in self._resource_model.identifiers:
                identifier_names.append(identifier.name)
        # Write out the class signature.
        class_args = get_identifier_args_for_signature(identifier_names)
        start_class = section.add_new_section('start_class')
        start_class.style.start_sphinx_py_class(
            class_name=f'{self.class_name}({class_args})'
        )
        # Add as short description about the resource
        description_section = start_class.add_new_section('description')
        self._add_description(description_section)
        # Add an example of how to instantiate the resource
        example_section = start_class.add_new_section('example')
        self._add_example(example_section, identifier_names)
        # Add the description for the parameters to instantiate the
        # resource.
        param_section = start_class.add_new_section('params')
        self._add_params_description(param_section, identifier_names)
        end_class = section.add_new_section('end_class')
        end_class.style.end_sphinx_py_class()
    def _add_description(self, section):
        """Write a one-line description naming the official service."""
        official_service_name = get_official_service_name(self._service_model)
        section.write(
            'A resource representing an {} {}'.format(
                official_service_name, self._resource_name
            )
        )
    def _add_example(self, section, identifier_names):
        """Write a code block showing how to instantiate the resource."""
        section.style.start_codeblock()
        section.style.new_line()
        section.write('import boto3')
        section.style.new_line()
        section.style.new_line()
        section.write(
            '{} = boto3.resource(\'{}\')'.format(
                self._service_name, self._service_name
            )
        )
        section.style.new_line()
        example_values = get_identifier_values_for_example(identifier_names)
        section.write(
            '{} = {}.{}({})'.format(
                xform_name(self._resource_name),
                self._service_name,
                self._resource_name,
                example_values,
            )
        )
        section.style.end_codeblock()
    def _add_params_description(self, section, identifier_names):
        """Write ``:type:``/``:param:`` lines for each constructor identifier."""
        for identifier_name in identifier_names:
            description = get_identifier_description(
                self._resource_name, identifier_name
            )
            section.write(f':type {identifier_name}: string')
            section.style.new_line()
            section.write(f':param {identifier_name}: {description}')
            section.style.new_line()
    def _add_overview_of_member_type(self, section, resource_member_type):
        """Write a toctree listing the members collected in ``member_map``."""
        section.style.new_line()
        section.write(
            f'These are the resource\'s available {resource_member_type}:'
        )
        section.style.new_line()
        section.style.toctree()
        for member in self.member_map[resource_member_type]:
            section.style.tocitem(f'{member}')
    def _add_identifiers(self, section):
        """Document each identifier in its own nested ``.rst`` file."""
        identifiers = self._resource.meta.resource_model.identifiers
        section = section.add_new_section('identifiers')
        member_list = []
        if identifiers:
            self.member_map['identifiers'] = member_list
            add_resource_type_overview(
                section=section,
                resource_type='Identifiers',
                description=(
                    'Identifiers are properties of a resource that are '
                    'set upon instantiation of the resource.'
                ),
                intro_link='identifiers_attributes_intro',
            )
        for identifier in identifiers:
            member_list.append(identifier.name)
            # Create a new DocumentStructure for each identifier and add contents.
            identifier_doc = DocumentStructure(identifier.name, target='html')
            breadcrumb_section = identifier_doc.add_new_section('breadcrumb')
            breadcrumb_section.style.ref(self._resource_class_name, 'index')
            breadcrumb_section.write(f' / Identifier / {identifier.name}')
            identifier_doc.add_title_section(identifier.name)
            identifier_section = identifier_doc.add_new_section(
                identifier.name,
                context={'qualifier': f'{self.class_name}.'},
            )
            document_identifier(
                section=identifier_section,
                resource_name=self._resource_name,
                identifier_model=identifier,
            )
            # Write identifiers in individual/nested files.
            # Path: <root>/reference/services/<service>/<resource_name>/<identifier_name>.rst
            identifiers_dir_path = os.path.join(
                self._root_docs_path,
                f'{self._service_name}',
                f'{self._resource_sub_path}',
            )
            identifier_doc.write_to_file(identifiers_dir_path, identifier.name)
        if identifiers:
            self._add_overview_of_member_type(section, 'identifiers')
    def _add_attributes(self, section):
        """Document each modeled attribute in its own nested ``.rst`` file."""
        service_model = self._resource.meta.client.meta.service_model
        attributes = {}
        if self._resource.meta.resource_model.shape:
            shape = service_model.shape_for(
                self._resource.meta.resource_model.shape
            )
            attributes = self._resource.meta.resource_model.get_attributes(
                shape
            )
        section = section.add_new_section('attributes')
        attribute_list = []
        if attributes:
            add_resource_type_overview(
                section=section,
                resource_type='Attributes',
                description=(
                    'Attributes provide access'
                    ' to the properties of a resource. Attributes are lazy-'
                    'loaded the first time one is accessed via the'
                    ' :py:meth:`load` method.'
                ),
                intro_link='identifiers_attributes_intro',
            )
            self.member_map['attributes'] = attribute_list
        for attr_name in sorted(attributes):
            _, attr_shape = attributes[attr_name]
            attribute_list.append(attr_name)
            # Create a new DocumentStructure for each attribute and add contents.
            attribute_doc = DocumentStructure(attr_name, target='html')
            breadcrumb_section = attribute_doc.add_new_section('breadcrumb')
            breadcrumb_section.style.ref(self._resource_class_name, 'index')
            breadcrumb_section.write(f' / Attribute / {attr_name}')
            attribute_doc.add_title_section(attr_name)
            attribute_section = attribute_doc.add_new_section(
                attr_name,
                context={'qualifier': f'{self.class_name}.'},
            )
            document_attribute(
                section=attribute_section,
                service_name=self._service_name,
                resource_name=self._resource_name,
                attr_name=attr_name,
                event_emitter=self._resource.meta.client.meta.events,
                attr_model=attr_shape,
            )
            # Write attributes in individual/nested files.
            # Path: <root>/reference/services/<service>/<resource_name>/<attribute_name>.rst
            attributes_dir_path = os.path.join(
                self._root_docs_path,
                f'{self._service_name}',
                f'{self._resource_sub_path}',
            )
            attribute_doc.write_to_file(attributes_dir_path, attr_name)
        if attributes:
            self._add_overview_of_member_type(section, 'attributes')
    def _add_references(self, section):
        """Document each reference in its own nested ``.rst`` file."""
        section = section.add_new_section('references')
        references = self._resource.meta.resource_model.references
        reference_list = []
        if references:
            add_resource_type_overview(
                section=section,
                resource_type='References',
                description=(
                    'References are related resource instances that have '
                    'a belongs-to relationship.'
                ),
                intro_link='references_intro',
            )
            self.member_map['references'] = reference_list
            # NOTE(review): the overview is emitted here while reference_list
            # is still empty and again after the loop below once it is filled;
            # _add_identifiers/_add_attributes only emit it once, after their
            # loops. This looks like a duplicated call — confirm intended output.
            self._add_overview_of_member_type(section, 'references')
        for reference in references:
            reference_list.append(reference.name)
            # Create a new DocumentStructure for each reference and add contents.
            reference_doc = DocumentStructure(reference.name, target='html')
            breadcrumb_section = reference_doc.add_new_section('breadcrumb')
            breadcrumb_section.style.ref(self._resource_class_name, 'index')
            breadcrumb_section.write(f' / Reference / {reference.name}')
            reference_doc.add_title_section(reference.name)
            reference_section = reference_doc.add_new_section(
                reference.name,
                context={'qualifier': f'{self.class_name}.'},
            )
            document_reference(
                section=reference_section,
                reference_model=reference,
            )
            # Write references in individual/nested files.
            # Path: <root>/reference/services/<service>/<resource_name>/<reference_name>.rst
            references_dir_path = os.path.join(
                self._root_docs_path,
                f'{self._service_name}',
                f'{self._resource_sub_path}',
            )
            reference_doc.write_to_file(references_dir_path, reference.name)
        if references:
            self._add_overview_of_member_type(section, 'references')
    def _add_actions(self, section):
        """Delegate action documentation to ActionDocumenter."""
        section = section.add_new_section('actions')
        actions = self._resource.meta.resource_model.actions
        if actions:
            documenter = ActionDocumenter(self._resource, self._root_docs_path)
            documenter.member_map = self.member_map
            documenter.document_actions(section)
            self._add_overview_of_member_type(section, 'actions')
    def _add_sub_resources(self, section):
        """Delegate sub-resource documentation to SubResourceDocumenter."""
        section = section.add_new_section('sub-resources')
        sub_resources = self._resource.meta.resource_model.subresources
        if sub_resources:
            documenter = SubResourceDocumenter(
                self._resource, self._root_docs_path
            )
            documenter.member_map = self.member_map
            documenter.document_sub_resources(section)
            self._add_overview_of_member_type(section, 'sub-resources')
    def _add_collections(self, section):
        """Delegate collection documentation to CollectionDocumenter."""
        section = section.add_new_section('collections')
        collections = self._resource.meta.resource_model.collections
        if collections:
            documenter = CollectionDocumenter(
                self._resource, self._root_docs_path
            )
            documenter.member_map = self.member_map
            documenter.document_collections(section)
            self._add_overview_of_member_type(section, 'collections')
    def _add_waiters(self, section):
        """Delegate waiter documentation to WaiterResourceDocumenter."""
        section = section.add_new_section('waiters')
        waiters = self._resource.meta.resource_model.waiters
        if waiters:
            service_waiter_model = self._botocore_session.get_waiter_model(
                self._service_name
            )
            documenter = WaiterResourceDocumenter(
                self._resource, service_waiter_model, self._root_docs_path
            )
            documenter.member_map = self.member_map
            documenter.document_resource_waiters(section)
            self._add_overview_of_member_type(section, 'waiters')
    def _add_resource_note(self, section):
        """Write the feature-freeze note pointing readers at the user guide."""
        section = section.add_new_section('feature-freeze')
        section.style.start_note()
        section.write(
            "Before using anything on this page, please refer to the resources "
            ":doc:`user guide <../../../../guide/resources>` for the most recent "
            "guidance on using resources."
        )
        section.style.end_note()
class ServiceResourceDocumenter(ResourceDocumenter):
    """Documents a service's top-level ``ServiceResource``.

    Overrides the title, description, and example because a service
    resource is obtained via ``boto3.resource(...)`` with no identifiers.
    """
    @property
    def class_name(self):
        # e.g. 'EC2.ServiceResource'
        return f'{self._service_docs_name}.ServiceResource'
    def _add_title(self, section):
        """Use a fixed 'Service Resource' heading instead of the resource name."""
        title_section = section.add_new_section('title')
        title_section.style.h2('Service Resource')
    def _add_description(self, section):
        """Write a one-line description naming the official service."""
        official_service_name = get_official_service_name(self._service_model)
        section.write(f'A resource representing {official_service_name}')
    def _add_example(self, section, identifier_names):
        """Show how to obtain the service resource via ``boto3.resource``."""
        section.style.start_codeblock()
        section.style.new_line()
        section.write('import boto3')
        section.style.new_line()
        section.style.new_line()
        section.write(
            f'{self._service_name} = boto3.resource(\'{self._service_name}\')'
        )
        section.style.end_codeblock()

View File

@@ -0,0 +1,202 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
from botocore.docs.bcdoc.restdoc import DocumentStructure
from botocore.docs.service import ServiceDocumenter as BaseServiceDocumenter
from botocore.exceptions import DataNotFoundError
import boto3
from boto3.docs.client import Boto3ClientDocumenter
from boto3.docs.resource import ResourceDocumenter, ServiceResourceDocumenter
from boto3.utils import ServiceContext
class ServiceDocumenter(BaseServiceDocumenter):
    """Documents an entire boto3 service: client, paginators, waiters,
    resources, examples, and client context parameters.

    Extends botocore's ServiceDocumenter with the resource-related
    sections that only exist in boto3.
    """
    # The path used to find examples
    EXAMPLE_PATH = os.path.join(os.path.dirname(boto3.__file__), 'examples')
    def __init__(self, service_name, session, root_docs_path):
        super().__init__(
            service_name=service_name,
            # I know that this is an internal attribute, but the botocore session
            # is needed to load the paginator and waiter models.
            session=session._session,
            root_docs_path=root_docs_path,
        )
        self._boto3_session = session
        self._client = self._boto3_session.client(service_name)
        # Only services with a resources-1 model have a service resource.
        self._service_resource = None
        if self._service_name in self._boto3_session.get_available_resources():
            self._service_resource = self._boto3_session.resource(service_name)
        self.sections = [
            'title',
            'client',
            'paginators',
            'waiters',
            'resources',
            'examples',
            'context-params',
        ]
        self._root_docs_path = root_docs_path
        self._USER_GUIDE_LINK = (
            'https://boto3.amazonaws.com/'
            'v1/documentation/api/latest/guide/resources.html'
        )
    def document_service(self):
        """Documents an entire service.

        :returns: The reStructured text of the documented service.
        """
        doc_structure = DocumentStructure(
            self._service_name, section_names=self.sections, target='html'
        )
        self.title(doc_structure.get_section('title'))
        self.client_api(doc_structure.get_section('client'))
        self.paginator_api(doc_structure.get_section('paginators'))
        self.waiter_api(doc_structure.get_section('waiters'))
        if self._service_resource:
            self.resource_section(doc_structure.get_section('resources'))
        self._document_examples(doc_structure.get_section('examples'))
        context_params_section = doc_structure.get_section('context-params')
        self.client_context_params(context_params_section)
        return doc_structure.flush_structure()
    def client_api(self, section):
        """Document the client API, with shared examples if available."""
        examples = None
        try:
            examples = self.get_examples(self._service_name)
        except DataNotFoundError:
            # Not every service ships example data; document without it.
            pass
        Boto3ClientDocumenter(
            self._client, self._root_docs_path, examples
        ).document_client(section)
    def resource_section(self, section):
        """Write the 'Resources' overview section with a toctree of resources."""
        section.style.h2('Resources')
        section.style.new_line()
        section.write(
            'Resources are available in boto3 via the '
            '``resource`` method. For more detailed instructions '
            'and examples on the usage of resources, see the '
            'resources '
        )
        section.style.external_link(
            title='user guide',
            link=self._USER_GUIDE_LINK,
        )
        section.write('.')
        section.style.new_line()
        section.style.new_line()
        section.write('The available resources are:')
        section.style.new_line()
        section.style.toctree()
        self._document_service_resource(section)
        self._document_resources(section)
    def _document_service_resource(self, section):
        """Document the service resource into its own nested index file."""
        # Create a new DocumentStructure for each Service Resource and add contents.
        service_resource_doc = DocumentStructure(
            'service-resource', target='html'
        )
        breadcrumb_section = service_resource_doc.add_new_section('breadcrumb')
        breadcrumb_section.style.ref(
            self._client.__class__.__name__, f'../../{self._service_name}'
        )
        breadcrumb_section.write(' / Resource / ServiceResource')
        ServiceResourceDocumenter(
            self._service_resource, self._session, self._root_docs_path
        ).document_resource(service_resource_doc)
        # Write collections in individual/nested files.
        # Path: <root>/reference/services/<service>/<resource_name>/<collection_name>.rst
        resource_name = self._service_resource.meta.resource_model.name
        if resource_name == self._service_name:
            resource_name = 'service-resource'
        service_resource_dir_path = os.path.join(
            self._root_docs_path,
            f'{self._service_name}',
            f'{resource_name.lower()}',
        )
        service_resource_doc.write_to_file(service_resource_dir_path, 'index')
        section.style.tocitem(f'{self._service_name}/{resource_name}/index')
    def _document_resources(self, section):
        """Instantiate and document every resource in the resources-1 model."""
        # Placeholder identifier value used to construct example instances.
        temp_identifier_value = 'foo'
        loader = self._session.get_component('data_loader')
        json_resource_model = loader.load_service_model(
            self._service_name, 'resources-1'
        )
        service_model = self._service_resource.meta.client.meta.service_model
        for resource_name in json_resource_model['resources']:
            resource_model = json_resource_model['resources'][resource_name]
            resource_cls = (
                self._boto3_session.resource_factory.load_from_definition(
                    resource_name=resource_name,
                    single_resource_json_definition=resource_model,
                    service_context=ServiceContext(
                        service_name=self._service_name,
                        resource_json_definitions=json_resource_model[
                            'resources'
                        ],
                        service_model=service_model,
                        service_waiter_model=None,
                    ),
                )
            )
            identifiers = resource_cls.meta.resource_model.identifiers
            args = []
            for _ in identifiers:
                args.append(temp_identifier_value)
            resource = resource_cls(*args, client=self._client)
            # Create a new DocumentStructure for each Resource and add contents.
            resource_name = resource.meta.resource_model.name.lower()
            resource_doc = DocumentStructure(resource_name, target='html')
            breadcrumb_section = resource_doc.add_new_section('breadcrumb')
            breadcrumb_section.style.ref(
                self._client.__class__.__name__, f'../../{self._service_name}'
            )
            breadcrumb_section.write(
                f' / Resource / {resource.meta.resource_model.name}'
            )
            ResourceDocumenter(
                resource, self._session, self._root_docs_path
            ).document_resource(
                resource_doc.add_new_section(resource.meta.resource_model.name)
            )
            # Write collections in individual/nested files.
            # Path: <root>/reference/services/<service>/<resource_name>/<index>.rst
            service_resource_dir_path = os.path.join(
                self._root_docs_path,
                f'{self._service_name}',
                f'{resource_name}',
            )
            resource_doc.write_to_file(service_resource_dir_path, 'index')
            section.style.tocitem(
                f'{self._service_name}/{resource_name}/index'
            )
    def _get_example_file(self):
        """Return the absolute path of the service's examples ``.rst`` file."""
        return os.path.realpath(
            os.path.join(self.EXAMPLE_PATH, self._service_name + '.rst')
        )
    def _document_examples(self, section):
        """Inline the service's examples file into the docs, if one exists."""
        examples_file = self._get_example_file()
        if os.path.isfile(examples_file):
            section.style.h2('Examples')
            section.style.new_line()
            with open(examples_file) as f:
                section.write(f.read())

View File

@@ -0,0 +1,153 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
from botocore import xform_name
from botocore.docs.bcdoc.restdoc import DocumentStructure
from botocore.utils import get_service_module_name
from boto3.docs.base import NestedDocumenter
from boto3.docs.utils import (
add_resource_type_overview,
get_identifier_args_for_signature,
get_identifier_description,
get_identifier_values_for_example,
)
class SubResourceDocumenter(NestedDocumenter):
    """Documents a resource's sub-resources, one nested ``.rst`` file each."""
    def document_sub_resources(self, section):
        """Write the sub-resources overview and a page per sub-resource.

        :param section: The section to write the overview to
        """
        add_resource_type_overview(
            section=section,
            resource_type='Sub-resources',
            description=(
                'Sub-resources are methods that create a new instance of a'
                ' child resource. This resource\'s identifiers get passed'
                ' along to the child.'
            ),
            intro_link='subresources_intro',
        )
        # Sort by name for deterministic, alphabetical output.
        sub_resources = sorted(
            self._resource.meta.resource_model.subresources,
            key=lambda sub_resource: sub_resource.name,
        )
        sub_resources_list = []
        self.member_map['sub-resources'] = sub_resources_list
        for sub_resource in sub_resources:
            sub_resources_list.append(sub_resource.name)
            # Create a new DocumentStructure for each sub_resource and add contents.
            sub_resource_doc = DocumentStructure(
                sub_resource.name, target='html'
            )
            breadcrumb_section = sub_resource_doc.add_new_section('breadcrumb')
            breadcrumb_section.style.ref(self._resource_class_name, 'index')
            breadcrumb_section.write(f' / Sub-Resource / {sub_resource.name}')
            sub_resource_doc.add_title_section(sub_resource.name)
            sub_resource_section = sub_resource_doc.add_new_section(
                sub_resource.name,
                context={'qualifier': f'{self.class_name}.'},
            )
            document_sub_resource(
                section=sub_resource_section,
                resource_name=self._resource_name,
                sub_resource_model=sub_resource,
                service_model=self._service_model,
            )
            # Write sub_resources in individual/nested files.
            # Path: <root>/reference/services/<service>/<resource_name>/<sub_resource_name>.rst
            sub_resources_dir_path = os.path.join(
                self._root_docs_path,
                f'{self._service_name}',
                f'{self._resource_sub_path}',
            )
            sub_resource_doc.write_to_file(
                sub_resources_dir_path, sub_resource.name
            )
def document_sub_resource(
    section,
    resource_name,
    sub_resource_model,
    service_model,
    include_signature=True,
):
    """Documents a resource action

    :param section: The section to write to
    :param resource_name: The name of the resource
    :param sub_resource_model: The model of the subresource
    :param service_model: The model of the service
    :param include_signature: Whether or not to include the signature.
        It is useful for generating docstrings.
    """
    # Only identifiers sourced from 'input' become constructor arguments;
    # the rest are inherited from the parent resource.
    identifiers_needed = []
    for identifier in sub_resource_model.resource.identifiers:
        if identifier.source == 'input':
            identifiers_needed.append(xform_name(identifier.target))
    if include_signature:
        signature_args = get_identifier_args_for_signature(identifiers_needed)
        full_sub_resource_name = (
            f"{section.context.get('qualifier', '')}{sub_resource_model.name}"
        )
        section.style.start_sphinx_py_method(
            full_sub_resource_name, signature_args
        )
    method_intro_section = section.add_new_section('method-intro')
    description = f'Creates a {sub_resource_model.resource.type} resource.'
    method_intro_section.include_doc_string(description)
    # Usage example, e.g.: bucket = s3.Bucket('name')
    example_section = section.add_new_section('example')
    example_values = get_identifier_values_for_example(identifiers_needed)
    example_resource_name = xform_name(resource_name)
    if service_model.service_name == resource_name:
        example_resource_name = resource_name
    example = '{} = {}.{}({})'.format(
        xform_name(sub_resource_model.resource.type),
        example_resource_name,
        sub_resource_model.name,
        example_values,
    )
    example_section.style.start_codeblock()
    example_section.write(example)
    example_section.style.end_codeblock()
    # Document each constructor argument.
    param_section = section.add_new_section('params')
    for identifier in identifiers_needed:
        description = get_identifier_description(
            sub_resource_model.name, identifier
        )
        param_section.write(f':type {identifier}: string')
        param_section.style.new_line()
        param_section.write(f':param {identifier}: {description}')
        param_section.style.new_line()
    # Document the return type as the created resource class.
    return_section = section.add_new_section('return')
    return_section.style.new_line()
    return_section.write(
        ':rtype: :py:class:`{}.{}`'.format(
            get_service_module_name(service_model),
            sub_resource_model.resource.type,
        )
    )
    return_section.style.new_line()
    return_section.write(
        f':returns: A {sub_resource_model.resource.type} resource'
    )
    return_section.style.new_line()

View File

@@ -0,0 +1,146 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import inspect
import jmespath
def get_resource_ignore_params(params):
    """Determine which parameters to omit from documented action calls.

    :param params: Request parameter objects, each carrying a JMESPath
        ``target`` expression.
    :returns: A list of the parameter names that do not need to be
        included in a resource's method call for documentation purposes.
    """
    ignored = []
    for param in params:
        # Walk the compiled JMESPath AST down to the left-most element of
        # the target expression; that element names the top-level request
        # parameter the action supplies automatically.
        node = jmespath.compile(param.target).parsed
        while node['children']:
            node = node['children'][0]
        # Only plain fields qualify; skipping anything else avoids false
        # positives.
        if node['type'] == 'field':
            ignored.append(node['value'])
    return ignored
def is_resource_action(action_handle):
    """Return True if ``action_handle`` is a plain Python function.

    Used to filter a class's members down to callable resource actions.
    """
    return inspect.isfunction(action_handle)
def get_resource_public_actions(resource_class):
    """Return a mapping of public action name to method for a resource class.

    Excludes private names (leading underscore), names beginning with an
    upper-case letter, and ``wait_until*`` waiter methods; only plain
    functions are kept.
    """
    public_actions = {}
    for name, member in inspect.getmembers(resource_class):
        is_public_name = (
            not name.startswith('_')
            and not name[0].isupper()
            and not name.startswith('wait_until')
        )
        if is_public_name and is_resource_action(member):
            public_actions[name] = member
    return public_actions
def get_identifier_values_for_example(identifier_names):
    """Return the names as a comma-separated string of quoted example
    values, e.g. ``'id1','id2'``."""
    quoted = [f"'{name}'" for name in identifier_names]
    return ','.join(quoted)
def get_identifier_args_for_signature(identifier_names):
    """Return the identifier names joined into a signature argument
    string, e.g. ``'name1,name2'``."""
    return ','.join(identifier_names)
def get_identifier_description(resource_name, identifier_name):
    """Return the standard docstring line for a required identifier."""
    template = "The {}'s {} identifier. This **must** be set."
    return template.format(resource_name, identifier_name)
def add_resource_type_overview(
    section, resource_type, description, intro_link=None
):
    """Write a heading and overview paragraph for a resource type.

    :param section: The documentation section to write into.
    :param resource_type: The heading text, e.g. ``'Waiters'``.
    :param description: The overview paragraph written under the heading.
    :param intro_link: Optional sphinx ref target; when given, a link to
        the Resources Introduction Guide is appended.
    """
    section.style.new_line()
    section.style.h3(resource_type)
    section.style.new_line()
    section.style.new_line()
    section.write(description)
    section.style.new_line()
    if intro_link is not None:
        section.write(
            f'For more information about {resource_type.lower()} refer to the '
            f':ref:`Resources Introduction Guide<{intro_link}>`.'
        )
        section.style.new_line()
class DocumentModifiedShape:
    """Replaces the generated documentation for a shape with custom
    type, description, and example content.

    Used for shapes whose wire representation differs from the values
    users actually pass (e.g. transformed DynamoDB attribute values).
    """

    def __init__(
        self, shape_name, new_type, new_description, new_example_value
    ):
        # Name of the shape whose generated docs should be replaced.
        self._shape_name = shape_name
        self._new_type = new_type
        self._new_description = new_description
        self._new_example_value = new_example_value

    def replace_documentation_for_matching_shape(
        self, event_name, section, **kwargs
    ):
        """Event handler: recursively find sections documenting this
        shape and rewrite them."""
        if self._shape_name == section.context.get('shape'):
            self._replace_documentation(event_name, section)
        for section_name in section.available_sections:
            sub_section = section.get_section(section_name)
            if self._shape_name == sub_section.context.get('shape'):
                self._replace_documentation(event_name, sub_section)
            else:
                # Keep descending; the shape may appear deeper in the tree.
                self.replace_documentation_for_matching_shape(
                    event_name, sub_section
                )

    def _replace_documentation(self, event_name, section):
        # Example sections are replaced wholesale with the new example.
        if event_name.startswith(
            'docs.request-example'
        ) or event_name.startswith('docs.response-example'):
            section.remove_all_sections()
            section.clear_text()
            section.write(self._new_example_value)
        # Param sections keep only their structural pieces; the description
        # and type are rewritten with the configured replacements.
        if event_name.startswith(
            'docs.request-params'
        ) or event_name.startswith('docs.response-params'):
            allowed_sections = (
                'param-name',
                'param-documentation',
                'end-structure',
                'param-type',
                'end-param',
            )
            for section_name in section.available_sections:
                # Delete any extra members as a new shape is being
                # used.
                if section_name not in allowed_sections:
                    section.delete_section(section_name)
            # Update the documentation
            description_section = section.get_section('param-documentation')
            description_section.clear_text()
            description_section.write(self._new_description)
            # Update the param type, matching whichever rendering style
            # (':type' line vs. inline italics) the section already used.
            type_section = section.get_section('param-type')
            if type_section.getvalue().decode('utf-8').startswith(':type'):
                type_section.clear_text()
                type_section.write(f':type {section.name}: {self._new_type}')
            else:
                type_section.clear_text()
                type_section.style.italics(f'({self._new_type}) -- ')

View File

@@ -0,0 +1,130 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
from botocore import xform_name
from botocore.docs.bcdoc.restdoc import DocumentStructure
from botocore.docs.method import document_model_driven_method
from botocore.utils import get_service_module_name
from boto3.docs.base import NestedDocumenter
from boto3.docs.utils import (
add_resource_type_overview,
get_resource_ignore_params,
)
class WaiterResourceDocumenter(NestedDocumenter):
    """Generates documentation pages for the waiters on a boto3 resource."""

    def __init__(self, resource, service_waiter_model, root_docs_path):
        super().__init__(resource, root_docs_path)
        self._service_waiter_model = service_waiter_model

    def document_resource_waiters(self, section):
        """Document every waiter attached to the resource.

        Writes an overview into ``section`` and a separate documentation
        file per waiter under the service's docs directory.
        """
        waiters = self._resource.meta.resource_model.waiters
        add_resource_type_overview(
            section=section,
            resource_type='Waiters',
            description=(
                'Waiters provide an interface to wait for a resource'
                ' to reach a specific state.'
            ),
            intro_link='waiters_intro',
        )
        waiter_list = []
        self.member_map['waiters'] = waiter_list
        for waiter in waiters:
            waiter_list.append(waiter.name)
            # Create a new DocumentStructure for each waiter and add contents.
            waiter_doc = DocumentStructure(waiter.name, target='html')
            breadcrumb_section = waiter_doc.add_new_section('breadcrumb')
            breadcrumb_section.style.ref(self._resource_class_name, 'index')
            breadcrumb_section.write(f' / Waiter / {waiter.name}')
            waiter_doc.add_title_section(waiter.name)
            waiter_section = waiter_doc.add_new_section(
                waiter.name,
                context={'qualifier': f'{self.class_name}.'},
            )
            document_resource_waiter(
                section=waiter_section,
                resource_name=self._resource_name,
                event_emitter=self._resource.meta.client.meta.events,
                service_model=self._service_model,
                resource_waiter_model=waiter,
                service_waiter_model=self._service_waiter_model,
            )
            # Write waiters in individual/nested files.
            # Path: <root>/reference/services/<service>/<resource_name>/<waiter_name>.rst
            waiters_dir_path = os.path.join(
                self._root_docs_path,
                f'{self._service_name}',
                f'{self._resource_sub_path}',
            )
            waiter_doc.write_to_file(waiters_dir_path, waiter.name)
def document_resource_waiter(
    section,
    resource_name,
    event_emitter,
    service_model,
    resource_waiter_model,
    service_waiter_model,
    include_signature=True,
):
    """Document a single waiter attached to a resource.

    :param section: The documentation section to write into.
    :param resource_name: The name of the resource the waiter belongs to.
    :param event_emitter: The event emitter used while documenting the
        underlying client method.
    :param service_model: The model of the service.
    :param resource_waiter_model: The resource-level waiter model.
    :param service_waiter_model: The service waiter model, used to look up
        delay and max-attempt details for the description.
    :param include_signature: Whether or not to include the signature.
        It is useful for generating docstrings.
    """
    waiter_model = service_waiter_model.get_waiter(
        resource_waiter_model.waiter_name
    )
    operation_model = service_model.operation_model(waiter_model.operation)
    # Parameters the waiter fills in itself are hidden from the docs.
    ignore_params = get_resource_ignore_params(resource_waiter_model.params)
    service_module_name = get_service_module_name(service_model)
    description = (
        'Waits until this {} is {}. This method calls '
        ':py:meth:`{}.Waiter.{}.wait` which polls '
        ':py:meth:`{}.Client.{}` every {} seconds until '
        'a successful state is reached. An error is returned '
        'after {} failed checks.'.format(
            resource_name,
            ' '.join(resource_waiter_model.name.split('_')[2:]),
            service_module_name,
            xform_name(resource_waiter_model.waiter_name),
            service_module_name,
            xform_name(waiter_model.operation),
            waiter_model.delay,
            waiter_model.max_attempts,
        )
    )
    example_prefix = '{}.{}'.format(
        xform_name(resource_name), resource_waiter_model.name
    )
    full_waiter_name = (
        f"{section.context.get('qualifier', '')}{resource_waiter_model.name}"
    )
    document_model_driven_method(
        section=section,
        method_name=full_waiter_name,
        operation_model=operation_model,
        event_emitter=event_emitter,
        example_prefix=example_prefix,
        method_description=description,
        exclude_input=ignore_params,
        include_signature=include_signature,
    )
    if 'return' in section.available_sections:
        # Waiters do not return anything so we should remove
        # any sections that may document the underlying return
        # value of the client method.
        return_section = section.get_section('return')
        return_section.clear_text()
        return_section.remove_all_sections()
        return_section.write(':returns: None')

View File

@@ -0,0 +1,12 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

View File

@@ -0,0 +1,461 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import re
from collections import namedtuple
from boto3.exceptions import (
DynamoDBNeedsConditionError,
DynamoDBNeedsKeyConditionError,
DynamoDBOperationNotSupportedError,
)
# Matches each component of a dotted attribute path (e.g. 'a.b[0].c'
# yields 'a', 'b', 'c'); the negative lookahead skips list indexes that
# sit inside square brackets.
ATTR_NAME_REGEX = re.compile(r'[^.\[\]]+(?![^\[]*\])')
class ConditionBase:
    """Base class for all DynamoDB condition expressions.

    Subclasses set ``expression_format`` and ``expression_operator`` to
    describe how the condition is rendered into an expression string.
    """

    expression_format = ''
    expression_operator = ''
    # True when the condition's values are wrapped in parentheses as a
    # group (used by IN).
    has_grouped_values = False

    def __init__(self, *values):
        self._values = values

    def __and__(self, other):
        # Conditions may only be combined with other conditions.
        if not isinstance(other, ConditionBase):
            raise DynamoDBOperationNotSupportedError('AND', other)
        return And(self, other)

    def __or__(self, other):
        if not isinstance(other, ConditionBase):
            raise DynamoDBOperationNotSupportedError('OR', other)
        return Or(self, other)

    def __invert__(self):
        return Not(self)

    def get_expression(self):
        """Return the format string, operator, and values of this condition."""
        return {
            'format': self.expression_format,
            'operator': self.expression_operator,
            'values': self._values,
        }

    def __eq__(self, other):
        # Bug fix: the previous implementation fell through and returned
        # None (not False) when the types or values did not match, so
        # checks like ``(a == b) is False`` failed.  Always return a bool.
        return isinstance(other, type(self)) and self._values == other._values

    def __ne__(self, other):
        return not self.__eq__(other)
class AttributeBase:
    """A named DynamoDB attribute used as the starting point for building
    condition expressions."""

    def __init__(self, name):
        self.name = name

    def __and__(self, value):
        # Bare attributes cannot be combined; only completed conditions can.
        raise DynamoDBOperationNotSupportedError('AND', self)

    def __or__(self, value):
        raise DynamoDBOperationNotSupportedError('OR', self)

    def __invert__(self):
        raise DynamoDBOperationNotSupportedError('NOT', self)

    def eq(self, value):
        """Create an equality condition for this attribute.

        :param value: The value that the attribute is equal to.
        """
        return Equals(self, value)

    def lt(self, value):
        """Create a less-than condition for this attribute.

        :param value: The value that the attribute is less than.
        """
        return LessThan(self, value)

    def lte(self, value):
        """Create a less-than-or-equal condition for this attribute.

        :param value: The value that the attribute is less than or equal to.
        """
        return LessThanEquals(self, value)

    def gt(self, value):
        """Create a greater-than condition for this attribute.

        :param value: The value that the attribute is greater than.
        """
        return GreaterThan(self, value)

    def gte(self, value):
        """Create a greater-than-or-equal condition for this attribute.

        :param value: The value that the attribute is greater than or equal to.
        """
        return GreaterThanEquals(self, value)

    def begins_with(self, value):
        """Create a begins-with condition for this attribute.

        :param value: The value that the attribute begins with.
        """
        return BeginsWith(self, value)

    def between(self, low_value, high_value):
        """Create a condition bounding the attribute between two values,
        inclusive on both ends.

        :param low_value: The value that the attribute is greater than or equal to.
        :param high_value: The value that the attribute is less than or equal to.
        """
        return Between(self, low_value, high_value)

    def __eq__(self, other):
        if not isinstance(other, type(self)):
            return False
        return self.name == other.name

    def __ne__(self, other):
        return not self.__eq__(other)
class ConditionAttributeBase(ConditionBase, AttributeBase):
    """This base class is for conditions that can have attribute methods.

    One example is the Size condition. To complete a condition, you need
    to apply another AttributeBase method like eq().
    """

    def __init__(self, *values):
        ConditionBase.__init__(self, *values)
        # This is assuming the first value to the condition is the attribute
        # in which can be used to generate its attribute base.
        AttributeBase.__init__(self, values[0].name)

    def __eq__(self, other):
        # Equal only when both the condition values and the attribute
        # name agree.
        return ConditionBase.__eq__(self, other) and AttributeBase.__eq__(
            self, other
        )

    def __ne__(self, other):
        return not self.__eq__(other)
# Binary comparisons rendered as "<left> <operator> <right>".
class ComparisonCondition(ConditionBase):
    expression_format = '{0} {operator} {1}'


class Equals(ComparisonCondition):
    expression_operator = '='


class NotEquals(ComparisonCondition):
    expression_operator = '<>'


class LessThan(ComparisonCondition):
    expression_operator = '<'


class LessThanEquals(ComparisonCondition):
    expression_operator = '<='


class GreaterThan(ComparisonCondition):
    expression_operator = '>'


class GreaterThanEquals(ComparisonCondition):
    expression_operator = '>='


class In(ComparisonCondition):
    expression_operator = 'IN'
    # IN's right-hand side is a parenthesized group of values.
    has_grouped_values = True


# Ternary condition: "<attr> BETWEEN <low> AND <high>".
class Between(ConditionBase):
    expression_operator = 'BETWEEN'
    expression_format = '{0} {operator} {1} AND {2}'


# Function-style conditions rendered as "op(arg, ...)".
class BeginsWith(ConditionBase):
    expression_operator = 'begins_with'
    expression_format = '{operator}({0}, {1})'


class Contains(ConditionBase):
    expression_operator = 'contains'
    expression_format = '{operator}({0}, {1})'


class Size(ConditionAttributeBase):
    expression_operator = 'size'
    expression_format = '{operator}({0})'


class AttributeType(ConditionBase):
    expression_operator = 'attribute_type'
    expression_format = '{operator}({0}, {1})'


class AttributeExists(ConditionBase):
    expression_operator = 'attribute_exists'
    expression_format = '{operator}({0})'


class AttributeNotExists(ConditionBase):
    expression_operator = 'attribute_not_exists'
    expression_format = '{operator}({0})'


# Logical combinators; parenthesized to preserve grouping when nested.
class And(ConditionBase):
    expression_operator = 'AND'
    expression_format = '({0} {operator} {1})'


class Or(ConditionBase):
    expression_operator = 'OR'
    expression_format = '({0} {operator} {1})'


class Not(ConditionBase):
    expression_operator = 'NOT'
    expression_format = '({operator} {0})'
class Key(AttributeBase):
    """Represents a DynamoDB key attribute; only Key objects are accepted
    in KeyConditionExpressions."""

    pass
class Attr(AttributeBase):
    """Represents a DynamoDB item's attribute."""

    def ne(self, value):
        """Creates a condition where the attribute is not equal to the value.

        :param value: The value that the attribute is not equal to.
        """
        return NotEquals(self, value)

    def is_in(self, value):
        """Creates a condition where the attribute is in the value.

        :type value: list
        :param value: The value that the attribute is in.
        """
        return In(self, value)

    def exists(self):
        """Creates a condition where the attribute exists."""
        return AttributeExists(self)

    def not_exists(self):
        """Creates a condition where the attribute does not exist."""
        return AttributeNotExists(self)

    def contains(self, value):
        """Creates a condition where the attribute contains the value.

        :param value: The value the attribute contains.
        """
        return Contains(self, value)

    def size(self):
        """Creates a condition for the attribute size.

        Note another AttributeBase method must be called on the returned
        size condition to be a valid DynamoDB condition.
        """
        return Size(self)

    def attribute_type(self, value):
        """Creates a condition for the attribute type.

        :param value: The type of the attribute.
        """
        return AttributeType(self, value)
# Result of ConditionExpressionBuilder.build_expression().
BuiltConditionExpression = namedtuple(
    'BuiltConditionExpression',
    [
        # The rendered expression string with placeholders substituted in.
        'condition_expression',
        # Maps name placeholders (e.g. '#n0') to actual attribute names.
        'attribute_name_placeholders',
        # Maps value placeholders (e.g. ':v0') to actual attribute values.
        'attribute_value_placeholders',
    ],
)
class ConditionExpressionBuilder:
    """This class is used to build condition expressions with placeholders"""

    def __init__(self):
        # Monotonic counters ensure each generated placeholder is unique
        # within one build.
        self._name_count = 0
        self._value_count = 0
        self._name_placeholder = 'n'
        self._value_placeholder = 'v'

    def _get_name_placeholder(self):
        # e.g. '#n0', '#n1', ...
        return '#' + self._name_placeholder + str(self._name_count)

    def _get_value_placeholder(self):
        # e.g. ':v0', ':v1', ...
        return ':' + self._value_placeholder + str(self._value_count)

    def reset(self):
        """Resets the placeholder name and values"""
        self._name_count = 0
        self._value_count = 0

    def build_expression(self, condition, is_key_condition=False):
        """Builds the condition expression and the dictionary of placeholders.

        :type condition: ConditionBase
        :param condition: A condition to be built into a condition expression
            string with any necessary placeholders.

        :type is_key_condition: Boolean
        :param is_key_condition: True if the expression is for a
            KeyConditionExpression. False otherwise.

        :rtype: BuiltConditionExpression
        :returns: A namedtuple containing a string representing the
            condition with placeholders inserted where necessary, a
            dictionary of placeholders for attribute names, and a
            dictionary of placeholders for attribute values. Here is a
            sample return value:

            ('#n0 = :v0', {'#n0': 'myattribute'}, {':v0': 'myvalue'})
        """
        if not isinstance(condition, ConditionBase):
            raise DynamoDBNeedsConditionError(condition)
        attribute_name_placeholders = {}
        attribute_value_placeholders = {}
        condition_expression = self._build_expression(
            condition,
            attribute_name_placeholders,
            attribute_value_placeholders,
            is_key_condition=is_key_condition,
        )
        return BuiltConditionExpression(
            condition_expression=condition_expression,
            attribute_name_placeholders=attribute_name_placeholders,
            attribute_value_placeholders=attribute_value_placeholders,
        )

    def _build_expression(
        self,
        condition,
        attribute_name_placeholders,
        attribute_value_placeholders,
        is_key_condition,
    ):
        """Recursively render ``condition`` into an expression string,
        accumulating placeholders into the two dictionaries in place."""
        expression_dict = condition.get_expression()
        replaced_values = []
        for value in expression_dict['values']:
            # Build the necessary placeholders for that value.
            # Placeholders are built for both attribute names and values.
            replaced_value = self._build_expression_component(
                value,
                attribute_name_placeholders,
                attribute_value_placeholders,
                condition.has_grouped_values,
                is_key_condition,
            )
            replaced_values.append(replaced_value)
        # Fill out the expression using the operator and the
        # values that have been replaced with placeholders.
        return expression_dict['format'].format(
            *replaced_values, operator=expression_dict['operator']
        )

    def _build_expression_component(
        self,
        value,
        attribute_name_placeholders,
        attribute_value_placeholders,
        has_grouped_values,
        is_key_condition,
    ):
        """Render one operand of a condition: a nested condition, an
        attribute reference, or a literal value."""
        # Continue to recurse if the value is a ConditionBase in order
        # to extract out all parts of the expression.
        if isinstance(value, ConditionBase):
            return self._build_expression(
                value,
                attribute_name_placeholders,
                attribute_value_placeholders,
                is_key_condition,
            )
        # If it is not a ConditionBase, we can recurse no further.
        # So we check if it is an attribute and add placeholders for
        # its name
        elif isinstance(value, AttributeBase):
            if is_key_condition and not isinstance(value, Key):
                raise DynamoDBNeedsKeyConditionError(
                    f'Attribute object {value.name} is of type {type(value)}. '
                    f'KeyConditionExpression only supports Attribute objects '
                    f'of type Key'
                )
            return self._build_name_placeholder(
                value, attribute_name_placeholders
            )
        # If it is anything else, we treat it as a value and thus placeholders
        # are needed for the value.
        else:
            return self._build_value_placeholder(
                value, attribute_value_placeholders, has_grouped_values
            )

    def _build_name_placeholder(self, value, attribute_name_placeholders):
        """Replace each component of a (possibly dotted/indexed) attribute
        name with a '#nX' placeholder, recording the mapping."""
        attribute_name = value.name
        # Figure out which parts of the attribute name that needs replacement.
        attribute_name_parts = ATTR_NAME_REGEX.findall(attribute_name)
        # Add a temporary placeholder for each of these parts.
        placeholder_format = ATTR_NAME_REGEX.sub('%s', attribute_name)
        str_format_args = []
        for part in attribute_name_parts:
            name_placeholder = self._get_name_placeholder()
            self._name_count += 1
            str_format_args.append(name_placeholder)
            # Add the placeholder and value to dictionary of name placeholders.
            attribute_name_placeholders[name_placeholder] = part
        # Replace the temporary placeholders with the designated placeholders.
        return placeholder_format % tuple(str_format_args)

    def _build_value_placeholder(
        self, value, attribute_value_placeholders, has_grouped_values=False
    ):
        """Replace a literal value (or each element of a grouped value)
        with a ':vX' placeholder, recording the mapping."""
        # If the values are grouped, we need to add a placeholder for
        # each element inside of the actual value.
        if has_grouped_values:
            placeholder_list = []
            for v in value:
                value_placeholder = self._get_value_placeholder()
                self._value_count += 1
                placeholder_list.append(value_placeholder)
                attribute_value_placeholders[value_placeholder] = v
            # Assuming the values are grouped by parenthesis.
            # IN is the currently the only one that uses this so it maybe
            # needed to be changed in future.
            return '(' + ', '.join(placeholder_list) + ')'
        # Otherwise, treat the value as a single value that needs only
        # one placeholder.
        else:
            value_placeholder = self._get_value_placeholder()
            self._value_count += 1
            attribute_value_placeholders[value_placeholder] = value
            return value_placeholder
View File

@@ -0,0 +1,167 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import logging
# Module-level logger used for batch-write diagnostics (flushes, de-dup skips).
logger = logging.getLogger(__name__)
def register_table_methods(base_classes, **kwargs):
    """Event handler that injects ``TableResource`` as the first base
    class of the generated Table resource class."""
    base_classes.insert(0, TableResource)
# This class can be used to add any additional methods we want
# onto a table resource. Ideally to avoid creating a new
# base class for every method we can just update this
# class instead. Just be sure to move the bulk of the
# actual method implementation to another class.
class TableResource:
    """Mixin that adds extra convenience methods to the DynamoDB Table
    resource (see ``register_table_methods``)."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def batch_writer(self, overwrite_by_pkeys=None):
        """Create a batch writer object.

        This method creates a context manager for writing
        objects to Amazon DynamoDB in batch.

        The batch writer will automatically handle buffering and sending items
        in batches. In addition, the batch writer will also automatically
        handle any unprocessed items and resend them as needed. All you need
        to do is call ``put_item`` for any items you want to add, and
        ``delete_item`` for any items you want to delete.

        Example usage::

            with table.batch_writer() as batch:
                for _ in range(1000000):
                    batch.put_item(Item={'HashKey': '...',
                                         'Otherstuff': '...'})
                # You can also delete_items in a batch.
                batch.delete_item(Key={'HashKey': 'SomeHashKey'})

        :type overwrite_by_pkeys: list(string)
        :param overwrite_by_pkeys: De-duplicate request items in buffer
            if match new request item on specified primary keys. i.e
            ``["partition_key1", "sort_key2", "sort_key3"]``
        """
        return BatchWriter(
            self.name, self.meta.client, overwrite_by_pkeys=overwrite_by_pkeys
        )
class BatchWriter:
    """Automatically handle batch writes to DynamoDB for a single table."""

    def __init__(
        self, table_name, client, flush_amount=25, overwrite_by_pkeys=None
    ):
        """
        :type table_name: str
        :param table_name: The name of the table. The class handles
            batch writes to a single table.

        :type client: ``botocore.client.Client``
        :param client: A botocore client. Note this client
            **must** have the dynamodb customizations applied
            to it for transforming AttributeValues into the
            wire protocol. What this means in practice is that
            you need to use a client that comes from a DynamoDB
            resource if you're going to instantiate this class
            directly, i.e
            ``boto3.resource('dynamodb').Table('foo').meta.client``.

        :type flush_amount: int
        :param flush_amount: The number of items to keep in
            a local buffer before sending a batch_write_item
            request to DynamoDB.

        :type overwrite_by_pkeys: list(string)
        :param overwrite_by_pkeys: De-duplicate request items in buffer
            if match new request item on specified primary keys. i.e
            ``["partition_key1", "sort_key2", "sort_key3"]``
        """
        self._table_name = table_name
        self._client = client
        self._items_buffer = []
        self._flush_amount = flush_amount
        self._overwrite_by_pkeys = overwrite_by_pkeys

    def put_item(self, Item):
        """Buffer a PutRequest for ``Item``, flushing if the buffer is full."""
        self._add_request_and_process({'PutRequest': {'Item': Item}})

    def delete_item(self, Key):
        """Buffer a DeleteRequest for ``Key``, flushing if the buffer is full."""
        self._add_request_and_process({'DeleteRequest': {'Key': Key}})

    def _add_request_and_process(self, request):
        if self._overwrite_by_pkeys:
            self._remove_dup_pkeys_request_if_any(request)
        self._items_buffer.append(request)
        self._flush_if_needed()

    def _remove_dup_pkeys_request_if_any(self, request):
        """Drop any buffered request with the same primary key as ``request``.

        Bug fix: the previous implementation called ``list.remove()`` on
        ``self._items_buffer`` while iterating over it, which can skip
        elements; rebuild the buffer with the matches filtered out instead.
        """
        pkey_values_new = self._extract_pkey_values(request)
        remaining = []
        for item in self._items_buffer:
            if self._extract_pkey_values(item) == pkey_values_new:
                logger.debug(
                    "With overwrite_by_pkeys enabled, skipping request:%s",
                    item,
                )
            else:
                remaining.append(item)
        self._items_buffer[:] = remaining

    def _extract_pkey_values(self, request):
        """Return the primary-key values of ``request``, or None when it
        is neither a put nor a delete request."""
        if request.get('PutRequest'):
            return [
                request['PutRequest']['Item'][key]
                for key in self._overwrite_by_pkeys
            ]
        elif request.get('DeleteRequest'):
            return [
                request['DeleteRequest']['Key'][key]
                for key in self._overwrite_by_pkeys
            ]
        return None

    def _flush_if_needed(self):
        if len(self._items_buffer) >= self._flush_amount:
            self._flush()

    def _flush(self):
        # Send at most one full batch; anything beyond flush_amount stays
        # buffered for the next flush.
        items_to_send = self._items_buffer[: self._flush_amount]
        self._items_buffer = self._items_buffer[self._flush_amount :]
        response = self._client.batch_write_item(
            RequestItems={self._table_name: items_to_send}
        )
        unprocessed_items = response['UnprocessedItems']
        if not unprocessed_items:
            unprocessed_items = {}
        item_list = unprocessed_items.get(self._table_name, [])
        # Any unprocessed_items are immediately added to the
        # next batch we send.
        self._items_buffer.extend(item_list)
        logger.debug(
            "Batch write sent %s, unprocessed: %s",
            len(items_to_send),
            len(self._items_buffer),
        )

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, tb):
        # When we exit, we need to keep flushing whatever's left
        # until there's nothing left in our items buffer.
        while self._items_buffer:
            self._flush()

View File

@@ -0,0 +1,343 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import copy
from boto3.compat import collections_abc
from boto3.docs.utils import DocumentModifiedShape
from boto3.dynamodb.conditions import ConditionBase, ConditionExpressionBuilder
from boto3.dynamodb.types import TypeDeserializer, TypeSerializer
def register_high_level_interface(base_classes, **kwargs):
    """Event handler that injects ``DynamoDBHighLevelResource`` as the
    first base class of the generated DynamoDB service resource."""
    base_classes.insert(0, DynamoDBHighLevelResource)
class _ForgetfulDict(dict):
"""A dictionary that discards any items set on it. For use as `memo` in
`copy.deepcopy()` when every instance of a repeated object in the deepcopied
data structure should result in a separate copy.
"""
def __setitem__(self, key, value):
pass
def copy_dynamodb_params(params, **kwargs):
    """Deep-copy user-provided request parameters so later transformation
    steps can mutate them safely; the ``_ForgetfulDict`` memo disables
    deepcopy memoization so repeated objects become independent copies."""
    return copy.deepcopy(params, memo=_ForgetfulDict())
class DynamoDBHighLevelResource:
    """Mixin that wires the DynamoDB parameter/response transformations
    and the matching documentation customizations onto the service
    resource (see ``register_high_level_interface``)."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Apply handler that creates a copy of the user provided dynamodb
        # item such that it can be modified.
        self.meta.client.meta.events.register(
            'provide-client-params.dynamodb',
            copy_dynamodb_params,
            unique_id='dynamodb-create-params-copy',
        )
        self._injector = TransformationInjector()
        # Apply the handler that generates condition expressions including
        # placeholders.
        self.meta.client.meta.events.register(
            'before-parameter-build.dynamodb',
            self._injector.inject_condition_expressions,
            unique_id='dynamodb-condition-expression',
        )
        # Apply the handler that serializes the request from python
        # types to dynamodb types.
        self.meta.client.meta.events.register(
            'before-parameter-build.dynamodb',
            self._injector.inject_attribute_value_input,
            unique_id='dynamodb-attr-value-input',
        )
        # Apply the handler that deserializes the response from dynamodb
        # types to python types.
        self.meta.client.meta.events.register(
            'after-call.dynamodb',
            self._injector.inject_attribute_value_output,
            unique_id='dynamodb-attr-value-output',
        )
        # Apply the documentation customizations to account for
        # the transformations.
        attr_value_shape_docs = DocumentModifiedShape(
            'AttributeValue',
            new_type='valid DynamoDB type',
            new_description=(
                '- The value of the attribute. The valid value types are '
                'listed in the '
                ':ref:`DynamoDB Reference Guide<ref_valid_dynamodb_types>`.'
            ),
            new_example_value=(
                '\'string\'|123|Binary(b\'bytes\')|True|None|set([\'string\'])'
                '|set([123])|set([Binary(b\'bytes\')])|[]|{}'
            ),
        )
        key_expression_shape_docs = DocumentModifiedShape(
            'KeyExpression',
            new_type=(
                'condition from :py:class:`boto3.dynamodb.conditions.Key` '
                'method'
            ),
            new_description=(
                'The condition(s) a key(s) must meet. Valid conditions are '
                'listed in the '
                ':ref:`DynamoDB Reference Guide<ref_dynamodb_conditions>`.'
            ),
            new_example_value='Key(\'mykey\').eq(\'myvalue\')',
        )
        con_expression_shape_docs = DocumentModifiedShape(
            'ConditionExpression',
            new_type=(
                'condition from :py:class:`boto3.dynamodb.conditions.Attr` '
                'method'
            ),
            new_description=(
                'The condition(s) an attribute(s) must meet. Valid conditions '
                'are listed in the '
                ':ref:`DynamoDB Reference Guide<ref_dynamodb_conditions>`.'
            ),
            new_example_value='Attr(\'myattribute\').eq(\'myvalue\')',
        )
        # Rewrite the generated docs for each customized shape whenever
        # its documentation section completes.
        self.meta.client.meta.events.register(
            'docs.*.dynamodb.*.complete-section',
            attr_value_shape_docs.replace_documentation_for_matching_shape,
            unique_id='dynamodb-attr-value-docs',
        )
        self.meta.client.meta.events.register(
            'docs.*.dynamodb.*.complete-section',
            key_expression_shape_docs.replace_documentation_for_matching_shape,
            unique_id='dynamodb-key-expression-docs',
        )
        self.meta.client.meta.events.register(
            'docs.*.dynamodb.*.complete-section',
            con_expression_shape_docs.replace_documentation_for_matching_shape,
            unique_id='dynamodb-cond-expression-docs',
        )
class TransformationInjector:
    """Injects the DynamoDB transformations into user-provided parameters.

    Each collaborator may be supplied explicitly (useful for testing);
    otherwise the standard implementation is constructed.
    """

    def __init__(
        self,
        transformer=None,
        condition_builder=None,
        serializer=None,
        deserializer=None,
    ):
        self._transformer = (
            ParameterTransformer() if transformer is None else transformer
        )
        self._condition_builder = (
            ConditionExpressionBuilder()
            if condition_builder is None
            else condition_builder
        )
        self._serializer = (
            TypeSerializer() if serializer is None else serializer
        )
        self._deserializer = (
            TypeDeserializer() if deserializer is None else deserializer
        )

    def inject_condition_expressions(self, params, model, **kwargs):
        """Injects the condition expression transformation into the parameters

        Handles both ConditionExpression and KeyExpression shapes, then
        merges any placeholder names and values generated while building
        the expressions into the request parameters.
        """
        self._condition_builder.reset()
        generated_names = {}
        generated_values = {}
        # Apply the plain condition transform first, then the key-condition
        # transform; both accumulate placeholders into the same dicts.
        for is_key_condition, shape_name in (
            (False, 'ConditionExpression'),
            (True, 'KeyExpression'),
        ):
            transformation = ConditionExpressionTransformation(
                self._condition_builder,
                placeholder_names=generated_names,
                placeholder_values=generated_values,
                is_key_condition=is_key_condition,
            )
            self._transformer.transform(
                params, model.input_shape, transformation, shape_name
            )
        # With all condition expression transforms done, fold the
        # generated placeholders into the request.
        self._merge_placeholders(
            params, 'ExpressionAttributeNames', generated_names
        )
        self._merge_placeholders(
            params, 'ExpressionAttributeValues', generated_values
        )

    def _merge_placeholders(self, params, key, generated):
        # Update an existing user-provided mapping in place; otherwise only
        # add the key when placeholders were actually generated.
        if key in params:
            params[key].update(generated)
        elif generated:
            params[key] = generated

    def inject_attribute_value_input(self, params, model, **kwargs):
        """Injects DynamoDB serialization into parameter input"""
        self._transformer.transform(
            params,
            model.input_shape,
            self._serializer.serialize,
            'AttributeValue',
        )

    def inject_attribute_value_output(self, parsed, model, **kwargs):
        """Injects DynamoDB deserialization into responses"""
        # Some operations have no modeled output; nothing to deserialize.
        if model.output_shape is not None:
            self._transformer.transform(
                parsed,
                model.output_shape,
                self._deserializer.deserialize,
                'AttributeValue',
            )
class ConditionExpressionTransformation:
    """Provides a transformation for condition expressions

    ``ParameterTransformer`` can invoke this callable directly to replace
    ``ConditionBase`` values with built expression strings, collecting any
    generated placeholders along the way.
    """

    def __init__(
        self,
        condition_builder,
        placeholder_names,
        placeholder_values,
        is_key_condition=False,
    ):
        self._condition_builder = condition_builder
        self._placeholder_names = placeholder_names
        self._placeholder_values = placeholder_values
        self._is_key_condition = is_key_condition

    def __call__(self, value):
        # Anything that is not a ConditionBase passes through untouched.
        if not isinstance(value, ConditionBase):
            return value
        # Build a condition expression string with placeholders for the
        # provided condition.
        built = self._condition_builder.build_expression(
            value, is_key_condition=self._is_key_condition
        )
        # Record the generated placeholders so the caller can merge them
        # into ExpressionAttributeNames/ExpressionAttributeValues.
        self._placeholder_names.update(built.attribute_name_placeholders)
        self._placeholder_values.update(built.attribute_value_placeholders)
        return built.condition_expression
class ParameterTransformer:
    """Transforms the input to and output from botocore based on shape"""

    def transform(self, params, model, transformation, target_shape):
        """Transforms the dynamodb input to or output from botocore

        Walks ``params`` in lockstep with the shape ``model`` and applies
        ``transformation`` to every value whose shape name matches
        ``target_shape``.

        :param params: The parameters structure to transform.
        :param model: The operation model.
        :param transformation: The function to apply the parameter
        :param target_shape: The name of the shape to apply the
            transformation to
        """
        self._transform_parameters(model, params, transformation, target_shape)

    def _transform_parameters(
        self, model, params, transformation, target_shape
    ):
        # Only container shapes can hold nested values worth visiting.
        type_name = model.type_name
        if type_name not in ('structure', 'map', 'list'):
            return
        handler = getattr(self, f'_transform_{type_name}')
        handler(model, params, transformation, target_shape)

    def _transform_structure(
        self, model, params, transformation, target_shape
    ):
        if not isinstance(params, collections_abc.Mapping):
            return
        for name in params:
            # Ignore parameters that are not part of the modeled members.
            if name not in model.members:
                continue
            member_model = model.members[name]
            if member_model.name == target_shape:
                params[name] = transformation(params[name])
            else:
                self._transform_parameters(
                    member_model, params[name], transformation, target_shape
                )

    def _transform_map(self, model, params, transformation, target_shape):
        if not isinstance(params, collections_abc.Mapping):
            return
        value_model = model.value
        # All map values share one shape, so the match is loop-invariant.
        matches = value_model.name == target_shape
        for key, current in params.items():
            if matches:
                params[key] = transformation(current)
            else:
                self._transform_parameters(
                    value_model, current, transformation, target_shape
                )

    def _transform_list(self, model, params, transformation, target_shape):
        if not isinstance(params, collections_abc.MutableSequence):
            return
        member_model = model.member
        # All list members share one shape, so the match is loop-invariant.
        matches = member_model.name == target_shape
        for index, item in enumerate(params):
            if matches:
                params[index] = transformation(item)
            else:
                self._transform_parameters(
                    member_model, item, transformation, target_shape
                )

View File

@@ -0,0 +1,310 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from decimal import (
Clamped,
Context,
Decimal,
Inexact,
Overflow,
Rounded,
Underflow,
)
from boto3.compat import collections_abc
# Single-letter AttributeValue type descriptors used in DynamoDB's wire
# format (the key of each serialized value dict).
STRING = 'S'
NUMBER = 'N'
BINARY = 'B'
STRING_SET = 'SS'
NUMBER_SET = 'NS'
BINARY_SET = 'BS'
NULL = 'NULL'
BOOLEAN = 'BOOL'
MAP = 'M'
LIST = 'L'
# Decimal context used for all number (de)serialization. prec=38 matches
# DynamoDB's documented limit of 38 significant digits; the enabled traps
# make lossy conversions raise instead of silently rounding.
DYNAMODB_CONTEXT = Context(
    Emin=-128,
    Emax=126,
    prec=38,
    traps=[Clamped, Overflow, Inexact, Rounded, Underflow],
)
# Raw Python types accepted as DynamoDB binary data.
BINARY_TYPES = (bytearray, bytes)


class Binary:
    """A wrapper marking a value as DynamoDB binary (``B``) data.

    Only ``bytes`` and ``bytearray`` values are accepted; any other type
    (including ``str``) raises ``TypeError``.
    """

    def __init__(self, value):
        if not isinstance(value, BINARY_TYPES):
            types = ', '.join(str(t) for t in BINARY_TYPES)
            raise TypeError(f'Value must be of the following types: {types}')
        self.value = value

    def __eq__(self, other):
        # Compare against either another Binary or a raw binary value.
        other_value = other.value if isinstance(other, Binary) else other
        return self.value == other_value

    def __ne__(self, other):
        return not self.__eq__(other)

    def __repr__(self):
        return f'Binary({self.value!r})'

    def __str__(self):
        # NOTE: returns the raw binary value (not text), mirroring the
        # historical API of this class.
        return self.value

    def __bytes__(self):
        return self.value

    def __hash__(self):
        return hash(self.value)
class TypeSerializer:
    """This class serializes Python data types to DynamoDB types."""

    def serialize(self, value):
        """The method to serialize the Python data types.

        :param value: A python value to be serialized to DynamoDB. Here are
            the various conversions:

            Python                              DynamoDB
            ------                              --------
            None                                {'NULL': True}
            True/False                          {'BOOL': True/False}
            int/Decimal                         {'N': str(value)}
            string                              {'S': string}
            Binary/bytearray/bytes              {'B': bytes}
            set([int/Decimal])                  {'NS': [str(value)]}
            set([string])                       {'SS': [string]}
            set([Binary/bytearray/bytes])       {'BS': [bytes]}
            list                                {'L': list}
            dict                                {'M': dict}

        For types that involve numbers, it is recommended that ``Decimal``
        objects are used to be able to round-trip the Python type.
        For types that involve binary, it is recommended that ``Binary``
        objects are used to be able to round-trip the Python type.

        :rtype: dict
        :returns: A dictionary that represents a dynamoDB data type. These
            dictionaries can be directly passed to botocore methods.
        """
        dynamodb_type = self._get_dynamodb_type(value)
        handler = getattr(self, f'_serialize_{dynamodb_type}'.lower())
        return {dynamodb_type: handler(value)}

    def _get_dynamodb_type(self, value):
        # The order of the checks is significant: booleans are tested
        # before numbers because ``bool`` is a subclass of ``int``, and
        # scalars before sets so set detection only runs on containers.
        checks = (
            (self._is_null, NULL),
            (self._is_boolean, BOOLEAN),
            (self._is_number, NUMBER),
            (self._is_string, STRING),
            (self._is_binary, BINARY),
            (lambda v: self._is_type_set(v, self._is_number), NUMBER_SET),
            (lambda v: self._is_type_set(v, self._is_string), STRING_SET),
            (lambda v: self._is_type_set(v, self._is_binary), BINARY_SET),
            (self._is_map, MAP),
            (self._is_listlike, LIST),
        )
        for predicate, dynamodb_type in checks:
            if predicate(value):
                return dynamodb_type
        raise TypeError(f'Unsupported type "{type(value)}" for value "{value}"')

    def _is_null(self, value):
        return value is None

    def _is_boolean(self, value):
        return isinstance(value, bool)

    def _is_number(self, value):
        if isinstance(value, (int, Decimal)):
            return True
        if isinstance(value, float):
            # Floats are rejected outright: they cannot round-trip through
            # DynamoDB's decimal representation without precision loss.
            raise TypeError(
                'Float types are not supported. Use Decimal types instead.'
            )
        return False

    def _is_string(self, value):
        return isinstance(value, str)

    def _is_binary(self, value):
        return isinstance(value, (Binary, bytearray, bytes))

    def _is_set(self, value):
        return isinstance(value, collections_abc.Set)

    def _is_type_set(self, value, type_validator):
        # A typed set must be a set whose every element passes the
        # validator (the validator may itself raise, e.g. for floats).
        return self._is_set(value) and all(map(type_validator, value))

    def _is_map(self, value):
        return isinstance(value, collections_abc.Mapping)

    def _is_listlike(self, value):
        return isinstance(value, (list, tuple))

    def _serialize_null(self, value):
        return True

    def _serialize_bool(self, value):
        return value

    def _serialize_n(self, value):
        number = str(DYNAMODB_CONTEXT.create_decimal(value))
        if number in ('Infinity', 'NaN'):
            raise TypeError('Infinity and NaN not supported')
        return number

    def _serialize_s(self, value):
        return value

    def _serialize_b(self, value):
        # Unwrap Binary objects down to the raw bytes/bytearray.
        return value.value if isinstance(value, Binary) else value

    def _serialize_ss(self, value):
        return [self._serialize_s(item) for item in value]

    def _serialize_ns(self, value):
        return [self._serialize_n(item) for item in value]

    def _serialize_bs(self, value):
        return [self._serialize_b(item) for item in value]

    def _serialize_l(self, value):
        return [self.serialize(item) for item in value]

    def _serialize_m(self, value):
        return {key: self.serialize(item) for key, item in value.items()}
class TypeDeserializer:
    """This class deserializes DynamoDB types to Python types."""

    def deserialize(self, value):
        """The method to deserialize the DynamoDB data types.

        :param value: A DynamoDB value to be deserialized to a pythonic value.
            Here are the various conversions:

            DynamoDB                            Python
            --------                            ------
            {'NULL': True}                      None
            {'BOOL': True/False}                True/False
            {'N': str(value)}                   Decimal(str(value))
            {'S': string}                       string
            {'B': bytes}                        Binary(bytes)
            {'NS': [str(value)]}                set([Decimal(str(value))])
            {'SS': [string]}                    set([string])
            {'BS': [bytes]}                     set([bytes])
            {'L': list}                         list
            {'M': dict}                         dict

        :returns: The pythonic value of the DynamoDB type.
        """
        if not value:
            raise TypeError(
                'Value must be a nonempty dictionary whose key '
                'is a valid dynamodb type.'
            )
        # The single key of the dict names the DynamoDB type.
        dynamodb_type = list(value.keys())[0]
        deserializer = getattr(
            self, f'_deserialize_{dynamodb_type}'.lower(), None
        )
        if deserializer is None:
            raise TypeError(f'Dynamodb type {dynamodb_type} is not supported')
        return deserializer(value[dynamodb_type])

    def _deserialize_null(self, value):
        return None

    def _deserialize_bool(self, value):
        return value

    def _deserialize_n(self, value):
        return DYNAMODB_CONTEXT.create_decimal(value)

    def _deserialize_s(self, value):
        return value

    def _deserialize_b(self, value):
        return Binary(value)

    def _deserialize_ns(self, value):
        return {self._deserialize_n(item) for item in value}

    def _deserialize_ss(self, value):
        return {self._deserialize_s(item) for item in value}

    def _deserialize_bs(self, value):
        return {self._deserialize_b(item) for item in value}

    def _deserialize_l(self, value):
        return [self.deserialize(item) for item in value]

    def _deserialize_m(self, value):
        return {key: self.deserialize(item) for key, item in value.items()}

View File

@@ -0,0 +1,12 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

View File

@@ -0,0 +1,40 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
def inject_create_tags(event_name, class_attributes, **kwargs):
    """This injects a custom create_tags method onto the ec2 service resource

    This is needed because the resource model is not able to express
    creating multiple tag resources based on the fact you can apply a set
    of tags to multiple ec2 resources.

    :type event_name: string
    :param event_name: The name of the event the handler was registered
        for (unused here).
    :type class_attributes: dict
    :param class_attributes: The attribute dictionary of the resource
        class under construction; the ``create_tags`` function is added
        to it so it becomes a method of the resource.
    """
    class_attributes['create_tags'] = create_tags
def create_tags(self, **kwargs):
    """Create tags on EC2 resources and return the new ``Tag`` resources.

    The low-level ``CreateTags`` client call is made first; its response
    is ignored here, so the returned ``Tag`` resources are constructed
    from the ``Resources`` and ``Tags`` keyword arguments instead.
    """
    # Fire the actual service call before building the resource objects.
    self.meta.client.create_tags(**kwargs)
    resources = kwargs.get('Resources', [])
    tags = kwargs.get('Tags', [])
    # One Tag resource per (resource, tag) pair that was just applied.
    return [
        self.Tag(resource, tag['Key'], tag['Value'])
        for resource in resources
        for tag in tags
    ]

View File

@@ -0,0 +1,37 @@
# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from boto3.resources.action import CustomModeledAction
def inject_delete_tags(event_emitter, **kwargs):
    """Injects a custom, modeled ``delete_tags`` action onto EC2 resources.

    :type event_emitter: :py:class:`botocore.hooks.BaseEventHooks`
    :param event_emitter: The session event emitter, used by the injected
        action to generate its documentation.
    """
    # Model for the DeleteTags operation: the resource's ``Id`` identifier
    # is mapped into the first element of ``Resources``.
    action_model = {
        'request': {
            'operation': 'DeleteTags',
            'params': [
                {
                    'target': 'Resources[0]',
                    'source': 'identifier',
                    'name': 'Id',
                }
            ],
        }
    }
    action = CustomModeledAction(
        'delete_tags', action_model, delete_tags, event_emitter
    )
    action.inject(**kwargs)
def delete_tags(self, **kwargs):
    """Delete tags from this resource via the ``DeleteTags`` operation.

    ``Resources`` is always forced to this resource's own id, overriding
    any caller-supplied value.
    """
    params = dict(kwargs, Resources=[self.id])
    return self.meta.client.delete_tags(**params)

View File

@@ -0,0 +1,35 @@
Generate a signed URL for Amazon CloudFront
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The following example shows how to generate a signed URL for Amazon CloudFront.
Note that you will need the ``cryptography`` `library <https://cryptography.io/en/latest/>`__ to follow this example::
import datetime
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import padding
from botocore.signers import CloudFrontSigner
def rsa_signer(message):
with open('path/to/key.pem', 'rb') as key_file:
private_key = serialization.load_pem_private_key(
key_file.read(),
password=None,
backend=default_backend()
)
return private_key.sign(message, padding.PKCS1v15(), hashes.SHA1())
key_id = 'AKIAIOSFODNN7EXAMPLE'
url = 'http://d2949o5mkkp72v.cloudfront.net/hello.txt'
expire_date = datetime.datetime(2017, 1, 1)
cloudfront_signer = CloudFrontSigner(key_id, rsa_signer)
# Create a signed url that will be valid until the specific expiry date
# provided using a canned policy.
signed_url = cloudfront_signer.generate_presigned_url(
url, date_less_than=expire_date)
print(signed_url)

View File

@@ -0,0 +1,185 @@
List objects in an Amazon S3 bucket
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The following example shows how to use an Amazon S3 bucket resource to list
the objects in the bucket.
.. code-block:: python
import boto3
s3 = boto3.resource('s3')
bucket = s3.Bucket('my-bucket')
for obj in bucket.objects.all():
print(obj.key)
List top-level common prefixes in Amazon S3 bucket
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
This example shows how to list all of the top-level common prefixes in an
Amazon S3 bucket:
.. code-block:: python
import boto3
client = boto3.client('s3')
paginator = client.get_paginator('list_objects')
result = paginator.paginate(Bucket='my-bucket', Delimiter='/')
for prefix in result.search('CommonPrefixes'):
print(prefix.get('Prefix'))
Restore Glacier objects in an Amazon S3 bucket
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The following example shows how to initiate restoration of glacier objects in
an Amazon S3 bucket, determine if a restoration is on-going, and determine if a
restoration is finished.
.. code-block:: python
import boto3
s3 = boto3.resource('s3')
bucket = s3.Bucket('glacier-bucket')
for obj_sum in bucket.objects.all():
obj = s3.Object(obj_sum.bucket_name, obj_sum.key)
if obj.storage_class == 'GLACIER':
# Try to restore the object if the storage class is glacier and
# the object does not have a completed or ongoing restoration
# request.
if obj.restore is None:
print('Submitting restoration request: %s' % obj.key)
obj.restore_object(RestoreRequest={'Days': 1})
# Print out objects whose restoration is on-going
elif 'ongoing-request="true"' in obj.restore:
print('Restoration in-progress: %s' % obj.key)
# Print out objects whose restoration is complete
elif 'ongoing-request="false"' in obj.restore:
print('Restoration complete: %s' % obj.key)
Uploading/downloading files using SSE KMS
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
This example shows how to use SSE-KMS to upload objects using
server side encryption with a key managed by KMS.
We can either use the default KMS master key, or create a
custom key in AWS and use it to encrypt the object by passing in its
key id.
With KMS, nothing else needs to be provided for getting the
object; S3 already knows how to decrypt the object.
.. code-block:: python
import boto3
import os
BUCKET = 'your-bucket-name'
s3 = boto3.client('s3')
keyid = '<the key id>'
print("Uploading S3 object with SSE-KMS")
s3.put_object(Bucket=BUCKET,
Key='encrypt-key',
Body=b'foobar',
ServerSideEncryption='aws:kms',
# Optional: SSEKMSKeyId
SSEKMSKeyId=keyid)
print("Done")
# Getting the object:
print("Getting S3 object...")
response = s3.get_object(Bucket=BUCKET,
Key='encrypt-key')
print("Done, response body:")
print(response['Body'].read())
Uploading/downloading files using SSE Customer Keys
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
This example shows how to use SSE-C to upload objects using
server side encryption with a customer provided key.
First, we'll need a 32 byte key. For this example, we'll
randomly generate a key but you can use any 32 byte key
you want. Remember, you must use the same key to download
the object. If you lose the encryption key, you lose
the object.
Also note how we don't have to provide the SSECustomerKeyMD5.
Boto3 will automatically compute this value for us.
.. code-block:: python
import boto3
import os
BUCKET = 'your-bucket-name'
KEY = os.urandom(32)
s3 = boto3.client('s3')
print("Uploading S3 object with SSE-C")
s3.put_object(Bucket=BUCKET,
Key='encrypt-key',
Body=b'foobar',
SSECustomerKey=KEY,
SSECustomerAlgorithm='AES256')
print("Done")
# Getting the object:
print("Getting S3 object...")
# Note how we're using the same ``KEY`` we
# created earlier.
response = s3.get_object(Bucket=BUCKET,
Key='encrypt-key',
SSECustomerKey=KEY,
SSECustomerAlgorithm='AES256')
print("Done, response body:")
print(response['Body'].read())
Downloading a specific version of an S3 object
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
This example shows how to download a specific version of an
S3 object.
.. code-block:: python
import boto3
s3 = boto3.client('s3')
s3.download_file(
"bucket-name", "key-name", "tmp.txt",
ExtraArgs={"VersionId": "my-version-id"}
)
Filter objects by last modified time using JMESPath
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
This example shows how to filter objects by last modified time
using JMESPath.
.. code-block:: python
import boto3
s3 = boto3.client("s3")
s3_paginator = s3.get_paginator('list_objects_v2')
s3_iterator = s3_paginator.paginate(Bucket='your-bucket-name')
filtered_iterator = s3_iterator.search(
"Contents[?to_string(LastModified)>='\"2022-01-05 08:05:37+00:00\"'].Key"
)
for key_data in filtered_iterator:
print(key_data)

View File

@@ -0,0 +1,126 @@
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
# All exceptions in this class should subclass from Boto3Error.
import botocore.exceptions
# All exceptions should subclass from Boto3Error in this module.
class Boto3Error(Exception):
    """Base class for all Boto3 errors."""


class ResourceLoadException(Boto3Error):
    """Raised when a resource's attributes could not be loaded."""

    pass


# NOTE: This doesn't appear to be used anywhere.
# It's probably safe to remove this.
class NoVersionFound(Boto3Error):
    """Raised when no API version is found for a service (unused)."""

    pass
# We're subclassing from botocore.exceptions.DataNotFoundError
# to keep backwards compatibility with anyone that was catching
# this low level Botocore error before this exception was
# introduced in boto3.
# Same thing for ResourceNotExistsError below.
class UnknownAPIVersionError(
    Boto3Error, botocore.exceptions.DataNotFoundError
):
    """Raised when an unsupported API version is requested for a resource."""

    def __init__(self, service_name, bad_api_version, available_api_versions):
        msg = (
            f"The '{service_name}' resource does not support an API version of: {bad_api_version}\n"
            f"Valid API versions are: {available_api_versions}"
        )
        # Not using super because we don't want the DataNotFoundError
        # to be called, it has a different __init__ signature.
        Boto3Error.__init__(self, msg)
class ResourceNotExistsError(
    Boto3Error, botocore.exceptions.DataNotFoundError
):
    """Raised when you attempt to create a resource that does not exist."""

    def __init__(self, service_name, available_services, has_low_level_client):
        msg = (
            "The '{}' resource does not exist.\n"
            "The available resources are:\n"
            "   - {}\n".format(
                service_name, '\n   - '.join(available_services)
            )
        )
        # When only a low-level client exists for the service, point the
        # user at boto3.client() as the alternative.
        if has_low_level_client:
            msg = (
                f"{msg}\nConsider using a boto3.client('{service_name}') "
                f"instead of a resource for '{service_name}'"
            )
        # Not using super because we don't want the DataNotFoundError
        # to be called, it has a different __init__ signature.
        Boto3Error.__init__(self, msg)
class RetriesExceededError(Boto3Error):
    """Raised when the maximum number of retry attempts is exceeded."""

    def __init__(self, last_exception, msg='Max Retries Exceeded'):
        super().__init__(msg)
        # The exception that triggered the final retry, kept for callers
        # that want the root cause.
        self.last_exception = last_exception


class S3TransferFailedError(Boto3Error):
    """Raised when an S3 transfer cannot be completed."""

    pass


class S3UploadFailedError(Boto3Error):
    """Raised when an S3 upload cannot be completed."""

    pass
class DynamoDBOperationNotSupportedError(Boto3Error):
    """Raised for operations that are not supported for an operand."""

    def __init__(self, operation, value):
        msg = (
            f'{operation} operation cannot be applied to value {value} of type '
            f'{type(value)} directly. Must use AttributeBase object methods '
            f'(i.e. Attr().eq()). to generate ConditionBase instances first.'
        )
        Exception.__init__(self, msg)


# FIXME: Backward compatibility
# (misspelled alias kept deliberately; removing it would break callers
# that catch/reference the old name)
DynanmoDBOperationNotSupportedError = DynamoDBOperationNotSupportedError
class DynamoDBNeedsConditionError(Boto3Error):
    """Raised when input is not a condition"""

    def __init__(self, value):
        msg = (
            f'Expecting a ConditionBase object. Got {value} of type {type(value)}. '
            f'Use AttributeBase object methods (i.e. Attr().eq()). to '
            f'generate ConditionBase instances.'
        )
        Exception.__init__(self, msg)


class DynamoDBNeedsKeyConditionError(Boto3Error):
    """Raised when a key condition is required but was not provided."""

    pass
class PythonDeprecationWarning(Warning):
    """
    Python version being used is scheduled to become unsupported
    in a future release. See warning for specifics.
    """

    pass

View File

@@ -0,0 +1,257 @@
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import logging
from botocore import xform_name
from boto3.docs.docstring import ActionDocstring
from boto3.utils import inject_attribute
from .model import Action
from .params import create_request_parameters
from .response import RawHandler, ResourceHandler
logger = logging.getLogger(__name__)
class ServiceAction:
    """
    A callable action on a resource, for example
    ``sqs.get_queue_by_name(...)`` or ``s3.Bucket('foo').delete()``.

    Request parameters may be built from the resource's identifiers, and
    the response is returned either raw or as new resource instance(s).

    :type action_model: :py:class`~boto3.resources.model.Action`
    :param action_model: The action model.

    :type factory: ResourceFactory
    :param factory: The factory that created the resource class to which
        this action is attached.

    :type service_context: :py:class:`~boto3.utils.ServiceContext`
    :param service_context: Context about the AWS service
    """

    def __init__(self, action_model, factory=None, service_context=None):
        self._action_model = action_model
        resource_model = action_model.resource
        if resource_model:
            # A resource is modeled for the response, so construct
            # resource instances from each call's output.
            handler = ResourceHandler(
                search_path=resource_model.path,
                factory=factory,
                resource_model=resource_model,
                service_context=service_context,
                operation_name=action_model.request.operation,
            )
        else:
            # No resource modeled; hand back the raw response.
            handler = RawHandler(action_model.path)
        self._response_handler = handler

    def __call__(self, parent, *args, **kwargs):
        """
        Build the operation parameters, perform the request operation and
        construct any modeled resources from the response.

        :type parent: :py:class:`~boto3.resources.base.ServiceResource`
        :param parent: The resource instance to which this action is attached.
        :rtype: dict or ServiceResource or list(ServiceResource)
        :return: The response, either as a raw dict or resource instance(s).
        """
        operation_name = xform_name(self._action_model.request.operation)
        # Predefined parameters are built first; user-supplied kwargs win
        # on any conflict.
        params = create_request_parameters(parent, self._action_model.request)
        params.update(kwargs)
        logger.debug(
            'Calling %s:%s with %r',
            parent.meta.service_name,
            operation_name,
            params,
        )
        response = getattr(parent.meta.client, operation_name)(*args, **params)
        logger.debug('Response: %r', response)
        return self._response_handler(parent, params, response)
class BatchAction(ServiceAction):
    """
    An action which operates on a batch of items in a collection, typically
    a single page of results from the collection's underlying service
    operation call. For example, this allows you to delete up to 999
    S3 objects in a single operation rather than calling ``.delete()`` on
    each one individually.

    :type action_model: :py:class`~boto3.resources.model.Action`
    :param action_model: The action model.

    :type factory: ResourceFactory
    :param factory: The factory that created the resource class to which
        this action is attached.

    :type service_context: :py:class:`~boto3.utils.ServiceContext`
    :param service_context: Context about the AWS service
    """

    def __call__(self, parent, *args, **kwargs):
        """
        Perform the batch action's operation on every page of results
        from the collection.

        :type parent:
            :py:class:`~boto3.resources.collection.ResourceCollection`
        :param parent: The collection iterator to which this action
            is attached.
        :rtype: list(dict)
        :return: A list of low-level response dicts from each call.
        """
        service_name = None
        client = None
        responses = []
        operation_name = xform_name(self._action_model.request.operation)
        # Unlike the simple action above, a batch action must operate
        # on batches (or pages) of items. So we get each page, construct
        # the necessary parameters and call the batch operation.
        for page in parent.pages():
            params = {}
            for index, resource in enumerate(page):
                # There is no public interface to get a service name
                # or low-level client from a collection, so we get
                # these from the first resource in the collection.
                if service_name is None:
                    service_name = resource.meta.service_name
                if client is None:
                    client = resource.meta.client
                # The helper fills ``params`` in place, keyed by this
                # item's position in the page; its return value is
                # intentionally unused here.
                create_request_parameters(
                    resource,
                    self._action_model.request,
                    params=params,
                    index=index,
                )
            if not params:
                # There are no items, no need to make a call.
                break
            # User-supplied kwargs override/extend the generated params
            # on every page.
            params.update(kwargs)
            logger.debug(
                'Calling %s:%s with %r', service_name, operation_name, params
            )
            response = getattr(client, operation_name)(*args, **params)
            logger.debug('Response: %r', response)
            responses.append(self._response_handler(parent, params, response))
        return responses
class WaiterAction:
    """
    A callable waiter action on a resource, for example
    ``s3.Bucket('foo').wait_until_bucket_exists()``.
    Parameters for the underlying waiter may be built from the resource's
    identifiers.

    :type waiter_model: :py:class`~boto3.resources.model.Waiter`
    :param waiter_model: The action waiter.

    :type waiter_resource_name: string
    :param waiter_resource_name: The name of the waiter action on the
        resource; it usually begins with ``wait_until_``.
    """

    def __init__(self, waiter_model, waiter_resource_name):
        self._waiter_model = waiter_model
        self._waiter_resource_name = waiter_resource_name

    def __call__(self, parent, *args, **kwargs):
        """
        Build the operation parameters and perform the wait.

        :type parent: :py:class:`~boto3.resources.base.ServiceResource`
        :param parent: The resource instance to which this action is attached.
        """
        # Predefined parameters are built first; user-supplied kwargs win
        # on any conflict.
        params = create_request_parameters(parent, self._waiter_model)
        params.update(kwargs)
        logger.debug(
            'Calling %s:%s with %r',
            parent.meta.service_name,
            self._waiter_resource_name,
            params,
        )
        waiter_name = xform_name(self._waiter_model.waiter_name)
        waiter = parent.meta.client.get_waiter(waiter_name)
        result = waiter.wait(**params)
        logger.debug('Response: %r', result)
class CustomModeledAction:
    """A custom, modeled action to inject into a resource."""

    def __init__(self, action_name, action_model, function, event_emitter):
        """
        :type action_name: str
        :param action_name: The name of the action to inject, e.g.
            'delete_tags'

        :type action_model: dict
        :param action_model: A JSON definition of the action, as if it were
            part of the resource model.

        :type function: function
        :param function: The function to perform when the action is called.
            The first argument should be 'self', which will be the resource
            the function is to be called on.

        :type event_emitter: :py:class:`botocore.hooks.BaseEventHooks`
        :param event_emitter: The session event emitter.
        """
        self.name = action_name
        self.model = action_model
        self.function = function
        self.emitter = event_emitter

    def inject(self, class_attributes, service_context, event_name, **kwargs):
        # The last dotted component of the event name is the resource
        # name, e.g. 'creating-resource-class.ec2.Instance' -> 'Instance'.
        resource_name = event_name.rsplit(".")[-1]
        action = Action(self.name, self.model, {})
        # Rename the injected function to match the action and attach a
        # generated docstring so it documents like a modeled action.
        self.function.__name__ = self.name
        self.function.__doc__ = ActionDocstring(
            resource_name=resource_name,
            event_emitter=self.emitter,
            action_model=action,
            service_model=service_context.service_model,
            include_signature=False,
        )
        inject_attribute(class_attributes, self.name, self.function)

View File

@@ -0,0 +1,155 @@
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import logging
import boto3
logger = logging.getLogger(__name__)
class ResourceMeta:
    """
    An object containing metadata about a resource.
    """

    def __init__(
        self,
        service_name,
        identifiers=None,
        client=None,
        data=None,
        resource_model=None,
    ):
        #: (``string``) The service name, e.g. 's3'
        self.service_name = service_name
        #: (``list``) List of identifier names
        self.identifiers = [] if identifiers is None else identifiers
        #: (:py:class:`~botocore.client.BaseClient`) Low-level Botocore client
        self.client = client
        #: (``dict``) Loaded resource data attributes
        self.data = data
        # The resource model for that resource
        self.resource_model = resource_model

    def __repr__(self):
        return (
            f"ResourceMeta('{self.service_name}', "
            f"identifiers={self.identifiers})"
        )

    def __eq__(self, other):
        # Two metas are equal when they are the same kind of object and
        # every attribute matches.
        if self.__class__.__name__ != other.__class__.__name__:
            return False
        return self.__dict__ == other.__dict__

    def copy(self):
        """
        Create a copy of this metadata object.
        """
        # Shallow copy: attribute values (e.g. the identifiers list) are
        # shared between the original and the copy.
        attrs = self.__dict__.copy()
        name = attrs.pop('service_name')
        return ResourceMeta(name, **attrs)
class ServiceResource:
    """
    A base class for resources.

    :type client: botocore.client
    :param client: A low-level Botocore client instance
    """

    meta = None
    """
    Stores metadata about this resource instance, such as the
    ``service_name``, the low-level ``client`` and any cached ``data``
    from when the instance was hydrated. For example::

        # Get a low-level client from a resource instance
        client = resource.meta.client
        response = client.operation(Param='foo')

        # Print the resource instance's service short name
        print(resource.meta.service_name)

    See :py:class:`ResourceMeta` for more information.
    """

    def __init__(self, *args, **kwargs):
        # Copy the class-level meta so that per-instance mutations never
        # leak into other instances of the same subclass.
        self.meta = self.meta.copy()

        # Use the caller-provided client when given, otherwise build a
        # default client for this resource's service.
        client = kwargs.get('client')
        if client is not None:
            self.meta.client = client
        else:
            self.meta.client = boto3.client(self.meta.service_name)

        # Positional arguments map onto identifiers in the order in
        # which the identifiers were defined in the ResourceJSON.
        for position, value in enumerate(args):
            setattr(self, '_' + self.meta.identifiers[position], value)

        # Keyword arguments may also set identifiers; ``client`` was
        # consumed above and anything else must be a known identifier.
        for name, value in kwargs.items():
            if name == 'client':
                continue
            if name not in self.meta.identifiers:
                raise ValueError(f'Unknown keyword argument: {name}')
            setattr(self, '_' + name, value)

        # Every identifier must have been supplied one way or another.
        for identifier in self.meta.identifiers:
            if getattr(self, identifier) is None:
                raise ValueError(f'Required parameter {identifier} not set')

    def __repr__(self):
        parts = ', '.join(
            f'{name}={getattr(self, name)!r}'
            for name in self.meta.identifiers
        )
        return f'{self.__class__.__name__}({parts})'

    def __eq__(self, other):
        # Must be instances of the same resource class, and every
        # identifier must match; e.g. two buckets are equal only when
        # they have the same name.
        if self.__class__.__name__ != other.__class__.__name__:
            return False
        return all(
            getattr(self, name) == getattr(other, name)
            for name in self.meta.identifiers
        )

    def __hash__(self):
        values = tuple(
            getattr(self, name) for name in self.meta.identifiers
        )
        return hash((self.__class__.__name__, values))

View File

@@ -0,0 +1,572 @@
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import copy
import logging
from botocore import xform_name
from botocore.utils import merge_dicts
from ..docs import docstring
from .action import BatchAction
from .params import create_request_parameters
from .response import ResourceHandler
logger = logging.getLogger(__name__)
class ResourceCollection:
    """
    Represents a collection of resources, which can be iterated through,
    optionally with filtering. Collections automatically handle pagination
    for you.

    See :ref:`guide_collections` for a high-level overview of collections,
    including when remote service requests are performed.

    :type model: :py:class:`~boto3.resources.model.Collection`
    :param model: Collection model
    :type parent: :py:class:`~boto3.resources.base.ServiceResource`
    :param parent: The collection's parent resource
    :type handler: :py:class:`~boto3.resources.response.ResourceHandler`
    :param handler: The resource response handler used to create resource
                    instances
    """

    def __init__(self, model, parent, handler, **kwargs):
        self._model = model
        self._parent = parent
        # Snake-cased name of the underlying client operation, e.g.
        # 'ListObjects' -> 'list_objects'.
        self._py_operation_name = xform_name(model.request.operation)
        self._handler = handler
        # Deep copy so that clones (see _clone) never share mutable
        # parameter state with the caller's kwargs or with each other.
        self._params = copy.deepcopy(kwargs)

    def __repr__(self):
        return '{}({}, {})'.format(
            self.__class__.__name__,
            self._parent,
            '{}.{}'.format(
                self._parent.meta.service_name, self._model.resource.type
            ),
        )

    def __iter__(self):
        """
        A generator which yields resource instances after doing the
        appropriate service operation calls and handling any pagination
        on your behalf.

        Page size, item limit, and filter parameters are applied
        if they have previously been set.

            >>> bucket = s3.Bucket('boto3')
            >>> for obj in bucket.objects.all():
            ...     print(obj.key)
            'key1'
            'key2'
        """
        limit = self._params.get('limit', None)
        count = 0
        for page in self.pages():
            for item in page:
                yield item
                # If the limit is set and has been reached, then
                # we stop processing items here.
                count += 1
                if limit is not None and count >= limit:
                    return

    def _clone(self, **kwargs):
        """
        Create a clone of this collection. This is used by the methods
        below to provide a chainable interface that returns copies
        rather than the original. This allows things like:

            >>> base = collection.filter(Param1=1)
            >>> query1 = base.filter(Param2=2)
            >>> query2 = base.filter(Param3=3)
            >>> query1.params
            {'Param1': 1, 'Param2': 2}
            >>> query2.params
            {'Param1': 1, 'Param3': 3}

        :rtype: :py:class:`ResourceCollection`
        :return: A clone of this resource collection
        """
        # Merge this collection's params with the overrides; list-valued
        # params are appended rather than replaced.
        params = copy.deepcopy(self._params)
        merge_dicts(params, kwargs, append_lists=True)
        clone = self.__class__(
            self._model, self._parent, self._handler, **params
        )
        return clone

    def pages(self):
        """
        A generator which yields pages of resource instances after
        doing the appropriate service operation calls and handling
        any pagination on your behalf. Non-paginated calls will
        return a single page of items.

        Page size, item limit, and filter parameters are applied
        if they have previously been set.

            >>> bucket = s3.Bucket('boto3')
            >>> for page in bucket.objects.pages():
            ...     for obj in page:
            ...         print(obj.key)
            'key1'
            'key2'

        :rtype: list(:py:class:`~boto3.resources.base.ServiceResource`)
        :return: List of resource instances
        """
        client = self._parent.meta.client
        # Pull 'limit'/'page_size' out so only real operation parameters
        # are sent to the service.
        cleaned_params = self._params.copy()
        limit = cleaned_params.pop('limit', None)
        page_size = cleaned_params.pop('page_size', None)
        params = create_request_parameters(self._parent, self._model.request)
        merge_dicts(params, cleaned_params, append_lists=True)
        # Is this a paginated operation? If so, we need to get an
        # iterator for the various pages. If not, then we simply
        # call the operation and return the result as a single
        # page in a list. For non-paginated results, we just ignore
        # the page size parameter.
        if client.can_paginate(self._py_operation_name):
            logger.debug(
                'Calling paginated %s:%s with %r',
                self._parent.meta.service_name,
                self._py_operation_name,
                params,
            )
            paginator = client.get_paginator(self._py_operation_name)
            # MaxItems/PageSize may be None here, which the paginator
            # treats as "no limit"/"service default".
            pages = paginator.paginate(
                PaginationConfig={'MaxItems': limit, 'PageSize': page_size},
                **params
            )
        else:
            logger.debug(
                'Calling %s:%s with %r',
                self._parent.meta.service_name,
                self._py_operation_name,
                params,
            )
            pages = [getattr(client, self._py_operation_name)(**params)]
        # Now that we have a page iterator or single page of results
        # we start processing and yielding individual items.
        count = 0
        for page in pages:
            page_items = []
            for item in self._handler(self._parent, params, page):
                page_items.append(item)
                # If the limit is set and has been reached, then
                # we stop processing items here.
                count += 1
                if limit is not None and count >= limit:
                    break
            yield page_items
            # Stop reading pages if we've reached our limit
            if limit is not None and count >= limit:
                break

    def all(self):
        """
        Get all items from the collection, optionally with a custom
        page size and item count limit.

        This method returns an iterable generator which yields
        individual resource instances. Example use::

            # Iterate through items
            >>> for queue in sqs.queues.all():
            ...     print(queue.url)
            'https://url1'
            'https://url2'

            # Convert to list
            >>> queues = list(sqs.queues.all())
            >>> len(queues)
            2
        """
        return self._clone()

    def filter(self, **kwargs):
        """
        Get items from the collection, passing keyword arguments along
        as parameters to the underlying service operation, which are
        typically used to filter the results.

        This method returns an iterable generator which yields
        individual resource instances. Example use::

            # Iterate through items
            >>> for queue in sqs.queues.filter(Param='foo'):
            ...     print(queue.url)
            'https://url1'
            'https://url2'

            # Convert to list
            >>> queues = list(sqs.queues.filter(Param='foo'))
            >>> len(queues)
            2

        :rtype: :py:class:`ResourceCollection`
        """
        return self._clone(**kwargs)

    def limit(self, count):
        """
        Return at most this many resources.

            >>> for bucket in s3.buckets.limit(5):
            ...     print(bucket.name)
            'bucket1'
            'bucket2'
            'bucket3'
            'bucket4'
            'bucket5'

        :type count: int
        :param count: Return no more than this many items
        :rtype: :py:class:`ResourceCollection`
        """
        return self._clone(limit=count)

    def page_size(self, count):
        """
        Fetch at most this many resources per service request.

            >>> for obj in s3.Bucket('boto3').objects.page_size(100):
            ...     print(obj.key)

        :type count: int
        :param count: Fetch this many items per request
        :rtype: :py:class:`ResourceCollection`
        """
        return self._clone(page_size=count)
class CollectionManager:
    """
    A collection manager provides access to resource collection instances,
    which can be iterated and filtered. The manager exposes some
    convenience functions that are also found on resource collections,
    such as :py:meth:`~ResourceCollection.all` and
    :py:meth:`~ResourceCollection.filter`.

    Get all items::

        >>> for bucket in s3.buckets.all():
        ...     print(bucket.name)

    Get only some items via filtering::

        >>> for queue in sqs.queues.filter(QueueNamePrefix='AWS'):
        ...     print(queue.url)

    Get whole pages of items:

        >>> for page in s3.Bucket('boto3').objects.pages():
        ...     for obj in page:
        ...         print(obj.key)

    A collection manager is not iterable. You **must** call one of the
    methods that return a :py:class:`ResourceCollection` before trying
    to iterate, slice, or convert to a list.

    See the :ref:`guide_collections` guide for a high-level overview
    of collections, including when remote service requests are performed.

    :type collection_model: :py:class:`~boto3.resources.model.Collection`
    :param collection_model: Collection model
    :type parent: :py:class:`~boto3.resources.base.ServiceResource`
    :param parent: The collection's parent resource
    :type factory: :py:class:`~boto3.resources.factory.ResourceFactory`
    :param factory: The resource factory to create new resources
    :type service_context: :py:class:`~boto3.utils.ServiceContext`
    :param service_context: Context about the AWS service
    """

    # The ResourceCollection subclass instantiated by iterator().
    _collection_cls = ResourceCollection

    def __init__(self, collection_model, parent, factory, service_context):
        self._model = collection_model
        self._parent = parent
        # The handler turns raw service responses into resource instances.
        self._handler = ResourceHandler(
            search_path=collection_model.resource.path,
            factory=factory,
            resource_model=collection_model.resource,
            service_context=service_context,
            operation_name=collection_model.request.operation,
        )

    def __repr__(self):
        qualified_type = '{}.{}'.format(
            self._parent.meta.service_name, self._model.resource.type
        )
        return f'{self.__class__.__name__}({self._parent}, {qualified_type})'

    def iterator(self, **kwargs):
        """
        Get a resource collection iterator from this manager.

        :rtype: :py:class:`ResourceCollection`
        :return: An iterable representing the collection of resources
        """
        return self._collection_cls(
            self._model, self._parent, self._handler, **kwargs
        )

    # The methods below proxy to ResourceCollection, borrowing its
    # docstrings so documentation lives in one place.
    def all(self):
        return self.iterator()

    all.__doc__ = ResourceCollection.all.__doc__

    def filter(self, **kwargs):
        return self.iterator(**kwargs)

    filter.__doc__ = ResourceCollection.filter.__doc__

    def limit(self, count):
        return self.iterator(limit=count)

    limit.__doc__ = ResourceCollection.limit.__doc__

    def page_size(self, count):
        return self.iterator(page_size=count)

    page_size.__doc__ = ResourceCollection.page_size.__doc__

    def pages(self):
        return self.iterator().pages()

    pages.__doc__ = ResourceCollection.pages.__doc__
class CollectionFactory:
    """
    A factory to create new
    :py:class:`CollectionManager` and :py:class:`ResourceCollection`
    subclasses from a :py:class:`~boto3.resources.model.Collection`
    model. These subclasses include methods to perform batch operations.
    """

    def load_from_definition(
        self, resource_name, collection_model, service_context, event_emitter
    ):
        """
        Loads a collection from a model, creating a new
        :py:class:`CollectionManager` subclass
        with the correct properties and methods, named based on the service
        and resource name, e.g. ec2.InstanceCollectionManager. It also
        creates a new :py:class:`ResourceCollection` subclass which is used
        by the new manager class.

        :type resource_name: string
        :param resource_name: Name of the resource to look up. For services,
                              this should match the ``service_name``.
        :type collection_model: :py:class:`~boto3.resources.model.Collection`
        :param collection_model: The collection model to build classes from.
        :type service_context: :py:class:`~boto3.utils.ServiceContext`
        :param service_context: Context about the AWS service
        :type event_emitter: :py:class:`~botocore.hooks.HierarchialEmitter`
        :param event_emitter: An event emitter

        :rtype: Subclass of :py:class:`CollectionManager`
        :return: The collection class.
        """
        attrs = {}
        collection_name = collection_model.name

        # Create the batch actions for a collection
        self._load_batch_actions(
            attrs,
            resource_name,
            collection_model,
            service_context.service_model,
            event_emitter,
        )
        # Add the documentation to the collection class's methods
        self._load_documented_collection_methods(
            attrs=attrs,
            resource_name=resource_name,
            collection_model=collection_model,
            service_model=service_context.service_model,
            event_emitter=event_emitter,
            base_class=ResourceCollection,
        )

        # Service-level collections omit the resource name from the class
        # name, e.g. 'ec2.InstancesCollection' vs
        # 'ec2.Instance.VolumesCollection'.
        if service_context.service_name == resource_name:
            cls_name = '{}.{}Collection'.format(
                service_context.service_name, collection_name
            )
        else:
            cls_name = '{}.{}.{}Collection'.format(
                service_context.service_name, resource_name, collection_name
            )

        collection_cls = type(str(cls_name), (ResourceCollection,), attrs)

        # NOTE: ``attrs`` is intentionally reused, so the manager class
        # also carries the batch actions loaded above; the proxy methods
        # are re-documented below against CollectionManager.
        # Add the documentation to the collection manager's methods
        self._load_documented_collection_methods(
            attrs=attrs,
            resource_name=resource_name,
            collection_model=collection_model,
            service_model=service_context.service_model,
            event_emitter=event_emitter,
            base_class=CollectionManager,
        )
        attrs['_collection_cls'] = collection_cls
        cls_name += 'Manager'

        return type(str(cls_name), (CollectionManager,), attrs)

    def _load_batch_actions(
        self,
        attrs,
        resource_name,
        collection_model,
        service_model,
        event_emitter,
    ):
        """
        Batch actions on the collection become methods on both
        the collection manager and iterators.
        """
        for action_model in collection_model.batch_actions:
            # Modeled action names are CamelCase; expose snake_case methods.
            snake_cased = xform_name(action_model.name)
            attrs[snake_cased] = self._create_batch_action(
                resource_name,
                snake_cased,
                action_model,
                collection_model,
                service_model,
                event_emitter,
            )

    def _load_documented_collection_methods(
        factory_self,  # ``self`` is reserved for the inner closures below.
        attrs,
        resource_name,
        collection_model,
        service_model,
        event_emitter,
        base_class,
    ):
        # The base class already has these methods defined. However
        # the docstrings are generic and not based for a particular service
        # or resource. So we override these methods by proxying to the
        # base class's builtin method and adding a docstring
        # that pertains to the resource.

        # A collection's all() method.
        def all(self):
            return base_class.all(self)

        all.__doc__ = docstring.CollectionMethodDocstring(
            resource_name=resource_name,
            action_name='all',
            event_emitter=event_emitter,
            collection_model=collection_model,
            service_model=service_model,
            include_signature=False,
        )
        attrs['all'] = all

        # The collection's filter() method.
        def filter(self, **kwargs):
            return base_class.filter(self, **kwargs)

        filter.__doc__ = docstring.CollectionMethodDocstring(
            resource_name=resource_name,
            action_name='filter',
            event_emitter=event_emitter,
            collection_model=collection_model,
            service_model=service_model,
            include_signature=False,
        )
        attrs['filter'] = filter

        # The collection's limit method.
        def limit(self, count):
            return base_class.limit(self, count)

        limit.__doc__ = docstring.CollectionMethodDocstring(
            resource_name=resource_name,
            action_name='limit',
            event_emitter=event_emitter,
            collection_model=collection_model,
            service_model=service_model,
            include_signature=False,
        )
        attrs['limit'] = limit

        # The collection's page_size method.
        def page_size(self, count):
            return base_class.page_size(self, count)

        page_size.__doc__ = docstring.CollectionMethodDocstring(
            resource_name=resource_name,
            action_name='page_size',
            event_emitter=event_emitter,
            collection_model=collection_model,
            service_model=service_model,
            include_signature=False,
        )
        attrs['page_size'] = page_size

    def _create_batch_action(
        factory_self,  # ``self`` is reserved for the inner closure below.
        resource_name,
        snake_cased,
        action_model,
        collection_model,
        service_model,
        event_emitter,
    ):
        """
        Creates a new method which makes a batch operation request
        to the underlying service API.
        """
        action = BatchAction(action_model)

        def batch_action(self, *args, **kwargs):
            return action(self, *args, **kwargs)

        batch_action.__name__ = str(snake_cased)
        batch_action.__doc__ = docstring.BatchActionDocstring(
            resource_name=resource_name,
            event_emitter=event_emitter,
            batch_action_model=action_model,
            service_model=service_model,
            collection_model=collection_model,
            include_signature=False,
        )
        return batch_action

View File

@@ -0,0 +1,601 @@
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import logging
from functools import partial
from ..docs import docstring
from ..exceptions import ResourceLoadException
from .action import ServiceAction, WaiterAction
from .base import ResourceMeta, ServiceResource
from .collection import CollectionFactory
from .model import ResourceModel
from .response import ResourceHandler, build_identifiers
logger = logging.getLogger(__name__)
class ResourceFactory:
"""
A factory to create new :py:class:`~boto3.resources.base.ServiceResource`
classes from a :py:class:`~boto3.resources.model.ResourceModel`. There are
two types of lookups that can be done: one on the service itself (e.g. an
SQS resource) and another on models contained within the service (e.g. an
SQS Queue resource).
"""
    def __init__(self, emitter):
        """
        :type emitter: :py:class:`~botocore.hooks.HierarchialEmitter` or None
        :param emitter: Event emitter used to announce resource-class
            creation; may be ``None`` (see ``load_from_definition``).
        """
        # Factory used to build CollectionManager subclasses for each
        # collection on a resource.
        self._collection_factory = CollectionFactory()
        self._emitter = emitter
    def load_from_definition(
        self, resource_name, single_resource_json_definition, service_context
    ):
        """
        Loads a resource from a model, creating a new
        :py:class:`~boto3.resources.base.ServiceResource` subclass
        with the correct properties and methods, named based on the service
        and resource name, e.g. EC2.Instance.

        :type resource_name: string
        :param resource_name: Name of the resource to look up. For services,
                              this should match the ``service_name``.
        :type single_resource_json_definition: dict
        :param single_resource_json_definition:
            The loaded json of a single service resource or resource
            definition.
        :type service_context: :py:class:`~boto3.utils.ServiceContext`
        :param service_context: Context about the AWS service

        :rtype: Subclass of :py:class:`~boto3.resources.base.ServiceResource`
        :return: The service or resource class.
        """
        logger.debug(
            'Loading %s:%s', service_context.service_name, resource_name
        )

        # Using the loaded JSON create a ResourceModel object.
        resource_model = ResourceModel(
            resource_name,
            single_resource_json_definition,
            service_context.resource_json_definitions,
        )

        # Do some renaming of the shape if there was a naming collision
        # that needed to be accounted for.
        shape = None
        if resource_model.shape:
            shape = service_context.service_model.shape_for(
                resource_model.shape
            )
        resource_model.load_rename_map(shape)

        # Set some basic info
        meta = ResourceMeta(
            service_context.service_name, resource_model=resource_model
        )
        attrs = {
            'meta': meta,
        }

        # Create and load all of attributes of the resource class based
        # on the models.

        # Identifiers
        self._load_identifiers(
            attrs=attrs,
            meta=meta,
            resource_name=resource_name,
            resource_model=resource_model,
        )

        # Load/Reload actions
        self._load_actions(
            attrs=attrs,
            resource_name=resource_name,
            resource_model=resource_model,
            service_context=service_context,
        )

        # Attributes that get auto-loaded
        self._load_attributes(
            attrs=attrs,
            meta=meta,
            resource_name=resource_name,
            resource_model=resource_model,
            service_context=service_context,
        )

        # Collections and their corresponding methods
        self._load_collections(
            attrs=attrs,
            resource_model=resource_model,
            service_context=service_context,
        )

        # References and Subresources
        self._load_has_relations(
            attrs=attrs,
            resource_name=resource_name,
            resource_model=resource_model,
            service_context=service_context,
        )

        # Waiter resource actions
        self._load_waiters(
            attrs=attrs,
            resource_name=resource_name,
            resource_model=resource_model,
            service_context=service_context,
        )

        # Create the name based on the requested service and resource,
        # e.g. 'ec2.Instance' or 'ec2.ServiceResource' for the service
        # itself.
        cls_name = resource_name
        if service_context.service_name == resource_name:
            cls_name = 'ServiceResource'
        cls_name = service_context.service_name + '.' + cls_name

        base_classes = [ServiceResource]
        # Give third-party handlers a chance to mutate the attributes
        # and base classes before the class is created.
        if self._emitter is not None:
            self._emitter.emit(
                f'creating-resource-class.{cls_name}',
                class_attributes=attrs,
                base_classes=base_classes,
                service_context=service_context,
            )
        return type(str(cls_name), tuple(base_classes), attrs)
def _load_identifiers(self, attrs, meta, resource_model, resource_name):
"""
Populate required identifiers. These are arguments without which
the resource cannot be used. Identifiers become arguments for
operations on the resource.
"""
for identifier in resource_model.identifiers:
meta.identifiers.append(identifier.name)
attrs[identifier.name] = self._create_identifier(
identifier, resource_name
)
def _load_actions(
self, attrs, resource_name, resource_model, service_context
):
"""
Actions on the resource become methods, with the ``load`` method
being a special case which sets internal data for attributes, and
``reload`` is an alias for ``load``.
"""
if resource_model.load:
attrs['load'] = self._create_action(
action_model=resource_model.load,
resource_name=resource_name,
service_context=service_context,
is_load=True,
)
attrs['reload'] = attrs['load']
for action in resource_model.actions:
attrs[action.name] = self._create_action(
action_model=action,
resource_name=resource_name,
service_context=service_context,
)
    def _load_attributes(
        self, attrs, meta, resource_name, resource_model, service_context
    ):
        """
        Load resource attributes based on the resource shape. The shape
        name is referenced in the resource JSON, but the shape itself
        is defined in the Botocore service JSON, hence the need for
        access to the ``service_model``.
        """
        # Without a shape there are no data attributes to expose.
        if not resource_model.shape:
            return

        shape = service_context.service_model.shape_for(resource_model.shape)

        # Identifiers that also appear as shape members become aliases
        # of the identifier rather than auto-loaded properties.
        identifiers = {
            i.member_name: i
            for i in resource_model.identifiers
            if i.member_name
        }
        attributes = resource_model.get_attributes(shape)
        for name, (orig_name, member) in attributes.items():
            if name in identifiers:
                prop = self._create_identifier_alias(
                    resource_name=resource_name,
                    identifier=identifiers[name],
                    member_model=member,
                    service_context=service_context,
                )
            else:
                prop = self._create_autoload_property(
                    resource_name=resource_name,
                    name=orig_name,
                    snake_cased=name,
                    member_model=member,
                    service_context=service_context,
                )
            attrs[name] = prop
def _load_collections(self, attrs, resource_model, service_context):
"""
Load resource collections from the model. Each collection becomes
a :py:class:`~boto3.resources.collection.CollectionManager` instance
on the resource instance, which allows you to iterate and filter
through the collection's items.
"""
for collection_model in resource_model.collections:
attrs[collection_model.name] = self._create_collection(
resource_name=resource_model.name,
collection_model=collection_model,
service_context=service_context,
)
def _load_has_relations(
self, attrs, resource_name, resource_model, service_context
):
"""
Load related resources, which are defined via a ``has``
relationship but conceptually come in two forms:
1. A reference, which is a related resource instance and can be
``None``, such as an EC2 instance's ``vpc``.
2. A subresource, which is a resource constructor that will always
return a resource instance which shares identifiers/data with
this resource, such as ``s3.Bucket('name').Object('key')``.
"""
for reference in resource_model.references:
# This is a dangling reference, i.e. we have all
# the data we need to create the resource, so
# this instance becomes an attribute on the class.
attrs[reference.name] = self._create_reference(
reference_model=reference,
resource_name=resource_name,
service_context=service_context,
)
for subresource in resource_model.subresources:
# This is a sub-resource class you can create
# by passing in an identifier, e.g. s3.Bucket(name).
attrs[subresource.name] = self._create_class_partial(
subresource_model=subresource,
resource_name=resource_name,
service_context=service_context,
)
self._create_available_subresources_command(
attrs, resource_model.subresources
)
def _create_available_subresources_command(self, attrs, subresources):
_subresources = [subresource.name for subresource in subresources]
_subresources = sorted(_subresources)
def get_available_subresources(factory_self):
"""
Returns a list of all the available sub-resources for this
Resource.
:returns: A list containing the name of each sub-resource for this
resource
:rtype: list of str
"""
return _subresources
attrs['get_available_subresources'] = get_available_subresources
def _load_waiters(
self, attrs, resource_name, resource_model, service_context
):
"""
Load resource waiters from the model. Each waiter allows you to
wait until a resource reaches a specific state by polling the state
of the resource.
"""
for waiter in resource_model.waiters:
attrs[waiter.name] = self._create_waiter(
resource_waiter_model=waiter,
resource_name=resource_name,
service_context=service_context,
)
def _create_identifier(factory_self, identifier, resource_name):
"""
Creates a read-only property for identifier attributes.
"""
def get_identifier(self):
# The default value is set to ``None`` instead of
# raising an AttributeError because when resources are
# instantiated a check is made such that none of the
# identifiers have a value ``None``. If any are ``None``,
# a more informative user error than a generic AttributeError
# is raised.
return getattr(self, '_' + identifier.name, None)
get_identifier.__name__ = str(identifier.name)
get_identifier.__doc__ = docstring.IdentifierDocstring(
resource_name=resource_name,
identifier_model=identifier,
include_signature=False,
)
return property(get_identifier)
def _create_identifier_alias(
factory_self, resource_name, identifier, member_model, service_context
):
"""
Creates a read-only property that aliases an identifier.
"""
def get_identifier(self):
return getattr(self, '_' + identifier.name, None)
get_identifier.__name__ = str(identifier.member_name)
get_identifier.__doc__ = docstring.AttributeDocstring(
service_name=service_context.service_name,
resource_name=resource_name,
attr_name=identifier.member_name,
event_emitter=factory_self._emitter,
attr_model=member_model,
include_signature=False,
)
return property(get_identifier)
    def _create_autoload_property(
        factory_self,
        resource_name,
        name,
        snake_cased,
        member_model,
        service_context,
    ):
        """
        Creates a new property on the resource to lazy-load its value
        via the resource's ``load`` method (if it exists).

        :param name: The attribute's original (service-cased) name used to
            look up the value in the loaded response data.
        :param snake_cased: The snake_cased name exposed on the resource.
        """

        # The property loader will check to see if this resource has already
        # been loaded and return the cached value if possible. If not, then
        # it first checks to see if it CAN be loaded (raise if not), then
        # calls the load before returning the value.
        def property_loader(self):
            if self.meta.data is None:
                if hasattr(self, 'load'):
                    self.load()
                else:
                    raise ResourceLoadException(
                        f'{self.__class__.__name__} has no load method'
                    )

            # Missing members simply resolve to None.
            return self.meta.data.get(name)

        property_loader.__name__ = str(snake_cased)
        property_loader.__doc__ = docstring.AttributeDocstring(
            service_name=service_context.service_name,
            resource_name=resource_name,
            attr_name=snake_cased,
            event_emitter=factory_self._emitter,
            attr_model=member_model,
            include_signature=False,
        )

        return property(property_loader)
def _create_waiter(
    factory_self, resource_waiter_model, resource_name, service_context
):
    """
    Build a ``wait_until_...`` method for a resource that has both a
    waiter and a resource model defined.
    """
    # A single WaiterAction is shared by every instance of the resource
    # class; each call below simply delegates to it.
    waiter_action = WaiterAction(
        resource_waiter_model,
        waiter_resource_name=resource_waiter_model.name,
    )

    def do_waiter(self, *args, **kwargs):
        waiter_action(self, *args, **kwargs)

    do_waiter.__name__ = str(resource_waiter_model.name)
    do_waiter.__doc__ = docstring.ResourceWaiterDocstring(
        resource_name=resource_name,
        event_emitter=factory_self._emitter,
        service_model=service_context.service_model,
        resource_waiter_model=resource_waiter_model,
        service_waiter_model=service_context.service_waiter_model,
        include_signature=False,
    )
    return do_waiter
def _create_collection(
    factory_self, resource_name, collection_model, service_context
):
    """
    Creates a new property on the resource to lazy-load a collection.

    :param resource_name: Name of the resource owning the collection.
    :param collection_model: Model describing the collection.
    :param service_context: Context about the AWS service.
    :return: A read-only property returning a collection manager bound
        to the resource instance it is accessed on.
    """
    # The collection *class* is created eagerly, once per resource
    # class; instances are created lazily per resource via the
    # property below.
    cls = factory_self._collection_factory.load_from_definition(
        resource_name=resource_name,
        collection_model=collection_model,
        service_context=service_context,
        event_emitter=factory_self._emitter,
    )

    def get_collection(self):
        # Bind the collection manager to this particular resource
        # instance (``parent=self``) on each access.
        return cls(
            collection_model=collection_model,
            parent=self,
            factory=factory_self,
            service_context=service_context,
        )

    get_collection.__name__ = str(collection_model.name)
    get_collection.__doc__ = docstring.CollectionDocstring(
        collection_model=collection_model, include_signature=False
    )
    return property(get_collection)
def _create_reference(
    factory_self, reference_model, resource_name, service_context
):
    """
    Creates a new property on the resource to lazy-load a reference.

    :param reference_model: Model describing the referenced resource.
    :param resource_name: Name of the resource owning the reference.
    :param service_context: Context about the AWS service.
    :return: A read-only property that builds the referenced resource
        instance(s) on access.
    """
    # References are essentially an action with no request
    # or response, so we can re-use the response handlers to
    # build up resources from identifiers and data members.
    handler = ResourceHandler(
        search_path=reference_model.resource.path,
        factory=factory_self,
        resource_model=reference_model.resource,
        service_context=service_context,
    )
    # Are there any identifiers that need access to data members?
    # This is important when building the resource below since
    # it requires the data to be loaded.
    needs_data = any(
        i.source == 'data' for i in reference_model.resource.identifiers
    )

    def get_reference(self):
        # We need to lazy-evaluate the reference to handle circular
        # references between resources. We do this by loading the class
        # when first accessed.
        # This is using a *response handler* so we need to make sure
        # our data is loaded (if possible) and pass that data into
        # the handler as if it were a response. This allows references
        # to have their data loaded properly.
        if needs_data and self.meta.data is None and hasattr(self, 'load'):
            self.load()
        # An empty ``params`` dict is passed since no request was made.
        return handler(self, {}, self.meta.data)

    get_reference.__name__ = str(reference_model.name)
    get_reference.__doc__ = docstring.ReferenceDocstring(
        reference_model=reference_model, include_signature=False
    )
    return property(get_reference)
def _create_class_partial(
    factory_self, subresource_model, resource_name, service_context
):
    """
    Creates a new method which acts as a functools.partial, passing
    along the instance's low-level ``client`` to the new resource
    class' constructor.
    """
    name = subresource_model.resource.type

    def create_resource(self, *args, **kwargs):
        # Defined as a closure so we have access to this instance's
        # low-level client.
        # Resolve the subresource class lazily so circular references
        # between resource types do not blow up at definition time.
        json_def = service_context.resource_json_definitions.get(name, {})
        resource_cls = factory_self.load_from_definition(
            resource_name=name,
            single_resource_json_definition=json_def,
            service_context=service_context,
        )
        # Pre-fill parent identifier values positionally, in order, so
        # chained calls like ``sqs.Queue('foo').Message('bar')`` link
        # the ``foo`` queue before the caller's own arguments. Using
        # keywords here would break future positional arguments.
        positional_args = []
        identifiers = subresource_model.resource.identifiers
        if identifiers is not None:
            for _identifier, value in build_identifiers(identifiers, self):
                positional_args.append(value)
        return partial(
            resource_cls, *positional_args, client=self.meta.client
        )(*args, **kwargs)

    create_resource.__name__ = str(name)
    create_resource.__doc__ = docstring.SubResourceDocstring(
        resource_name=resource_name,
        sub_resource_model=subresource_model,
        service_model=service_context.service_model,
        include_signature=False,
    )
    return create_resource
def _create_action(
    factory_self,
    action_model,
    resource_name,
    service_context,
    is_load=False,
):
    """
    Creates a new method which makes a request to the underlying
    AWS service.

    :param action_model: Model describing the service operation to call.
    :param resource_name: Name of the resource the method is attached to.
    :param service_context: Context about the AWS service.
    :param is_load: True when this is the resource's special
        ``load``/``reload`` action, which stores the response on the
        resource instead of returning it.
    """
    # Create the action in this closure but before the ``do_action``
    # method below is invoked, which allows instances of the resource
    # to share the ServiceAction instance.
    action = ServiceAction(
        action_model, factory=factory_self, service_context=service_context
    )
    # A resource's ``load`` method is special because it sets
    # values on the resource instead of returning the response.
    if is_load:
        # We need a new method here because we want access to the
        # instance via ``self``.
        def do_action(self, *args, **kwargs):
            response = action(self, *args, **kwargs)
            self.meta.data = response

        # Create the docstring for the load/reload methods.
        lazy_docstring = docstring.LoadReloadDocstring(
            action_name=action_model.name,
            resource_name=resource_name,
            event_emitter=factory_self._emitter,
            load_model=action_model,
            service_model=service_context.service_model,
            include_signature=False,
        )
    else:
        # We need a new method here because we want access to the
        # instance via ``self``.
        def do_action(self, *args, **kwargs):
            response = action(self, *args, **kwargs)
            if hasattr(self, 'load'):
                # Clear cached data. It will be reloaded the next
                # time that an attribute is accessed.
                # TODO: Make this configurable in the future?
                self.meta.data = None
            return response

        lazy_docstring = docstring.ActionDocstring(
            resource_name=resource_name,
            event_emitter=factory_self._emitter,
            action_model=action_model,
            service_model=service_context.service_model,
            include_signature=False,
        )
    do_action.__name__ = str(action_model.name)
    do_action.__doc__ = lazy_docstring
    return do_action

View File

@@ -0,0 +1,632 @@
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""
The models defined in this file represent the resource JSON description
format and provide a layer of abstraction from the raw JSON. The advantages
of this are:
* Pythonic interface (e.g. ``action.request.operation``)
* Consumers need not change for minor JSON changes (e.g. renamed field)
These models are used both by the resource factory to generate resource
classes as well as by the documentation generator.
"""
import logging
from botocore import xform_name
logger = logging.getLogger(__name__)
class Identifier:
    """
    A resource identifier, given by its name.

    :type name: string
    :param name: The name of the identifier
    :type member_name: string
    :param member_name: The name of the attribute (shape member) that
        aliases this identifier, if any.
    """

    def __init__(self, name, member_name=None):
        #: (``string``) The name of the identifier
        self.name = name
        #: (``string``) The aliased member name or ``None``
        self.member_name = member_name

    def __repr__(self):
        # Identifiers show up frequently when inspecting resource
        # models; a real repr makes debugging much easier than the
        # default ``<Identifier object at 0x...>``.
        return (
            f'{type(self).__name__}(name={self.name!r}, '
            f'member_name={self.member_name!r})'
        )
class Action:
    """
    A service operation action.
    :type name: string
    :param name: The name of the action
    :type definition: dict
    :param definition: The JSON definition
    :type resource_defs: dict
    :param resource_defs: All resources defined in the service
    """

    def __init__(self, name, definition, resource_defs):
        self._definition = definition
        #: (``string``) The name of the action
        self.name = name
        #: (:py:class:`Request`) This action's request or ``None``
        self.request = (
            Request(definition['request'])
            if 'request' in definition
            else None
        )
        #: (:py:class:`ResponseResource`) This action's resource or ``None``
        self.resource = (
            ResponseResource(definition['resource'], resource_defs)
            if 'resource' in definition
            else None
        )
        #: (``string``) The JMESPath search path or ``None``
        self.path = definition.get('path')
class DefinitionWithParams:
    """
    An item which has parameters exposed via the ``params`` property.
    A request has an operation and parameters, while a waiter has
    a name, a low-level waiter name and parameters.
    :type definition: dict
    :param definition: The JSON definition
    """

    def __init__(self, definition):
        self._definition = definition

    @property
    def params(self):
        """
        Get a list of auto-filled parameters for this request.
        :type: list(:py:class:`Parameter`)
        """
        # Build a fresh Parameter list on every access straight from
        # the raw definition.
        return [
            Parameter(**item)
            for item in self._definition.get('params', [])
        ]
class Parameter:
    """
    An auto-filled parameter which has a source and target. For example,
    the ``QueueUrl`` may be auto-filled from a resource's ``url`` identifier
    when making calls to ``queue.receive_messages``.

    :type target: string
    :param target: The destination parameter name, e.g. ``QueueUrl``
    :type source: string
    :param source: Where the value comes from, e.g. ``identifier``
    :type name: string
    :param name: The name of the source, if given
    :type path: string
    :param path: The JMESPath query of the source, if given
    :type value: string|int|float|bool
    :param value: A constant source value, if given
    """

    def __init__(
        self, target, source, name=None, path=None, value=None, **kwargs
    ):
        #: (``string``) The destination parameter name
        self.target = target
        #: (``string``) Where the source is defined
        self.source = source
        #: (``string``) The name of the source, if given
        self.name = name
        #: (``string``) The JMESPath query of the source
        self.path = path
        #: (``string|int|float|bool``) The source constant value
        self.value = value
        # Tolerate, but surface, options this model does not understand
        # (e.g. from a newer JSON description format).
        if kwargs:
            logger.warning('Unknown parameter options found: %s', kwargs)
class Request(DefinitionWithParams):
    """
    A service operation action request.

    Exposes the low-level operation name in addition to the
    auto-filled ``params`` inherited from the base class.
    :type definition: dict
    :param definition: The JSON definition
    """

    def __init__(self, definition):
        super().__init__(definition)
        #: (``string``) The name of the low-level service operation
        self.operation = definition.get('operation')
class Waiter(DefinitionWithParams):
    """
    An event waiter specification.

    :type name: string
    :param name: Name of the waiter
    :type definition: dict
    :param definition: The JSON definition
    """

    #: Prefix used when exposing waiters on resources,
    #: e.g. ``WaitUntil`` + ``Running``.
    PREFIX = 'WaitUntil'

    def __init__(self, name, definition):
        super().__init__(definition)
        #: (``string``) The name of this waiter
        self.name = name
        #: (``string``) The name of the underlying event waiter
        self.waiter_name = definition.get('waiterName')
class ResponseResource:
    """
    A resource response to create after performing an action.
    :type definition: dict
    :param definition: The JSON definition
    :type resource_defs: dict
    :param resource_defs: All resources defined in the service
    """

    def __init__(self, definition, resource_defs):
        self._definition = definition
        self._resource_defs = resource_defs
        #: (``string``) The name of the response resource type
        self.type = definition.get('type')
        #: (``string``) The JMESPath search query or ``None``
        self.path = definition.get('path')

    @property
    def identifiers(self):
        """
        A list of identifier targets for this response resource.

        Note: each entry is built as a :py:class:`Parameter` (carrying
        ``target``/``source``), matching the raw JSON definition.
        :type: list(:py:class:`Parameter`)
        """
        return [
            Parameter(**item)
            for item in self._definition.get('identifiers', [])
        ]

    @property
    def model(self):
        """
        Get the resource model for the response resource.
        :type: :py:class:`ResourceModel`
        """
        # Resolved lazily so that forward references between resource
        # types in the JSON definitions work.
        return ResourceModel(
            self.type, self._resource_defs[self.type], self._resource_defs
        )
class Collection(Action):
    """
    A group of resources. See :py:class:`Action`.

    :type name: string
    :param name: The name of the collection
    :type definition: dict
    :param definition: The JSON definition
    :type resource_defs: dict
    :param resource_defs: All resources defined in the service
    """

    @property
    def batch_actions(self):
        """
        Get a list of batch actions supported by the resource type
        contained in this action. This is a shortcut for accessing
        the same information through the resource model.
        :rtype: list(:py:class:`Action`)
        """
        # The contained resource's model owns the batch action
        # definitions; simply delegate.
        return self.resource.model.batch_actions
class ResourceModel:
    """
    A model representing a resource, defined via a JSON description
    format. A resource has identifiers, attributes, actions,
    sub-resources, references and collections. For more information
    on resources, see :ref:`guide_resources`.
    :type name: string
    :param name: The name of this resource, e.g. ``sqs`` or ``Queue``
    :type definition: dict
    :param definition: The JSON definition
    :type resource_defs: dict
    :param resource_defs: All resources defined in the service
    """

    def __init__(self, name, definition, resource_defs):
        self._definition = definition
        self._resource_defs = resource_defs
        # Maps ``(category, name)`` -> renamed name; populated by
        # ``load_rename_map`` and consulted by ``_get_name``.
        self._renamed = {}
        #: (``string``) The name of this resource
        self.name = name
        #: (``string``) The service shape name for this resource or ``None``
        self.shape = definition.get('shape')

    def load_rename_map(self, shape=None):
        """
        Load a name translation map given a shape. This will set
        up renamed values for any collisions, e.g. if the shape,
        an action, and a subresource all are all named ``foo``
        then the resource will have an action ``foo``, a subresource
        named ``Foo`` and a property named ``foo_attribute``.
        This is the order of precedence, from most important to
        least important:
        * Load action (resource.load)
        * Identifiers
        * Actions
        * Subresources
        * References
        * Collections
        * Waiters
        * Attributes (shape members)
        Batch actions are only exposed on collections, so do not
        get modified here. Subresources use upper camel casing, so
        are unlikely to collide with anything but other subresources.
        Creates a structure like this::
            renames = {
                ('action', 'id'): 'id_action',
                ('collection', 'id'): 'id_collection',
                ('attribute', 'id'): 'id_attribute'
            }
            # Get the final name for an action named 'id'
            name = renames.get(('action', 'id'), 'id')
        :type shape: botocore.model.Shape
        :param shape: The underlying shape for this resource.
        """
        # Meta is a reserved name for resources
        names = {'meta'}
        self._renamed = {}
        if self._definition.get('load'):
            names.add('load')
        # Registration order below implements the precedence list in
        # the docstring: earlier categories keep their name, later
        # ones get renamed on collision.
        for item in self._definition.get('identifiers', []):
            self._load_name_with_category(names, item['name'], 'identifier')
        for name in self._definition.get('actions', {}):
            self._load_name_with_category(names, name, 'action')
        for name, ref in self._get_has_definition().items():
            # Subresources require no data members, just typically
            # identifiers and user input.
            data_required = False
            for identifier in ref['resource']['identifiers']:
                if identifier['source'] == 'data':
                    data_required = True
                    break
            if not data_required:
                self._load_name_with_category(
                    names, name, 'subresource', snake_case=False
                )
            else:
                self._load_name_with_category(names, name, 'reference')
        for name in self._definition.get('hasMany', {}):
            self._load_name_with_category(names, name, 'collection')
        for name in self._definition.get('waiters', {}):
            self._load_name_with_category(
                names, Waiter.PREFIX + name, 'waiter'
            )
        if shape is not None:
            for name in shape.members.keys():
                self._load_name_with_category(names, name, 'attribute')

    def _load_name_with_category(self, names, name, category, snake_case=True):
        """
        Load a name with a given category, possibly renaming it
        if that name is already in use. The name will be stored
        in ``names`` and possibly be set up in ``self._renamed``.
        :type names: set
        :param names: Existing names (Python attributes, properties, or
                      methods) on the resource.
        :type name: string
        :param name: The original name of the value.
        :type category: string
        :param category: The value type, such as 'identifier' or 'action'
        :type snake_case: bool
        :param snake_case: True (default) if the name should be snake cased.
        """
        if snake_case:
            name = xform_name(name)
        if name in names:
            # Collision: expose this value under ``<name>_<category>``
            # instead and remember the mapping.
            logger.debug(f'Renaming {self.name} {category} {name}')
            self._renamed[(category, name)] = name + '_' + category
            name += '_' + category
            if name in names:
                # This isn't good, let's raise instead of trying to keep
                # renaming this value.
                raise ValueError(
                    'Problem renaming {} {} to {}!'.format(
                        self.name, category, name
                    )
                )
        names.add(name)

    def _get_name(self, category, name, snake_case=True):
        """
        Get a possibly renamed value given a category and name. This
        uses the rename map set up in ``load_rename_map``, so that
        method must be called once first.
        :type category: string
        :param category: The value type, such as 'identifier' or 'action'
        :type name: string
        :param name: The original name of the value
        :type snake_case: bool
        :param snake_case: True (default) if the name should be snake cased.
        :rtype: string
        :return: Either the renamed value if it is set, otherwise the
                 original name.
        """
        if snake_case:
            name = xform_name(name)
        return self._renamed.get((category, name), name)

    def get_attributes(self, shape):
        """
        Get a dictionary of attribute names to original name and shape
        models that represent the attributes of this resource. Looks
        like the following:
            {
                'some_name': ('SomeName', <Shape...>)
            }
        :type shape: botocore.model.Shape
        :param shape: The underlying shape for this resource.
        :rtype: dict
        :return: Mapping of resource attributes.
        """
        attributes = {}
        identifier_names = [i.name for i in self.identifiers]
        for name, member in shape.members.items():
            snake_cased = xform_name(name)
            if snake_cased in identifier_names:
                # Skip identifiers, these are set through other means
                continue
            # Already snake cased above, so only apply the rename map.
            snake_cased = self._get_name(
                'attribute', snake_cased, snake_case=False
            )
            attributes[snake_cased] = (name, member)
        return attributes

    @property
    def identifiers(self):
        """
        Get a list of resource identifiers.
        :type: list(:py:class:`Identifier`)
        """
        identifiers = []
        for item in self._definition.get('identifiers', []):
            name = self._get_name('identifier', item['name'])
            member_name = item.get('memberName', None)
            if member_name:
                member_name = self._get_name('attribute', member_name)
            identifiers.append(Identifier(name, member_name))
        return identifiers

    @property
    def load(self):
        """
        Get the load action for this resource, if it is defined.
        :type: :py:class:`Action` or ``None``
        """
        action = self._definition.get('load')
        if action is not None:
            action = Action('load', action, self._resource_defs)
        return action

    @property
    def actions(self):
        """
        Get a list of actions for this resource.
        :type: list(:py:class:`Action`)
        """
        actions = []
        for name, item in self._definition.get('actions', {}).items():
            name = self._get_name('action', name)
            actions.append(Action(name, item, self._resource_defs))
        return actions

    @property
    def batch_actions(self):
        """
        Get a list of batch actions for this resource.
        :type: list(:py:class:`Action`)
        """
        actions = []
        for name, item in self._definition.get('batchActions', {}).items():
            name = self._get_name('batch_action', name)
            actions.append(Action(name, item, self._resource_defs))
        return actions

    def _get_has_definition(self):
        """
        Get a ``has`` relationship definition from a model, where the
        service resource model is treated special in that it contains
        a relationship to every resource defined for the service. This
        allows things like ``s3.Object('bucket-name', 'key')`` to
        work even though the JSON doesn't define it explicitly.
        :rtype: dict
        :return: Mapping of names to subresource and reference
                 definitions.
        """
        if self.name not in self._resource_defs:
            # This is the service resource, so let us expose all of
            # the defined resources as subresources.
            definition = {}
            for name, resource_def in self._resource_defs.items():
                # It's possible for the service to have renamed a
                # resource or to have defined multiple names that
                # point to the same resource type, so we need to
                # take that into account.
                found = False
                has_items = self._definition.get('has', {}).items()
                for has_name, has_def in has_items:
                    if has_def.get('resource', {}).get('type') == name:
                        definition[has_name] = has_def
                        found = True
                if not found:
                    # Create a relationship definition and attach it
                    # to the model, such that all identifiers must be
                    # supplied by the user. It will look something like:
                    #
                    # {
                    #   'resource': {
                    #     'type': 'ResourceName',
                    #     'identifiers': [
                    #       {'target': 'Name1', 'source': 'input'},
                    #       {'target': 'Name2', 'source': 'input'},
                    #       ...
                    #     ]
                    #   }
                    # }
                    #
                    fake_has = {'resource': {'type': name, 'identifiers': []}}
                    for identifier in resource_def.get('identifiers', []):
                        fake_has['resource']['identifiers'].append(
                            {'target': identifier['name'], 'source': 'input'}
                        )
                    definition[name] = fake_has
        else:
            definition = self._definition.get('has', {})
        return definition

    def _get_related_resources(self, subresources):
        """
        Get a list of sub-resources or references.
        :type subresources: bool
        :param subresources: ``True`` to get sub-resources, ``False`` to
                             get references.
        :rtype: list(:py:class:`Action`)
        """
        resources = []
        for name, definition in self._get_has_definition().items():
            if subresources:
                name = self._get_name('subresource', name, snake_case=False)
            else:
                name = self._get_name('reference', name)
            action = Action(name, definition, self._resource_defs)
            # A relationship counts as a reference (not a subresource)
            # exactly when any identifier must come from loaded data.
            data_required = False
            for identifier in action.resource.identifiers:
                if identifier.source == 'data':
                    data_required = True
                    break
            if subresources and not data_required:
                resources.append(action)
            elif not subresources and data_required:
                resources.append(action)
        return resources

    @property
    def subresources(self):
        """
        Get a list of sub-resources.
        :type: list(:py:class:`Action`)
        """
        return self._get_related_resources(True)

    @property
    def references(self):
        """
        Get a list of reference resources.
        :type: list(:py:class:`Action`)
        """
        return self._get_related_resources(False)

    @property
    def collections(self):
        """
        Get a list of collections for this resource.
        :type: list(:py:class:`Collection`)
        """
        collections = []
        for name, item in self._definition.get('hasMany', {}).items():
            name = self._get_name('collection', name)
            collections.append(Collection(name, item, self._resource_defs))
        return collections

    @property
    def waiters(self):
        """
        Get a list of waiters for this resource.
        :type: list(:py:class:`Waiter`)
        """
        waiters = []
        for name, item in self._definition.get('waiters', {}).items():
            name = self._get_name('waiter', Waiter.PREFIX + name)
            waiters.append(Waiter(name, item))
        return waiters

View File

@@ -0,0 +1,167 @@
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import re
import jmespath
from botocore import xform_name
from ..exceptions import ResourceLoadException
INDEX_RE = re.compile(r'\[(.*)\]$')
def get_data_member(parent, path):
    """
    Get a data member from a parent using a JMESPath search query,
    loading the parent if required. If the parent cannot be loaded
    and no data is present then an exception is raised.
    :type parent: ServiceResource
    :param parent: The resource instance to which contains data we
                   are interested in.
    :type path: string
    :param path: The JMESPath expression to query
    :raises ResourceLoadException: When no data is present and the
                                   resource cannot be loaded.
    :returns: The queried data or ``None``.
    """
    if parent.meta.data is None:
        # Not loaded yet: load if we can, otherwise fail loudly.
        if not hasattr(parent, 'load'):
            raise ResourceLoadException(
                f'{parent.__class__.__name__} has no load method!'
            )
        parent.load()
    return jmespath.search(path, parent.meta.data)
def create_request_parameters(parent, request_model, params=None, index=None):
    """
    Handle request parameters that can be filled in from identifiers,
    resource data members or constants.
    By passing ``params``, you can invoke this method multiple times and
    build up a parameter dict over time, which is particularly useful
    for reverse JMESPath expressions that append to lists.
    :type parent: ServiceResource
    :param parent: The resource instance to which this action is attached.
    :type request_model: :py:class:`~boto3.resources.model.Request`
    :param request_model: The action request model.
    :type params: dict
    :param params: If set, then add to this existing dict. It is both
                   edited in-place and returned.
    :type index: int
    :param index: The position of an item within a list
    :rtype: dict
    :return: Pre-filled parameters to be sent to the request operation.
    """
    if params is None:
        params = {}
    for param in request_model.params:
        source = param.source
        target = param.target
        if source == 'identifier':
            # Resource identifier, e.g. queue.url
            value = getattr(parent, xform_name(param.name))
        elif source == 'data':
            # If this is a data member then it may incur a load
            # action before returning the value.
            value = get_data_member(parent, param.path)
        elif source in ['string', 'integer', 'boolean']:
            # These are hard-coded values in the definition
            value = param.value
        elif source == 'input':
            # This is provided by the user, so ignore it here
            continue
        else:
            raise NotImplementedError(f'Unsupported source type: {source}')
        # Mutates ``params`` in place, building nested dicts/lists as
        # dictated by the reverse-JMESPath ``target``.
        build_param_structure(params, target, value, index)
    return params
def build_param_structure(params, target, value, index=None):
    """
    This method provides a basic reverse JMESPath implementation that
    lets you go from a JMESPath-like string to a possibly deeply nested
    object. The ``params`` are mutated in-place, so subsequent calls
    can modify the same element by its index.
    >>> build_param_structure(params, 'test[0]', 1)
    >>> print(params)
    {'test': [1]}
    >>> build_param_structure(params, 'foo.bar[0].baz', 'hello world')
    >>> print(params)
    {'test': [1], 'foo': {'bar': [{'baz': 'hello world'}]}}
    """
    cursor = params
    segments = target.split('.')
    last = len(segments) - 1
    # Walk each dotted segment, creating dicts or lists along the way.
    # A trailing ``[...]`` marks a list: ``[N]`` selects index N, ``[]``
    # appends, and ``[*]`` keeps the caller-supplied ``index``.
    for i, segment in enumerate(segments):
        match = re.search(r'\[(.*)\]$', segment)
        if match is None:
            # Plain dict key.
            if segment not in cursor:
                cursor[segment] = {}
            if i == last:
                cursor[segment] = value
            else:
                cursor = cursor[segment]
            continue
        bracket = match.group(1)
        if bracket == '':
            # ``name[]``: index chosen once we know the list length.
            index = None
            segment = segment[:-2]
        elif bracket == '*':
            # ``name[*]``: re-use whatever index was passed in.
            segment = segment[:-3]
        else:
            # ``name[N]``: explicit index.
            index = int(bracket)
            segment = segment[: -len(str(index) + '[]')]
        if segment not in cursor or not isinstance(cursor[segment], list):
            cursor[segment] = []
        if index is None:
            # Append semantics: target one slot past the current end.
            index = len(cursor[segment])
        while len(cursor[segment]) <= index:
            # Pad with dicts; the final value is written below.
            cursor[segment].append({})
        if i == last:
            cursor[segment][index] = value
        else:
            # Descend into the *item* in the array, not the array!
            cursor = cursor[segment][index]

View File

@@ -0,0 +1,318 @@
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import jmespath
from botocore import xform_name
from .params import get_data_member
def all_not_none(iterable):
    """
    Return True if all elements of the iterable are not None (or if the
    iterable is empty). This is like the built-in ``all``, except checks
    against None, so 0 and False are allowable values.

    :type iterable: iterable
    :param iterable: Values to check.
    :rtype: bool
    """
    # Generator expression keeps the short-circuit behavior of the
    # original loop with O(1) memory, mirroring the built-in ``all``.
    return all(element is not None for element in iterable)
def build_identifiers(identifiers, parent, params=None, raw_response=None):
    """
    Builds a mapping of identifier names to values based on the
    identifier source location, type, and target. Identifier
    values may be scalars or lists depending on the source type
    and location.
    :type identifiers: list
    :param identifiers: List of :py:class:`~boto3.resources.model.Parameter`
                        definitions
    :type parent: ServiceResource
    :param parent: The resource instance to which this action is attached.
    :type params: dict
    :param params: Request parameters sent to the service.
    :type raw_response: dict
    :param raw_response: Low-level operation response.
    :rtype: list
    :return: An ordered list of ``(name, value)`` identifier tuples.
    """
    results = []
    for identifier in identifiers:
        source = identifier.source
        target = identifier.target
        if source == 'response':
            # Pull the value out of the service response body.
            value = jmespath.search(identifier.path, raw_response)
        elif source == 'requestParameter':
            # Pull the value from what was sent in the request.
            value = jmespath.search(identifier.path, params)
        elif source == 'identifier':
            # Copy an identifier from the parent resource.
            value = getattr(parent, xform_name(identifier.name))
        elif source == 'data':
            # If this is a data member then it may incur a load
            # action before returning the value.
            value = get_data_member(parent, identifier.path)
        elif source == 'input':
            # This value is set by the user, so ignore it here
            continue
        else:
            raise NotImplementedError(f'Unsupported source type: {source}')
        results.append((xform_name(target), value))
    return results
def build_empty_response(search_path, operation_name, service_model):
    """
    Creates an appropriate empty response for the type that is expected,
    based on the service model's shape type. For example, a value that
    is normally a list would then return an empty list. A structure would
    return an empty dict, and a number would return None.
    :type search_path: string
    :param search_path: JMESPath expression to search in the response
    :type operation_name: string
    :param operation_name: Name of the underlying service operation.
    :type service_model: :ref:`botocore.model.ServiceModel`
    :param service_model: The Botocore service model
    :rtype: dict, list, or None
    :return: An appropriate empty value
    """
    operation_model = service_model.operation_model(operation_name)
    shape = operation_model.output_shape
    if search_path:
        # Walk the search path and find the final shape. For example, given
        # a path of ``foo.bar[0].baz``, we first find the shape for ``foo``,
        # then the shape for ``bar`` (ignoring the indexing), and finally
        # the shape for ``baz``.
        for token in search_path.split('.'):
            token = token.strip('[0123456789]$')
            if shape.type_name == 'structure':
                shape = shape.members[token]
            elif shape.type_name == 'list':
                shape = shape.member
            else:
                raise NotImplementedError(
                    f'Search path hits shape type {shape.type_name} '
                    f'from {token}'
                )
    # Aggregates get an empty container of the matching kind; scalars
    # and anything else fall back to None.
    empty_factories = {'structure': dict, 'map': dict, 'list': list}
    factory = empty_factories.get(shape.type_name)
    return factory() if factory is not None else None
class RawHandler:
    """
    A raw action response handler. This passed through the response
    dictionary, optionally after performing a JMESPath search if one
    has been defined for the action.
    :type search_path: string
    :param search_path: JMESPath expression to search in the response
    :rtype: dict
    :return: Service response
    """

    def __init__(self, search_path):
        self.search_path = search_path

    def __call__(self, parent, params, response):
        """
        :type parent: ServiceResource
        :param parent: The resource instance to which this action is attached.
        :type params: dict
        :param params: Request parameters sent to the service.
        :type response: dict
        :param response: Low-level operation response.
        """
        path = self.search_path
        # TODO: Remove the '$' check after JMESPath supports it.
        # ``$`` means "the whole document", so no search is needed.
        if path and path != '$':
            return jmespath.search(path, response)
        return response
class ResourceHandler:
    """
    Creates a new resource or list of new resources from the low-level
    response based on the given response resource definition.

    :type search_path: string
    :param search_path: JMESPath expression to search in the response

    :type factory: ResourceFactory
    :param factory: The factory that created the resource class to which
                    this action is attached.

    :type resource_model: :py:class:`~boto3.resources.model.ResponseResource`
    :param resource_model: Response resource model.

    :type service_context: :py:class:`~boto3.utils.ServiceContext`
    :param service_context: Context about the AWS service

    :type operation_name: string
    :param operation_name: Name of the underlying service operation, if it
                           exists.

    :rtype: ServiceResource or list
    :return: New resource instance(s).
    """

    def __init__(
        self,
        search_path,
        factory,
        resource_model,
        service_context,
        operation_name=None,
    ):
        self.search_path = search_path
        self.factory = factory
        self.resource_model = resource_model
        self.operation_name = operation_name
        self.service_context = service_context

    def __call__(self, parent, params, response):
        """
        Build resource instance(s) from the low-level ``response``.

        :type parent: ServiceResource
        :param parent: The resource instance to which this action is attached.
        :type params: dict
        :param params: Request parameters sent to the service.
        :type response: dict
        :param response: Low-level operation response.
        :rtype: ServiceResource, list, dict, or None
        :return: New resource instance(s), or an appropriate empty value
                 when the identifiers could not be resolved.
        """
        resource_name = self.resource_model.type
        json_definition = self.service_context.resource_json_definitions.get(
            resource_name
        )
        # Load the new resource class that will result from this action.
        resource_cls = self.factory.load_from_definition(
            resource_name=resource_name,
            single_resource_json_definition=json_definition,
            service_context=self.service_context,
        )
        raw_response = response
        search_response = None
        # Anytime a path is defined, it means the response contains the
        # resource's attributes, so resource_data gets set here. It
        # eventually ends up in resource.meta.data, which is where
        # the attribute properties look for data.
        if self.search_path:
            search_response = jmespath.search(self.search_path, raw_response)
        # First, we parse all the identifiers, then create the individual
        # response resources using them. Any identifiers that are lists
        # will have one item consumed from the front of the list for each
        # resource that is instantiated. Items which are not a list will
        # be set as the same value on each new resource instance.
        identifiers = dict(
            build_identifiers(
                self.resource_model.identifiers, parent, params, raw_response
            )
        )
        # If any of the identifiers is a list, then the response is plural
        plural = [v for v in identifiers.values() if isinstance(v, list)]
        if plural:
            response = []
            # The number of items in an identifier that is a list will
            # determine how many resource instances to create.
            # NOTE(review): assumes every plural identifier (and
            # search_response, when present) has the same length and lines
            # up index-for-index with plural[0] — confirm upstream.
            for i in range(len(plural[0])):
                # Response item data is *only* available if a search path
                # was given. This prevents accidentally loading unrelated
                # data that may be in the response.
                response_item = None
                if search_response:
                    response_item = search_response[i]
                response.append(
                    self.handle_response_item(
                        resource_cls, parent, identifiers, response_item
                    )
                )
        elif all_not_none(identifiers.values()):
            # All identifiers must always exist, otherwise the resource
            # cannot be instantiated.
            response = self.handle_response_item(
                resource_cls, parent, identifiers, search_response
            )
        else:
            # The response should be empty, but that may mean an
            # empty dict, list, or None based on whether we make
            # a remote service call and what shape it is expected
            # to return.
            response = None
            if self.operation_name is not None:
                # A remote service call was made, so try and determine
                # its shape.
                response = build_empty_response(
                    self.search_path,
                    self.operation_name,
                    self.service_context.service_model,
                )
        return response

    def handle_response_item(
        self, resource_cls, parent, identifiers, resource_data
    ):
        """
        Handles the creation of a single response item by setting
        parameters and creating the appropriate resource instance.

        :type resource_cls: ServiceResource subclass
        :param resource_cls: The resource class to instantiate.
        :type parent: ServiceResource
        :param parent: The resource instance to which this action is attached.
        :type identifiers: dict
        :param identifiers: Map of identifier names to value or values.
        :type resource_data: dict or None
        :param resource_data: Data for resource attributes.
        :rtype: ServiceResource
        :return: New resource instance.
        """
        kwargs = {
            'client': parent.meta.client,
        }
        for name, value in identifiers.items():
            # If value is a list, then consume the next item
            # (this deliberately mutates the shared list so successive
            # calls walk through plural identifiers one item at a time).
            if isinstance(value, list):
                value = value.pop(0)
            kwargs[name] = value
        resource = resource_cls(**kwargs)
        if resource_data is not None:
            resource.meta.data = resource_data
        return resource

View File

@@ -0,0 +1,12 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

View File

@@ -0,0 +1,17 @@
# Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
# TransferConfig preferred_transfer_client settings
# Force the non-CRT ("classic") transfer implementation.
CLASSIC_TRANSFER_CLIENT = "classic"
# Let boto3 pick the transfer client — presumably preferring the CRT-based
# client when it is available; confirm against boto3.crt.
AUTO_RESOLVE_TRANSFER_CLIENT = "auto"

View File

@@ -0,0 +1,897 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import copy as python_copy
from botocore.exceptions import ClientError
from boto3 import utils
from boto3.s3.transfer import (
ProgressCallbackInvoker,
S3Transfer,
TransferConfig,
create_transfer_manager,
)
def inject_s3_transfer_methods(class_attributes, **kwargs):
    """Attach the managed transfer methods to the S3 client class."""
    injected = {
        'upload_file': upload_file,
        'download_file': download_file,
        'copy': copy,
        'upload_fileobj': upload_fileobj,
        'download_fileobj': download_fileobj,
    }
    for attr_name, method in injected.items():
        utils.inject_attribute(class_attributes, attr_name, method)
def inject_bucket_methods(class_attributes, **kwargs):
    """Attach the customized load and transfer methods to the Bucket class."""
    injected = {
        'load': bucket_load,
        'upload_file': bucket_upload_file,
        'download_file': bucket_download_file,
        'copy': bucket_copy,
        'upload_fileobj': bucket_upload_fileobj,
        'download_fileobj': bucket_download_fileobj,
    }
    for attr_name, method in injected.items():
        utils.inject_attribute(class_attributes, attr_name, method)
def inject_object_methods(class_attributes, **kwargs):
    """Attach the managed transfer methods to the Object class."""
    injected = {
        'upload_file': object_upload_file,
        'download_file': object_download_file,
        'copy': object_copy,
        'upload_fileobj': object_upload_fileobj,
        'download_fileobj': object_download_fileobj,
    }
    for attr_name, method in injected.items():
        utils.inject_attribute(class_attributes, attr_name, method)
def inject_object_summary_methods(class_attributes, **kwargs):
    """Attach the customized ``load`` method to the ObjectSummary class."""
    utils.inject_attribute(class_attributes, 'load', object_summary_load)
def bucket_load(self, *args, **kwargs):
    """
    Calls s3.Client.list_buckets() to update the attributes of the Bucket
    resource.
    """
    # The docstring above is phrased this way to match what the autogenerated
    # docs produce.
    # We can't actually get the bucket's attributes from a HeadBucket,
    # so we need to use a ListBuckets and search for our bucket.
    # However, we may fail if we lack permissions to ListBuckets
    # or the bucket is in another account. In which case, creation_date
    # will be None.
    self.meta.data = {}
    try:
        response = self.meta.client.list_buckets()
        for bucket_data in response['Buckets']:
            if bucket_data['Name'] == self.name:
                self.meta.data = bucket_data
                break
    except ClientError as e:
        # AccessDenied is tolerated (meta.data stays {}); any other error
        # is re-raised for the caller to handle.  Idiom fix: the original
        # used ``not x == y`` where ``x != y`` is the clear form.
        if e.response.get('Error', {}).get('Code') != 'AccessDenied':
            raise
def object_summary_load(self, *args, **kwargs):
    """
    Calls s3.Client.head_object to update the attributes of the ObjectSummary
    resource.
    """
    head = self.meta.client.head_object(
        Bucket=self.bucket_name, Key=self.key
    )
    # ListObjects reports the length as 'Size' while HeadObject reports it
    # as 'ContentLength'; normalize so ObjectSummary attributes resolve.
    if 'ContentLength' in head:
        head['Size'] = head.pop('ContentLength')
    self.meta.data = head
def upload_file(
    self, Filename, Bucket, Key, ExtraArgs=None, Callback=None, Config=None
):
    """Upload a file to an S3 object.

    Usage::

        import boto3
        s3 = boto3.client('s3')
        s3.upload_file('/tmp/hello.txt', 'mybucket', 'hello.txt')

    Same behavior as S3Transfer's upload_file() method, but with
    capitalized argument names. Detailed examples can be found at
    :ref:`S3Transfer's Usage <ref_s3transfer_usage>`.

    :type Filename: str
    :param Filename: The path to the file to upload.
    :type Bucket: str
    :param Bucket: The name of the bucket to upload to.
    :type Key: str
    :param Key: The name of the key to upload to.
    :type ExtraArgs: dict
    :param ExtraArgs: Extra arguments passed to the client operation; see
        boto3.s3.transfer.S3Transfer.ALLOWED_UPLOAD_ARGS for what is allowed.
    :type Callback: function
    :param Callback: Called periodically with the number of bytes
        transferred during the upload.
    :type Config: boto3.s3.transfer.TransferConfig
    :param Config: The transfer configuration to use for the upload.
    """
    transfer_args = {
        'filename': Filename,
        'bucket': Bucket,
        'key': Key,
        'extra_args': ExtraArgs,
        'callback': Callback,
    }
    with S3Transfer(self, Config) as transfer:
        return transfer.upload_file(**transfer_args)
def download_file(
    self, Bucket, Key, Filename, ExtraArgs=None, Callback=None, Config=None
):
    """Download an S3 object to a file.

    Usage::

        import boto3
        s3 = boto3.resource('s3')
        s3.meta.client.download_file('mybucket', 'hello.txt', '/tmp/hello.txt')

    Same behavior as S3Transfer's download_file() method, but with
    capitalized argument names. Detailed examples can be found at
    :ref:`S3Transfer's Usage <ref_s3transfer_usage>`.

    :type Bucket: str
    :param Bucket: The name of the bucket to download from.
    :type Key: str
    :param Key: The name of the key to download from.
    :type Filename: str
    :param Filename: The path to the file to download to.
    :type ExtraArgs: dict
    :param ExtraArgs: Extra arguments passed to the client operation; see
        boto3.s3.transfer.S3Transfer.ALLOWED_DOWNLOAD_ARGS for what is
        allowed.
    :type Callback: function
    :param Callback: Called periodically with the number of bytes
        transferred during the download.
    :type Config: boto3.s3.transfer.TransferConfig
    :param Config: The transfer configuration to use for the download.
    """
    transfer_args = {
        'bucket': Bucket,
        'key': Key,
        'filename': Filename,
        'extra_args': ExtraArgs,
        'callback': Callback,
    }
    with S3Transfer(self, Config) as transfer:
        return transfer.download_file(**transfer_args)
def bucket_upload_file(
    self, Filename, Key, ExtraArgs=None, Callback=None, Config=None
):
    """Upload a file to an S3 object in this bucket.

    Delegates to ``client.upload_file`` with ``Bucket`` taken from this
    resource. Usage::

        import boto3
        s3 = boto3.resource('s3')
        s3.Bucket('mybucket').upload_file('/tmp/hello.txt', 'hello.txt')

    :type Filename: str
    :param Filename: The path to the file to upload.
    :type Key: str
    :param Key: The name of the key to upload to.
    :type ExtraArgs: dict
    :param ExtraArgs: Extra arguments passed to the client operation; see
        boto3.s3.transfer.S3Transfer.ALLOWED_UPLOAD_ARGS for what is allowed.
    :type Callback: function
    :param Callback: Called periodically with the number of bytes
        transferred during the upload.
    :type Config: boto3.s3.transfer.TransferConfig
    :param Config: The transfer configuration to use for the upload.
    """
    forwarded = dict(
        Filename=Filename,
        Bucket=self.name,
        Key=Key,
        ExtraArgs=ExtraArgs,
        Callback=Callback,
        Config=Config,
    )
    return self.meta.client.upload_file(**forwarded)
def bucket_download_file(
    self, Key, Filename, ExtraArgs=None, Callback=None, Config=None
):
    """Download an S3 object from this bucket to a file.

    Delegates to ``client.download_file`` with ``Bucket`` taken from this
    resource. Usage::

        import boto3
        s3 = boto3.resource('s3')
        s3.Bucket('mybucket').download_file('hello.txt', '/tmp/hello.txt')

    :type Key: str
    :param Key: The name of the key to download from.
    :type Filename: str
    :param Filename: The path to the file to download to.
    :type ExtraArgs: dict
    :param ExtraArgs: Extra arguments passed to the client operation; see
        boto3.s3.transfer.S3Transfer.ALLOWED_DOWNLOAD_ARGS for what is
        allowed.
    :type Callback: function
    :param Callback: Called periodically with the number of bytes
        transferred during the download.
    :type Config: boto3.s3.transfer.TransferConfig
    :param Config: The transfer configuration to use for the download.
    """
    forwarded = dict(
        Bucket=self.name,
        Key=Key,
        Filename=Filename,
        ExtraArgs=ExtraArgs,
        Callback=Callback,
        Config=Config,
    )
    return self.meta.client.download_file(**forwarded)
def object_upload_file(
    self, Filename, ExtraArgs=None, Callback=None, Config=None
):
    """Upload a file to this S3 object.

    Delegates to ``client.upload_file`` with ``Bucket`` and ``Key`` taken
    from this resource. Usage::

        import boto3
        s3 = boto3.resource('s3')
        s3.Object('mybucket', 'hello.txt').upload_file('/tmp/hello.txt')

    :type Filename: str
    :param Filename: The path to the file to upload.
    :type ExtraArgs: dict
    :param ExtraArgs: Extra arguments passed to the client operation; see
        boto3.s3.transfer.S3Transfer.ALLOWED_UPLOAD_ARGS for what is allowed.
    :type Callback: function
    :param Callback: Called periodically with the number of bytes
        transferred during the upload.
    :type Config: boto3.s3.transfer.TransferConfig
    :param Config: The transfer configuration to use for the upload.
    """
    forwarded = dict(
        Filename=Filename,
        Bucket=self.bucket_name,
        Key=self.key,
        ExtraArgs=ExtraArgs,
        Callback=Callback,
        Config=Config,
    )
    return self.meta.client.upload_file(**forwarded)
def object_download_file(
    self, Filename, ExtraArgs=None, Callback=None, Config=None
):
    """Download this S3 object to a file.

    Delegates to ``client.download_file`` with ``Bucket`` and ``Key`` taken
    from this resource. Usage::

        import boto3
        s3 = boto3.resource('s3')
        s3.Object('mybucket', 'hello.txt').download_file('/tmp/hello.txt')

    :type Filename: str
    :param Filename: The path to the file to download to.
    :type ExtraArgs: dict
    :param ExtraArgs: Extra arguments passed to the client operation; see
        boto3.s3.transfer.S3Transfer.ALLOWED_DOWNLOAD_ARGS for what is
        allowed.
    :type Callback: function
    :param Callback: Called periodically with the number of bytes
        transferred during the download.
    :type Config: boto3.s3.transfer.TransferConfig
    :param Config: The transfer configuration to use for the download.
    """
    forwarded = dict(
        Bucket=self.bucket_name,
        Key=self.key,
        Filename=Filename,
        ExtraArgs=ExtraArgs,
        Callback=Callback,
        Config=Config,
    )
    return self.meta.client.download_file(**forwarded)
def copy(
    self,
    CopySource,
    Bucket,
    Key,
    ExtraArgs=None,
    Callback=None,
    SourceClient=None,
    Config=None,
):
    """Copy an object from one S3 location to another.

    This is a managed transfer which will perform a multipart copy in
    multiple threads if necessary.

    Usage::

        import boto3
        s3 = boto3.resource('s3')
        copy_source = {
            'Bucket': 'mybucket',
            'Key': 'mykey'
        }
        s3.meta.client.copy(copy_source, 'otherbucket', 'otherkey')

    :type CopySource: dict
    :param CopySource: Source location as
        ``{'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}``;
        the ``VersionId`` entry is optional.
    :type Bucket: str
    :param Bucket: The name of the bucket to copy to
    :type Key: str
    :param Key: The name of the key to copy to
    :type ExtraArgs: dict
    :param ExtraArgs: Extra arguments passed to the client operation; see
        boto3.s3.transfer.S3Transfer.ALLOWED_DOWNLOAD_ARGS for what is
        allowed.
    :type Callback: function
    :param Callback: Called periodically with the number of bytes
        transferred during the copy.
    :type SourceClient: botocore or boto3 Client
    :param SourceClient: Client used for operations on the source object
        (e.g. the head_object that determines the size of the copy).
        Defaults to the current client.
    :type Config: boto3.s3.transfer.TransferConfig
    :param Config: The transfer configuration to use for the copy. The
        caller's object is never mutated.
    """
    subscribers = (
        [ProgressCallbackInvoker(Callback)] if Callback is not None else None
    )
    base_config = TransferConfig() if Config is None else Config
    # copy is not supported in the CRT, so force the classic transfer
    # client on a shallow copy of the config (keep the caller's intact).
    classic_config = python_copy.copy(base_config)
    classic_config.preferred_transfer_client = "classic"
    with create_transfer_manager(self, classic_config) as manager:
        future = manager.copy(
            copy_source=CopySource,
            bucket=Bucket,
            key=Key,
            extra_args=ExtraArgs,
            subscribers=subscribers,
            source_client=SourceClient,
        )
        return future.result()
def bucket_copy(
    self,
    CopySource,
    Key,
    ExtraArgs=None,
    Callback=None,
    SourceClient=None,
    Config=None,
):
    """Copy an object from another S3 location into this bucket.

    This is a managed transfer which will perform a multipart copy in
    multiple threads if necessary. Delegates to ``client.copy`` with
    ``Bucket`` taken from this resource. Usage::

        import boto3
        s3 = boto3.resource('s3')
        copy_source = {
            'Bucket': 'mybucket',
            'Key': 'mykey'
        }
        bucket = s3.Bucket('otherbucket')
        bucket.copy(copy_source, 'otherkey')

    :type CopySource: dict
    :param CopySource: Source location as
        ``{'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}``;
        the ``VersionId`` entry is optional.
    :type Key: str
    :param Key: The name of the key to copy to
    :type ExtraArgs: dict
    :param ExtraArgs: Extra arguments passed to the client operation; see
        boto3.s3.transfer.S3Transfer.ALLOWED_DOWNLOAD_ARGS for what is
        allowed.
    :type Callback: function
    :param Callback: Called periodically with the number of bytes
        transferred during the copy.
    :type SourceClient: botocore or boto3 Client
    :param SourceClient: Client used for operations on the source object.
        Defaults to the current client.
    :type Config: boto3.s3.transfer.TransferConfig
    :param Config: The transfer configuration to use for the copy.
    """
    forwarded = dict(
        CopySource=CopySource,
        Bucket=self.name,
        Key=Key,
        ExtraArgs=ExtraArgs,
        Callback=Callback,
        SourceClient=SourceClient,
        Config=Config,
    )
    return self.meta.client.copy(**forwarded)
def object_copy(
    self,
    CopySource,
    ExtraArgs=None,
    Callback=None,
    SourceClient=None,
    Config=None,
):
    """Copy an object from another S3 location to this object.

    This is a managed transfer which will perform a multipart copy in
    multiple threads if necessary. Delegates to ``client.copy`` with
    ``Bucket`` and ``Key`` taken from this resource. Usage::

        import boto3
        s3 = boto3.resource('s3')
        copy_source = {
            'Bucket': 'mybucket',
            'Key': 'mykey'
        }
        bucket = s3.Bucket('otherbucket')
        obj = bucket.Object('otherkey')
        obj.copy(copy_source)

    :type CopySource: dict
    :param CopySource: Source location as
        ``{'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}``;
        the ``VersionId`` entry is optional.
    :type ExtraArgs: dict
    :param ExtraArgs: Extra arguments passed to the client operation; see
        boto3.s3.transfer.S3Transfer.ALLOWED_DOWNLOAD_ARGS for what is
        allowed.
    :type Callback: function
    :param Callback: Called periodically with the number of bytes
        transferred during the copy.
    :type SourceClient: botocore or boto3 Client
    :param SourceClient: Client used for operations on the source object.
        Defaults to the current client.
    :type Config: boto3.s3.transfer.TransferConfig
    :param Config: The transfer configuration to use for the copy.
    """
    forwarded = dict(
        CopySource=CopySource,
        Bucket=self.bucket_name,
        Key=self.key,
        ExtraArgs=ExtraArgs,
        Callback=Callback,
        SourceClient=SourceClient,
        Config=Config,
    )
    return self.meta.client.copy(**forwarded)
def upload_fileobj(
    self, Fileobj, Bucket, Key, ExtraArgs=None, Callback=None, Config=None
):
    """Upload a file-like object to S3.

    The file-like object must be in binary mode. This is a managed
    transfer which will perform a multipart upload in multiple threads
    if necessary.

    Usage::

        import boto3
        s3 = boto3.client('s3')

        with open('filename', 'rb') as data:
            s3.upload_fileobj(data, 'mybucket', 'mykey')

    :type Fileobj: a file-like object
    :param Fileobj: A file-like object to upload. At a minimum, it must
        implement the `read` method, and must return bytes.
    :type Bucket: str
    :param Bucket: The name of the bucket to upload to.
    :type Key: str
    :param Key: The name of the key to upload to.
    :type ExtraArgs: dict
    :param ExtraArgs: Extra arguments passed to the client operation; see
        boto3.s3.transfer.S3Transfer.ALLOWED_UPLOAD_ARGS for what is allowed.
    :type Callback: function
    :param Callback: Called periodically with the number of bytes
        transferred during the upload.
    :type Config: boto3.s3.transfer.TransferConfig
    :param Config: The transfer configuration to use for the upload.

    :raises ValueError: If ``Fileobj`` has no ``read`` method.
    """
    if not hasattr(Fileobj, 'read'):
        raise ValueError('Fileobj must implement read')
    subscribers = (
        [ProgressCallbackInvoker(Callback)] if Callback is not None else None
    )
    manager_config = Config if Config is not None else TransferConfig()
    with create_transfer_manager(self, manager_config) as manager:
        future = manager.upload(
            fileobj=Fileobj,
            bucket=Bucket,
            key=Key,
            extra_args=ExtraArgs,
            subscribers=subscribers,
        )
        return future.result()
def bucket_upload_fileobj(
    self, Fileobj, Key, ExtraArgs=None, Callback=None, Config=None
):
    """Upload a file-like object to this bucket.

    The file-like object must be in binary mode. This is a managed
    transfer which will perform a multipart upload in multiple threads
    if necessary. Delegates to ``client.upload_fileobj`` with ``Bucket``
    taken from this resource. Usage::

        import boto3
        s3 = boto3.resource('s3')
        bucket = s3.Bucket('mybucket')

        with open('filename', 'rb') as data:
            bucket.upload_fileobj(data, 'mykey')

    :type Fileobj: a file-like object
    :param Fileobj: A file-like object to upload. At a minimum, it must
        implement the `read` method, and must return bytes.
    :type Key: str
    :param Key: The name of the key to upload to.
    :type ExtraArgs: dict
    :param ExtraArgs: Extra arguments passed to the client operation; see
        boto3.s3.transfer.S3Transfer.ALLOWED_UPLOAD_ARGS for what is allowed.
    :type Callback: function
    :param Callback: Called periodically with the number of bytes
        transferred during the upload.
    :type Config: boto3.s3.transfer.TransferConfig
    :param Config: The transfer configuration to use for the upload.
    """
    forwarded = dict(
        Fileobj=Fileobj,
        Bucket=self.name,
        Key=Key,
        ExtraArgs=ExtraArgs,
        Callback=Callback,
        Config=Config,
    )
    return self.meta.client.upload_fileobj(**forwarded)
def object_upload_fileobj(
    self, Fileobj, ExtraArgs=None, Callback=None, Config=None
):
    """Upload a file-like object to this object.

    The file-like object must be in binary mode. This is a managed
    transfer which will perform a multipart upload in multiple threads
    if necessary. Delegates to ``client.upload_fileobj`` with ``Bucket``
    and ``Key`` taken from this resource. Usage::

        import boto3
        s3 = boto3.resource('s3')
        bucket = s3.Bucket('mybucket')
        obj = bucket.Object('mykey')

        with open('filename', 'rb') as data:
            obj.upload_fileobj(data)

    :type Fileobj: a file-like object
    :param Fileobj: A file-like object to upload. At a minimum, it must
        implement the `read` method, and must return bytes.
    :type ExtraArgs: dict
    :param ExtraArgs: Extra arguments passed to the client operation; see
        boto3.s3.transfer.S3Transfer.ALLOWED_UPLOAD_ARGS for what is allowed.
    :type Callback: function
    :param Callback: Called periodically with the number of bytes
        transferred during the upload.
    :type Config: boto3.s3.transfer.TransferConfig
    :param Config: The transfer configuration to use for the upload.
    """
    forwarded = dict(
        Fileobj=Fileobj,
        Bucket=self.bucket_name,
        Key=self.key,
        ExtraArgs=ExtraArgs,
        Callback=Callback,
        Config=Config,
    )
    return self.meta.client.upload_fileobj(**forwarded)
def download_fileobj(
    self, Bucket, Key, Fileobj, ExtraArgs=None, Callback=None, Config=None
):
    """Download an object from S3 to a file-like object.

    The file-like object must be in binary mode. This is a managed
    transfer which will perform a multipart download in multiple threads
    if necessary.

    Usage::

        import boto3
        s3 = boto3.client('s3')

        with open('filename', 'wb') as data:
            s3.download_fileobj('mybucket', 'mykey', data)

    :type Bucket: str
    :param Bucket: The name of the bucket to download from.
    :type Key: str
    :param Key: The name of the key to download from.
    :type Fileobj: a file-like object
    :param Fileobj: A file-like object to download into. At a minimum, it
        must implement the `write` method and must accept bytes.
    :type ExtraArgs: dict
    :param ExtraArgs: Extra arguments passed to the client operation; see
        boto3.s3.transfer.S3Transfer.ALLOWED_DOWNLOAD_ARGS for what is
        allowed.
    :type Callback: function
    :param Callback: Called periodically with the number of bytes
        transferred during the download.
    :type Config: boto3.s3.transfer.TransferConfig
    :param Config: The transfer configuration to use for the download.

    :raises ValueError: If ``Fileobj`` has no ``write`` method.
    """
    if not hasattr(Fileobj, 'write'):
        raise ValueError('Fileobj must implement write')
    subscribers = (
        [ProgressCallbackInvoker(Callback)] if Callback is not None else None
    )
    manager_config = Config if Config is not None else TransferConfig()
    with create_transfer_manager(self, manager_config) as manager:
        future = manager.download(
            bucket=Bucket,
            key=Key,
            fileobj=Fileobj,
            extra_args=ExtraArgs,
            subscribers=subscribers,
        )
        return future.result()
def bucket_download_fileobj(
    self, Key, Fileobj, ExtraArgs=None, Callback=None, Config=None
):
    """Download an object from this bucket to a file-like object.

    The file-like object must be in binary mode. This is a managed
    transfer which will perform a multipart download in multiple threads
    if necessary. Delegates to ``client.download_fileobj`` with ``Bucket``
    taken from this resource. Usage::

        import boto3
        s3 = boto3.resource('s3')
        bucket = s3.Bucket('mybucket')

        with open('filename', 'wb') as data:
            bucket.download_fileobj('mykey', data)

    :type Key: str
    :param Key: The name of the key to download from.
    :type Fileobj: a file-like object
    :param Fileobj: A file-like object to download into. At a minimum, it
        must implement the `write` method and must accept bytes.
    :type ExtraArgs: dict
    :param ExtraArgs: Extra arguments passed to the client operation; see
        boto3.s3.transfer.S3Transfer.ALLOWED_DOWNLOAD_ARGS for what is
        allowed.
    :type Callback: function
    :param Callback: Called periodically with the number of bytes
        transferred during the download.
    :type Config: boto3.s3.transfer.TransferConfig
    :param Config: The transfer configuration to use for the download.
    """
    forwarded = dict(
        Bucket=self.name,
        Key=Key,
        Fileobj=Fileobj,
        ExtraArgs=ExtraArgs,
        Callback=Callback,
        Config=Config,
    )
    return self.meta.client.download_fileobj(**forwarded)
def object_download_fileobj(
    self, Fileobj, ExtraArgs=None, Callback=None, Config=None
):
    """Download this object from S3 to a file-like object.

    The file-like object must be in binary mode. This is a managed
    transfer which will perform a multipart download in multiple threads
    if necessary. Delegates to ``client.download_fileobj`` with ``Bucket``
    and ``Key`` taken from this resource. Usage::

        import boto3
        s3 = boto3.resource('s3')
        bucket = s3.Bucket('mybucket')
        obj = bucket.Object('mykey')

        with open('filename', 'wb') as data:
            obj.download_fileobj(data)

    :type Fileobj: a file-like object
    :param Fileobj: A file-like object to download into. At a minimum, it
        must implement the `write` method and must accept bytes.
    :type ExtraArgs: dict
    :param ExtraArgs: Extra arguments passed to the client operation; see
        boto3.s3.transfer.S3Transfer.ALLOWED_DOWNLOAD_ARGS for what is
        allowed.
    :type Callback: function
    :param Callback: Called periodically with the number of bytes
        transferred during the download.
    :type Config: boto3.s3.transfer.TransferConfig
    :param Config: The transfer configuration to use for the download.
    """
    forwarded = dict(
        Bucket=self.bucket_name,
        Key=self.key,
        Fileobj=Fileobj,
        ExtraArgs=ExtraArgs,
        Callback=Callback,
        Config=Config,
    )
    return self.meta.client.download_fileobj(**forwarded)

View File

@@ -0,0 +1,437 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Abstractions over S3's upload/download operations.
This module provides high level abstractions for efficient
uploads/downloads. It handles several things for the user:
* Automatically switching to multipart transfers when
a file is over a specific size threshold
* Uploading/downloading a file in parallel
* Progress callbacks to monitor transfers
* Retries. While botocore handles retries for streaming uploads,
it is not possible for it to handle retries for streaming
downloads. This module handles retries for both cases so
you don't need to implement any retry logic yourself.
This module has a reasonable set of defaults. It also allows you
to configure many aspects of the transfer process including:
* Multipart threshold size
* Max parallel downloads
* Socket timeouts
* Retry amounts
There is no support for s3->s3 multipart copies at this
time.
.. _ref_s3transfer_usage:
Usage
=====
The simplest way to use this module is:
.. code-block:: python
client = boto3.client('s3', 'us-west-2')
transfer = S3Transfer(client)
# Upload /tmp/myfile to s3://bucket/key
transfer.upload_file('/tmp/myfile', 'bucket', 'key')
# Download s3://bucket/key to /tmp/myfile
transfer.download_file('bucket', 'key', '/tmp/myfile')
The ``upload_file`` and ``download_file`` methods also accept
``**kwargs``, which will be forwarded through to the corresponding
client operation. Here are a few examples using ``upload_file``::
# Making the object public
transfer.upload_file('/tmp/myfile', 'bucket', 'key',
extra_args={'ACL': 'public-read'})
# Setting metadata
transfer.upload_file('/tmp/myfile', 'bucket', 'key',
extra_args={'Metadata': {'a': 'b', 'c': 'd'}})
# Setting content type
transfer.upload_file('/tmp/myfile.json', 'bucket', 'key',
extra_args={'ContentType': "application/json"})
The ``S3Transfer`` class also supports progress callbacks so you can
provide transfer progress to users. Both the ``upload_file`` and
``download_file`` methods take an optional ``callback`` parameter.
Here's an example of how to print a simple progress percentage
to the user:
.. code-block:: python
class ProgressPercentage(object):
def __init__(self, filename):
self._filename = filename
self._size = float(os.path.getsize(filename))
self._seen_so_far = 0
self._lock = threading.Lock()
def __call__(self, bytes_amount):
# To simplify we'll assume this is hooked up
# to a single filename.
with self._lock:
self._seen_so_far += bytes_amount
percentage = (self._seen_so_far / self._size) * 100
sys.stdout.write(
"\r%s %s / %s (%.2f%%)" % (
self._filename, self._seen_so_far, self._size,
percentage))
sys.stdout.flush()
transfer = S3Transfer(boto3.client('s3', 'us-west-2'))
# Upload /tmp/myfile to s3://bucket/key and print upload progress.
transfer.upload_file('/tmp/myfile', 'bucket', 'key',
callback=ProgressPercentage('/tmp/myfile'))
You can also provide a TransferConfig object to the S3Transfer
object that gives you more fine grained control over the
transfer. For example:
.. code-block:: python
client = boto3.client('s3', 'us-west-2')
config = TransferConfig(
multipart_threshold=8 * 1024 * 1024,
max_concurrency=10,
num_download_attempts=10,
)
transfer = S3Transfer(client, config)
transfer.upload_file('/tmp/foo', 'bucket', 'key')
"""
import logging
import threading
from os import PathLike, fspath, getpid
from botocore.compat import HAS_CRT
from botocore.exceptions import ClientError
from s3transfer.exceptions import (
RetriesExceededError as S3TransferRetriesExceededError,
)
from s3transfer.futures import NonThreadedExecutor
from s3transfer.manager import TransferConfig as S3TransferConfig
from s3transfer.manager import TransferManager
from s3transfer.subscribers import BaseSubscriber
from s3transfer.utils import OSUtils
import boto3.s3.constants as constants
from boto3.exceptions import RetriesExceededError, S3UploadFailedError
if HAS_CRT:
import awscrt.s3
from boto3.crt import create_crt_transfer_manager
# Convenience byte-size constants used for TransferConfig defaults.
KB = 1024
MB = KB * KB
# Module-level logger; used to report which transfer manager was selected.
logger = logging.getLogger(__name__)
def create_transfer_manager(client, config, osutil=None):
    """Create a transfer manager based on the given configuration.

    :type client: boto3.client
    :param client: The S3 client to use

    :type config: boto3.s3.transfer.TransferConfig
    :param config: The transfer config to use

    :type osutil: s3transfer.utils.OSUtils
    :param osutil: The os utility to use

    :rtype: s3transfer.manager.TransferManager
    :returns: A transfer manager based on parameters provided
    """
    # Prefer the CRT-based manager when the environment and config allow
    # it; create_crt_transfer_manager may still return None (e.g. the CRT
    # client cannot be built), in which case we fall back below.
    manager = None
    if _should_use_crt(config):
        manager = create_crt_transfer_manager(client, config)
    if manager is not None:
        logger.debug(
            f"Using CRT client. pid: {getpid()}, thread: {threading.get_ident()}"
        )
        return manager
    # If we don't resolve something above, fallback to the default.
    logger.debug(
        f"Using default client. pid: {getpid()}, thread: {threading.get_ident()}"
    )
    return _create_default_transfer_manager(client, config, osutil)
def _should_use_crt(config):
    """Return True when the CRT transfer manager should be attempted."""
    # The CRT path needs awscrt>=0.19.18 and a host that the CRT library
    # itself reports as optimized for S3 workloads.
    crt_supported = HAS_CRT and has_minimum_crt_version((0, 19, 18))
    is_optimized_instance = (
        awscrt.s3.is_optimized_for_system() if crt_supported else False
    )
    pref_transfer_client = config.preferred_transfer_client.lower()
    wants_auto_resolve = (
        pref_transfer_client == constants.AUTO_RESOLVE_TRANSFER_CLIENT
    )
    if is_optimized_instance and wants_auto_resolve:
        logger.debug(
            "Attempting to use CRTTransferManager. Config settings may be ignored."
        )
        return True
    logger.debug(
        "Opting out of CRT Transfer Manager. Preferred client: "
        f"{pref_transfer_client}, CRT available: {HAS_CRT}, "
        f"Instance Optimized: {is_optimized_instance}."
    )
    return False
def has_minimum_crt_version(minimum_version):
    """Not intended for use outside boto3.

    Compare the installed awscrt version, parsed as a tuple of ints,
    against ``minimum_version``; unparsable versions compare as too old.
    """
    if not HAS_CRT:
        return False
    try:
        installed_version = tuple(
            int(part) for part in awscrt.__version__.split(".")
        )
    except (TypeError, ValueError):
        # Non-numeric or otherwise malformed version strings are treated
        # as not satisfying the requirement.
        return False
    return installed_version >= minimum_version
def _create_default_transfer_manager(client, config, osutil):
    """Create the default TransferManager implementation for s3transfer."""
    # A non-threaded executor keeps all work on the main thread when the
    # user has opted out of threading.
    executor_cls = None if config.use_threads else NonThreadedExecutor
    return TransferManager(client, config, osutil, executor_cls)
class TransferConfig(S3TransferConfig):
    # boto3 historically exposed some options under different names than
    # the parent S3TransferConfig; this maps each boto3-facing name to the
    # attribute name the parent class (and TransferManager) actually uses.
    ALIAS = {
        'max_concurrency': 'max_request_concurrency',
        'max_io_queue': 'max_io_queue_size',
    }

    def __init__(
        self,
        multipart_threshold=8 * MB,
        max_concurrency=10,
        multipart_chunksize=8 * MB,
        num_download_attempts=5,
        max_io_queue=100,
        io_chunksize=256 * KB,
        use_threads=True,
        max_bandwidth=None,
        preferred_transfer_client=constants.AUTO_RESOLVE_TRANSFER_CLIENT,
    ):
        """Configuration object for managed S3 transfers

        :param multipart_threshold: Transfer size at or above which
            multipart uploads, downloads, and copies are triggered
            automatically.
        :param max_concurrency: Maximum number of threads making requests
            for a transfer. Ignored when ``use_threads`` is ``False``, in
            which case only the main thread is ever used.
        :param multipart_chunksize: Size of each part of a multipart
            transfer.
        :param num_download_attempts: How many times a download is retried
            on errors that occur while streaming the body down from S3
            (i.e. socket errors and read timeouts raised after an OK
            response). Throttling and 5xx errors are retried separately by
            botocore and do not count against this limit (default: 5).
        :param max_io_queue: Maximum number of read parts that may be
            queued in memory awaiting write during a download. Each of
            these parts is at most ``io_chunksize`` bytes.
        :param io_chunksize: Maximum size of each chunk in the io queue.
            Currently this is also the size used when ``read`` is called
            on the downloaded stream.
        :param use_threads: If True, threads are used when performing
            S3 transfers. If False, no threads are used; all transfer
            logic runs in the main thread.
        :param max_bandwidth: Cap on the bandwidth consumed when uploading
            and downloading file content, as an integer number of bytes
            per second.
        :param preferred_transfer_client: String naming the preferred
            transfer client for transfer operations. Supported settings:

            * auto (default) - Use the CRTTransferManager when calls
              are made with supported environment and settings.
            * classic - Only use the original S3TransferManager with
              requests. Disables possible CRT upgrade on requests.
        """
        super().__init__(
            multipart_threshold=multipart_threshold,
            max_request_concurrency=max_concurrency,
            multipart_chunksize=multipart_chunksize,
            num_download_attempts=num_download_attempts,
            max_io_queue_size=max_io_queue,
            io_chunksize=io_chunksize,
            max_bandwidth=max_bandwidth,
        )
        # Mirror each parent attribute under its boto3 alias so the old
        # names remain readable on instances.
        for boto3_name, s3transfer_name in self.ALIAS.items():
            setattr(self, boto3_name, getattr(self, s3transfer_name))
        self.use_threads = use_threads
        self.preferred_transfer_client = preferred_transfer_client

    def __setattr__(self, name, value):
        # Writing through an alias also updates the attribute it points
        # to, since that is what actually governs the TransferManager.
        if name in self.ALIAS:
            super().__setattr__(self.ALIAS[name], value)
        # Always store the value under the exact name assigned as well.
        super().__setattr__(name, value)
class S3Transfer:
    """Backwards-compatible high level interface for S3 transfers.

    Wraps a :class:`s3transfer.manager.TransferManager` and exposes the
    legacy ``upload_file``/``download_file`` API, translating s3transfer
    exceptions into the boto3 exceptions callers historically caught.
    """

    ALLOWED_DOWNLOAD_ARGS = TransferManager.ALLOWED_DOWNLOAD_ARGS
    ALLOWED_UPLOAD_ARGS = TransferManager.ALLOWED_UPLOAD_ARGS

    def __init__(self, client=None, config=None, osutil=None, manager=None):
        """Create an S3Transfer.

        :param client: A boto3 S3 client used to build a transfer manager.
        :param config: Optional :class:`TransferConfig`; defaults are used
            when omitted.
        :param osutil: Optional :class:`s3transfer.utils.OSUtils`.
        :param manager: A pre-built TransferManager. Mutually exclusive
            with ``client``/``config``/``osutil``.
        :raises ValueError: If neither client nor manager is given, or if
            a manager is combined with the other parameters.
        """
        if not client and not manager:
            raise ValueError(
                'Either a boto3.Client or s3transfer.manager.TransferManager '
                'must be provided'
            )
        if manager and any([client, config, osutil]):
            raise ValueError(
                'Manager cannot be provided with client, config, '
                'nor osutil. These parameters are mutually exclusive.'
            )
        if config is None:
            config = TransferConfig()
        if osutil is None:
            osutil = OSUtils()
        if manager:
            self._manager = manager
        else:
            self._manager = create_transfer_manager(client, config, osutil)

    @staticmethod
    def _coerce_filename(filename):
        """Return *filename* as a ``str``, accepting path-like objects.

        :raises ValueError: If *filename* is neither a string nor
            path-like.
        """
        if isinstance(filename, PathLike):
            filename = fspath(filename)
        if not isinstance(filename, str):
            raise ValueError('Filename must be a string or a path-like object')
        return filename

    def upload_file(
        self, filename, bucket, key, callback=None, extra_args=None
    ):
        """Upload a file to an S3 object.

        Variants have also been injected into S3 client, Bucket and Object.
        You don't have to use S3Transfer.upload_file() directly.

        .. seealso::
            :py:meth:`S3.Client.upload_file`
            :py:meth:`S3.Client.upload_fileobj`
        """
        filename = self._coerce_filename(filename)
        subscribers = self._get_subscribers(callback)
        future = self._manager.upload(
            filename, bucket, key, extra_args, subscribers
        )
        try:
            future.result()
        # If a client error was raised, add the backwards compatibility layer
        # that raises a S3UploadFailedError. These specific errors were only
        # ever thrown for upload_parts but now can be thrown for any related
        # client error.
        except ClientError as e:
            # Chain the original ClientError so tracebacks show the root
            # cause.
            raise S3UploadFailedError(
                "Failed to upload {} to {}: {}".format(
                    filename, '/'.join([bucket, key]), e
                )
            ) from e

    def download_file(
        self, bucket, key, filename, extra_args=None, callback=None
    ):
        """Download an S3 object to a file.

        Variants have also been injected into S3 client, Bucket and Object.
        You don't have to use S3Transfer.download_file() directly.

        .. seealso::
            :py:meth:`S3.Client.download_file`
            :py:meth:`S3.Client.download_fileobj`
        """
        filename = self._coerce_filename(filename)
        subscribers = self._get_subscribers(callback)
        future = self._manager.download(
            bucket, key, filename, extra_args, subscribers
        )
        try:
            future.result()
        # This is for backwards compatibility where when retries are
        # exceeded we need to throw the same error from boto3 instead of
        # s3transfer's built in RetriesExceededError as current users are
        # catching the boto3 one instead of the s3transfer exception to do
        # their own retries.
        except S3TransferRetriesExceededError as e:
            raise RetriesExceededError(e.last_exception) from e

    def _get_subscribers(self, callback):
        # The legacy callback API is adapted onto the subscriber interface
        # used by the TransferManager.
        if not callback:
            return None
        return [ProgressCallbackInvoker(callback)]

    def __enter__(self):
        return self

    def __exit__(self, *args):
        # Delegate shutdown/cleanup to the underlying transfer manager.
        self._manager.__exit__(*args)
class ProgressCallbackInvoker(BaseSubscriber):
    """Back-compat adapter that forwards progress events to a callback.

    :param callback: A callable taking a single positional argument: the
        number of bytes transferred.
    """

    def __init__(self, callback):
        self._user_callback = callback

    def on_progress(self, bytes_transferred, **kwargs):
        # Only the byte count is exposed to the legacy callback; any other
        # subscriber kwargs are ignored.
        self._user_callback(bytes_transferred)

View File

@@ -0,0 +1,531 @@
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import copy
import os
import botocore.session
from botocore.client import Config
from botocore.exceptions import DataNotFoundError, UnknownServiceError
import boto3
import boto3.utils
from boto3.exceptions import ResourceNotExistsError, UnknownAPIVersionError
from .resources.factory import ResourceFactory
class Session:
    """
    A session stores configuration state and allows you to create service
    clients and resources.

    :type aws_access_key_id: string
    :param aws_access_key_id: AWS access key ID
    :type aws_secret_access_key: string
    :param aws_secret_access_key: AWS secret access key
    :type aws_session_token: string
    :param aws_session_token: AWS temporary session token
    :type region_name: string
    :param region_name: Default region when creating new connections
    :type botocore_session: botocore.session.Session
    :param botocore_session: Use this Botocore session instead of creating
        a new default one.
    :type profile_name: string
    :param profile_name: The name of a profile to use. If not given, then
        the default profile is used.
    """

    def __init__(
        self,
        aws_access_key_id=None,
        aws_secret_access_key=None,
        aws_session_token=None,
        region_name=None,
        botocore_session=None,
        profile_name=None,
    ):
        if botocore_session is not None:
            self._session = botocore_session
        else:
            # Create a new default session
            self._session = botocore.session.get_session()

        # Setup custom user-agent string if it isn't already customized
        if self._session.user_agent_name == 'Botocore':
            botocore_info = 'Botocore/{}'.format(
                self._session.user_agent_version
            )
            if self._session.user_agent_extra:
                self._session.user_agent_extra += ' ' + botocore_info
            else:
                self._session.user_agent_extra = botocore_info
            self._session.user_agent_name = 'Boto3'
            self._session.user_agent_version = boto3.__version__

        if profile_name is not None:
            self._session.set_config_variable('profile', profile_name)

        # Credentials passed explicitly to this constructor take precedence
        # over anything the underlying botocore session would resolve from
        # the environment, config files, or instance metadata.
        if aws_access_key_id or aws_secret_access_key or aws_session_token:
            self._session.set_credentials(
                aws_access_key_id, aws_secret_access_key, aws_session_token
            )

        if region_name is not None:
            self._session.set_config_variable('region', region_name)

        self.resource_factory = ResourceFactory(
            self._session.get_component('event_emitter')
        )
        self._setup_loader()
        self._register_default_handlers()

    def __repr__(self):
        return '{}(region_name={})'.format(
            self.__class__.__name__,
            repr(self._session.get_config_variable('region')),
        )

    @property
    def profile_name(self):
        """
        The **read-only** profile name.
        """
        return self._session.profile or 'default'

    @property
    def region_name(self):
        """
        The **read-only** region name.
        """
        return self._session.get_config_variable('region')

    @property
    def events(self):
        """
        The event emitter for a session
        """
        return self._session.get_component('event_emitter')

    @property
    def available_profiles(self):
        """
        The profiles available to the session credentials
        """
        return self._session.available_profiles

    def _setup_loader(self):
        """
        Setup loader paths so that we can load resources.
        """
        # boto3 ships its resource JSON definitions in a 'data' directory
        # next to this module; append it to botocore's loader search paths.
        self._loader = self._session.get_component('data_loader')
        self._loader.search_paths.append(
            os.path.join(os.path.dirname(__file__), 'data')
        )

    def get_available_services(self):
        """
        Get a list of available services that can be loaded as low-level
        clients via :py:meth:`Session.client`.

        :rtype: list
        :return: List of service names
        """
        return self._session.get_available_services()

    def get_available_resources(self):
        """
        Get a list of available services that can be loaded as resource
        clients via :py:meth:`Session.resource`.

        :rtype: list
        :return: List of service names
        """
        return self._loader.list_available_services(type_name='resources-1')

    def get_available_partitions(self):
        """Lists the available partitions

        :rtype: list
        :return: Returns a list of partition names (e.g., ["aws", "aws-cn"])
        """
        return self._session.get_available_partitions()

    def get_available_regions(
        self, service_name, partition_name='aws', allow_non_regional=False
    ):
        """Lists the region and endpoint names of a particular partition.

        The list of regions returned by this method are regions that are
        explicitly known by the client to exist and is not comprehensive. A
        region not returned in this list may still be available for the
        provided service.

        :type service_name: string
        :param service_name: Name of a service to list endpoint for (e.g., s3).

        :type partition_name: string
        :param partition_name: Name of the partition to limit endpoints to.
            (e.g., aws for the public AWS endpoints, aws-cn for AWS China
            endpoints, aws-us-gov for AWS GovCloud (US) Endpoints, etc.)

        :type allow_non_regional: bool
        :param allow_non_regional: Set to True to include endpoints that are
            not regional endpoints (e.g., s3-external-1,
            fips-us-gov-west-1, etc).

        :return: Returns a list of endpoint names (e.g., ["us-east-1"]).
        """
        return self._session.get_available_regions(
            service_name=service_name,
            partition_name=partition_name,
            allow_non_regional=allow_non_regional,
        )

    def get_credentials(self):
        """
        Return the :class:`botocore.credentials.Credentials` object
        associated with this session. If the credentials have not
        yet been loaded, this will attempt to load them. If they
        have already been loaded, this will return the cached
        credentials.
        """
        return self._session.get_credentials()

    def get_partition_for_region(self, region_name):
        """Lists the partition name of a particular region.

        :type region_name: string
        :param region_name: Name of the region to list partition for (e.g.,
            us-east-1).

        :rtype: string
        :return: Returns the respective partition name (e.g., aws).
        """
        return self._session.get_partition_for_region(region_name)

    def client(
        self,
        service_name,
        region_name=None,
        api_version=None,
        use_ssl=True,
        verify=None,
        endpoint_url=None,
        aws_access_key_id=None,
        aws_secret_access_key=None,
        aws_session_token=None,
        config=None,
    ):
        """
        Create a low-level service client by name.

        :type service_name: string
        :param service_name: The name of a service, e.g. 's3' or 'ec2'. You
            can get a list of available services via
            :py:meth:`get_available_services`.

        :type region_name: string
        :param region_name: The name of the region associated with the client.
            A client is associated with a single region.

        :type api_version: string
        :param api_version: The API version to use. By default, botocore will
            use the latest API version when creating a client. You only need
            to specify this parameter if you want to use a previous API version
            of the client.

        :type use_ssl: boolean
        :param use_ssl: Whether or not to use SSL. By default, SSL is used.
            Note that not all services support non-ssl connections.

        :type verify: boolean/string
        :param verify: Whether or not to verify SSL certificates. By default
            SSL certificates are verified. You can provide the following
            values:

            * False - do not validate SSL certificates. SSL will still be
              used (unless use_ssl is False), but SSL certificates
              will not be verified.
            * path/to/cert/bundle.pem - A filename of the CA cert bundle to
              uses. You can specify this argument if you want to use a
              different CA cert bundle than the one used by botocore.

        :type endpoint_url: string
        :param endpoint_url: The complete URL to use for the constructed
            client. Normally, botocore will automatically construct the
            appropriate URL to use when communicating with a service. You
            can specify a complete URL (including the "http/https" scheme)
            to override this behavior. If this value is provided,
            then ``use_ssl`` is ignored.

        :type aws_access_key_id: string
        :param aws_access_key_id: The access key to use when creating
            the client. This is entirely optional, and if not provided,
            the credentials configured for the session will automatically
            be used. You only need to provide this argument if you want
            to override the credentials used for this specific client.

        :type aws_secret_access_key: string
        :param aws_secret_access_key: The secret key to use when creating
            the client. Same semantics as aws_access_key_id above.

        :type aws_session_token: string
        :param aws_session_token: The session token to use when creating
            the client. Same semantics as aws_access_key_id above.

        :type config: botocore.client.Config
        :param config: Advanced client configuration options. If region_name
            is specified in the client config, its value will take precedence
            over environment variables and configuration values, but not over
            a region_name value passed explicitly to the method. See
            `botocore config documentation
            <https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html>`_
            for more details.

        :return: Service client instance
        """
        # Client construction is fully delegated to the underlying botocore
        # session; boto3 adds no behavior of its own here.
        return self._session.create_client(
            service_name,
            region_name=region_name,
            api_version=api_version,
            use_ssl=use_ssl,
            verify=verify,
            endpoint_url=endpoint_url,
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key,
            aws_session_token=aws_session_token,
            config=config,
        )

    def resource(
        self,
        service_name,
        region_name=None,
        api_version=None,
        use_ssl=True,
        verify=None,
        endpoint_url=None,
        aws_access_key_id=None,
        aws_secret_access_key=None,
        aws_session_token=None,
        config=None,
    ):
        """
        Create a resource service client by name.

        :type service_name: string
        :param service_name: The name of a service, e.g. 's3' or 'ec2'. You
            can get a list of available services via
            :py:meth:`get_available_resources`.

        :type region_name: string
        :param region_name: The name of the region associated with the client.
            A client is associated with a single region.

        :type api_version: string
        :param api_version: The API version to use. By default, botocore will
            use the latest API version when creating a client. You only need
            to specify this parameter if you want to use a previous API version
            of the client.

        :type use_ssl: boolean
        :param use_ssl: Whether or not to use SSL. By default, SSL is used.
            Note that not all services support non-ssl connections.

        :type verify: boolean/string
        :param verify: Whether or not to verify SSL certificates. By default
            SSL certificates are verified. You can provide the following
            values:

            * False - do not validate SSL certificates. SSL will still be
              used (unless use_ssl is False), but SSL certificates
              will not be verified.
            * path/to/cert/bundle.pem - A filename of the CA cert bundle to
              uses. You can specify this argument if you want to use a
              different CA cert bundle than the one used by botocore.

        :type endpoint_url: string
        :param endpoint_url: The complete URL to use for the constructed
            client. Normally, botocore will automatically construct the
            appropriate URL to use when communicating with a service. You
            can specify a complete URL (including the "http/https" scheme)
            to override this behavior. If this value is provided,
            then ``use_ssl`` is ignored.

        :type aws_access_key_id: string
        :param aws_access_key_id: The access key to use when creating
            the client. This is entirely optional, and if not provided,
            the credentials configured for the session will automatically
            be used. You only need to provide this argument if you want
            to override the credentials used for this specific client.

        :type aws_secret_access_key: string
        :param aws_secret_access_key: The secret key to use when creating
            the client. Same semantics as aws_access_key_id above.

        :type aws_session_token: string
        :param aws_session_token: The session token to use when creating
            the client. Same semantics as aws_access_key_id above.

        :type config: botocore.client.Config
        :param config: Advanced client configuration options. If region_name
            is specified in the client config, its value will take precedence
            over environment variables and configuration values, but not over
            a region_name value passed explicitly to the method. If
            user_agent_extra is specified in the client config, it overrides
            the default user_agent_extra provided by the resource API. See
            `botocore config documentation
            <https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html>`_
            for more details.

        :return: Subclass of :py:class:`~boto3.resources.base.ServiceResource`
        """
        try:
            resource_model = self._loader.load_service_model(
                service_name, 'resources-1', api_version
            )
        except UnknownServiceError:
            # The service exists for clients but has no resource model, or
            # does not exist at all; the error message distinguishes the two.
            available = self.get_available_resources()
            has_low_level_client = (
                service_name in self.get_available_services()
            )
            raise ResourceNotExistsError(
                service_name, available, has_low_level_client
            )
        except DataNotFoundError:
            # This is because we've provided an invalid API version.
            available_api_versions = self._loader.list_api_versions(
                service_name, 'resources-1'
            )
            raise UnknownAPIVersionError(
                service_name, api_version, ', '.join(available_api_versions)
            )

        if api_version is None:
            # Even though botocore's load_service_model() can handle
            # using the latest api_version if not provided, we need
            # to track this api_version in boto3 in order to ensure
            # we're pairing a resource model with a client model
            # of the same API version. It's possible for the latest
            # API version of a resource model in boto3 to not be
            # the same API version as a service model in botocore.
            # So we need to look up the api_version if one is not
            # provided to ensure we load the same API version of the
            # client.
            #
            # Note: This is relying on the fact that
            #   loader.load_service_model(..., api_version=None)
            # and loader.determine_latest_version(..., 'resources-1')
            # both load the same api version of the file.
            api_version = self._loader.determine_latest_version(
                service_name, 'resources-1'
            )

        # Creating a new resource instance requires the low-level client
        # and service model, the resource version and resource JSON data.
        # We pass these to the factory and get back a class, which is
        # instantiated on top of the low-level client.
        if config is not None:
            if config.user_agent_extra is None:
                # Deep-copy so the caller's Config object is not mutated
                # when we tag the user agent for resource usage.
                config = copy.deepcopy(config)
                config.user_agent_extra = 'Resource'
        else:
            config = Config(user_agent_extra='Resource')
        client = self.client(
            service_name,
            region_name=region_name,
            api_version=api_version,
            use_ssl=use_ssl,
            verify=verify,
            endpoint_url=endpoint_url,
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key,
            aws_session_token=aws_session_token,
            config=config,
        )
        service_model = client.meta.service_model

        # Create a ServiceContext object to serve as a reference to
        # important read-only information about the general service.
        service_context = boto3.utils.ServiceContext(
            service_name=service_name,
            service_model=service_model,
            resource_json_definitions=resource_model['resources'],
            service_waiter_model=boto3.utils.LazyLoadedWaiterModel(
                self._session, service_name, api_version
            ),
        )

        # Create the service resource class.
        cls = self.resource_factory.load_from_definition(
            resource_name=service_name,
            single_resource_json_definition=resource_model['service'],
            service_context=service_context,
        )

        return cls(client=client)

    def _register_default_handlers(self):
        # Handlers are registered lazily (via boto3.utils.lazy_call) so the
        # customization modules are only imported when the corresponding
        # client or resource class is actually created.

        # S3 customizations
        self._session.register(
            'creating-client-class.s3',
            boto3.utils.lazy_call(
                'boto3.s3.inject.inject_s3_transfer_methods'
            ),
        )
        self._session.register(
            'creating-resource-class.s3.Bucket',
            boto3.utils.lazy_call('boto3.s3.inject.inject_bucket_methods'),
        )
        self._session.register(
            'creating-resource-class.s3.Object',
            boto3.utils.lazy_call('boto3.s3.inject.inject_object_methods'),
        )
        self._session.register(
            'creating-resource-class.s3.ObjectSummary',
            boto3.utils.lazy_call(
                'boto3.s3.inject.inject_object_summary_methods'
            ),
        )

        # DynamoDb customizations
        # unique_id prevents the same handler from being registered twice
        # when multiple sessions share a botocore session.
        self._session.register(
            'creating-resource-class.dynamodb',
            boto3.utils.lazy_call(
                'boto3.dynamodb.transform.register_high_level_interface'
            ),
            unique_id='high-level-dynamodb',
        )
        self._session.register(
            'creating-resource-class.dynamodb.Table',
            boto3.utils.lazy_call(
                'boto3.dynamodb.table.register_table_methods'
            ),
            unique_id='high-level-dynamodb-table',
        )

        # EC2 Customizations
        self._session.register(
            'creating-resource-class.ec2.ServiceResource',
            boto3.utils.lazy_call('boto3.ec2.createtags.inject_create_tags'),
        )
        self._session.register(
            'creating-resource-class.ec2.Instance',
            boto3.utils.lazy_call(
                'boto3.ec2.deletetags.inject_delete_tags',
                event_emitter=self.events,
            ),
        )

View File

@@ -0,0 +1,100 @@
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import importlib
import sys
from collections import namedtuple
# Plain tuple backing ServiceContext; the subclass below only adds docs.
_ServiceContext = namedtuple(
    'ServiceContext',
    'service_name service_model service_waiter_model '
    'resource_json_definitions',
)


class ServiceContext(_ServiceContext):
    """Read-only, service-wide information about a service.

    :type service_name: str
    :param service_name: The name of the service

    :type service_model: :py:class:`botocore.model.ServiceModel`
    :param service_model: The model of the service.

    :type service_waiter_model: :py:class:`botocore.waiter.WaiterModel` or
        a waiter model-like object such as
        :py:class:`boto3.utils.LazyLoadedWaiterModel`
    :param service_waiter_model: The waiter model of the service.

    :type resource_json_definitions: dict
    :param resource_json_definitions: The loaded JSON models of all resource
        shapes for a service. It is equivalent to loading a
        ``resource-1.json`` and retrieving the value at the key "resources".
    """

    pass
def import_module(name):
    """Import the module with the given dotted *name* and return it.

    Does not support relative imports.
    """
    # importlib.import_module resolves dotted paths and returns the leaf
    # module, matching the previous __import__ + sys.modules lookup.
    return importlib.import_module(name)
def lazy_call(full_name, **kwargs):
    """Return a handler that imports and calls *full_name* on first use.

    Keyword arguments supplied here are merged into (and take precedence
    over) any keyword arguments passed when the handler is invoked.
    """
    bound_kwargs = kwargs

    def _handler(**call_kwargs):
        # Defer the import until the handler actually fires.
        module_name, function_name = full_name.rsplit('.', 1)
        module = import_module(module_name)
        call_kwargs.update(bound_kwargs)
        return getattr(module, function_name)(**call_kwargs)

    return _handler
def inject_attribute(class_attributes, name, value):
    """Add *value* under *name* in a class attribute dict.

    :raises RuntimeError: If *name* is already present, to avoid silently
        clobbering an existing attribute.
    """
    if name in class_attributes:
        raise RuntimeError(
            f'Cannot inject class attribute "{name}", attribute '
            f'already exists in class dict.'
        )
    class_attributes[name] = value
class LazyLoadedWaiterModel:
    """A waiter model whose backing data is loaded on demand.

    The service waiter model is not fetched until ``get_waiter`` is
    called for a specific waiter. This keeps docstring generation cheap:
    the waiter-2.json file is only loaded when a ``get_waiter`` call
    actually needs it.
    """

    def __init__(self, bc_session, service_name, api_version):
        self._session = bc_session
        self._service_name = service_name
        self._api_version = api_version

    def get_waiter(self, waiter_name):
        # Load the full waiter model lazily, then pull out the one waiter.
        waiter_model = self._session.get_waiter_model(
            self._service_name, self._api_version
        )
        return waiter_model.get_waiter(waiter_name)