Dive into secure and efficient coding practices with our curated list of the top 10 examples showcasing 'boto' in Python. Our advanced machine learning engine meticulously scans each line of code, cross-referencing millions of open source libraries to ensure your implementation is not just functional, but also robust and secure. Elevate your Python applications by mastering S3 connections, mocked AWS calls in tests, and CloudFormation stack operations with confidence and precision.
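Before the examples, here is a minimal sketch of the basic boto (boto 2.x) S3 workflow that most of the snippets below build on: connecting, fetching a bucket, and reading and writing a key. The bucket name 'my-example-bucket', the key name 'example-key', and the reliance on environment-based credentials are illustrative assumptions, not details taken from the examples themselves.

# Minimal boto 2.x S3 sketch (assumed setup; names are placeholders).
import boto
from boto.s3.key import Key

# connect_s3() with no arguments falls back to AWS_ACCESS_KEY_ID /
# AWS_SECRET_ACCESS_KEY environment variables or the ~/.boto config file.
conn = boto.connect_s3()
bucket = conn.get_bucket('my-example-bucket')

# Write a string to a key, then read it back.
key = Key(bucket)
key.key = 'example-key'
key.set_contents_from_string('hello from boto')
data = key.get_contents_as_string()
print(data)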
def test_create_hit_translates_response_back_from_mturk(self, with_mock):
    with_mock.mturk.configure_mock(**{
        'register_hit_type.return_value': fake_hit_type_response(),
        'set_rest_notification.return_value': ResultSet(),
        'create_hit.return_value': fake_hit_response(),
    })
    hit = with_mock.create_hit(**standard_hit_config())
    assert hit['max_assignments'] == 1
    assert hit['reward'] == .01
    assert hit['keywords'] == ['testkw1', 'testkw2']
    assert isinstance(hit['created'], datetime.datetime)
    assert isinstance(hit['expiration'], datetime.datetime)
def download(directory):
    mark_uploaded(cache_name)  # reset
    try:
        print("Downloading {} tarball from S3...".format(cache_name))
        with timer():
            key.get_contents_to_filename(_tarball_filename_for(directory))
    except S3ResponseError as err:
        mark_needs_uploading(cache_name)
        raise SystemExit("Cached {} download failed!".format(cache_name))
    print("Downloaded {}.".format(_tarball_size(directory)))
    _extract_tarball(directory)
    print("{} successfully installed from cache.".format(cache_name))
import boto.s3.connection

access_key = 'GUSCQ627K6CC4OAL4RGC'
secret_key = 'eU8cXNzdXGEFpMMa4SSINjlIeD1fea4pMO3dIw9T'

# Connect to a local S3-compatible endpoint (e.g. a test gateway) instead of AWS.
conn = boto.connect_s3(
    aws_access_key_id=access_key,
    aws_secret_access_key=secret_key,
    host='localhost',
    port=8080,
    is_secure=False,
    calling_format=boto.s3.connection.OrdinaryCallingFormat(),
)

# Delete the bucket on that endpoint (S3 requires the bucket to be empty).
bucket = conn.delete_bucket('test-bucket')
def test_key_save_to_missing_bucket():
    conn = boto.connect_s3("the_key", "the_secret")
    bucket = conn.get_bucket("mybucket", validate=False)
    key = Key(bucket)
    key.key = "the-key"
    key.set_contents_from_string.when.called_with("foobar").should.throw(
        S3ResponseError
    )
def diagnose(self, key):
    k = boto.s3.key.Key(self.s3_bucket)
    k.key = key
    log_text_gz = k.get_contents_as_string()
    log_text = gzip.GzipFile(fileobj=StringIO(log_text_gz)).read().decode('utf-8')
    summary = parse_test_failure.extract_failure_summary(log_text)
    if not summary:
        summary = "Unable to diagnose"
    template = Template("""
    <h1>Diagnosed failure</h1>
    <code><pre>{{ summary|e }}</pre></code>
    <h1>Full log</h1>
    <code><pre>{{ log_text|e }}</pre></code>
    """)
    return self.render_container(template.render(summary=summary, log_text=log_text))
"Fn::If": [
"EnvEqualsPrd",
"ami-00000000",
"ami-ffffffff"
]
},
},
"Type": "AWS::EC2::Instance"
},
}
}
dummy_template_json = json.dumps(dummy_template)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack('test_stack1', template_body=dummy_template_json)
ec2_conn = boto.ec2.connect_to_region("us-west-1")
reservation = ec2_conn.get_all_instances()[0]
ec2_instance = reservation.instances[0]
ec2_instance.image_id.should.equal("ami-ffffffff")
ec2_instance.terminate()
conn = boto.cloudformation.connect_to_region("us-west-2")
conn.create_stack(
    'test_stack1', template_body=dummy_template_json, parameters=[("ENV", "prd")])
ec2_conn = boto.ec2.connect_to_region("us-west-2")
reservation = ec2_conn.get_all_instances()[0]
ec2_instance = reservation.instances[0]
ec2_instance.image_id.should.equal("ami-00000000")
    :param user_id: a string consisting of the TENANT and USER names, used for
                    asserting the Owner ID (not required by S3Connection)

    By default, the Connection class is initialized as the tester user and
    behaves as:
        user_test_tester = testing .admin
    """
    self.aws_access_key = aws_access_key
    self.aws_secret_key = aws_secret_key
    self.user_id = user_id
    # NOTE: auth_host and auth_port can be different from the storage location
    self.host = tf.config['auth_host']
    self.port = int(tf.config['auth_port'])
    self.conn = \
        S3Connection(aws_access_key, aws_secret_key, is_secure=False,
                     host=self.host, port=self.port,
                     calling_format=OrdinaryCallingFormat())
    self.conn.auth_region_name = 'us-east-1'
def _check_role_does_not_exist(self, role_name):
    if role_name in self.mock_iam_roles:
        raise boto.exception.BotoServerError(
            409, 'Conflict', body=err_xml(
                ('Role with name %s already exists.' % role_name),
                code='EntityAlreadyExists'))
def test_create_template_without_required_param():
    template_json = json.dumps(single_instance_with_ebs_volume.template)
    conn = boto.cloudformation.connect_to_region("us-west-1")
    conn.create_stack.when.called_with(
        "test_stack",
        template_body=template_json,
    ).should.throw(BotoServerError)
boto_logger.setLevel(logging.CRITICAL)  # suppress logging for these

def _cred_sub_check(connection_data):
    if not id_matcher.match(connection_data["aws_access_key_id"]):
        raise Exception("Invalid AWS access Key")
    if not secret_matcher.match(connection_data["aws_secret_access_key"]):
        raise Exception("Invalid AWS secret Key")
    raise Exception("Unknown (Authentication?) Error")

openstack = tempest.clients.Manager()
try:
    if urlparse.urlparse(config.boto.ec2_url).hostname is None:
        raise Exception("Failed to get hostname from the ec2_url")
    ec2client = openstack.ec2api_client
    try:
        ec2client.get_all_regions()
    except exception.BotoServerError as exc:
        if exc.error_code is None:
            raise Exception("EC2 target does not look like an EC2 service")
    _cred_sub_check(ec2client.connection_data)
except keystoneclient.exceptions.Unauthorized:
    EC2_CAN_CONNECT_ERROR = "AWS credentials not set," + \
        " failed to get them even by keystoneclient"
except Exception as exc:
    EC2_CAN_CONNECT_ERROR = str(exc)

try:
    if urlparse.urlparse(config.boto.s3_url).hostname is None:
        raise Exception("Failed to get hostname from the s3_url")
    s3client = openstack.s3_client
    try:
        s3client.get_bucket("^INVALID*#()@INVALID.")