Merge pull request basak#10 from dev/7_sync_with_public_project
7 sync with public project

I did a final test run after the cosmetic updates and things still work. Next I'll push to @piotr's public fork and update the PR to the original author.
sakoht committed Jan 14, 2014
2 parents c8c58fb + 2afd841 commit bffdeb6
Showing 11 changed files with 177 additions and 85 deletions.
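
The net effect of this diff is that the separate multipart_upload subcommand goes away and its options move onto archive upload as --concurrent, --part-size, and --num-threads, with stdin streaming rejected for concurrent or encrypted uploads. Below is a rough, parse-only sketch of the new interface, modelled on the mock-based setup in glacier_test.py; the vault name is made up, and it assumes the entry point is glacier.App, that supplying a connection and cache keeps __init__ from touching AWS, and that the parsed namespace is exposed as app.args.

from __future__ import print_function

import mock

import glacier

# Assumed entry point: glacier.App(args, connection, cache), as exercised by
# the unit tests. With a mock connection/cache, construction should only
# parse the argument list; nothing here talks to AWS.
app = glacier.App(
    args=['archive', 'upload', 'myvault', 'glacier_test.input',
          '--name', 'glacier_test.input', '--concurrent'],
    connection=mock.Mock(),
    cache=mock.Mock(),
)

# The flags that used to live on "archive multipart_upload" are now parsed
# by "archive upload" (assuming the namespace is stored on app.args).
print(app.args.concurrent, app.args.part_size, app.args.num_threads)
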
127 changes: 67 additions & 60 deletions glacier.py
@@ -36,6 +36,7 @@
import tempfile
import time

from boto.glacier.concurrent import ConcurrentDownloader
from boto.glacier.concurrent import ConcurrentUploader
DEFAULT_NUM_THREADS = 10

@@ -369,12 +370,12 @@ def recent_enough(job):
def find_complete_job(jobs):
complete_jobs = filter(lambda job: job.completed, jobs)

def most_recent_job(job):
def sortable_date(job):
return iso8601.parse_date(job.completion_date)

sorted_completed_jobs = sorted(
complete_jobs,
key=most_recent_job, reverse=True
key=sortable_date, reverse=True
)

for job in sorted_completed_jobs:
@@ -506,12 +507,7 @@ def archive_list(self):
if archive_list:
print(*archive_list, sep="\n")

def archive_upload(self,
multipart=False,
encryptor=None,
part_size=DEFAULT_PART_SIZE,
num_threads=DEFAULT_NUM_THREADS):

def archive_upload(self, encryptor=None):
# XXX: "Leading whitespace in archive descriptions is removed."
# XXX: "The description must be less than or equal to 1024 bytes. The
# allowable characters are 7 bit ASCII without control codes,
@@ -526,60 +522,54 @@ def archive_upload(self,
raise RuntimeError("Archive name not specified. Use --name.")
name = os.path.basename(full_name)

if self.args.encrypt is None:
filename = self.args.file
if self.args.encrypt:
file_obj = tempfile.NamedTemporaryFile()
file_name = file_obj.name
logger.info("Encrypting %s to %s." % (self.args.file, file_name))
encryptor.encrypt_file(self.args.file, file_name)
logger.info("Encryption complete: %s." % file_name)
else:
filename = tempfile.NamedTemporaryFile().name
logger.info("Encrypting %s to %s."
% (self.args.file, filename))
encryptor.encrypt_file(self.args.file, filename)
logger.info("Encryption complete: %s." % filename)
file_obj = self.args.file
file_name = file_obj.name

vault = self.connection.get_vault(self.args.vault)

if not multipart:
if not self.args.concurrent:
logger.info("Uploading in a single part: %s to %s."
% (filename, vault))
file_obj = file(filename)
% (file_name, vault))
archive_id = vault.create_archive_from_file(
file_obj = file_obj, description=name)
file_obj = file_obj, description=name
)
else:
logger.info("Uploading multi-part: %s to %s"
% (filename, vault))
logger.info("Uploading concurrent: %s to %s"
% (file_name, vault))
uploader = ConcurrentUploader(self.connection.layer1,
vault.name,
part_size=part_size,
num_threads=num_threads)
archive_id = uploader.upload(filename, description=name)
part_size=self.args.part_size,
num_threads=self.args.num_threads)
archive_id = uploader.upload(file_name, description=name)

logger.info("Upload complete.")
logger.info("New Archive ID: %s" % archive_id)

self.cache.add_archive(self.args.vault, name, archive_id)

if self.args.encrypt:
os.remove(filename)

@staticmethod
def _write_archive_retrieval_job(f, job, multipart_size,
encryptor=None):

if encryptor:
destfile = tempfile.NamedTemporaryFile()
else:
destfile = f

if job.archive_size > multipart_size:
def fetch(start, end):
byte_range = start, end - 1
destfile.write(job.get_output(byte_range).read())

whole_parts = job.archive_size // multipart_size
for first_byte in xrange(0, whole_parts * multipart_size,
multipart_size):
fetch(first_byte, first_byte + multipart_size)
remainder = job.archive_size % multipart_size
if remainder:
fetch(job.archive_size - remainder, job.archive_size)
downloader = ConcurrentDownloader(
job=job,
part_size=multipart_size,
num_threads=DEFAULT_NUM_THREADS
)
downloader.download(destfile.name)
else:
destfile.write(job.get_output().read())

@@ -727,7 +717,6 @@ def too_old(last_seen):

print(self.args.name)


def parse_args(self, args=None):
parser = argparse.ArgumentParser()
parser.add_argument('--region', default=DEFAULT_REGION)
@@ -758,32 +747,37 @@ def parse_args(self, args=None):
archive_upload_subparser = archive_subparser.add_parser('upload')
archive_upload_subparser.set_defaults(func=archive_upload_func)
archive_upload_subparser.add_argument('vault')
archive_upload_subparser.add_argument('file')
archive_upload_subparser.add_argument('--name')
archive_upload_subparser.add_argument('file',
type=argparse.FileType('rb'))
archive_upload_subparser.add_argument(
'--name',
help='The description of the archive.'
)
archive_upload_subparser.add_argument(
'--encrypt', default=False, action="store_true",
help="Encrypt before uploading using default GNUPG encryption."
)
archive_upload_subparser.add_argument(
'--concurrent', default=False, action="store_true",
dest="concurrent",
help='Break the upload into multiple pieces '
'(required for large uploads).'
)
archive_upload_subparser.add_argument(
'--encrypt', default=False, action="store_true")

# Multipart upload command
multipart_archive_upload_func = partial(
self.archive_upload, encryptor=encryptor, multipart=True)
archive_multipart_upload_subparser = archive_subparser.add_parser(
'multipart_upload')
archive_multipart_upload_subparser.set_defaults(
func=multipart_archive_upload_func)
archive_multipart_upload_subparser.add_argument('vault')
archive_multipart_upload_subparser.add_argument('file')
archive_multipart_upload_subparser.add_argument('--name')
archive_multipart_upload_subparser.add_argument(
'--encrypt', default=False, action="store_true")
archive_multipart_upload_subparser.add_argument(
'--part-size',
default=DEFAULT_PART_SIZE,
dest="part_size"
dest="part_size",
help=("For --concurrent uploads, change the "
"part size from the default of %d."
% DEFAULT_PART_SIZE)
)
archive_multipart_upload_subparser.add_argument(
archive_upload_subparser.add_argument(
'--num-threads',
default=DEFAULT_NUM_THREADS,
dest="num_threads"
dest="num_threads",
help=("For --concurrent uploads, change the "
"num threads from the default of %d."
% DEFAULT_NUM_THREADS)
)

# Retrieve command
@@ -824,7 +818,20 @@ def parse_args(self, args=None):

job_subparser = subparsers.add_parser('job').add_subparsers()
job_subparser.add_parser('list').set_defaults(func=self.job_list)
return parser.parse_args(args)
parsed = parser.parse_args(args)

if (parsed.func == archive_upload_func
and parsed.file is sys.stdin):
if parsed.concurrent:
raise ConsoleError(
"concurrent uploads do not support streaming stdin"
)
if parsed.encrypt:
raise ConsoleError(
"encrypted uploads do not support streaming stdin"
)

return parsed

def __init__(self, args=None, connection=None, cache=None):
args = self.parse_args(args)
1 change: 1 addition & 0 deletions glacier_test.input
@@ -0,0 +1 @@
123
88 changes: 74 additions & 14 deletions glacier_test.py
File mode changed: 100644 → 100755
@@ -23,15 +23,18 @@

from __future__ import print_function

import os
import sys
import unittest

import mock
from mock import Mock, patch, sentinel
import nose.tools
from tempfile import gettempdir

import glacier

os.environ['GNUPGHOME'] = 'gnupg_test_home'

EX_TEMPFAIL = 75

@@ -106,22 +109,79 @@ def test_archive_list_force_ids(self):
)

def test_archive_upload(self):
file_obj = Mock()
file_obj.name = 'filename'
open_mock = Mock(return_value=file_obj)
with patch('__builtin__.open', open_mock):
self.run_app(['archive', 'upload', 'vault_name', 'filename'])
self.connection.get_vault.assert_called_with('vault_name')
mock_vault = self.connection.get_vault.return_value
mock_vault.create_archive_from_file.assert_called_once_with(
file_obj=file_obj, description='filename')
for encrypt in (False, True):
args = ['archive',
'upload',
'vault_name',
'glacier_test.input',
'--name',
'glacier_test.input']
if encrypt:
args.append('--encrypt')

open_mock = Mock(wraps=open)
with patch('__builtin__.open', open_mock):
self.run_app(args)

self.connection.get_vault.assert_called_with('vault_name')

mock_vault = self.connection.get_vault.return_value

call_args = mock_vault.create_archive_from_file.call_args
uploaded_name = call_args[1]['file_obj'].name
if encrypt:
nose.tools.assert_equals(uploaded_name.find(gettempdir()), 0)
else:
nose.tools.assert_equals(uploaded_name, 'glacier_test.input')

nose.tools.assert_equals(call_args[1]['description'],
'glacier_test.input')

def test_archive_upload_concurrent(self):
for encrypt in (False, True):
args = ['archive', 'upload', 'vault_name', 'glacier_test.input',
'--name', 'glacier_test.input',
'--concurrent']
if encrypt:
args.append('--encrypt')

open_mock = Mock(wraps=open)
upload_mock = Mock(return_value='fake_archive_id')
with patch('__builtin__.open', open_mock), \
patch('boto.glacier.concurrent.ConcurrentUploader.upload',
upload_mock):
self.run_app(args)

self.connection.get_vault.assert_called_with('vault_name')
call_args = upload_mock.call_args
if encrypt:
nose.tools.assert_equals(call_args[0][0].find(gettempdir()), 0)
else:
nose.tools.assert_equals(call_args[0][0], 'glacier_test.input')
nose.tools.assert_equals(call_args[1]['description'],
'glacier_test.input')

def test_archive_stdin_upload(self):
self.run_app(['archive', 'upload', 'vault_name', '-'])
self.connection.get_vault.assert_called_once_with('vault_name')
vault = self.connection.get_vault.return_value
vault.create_archive_from_file.assert_called_once_with(
file_obj=sys.stdin, description='<stdin>')
for concurrent in (False, True):
for encrypt in (False, True):
args = ['archive', 'upload', 'vault_name', '-']
if concurrent:
args.append('--concurrent')
if encrypt:
args.append('--encrypt')
if concurrent or encrypt:
# NOTE: concurrency and encryption are not
# currently supported when streaming from stdin.
nose.tools.assert_raises(Exception,
self.run_app, args)
else:
self.run_app(args)
self.connection.get_vault.assert_called_once_with(
'vault_name'
)
vault = self.connection.get_vault.return_value
vault.create_archive_from_file.assert_called_once_with(
file_obj=sys.stdin, description='<stdin>')

def test_archive_retrieve_no_job(self):
self.init_app(['archive', 'retrieve', 'vault_name', 'archive_name'])
Binary file added gnupg_test_home/pubring.gpg
Binary file added gnupg_test_home/pubring.gpg~
Binary file added gnupg_test_home/random_seed
Binary file added gnupg_test_home/secring.gpg
Binary file added gnupg_test_home/trustdb.gpg
34 changes: 29 additions & 5 deletions gpg.py
@@ -1,3 +1,28 @@
#!/usr/bin/env python

# The MIT License (MIT)
#
# Copyright (c) 2013 Piotr Kaleta and Counsyl
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.

import gnupg
import os

@@ -31,13 +56,12 @@ def _get_fingerprint(self):
"""Return the fingerprint of the default key."""
return self.gpg.list_keys()[0]["fingerprint"]

def encrypt_file(self, input_filename, output_filename):
"""Encrypt `input_filename` and save results as `output_filename`."""
def encrypt_file(self, input_file, output_filename):
"""Encrypt `input_file` (handle) and save results as `output_filename`."""
fingerprint = self._get_fingerprint()

with open(input_filename, "r") as input_file:
self.gpg.encrypt_file(input_file, fingerprint,
output=output_filename)
self.gpg.encrypt_file(input_file, fingerprint,
output=output_filename)

def encrypt(self, data):
"""Return the encrypted version of `data`."""
2 changes: 1 addition & 1 deletion koality.yml
@@ -9,6 +9,6 @@ test:
machines: 1
scripts:
- unit tests:
script: nosetests --with-xunit
script: nosetests --with-xunit glacier_test.py
xunit: nosetests.xml
timeout: 300
