Commit 7be0c99c authored by Dylan Baker

framework: Add ability to set a compression method on file backends



This creates a framework for compressing results as part of the
FileBackend class. This allows for the simple addition of compression
formats (gz is easily possible in python2, xz in python3), but does not
actually implement any compression. zip and bz2 compression are also
possible, although they'll require a little more code.

This patch implements a framework and tests for that framework.

The goal is that other compression methods can easily be added to piglit
simply by setting a few values in framework/backends/compression, and
then everything will just work.
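For example, wiring up a hypothetical gzip method should amount to little
more than the following in framework/backends/compression (a sketch only,
assuming python2.7's gzip.open; the bytes/unicode handling mentioned in the
v3 notes below would still be needed):

    import functools
    import gzip

    COMPRESSION_SUFFIXES.append('.gz')
    COMPRESSORS['gz'] = functools.partial(gzip.open, mode='w')
    DECOMPRESSORS['gz'] = functools.partial(gzip.open, mode='r')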

This will allow compressed junit results to be read, but doesn't add
support for writing compressed junit results, since junit output is mainly
intended for consumption by jenkins, which doesn't understand compression.

v2: - replace the tests' set_compression decorator with use of
      utils.set_piglit_conf. This is better because it uses a generic
      tool, and because set_compression is a somewhat misleading name.
v3: - backport changes from my attempt to port this to python3. There
      are significant differences between python2 and python3 compression
      handling because of the bytes/str/unicode differences between the
      two versions. There are also code cleanups from that work here
    - Make a better effort to protect the unit tests from the
      environment around them. The goal is that the tests control the
      compression options 100%, with no chance of piglit.conf or
      environment variables affecting the tests.
    - reduce code duplication in unit tests
    - add additional unit tests as made possible by refactoring changes
v4: - add additional unit tests
Signed-off-by: Dylan Baker <dylanx.c.baker@intel.com>
parent 785c0cc1
......@@ -45,6 +45,7 @@ import os
import importlib
from .register import Registry
from .compression import COMPRESSION_SUFFIXES
__all__ = [
'BACKENDS',
......@@ -69,7 +70,7 @@ def _register():
Walk through the list of backends and register them to a name in a
dictionary, so that they can be referenced from helper functions.
"""
registry = {}
......@@ -95,7 +96,7 @@ def get_backend(backend):
If the backend module exists, but there is not active implementation then a
BackendNotImplementedError will be raised; it is also the responsibility of
the caller to handle this error.
"""
try:
inst = BACKENDS[backend].backend
......@@ -117,45 +118,57 @@ def load(file_path):
then return the TestrunResult instance.
"""
extension = None
# This should be 'not isdir', since an fd does not evaluate to True using
# 'os.path.isfile'
if not os.path.isdir(file_path):
extension = os.path.splitext(file_path)[1]
if not extension:
extension = ''
else:
for file in os.listdir(file_path):
if file.startswith('result'):
extension = os.path.splitext(file)[1]
break
elif file == 'main':
extension = ''
break
tests = os.path.join(file_path, 'tests')
if extension is None:
def get_extension(file_path):
"""Get the extension name to use when searching for a loader.
This function correctly handles compression suffixes, as long as they
are valid.
"""
def _extension(file_path):
"""Helper function to get the extension string."""
compression = 'none'
name, extension = os.path.splitext(file_path)
# If we hit a compressed suffix, get an additional suffix to test
# with.
# i.e. use .json.gz rather than .gz
if extension in COMPRESSION_SUFFIXES:
compression = extension[1:] # Drop the leading '.'
extension = os.path.splitext(name)[1]
return extension, compression
if not os.path.isdir(file_path):
return _extension(file_path)
else:
for file_ in os.listdir(file_path):
if file_.startswith('result'):
return _extension(file_)
tests = os.path.join(file_path, 'tests')
if os.path.exists(tests):
extension = os.path.splitext(os.listdir(tests)[0])[1]
return _extension(os.listdir(tests)[0])
else:
# At this point we have failed to find any sort of backend, just except
# and die
# At this point we have failed to find any sort of backend, just
# except and die
raise BackendError("No backend found for any file in {}".format(
file_path))
extension, compression = get_extension(file_path)
for backend in BACKENDS.itervalues():
if extension in backend.extensions:
loader = backend.load
break
else:
raise BackendError(
'No module supports file extensions "{}"'.format(extension))
if loader is None:
raise BackendNotImplementedError(
'Loader for {} is not implemented'.format(extension))
if loader is None:
raise BackendNotImplementedError(
'Loader for {} is not implemented'.format(extension))
return loader(file_path, compression)
return loader(file_path)
raise BackendError(
'No module supports file extensions "{}"'.format(extension))
def set_meta(backend, result):
......
......@@ -26,16 +26,34 @@ This module provides mixins and base classes for backend modules.
"""
from __future__ import print_function, absolute_import
import os
import abc
import shutil
import itertools
import contextlib
import itertools
import os
import shutil
from . import compression
from framework.results import TestResult
from framework.status import INCOMPLETE
@contextlib.contextmanager
def write_compressed(filename):
    """Write the final result using the desired compression.

    This helper function reads piglit.conf to decide whether to use
    compression, and what type of compression to use.

    Currently it implements no compression.

    """
    if compression.MODE != 'none':
        filename = '{}.{}'.format(filename, compression.MODE)

    with compression.COMPRESSOR(filename) as f:
        yield f
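A minimal usage sketch, assuming dest is a backend's destination directory;
callers always pass the uncompressed filename, and the helper appends the
suffix based on compression.MODE:

    with write_compressed(os.path.join(dest, 'results.json')) as f:
        f.write('{"tests": {}}')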
class Backend(object):
""" Abstract base class for summary backends
......@@ -143,8 +161,9 @@ class FileBackend(Backend):
"""
def __init__(self, dest, file_start_count=0, file_fsync=False, **kwargs):
self._dest = dest
self.__counter = itertools.count(file_start_count)
self.__file_sync = file_fsync
self._counter = itertools.count(file_start_count)
self._file_sync = file_fsync
self._write_final = write_compressed
__INCOMPLETE = TestResult({'result': INCOMPLETE})
......@@ -155,7 +174,7 @@ class FileBackend(Backend):
"""
file_.flush()
if self.__file_sync:
if self._file_sync:
os.fsync(file_.fileno())
@abc.abstractmethod
......@@ -188,7 +207,7 @@ class FileBackend(Backend):
shutil.move(tfile, file_)
file_ = os.path.join(self._dest, 'tests', '{}.{}'.format(
next(self.__counter), self._file_extension))
next(self._counter), self._file_extension))
with open(file_, 'w') as f:
self._write(f, name, self.__INCOMPLETE)
......
# Copyright (c) 2015 Intel Corporation
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Compression support for backends.
This includes both compression and decompression support.
The primary way to interact with this module should be through the use of the
COMPRESSORS and DECOMPRESSORS constants.
These constants provide a dictionary
mapping text representations of the compression methods to functions that
should be called as context managers using the filename as the only argument.
For example:
with COMPRESSORS['none'](filename) as f:
    f.write('foobar')
COMPRESSOR provides a convenient way to get the default compressor, although
COMPRESSORS is available for more advanced uses.
They should always take unicode objects. It is up to the caller to ensure that
they're passing unicode and not bytes.
"""
from __future__ import print_function, absolute_import, division
import functools
import os
from framework import exceptions
from framework.core import PIGLIT_CONFIG
COMPRESSION_SUFFIXES = []

DEFAULT = 'none'

COMPRESSORS = {
    'none': functools.partial(open, mode='w'),
}

DECOMPRESSORS = {
    'none': functools.partial(open, mode='r')
}


def _set_mode():
    """Set the compression mode.

    Try the environment variable PIGLIT_COMPRESSION; then check the
    PIGLIT_CONFIG section 'core', option 'compression'; finally fall back to
    DEFAULT.

    """
    method = (os.environ.get('PIGLIT_COMPRESSION') or
              PIGLIT_CONFIG.safe_get('core', 'compression') or
              DEFAULT)

    if method not in COMPRESSORS:
        raise exceptions.PiglitFatalError(
            'unsupported compression method {}'.format(method))
    if method not in DECOMPRESSORS:
        raise exceptions.PiglitFatalError(
            'unsupported decompression method {}'.format(method))

    return method


MODE = _set_mode()

COMPRESSOR = COMPRESSORS[MODE]
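A minimal round-trip sketch of how these constants fit together, assuming a
throwaway path; MODE is resolved once at import time from PIGLIT_COMPRESSION,
piglit.conf, or DEFAULT:

    from framework.backends import compression

    path = '/tmp/result.json'  # hypothetical path
    if compression.MODE != 'none':
        path = '{}.{}'.format(path, compression.MODE)
    with compression.COMPRESSOR(path) as f:
        f.write('{}')
    with compression.DECOMPRESSORS[compression.MODE](path) as f:
        assert f.read() == '{}'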
......@@ -32,8 +32,9 @@ except ImportError:
import json
from framework import status, results, exceptions
from .abstract import FileBackend
from .abstract import FileBackend, write_compressed
from .register import Registry
from . import compression
__all__ = [
'REGISTRY',
......@@ -140,10 +141,10 @@ class JSONBackend(FileBackend):
pass
assert data['tests']
# write out the combined file.
with open(os.path.join(self._dest, 'results.json'), 'w') as f:
json.dump(data, f, default=piglit_encoder,
indent=INDENT)
# write out the combined file. Use the compression writer from the
# FileBackend
with self._write_final(os.path.join(self._dest, 'results.json')) as f:
json.dump(data, f, default=piglit_encoder, indent=INDENT)
# Delete the temporary files
os.unlink(os.path.join(self._dest, 'metadata.json'))
......@@ -154,7 +155,7 @@ class JSONBackend(FileBackend):
json.dump({name: data}, f, default=piglit_encoder)
def load_results(filename):
def load_results(filename, compression_):
""" Loader function for TestrunResult class
This function takes a single argument of a results file.
......@@ -182,18 +183,23 @@ def load_results(filename):
# this?
return _resume(filename)
else:
# If there are both old and new results in a directory pick the new
# ones first
if os.path.exists(os.path.join(filename, 'results.json')):
filepath = os.path.join(filename, 'results.json')
# Version 0 results are called 'main'
elif os.path.exists(os.path.join(filename, 'main')):
filepath = os.path.join(filename, 'main')
# Look for a compressed result first, then a bare result, finally for
# an old main file
for name in ['results.json.{}'.format(compression_),
'results.json',
'main']:
if os.path.exists(os.path.join(filename, name)):
filepath = os.path.join(filename, name)
break
else:
raise exceptions.PiglitFatalError(
'No results found in "{}"'.format(filename))
'No results found in "{}" (compression: {})'.format(
filename, compression_))
with open(filepath, 'r') as f:
assert compression_ in compression.COMPRESSORS, \
'unsupported compression type'
with compression.DECOMPRESSORS[compression_](filepath) as f:
testrun = _load(f)
return _update_results(testrun, filepath)
......@@ -310,7 +316,7 @@ def _update_results(results, filepath):
def _write(results, file_):
"""WRite the values of the results out to a file."""
with open(file_, 'w') as f:
with write_compressed(file_) as f:
json.dump({k:v for k, v in results.__dict__.iteritems()},
f,
default=piglit_encoder,
......
......@@ -267,7 +267,7 @@ def _load(results_file):
return run_result
def load(results_dir):
def load(results_dir, compression): # pylint: disable=unused-argument
"""Searches for a results file and returns a TestrunResult.
wraps _load and searches for the result file.
......
......@@ -114,7 +114,7 @@ def test_load():
backends.BACKENDS['test_backend'] = backends.register.Registry(
extensions=['.test_extension'],
backend=None,
load=lambda x: [x],
load=lambda x, y: [x], # y is for a compression value
meta=None,
)
......@@ -146,7 +146,7 @@ def test_load_resume():
backends.BACKENDS['test_backend'] = backends.register.Registry(
extensions=['.test_backend'],
backend=None,
load=lambda x: x,
load=lambda x, y: x,
meta=None,
)
os.mkdir('tests')
......
# Copyright (c) 2015 Intel Corporation
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Tests for compressed backends.
This module tests compression support. Most of the tests are fairly basic,
aiming to verify that compression and decompression work as expected.
"""
from __future__ import print_function, absolute_import, division
import os
import functools
import nose.tools as nt
from framework.tests import utils
from framework.backends import compression
# pylint: disable=line-too-long,protected-access
def _add_compression(value):
"""Decorator that temporarily adds support for a compression method."""
def _wrapper(func):
"""The actual wrapper."""
@functools.wraps(func)
def _inner(*args, **kwargs):
"""The function called."""
compression.COMPRESSORS[value] = None
compression.DECOMPRESSORS[value] = None
try:
func(*args, **kwargs)
finally:
del compression.COMPRESSORS[value]
del compression.DECOMPRESSORS[value]
return _inner
return _wrapper
def _test_compressor(mode):
"""Helper to simplify testing compressors."""
func = compression.COMPRESSORS[mode]
with utils.tempdir() as t:
with func(os.path.join(t, 'file')) as f:
f.write('foo')
def _test_decompressor(mode):
"""helper to simplify testing decompressors."""
func = compression.COMPRESSORS[mode]
dec = compression.DECOMPRESSORS[mode]
with utils.tempdir() as t:
path = os.path.join(t, 'file')
with func(path) as f:
f.write('foo')
with dec(path) as f:
nt.eq_(f.read(), 'foo')
@utils.no_error
def test_compress_none():
"""framework.backends.compression: can compress to 'none'"""
_test_compressor('none')
def test_decompress_none():
"""framework.backends.compression: can decompress from 'none'"""
_test_decompressor('none')
@_add_compression('foobar')
@utils.set_env(PIGLIT_COMPRESSION='foobar')
def test_set_mode_env():
"""framework.backends.compression._set_mode: uses PIGlIT_COMPRESSION environment variable"""
nt.eq_(compression._set_mode(), 'foobar')
@_add_compression('foobar')
@utils.set_env(PIGLIT_COMPRESSION=None)
@utils.set_piglit_conf(('core', 'compression', 'foobar'))
def test_set_mode_piglit_conf():
"""framework.backends.compression._set_mode: uses piglit.conf [core]:compression value if env is unset"""
nt.eq_(compression._set_mode(), 'foobar')
@utils.set_env(PIGLIT_COMPRESSION=None)
@utils.set_piglit_conf(('core', 'compression', None))
def test_set_mode_default():
"""framework.backends.compression._set_mode: uses DEFAULT if env and piglit.conf are unset"""
nt.eq_(compression._set_mode(), compression.DEFAULT)
......@@ -31,10 +31,27 @@ except ImportError:
import json
import nose.tools as nt
from framework import results, backends, exceptions
from framework import results, backends, exceptions, grouptools
from framework.backends import compression
import framework.tests.utils as utils
from .backends_tests import BACKEND_INITIAL_META
_SAVED_COMPRESSION = compression.MODE
def setup_module():
# Set the compression mode to a controlled value (no compression) to
# ensure that we're not getting unexpected file extensions. This means
# that the default can be changed, or environment variables set, without
# affecting the unit tests.
compression.MODE = 'none'
compression.COMPRESSOR = compression.COMPRESSORS['none']
def teardown_module():
compression.MODE = _SAVED_COMPRESSION
compression.COMPRESSOR = compression.COMPRESSORS[_SAVED_COMPRESSION]
def test_initialize_jsonbackend():
"""backends.json.JSONBackend: Class initializes
......@@ -91,10 +108,12 @@ class TestJSONTestMethod(utils.StaticDirectory):
nt.assert_dict_equal({self.test_name: self.result}, test)
class TestJSONTestFinalize(utils.StaticDirectory):
# We're explicitly setting none here since the default can change from none
@classmethod
def setup_class(cls):
cls.test_name = 'a/test/group/test1'
cls.test_name = grouptools.join('a', 'test', 'group', 'test1')
cls.result = results.TestResult({
'time': 1.2345,
'result': 'pass',
......@@ -117,7 +136,8 @@ class TestJSONTestFinalize(utils.StaticDirectory):
assert not os.path.exists(os.path.join(self.tdir, 'tests'))
def test_create_results(self):
"""backends.json.JSONBackend.finalize(): creates a results.json file"""
"""backends.json.JSONBackend.finalize(): creates a results.json file
"""
assert os.path.exists(os.path.join(self.tdir, 'results.json'))
@utils.no_error
......@@ -227,7 +247,7 @@ def test_resume_load_incomplete():
Because resume, aggregate, and summary all use the function called _resume
we can't remove incomplete tests here. It's probably worth doing a refactor
to split some code out and allow this to be done in the resume path.
"""
with utils.tempdir() as f:
backend = backends.json.JSONBackend(f)
......@@ -252,12 +272,13 @@ def test_resume_load_incomplete():
@utils.no_error
def test_load_results_folder_as_main():
"""backends.json.load_results: takes a folder with a file named main in it"""
"""backends.json.load_results: takes a folder with a file named main in it
"""
with utils.tempdir() as tdir:
with open(os.path.join(tdir, 'main'), 'w') as tfile:
tfile.write(json.dumps(utils.JSON_DATA))
backends.json.load_results(tdir)
backends.json.load_results(tdir, 'none')
@utils.no_error
......@@ -267,14 +288,14 @@ def test_load_results_folder():
with open(os.path.join(tdir, 'results.json'), 'w') as tfile:
tfile.write(json.dumps(utils.JSON_DATA))
backends.json.load_results(tdir)
backends.json.load_results(tdir, 'none')
@utils.no_error
def test_load_results_file():
"""backends.json.load_results: Loads a file passed by name"""
with utils.resultfile() as tfile:
backends.json.load_results(tfile.name)
backends.json.load_results(tfile.name, 'none')
def test_load_json():
......
......@@ -33,11 +33,28 @@ import nose.tools as nt
import framework.tests.utils as utils
from framework import backends, results
from framework.backends import compression
# Disable some errors that cannot be fixed either because tests need to probe
# protected members, or because of nose requirements, like long lines
# pylint: disable=protected-access,invalid-name,line-too-long
_SAVED_COMPRESSION = compression.MODE
def setup_module():
# Set the compression mode to a controlled value (no compression) to
# ensure that we're not getting unexpected file extensions. This means
# that the default can be changed, or environment variables set, without
# affecting the unit tests.
compression.MODE = 'none'
compression.COMPRESSOR = compression.COMPRESSORS['none']
def teardown_module():
compression.MODE = _SAVED_COMPRESSION
compression.COMPRESSOR = compression.COMPRESSORS[_SAVED_COMPRESSION]
class TestV0toV1(object):
"""Tests for version 0 -> version 1 of json results format."""
......@@ -227,7 +244,7 @@ class TestV0toV1(object):
try: