
Update Google Benchmark library.

git-svn-id: https://llvm.org/svn/llvm-project/libcxx/trunk@279989 91177308-0d34-0410-b5e6-96231b3b80d8
Eric Fiselier, 9 years ago
Commit
f76a08728e

+ 22 - 1
utils/google-benchmark/CMakeLists.txt

@@ -12,6 +12,7 @@ endforeach()
 
 option(BENCHMARK_ENABLE_TESTING "Enable testing of the benchmark library." ON)
 option(BENCHMARK_ENABLE_LTO "Enable link time optimisation of the benchmark library." OFF)
+option(BENCHMARK_USE_LIBCXX "Build and test using libc++ as the standard library." OFF)
 # Make sure we can import out CMake functions
 list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake")
 
@@ -78,8 +79,10 @@ else()
   add_cxx_compiler_flag(-pedantic-errors)
   add_cxx_compiler_flag(-Wshorten-64-to-32)
   add_cxx_compiler_flag(-Wfloat-equal)
-  add_cxx_compiler_flag(-Wzero-as-null-pointer-constant)
   add_cxx_compiler_flag(-fstrict-aliasing)
+  if (NOT BENCHMARK_USE_LIBCXX)
+    add_cxx_compiler_flag(-Wzero-as-null-pointer-constant)
+  endif()
   if (HAVE_CXX_FLAG_FSTRICT_ALIASING)
     add_cxx_compiler_flag(-Wstrict-aliasing)
   endif()
@@ -126,6 +129,24 @@ else()
   add_cxx_compiler_flag(--coverage COVERAGE)
 endif()
 
+if (BENCHMARK_USE_LIBCXX)
+  if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang")
+    add_cxx_compiler_flag(-stdlib=libc++)
+  elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU" OR
+          "${CMAKE_CXX_COMPILER_ID}" STREQUAL "Intel")
+    add_cxx_compiler_flag(-nostdinc++)
+    message("libc++ header path must be manually specified using CMAKE_CXX_FLAGS")
+    # Adding -nodefaultlibs directly to CMAKE_<TYPE>_LINKER_FLAGS will break
+    # configuration checks such as 'find_package(Threads)'
+    list(APPEND BENCHMARK_CXX_LINKER_FLAGS -nodefaultlibs)
+    # -lc++ cannot be added directly to CMAKE_<TYPE>_LINKER_FLAGS because
+    # linker flags appear before all linker inputs and -lc++ must appear after.
+    list(APPEND BENCHMARK_CXX_LIBRARIES c++)
+  else()
+    message(FATAL "-DBENCHMARK_USE_LIBCXX:BOOL=ON is not supported for compiler")
+  endif()
+endif(BENCHMARK_USE_LIBCXX)
+
 # C++ feature checks
 cxx_feature_check(STD_REGEX)
 cxx_feature_check(GNU_POSIX_REGEX)

+ 7 - 0
utils/google-benchmark/README.md

@@ -391,6 +391,13 @@ The number of runs of each benchmark is specified globally by the
 `Repetitions` on the registered benchmark object. When a benchmark is run
 more than once the mean and standard deviation of the runs will be reported.
 
+Additionally, the `--benchmark_report_aggregates_only={true|false}` flag or the
+`ReportAggregatesOnly(bool)` function can be used to change how repeated runs
+are reported. By default the result of each repeated run is reported. When this
+option is 'true' only the mean and standard deviation of the runs are reported.
+Calling `ReportAggregatesOnly(bool)` on a registered benchmark object overrides
+the value of the flag for that benchmark.
+
 ## Fixtures
 Fixture tests are created by
 first defining a type that derives from ::benchmark::Fixture and then
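
The following is a minimal sketch, not part of the commit, of how the new reporting control could be used from a benchmark binary; `BM_StringCopy` is a hypothetical benchmark name. The same behaviour is also available at run time via `--benchmark_report_aggregates_only=true`.

```c++
#include "benchmark/benchmark.h"
#include <string>

// Hypothetical benchmark: repeated three times, but only the mean and the
// standard deviation of the three runs are reported because
// ReportAggregatesOnly() is set on the registered benchmark object.
static void BM_StringCopy(benchmark::State& state) {
  std::string src(64, 'x');
  while (state.KeepRunning()) {
    std::string copy(src);
    benchmark::DoNotOptimize(copy);
  }
}
BENCHMARK(BM_StringCopy)->Repetitions(3)->ReportAggregatesOnly();

BENCHMARK_MAIN()
```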

+ 3 - 1
utils/google-benchmark/cmake/CXXFeatureCheck.cmake

@@ -26,7 +26,9 @@ function(cxx_feature_check FILE)
   endif()
   message("-- Performing Test ${FEATURE}")
   try_run(RUN_${FEATURE} COMPILE_${FEATURE}
-          ${CMAKE_BINARY_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/cmake/${FILE}.cpp)
+          ${CMAKE_BINARY_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/cmake/${FILE}.cpp
+          CMAKE_FLAGS ${BENCHMARK_CXX_LINKER_FLAGS}
+          LINK_LIBRARIES ${BENCHMARK_CXX_LIBRARIES})
   if(RUN_${FEATURE} EQUAL 0)
     message("-- Performing Test ${FEATURE} -- success")
     set(HAVE_${VAR} 1 CACHE INTERNAL "Feature test for ${FILE}" PARENT_SCOPE)

+ 36 - 1
utils/google-benchmark/include/benchmark/benchmark_api.h

@@ -214,6 +214,10 @@ void UseCharPointer(char const volatile*);
 // registered benchmark.
 Benchmark* RegisterBenchmarkInternal(Benchmark*);
 
+// Ensure that the standard streams are properly initialized in every TU.
+int InitializeStreams();
+BENCHMARK_UNUSED static int stream_init_anchor = InitializeStreams();
+
 } // end namespace internal
 
 
@@ -425,11 +429,17 @@ public:
 
   // Range arguments for this run. CHECKs if the argument has been set.
   BENCHMARK_ALWAYS_INLINE
-  int range(std::size_t pos) const {
+  int range(std::size_t pos = 0) const {
      assert(range_.size() > pos);
      return range_[pos];
   }
 
+  BENCHMARK_DEPRECATED_MSG("use 'range(0)' instead")
+  int range_x() const { return range(0); }
+
+  BENCHMARK_DEPRECATED_MSG("use 'range(1)' instead")
+  int range_y() const { return range(1); }
+
   BENCHMARK_ALWAYS_INLINE
   size_t iterations() const { return total_iterations_; }
 
@@ -498,11 +508,31 @@ public:
   // REQUIRES: The function passed to the constructor must accept arg1, arg2 ...
   Benchmark* Args(const std::vector<int>& args);
 
+  // Equivalent to Args({x, y})
+  // NOTE: This is a legacy C++03 interface provided for compatibility only.
+  //   New code should use 'Args'.
+  Benchmark* ArgPair(int x, int y) {
+      std::vector<int> args;
+      args.push_back(x);
+      args.push_back(y);
+      return Args(args);
+  }
+
   // Run this benchmark once for a number of values picked from the
   // ranges [start..limit].  (starts and limits are always picked.)
   // REQUIRES: The function passed to the constructor must accept arg1, arg2 ...
   Benchmark* Ranges(const std::vector<std::pair<int, int> >& ranges);
 
+  // Equivalent to Ranges({{lo1, hi1}, {lo2, hi2}}).
+  // NOTE: This is a legacy C++03 interface provided for compatibility only.
+  //   New code should use 'Ranges'.
+  Benchmark* RangePair(int lo1, int hi1, int lo2, int hi2) {
+      std::vector<std::pair<int, int> > ranges;
+      ranges.push_back(std::make_pair(lo1, hi1));
+      ranges.push_back(std::make_pair(lo2, hi2));
+      return Ranges(ranges);
+  }
+
   // Pass this benchmark object to *func, which can customize
   // the benchmark by calling various methods like Arg, Args,
   // Threads, etc.
@@ -522,6 +552,11 @@ public:
   // REQUIRES: `n > 0`
   Benchmark* Repetitions(int n);
 
+  // Specify if each repetition of the benchmark should be reported separately
+  // or if only the final statistics should be reported. If the benchmark
+  // is not repeated then the single result is always reported.
+  Benchmark* ReportAggregatesOnly(bool v = true);
+
   // If a particular benchmark is I/O bound, runs multiple threads internally or
   // if for some reason CPU timings are not representative, call this method. If
   // called, the elapsed time will be used to control how many iterations are
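
As a rough illustration, not part of the commit, of the accessor change above: a benchmark reading two arguments can migrate from the now-deprecated `range_x()`/`range_y()` to `range(0)`/`range(1)`; `BM_VectorFill` is a hypothetical name.

```c++
#include "benchmark/benchmark.h"
#include <vector>

// Hypothetical two-argument benchmark. state.range_x() / state.range_y()
// still compile but now trigger BENCHMARK_DEPRECATED_MSG warnings, so the
// arguments are read through range(pos); range() with no argument is range(0).
static void BM_VectorFill(benchmark::State& state) {
  const int size  = state.range(0);  // formerly state.range_x()
  const int value = state.range(1);  // formerly state.range_y()
  while (state.KeepRunning()) {
    std::vector<int> v(size, value);
    benchmark::DoNotOptimize(v.data());
  }
}
BENCHMARK(BM_VectorFill)->Args({1 << 10, 42});

BENCHMARK_MAIN()
```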

+ 2 - 0
utils/google-benchmark/include/benchmark/macros.h

@@ -53,8 +53,10 @@
 
 #if defined(__GNUC__)
 # define BENCHMARK_BUILTIN_EXPECT(x, y) __builtin_expect(x, y)
+# define BENCHMARK_DEPRECATED_MSG(msg) __attribute__((deprecated(msg)))
 #else
 # define BENCHMARK_BUILTIN_EXPECT(x, y) x
+# define BENCHMARK_DEPRECATED_MSG(msg)
 #endif
 
 #if defined(__GNUC__) && !defined(__clang__)

+ 0 - 320
utils/google-benchmark/mingw.py

@@ -1,320 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-
-import argparse
-import errno
-import logging
-import os
-import platform
-import re
-import sys
-import subprocess
-import tempfile
-
-try:
-    import winreg
-except ImportError:
-    import _winreg as winreg
-try:
-    import urllib.request as request
-except ImportError:
-    import urllib as request
-try:
-    import urllib.parse as parse
-except ImportError:
-    import urlparse as parse
-
-class EmptyLogger(object):
-    '''
-    Provides an implementation that performs no logging
-    '''
-    def debug(self, *k, **kw):
-        pass
-    def info(self, *k, **kw):
-        pass
-    def warn(self, *k, **kw):
-        pass
-    def error(self, *k, **kw):
-        pass
-    def critical(self, *k, **kw):
-        pass
-    def setLevel(self, *k, **kw):
-        pass
-
-urls = (
-    'http://downloads.sourceforge.net/project/mingw-w64/Toolchains%20'
-        'targetting%20Win32/Personal%20Builds/mingw-builds/installer/'
-        'repository.txt',
-    'http://downloads.sourceforge.net/project/mingwbuilds/host-windows/'
-        'repository.txt'
-)
-'''
-A list of mingw-build repositories
-'''
-
-def repository(urls = urls, log = EmptyLogger()):
-    '''
-    Downloads and parse mingw-build repository files and parses them
-    '''
-    log.info('getting mingw-builds repository')
-    versions = {}
-    re_sourceforge = re.compile(r'http://sourceforge.net/projects/([^/]+)/files')
-    re_sub = r'http://downloads.sourceforge.net/project/\1'
-    for url in urls:
-        log.debug(' - requesting: %s', url)
-        socket = request.urlopen(url)
-        repo = socket.read()
-        if not isinstance(repo, str):
-            repo = repo.decode();
-        socket.close()
-        for entry in repo.split('\n')[:-1]:
-            value = entry.split('|')
-            version = tuple([int(n) for n in value[0].strip().split('.')])
-            version = versions.setdefault(version, {})
-            arch = value[1].strip()
-            if arch == 'x32':
-                arch = 'i686'
-            elif arch == 'x64':
-                arch = 'x86_64'
-            arch = version.setdefault(arch, {})
-            threading = arch.setdefault(value[2].strip(), {})
-            exceptions = threading.setdefault(value[3].strip(), {})
-            revision = exceptions.setdefault(int(value[4].strip()[3:]),
-                re_sourceforge.sub(re_sub, value[5].strip()))
-    return versions
-
-def find_in_path(file, path=None):
-    '''
-    Attempts to find an executable in the path
-    '''
-    if platform.system() == 'Windows':
-        file += '.exe'
-    if path is None:
-        path = os.environ.get('PATH', '')
-    if type(path) is type(''):
-        path = path.split(os.pathsep)
-    return list(filter(os.path.exists,
-        map(lambda dir, file=file: os.path.join(dir, file), path)))
-
-def find_7zip(log = EmptyLogger()):
-    '''
-    Attempts to find 7zip for unpacking the mingw-build archives
-    '''
-    log.info('finding 7zip')
-    path = find_in_path('7z')
-    if not path:
-        key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\7-Zip')
-        path, _ = winreg.QueryValueEx(key, 'Path')
-        path = [os.path.join(path, '7z.exe')]
-    log.debug('found \'%s\'', path[0])
-    return path[0]
-
-find_7zip()
-
-def unpack(archive, location, log = EmptyLogger()):
-    '''
-    Unpacks a mingw-builds archive
-    '''
-    sevenzip = find_7zip(log)
-    log.info('unpacking %s', os.path.basename(archive))
-    cmd = [sevenzip, 'x', archive, '-o' + location, '-y']
-    log.debug(' - %r', cmd)
-    with open(os.devnull, 'w') as devnull:
-        subprocess.check_call(cmd, stdout = devnull)
-
-def download(url, location, log = EmptyLogger()):
-    '''
-    Downloads and unpacks a mingw-builds archive
-    '''
-    log.info('downloading MinGW')
-    log.debug(' - url: %s', url)
-    log.debug(' - location: %s', location)
-
-    re_content = re.compile(r'attachment;[ \t]*filename=(")?([^"]*)(")?[\r\n]*')
-
-    stream = request.urlopen(url)
-    try:
-        content = stream.getheader('Content-Disposition') or ''
-    except AttributeError:
-        content = stream.headers.getheader('Content-Disposition') or ''
-    matches = re_content.match(content)
-    if matches:
-        filename = matches.group(2)
-    else:
-        parsed = parse.urlparse(stream.geturl())
-        filename = os.path.basename(parsed.path)
-
-    try:
-        os.makedirs(location)
-    except OSError as e:
-        if e.errno == errno.EEXIST and os.path.isdir(location):
-            pass
-        else:
-            raise
-
-    archive = os.path.join(location, filename)
-    with open(archive, 'wb') as out:
-        while True:
-            buf = stream.read(1024)
-            if not buf:
-                break
-            out.write(buf)
-    unpack(archive, location, log = log)
-    os.remove(archive)
-
-    possible = os.path.join(location, 'mingw64')
-    if not os.path.exists(possible):
-        possible = os.path.join(location, 'mingw32')
-        if not os.path.exists(possible):
-            raise ValueError('Failed to find unpacked MinGW: ' + possible)
-    return possible
-
-def root(location = None, arch = None, version = None, threading = None,
-        exceptions = None, revision = None, log = EmptyLogger()):
-    '''
-    Returns the root folder of a specific version of the mingw-builds variant
-    of gcc. Will download the compiler if needed
-    '''
-
-    # Get the repository if we don't have all the information
-    if not (arch and version and threading and exceptions and revision):
-        versions = repository(log = log)
-
-    # Determine some defaults
-    version = version or max(versions.keys())
-    if not arch:
-        arch = platform.machine().lower()
-        if arch == 'x86':
-            arch = 'i686'
-        elif arch == 'amd64':
-            arch = 'x86_64'
-    if not threading:
-        keys = versions[version][arch].keys()
-        if 'posix' in keys:
-            threading = 'posix'
-        elif 'win32' in keys:
-            threading = 'win32'
-        else:
-            threading = keys[0]
-    if not exceptions:
-        keys = versions[version][arch][threading].keys()
-        if 'seh' in keys:
-            exceptions = 'seh'
-        elif 'sjlj' in keys:
-            exceptions = 'sjlj'
-        else:
-            exceptions = keys[0]
-    if revision == None:
-        revision = max(versions[version][arch][threading][exceptions].keys())
-    if not location:
-        location = os.path.join(tempfile.gettempdir(), 'mingw-builds')
-
-    # Get the download url
-    url = versions[version][arch][threading][exceptions][revision]
-
-    # Tell the user whatzzup
-    log.info('finding MinGW %s', '.'.join(str(v) for v in version))
-    log.debug(' - arch: %s', arch)
-    log.debug(' - threading: %s', threading)
-    log.debug(' - exceptions: %s', exceptions)
-    log.debug(' - revision: %s', revision)
-    log.debug(' - url: %s', url)
-
-    # Store each specific revision differently
-    slug = '{version}-{arch}-{threading}-{exceptions}-rev{revision}'
-    slug = slug.format(
-        version = '.'.join(str(v) for v in version),
-        arch = arch,
-        threading = threading,
-        exceptions = exceptions,
-        revision = revision
-    )
-    if arch == 'x86_64':
-        root_dir = os.path.join(location, slug, 'mingw64')
-    elif arch == 'i686':
-        root_dir = os.path.join(location, slug, 'mingw32')
-    else:
-        raise ValueError('Unknown MinGW arch: ' + arch)
-
-    # Download if needed
-    if not os.path.exists(root_dir):
-        downloaded = download(url, os.path.join(location, slug), log = log)
-        if downloaded != root_dir:
-            raise ValueError('The location of mingw did not match\n%s\n%s'
-                % (downloaded, root_dir))
-
-    return root_dir
-
-def str2ver(string):
-    '''
-    Converts a version string into a tuple
-    '''
-    try:
-        version = tuple(int(v) for v in string.split('.'))
-        if len(version) is not 3:
-            raise ValueError()
-    except ValueError:
-        raise argparse.ArgumentTypeError(
-            'please provide a three digit version string')
-    return version
-
-def main():
-    '''
-    Invoked when the script is run directly by the python interpreter
-    '''
-    parser = argparse.ArgumentParser(
-        description = 'Downloads a specific version of MinGW',
-        formatter_class = argparse.ArgumentDefaultsHelpFormatter
-    )
-    parser.add_argument('--location',
-        help = 'the location to download the compiler to',
-        default = os.path.join(tempfile.gettempdir(), 'mingw-builds'))
-    parser.add_argument('--arch', required = True, choices = ['i686', 'x86_64'],
-        help = 'the target MinGW architecture string')
-    parser.add_argument('--version', type = str2ver,
-        help = 'the version of GCC to download')
-    parser.add_argument('--threading', choices = ['posix', 'win32'],
-        help = 'the threading type of the compiler')
-    parser.add_argument('--exceptions', choices = ['sjlj', 'seh', 'dwarf'],
-        help = 'the method to throw exceptions')
-    parser.add_argument('--revision', type=int,
-        help = 'the revision of the MinGW release')
-    group = parser.add_mutually_exclusive_group()
-    group.add_argument('-v', '--verbose', action='store_true',
-        help='increase the script output verbosity')
-    group.add_argument('-q', '--quiet', action='store_true',
-        help='only print errors and warning')
-    args = parser.parse_args()
-
-    # Create the logger
-    logger = logging.getLogger('mingw')
-    handler = logging.StreamHandler()
-    formatter = logging.Formatter('%(message)s')
-    handler.setFormatter(formatter)
-    logger.addHandler(handler)
-    logger.setLevel(logging.INFO)
-    if args.quiet:
-        logger.setLevel(logging.WARN)
-    if args.verbose:
-        logger.setLevel(logging.DEBUG)
-
-    # Get MinGW
-    root_dir = root(location = args.location, arch = args.arch,
-        version = args.version, threading = args.threading,
-        exceptions = args.exceptions, revision = args.revision,
-        log = logger)
-
-    sys.stdout.write('%s\n' % os.path.join(root_dir, 'bin'))
-
-if __name__ == '__main__':
-    try:
-        main()
-    except IOError as e:
-        sys.stderr.write('IO error: %s\n' % e)
-        sys.exit(1)
-    except OSError as e:
-        sys.stderr.write('OS error: %s\n' % e)
-        sys.exit(1)
-    except KeyboardInterrupt as e:
-        sys.stderr.write('Killed\n')
-        sys.exit(1)

+ 6 - 2
utils/google-benchmark/src/CMakeLists.txt

@@ -1,6 +1,11 @@
 # Allow the source files to find headers in src/
 include_directories(${PROJECT_SOURCE_DIR}/src)
 
+if (DEFINED BENCHMARK_CXX_LINKER_FLAGS)
+  list(APPEND CMAKE_SHARED_LINKER_FLAGS ${BENCHMARK_CXX_LINKER_FLAGS})
+  list(APPEND CMAKE_MODULE_LINKER_FLAGS ${BENCHMARK_CXX_LINKER_FLAGS})
+endif()
+
 # Define the source files
 set(SOURCE_FILES "benchmark.cc" "colorprint.cc" "commandlineflags.cc"
                  "console_reporter.cc" "csv_reporter.cc" "json_reporter.cc"
@@ -19,7 +24,6 @@ endif()
 
 add_library(benchmark ${SOURCE_FILES} ${RE_FILES})
 
-
 set_target_properties(benchmark PROPERTIES
   OUTPUT_NAME "benchmark"
   VERSION ${GENERIC_LIB_VERSION}
@@ -27,7 +31,7 @@ set_target_properties(benchmark PROPERTIES
 )
 
 # Link threads.
-target_link_libraries(benchmark ${CMAKE_THREAD_LIBS_INIT})
+target_link_libraries(benchmark  ${BENCHMARK_CXX_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
 
 # We need extra libraries on Windows
 if(${CMAKE_SYSTEM_NAME} MATCHES "Windows")

+ 121 - 62
utils/google-benchmark/src/benchmark.cc

@@ -66,6 +66,11 @@ DEFINE_int32(benchmark_repetitions, 1,
              "The number of runs of each benchmark. If greater than 1, the "
              "mean and standard deviation of the runs will be reported.");
 
+DEFINE_bool(benchmark_report_aggregates_only, false,
+            "Report the result of each benchmark repetitions. When 'true' is "
+            "specified only the mean, standard deviation, and other statistics "
+            "are reported for repeated benchmarks.");
+
 DEFINE_string(benchmark_format, "console",
               "The format to use for console output. Valid values are "
               "'console', 'json', or 'csv'.");
@@ -110,6 +115,9 @@ bool IsZero(double n) {
 
 // For non-dense Range, intermediate values are powers of kRangeMultiplier.
 static const int kRangeMultiplier = 8;
+// The size of a benchmark family determines the number of inputs to repeat
+// the benchmark on. If this is "large" then warn the user during configuration.
+static const size_t kMaxFamilySize = 100;
 static const size_t kMaxIterations = 1000000000;
 
 bool running_benchmark = false;
@@ -311,10 +319,17 @@ static std::unique_ptr<TimerManager> timer_manager = nullptr;
 
 namespace internal {
 
+enum ReportMode : unsigned {
+    RM_Unspecified, // The mode has not been manually specified
+    RM_Default,     // The mode is user-specified as default.
+    RM_ReportAggregatesOnly
+};
+
 // Information kept per benchmark we may want to run
 struct Benchmark::Instance {
   std::string      name;
   Benchmark*       benchmark;
+  ReportMode       report_mode;
   std::vector<int> arg;
   TimeUnit         time_unit;
   int              range_multiplier;
@@ -341,7 +356,8 @@ class BenchmarkFamilies {
   // Extract the list of benchmark instances that match the specified
   // regular expression.
   bool FindBenchmarks(const std::string& re,
-                      std::vector<Benchmark::Instance>* benchmarks);
+                      std::vector<Benchmark::Instance>* benchmarks,
+                      std::ostream* Err);
  private:
   BenchmarkFamilies() {}
 
@@ -364,6 +380,7 @@ public:
   void RangeMultiplier(int multiplier);
   void MinTime(double n);
   void Repetitions(int n);
+  void ReportAggregatesOnly(bool v);
   void UseRealTime();
   void UseManualTime();
   void Complexity(BigO complexity);
@@ -381,6 +398,7 @@ private:
   friend class BenchmarkFamilies;
 
   std::string name_;
+  ReportMode report_mode_;
   std::vector< std::vector<int> > args_;  // Args for all benchmark runs
   TimeUnit time_unit_;
   int range_multiplier_;
@@ -410,18 +428,20 @@ size_t BenchmarkFamilies::AddBenchmark(std::unique_ptr<Benchmark> family) {
 
 bool BenchmarkFamilies::FindBenchmarks(
     const std::string& spec,
-    std::vector<Benchmark::Instance>* benchmarks) {
+    std::vector<Benchmark::Instance>* benchmarks,
+    std::ostream* ErrStream) {
+  CHECK(ErrStream);
+  auto& Err = *ErrStream;
   // Make regular expression out of command-line flag
   std::string error_msg;
   Regex re;
   if (!re.Init(spec, &error_msg)) {
-    std::cerr << "Could not compile benchmark re: " << error_msg << std::endl;
+    Err << "Could not compile benchmark re: " << error_msg << std::endl;
     return false;
   }
 
   // Special list of thread counts to use when none are specified
-  std::vector<int> one_thread;
-  one_thread.push_back(1);
+  const std::vector<int> one_thread = {1};
 
   MutexLock l(mutex_);
   for (std::unique_ptr<Benchmark>& bench_family : families_) {
@@ -432,17 +452,29 @@ bool BenchmarkFamilies::FindBenchmarks(
     if (family->ArgsCnt() == -1) {
       family->Args({});
     }
-
-    for (auto const& args : family->args_) {
-      const std::vector<int>* thread_counts =
+    const std::vector<int>* thread_counts =
         (family->thread_counts_.empty()
          ? &one_thread
-         : &family->thread_counts_);
+         : &static_cast<const std::vector<int>&>(family->thread_counts_));
+    const size_t family_size = family->args_.size() * thread_counts->size();
+    // The benchmark will be run on at least 'family_size' different inputs.
+    // If 'family_size' is very large warn the user.
+    if (family_size > kMaxFamilySize) {
+      Err <<  "The number of inputs is very large. " << family->name_
+          << " will be repeated at least " << family_size << " times.\n";
+    }
+    // Reserve in the special case of the regex ".", since we know the final
+    // family size.
+    if (spec == ".")
+      benchmarks->reserve(family_size);
+
+    for (auto const& args : family->args_) {
       for (int num_threads : *thread_counts) {
 
         Benchmark::Instance instance;
         instance.name = family->name_;
         instance.benchmark = bench_family.get();
+        instance.report_mode = family->report_mode_;
         instance.arg = args;
         instance.time_unit = family->time_unit_;
         instance.range_multiplier = family->range_multiplier_;
@@ -478,8 +510,8 @@ bool BenchmarkFamilies::FindBenchmarks(
         }
 
         if (re.Match(instance.name)) {
-          instance.last_benchmark_instance = (args == family->args_.back());
-          benchmarks->push_back(instance);
+          instance.last_benchmark_instance = (&args == &family->args_.back());
+          benchmarks->push_back(std::move(instance));
         }
       }
     }
@@ -488,7 +520,7 @@ bool BenchmarkFamilies::FindBenchmarks(
 }
 
 BenchmarkImp::BenchmarkImp(const char* name)
-    : name_(name), time_unit_(kNanosecond),
+    : name_(name), report_mode_(RM_Unspecified), time_unit_(kNanosecond),
       range_multiplier_(kRangeMultiplier), min_time_(0.0), repetitions_(0),
       use_real_time_(false), use_manual_time_(false),
       complexity_(oNone) {
@@ -532,22 +564,23 @@ void BenchmarkImp::Args(const std::vector<int>& args)
 
 void BenchmarkImp::Ranges(const std::vector<std::pair<int, int>>& ranges) {
   std::vector<std::vector<int>> arglists(ranges.size());
-  int total = 1;
+  std::size_t total = 1;
   for (std::size_t i = 0; i < ranges.size(); i++) {
     AddRange(&arglists[i], ranges[i].first, ranges[i].second, range_multiplier_);
     total *= arglists[i].size();
   }
 
-  std::vector<std::size_t> ctr(total, 0);
+  std::vector<std::size_t> ctr(arglists.size(), 0);
 
-  for (int i = 0; i < total; i++) {
+  for (std::size_t i = 0; i < total; i++) {
     std::vector<int> tmp;
+    tmp.reserve(arglists.size());
 
     for (std::size_t j = 0; j < arglists.size(); j++) {
-      tmp.push_back(arglists[j][ctr[j]]);
+      tmp.push_back(arglists[j].at(ctr[j]));
     }
 
-    args_.push_back(tmp);
+    args_.push_back(std::move(tmp));
 
     for (std::size_t j = 0; j < arglists.size(); j++) {
       if (ctr[j] + 1 < arglists[j].size()) {
@@ -575,6 +608,10 @@ void BenchmarkImp::Repetitions(int n) {
   repetitions_ = n;
 }
 
+void BenchmarkImp::ReportAggregatesOnly(bool value) {
+  report_mode_ = value ? RM_ReportAggregatesOnly : RM_Default;
+}
+
 void BenchmarkImp::UseRealTime() {
   CHECK(!use_manual_time_) << "Cannot set UseRealTime and UseManualTime simultaneously.";
   use_real_time_ = true;
@@ -703,6 +740,11 @@ Benchmark* Benchmark::Repetitions(int t) {
   return this;
 }
 
+Benchmark* Benchmark::ReportAggregatesOnly(bool value) {
+  imp_->ReportAggregatesOnly(value);
+  return this;
+}
+
 Benchmark* Benchmark::MinTime(double t) {
   imp_->MinTime(t);
   return this;
@@ -779,7 +821,8 @@ std::vector<BenchmarkReporter::Run>
 RunBenchmark(const benchmark::internal::Benchmark::Instance& b,
              std::vector<BenchmarkReporter::Run>* complexity_reports)
   EXCLUDES(GetBenchmarkLock()) {
-   std::vector<BenchmarkReporter::Run> reports; // return value
+  std::vector<BenchmarkReporter::Run> reports; // return value
+
   size_t iters = 1;
 
   std::vector<std::thread> pool;
@@ -788,6 +831,10 @@ RunBenchmark(const benchmark::internal::Benchmark::Instance& b,
 
   const int repeats = b.repetitions != 0 ? b.repetitions
                                          : FLAGS_benchmark_repetitions;
+  const bool report_aggregates_only = repeats != 1 &&
+      (b.report_mode == internal::RM_Unspecified
+        ? FLAGS_benchmark_report_aggregates_only
+        : b.report_mode == internal::RM_ReportAggregatesOnly);
   for (int i = 0; i < repeats; i++) {
     std::string mem;
     for (;;) {
@@ -914,22 +961,21 @@ RunBenchmark(const benchmark::internal::Benchmark::Instance& b,
       iters = static_cast<int>(next_iters + 0.5);
     }
   }
-  std::vector<BenchmarkReporter::Run> additional_run_stats = ComputeStats(reports);
-  reports.insert(reports.end(), additional_run_stats.begin(),
-                 additional_run_stats.end());
-
-  if((b.complexity != oNone) && b.last_benchmark_instance) {
-    additional_run_stats = ComputeBigO(*complexity_reports);
-    reports.insert(reports.end(), additional_run_stats.begin(),
-                   additional_run_stats.end());
-    complexity_reports->clear();
-  }
-
   if (b.multithreaded) {
     for (std::thread& thread : pool)
       thread.join();
   }
+  // Calculate additional statistics
+  auto stat_reports = ComputeStats(reports);
+  if((b.complexity != oNone) && b.last_benchmark_instance) {
+    auto additional_run_stats = ComputeBigO(*complexity_reports);
+    stat_reports.insert(stat_reports.end(), additional_run_stats.begin(),
+                   additional_run_stats.end());
+    complexity_reports->clear();
+  }
 
+  if (report_aggregates_only) reports.clear();
+  reports.insert(reports.end(), stat_reports.begin(), stat_reports.end());
   return reports;
 }
 
@@ -1064,47 +1110,52 @@ size_t RunSpecifiedBenchmarks(BenchmarkReporter* console_reporter,
   if (spec.empty() || spec == "all")
     spec = ".";  // Regexp that matches all benchmarks
 
-  std::vector<internal::Benchmark::Instance> benchmarks;
-  auto families = internal::BenchmarkFamilies::GetInstance();
-  if (!families->FindBenchmarks(spec, &benchmarks)) return 0;
-
-  if (FLAGS_benchmark_list_tests) {
-    for (auto const& benchmark : benchmarks)
-      std::cout <<  benchmark.name << "\n";
-  } else {
-    // Setup the reporters
-    std::ofstream output_file;
-    std::unique_ptr<BenchmarkReporter> default_console_reporter;
-    std::unique_ptr<BenchmarkReporter> default_file_reporter;
-    if (!console_reporter) {
-      auto output_opts = FLAGS_color_print ? ConsoleReporter::OO_Color
-                                           : ConsoleReporter::OO_None;
-      default_console_reporter = internal::CreateReporter(
+  // Setup the reporters
+  std::ofstream output_file;
+  std::unique_ptr<BenchmarkReporter> default_console_reporter;
+  std::unique_ptr<BenchmarkReporter> default_file_reporter;
+  if (!console_reporter) {
+    auto output_opts = FLAGS_color_print ? ConsoleReporter::OO_Color
+                                          : ConsoleReporter::OO_None;
+    default_console_reporter = internal::CreateReporter(
           FLAGS_benchmark_format, output_opts);
-      console_reporter = default_console_reporter.get();
-    }
-    std::string const& fname = FLAGS_benchmark_out;
-    if (fname == "" && file_reporter) {
-      std::cerr << "A custom file reporter was provided but "
+    console_reporter = default_console_reporter.get();
+  }
+  auto& Out = console_reporter->GetOutputStream();
+  auto& Err = console_reporter->GetErrorStream();
+
+  std::string const& fname = FLAGS_benchmark_out;
+  if (fname == "" && file_reporter) {
+    Err << "A custom file reporter was provided but "
                    "--benchmark_out=<file> was not specified." << std::endl;
+    std::exit(1);
+  }
+  if (fname != "") {
+    output_file.open(fname);
+    if (!output_file.is_open()) {
+      Err << "invalid file name: '" << fname << std::endl;
       std::exit(1);
     }
-    if (fname != "") {
-      output_file.open(fname);
-      if (!output_file.is_open()) {
-        std::cerr << "invalid file name: '" << fname << std::endl;
-        std::exit(1);
-      }
-      if (!file_reporter) {
-        default_file_reporter = internal::CreateReporter(
+    if (!file_reporter) {
+      default_file_reporter = internal::CreateReporter(
             FLAGS_benchmark_out_format, ConsoleReporter::OO_None);
-        file_reporter = default_file_reporter.get();
-      }
-      file_reporter->SetOutputStream(&output_file);
-      file_reporter->SetErrorStream(&output_file);
+      file_reporter = default_file_reporter.get();
     }
+    file_reporter->SetOutputStream(&output_file);
+    file_reporter->SetErrorStream(&output_file);
+  }
+
+  std::vector<internal::Benchmark::Instance> benchmarks;
+  auto families = internal::BenchmarkFamilies::GetInstance();
+  if (!families->FindBenchmarks(spec, &benchmarks, &Err)) return 0;
+
+  if (FLAGS_benchmark_list_tests) {
+    for (auto const& benchmark : benchmarks)
+      Out <<  benchmark.name << "\n";
+  } else {
     internal::RunMatchingBenchmarks(benchmarks, console_reporter, file_reporter);
   }
+
   return benchmarks.size();
 }
 
@@ -1117,6 +1168,7 @@ void PrintUsageAndExit() {
           "          [--benchmark_filter=<regex>]\n"
          "          [--benchmark_min_time=<min_time>]\n"
           "          [--benchmark_repetitions=<num_repetitions>]\n"
+          "          [--benchmark_report_aggregates_only={true|false}\n"
           "          [--benchmark_format=<console|json|csv>]\n"
           "          [--benchmark_out=<filename>]\n"
           "          [--benchmark_out_format=<json|console|csv>]\n"
@@ -1137,6 +1189,8 @@ void ParseCommandLineFlags(int* argc, char** argv) {
                         &FLAGS_benchmark_min_time) ||
         ParseInt32Flag(argv[i], "benchmark_repetitions",
                        &FLAGS_benchmark_repetitions) ||
+        ParseBoolFlag(argv[i], "benchmark_report_aggregates_only",
+                       &FLAGS_benchmark_report_aggregates_only) ||
         ParseStringFlag(argv[i], "benchmark_format",
                         &FLAGS_benchmark_format) ||
         ParseStringFlag(argv[i], "benchmark_out",
@@ -1168,6 +1222,11 @@ Benchmark* RegisterBenchmarkInternal(Benchmark* bench) {
     return bench;
 }
 
+int InitializeStreams() {
+    static std::ios_base::Init init;
+    return 0;
+}
+
 } // end namespace internal
 
 void Initialize(int* argc, char** argv) {
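
The `InitializeStreams()` / `stream_init_anchor` pair added above is an instance of the iostream-initialization ("nifty counter") idiom. The sketch below uses hypothetical names and is only an illustration of the general shape: each translation unit that includes the header defines an anchor whose initializer constructs a `std::ios_base::Init`, which in turn constructs the standard streams before later file-scope statics in that TU try to print.

```c++
#include <iostream>

// Sketch of the idiom (hypothetical names, not the library's code).
namespace sketch {
inline int InitializeStreams() {
  static std::ios_base::Init init;  // constructs std::cout/std::cerr once
  return 0;
}
// The library tags its anchor with BENCHMARK_UNUSED to silence warnings.
static int stream_init_anchor = InitializeStreams();
}  // namespace sketch

// A file-scope object that prints from its constructor; because the anchor
// above is defined earlier in this TU, the standard streams already exist.
struct PrintsAtStartup {
  PrintsAtStartup() { std::cout << "static registration could log here\n"; }
} prints_at_startup;

int main() { return 0; }
```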

+ 18 - 2
utils/google-benchmark/test/CMakeLists.txt

@@ -2,11 +2,27 @@
 
 find_package(Threads REQUIRED)
 
+# NOTE: These flags must be added after find_package(Threads REQUIRED) otherwise
+# they will break the configuration check.
+if (DEFINED BENCHMARK_CXX_LINKER_FLAGS)
+  list(APPEND CMAKE_EXE_LINKER_FLAGS ${BENCHMARK_CXX_LINKER_FLAGS})
+endif()
+
+add_library(output_test_helper STATIC output_test_helper.cc)
+
 macro(compile_benchmark_test name)
   add_executable(${name} "${name}.cc")
   target_link_libraries(${name} benchmark ${CMAKE_THREAD_LIBS_INIT})
 endmacro(compile_benchmark_test)
 
+
+macro(compile_output_test name)
+  add_executable(${name} "${name}.cc")
+  target_link_libraries(${name} output_test_helper benchmark
+          ${BENCHMARK_CXX_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
+endmacro(compile_output_test)
+
+
 # Demonstration executable
 compile_benchmark_test(benchmark_test)
 add_test(benchmark benchmark_test --benchmark_min_time=0.01)
@@ -54,7 +70,7 @@ add_test(map_test map_test --benchmark_min_time=0.01)
 compile_benchmark_test(multiple_ranges_test)
 add_test(multiple_ranges_test multiple_ranges_test --benchmark_min_time=0.01)
 
-compile_benchmark_test(reporter_output_test)
+compile_output_test(reporter_output_test)
 add_test(reporter_output_test reporter_output_test --benchmark_min_time=0.01)
 
 check_cxx_compiler_flag(-std=c++03 BENCHMARK_HAS_CXX03_FLAG)
@@ -69,7 +85,7 @@ if (BENCHMARK_HAS_CXX03_FLAG)
   add_test(cxx03 cxx03_test --benchmark_min_time=0.01)
 endif()
 
-compile_benchmark_test(complexity_test)
+compile_output_test(complexity_test)
 add_test(complexity_benchmark complexity_test --benchmark_min_time=0.01)
 
 # Add the coverage command(s)

+ 35 - 189
utils/google-benchmark/test/complexity_test.cc

@@ -1,153 +1,46 @@
-
 #undef NDEBUG
 #include "benchmark/benchmark.h"
-#include "../src/check.h" // NOTE: check.h is for internal use only!
-#include "../src/re.h"    // NOTE: re.h is for internal use only
+#include "output_test.h"
 #include <cassert>
-#include <cstring>
-#include <iostream>
-#include <sstream>
 #include <vector>
-#include <utility>
 #include <algorithm>
+#include <cstdlib>
 #include <cmath>
 
 namespace {
 
-// ========================================================================= //
-// -------------------------- Testing Case --------------------------------- //
-// ========================================================================= //
-
-enum MatchRules {
-  MR_Default, // Skip non-matching lines until a match is found.
-  MR_Next    // Match must occur on the next line.
-};
-
-struct TestCase {
-  std::string regex;
-  int match_rule;
-
-  TestCase(std::string re, int rule = MR_Default) : regex(re), match_rule(rule) {}
-
-  void Check(std::stringstream& remaining_output) const {
-    benchmark::Regex r;
-    std::string err_str;
-    r.Init(regex, &err_str);
-    CHECK(err_str.empty()) << "Could not construct regex \"" << regex << "\""
-                           << " got Error: " << err_str;
-
-    std::string near = "<EOF>";
-    std::string line;
-    bool first = true;
-    while (remaining_output.eof() == false) {
-        CHECK(remaining_output.good());
-        std::getline(remaining_output, line);
-        // Keep the first line as context.
-        if (first) {
-            near = line;
-            first = false;
-        }
-        if (r.Match(line)) return;
-        CHECK(match_rule != MR_Next) << "Expected line \"" << line
-                                     << "\" to match regex \"" << regex << "\""
-                                     << "\nstarted matching at line: \"" << near << "\"";
-    }
-
-    CHECK(remaining_output.eof() == false)
-        << "End of output reached before match for regex \"" << regex
-        << "\" was found"
-        << "\nstarted matching at line: \"" << near << "\"";
-  }
-};
-
-std::vector<TestCase> ConsoleOutputTests;
-std::vector<TestCase> JSONOutputTests;
-std::vector<TestCase> CSVOutputTests;
-
-// ========================================================================= //
-// -------------------------- Test Helpers --------------------------------- //
-// ========================================================================= //
-
-class TestReporter : public benchmark::BenchmarkReporter {
-public:
-  TestReporter(std::vector<benchmark::BenchmarkReporter*> reps)
-      : reporters_(reps)  {}
-
-  virtual bool ReportContext(const Context& context) {
-    bool last_ret = false;
-    bool first = true;
-    for (auto rep : reporters_) {
-      bool new_ret = rep->ReportContext(context);
-      CHECK(first || new_ret == last_ret)
-          << "Reports return different values for ReportContext";
-      first = false;
-      last_ret = new_ret;
-    }
-    return last_ret;
-  }
-
-  virtual void ReportRuns(const std::vector<Run>& report) {
-    for (auto rep : reporters_)
-      rep->ReportRuns(report);
-  }
-
-  virtual void Finalize() {
-      for (auto rep : reporters_)
-        rep->Finalize();
-  }
-
-private:
-  std::vector<benchmark::BenchmarkReporter*> reporters_;
-};
-
-
-#define CONCAT2(x, y) x##y
-#define CONCAT(x, y) CONCAT2(x, y)
-
-#define ADD_CASES(...) \
-    int CONCAT(dummy, __LINE__) = AddCases(__VA_ARGS__)
-
-int AddCases(std::vector<TestCase>* out, std::initializer_list<TestCase> const& v) {
-  for (auto const& TC : v)
-    out->push_back(TC);
-  return 0;
-}
-
-template <class First>
-std::string join(First f) { return f; }
-
-template <class First, class ...Args>
-std::string join(First f, Args&&... args) {
-    return std::string(std::move(f)) + "[ ]+" + join(std::forward<Args>(args)...);
-}
-
-std::string dec_re = "[0-9]*[.]?[0-9]+([eE][-+][0-9]+)?";
-
 #define ADD_COMPLEXITY_CASES(...) \
     int CONCAT(dummy, __LINE__) = AddComplexityTest(__VA_ARGS__)
 
-int AddComplexityTest(std::vector<TestCase>* console_out, std::vector<TestCase>* json_out,
-                      std::vector<TestCase>* csv_out, std::string big_o_test_name, 
+int AddComplexityTest(std::string big_o_test_name,
                       std::string rms_test_name, std::string big_o) {
-  std::string big_o_str = dec_re + " " + big_o;
-  AddCases(console_out, {
-    {join("^" + big_o_test_name + "", big_o_str, big_o_str) + "[ ]*$"},
-    {join("^" + rms_test_name + "", "[0-9]+ %", "[0-9]+ %") + "[ ]*$"}
+  SetSubstitutions({
+        {"%bigo_name", big_o_test_name},
+        {"%rms_name", rms_test_name},
+        {"%bigo_str", "[ ]*" + std::string(dec_re) + " " + big_o},
+        {"%bigo", big_o},
+        {"%rms", "[ ]*[0-9]+ %"}
   });
-  AddCases(json_out, {
-    {"\"name\": \"" + big_o_test_name + "\",$"},
+  AddCases(TC_ConsoleOut, {
+    {"^%bigo_name %bigo_str %bigo_str[ ]*$"},
+    {"^%bigo_name", MR_Not}, // Assert that we didn't only match a name.
+    {"^%rms_name %rms %rms[ ]*$", MR_Next}
+  });
+  AddCases(TC_JSONOut, {
+    {"\"name\": \"%bigo_name\",$"},
     {"\"cpu_coefficient\": [0-9]+,$", MR_Next},
     {"\"real_coefficient\": [0-9]{1,5},$", MR_Next},
-    {"\"big_o\": \"" + big_o + "\",$", MR_Next},
+    {"\"big_o\": \"%bigo\",$", MR_Next},
    {"\"time_unit\": \"ns\"$", MR_Next},
     {"}", MR_Next},
-    {"\"name\": \"" + rms_test_name + "\",$"},
+    {"\"name\": \"%rms_name\",$"},
     {"\"rms\": [0-9]+%$", MR_Next},
     {"}", MR_Next}
   });
-  AddCases(csv_out, {
-    {"^\"" + big_o_test_name + "\",," + dec_re + "," + dec_re + "," + big_o + ",,,,,$"},
-    {"^\"" + rms_test_name + "\",," + dec_re + "," + dec_re + ",,,,,,$", MR_Next}
+  AddCases(TC_CSVOut, {
+    {"^\"%bigo_name\",,%float,%float,%bigo,,,,,$"},
+    {"^\"%bigo_name\"", MR_Not},
+    {"^\"%rms_name\",,%float,%float,,,,,,$", MR_Next}
   });
   return 0;
 }
@@ -172,20 +65,20 @@ BENCHMARK(BM_Complexity_O1) -> Range(1, 1<<18) -> Complexity([](int){return 1.0;
 
 const char* big_o_1_test_name = "BM_Complexity_O1_BigO";
 const char* rms_o_1_test_name = "BM_Complexity_O1_RMS";
-const char* enum_auto_big_o_1 = "\\([0-9]+\\)";
+const char* enum_big_o_1 = "\\([0-9]+\\)";
+// FIXME: Tolerate both '(1)' and 'lgN' as output when the complexity is auto deduced.
+// See https://github.com/google/benchmark/issues/272
+const char* auto_big_o_1 = "(\\([0-9]+\\))|(lgN)";
 const char* lambda_big_o_1 = "f\\(N\\)";
 
 // Add enum tests
-ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests, 
-                     big_o_1_test_name, rms_o_1_test_name, enum_auto_big_o_1);
+ADD_COMPLEXITY_CASES(big_o_1_test_name, rms_o_1_test_name, enum_big_o_1);
 
 // Add auto enum tests
-ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests,
-                     big_o_1_test_name, rms_o_1_test_name, enum_auto_big_o_1);
+ADD_COMPLEXITY_CASES(big_o_1_test_name, rms_o_1_test_name, auto_big_o_1);
 
 // Add lambda tests
-ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests, 
-                     big_o_1_test_name, rms_o_1_test_name, lambda_big_o_1);
+ADD_COMPLEXITY_CASES(big_o_1_test_name, rms_o_1_test_name, lambda_big_o_1);
 
 // ========================================================================= //
 // --------------------------- Testing BigO O(N) --------------------------- //
@@ -195,7 +88,7 @@ std::vector<int> ConstructRandomVector(int size) {
   std::vector<int> v;
   v.reserve(size);
   for (int i = 0; i < size; ++i) {
-    v.push_back(rand() % size);
+    v.push_back(std::rand() % size);
   }
   return v;
 }
@@ -218,12 +111,10 @@ const char* enum_auto_big_o_n = "N";
 const char* lambda_big_o_n = "f\\(N\\)";
 
 // Add enum tests
-ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests, 
-                     big_o_n_test_name, rms_o_n_test_name, enum_auto_big_o_n);
+ADD_COMPLEXITY_CASES(big_o_n_test_name, rms_o_n_test_name, enum_auto_big_o_n);
 
 // Add lambda tests
-ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests, 
-                     big_o_n_test_name, rms_o_n_test_name, lambda_big_o_n);
+ADD_COMPLEXITY_CASES(big_o_n_test_name, rms_o_n_test_name, lambda_big_o_n);
 
 // ========================================================================= //
 // ------------------------- Testing BigO O(N*lgN) ------------------------- //
@@ -246,62 +137,17 @@ const char* enum_auto_big_o_n_lg_n = "NlgN";
 const char* lambda_big_o_n_lg_n = "f\\(N\\)";
 
 // Add enum tests
-ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests, 
-                     big_o_n_lg_n_test_name, rms_o_n_lg_n_test_name, enum_auto_big_o_n_lg_n);
+ADD_COMPLEXITY_CASES(big_o_n_lg_n_test_name, rms_o_n_lg_n_test_name, enum_auto_big_o_n_lg_n);
 
 // Add lambda tests
-ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests, 
-                     big_o_n_lg_n_test_name, rms_o_n_lg_n_test_name, lambda_big_o_n_lg_n);
+ADD_COMPLEXITY_CASES(big_o_n_lg_n_test_name, rms_o_n_lg_n_test_name, lambda_big_o_n_lg_n);
 
 
 // ========================================================================= //
 // --------------------------- TEST CASES END ------------------------------ //
 // ========================================================================= //
 
-
 int main(int argc, char* argv[]) {
-  benchmark::Initialize(&argc, argv);
-  benchmark::ConsoleReporter CR(benchmark::ConsoleReporter::OO_None);
-  benchmark::JSONReporter JR;
-  benchmark::CSVReporter CSVR;
-  struct ReporterTest {
-    const char* name;
-    std::vector<TestCase>& output_cases;
-    benchmark::BenchmarkReporter& reporter;
-    std::stringstream out_stream;
-    std::stringstream err_stream;
-
-    ReporterTest(const char* n,
-                 std::vector<TestCase>& out_tc,
-                 benchmark::BenchmarkReporter& br)
-        : name(n), output_cases(out_tc), reporter(br) {
-        reporter.SetOutputStream(&out_stream);
-        reporter.SetErrorStream(&err_stream);
-    }
-  } TestCases[] = {
-      {"ConsoleReporter", ConsoleOutputTests, CR},
-      {"JSONReporter", JSONOutputTests, JR},
-      {"CSVReporter", CSVOutputTests, CSVR}
-  };
-
-  // Create the test reporter and run the benchmarks.
-  std::cout << "Running benchmarks...\n";
-  TestReporter test_rep({&CR, &JR, &CSVR});
-  benchmark::RunSpecifiedBenchmarks(&test_rep);
-
-  for (auto& rep_test : TestCases) {
-      std::string msg = std::string("\nTesting ") + rep_test.name + " Output\n";
-      std::string banner(msg.size() - 1, '-');
-      std::cout << banner << msg << banner << "\n";
-
-      std::cerr << rep_test.err_stream.str();
-      std::cout << rep_test.out_stream.str();
-
-      for (const auto& TC : rep_test.output_cases)
-        TC.Check(rep_test.out_stream);
-
-      std::cout << "\n";
-  }
-  return 0;
+  RunOutputTests(argc, argv);
 }
 

+ 12 - 1
utils/google-benchmark/test/cxx03_test.cc

@@ -1,5 +1,6 @@
-
+#undef NDEBUG
 #include <cstddef>
+#include <cassert>
 
 #include "benchmark/benchmark.h"
 
@@ -15,6 +16,16 @@ void BM_empty(benchmark::State& state) {
 }
 BENCHMARK(BM_empty);
 
+// The new C++11 interface for args/ranges requires initializer list support.
+// Therefore we provide the old interface to support C++03.
+void BM_old_arg_range_interface(benchmark::State& state) {
+    assert((state.range(0) == 1 && state.range(1) == 2) ||
+           (state.range(0) == 5 && state.range(1) == 6));
+    while (state.KeepRunning()) {
+    }
+}
+BENCHMARK(BM_old_arg_range_interface)->ArgPair(1, 2)->RangePair(5, 5, 6, 6);
+
 template <class T, class U>
 void BM_template2(benchmark::State& state) {
     BM_empty(state);

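For comparison, a minimal sketch (not part of this patch) of the same benchmark written against the C++11 initializer-list interface that the comment above refers to; the benchmark name BM_new_arg_range_interface is illustrative only.

#undef NDEBUG
#include <cassert>
#include "benchmark/benchmark.h"

// Args({a, b}) is the C++11 replacement for ArgPair(a, b), and
// Ranges({{lo1, hi1}, {lo2, hi2}}) replaces RangePair(lo1, hi1, lo2, hi2).
void BM_new_arg_range_interface(benchmark::State& state) {
  assert((state.range(0) == 1 && state.range(1) == 2) ||
         (state.range(0) == 5 && state.range(1) == 6));
  while (state.KeepRunning()) {
  }
}
BENCHMARK(BM_new_arg_range_interface)->Args({1, 2})->Ranges({{5, 5}, {6, 6}});
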
+ 16 - 1
utils/google-benchmark/test/multiple_ranges_test.cc

@@ -41,6 +41,21 @@ BENCHMARK_DEFINE_F(MultipleRangesFixture, Empty)(benchmark::State& state) {
   }
   }
 }
 }
 
 
-BENCHMARK_REGISTER_F(MultipleRangesFixture, Empty)->RangeMultiplier(2)->Ranges({{1, 2}, {3, 7}, {5, 15}})->Args({7, 6, 3});
+BENCHMARK_REGISTER_F(MultipleRangesFixture, Empty)->RangeMultiplier(2)
+    ->Ranges({{1, 2}, {3, 7}, {5, 15}})->Args({7, 6, 3});
+
+void BM_CheckDefaultArgument(benchmark::State& state) {
+  // Test that 'range()' without an argument is the same as 'range(0)'.
+  assert(state.range() == state.range(0));
+  assert(state.range() != state.range(1));
+  while (state.KeepRunning()) {}
+}
+BENCHMARK(BM_CheckDefaultArgument)->Ranges({{1, 5}, {6, 10}});
+
+static void BM_MultipleRanges(benchmark::State& st) {
+    while (st.KeepRunning()) {}
+}
+BENCHMARK(BM_MultipleRanges)->Ranges({{5, 5}, {6, 6}});
+
 
 
 BENCHMARK_MAIN()
 BENCHMARK_MAIN()

+ 72 - 0
utils/google-benchmark/test/output_test.h

@@ -0,0 +1,72 @@
+#ifndef TEST_OUTPUT_TEST_H
+#define TEST_OUTPUT_TEST_H
+
+#undef NDEBUG
+#include "benchmark/benchmark.h"
+#include "../src/re.h"
+#include <vector>
+#include <string>
+#include <initializer_list>
+#include <memory>
+#include <utility>
+
+#define CONCAT2(x, y) x##y
+#define CONCAT(x, y) CONCAT2(x, y)
+
+#define ADD_CASES(...) \
+    int CONCAT(dummy, __LINE__) = ::AddCases(__VA_ARGS__)
+
+#define SET_SUBSTITUTIONS(...) \
+    int CONCAT(dummy, __LINE__) = ::SetSubstitutions(__VA_ARGS__)
+
+enum MatchRules {
+  MR_Default, // Skip non-matching lines until a match is found.
+  MR_Next,    // Match must occur on the next line.
+  MR_Not      // No line between the current position and the next match matches
+              // the regex
+};
+
+struct TestCase {
+  TestCase(std::string re, int rule = MR_Default);
+
+  std::string regex_str;
+  int match_rule;
+  std::string substituted_regex;
+  std::shared_ptr<benchmark::Regex> regex;
+};
+
+enum TestCaseID {
+  TC_ConsoleOut,
+  TC_ConsoleErr,
+  TC_JSONOut,
+  TC_JSONErr,
+  TC_CSVOut,
+  TC_CSVErr,
+
+  TC_NumID // PRIVATE
+};
+
+// Add a list of test cases to be run against the output specified by 'ID'.
+int AddCases(TestCaseID ID, std::initializer_list<TestCase> il);
+
+// Add or set a list of substitutions to be performed on constructed regexes.
+// See 'output_test_helper.cc' for a list of default substitutions.
+int SetSubstitutions(
+    std::initializer_list<std::pair<std::string, std::string>> il);
+
+// Run all output tests.
+void RunOutputTests(int argc, char* argv[]);
+
+// ========================================================================= //
+// --------------------------- Misc Utilities ------------------------------ //
+// ========================================================================= //
+
+namespace {
+
+const char* const dec_re = "[0-9]*[.]?[0-9]+([eE][-+][0-9]+)?";
+
+} // end namespace
+
+
+#endif // TEST_OUTPUT_TEST_H
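As a reading aid, a minimal sketch (not part of this patch) of how a test translation unit uses this header, following the same pattern as reporter_output_test.cc below; the benchmark name BM_example and its expected output lines are illustrative only.

#undef NDEBUG
#include "benchmark/benchmark.h"
#include "output_test.h"

void BM_example(benchmark::State& state) {
  while (state.KeepRunning()) {}
}
BENCHMARK(BM_example);

// MR_Default (the default rule) scans forward until a matching line is found;
// MR_Next requires the very next line to match. Placeholders such as
// %console_report and %int are expanded by the default substitutions defined
// in output_test_helper.cc.
ADD_CASES(TC_ConsoleOut, {
    {"^BM_example %console_report$"}
});
ADD_CASES(TC_JSONOut, {
    {"\"name\": \"BM_example\",$"},
    {"\"iterations\": %int,$", MR_Next}
});

int main(int argc, char* argv[]) {
  RunOutputTests(argc, argv);
}
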

+ 224 - 0
utils/google-benchmark/test/output_test_helper.cc

@@ -0,0 +1,224 @@
+#include "output_test.h"
+#include "../src/check.h" // NOTE: check.h is for internal use only!
+#include "../src/re.h" // NOTE: re.h is for internal use only
+#include <memory>
+#include <map>
+#include <iostream>
+#include <sstream>
+
+
+// ========================================================================= //
+// ------------------------------ Internals -------------------------------- //
+// ========================================================================= //
+namespace internal { namespace {
+
+using TestCaseList = std::vector<TestCase>;
+
+// Use a vector because the order in which elements are added matters during iteration.
+// std::map/unordered_map don't guarantee that.
+// For example:
+//  SetSubstitutions({{"%HelloWorld", "Hello"}, {"%Hello", "Hi"}});
+//     Substitute("%HelloWorld") // Always expands to Hello.
+using SubMap = std::vector<std::pair<std::string, std::string>>;
+
+TestCaseList& GetTestCaseList(TestCaseID ID) {
+    // Uses function-local statics to ensure initialization occurs
+    // before first use.
+    static TestCaseList lists[TC_NumID];
+    return lists[ID];
+}
+
+SubMap& GetSubstitutions() {
+    // Don't use 'dec_re' from header because it may not yet be initialized.
+    static std::string dec_re = "[0-9]*[.]?[0-9]+([eE][-+][0-9]+)?";
+    static SubMap map = {
+        {"%float", "[0-9]*[.]?[0-9]+([eE][-+][0-9]+)?"},
+        {"%int", "[ ]*[0-9]+"},
+        {" %s ", "[ ]+"},
+        {"%time", "[ ]*[0-9]{1,5} ns"},
+        {"%console_report", "[ ]*[0-9]{1,5} ns [ ]*[0-9]{1,5} ns [ ]*[0-9]+"},
+        {"%csv_report", "[0-9]+," + dec_re + "," + dec_re + ",ns,,,,,"}
+    };
+    return map;
+}
+
+std::string PerformSubstitutions(std::string source) {
+    SubMap const& subs = GetSubstitutions();
+    using SizeT = std::string::size_type;
+    for (auto const& KV : subs) {
+        SizeT pos;
+        SizeT next_start = 0;
+        while ((pos = source.find(KV.first, next_start)) != std::string::npos) {
+            next_start = pos + KV.second.size();
+            source.replace(pos, KV.first.size(), KV.second);
+        }
+    }
+    return source;
+}
+
+void CheckCase(std::stringstream& remaining_output, TestCase const& TC,
+               TestCaseList const& not_checks)
+{
+    std::string first_line;
+    bool on_first = true;
+    std::string line;
+    while (remaining_output.eof() == false) {
+        CHECK(remaining_output.good());
+        std::getline(remaining_output, line);
+        if (on_first) {
+            first_line = line;
+            on_first = false;
+        }
+        for (auto& NC : not_checks) {
+            CHECK(!NC.regex->Match(line))
+                << "Unexpected match for line \"" << line
+                << "\" for MR_Not regex \"" << NC.regex_str << "\""
+                << "\n    actual regex string \"" << TC.substituted_regex << "\""
+                << "\n    started matching near: " << first_line;
+        }
+        if (TC.regex->Match(line)) return;
+        CHECK(TC.match_rule != MR_Next)
+            << "Expected line \"" << line << "\" to match regex \"" << TC.regex_str << "\""
+            << "\n    actual regex string \"" << TC.substituted_regex << "\""
+            << "\n    started matching near: " << first_line;
+    }
+    CHECK(remaining_output.eof() == false)
+        << "End of output reached before match for regex \"" << TC.regex_str
+        << "\" was found"
+        << "\n    actual regex string \"" << TC.substituted_regex << "\""
+        << "\n    started matching near: " << first_line;
+}
+
+
+void CheckCases(TestCaseList const& checks, std::stringstream& output) {
+    std::vector<TestCase> not_checks;
+    for (size_t i=0; i < checks.size(); ++i) {
+        const auto& TC = checks[i];
+        if (TC.match_rule == MR_Not) {
+            not_checks.push_back(TC);
+            continue;
+        }
+        CheckCase(output, TC, not_checks);
+        not_checks.clear();
+    }
+}
+
+class TestReporter : public benchmark::BenchmarkReporter {
+public:
+  TestReporter(std::vector<benchmark::BenchmarkReporter*> reps)
+      : reporters_(reps)  {}
+
+  virtual bool ReportContext(const Context& context) {
+    bool last_ret = false;
+    bool first = true;
+    for (auto rep : reporters_) {
+      bool new_ret = rep->ReportContext(context);
+      CHECK(first || new_ret == last_ret)
+          << "Reports return different values for ReportContext";
+      first = false;
+      last_ret = new_ret;
+    }
+    return last_ret;
+  }
+
+  void ReportRuns(const std::vector<Run>& report)
+    { for (auto rep : reporters_) rep->ReportRuns(report); }
+  void Finalize() { for (auto rep : reporters_) rep->Finalize(); }
+
+private:
+  std::vector<benchmark::BenchmarkReporter*> reporters_;
+};
+
+}} // end namespace internal
+
+// ========================================================================= //
+// -------------------------- Public API Definitions------------------------ //
+// ========================================================================= //
+
+TestCase::TestCase(std::string re, int rule)
+    : regex_str(std::move(re)), match_rule(rule),
+      substituted_regex(internal::PerformSubstitutions(regex_str)),
+      regex(std::make_shared<benchmark::Regex>())
+{
+    std::string err_str;
+    regex->Init(substituted_regex, &err_str);
+    CHECK(err_str.empty())
+        << "Could not construct regex \"" << substituted_regex << "\""
+        << "\n    originally \"" << regex_str << "\""
+        << "\n    got error: " << err_str;
+}
+
+int AddCases(TestCaseID ID, std::initializer_list<TestCase> il) {
+    auto& L = internal::GetTestCaseList(ID);
+    L.insert(L.end(), il);
+    return 0;
+}
+
+int SetSubstitutions(std::initializer_list<std::pair<std::string, std::string>> il) {
+    auto& subs = internal::GetSubstitutions();
+    for (auto const& KV : il) {
+        bool exists = false;
+        for (auto& EKV : subs) {
+            if (EKV.first == KV.first) {
+                EKV.second = KV.second;
+                exists = true;
+                break;
+            }
+        }
+        if (!exists) subs.push_back(KV);
+    }
+    return 0;
+}
+
+void RunOutputTests(int argc, char* argv[]) {
+  using internal::GetTestCaseList;
+  benchmark::Initialize(&argc, argv);
+  benchmark::ConsoleReporter CR(benchmark::ConsoleReporter::OO_None);
+  benchmark::JSONReporter JR;
+  benchmark::CSVReporter CSVR;
+  struct ReporterTest {
+    const char* name;
+    std::vector<TestCase>& output_cases;
+    std::vector<TestCase>& error_cases;
+    benchmark::BenchmarkReporter& reporter;
+    std::stringstream out_stream;
+    std::stringstream err_stream;
+
+    ReporterTest(const char* n,
+                 std::vector<TestCase>& out_tc,
+                 std::vector<TestCase>& err_tc,
+                 benchmark::BenchmarkReporter& br)
+        : name(n), output_cases(out_tc), error_cases(err_tc), reporter(br) {
+        reporter.SetOutputStream(&out_stream);
+        reporter.SetErrorStream(&err_stream);
+    }
+  } TestCases[] = {
+      {"ConsoleReporter", GetTestCaseList(TC_ConsoleOut),
+                          GetTestCaseList(TC_ConsoleErr), CR},
+      {"JSONReporter",    GetTestCaseList(TC_JSONOut),
+                          GetTestCaseList(TC_JSONErr), JR},
+      {"CSVReporter",     GetTestCaseList(TC_CSVOut),
+                          GetTestCaseList(TC_CSVErr), CSVR},
+  };
+
+  // Create the test reporter and run the benchmarks.
+  std::cout << "Running benchmarks...\n";
+  internal::TestReporter test_rep({&CR, &JR, &CSVR});
+  benchmark::RunSpecifiedBenchmarks(&test_rep);
+
+  for (auto& rep_test : TestCases) {
+      std::string msg = std::string("\nTesting ") + rep_test.name + " Output\n";
+      std::string banner(msg.size() - 1, '-');
+      std::cout << banner << msg << banner << "\n";
+
+      std::cerr << rep_test.err_stream.str();
+      std::cout << rep_test.out_stream.str();
+
+      internal::CheckCases(rep_test.error_cases, rep_test.err_stream);
+      internal::CheckCases(rep_test.output_cases, rep_test.out_stream);
+
+      std::cout << "\n";
+  }
+}
+
+
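A short sketch (not part of this patch) of how the substitution machinery above behaves; the placeholder %myint and the benchmark name BM_foo are hypothetical.

// SetSubstitutions appends new entries to the ordered substitution list, or
// overwrites an existing entry with the same key; built-in entries such as
// %time remain available.
SET_SUBSTITUTIONS({
    {"%myint", "[0-9]+"}
});
// When the TestCase below is constructed, PerformSubstitutions rewrites the
// regex string, so "^BM_foo %time x%myint$" is compiled as
// "^BM_foo [ ]*[0-9]{1,5} ns x[0-9]+$".
ADD_CASES(TC_ConsoleOut, {
    {"^BM_foo %time x%myint$"}
});
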

+ 84 - 181
utils/google-benchmark/test/reporter_output_test.cc

@@ -1,134 +1,19 @@
 
 
 #undef NDEBUG
 #undef NDEBUG
 #include "benchmark/benchmark.h"
 #include "benchmark/benchmark.h"
-#include "../src/check.h" // NOTE: check.h is for internal use only!
-#include "../src/re.h" // NOTE: re.h is for internal use only
-#include <cassert>
-#include <cstring>
-#include <iostream>
-#include <sstream>
-#include <vector>
+#include "output_test.h"
 #include <utility>
 #include <utility>
 
 
-namespace {
-
-// ========================================================================= //
-// -------------------------- Testing Case --------------------------------- //
-// ========================================================================= //
-
-enum MatchRules {
-  MR_Default, // Skip non-matching lines until a match is found.
-  MR_Next    // Match must occur on the next line.
-};
-
-struct TestCase {
-  std::string regex;
-  int match_rule;
-
-  TestCase(std::string re, int rule = MR_Default) : regex(re), match_rule(rule) {}
-
-  void Check(std::stringstream& remaining_output) const {
-    benchmark::Regex r;
-    std::string err_str;
-    r.Init(regex, &err_str);
-    CHECK(err_str.empty()) << "Could not construct regex \"" << regex << "\""
-                           << " got Error: " << err_str;
-
-    std::string line;
-    while (remaining_output.eof() == false) {
-        CHECK(remaining_output.good());
-        std::getline(remaining_output, line);
-        if (r.Match(line)) return;
-        CHECK(match_rule != MR_Next) << "Expected line \"" << line
-                                     << "\" to match regex \"" << regex << "\"";
-    }
-
-    CHECK(remaining_output.eof() == false)
-        << "End of output reached before match for regex \"" << regex
-        << "\" was found";
-  }
-};
-
-std::vector<TestCase> ConsoleOutputTests;
-std::vector<TestCase> JSONOutputTests;
-std::vector<TestCase> CSVOutputTests;
-
-std::vector<TestCase> ConsoleErrorTests;
-std::vector<TestCase> JSONErrorTests;
-std::vector<TestCase> CSVErrorTests;
-
-// ========================================================================= //
-// -------------------------- Test Helpers --------------------------------- //
-// ========================================================================= //
-
-class TestReporter : public benchmark::BenchmarkReporter {
-public:
-  TestReporter(std::vector<benchmark::BenchmarkReporter*> reps)
-      : reporters_(reps)  {}
-
-  virtual bool ReportContext(const Context& context) {
-    bool last_ret = false;
-    bool first = true;
-    for (auto rep : reporters_) {
-      bool new_ret = rep->ReportContext(context);
-      CHECK(first || new_ret == last_ret)
-          << "Reports return different values for ReportContext";
-      first = false;
-      last_ret = new_ret;
-    }
-    return last_ret;
-  }
-
-  virtual void ReportRuns(const std::vector<Run>& report) {
-    for (auto rep : reporters_)
-      rep->ReportRuns(report);
-  }
-
-  virtual void Finalize() {
-      for (auto rep : reporters_)
-        rep->Finalize();
-  }
-
-private:
-  std::vector<benchmark::BenchmarkReporter*> reporters_;
-};
-
-
-#define CONCAT2(x, y) x##y
-#define CONCAT(x, y) CONCAT2(x, y)
-
-#define ADD_CASES(...) \
-    int CONCAT(dummy, __LINE__) = AddCases(__VA_ARGS__)
-
-int AddCases(std::vector<TestCase>* out, std::initializer_list<TestCase> const& v) {
-  for (auto const& TC : v)
-    out->push_back(TC);
-  return 0;
-}
-
-template <class First>
-std::string join(First f) { return f; }
-
-template <class First, class ...Args>
-std::string join(First f, Args&&... args) {
-    return std::string(std::move(f)) + "[ ]+" + join(std::forward<Args>(args)...);
-}
-
-
-
-std::string dec_re = "[0-9]*[.]?[0-9]+([eE][-+][0-9]+)?";
-
-}  // end namespace
 
 
 // ========================================================================= //
 // ========================================================================= //
 // ---------------------- Testing Prologue Output -------------------------- //
 // ---------------------- Testing Prologue Output -------------------------- //
 // ========================================================================= //
 // ========================================================================= //
 
 
-ADD_CASES(&ConsoleOutputTests, {
-    {join("^Benchmark", "Time", "CPU", "Iterations$"), MR_Next},
+ADD_CASES(TC_ConsoleOut, {
+    {"^Benchmark %s Time %s CPU %s Iterations$", MR_Next},
     {"^[-]+$", MR_Next}
     {"^[-]+$", MR_Next}
 });
 });
-ADD_CASES(&CSVOutputTests, {
+ADD_CASES(TC_CSVOut, {
   {"name,iterations,real_time,cpu_time,time_unit,bytes_per_second,items_per_second,"
   {"name,iterations,real_time,cpu_time,time_unit,bytes_per_second,items_per_second,"
     "label,error_occurred,error_message"}
     "label,error_occurred,error_message"}
 });
 });
@@ -142,19 +27,19 @@ void BM_basic(benchmark::State& state) {
 }
 }
 BENCHMARK(BM_basic);
 BENCHMARK(BM_basic);
 
 
-ADD_CASES(&ConsoleOutputTests, {
-    {"^BM_basic[ ]+[0-9]{1,5} ns[ ]+[0-9]{1,5} ns[ ]+[0-9]+$"}
+ADD_CASES(TC_ConsoleOut, {
+    {"^BM_basic %console_report$"}
 });
 });
-ADD_CASES(&JSONOutputTests, {
+ADD_CASES(TC_JSONOut, {
     {"\"name\": \"BM_basic\",$"},
     {"\"name\": \"BM_basic\",$"},
-    {"\"iterations\": [0-9]+,$", MR_Next},
-    {"\"real_time\": [0-9]{1,5},$", MR_Next},
-    {"\"cpu_time\": [0-9]{1,5},$", MR_Next},
+    {"\"iterations\": %int,$", MR_Next},
+    {"\"real_time\": %int,$", MR_Next},
+    {"\"cpu_time\": %int,$", MR_Next},
     {"\"time_unit\": \"ns\"$", MR_Next},
     {"\"time_unit\": \"ns\"$", MR_Next},
     {"}", MR_Next}
     {"}", MR_Next}
 });
 });
-ADD_CASES(&CSVOutputTests, {
-    {"^\"BM_basic\",[0-9]+," + dec_re + "," + dec_re + ",ns,,,,,$"}
+ADD_CASES(TC_CSVOut, {
+    {"^\"BM_basic\",%csv_report$"}
 });
 });
 
 
 // ========================================================================= //
 // ========================================================================= //
@@ -166,16 +51,16 @@ void BM_error(benchmark::State& state) {
     while(state.KeepRunning()) {}
     while(state.KeepRunning()) {}
 }
 }
 BENCHMARK(BM_error);
 BENCHMARK(BM_error);
-ADD_CASES(&ConsoleOutputTests, {
+ADD_CASES(TC_ConsoleOut, {
     {"^BM_error[ ]+ERROR OCCURRED: 'message'$"}
     {"^BM_error[ ]+ERROR OCCURRED: 'message'$"}
 });
 });
-ADD_CASES(&JSONOutputTests, {
+ADD_CASES(TC_JSONOut, {
     {"\"name\": \"BM_error\",$"},
     {"\"name\": \"BM_error\",$"},
     {"\"error_occurred\": true,$", MR_Next},
     {"\"error_occurred\": true,$", MR_Next},
     {"\"error_message\": \"message\",$", MR_Next}
     {"\"error_message\": \"message\",$", MR_Next}
 });
 });
 
 
-ADD_CASES(&CSVOutputTests, {
+ADD_CASES(TC_CSVOut, {
     {"^\"BM_error\",,,,,,,,true,\"message\"$"}
     {"^\"BM_error\",,,,,,,,true,\"message\"$"}
 });
 });
 
 
@@ -190,66 +75,84 @@ void BM_Complexity_O1(benchmark::State& state) {
   state.SetComplexityN(state.range(0));
   state.SetComplexityN(state.range(0));
 }
 }
 BENCHMARK(BM_Complexity_O1)->Range(1, 1<<18)->Complexity(benchmark::o1);
 BENCHMARK(BM_Complexity_O1)->Range(1, 1<<18)->Complexity(benchmark::o1);
+SET_SUBSTITUTIONS({
+  {"%bigOStr", "[ ]*[0-9]+\\.[0-9]+ \\([0-9]+\\)"},
+  {"%RMS", "[ ]*[0-9]+ %"}
+});
+ADD_CASES(TC_ConsoleOut, {
+   {"^BM_Complexity_O1_BigO %bigOStr %bigOStr[ ]*$"},
+   {"^BM_Complexity_O1_RMS %RMS %RMS[ ]*$"}
+});
+
+
+// ========================================================================= //
+// ----------------------- Testing Aggregate Output ------------------------ //
+// ========================================================================= //
 
 
-std::string bigOStr = "[0-9]+\\.[0-9]+ \\([0-9]+\\)";
+// Test that non-aggregate data is printed by default
+void BM_Repeat(benchmark::State& state) { while (state.KeepRunning()) {} }
+BENCHMARK(BM_Repeat)->Repetitions(3);
+ADD_CASES(TC_ConsoleOut, {
+    {"^BM_Repeat/repeats:3 %console_report$"},
+    {"^BM_Repeat/repeats:3 %console_report$"},
+    {"^BM_Repeat/repeats:3 %console_report$"},
+    {"^BM_Repeat/repeats:3_mean %console_report$"},
+    {"^BM_Repeat/repeats:3_stddev %console_report$"}
+});
+ADD_CASES(TC_JSONOut, {
+    {"\"name\": \"BM_Repeat/repeats:3\",$"},
+    {"\"name\": \"BM_Repeat/repeats:3\",$"},
+    {"\"name\": \"BM_Repeat/repeats:3\",$"},
+    {"\"name\": \"BM_Repeat/repeats:3_mean\",$"},
+    {"\"name\": \"BM_Repeat/repeats:3_stddev\",$"}
+});
+ADD_CASES(TC_CSVOut, {
+    {"^\"BM_Repeat/repeats:3\",%csv_report$"},
+    {"^\"BM_Repeat/repeats:3\",%csv_report$"},
+    {"^\"BM_Repeat/repeats:3\",%csv_report$"},
+    {"^\"BM_Repeat/repeats:3_mean\",%csv_report$"},
+    {"^\"BM_Repeat/repeats:3_stddev\",%csv_report$"}
+});
 
 
-ADD_CASES(&ConsoleOutputTests, {
-   {join("^BM_Complexity_O1_BigO", bigOStr, bigOStr) + "[ ]*$"},
-   {join("^BM_Complexity_O1_RMS", "[0-9]+ %", "[0-9]+ %") + "[ ]*$"}
+// Test that a non-repeated test still prints non-aggregate results even when
+// only-aggregate reports have been requested
+void BM_RepeatOnce(benchmark::State& state) { while (state.KeepRunning()) {} }
+BENCHMARK(BM_RepeatOnce)->Repetitions(1)->ReportAggregatesOnly();
+ADD_CASES(TC_ConsoleOut, {
+    {"^BM_RepeatOnce/repeats:1 %console_report$"}
+});
+ADD_CASES(TC_JSONOut, {
+    {"\"name\": \"BM_RepeatOnce/repeats:1\",$"}
+});
+ADD_CASES(TC_CSVOut, {
+    {"^\"BM_RepeatOnce/repeats:1\",%csv_report$"}
 });
 });
 
 
 
 
+// Test that non-aggregate data is not reported when only-aggregate
+// reports have been requested
+void BM_SummaryRepeat(benchmark::State& state) { while (state.KeepRunning()) {} }
+BENCHMARK(BM_SummaryRepeat)->Repetitions(3)->ReportAggregatesOnly();
+ADD_CASES(TC_ConsoleOut, {
+    {".*BM_SummaryRepeat/repeats:3 ", MR_Not},
+    {"^BM_SummaryRepeat/repeats:3_mean %console_report$"},
+    {"^BM_SummaryRepeat/repeats:3_stddev %console_report$"}
+});
+ADD_CASES(TC_JSONOut, {
+    {".*BM_SummaryRepeat/repeats:3 ", MR_Not},
+    {"\"name\": \"BM_SummaryRepeat/repeats:3_mean\",$"},
+    {"\"name\": \"BM_SummaryRepeat/repeats:3_stddev\",$"}
+});
+ADD_CASES(TC_CSVOut, {
+    {".*BM_SummaryRepeat/repeats:3 ", MR_Not},
+    {"^\"BM_SummaryRepeat/repeats:3_mean\",%csv_report$"},
+    {"^\"BM_SummaryRepeat/repeats:3_stddev\",%csv_report$"}
+});
+
 // ========================================================================= //
 // ========================================================================= //
 // --------------------------- TEST CASES END ------------------------------ //
 // --------------------------- TEST CASES END ------------------------------ //
 // ========================================================================= //
 // ========================================================================= //
 
 
 
 
 int main(int argc, char* argv[]) {
 int main(int argc, char* argv[]) {
-  benchmark::Initialize(&argc, argv);
-  benchmark::ConsoleReporter CR(benchmark::ConsoleReporter::OO_None);
-  benchmark::JSONReporter JR;
-  benchmark::CSVReporter CSVR;
-  struct ReporterTest {
-    const char* name;
-    std::vector<TestCase>& output_cases;
-    std::vector<TestCase>& error_cases;
-    benchmark::BenchmarkReporter& reporter;
-    std::stringstream out_stream;
-    std::stringstream err_stream;
-
-    ReporterTest(const char* n,
-                 std::vector<TestCase>& out_tc,
-                 std::vector<TestCase>& err_tc,
-                 benchmark::BenchmarkReporter& br)
-        : name(n), output_cases(out_tc), error_cases(err_tc), reporter(br) {
-        reporter.SetOutputStream(&out_stream);
-        reporter.SetErrorStream(&err_stream);
-    }
-  } TestCases[] = {
-      {"ConsoleReporter", ConsoleOutputTests, ConsoleErrorTests, CR},
-      {"JSONReporter", JSONOutputTests, JSONErrorTests, JR},
-      {"CSVReporter", CSVOutputTests, CSVErrorTests, CSVR}
-  };
-
-  // Create the test reporter and run the benchmarks.
-  std::cout << "Running benchmarks...\n";
-  TestReporter test_rep({&CR, &JR, &CSVR});
-  benchmark::RunSpecifiedBenchmarks(&test_rep);
-
-  for (auto& rep_test : TestCases) {
-      std::string msg = std::string("\nTesting ") + rep_test.name + " Output\n";
-      std::string banner(msg.size() - 1, '-');
-      std::cout << banner << msg << banner << "\n";
-
-      std::cerr << rep_test.err_stream.str();
-      std::cout << rep_test.out_stream.str();
-
-      for (const auto& TC : rep_test.error_cases)
-        TC.Check(rep_test.err_stream);
-      for (const auto& TC : rep_test.output_cases)
-        TC.Check(rep_test.out_stream);
-
-      std::cout << "\n";
-  }
-  return 0;
+  RunOutputTests(argc, argv);
 }
 }
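
For reference, a minimal sketch (not part of this patch) of the MR_Not idiom implemented by CheckCases in output_test_helper.cc: an MR_Not case is buffered and checked against every line consumed while scanning for the next non-Not case, so it should directly precede the case that bounds the scan. The benchmark name BM_OnlyAggregates is hypothetical.

ADD_CASES(TC_ConsoleOut, {
    // Must not match any line seen before the _mean line below is found.
    {".*BM_OnlyAggregates/repeats:2 ", MR_Not},
    {"^BM_OnlyAggregates/repeats:2_mean %console_report$"}
});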