http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/1e1823ab/thirdparty/jsoncpp/devtools/agent_vmw7.json
----------------------------------------------------------------------
diff --git a/thirdparty/jsoncpp/devtools/agent_vmw7.json b/thirdparty/jsoncpp/devtools/agent_vmw7.json
new file mode 100644
index 0000000..95d62ba
--- /dev/null
+++ b/thirdparty/jsoncpp/devtools/agent_vmw7.json
@@ -0,0 +1,33 @@
+{
+    "cmake_variants" : [
+        {"name": "generator",
+         "generators": [
+            {"generator": [
+                "Visual Studio 7 .NET 2003",
+                "Visual Studio 9 2008",
+                "Visual Studio 9 2008 Win64",
+                "Visual Studio 10",
+                "Visual Studio 10 Win64",
+                "Visual Studio 11",
+                "Visual Studio 11 Win64"
+                ]
+            },
+            {"generator": ["MinGW Makefiles"],
+             "env_prepend": [{"path": "c:/wut/prg/MinGW/bin"}]
+            }
+         ]
+        },
+        {"name": "shared_dll",
+         "variables": [
+            ["BUILD_SHARED_LIBS=true"],
+            ["BUILD_SHARED_LIBS=false"]
+          ]
+        },
+        {"name": "build_type",
+         "build_types": [
+            "debug",
+            "release"
+            ]
+        }
+    ]
+}

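The three axes in this config are crossed by batchbuild.py (added below):
every generator is paired with every BUILD_SHARED_LIBS setting and every
build type, so the 8 generators x 2 link modes x 2 build types above expand
to 32 build variants. A minimal sketch of that expansion, with the generator
list trimmed for brevity (batchbuild.py itself omits CMAKE_BUILD_TYPE for
Visual Studio generators):

    import itertools

    generators = ["Visual Studio 10", "MinGW Makefiles"]
    shared_dll = ["BUILD_SHARED_LIBS=true", "BUILD_SHARED_LIBS=false"]
    build_types = ["debug", "release"]

    # One cmake invocation per (generator, variable, build_type) triple.
    for gen, var, bt in itertools.product(generators, shared_dll, build_types):
        print('cmake -D%s -DCMAKE_BUILD_TYPE=%s -G "%s"' % (var, bt, gen))
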
http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/1e1823ab/thirdparty/jsoncpp/devtools/agent_vmxp.json
----------------------------------------------------------------------
diff --git a/thirdparty/jsoncpp/devtools/agent_vmxp.json b/thirdparty/jsoncpp/devtools/agent_vmxp.json
new file mode 100644
index 0000000..39d5e53
--- /dev/null
+++ b/thirdparty/jsoncpp/devtools/agent_vmxp.json
@@ -0,0 +1,26 @@
+{
+    "cmake_variants" : [
+        {"name": "generator",
+         "generators": [
+            {"generator": [
+                "Visual Studio 6",
+                "Visual Studio 7",
+                "Visual Studio 8 2005"
+                ]
+            }
+         ]
+        },
+        {"name": "shared_dll",
+         "variables": [
+            ["BUILD_SHARED_LIBS=true"],
+            ["BUILD_SHARED_LIBS=false"]
+          ]
+        },
+        {"name": "build_type",
+         "build_types": [
+            "debug",
+            "release"
+            ]
+        }
+    ]
+}

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/1e1823ab/thirdparty/jsoncpp/devtools/antglob.py
----------------------------------------------------------------------
diff --git a/thirdparty/jsoncpp/devtools/antglob.py b/thirdparty/jsoncpp/devtools/antglob.py
new file mode 100644
index 0000000..c272f66
--- /dev/null
+++ b/thirdparty/jsoncpp/devtools/antglob.py
@@ -0,0 +1,205 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Copyright 2009 Baptiste Lepilleur
+# Distributed under MIT license, or public domain if desired and
+# recognized in your jurisdiction.
+# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+from __future__ import print_function
+from dircache import listdir
+import re
+import fnmatch
+import os.path
+
+
+# These fnmatch expressions are used by default to prune the directory tree
+# while doing the recursive traversal in the glob_impl method of glob function.
+prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS '
+
+# These fnmatch expressions are used by default to exclude files and dirs
+# while doing the recursive traversal in the glob_impl method of glob function.
+##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split()
+
+# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree
+# while doing the recursive traversal in the glob_impl method of glob function.
+default_excludes = '''
+**/*~
+**/#*#
+**/.#*
+**/%*%
+**/._*
+**/CVS
+**/CVS/**
+**/.cvsignore
+**/SCCS
+**/SCCS/**
+**/vssver.scc
+**/.svn
+**/.svn/**
+**/.git
+**/.git/**
+**/.gitignore
+**/.bzr
+**/.bzr/**
+**/.hg
+**/.hg/**
+**/_MTN
+**/_MTN/**
+**/_darcs
+**/_darcs/**
+**/.DS_Store '''
+
+DIR = 1
+FILE = 2
+DIR_LINK = 4
+FILE_LINK = 8
+LINKS = DIR_LINK | FILE_LINK
+ALL_NO_LINK = DIR | FILE
+ALL = DIR | FILE | LINKS
+
+_ANT_RE = re.compile(r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)')
+
+def ant_pattern_to_re(ant_pattern):
+    """Generates a regular expression from the ant pattern.
+    Matching convention:
+    **/a: match 'a', 'dir/a', 'dir1/dir2/a'
+    a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b'
+    *.py: match 'script.py' but not 'a/script.py'
+    """
+    rex = ['^']
+    next_pos = 0
+    sep_rex = r'(?:/|%s)' % re.escape(os.path.sep)
+##    print 'Converting', ant_pattern
+    for match in _ANT_RE.finditer(ant_pattern):
+##        print 'Matched', match.group()
+##        print match.start(0), next_pos
+        if match.start(0) != next_pos:
+            raise ValueError("Invalid ant pattern")
+        if match.group(1): # /**/
+            rex.append(sep_rex + '(?:.*%s)?' % sep_rex)
+        elif match.group(2): # **/
+            rex.append('(?:.*%s)?' % sep_rex)
+        elif match.group(3): # /**
+            rex.append(sep_rex + '.*')
+        elif match.group(4): # *
+            rex.append('[^/%s]*' % re.escape(os.path.sep))
+        elif match.group(5): # /
+            rex.append(sep_rex)
+        else: # somepath
+            rex.append(re.escape(match.group(6)))
+        next_pos = match.end()
+    rex.append('$')
+    return re.compile(''.join(rex))
+
+def _as_list(l):
+    if isinstance(l, basestring):
+        return l.split()
+    return l
+
+def glob(dir_path,
+         includes = '**/*',
+         excludes = default_excludes,
+         entry_type = FILE,
+         prune_dirs = prune_dirs,
+         max_depth = 25):
+    include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)]
+    exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)]
+    prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)]
+    dir_path = dir_path.replace('/',os.path.sep)
+    entry_type_filter = entry_type
+
+    def is_pruned_dir(dir_name):
+        for pattern in prune_dirs:
+            if fnmatch.fnmatch(dir_name, pattern):
+                return True
+        return False
+
+    def apply_filter(full_path, filter_rexs):
+        """Return True if at least one of the filter regular expression match 
full_path."""
+        for rex in filter_rexs:
+            if rex.match(full_path):
+                return True
+        return False
+
+    def glob_impl(root_dir_path):
+        child_dirs = [root_dir_path]
+        while child_dirs:
+            dir_path = child_dirs.pop()
+            for entry in listdir(dir_path):
+                full_path = os.path.join(dir_path, entry)
+##                print 'Testing:', full_path,
+                is_dir = os.path.isdir(full_path)
+                if is_dir and not is_pruned_dir(entry): # explore child directory ?
+##                    print '===> marked for recursion',
+                    child_dirs.append(full_path)
+                included = apply_filter(full_path, include_filter)
+                rejected = apply_filter(full_path, exclude_filter)
+                if not included or rejected: # do not include entry ?
+##                    print '=> not included or rejected'
+                    continue
+                link = os.path.islink(full_path)
+                is_file = os.path.isfile(full_path)
+                if not is_file and not is_dir:
+##                    print '=> unknown entry type'
+                    continue
+                if link:
+                    entry_type = is_file and FILE_LINK or DIR_LINK
+                else:
+                    entry_type = is_file and FILE or DIR
+##                print '=> type: %d' % entry_type, 
+                if (entry_type & entry_type_filter) != 0:
+##                    print ' => KEEP'
+                    yield os.path.join(dir_path, entry)
+##                else:
+##                    print ' => TYPE REJECTED'
+    return list(glob_impl(dir_path))
+
+
+if __name__ == "__main__":
+    import unittest
+
+    class AntPatternToRETest(unittest.TestCase):
+##        def test_conversion(self):
+##            self.assertEqual('^somepath$', ant_pattern_to_re('somepath').pattern)
+
+        def test_matching(self):
+            test_cases = [ ('path',
+                             ['path'],
+                             ['somepath', 'pathsuffix', '/path', '/path']),
+                           ('*.py',
+                             ['source.py', 'source.ext.py', '.py'],
+                             ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c']),
+                           ('**/path',
+                             ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'],
+                             ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath']),
+                           ('path/**',
+                             ['path/a', 'path/path/a', 'path//'],
+                             ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a']),
+                           ('/**/path',
+                             ['/path', '/a/path', '/a/b/path/path', '/path/path'],
+                             ['path', 'path/', 'a/path', '/pathsuffix', '/somepath']),
+                           ('a/b',
+                             ['a/b'],
+                             ['somea/b', 'a/bsuffix', 'a/b/c']),
+                           ('**/*.py',
+                             ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'],
+                             ['script.pyc', 'script.pyo', 'a.py/b']),
+                           ('src/**/*.py',
+                             ['src/a.py', 'src/dir/a.py'],
+                             ['a/src/a.py', '/src/a.py']),
+                           ]
+            for ant_pattern, accepted_matches, rejected_matches in list(test_cases):
+                def local_path(paths):
+                    return [ p.replace('/',os.path.sep) for p in paths ]
+                test_cases.append((ant_pattern, local_path(accepted_matches), local_path(rejected_matches)))
+            for ant_pattern, accepted_matches, rejected_matches in test_cases:
+                rex = ant_pattern_to_re(ant_pattern)
+                print('ant_pattern:', ant_pattern, ' => ', rex.pattern)
+                for accepted_match in accepted_matches:
+                    print('Accepted?:', accepted_match)
+                    self.assertTrue(rex.match(accepted_match) is not None)
+                for rejected_match in rejected_matches:
+                    print('Rejected?:', rejected_match)
+                    self.assertTrue(rex.match(rejected_match) is None)
+
+    unittest.main()

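For reference, antglob.glob() above walks dir_path and returns every entry
whose path matches at least one include pattern and no exclude pattern,
filtered by the DIR/FILE/*_LINK type flags. Note that the dircache module it
imports exists only in Python 2; under Python 3 the import would need to be
replaced (os.listdir is the closest drop-in). A usage sketch with
hypothetical paths, assuming antglob.py is importable:

    import antglob

    # All C++ headers and sources below src/; VCS folders are pruned by default.
    cpp_files = antglob.glob('src', includes='**/*.h **/*.cpp **/*.inl')

    # Directories only, selected via the entry-type bit mask.
    dirs = antglob.glob('.', entry_type=antglob.DIR)
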
http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/1e1823ab/thirdparty/jsoncpp/devtools/batchbuild.py
----------------------------------------------------------------------
diff --git a/thirdparty/jsoncpp/devtools/batchbuild.py b/thirdparty/jsoncpp/devtools/batchbuild.py
new file mode 100644
index 0000000..0eb0690
--- /dev/null
+++ b/thirdparty/jsoncpp/devtools/batchbuild.py
@@ -0,0 +1,278 @@
+from __future__ import print_function
+import collections
+import itertools
+import json
+import os
+import os.path
+import re
+import shutil
+import string
+import subprocess
+import sys
+import cgi
+
+class BuildDesc:
+    def __init__(self, prepend_envs=None, variables=None, build_type=None, generator=None):
+        self.prepend_envs = prepend_envs or [] # [ { "var": "value" } ]
+        self.variables = variables or []
+        self.build_type = build_type
+        self.generator = generator
+
+    def merged_with(self, build_desc):
+        """Returns a new BuildDesc by merging field content.
+           Prefer build_desc fields to self fields for single valued field.
+        """
+        return BuildDesc(self.prepend_envs + build_desc.prepend_envs,
+                          self.variables + build_desc.variables,
+                          build_desc.build_type or self.build_type,
+                          build_desc.generator or self.generator)
+
+    def env(self):
+        environ = os.environ.copy()
+        for values_by_name in self.prepend_envs:
+            for var, value in list(values_by_name.items()):
+                var = var.upper()
+                if type(value) is unicode:
+                    value = value.encode(sys.getdefaultencoding())
+                if var in environ:
+                    environ[var] = value + os.pathsep + environ[var]
+                else:
+                    environ[var] = value
+        return environ
+
+    def cmake_args(self):
+        args = ["-D%s" % var for var in self.variables]
+        # skip build type for Visual Studio solutions as it causes a warning
+        if self.build_type and 'Visual' not in self.generator:
+            args.append("-DCMAKE_BUILD_TYPE=%s" % self.build_type)
+        if self.generator:
+            args.extend(['-G', self.generator])
+        return args
+
+    def __repr__(self):
+        return "BuildDesc(%s, build_type=%s)" %  (" ".join(self.cmake_args()), 
self.build_type)
+
+class BuildData:
+    def __init__(self, desc, work_dir, source_dir):
+        self.desc = desc
+        self.work_dir = work_dir
+        self.source_dir = source_dir
+        self.cmake_log_path = os.path.join(work_dir, 'batchbuild_cmake.log')
+        self.build_log_path = os.path.join(work_dir, 'batchbuild_build.log')
+        self.cmake_succeeded = False
+        self.build_succeeded = False
+
+    def execute_build(self):
+        print('Build %s' % self.desc)
+        self._make_new_work_dir()
+        self.cmake_succeeded = self._generate_makefiles()
+        if self.cmake_succeeded:
+            self.build_succeeded = self._build_using_makefiles()
+        return self.build_succeeded
+
+    def _generate_makefiles(self):
+        print('  Generating makefiles: ', end=' ')
+        cmd = ['cmake'] + self.desc.cmake_args() + [os.path.abspath(self.source_dir)]
+        succeeded = self._execute_build_subprocess(cmd, self.desc.env(), self.cmake_log_path)
+        print('done' if succeeded else 'FAILED')
+        return succeeded
+
+    def _build_using_makefiles(self):
+        print('  Building:', end=' ')
+        cmd = ['cmake', '--build', self.work_dir]
+        if self.desc.build_type:
+            cmd += ['--config', self.desc.build_type]
+        succeeded = self._execute_build_subprocess(cmd, self.desc.env(), self.build_log_path)
+        print('done' if succeeded else 'FAILED')
+        return succeeded
+
+    def _execute_build_subprocess(self, cmd, env, log_path):
+        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=self.work_dir,
+                                    env=env)
+        stdout, _ = process.communicate()
+        succeeded = (process.returncode == 0)
+        with open(log_path, 'wb') as flog:
+            log = ' '.join(cmd) + '\n' + stdout + '\nExit code: %r\n' % process.returncode
+            flog.write(fix_eol(log))
+        return succeeded
+
+    def _make_new_work_dir(self):
+        if os.path.isdir(self.work_dir):
+            print('  Removing work directory', self.work_dir)
+            shutil.rmtree(self.work_dir, ignore_errors=True)
+        if not os.path.isdir(self.work_dir):
+            os.makedirs(self.work_dir)
+
+def fix_eol(stdout):
+    """Fixes wrong EOL produced by cmake --build on Windows (\r\r\n instead of 
\r\n).
+    """
+    return re.sub('\r*\n', os.linesep, stdout)
+
+def load_build_variants_from_config(config_path):
+    with open(config_path, 'rb') as fconfig:
+        data = json.load(fconfig)
+    variants = data[ 'cmake_variants' ]
+    build_descs_by_axis = collections.defaultdict(list)
+    for axis in variants:
+        axis_name = axis["name"]
+        build_descs = []
+        if "generators" in axis:
+            for generator_data in axis["generators"]:
+                for generator in generator_data["generator"]:
+                    build_desc = BuildDesc(generator=generator,
+                                           prepend_envs=generator_data.get("env_prepend"))
+                    build_descs.append(build_desc)
+        elif "variables" in axis:
+            for variables in axis["variables"]:
+                build_desc = BuildDesc(variables=variables)
+                build_descs.append(build_desc)
+        elif "build_types" in axis:
+            for build_type in axis["build_types"]:
+                build_desc = BuildDesc(build_type=build_type)
+                build_descs.append(build_desc)
+        build_descs_by_axis[axis_name].extend(build_descs)
+    return build_descs_by_axis
+
+def generate_build_variants(build_descs_by_axis):
+    """Returns a list of BuildDesc generated for the partial BuildDesc for 
each axis."""
+    axis_names = list(build_descs_by_axis.keys())
+    build_descs = []
+    for axis_name, axis_build_descs in list(build_descs_by_axis.items()):
+        if len(build_descs):
+            # for each existing build_desc and each axis build desc, create a new build_desc
+            new_build_descs = []
+            for prototype_build_desc, axis_build_desc in itertools.product(build_descs, axis_build_descs):
+                new_build_descs.append(prototype_build_desc.merged_with(axis_build_desc))
+            build_descs = new_build_descs
+        else:
+            build_descs = axis_build_descs
+    return build_descs
+
+HTML_TEMPLATE = string.Template('''<html>
+<head>
+    <title>$title</title>
+    <style type="text/css">
+    td.failed {background-color:#f08080;}
+    td.ok {background-color:#c0eec0;}
+    </style>
+</head>
+<body>
+<table border="1">
+<thead>
+    <tr>
+        <th>Variables</th>
+        $th_vars
+    </tr>
+    <tr>
+        <th>Build type</th>
+        $th_build_types
+    </tr>
+</thead>
+<tbody>
+$tr_builds
+</tbody>
+</table>
+</body></html>''')
+
+def generate_html_report(html_report_path, builds):
+    report_dir = os.path.dirname(html_report_path)
+    # Vertical axis: generator
+    # Horizontal: variables, then build_type
+    builds_by_generator = collections.defaultdict(list)
+    variables = set()
+    build_types_by_variable = collections.defaultdict(set)
+    build_by_pos_key = {} # { (generator, var_key, build_type): build }
+    for build in builds:
+        builds_by_generator[build.desc.generator].append(build)
+        var_key = tuple(sorted(build.desc.variables))
+        variables.add(var_key)
+        build_types_by_variable[var_key].add(build.desc.build_type)
+        pos_key = (build.desc.generator, var_key, build.desc.build_type)
+        build_by_pos_key[pos_key] = build
+    variables = sorted(variables)
+    th_vars = []
+    th_build_types = []
+    for variable in variables:
+        build_types = sorted(build_types_by_variable[variable])
+        nb_build_type = len(build_types_by_variable[variable])
+        th_vars.append('<th colspan="%d">%s</th>' % (nb_build_type, cgi.escape(' '.join(variable))))
+        for build_type in build_types:
+            th_build_types.append('<th>%s</th>' % cgi.escape(build_type))
+    tr_builds = []
+    for generator in sorted(builds_by_generator):
+        tds = [ '<td>%s</td>\n' % cgi.escape(generator) ]
+        for variable in variables:
+            build_types = sorted(build_types_by_variable[variable])
+            for build_type in build_types:
+                pos_key = (generator, variable, build_type)
+                build = build_by_pos_key.get(pos_key)
+                if build:
+                    cmake_status = 'ok' if build.cmake_succeeded else 'FAILED'
+                    build_status = 'ok' if build.build_succeeded else 'FAILED'
+                    cmake_log_url = os.path.relpath(build.cmake_log_path, report_dir)
+                    build_log_url = os.path.relpath(build.build_log_path, report_dir)
+                    td = '<td class="%s"><a href="%s" class="%s">CMake: %s</a>' % (
+                        build_status.lower(), cmake_log_url, cmake_status.lower(), cmake_status)
+                    if build.cmake_succeeded:
+                        td += '<br><a href="%s" class="%s">Build: %s</a>' % (
+                            build_log_url, build_status.lower(), build_status)
+                    td += '</td>'
+                else:
+                    td = '<td></td>'
+                tds.append(td)
+        tr_builds.append('<tr>%s</tr>' % '\n'.join(tds))
+    html = HTML_TEMPLATE.substitute(
+        title='Batch build report',
+        th_vars=' '.join(th_vars),
+        th_build_types=' '.join(th_build_types),
+        tr_builds='\n'.join(tr_builds))
+    with open(html_report_path, 'wt') as fhtml:
+        fhtml.write(html)
+    print('HTML report generated in:', html_report_path)
+
+def main():
+    usage = r"""%prog WORK_DIR SOURCE_DIR CONFIG_JSON_PATH 
[CONFIG2_JSON_PATH...]
+Build a given CMake based project located in SOURCE_DIR with multiple 
generators/options.dry_run
+as described in CONFIG_JSON_PATH building in WORK_DIR.
+
+Example of call:
+python devtools\batchbuild.py e:\buildbots\jsoncpp\build . devtools\agent_vmw7.json
+"""
+    from optparse import OptionParser
+    parser = OptionParser(usage=usage)
+    parser.allow_interspersed_args = True
+#    parser.add_option('-v', '--verbose', dest="verbose", action='store_true',
+#        help="""Be verbose.""")
+    parser.enable_interspersed_args()
+    options, args = parser.parse_args()
+    if len(args) < 3:
+        parser.error("Missing one of WORK_DIR SOURCE_DIR CONFIG_JSON_PATH.")
+    work_dir = args[0]
+    source_dir = args[1].rstrip('/\\')
+    config_paths = args[2:]
+    for config_path in config_paths:
+        if not os.path.isfile(config_path):
+            parser.error("Can not read: %r" % config_path)
+
+    # generate build variants
+    build_descs = []
+    for config_path in config_paths:
+        build_descs_by_axis = load_build_variants_from_config(config_path)
+        build_descs.extend(generate_build_variants(build_descs_by_axis))
+    print('Build variants (%d):' % len(build_descs))
+    # assign build directory for each variant
+    if not os.path.isdir(work_dir):
+        os.makedirs(work_dir)
+    builds = []
+    with open(os.path.join(work_dir, 'matrix-dir-map.txt'), 'wt') as fmatrixmap:
+        for index, build_desc in enumerate(build_descs):
+            build_desc_work_dir = os.path.join(work_dir, '%03d' % (index+1))
+            builds.append(BuildData(build_desc, build_desc_work_dir, source_dir))
+            fmatrixmap.write('%s: %s\n' % (build_desc_work_dir, build_desc))
+    for build in builds:
+        build.execute_build()
+    html_report_path = os.path.join(work_dir, 'batchbuild-report.html')
+    generate_html_report(html_report_path, builds)
+    print('Done')
+
+
+if __name__ == '__main__':
+    main()
+

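To illustrate the flow above: load_build_variants_from_config() turns each
JSON axis into a list of partial BuildDesc objects, and
generate_build_variants() merges them pairwise across axes. (Like antglob.py,
this script targets Python 2: it references unicode and cgi.escape.) A small
sketch, assuming batchbuild.py is importable:

    import collections
    import batchbuild

    axes = collections.defaultdict(list)
    axes['generator'] = [batchbuild.BuildDesc(generator='MinGW Makefiles')]
    axes['build_type'] = [batchbuild.BuildDesc(build_type='debug'),
                          batchbuild.BuildDesc(build_type='release')]

    # One merged BuildDesc per (generator, build_type) combination, e.g.
    # BuildDesc(-DCMAKE_BUILD_TYPE=debug -G MinGW Makefiles, build_type=debug)
    for desc in batchbuild.generate_build_variants(axes):
        print(desc)
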
http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/1e1823ab/thirdparty/jsoncpp/devtools/fixeol.py
----------------------------------------------------------------------
diff --git a/thirdparty/jsoncpp/devtools/fixeol.py b/thirdparty/jsoncpp/devtools/fixeol.py
new file mode 100644
index 0000000..b55e146
--- /dev/null
+++ b/thirdparty/jsoncpp/devtools/fixeol.py
@@ -0,0 +1,70 @@
+# Copyright 2010 Baptiste Lepilleur
+# Distributed under MIT license, or public domain if desired and
+# recognized in your jurisdiction.
+# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+from __future__ import print_function
+import os.path
+import sys
+
+def fix_source_eol(path, is_dry_run = True, verbose = True, eol = '\n'):
+    """Makes sure that all sources have the specified eol sequence (default: 
unix)."""
+    if not os.path.isfile(path):
+        raise ValueError('Path "%s" is not a file' % path)
+    try:
+        f = open(path, 'rb')
+    except IOError as msg:
+        print("%s: I/O Error: %s" % (file, str(msg)), file=sys.stderr)
+        return False
+    try:
+        raw_lines = f.readlines()
+    finally:
+        f.close()
+    fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines]
+    if raw_lines != fixed_lines:
+        print('%s =>' % path, end=' ')
+        if not is_dry_run:
+            f = open(path, "wb")
+            try:
+                f.writelines(fixed_lines)
+            finally:
+                f.close()
+        if verbose:
+            print(is_dry_run and ' NEED FIX' or ' FIXED')
+    return True
+##    
+##    
+##
+##def _do_fix(is_dry_run = True):
+##    from waftools import antglob
+##    python_sources = antglob.glob('.',
+##        includes = '**/*.py **/wscript **/wscript_build',
+##        excludes = antglob.default_excludes + './waf.py',
+##        prune_dirs = antglob.prune_dirs + 'waf-* ./build')
+##    for path in python_sources:
+##        _fix_python_source(path, is_dry_run)
+##
+##    cpp_sources = antglob.glob('.',
+##        includes = '**/*.cpp **/*.h **/*.inl',
+##        prune_dirs = antglob.prune_dirs + 'waf-* ./build')
+##    for path in cpp_sources:
+##        _fix_source_eol(path, is_dry_run)
+##
+##
+##def dry_fix(context):
+##    _do_fix(is_dry_run = True)
+##
+##def fix(context):
+##    _do_fix(is_dry_run = False)
+##
+##def shutdown():
+##    pass
+##
+##def check(context):
+##    # Unit tests are run when "check" target is used
+##    ut = UnitTest.unit_test()
+##    ut.change_to_testfile_dir = True
+##    ut.want_to_see_test_output = True
+##    ut.want_to_see_test_error = True
+##    ut.run()
+##    ut.print_results()

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/1e1823ab/thirdparty/jsoncpp/devtools/licenseupdater.py
----------------------------------------------------------------------
diff --git a/thirdparty/jsoncpp/devtools/licenseupdater.py b/thirdparty/jsoncpp/devtools/licenseupdater.py
new file mode 100644
index 0000000..6f82361
--- /dev/null
+++ b/thirdparty/jsoncpp/devtools/licenseupdater.py
@@ -0,0 +1,94 @@
+"""Updates the license text in source file.
+"""
+from __future__ import print_function
+
+# An existing license is found if the file starts with the string below,
+# and ends with the first blank line.
+LICENSE_BEGIN = "// Copyright "
+
+BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+""".replace('\r\n','\n')
+
+def update_license(path, dry_run, show_diff):
+    """Update the license statement in the specified file.
+    Parameters:
+      path: path of the C++ source file to update.
+      dry_run: if True, just print the path of the file that would be updated,
+               but don't change it.
+      show_diff: if True, print the path of the file that would be modified,
+                 as well as the change made to the file. 
+    """
+    with open(path, 'rt') as fin:
+        original_text = fin.read().replace('\r\n','\n')
+        newline = fin.newlines and fin.newlines[0] or '\n'
+    if not original_text.startswith(LICENSE_BEGIN):
+        # No existing license found => prepend it
+        new_text = BRIEF_LICENSE + original_text
+    else:
+        license_end_index = original_text.index('\n\n') # search first blank line
+        new_text = BRIEF_LICENSE + original_text[license_end_index+2:]
+    if original_text != new_text:
+        if not dry_run:
+            with open(path, 'wb') as fout:
+                fout.write(new_text.replace('\n', newline))
+        print('Updated', path)
+        if show_diff:
+            import difflib
+            print('\n'.join(difflib.unified_diff(original_text.split('\n'),
+                                                   new_text.split('\n'))))
+        return True
+    return False
+
+def update_license_in_source_directories(source_dirs, dry_run, show_diff):
+    """Updates license text in C++ source files found in directory source_dirs.
+    Parameters:
+      source_dirs: list of directories to scan for C++ sources. Directories are
+                   scanned recursively.
+      dry_run: if True, just print the path of the file that would be updated,
+               but don't change it.
+      show_diff: if True, print the path of the file that would be modified,
+                 as well as the change made to the file. 
+    """
+    from devtools import antglob
+    prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
+    for source_dir in source_dirs:
+        cpp_sources = antglob.glob(source_dir,
+            includes = '''**/*.h **/*.cpp **/*.inl''',
+            prune_dirs = prune_dirs)
+        for source in cpp_sources:
+            update_license(source, dry_run, show_diff)
+
+def main():
+    usage = """%prog DIR [DIR2...]
+Updates license text in sources of the project in source files found
+in the directory specified on the command-line.
+
+Example of call:
+python devtools\licenseupdater.py include src -n --diff
+=> Show change that would be made to the sources.
+
+python devtools\licenseupdater.py include src
+=> Update license statement on all sources in directories include/ and src/.
+"""
+    from optparse import OptionParser
+    parser = OptionParser(usage=usage)
+    parser.allow_interspersed_args = False
+    parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False,
+        help="""Only show what files are updated, do not update the files""")
+    parser.add_option('--diff', dest="show_diff", action='store_true', default=False,
+        help="""On update, show change made to the file.""")
+    parser.enable_interspersed_args()
+    options, args = parser.parse_args()
+    update_license_in_source_directories(args, options.dry_run, options.show_diff)
+    print('Done')
+
+if __name__ == '__main__':
+    import sys
+    import os.path
+    sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+    main()
+

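update_license() above treats everything from the leading "// Copyright "
marker to the first blank line as the existing license block and replaces it
with BRIEF_LICENSE, preserving the file's newline style. A sketch of
previewing the change on a single hypothetical file, run from the project
root so that devtools is importable:

    from devtools import licenseupdater

    changed = licenseupdater.update_license('src/lib_json/json_value.cpp',
                                            dry_run=True, show_diff=True)
    print('would update' if changed else 'already up to date')
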
http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/1e1823ab/thirdparty/jsoncpp/devtools/tarball.py
----------------------------------------------------------------------
diff --git a/thirdparty/jsoncpp/devtools/tarball.py b/thirdparty/jsoncpp/devtools/tarball.py
new file mode 100644
index 0000000..2e72717
--- /dev/null
+++ b/thirdparty/jsoncpp/devtools/tarball.py
@@ -0,0 +1,52 @@
+# Copyright 2010 Baptiste Lepilleur
+# Distributed under MIT license, or public domain if desired and
+# recognized in your jurisdiction.
+# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+from contextlib import closing
+import os
+import tarfile
+
+TARGZ_DEFAULT_COMPRESSION_LEVEL = 9
+
+def make_tarball(tarball_path, sources, base_dir, prefix_dir=''):
+    """Parameters:
+    tarball_path: output path of the .tar.gz file
+    sources: list of sources to include in the tarball, relative to the current directory
+    base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped
+        from path in the tarball.
+    prefix_dir: all files stored in the tarball will be in a sub-directory of prefix_dir. Set to ''
+        to make them children of the root.
+    """
+    base_dir = os.path.normpath(os.path.abspath(base_dir))
+    def archive_name(path):
+        """Makes path relative to base_dir."""
+        path = os.path.normpath(os.path.abspath(path))
+        common_path = os.path.commonprefix((base_dir, path))
+        archive_name = path[len(common_path):]
+        if os.path.isabs(archive_name):
+            archive_name = archive_name[1:]
+        return os.path.join(prefix_dir, archive_name)
+    def visit(tar, dirname, names):
+        for name in names:
+            path = os.path.join(dirname, name)
+            if os.path.isfile(path):
+                path_in_tar = archive_name(path)
+                tar.add(path, path_in_tar)
+    compression = TARGZ_DEFAULT_COMPRESSION_LEVEL
+    with closing(tarfile.TarFile.open(tarball_path, 'w:gz',
+            compresslevel=compression)) as tar:
+        for source in sources:
+            source_path = source
+            if os.path.isdir(source):
+                for dirpath, dirnames, filenames in os.walk(source_path):
+                    visit(tar, dirpath, filenames)
+            else:
+                path_in_tar = archive_name(source_path)
+                tar.add(source_path, path_in_tar)      # filename, arcname
+
+def decompress(tarball_path, base_dir):
+    """Decompress the gzipped tarball into directory base_dir.
+    """
+    with closing(tarfile.TarFile.open(tarball_path)) as tar:
+        tar.extractall(base_dir)

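A usage sketch for the helpers above; the paths and version string are
illustrative only:

    import tarball

    # Store the listed sources under a jsoncpp-1.0/ prefix in the archive.
    tarball.make_tarball('dist/jsoncpp-1.0.tar.gz',
                         sources=['include', 'src', 'README'],
                         base_dir='.',
                         prefix_dir='jsoncpp-1.0')

    # Unpack the same archive elsewhere.
    tarball.decompress('dist/jsoncpp-1.0.tar.gz', 'build/unpacked')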