diff --git a/SConstruct b/SConstruct
index a957617..c714f39 100644
--- a/SConstruct
+++ b/SConstruct
@@ -15,15 +15,15 @@ options.Add( EnumOption('platform',
 try:
     platform = ARGUMENTS['platform']
     if platform == 'linux-gcc':
-        CXX = 'g++' # not quite right, but env is not yet available.
-        import commands
-        version = commands.getoutput('%s -dumpversion' %CXX)
-        platform = 'linux-gcc-%s' %version
-        print "Using platform '%s'" %platform
-        LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '')
-        LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform)
-        os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH
-        print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH
+        CXX = 'g++' # not quite right, but env is not yet available.
+        import commands
+        version = commands.getoutput('%s -dumpversion' %CXX)
+        platform = 'linux-gcc-%s' %version
+        print "Using platform '%s'" %platform
+        LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '')
+        LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform)
+        os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH
+        print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH
 except KeyError:
     print 'You must specify a "platform"'
     sys.exit(2)
@@ -95,6 +95,7 @@ env.Tool('doxygen')
 env.Tool('substinfile')
 env.Tool('targz')
 env.Tool('srcdist')
+env.Tool('glob')
 
 env.Append( CPPPATH = ['#include'], LIBPATH = lib_dir )
 
@@ -165,9 +166,6 @@ def runJSONTests_action( target, source = None, env = None ):
 def runJSONTests_string( target, source = None, env = None ):
     return 'RunJSONTests("%s")' % source
 
-##def buildDoc( doxyfile_path ):
-##    doc_cmd = env.Doxygen( doxyfile_path )
-
 import SCons.Action
 ActionFactory = SCons.Action.ActionFactory
 RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string )
@@ -182,4 +180,5 @@ env.Alias( 'src-dist', srcdist_cmd )
 
 buildProjectInDirectory( 'src/jsontestrunner' )
 buildProjectInDirectory( 'src/lib_json' )
 buildProjectInDirectory( 'doc' )
+#print env.Dump()
diff --git a/doc/roadmap.dox b/doc/roadmap.dox
index 2a0dcc6..84648b9 100644
--- a/doc/roadmap.dox
+++ b/doc/roadmap.dox
@@ -21,6 +21,8 @@
   - look into iconv, icu and windows API
 \section ms_strict Adds a strict mode to reader/parser
   Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627).
+  - Enforce only object or array as root element
+  - Disable comment support
 \section ms_separation Expose json reader/writer API that do not impose using Json::Value.
   Some typical use-case involve an application specific structure to/from a JSON document.
   - Performance oriented parser/writer:
diff --git a/doc/sconscript b/doc/sconscript
index 3e4205c..62b481e 100644
--- a/doc/sconscript
+++ b/doc/sconscript
@@ -1,26 +1,60 @@
 Import( 'env' )
 import os.path
 
-if 'doxygen' in env['TOOLS']:
-    doc_topdir = env['ROOTBUILD_DIR']
-    doxyfile = env.SubstInFile( '#doc/doxyfile', 'doxyfile.in',
-                                SUBST_DICT = {
-                                    '%JSONCPP_VERSION%' : env['JSONCPP_VERSION'],
-                                    '%TOPDIR%' : env.Dir('#').abspath,
-                                    '%DOC_TOPDIR%' : str(doc_topdir) } )
-    doc_cmd = env.Doxygen( doxyfile )
-    alias_doc_cmd = env.Alias('doc', doc_cmd )
-    env.AlwaysBuild(alias_doc_cmd)
+if 'doxygen' in env['TOOLS']:
+    doc_topdir = str(env['ROOTBUILD_DIR'])
+    html_dir = 'jsoncpp-api-doc'
 
-    for dir in doc_cmd:
-        env.Alias('doc', env.Install( '#' + dir.path, '#README.txt' ) )
-        filename = os.path.split(dir.path)[1]
-        targz_path = os.path.join( env['DIST_DIR'], '%s.tar.gz' % filename )
-        zip_doc_cmd = env.TarGz( targz_path, [env.Dir(dir)],
-                                 TARGZ_BASEDIR = doc_topdir )
-        env.Depends( zip_doc_cmd, alias_doc_cmd )
-        env.Alias( 'doc-dist', zip_doc_cmd )
+    doxygen_inputs = env.Glob( includes = '*.dox', dir = '#doc' ) \
+                     + env.Glob( includes = '*.h', dir = '#include/json/' ) \
+                     + env.Glob( includes = ('*.dox','*.h','*.inl','*.cpp'),
+                                 dir = '#src/lib_json' )
+##    for p in doxygen_inputs:
+##        print p.abspath
 
-    # When doxyfile gets updated, I get errors on the first pass.
-    # I have to run scons twice. Something is wrong with the dependencies
-    # here, but I avoid it by running "scons doc/doxyfile" first.
+    top_dir = env.Dir('#').abspath
+    include_top_dir = env.Dir('#include').abspath
+    env['DOXYFILE_DICT'] = { 'PROJECT_NAME': 'JsonCpp',
+                             'PROJECT_NUMBER': env['JSONCPP_VERSION'],
+                             'STRIP_FROM_PATH': top_dir,
+                             'STRIP_FROM_INC_PATH': include_top_dir,
+                             'HTML_OUTPUT': html_dir,
+                             'HTML_HEADER': env.File('#doc/header.html').abspath,
+                             'HTML_FOOTER': env.File('#doc/footer.html').abspath,
+                             'INCLUDE_PATH': include_top_dir,
+                             'PREDEFINED': 'JSONCPP_DOC_EXCLUDE_IMPLEMENTATION JSON_VALUE_USE_INTERNAL_MAP'
+                             }
+    env['DOXYFILE_FILE'] = 'doxyfile.in'
+    doxfile_nodes = env.Doxyfile( os.path.join( doc_topdir, 'doxyfile' ), doxygen_inputs )
+    html_doc_path = os.path.join( doc_topdir, html_dir )
+    doc_nodes = env.Doxygen( source = doxfile_nodes,
+                             target = os.path.join( html_doc_path, 'index.html' ) )
+    alias_doc_cmd = env.Alias('doc', doc_nodes )
+    env.Alias('doc', env.Install( html_doc_path, '#README.txt' ) )
+    targz_path = os.path.join( env['DIST_DIR'], '%s.tar.gz' % html_dir )
+    zip_doc_cmd = env.TarGz( targz_path, [env.Dir(html_doc_path)],
+                             TARGZ_BASEDIR = env['ROOTBUILD_DIR'] )
+    env.Depends( zip_doc_cmd, alias_doc_cmd )
+    env.Alias( 'doc-dist', zip_doc_cmd )
+##
+##    doxyfile = env.SubstInFile( '#doc/doxyfile', 'doxyfile.in',
+##                                SUBST_DICT = {
+##                                    '%JSONCPP_VERSION%' : env['JSONCPP_VERSION'],
+##                                    '%TOPDIR%' : env.Dir('#').abspath,
+##                                    '%DOC_TOPDIR%' : str(doc_topdir) } )
+##    doc_cmd = env.Doxygen( doxyfile )
+##    alias_doc_cmd = env.Alias('doc', doc_cmd )
+##    env.AlwaysBuild(alias_doc_cmd)
+##
+##    for dir in doc_cmd:
+##        env.Alias('doc', env.Install( '#' + dir.path, '#README.txt' ) )
+##        filename = os.path.split(dir.path)[1]
+##        targz_path = os.path.join( env['DIST_DIR'], '%s.tar.gz' % filename )
+##        zip_doc_cmd = env.TarGz( targz_path, [env.Dir(dir)],
+##                                 TARGZ_BASEDIR = doc_topdir )
+##        env.Depends( zip_doc_cmd, alias_doc_cmd )
+##        env.Alias( 'doc-dist', zip_doc_cmd )
+##
+##    # When doxyfile gets updated, I get errors on the first pass.
+##    # I have to run scons twice. Something is wrong with the dependencies
+##    # here, but I avoid it by running "scons doc/doxyfile" first.
diff --git a/scons-tools/doxygen.py b/scons-tools/doxygen.py
index 7b8dee4..a03314e 100644
--- a/scons-tools/doxygen.py
+++ b/scons-tools/doxygen.py
@@ -2,201 +2,112 @@
 # emitter depends on doxyfile which is generated from doxyfile.in.
 # build fails after cleaning and relaunching the build.
 
+# Todo:
+# Add helper function to environment like for glob
+# Easier passage of header/footer
+# Automatic deduction of index.html path based on custom parameters passed to doxyfile
+
 import os
 import os.path
 import glob
 from fnmatch import fnmatch
+import SCons
 
-def DoxyfileParse(file_contents):
+def Doxyfile_emitter(target, source, env):
     """
-    Parse a Doxygen source file and return a dictionary of all the values.
-    Values will be strings and lists of strings.
+    Modify the target and source lists to use the defaults if nothing
+    else has been specified.
+
+    The doxyfile template (DOXYFILE_FILE) is inserted at the front of the
+    source list so the doxyfile is regenerated when the template changes.
     """
-    data = {}
+    doxyfile_template = env.File(env['DOXYFILE_FILE'])
+    source.insert(0, doxyfile_template)
 
-    import shlex
-    lex = shlex.shlex(instream = file_contents, posix = True)
-    lex.wordchars += "*+./-:"
-    lex.whitespace = lex.whitespace.replace("\n", "")
-    lex.escape = ""
+    return target, source
 
-    lineno = lex.lineno
-    last_backslash_lineno = lineno
-    token = lex.get_token()
-    key = token # the first token should be a key
-    last_token = ""
-    key_token = False
-    next_key = False
-    new_data = True
+def Doxyfile_Builder(target, source, env):
+    """Input:
+    DOXYFILE_FILE
+        Path of the template file for the output doxyfile
 
-    def append_data(data, key, new_data, token):
-        if new_data or len(data[key]) == 0:
-            data[key].append(token)
-        else:
-            data[key][-1] += token
-
-    while token:
-        if token in ['\n']:
-            if last_token not in ['\\']:
-                key_token = True
-        elif token in ['\\']:
-            pass
-        elif key_token:
-            key = token
-            key_token = False
-        else:
-            if token == "+=":
-                if not data.has_key(key):
-                    data[key] = list()
-            elif token == "=":
-                data[key] = list()
-            else:
-                append_data( data, key, new_data, token )
-                new_data = True
-
-        last_token = token
-        token = lex.get_token()
-
-        if last_token == '\\' and token != '\n':
-            new_data = False
-            append_data( data, key, new_data, '\\' )
-
-    # compress lists of len 1 into single strings
-    for (k, v) in data.items():
-        if len(v) == 0:
-            data.pop(k)
-
-        # items in the following list will be kept as lists and not converted to strings
-        if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]:
-            continue
-
-        if len(v) == 1:
-            data[k] = v[0]
-
-    return data
-
-def DoxySourceScan(node, env, path):
+    DOXYFILE_DICT
+        A dictionary of parameters to append to the generated doxyfile
     """
-    Doxygen Doxyfile source scanner. This should scan the Doxygen file and add
-    any files used to generate docs to the list of source files.
- """ - default_file_patterns = [ - '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', - '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', - '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', - '*.py', - ] - - default_exclude_patterns = [ - '*~', - ] - - sources = [] - - data = DoxyfileParse(node.get_contents()) - - if data.get("RECURSIVE", "NO") == "YES": - recursive = True - else: - recursive = False - - file_patterns = data.get("FILE_PATTERNS", default_file_patterns) - exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) - - doxyfile_dir = str( node.dir ) - -## print 'running from', os.getcwd() - for node in data.get("INPUT", []): - node_real_path = os.path.normpath( os.path.join( doxyfile_dir, node ) ) - if os.path.isfile(node_real_path): -## print str(node), 'is a file' - sources.append(node) - elif os.path.isdir(node_real_path): -## print str(node), 'is a directory' - if recursive: - for root, dirs, files in os.walk(node): - for f in files: - filename = os.path.join(root, f) - - pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) - exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) - - if pattern_check and not exclude_check: - sources.append(filename) -## print ' adding source', os.path.abspath( filename ) - else: - for pattern in file_patterns: - sources.extend(glob.glob(os.path.join( node, pattern))) -## else: -## print str(node), 'is neither a file nor a directory' - sources = map( lambda path: env.File(path), sources ) - return sources - - -def DoxySourceScanCheck(node, env): - """Check if we should scan this file""" - return os.path.isfile(node.path) - -def DoxyEmitter(source, target, env): - """Doxygen Doxyfile emitter""" - # possible output formats and their default values and output locations - output_formats = { - "HTML": ("YES", "html"), - "LATEX": ("YES", "latex"), - "RTF": ("NO", "rtf"), - "MAN": ("YES", "man"), - "XML": ("NO", "xml"), - } - -## print '#### DoxyEmitter:', source[0].abspath, os.path.exists( source[0].abspath ) - data = DoxyfileParse(source[0].get_contents()) - - targets = [] - out_dir = data.get("OUTPUT_DIRECTORY", ".") - - # add our output locations - for (k, v) in output_formats.items(): - if data.get("GENERATE_" + k, v[0]) == "YES": - targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) - - # don't clobber targets - for node in targets: - env.Precious(node) - - # set up cleaning stuff - for node in targets: - clean_cmd = env.Clean(node, node) - env.Depends( clean_cmd, source ) - - return (targets, source) + subdir = os.path.split(source[0].abspath)[0] + doc_top_dir = os.path.split(target[0].abspath)[0] + doxyfile_path = source[0].abspath + doxy_file = file( target[0].abspath, 'wt' ) + try: + # First, output the template file + try: + f = file(doxyfile_path, 'rt') + doxy_file.write( f.read() ) + f.close() + doxy_file.write( '\n' ) + doxy_file.write( '# Generated content:\n' ) + except: + raise SCons.Errors.UserError, "Can't read doxygen template file '%s'" % doxyfile_path + # Then, the input files + doxy_file.write( 'INPUT = \\\n' ) + for source in source: + if source.abspath != doxyfile_path: # skip doxyfile path, which is the first source + doxy_file.write( '"%s" \\\n' % source.abspath ) + doxy_file.write( '\n' ) + # Dot... 
+        values_dict = { 'HAVE_DOT': env.get('DOT') and 'YES' or 'NO',
+                        'DOT_PATH': env.get('DOT') and os.path.split(env['DOT'])[0] or '',
+                        'OUTPUT_DIRECTORY': doc_top_dir,
+                        'WARN_LOGFILE': target[0].abspath + '-warning.log'}
+        values_dict.update( env['DOXYFILE_DICT'] )
+        # Finally, output user dictionary values which override any of the previously set parameters.
+        for key, value in values_dict.iteritems():
+            doxy_file.write ('%s = "%s"\n' % (key, str(value)))
+    finally:
+        doxy_file.close()
 
 def generate(env):
     """
     Add builders and construction variables for the
-    Doxygen tool. This is currently for Doxygen 1.4.6.
+    Doxygen tool.
     """
-    doxyfile_scanner = env.Scanner(
-        DoxySourceScan,
-        "DoxySourceScan",
-        scan_check = DoxySourceScanCheck,
-    )
+    ## Doxyfile builder
+    def doxyfile_message (target, source, env):
+        return "creating Doxygen config file '%s'" % target[0]
 
-    doxyfile_builder = env.Builder(
-        action = env.Action("cd ${SOURCE.dir} && ${DOXYGEN} ${SOURCE.file}",
-                            varlist=['$SOURCES']),
-        emitter = DoxyEmitter,
-        target_factory = env.fs.Entry,
-        single_source = True,
-        source_scanner = doxyfile_scanner,
-    )
+    doxyfile_variables = [
+        'DOXYFILE_DICT',
+        'DOXYFILE_FILE'
+        ]
 
-    env.Append(BUILDERS = {
-        'Doxygen': doxyfile_builder,
-    })
+    doxyfile_action = SCons.Action.Action( Doxyfile_Builder, doxyfile_message,
+                                           doxyfile_variables )
 
-    env.AppendUnique(
-        DOXYGEN = 'doxygen',
-    )
+    doxyfile_builder = SCons.Builder.Builder( action = doxyfile_action,
+                                              emitter = Doxyfile_emitter )
+
+    env['BUILDERS']['Doxyfile'] = doxyfile_builder
+    env['DOXYFILE_DICT'] = {}
+    env['DOXYFILE_FILE'] = 'doxyfile.in'
+
+    ## Doxygen builder
+    def Doxygen_emitter(target, source, env):
+        output_dir = str( source[0].dir )
+        if str(target[0]) == str(source[0]):
+            target = env.File( os.path.join( output_dir, 'html', 'index.html' ) )
+        return target, source
+
+    doxygen_action = SCons.Action.Action( [ '$DOXYGEN_COM'] )
+    doxygen_builder = SCons.Builder.Builder( action = doxygen_action,
+                                             emitter = Doxygen_emitter )
+    env['BUILDERS']['Doxygen'] = doxygen_builder
+    env['DOXYGEN_COM'] = '$DOXYGEN $DOXYGEN_FLAGS $SOURCE'
+    env['DOXYGEN_FLAGS'] = ''
+    env['DOXYGEN'] = 'doxygen'
+
+    dot_path = env.WhereIs("dot")
+    if dot_path:
+        env['DOT'] = dot_path
 
 def exists(env):
     """
diff --git a/scons-tools/glob.py b/scons-tools/glob.py
new file mode 100644
index 0000000..8ee3cbb
--- /dev/null
+++ b/scons-tools/glob.py
@@ -0,0 +1,53 @@
+import fnmatch
+import os
+
+def generate( env ):
+    def Glob( env, includes = None, excludes = None, dir = '.' ):
+        """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.')
+        helper function to environment.
+
+        Globs the file-system files in the given directory.
+
+        includes: list of file name patterns included in the returned list when matched.
+        excludes: list of file name patterns excluded from the returned list.
+
+        Example:
+            sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" )
+        """
+        def filterFilename(path):
+            abs_path = os.path.join( dir, path )
+            if not os.path.isfile(abs_path):
+                return 0
+            fn = os.path.basename(path)
+            match = 0
+            for include in includes:
+                if fnmatch.fnmatchcase( fn, include ):
+                    match = 1
+                    break
+            if match == 1 and not excludes is None:
+                for exclude in excludes:
+                    if fnmatch.fnmatchcase( fn, exclude ):
+                        match = 0
+                        break
+            return match
+        if includes is None:
+            includes = ('*',)
+        elif type(includes) in ( type(''), type(u'') ):
+            includes = (includes,)
+        if type(excludes) in ( type(''), type(u'') ):
+            excludes = (excludes,)
+        dir = env.Dir(dir).abspath
+        paths = os.listdir( dir )
+        def makeAbsFileNode( path ):
+            return env.File( os.path.join( dir, path ) )
+        nodes = filter( filterFilename, paths )
+        return map( makeAbsFileNode, nodes )
+
+    from SCons.Script import Environment
+    Environment.Glob = Glob
+
+def exists(env):
+    """
+    Tool always exists.
+    """
+    return True
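
Usage sketch (not part of the patch): a minimal SConscript showing how the new glob and doxygen tools are meant to be combined, mirroring the doc/sconscript change above. env.Glob, env.Doxyfile, env.Doxygen, DOXYFILE_DICT, DOXYFILE_FILE and ROOTBUILD_DIR come from this patch; the project name, HTML output folder name, input patterns and the presence of a local doxyfile.in template are illustrative assumptions, and the tools are assumed to have been loaded by the SConstruct (env.Tool('doxygen'), env.Tool('glob')) as done above.

# Hypothetical SConscript (illustrative only); assumes the SConstruct already
# registered the 'doxygen' and 'glob' tools from scons-tools/.
Import( 'env' )
import os.path

if 'doxygen' in env['TOOLS']:
    doc_topdir = str(env['ROOTBUILD_DIR'])   # same build-root variable used by doc/sconscript
    html_dir = 'example-api-doc'             # assumed HTML output folder name

    # Collect the documentation inputs with the new Glob helper.
    doxygen_inputs = env.Glob( includes = ('*.dox', '*.h'), dir = '#doc' )

    # Parameters appended by Doxyfile_Builder after the doxyfile.in template content;
    # later assignments override any value the template already defines.
    env['DOXYFILE_DICT'] = { 'PROJECT_NAME': 'Example',
                             'HTML_OUTPUT': html_dir }
    env['DOXYFILE_FILE'] = 'doxyfile.in'     # template read from this SConscript's directory

    # Generate the doxyfile, then run doxygen on it and alias the result to 'doc'.
    doxyfile = env.Doxyfile( os.path.join( doc_topdir, 'doxyfile' ), doxygen_inputs )
    doc = env.Doxygen( source = doxyfile,
                       target = os.path.join( doc_topdir, html_dir, 'index.html' ) )
    env.Alias( 'doc', doc )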