2002-03-27 08:20:32 -08:00
|
|
|
|
|
|
|
{
|
|
|
|
(***********************************************************************)
|
|
|
|
(* OCamldoc *)
|
|
|
|
(* *)
|
|
|
|
(* Maxence Guesdon, projet Cristal, INRIA Rocquencourt *)
|
|
|
|
(* *)
|
|
|
|
(* Copyright 2001 Institut National de Recherche en Informatique et *)
|
|
|
|
(* en Automatique. All rights reserved. This file is distributed *)
|
|
|
|
(* under the terms of the Q Public License version 1.0. *)
|
|
|
|
(* *)
|
|
|
|
(***********************************************************************)
|
|
|
|
|
2003-11-24 02:44:07 -08:00
|
|
|
(* $Id$ *)
|
|
|
|
|
2002-03-27 08:20:32 -08:00
|
|
|
(** Generation of html code to display OCaml code. *)
|
2008-01-11 08:13:18 -08:00
|
|
|
open Lexing
|
2002-03-27 08:20:32 -08:00
|
|
|
|
|
|
|
(** Raised by [fatal_error] after the message has been reported. *)
exception Fatal_error

(** Report [msg] on stderr, prefixed with ">> Fatal error: ", then raise
    [Fatal_error]. *)
let fatal_error msg =
  prerr_string ">> Fatal error: ";
  prerr_endline msg;
  raise Fatal_error
|
|
|
|
|
|
|
|
(** The different kinds of errors the lexer can signal. *)
type error =
  | Illegal_character of char            (* unexpected character in the input *)
  | Unterminated_comment                 (* end of input inside a comment *)
  | Unterminated_string                  (* end of input inside a string *)
  | Unterminated_string_in_comment       (* unclosed string inside a comment *)
  | Keyword_as_label of string           (* keyword used as a ~ or ? label *)
;;

(** Raised on a lexing error, carrying the error and the start and end
    character positions of the offending input. *)
exception Error of error * int * int
|
|
|
|
|
2008-01-11 08:13:18 -08:00
|
|
|
(** Replacements applied to every output string: the characters meaningful
    in html are rewritten to their character entities.
    (The entity text had been lost — the pairs mapped each character to
    itself, making the escaping a no-op and producing broken html.) *)
let base_escape_strings = [
    ("&", "&amp;") ;
    ("<", "&lt;") ;
    (">", "&gt;") ;
  ]

(** Additional replacements used when generating PRE-style output: spaces,
    newlines and tabs must stay visible in html, so they become
    non-breaking spaces and <br> tags (a tab counts for eight columns,
    matching the margin handling in the lexer). *)
let pre_escape_strings = [
    (" ", "&nbsp;") ;
    ("\n", "<br>\n") ;
    ("\t", "&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;") ;
  ]
|
2002-03-27 08:20:32 -08:00
|
|
|
|
|
|
|
|
|
|
|
(* When true, [escape] also applies the PRE-style replacements. *)
let pre = ref false

(* Formatter receiving all the generated html. *)
let fmt = ref Format.str_formatter

(** Escape the strings which would clash with html syntax,
    and some other strings if we want to get a PRE style. *)
let escape s =
  let tables =
    if !pre then base_escape_strings @ pre_escape_strings
    else base_escape_strings
  in
  List.fold_left
    (fun acc (re, repl) -> Str.global_replace (Str.regexp re) repl acc)
    s
    tables

(** Escape the strings which would clash with html syntax. *)
let escape_base s =
  List.fold_left
    (fun acc (re, repl) -> Str.global_replace (Str.regexp re) repl acc)
    s
    base_escape_strings
|
|
|
|
|
|
|
|
(** The output functions *)
|
|
|
|
|
2008-01-11 08:13:18 -08:00
|
|
|
(** Output [s] on the current formatter, html-escaping it unless [esc]
    is [false]. *)
let print ?(esc=true) s =
  let text = if esc then escape s else s in
  Format.pp_print_string !fmt text
;;

(** Output [s] wrapped in a <span> carrying the css class [cl];
    [s] is escaped unless [esc] is [false]. *)
let print_class ?(esc=true) cl s =
  let body = if esc then escape s else s in
  print ~esc: false ("<span class=\""^cl^"\">"^body^"</span>")
;;
|
|
|
|
|
|
|
|
(** The table of keywords with colors *)
|
|
|
|
(** [create_hashtable size init] builds a hash table with initial size
    [size], populated with every (key, data) pair of [init]. *)
let create_hashtable size init =
  let tbl = Hashtbl.create size in
  let insert (key, data) = Hashtbl.add tbl key data in
  List.iter insert init;
  tbl
|
|
|
|
|
|
|
|
(** The function used to return html code for the given comment body. *)
|
2008-01-11 08:13:18 -08:00
|
|
|
(** The function used to return html code for the given comment body.
    This reference is replaced at runtime by the real renderer; the
    placeholder below just reports that it was never installed. *)
let html_of_comment = ref
    (fun (_ : string) -> "<b>Odoc_ocamlhtml.html_of_comment not initialized</b>")
|
2002-03-27 08:20:32 -08:00
|
|
|
|
|
|
|
(** Table mapping each OCaml keyword (including the infix-like keywords
    such as [mod] and [lsl]) to the css class used to display it. *)
let keyword_table =
  let tbl = Hashtbl.create 149 in
  let keywords = [
    "and"; "as"; "assert"; "begin"; "class"; "constraint"; "do"; "done";
    "downto"; "else"; "end"; "exception"; "external"; "false"; "for";
    "fun"; "function"; "functor"; "if"; "in"; "include"; "inherit";
    "initializer"; "lazy"; "let"; "match"; "method"; "module"; "mutable";
    "new"; "object"; "of"; "open"; "or"; "parser"; "private"; "rec";
    "sig"; "struct"; "then"; "to"; "true"; "try"; "type"; "val";
    "virtual"; "when"; "while"; "with";
    (* infix-like keywords *)
    "mod"; "land"; "lor"; "lxor"; "lsl"; "lsr"; "asr";
  ] in
  List.iter (fun kw -> Hashtbl.add tbl kw "keyword") keywords;
  tbl
|
|
|
|
|
|
|
|
(* Css classes used for the various syntactic categories. *)
let kwsign_class = "keywordsign"
let constructor_class = "constructor"
let comment_class = "comment"
let string_class = "string"
let code_class = "code"
|
|
|
|
|
|
|
|
|
|
|
|
(** To buffer and print comments *)
|
|
|
|
|
|
|
|
|
|
|
|
(* Current left margin, in columns, of the code being printed; maintained
   by the [token] rule and used to align ocamldoc comments. *)
let margin = ref 0

(* Buffer accumulating the characters of the comment being lexed. *)
let comment_buffer = Buffer.create 32

let reset_comment_buffer () = Buffer.reset comment_buffer
let store_comment_char c = Buffer.add_char comment_buffer c
let add_comment_string s = Buffer.add_string comment_buffer s
|
2002-03-27 08:20:32 -08:00
|
|
|
|
|
|
|
(** Return a string of [!margin] spaces, used to indent printed comments.
    Replaces the original O(n^2) string-concatenation loop with
    [String.make]; non-positive margins yield the empty string, exactly
    as before. *)
let make_margin () = String.make (max 0 !margin) ' '
|
|
|
|
|
|
|
|
(** Print the comment accumulated in [comment_buffer].  A comment whose
    body starts with '*' is an ocamldoc comment: its body is rendered
    through [!html_of_comment] and laid out in a table aligned on the
    current margin.  Any other comment — or an ocamldoc comment whose
    rendering raises — is emitted verbatim, escaped, with the comment
    css class. *)
let print_comment () =
  let s = Buffer.contents comment_buffer in
  let len = String.length s in
  (* Fallback rendering: the raw comment, html-escaped. *)
  let plain () =
    "<span class=\""^comment_class^"\">(*"^(escape s)^"*)</span>"
  in
  let code =
    if len >= 1 && s.[0] = '*' then
      (
       try
         let html = !html_of_comment (String.sub s 1 (len-1)) in
         "</code><table><tr><td>"^(make_margin ())^"</td><td>"^
         "<span class=\""^comment_class^"\">"^
         "(**"^html^"*)"^
         "</span></td></tr></table><code class=\""^code_class^"\">"
       with
         e ->
           (* The renderer failed: report and fall back to the raw text. *)
           prerr_endline (Printexc.to_string e);
           plain ()
      )
    else
      plain ()
  in
  print ~esc: false code
|
2002-03-27 08:20:32 -08:00
|
|
|
|
|
|
|
(** To buffer string literals *)
|
|
|
|
|
|
|
|
(* Buffer accumulating the characters of the string literal being lexed. *)
let string_buffer = Buffer.create 32

let reset_string_buffer () = Buffer.reset string_buffer
let store_string_char c = Buffer.add_char string_buffer c
let get_stored_string () = Buffer.contents string_buffer
|
2002-03-27 08:20:32 -08:00
|
|
|
|
|
|
|
(** To translate escape sequences *)
|
|
|
|
|
2004-01-16 07:24:03 -08:00
|
|
|
(** Translate the character following a backslash escape to the character
    it denotes; any character without a special meaning denotes itself. *)
let char_for_backslash c =
  match c with
  | 'n' -> '\n'
  | 'r' -> '\r'
  | 'b' -> '\b'
  | 't' -> '\t'
  | other -> other
|
2002-03-27 08:20:32 -08:00
|
|
|
|
|
|
|
(** Decode a 3-digit decimal escape (\DDD) whose digits sit at offsets
    [i], [i+1] and [i+2] of the current lexeme.  The value is truncated
    to a byte. *)
let char_for_decimal_code lexbuf i =
  let digit k = Char.code (Lexing.lexeme_char lexbuf k) - Char.code '0' in
  let c = 100 * digit i + 10 * digit (i+1) + digit (i+2) in
  Char.chr (c land 0xFF)
|
|
|
|
|
2008-12-03 10:09:09 -08:00
|
|
|
(** Decode a 2-digit hexadecimal escape (\xHH) whose digits sit at offsets
    [i] and [i+1] of the current lexeme.  The value is truncated to a byte.
    Fixed: the original applied the decimal-digit formula (code - 48) to
    both digits, so 'A'..'F' and 'a'..'f' decoded to the wrong values
    (e.g. 'A' gave 17 instead of 10). *)
let char_for_hexa_code lexbuf i =
  let hex_value c =
    match c with
    | '0'..'9' -> Char.code c - Char.code '0'
    | 'a'..'f' -> Char.code c - Char.code 'a' + 10
    | 'A'..'F' -> Char.code c - Char.code 'A' + 10
    | _ -> Char.code c - 48  (* historic fallback for non-hex input *)
  in
  let c = 16 * hex_value (Lexing.lexeme_char lexbuf i)
          + hex_value (Lexing.lexeme_char lexbuf (i+1)) in
  Char.chr (c land 0xFF)
|
|
|
|
|
2002-03-27 08:20:32 -08:00
|
|
|
(** To store the position of the beginning of a string and comment *)
|
|
|
|
(* Start position of the string literal currently being lexed. *)
let string_start_pos = ref 0;;
(* Start positions of the comments currently open, innermost first. *)
let comment_start_pos = ref [];;
(* Whether the lexer is currently inside a comment. *)
let in_comment () = match !comment_start_pos with [] -> false | _ -> true;;
|
|
|
|
|
|
|
|
(** Error report *)
|
|
|
|
|
|
|
|
open Format
|
|
|
|
|
|
|
|
(** Print on formatter [ppf] a human-readable description of a lexer
    error. *)
let report_error ppf error =
  match error with
  | Illegal_character c ->
      Format.fprintf ppf "Illegal character (%s)" (Char.escaped c)
  | Unterminated_comment ->
      Format.fprintf ppf "Comment not terminated"
  | Unterminated_string ->
      Format.fprintf ppf "String literal not terminated"
  | Unterminated_string_in_comment ->
      Format.fprintf ppf "This comment contains an unterminated string literal"
  | Keyword_as_label kwd ->
      Format.fprintf ppf "`%s' is a keyword, it cannot be used as label name" kwd
;;
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
(* Named character classes and literal regular expressions used by the
   lexer rules below (ocamllex abbreviations).  The byte ranges above 127
   cover ISO-8859-1 accented letters. *)
let blank = [' ' '\010' '\013' '\009' '\012']
let lowercase = ['a'-'z' '\223'-'\246' '\248'-'\255' '_']
let uppercase = ['A'-'Z' '\192'-'\214' '\216'-'\222']
let identchar =
  ['A'-'Z' 'a'-'z' '_' '\192'-'\214' '\216'-'\246' '\248'-'\255' '\'' '0'-'9']
let symbolchar =
  ['!' '$' '%' '&' '*' '+' '-' '.' '/' ':' '<' '=' '>' '?' '@' '^' '|' '~']
(* Integer literals in the four supported bases, then floats. *)
let decimal_literal = ['0'-'9']+
let hex_literal = '0' ['x' 'X'] ['0'-'9' 'A'-'F' 'a'-'f']+
let oct_literal = '0' ['o' 'O'] ['0'-'7']+
let bin_literal = '0' ['b' 'B'] ['0'-'1']+
let float_literal =
  ['0'-'9']+ ('.' ['0'-'9']* )? (['e' 'E'] ['+' '-']? ['0'-'9']+)?
|
|
|
|
|
|
|
|
(* Main entry point of the lexer: prints the html rendering of each token
   of the input, dispatching on its syntactic category, and recurses until
   end of input. *)
rule token = parse
  blank
    {
      (* Maintain the current left margin so ocamldoc comments can be
         aligned: a space adds one column, a tab eight, a newline resets. *)
      let s = Lexing.lexeme lexbuf in
      (
       match s with
         " " -> incr margin
       | "\t" -> margin := !margin + 8
       | "\n" -> margin := 0
       | _ -> ()
      );
      print s;
      token lexbuf
    }
| "_"
    { print "_" ; token lexbuf }
| "~" { print "~" ; token lexbuf }
| "~" lowercase identchar * ':'
    (* A ~label: — using a keyword as a label name is an error. *)
    { let s = Lexing.lexeme lexbuf in
      let name = String.sub s 1 (String.length s - 2) in
      if Hashtbl.mem keyword_table name then
        raise (Error(Keyword_as_label name, Lexing.lexeme_start lexbuf,
                     Lexing.lexeme_end lexbuf));
      print s ; token lexbuf }
| "?" { print "?" ; token lexbuf }
| "?" lowercase identchar * ':'
    (* An optional ?label: — same keyword restriction as above. *)
    { let s = Lexing.lexeme lexbuf in
      let name = String.sub s 1 (String.length s - 2) in
      if Hashtbl.mem keyword_table name then
        raise (Error(Keyword_as_label name, Lexing.lexeme_start lexbuf,
                     Lexing.lexeme_end lexbuf));
      print s ; token lexbuf }
| lowercase identchar *
    (* Lowercase identifier: printed with its css class when it is a
       keyword, plainly otherwise. *)
    { let s = Lexing.lexeme lexbuf in
      try
        let cl = Hashtbl.find keyword_table s in
        (print_class cl s ; token lexbuf )
      with Not_found ->
        (print s ; token lexbuf )}
| uppercase identchar *
    { print_class constructor_class (Lexing.lexeme lexbuf) ; token lexbuf } (* No capitalized keywords *)
| decimal_literal | hex_literal | oct_literal | bin_literal
    { print (Lexing.lexeme lexbuf) ; token lexbuf }
| float_literal
    { print (Lexing.lexeme lexbuf) ; token lexbuf }
| "\""
    (* String literal: lex it entirely with the [string] rule, then print
       it with the string css class; lex_start_pos is moved back so the
       whole literal forms the current lexeme. *)
    { reset_string_buffer();
      let string_start = Lexing.lexeme_start lexbuf in
      string_start_pos := string_start;
      string lexbuf;
      lexbuf.Lexing.lex_start_pos <-
        string_start - lexbuf.Lexing.lex_abs_pos;
      print_class string_class ("\""^(get_stored_string())^"\"") ;
      token lexbuf }
| "'" [^ '\\' '\''] "'"
    { print_class string_class (Lexing.lexeme lexbuf) ;
      token lexbuf }
| "'" '\\' ['\\' '\'' 'n' 't' 'b' 'r'] "'"
    { print_class string_class (Lexing.lexeme lexbuf ) ;
      token lexbuf }
| "'" '\\' ['0'-'9'] ['0'-'9'] ['0'-'9'] "'"
    { print_class string_class (Lexing.lexeme lexbuf ) ;
      token lexbuf }
| "(*"
    (* Comment: buffer its text with the [comment] rule, then render it
       (ocamldoc (** comments are rendered specially by print_comment). *)
    {
      reset_comment_buffer ();
      comment_start_pos := [Lexing.lexeme_start lexbuf];
      comment lexbuf ;
      print_comment ();
      token lexbuf }
| "(*)"
    { reset_comment_buffer ();
      comment_start_pos := [Lexing.lexeme_start lexbuf];
      comment lexbuf ;
      print_comment ();
      token lexbuf
    }
| "*)"
    (* Stray "*)": push the ')' back into the buffer (both lex_curr_pos
       and the position record are rewound) so only '*' is printed. *)
    { lexbuf.Lexing.lex_curr_pos <- lexbuf.Lexing.lex_curr_pos - 1;
      lexbuf.Lexing.lex_curr_p <-
        { lexbuf.Lexing.lex_curr_p with
          pos_cnum = lexbuf.Lexing.lex_curr_p.pos_cnum - 1
        } ;
      print (Lexing.lexeme lexbuf) ;
      token lexbuf
    }
| "#" [' ' '\t']* ['0'-'9']+ [^ '\n' '\r'] * ('\n' | '\r' | "\r\n")
    (* # linenum ... *)
    {
      print (Lexing.lexeme lexbuf);
      token lexbuf
    }
(* Punctuation and operators: "keyword signs" get the kwsign css class,
   the rest is printed plainly. *)
| "#" { print_class kwsign_class (Lexing.lexeme lexbuf) ; token lexbuf }
| "&" { print_class kwsign_class (Lexing.lexeme lexbuf) ; token lexbuf }
| "&&" { print_class kwsign_class (Lexing.lexeme lexbuf) ; token lexbuf }
| "`" { print_class kwsign_class (Lexing.lexeme lexbuf) ; token lexbuf }
| "'" { print_class kwsign_class (Lexing.lexeme lexbuf) ; token lexbuf }
| "(" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| ")" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| "*" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| "," { print (Lexing.lexeme lexbuf) ; token lexbuf }
| "??" { print_class kwsign_class (Lexing.lexeme lexbuf) ; token lexbuf }
| "->" { print_class kwsign_class (Lexing.lexeme lexbuf) ; token lexbuf }
| "." { print (Lexing.lexeme lexbuf) ; token lexbuf }
| ".." { print (Lexing.lexeme lexbuf) ; token lexbuf }
| ":" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| "::" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| ":=" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| ":>" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| ";" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| ";;" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| "<" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| "<-" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| "=" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| "[" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| "[|" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| "[<" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| "]" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| "{" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| "{<" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| "|" { print_class kwsign_class (Lexing.lexeme lexbuf) ; token lexbuf }
| "||" { print_class kwsign_class (Lexing.lexeme lexbuf) ; token lexbuf }
| "|]" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| ">" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| ">]" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| "}" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| ">}" { print (Lexing.lexeme lexbuf) ; token lexbuf }

| "!=" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| "+" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| "-" { print (Lexing.lexeme lexbuf) ; token lexbuf }
| "-." { print (Lexing.lexeme lexbuf) ; token lexbuf }

(* Operator families, dispatched on their leading character(s). *)
| "!" symbolchar *
    { print (Lexing.lexeme lexbuf) ; token lexbuf }
| ['~' '?'] symbolchar +
    { print_class kwsign_class (Lexing.lexeme lexbuf) ; token lexbuf }
| ['=' '<' '>' '|' '&' '$'] symbolchar *
    { print (Lexing.lexeme lexbuf) ; token lexbuf }
| ['@' '^'] symbolchar *
    { print (Lexing.lexeme lexbuf) ; token lexbuf }
| ['+' '-'] symbolchar *
    { print (Lexing.lexeme lexbuf) ; token lexbuf }
| "**" symbolchar *
    { print (Lexing.lexeme lexbuf) ; token lexbuf }
| ['*' '/' '%'] symbolchar *
    { print (Lexing.lexeme lexbuf) ; token lexbuf }
| eof { () }
| _
    (* Anything else is a lexing error. *)
    { raise (Error(Illegal_character ((Lexing.lexeme lexbuf).[0]),
                   Lexing.lexeme_start lexbuf, Lexing.lexeme_end lexbuf)) }
|
|
|
|
|
|
|
|
(* Lex the body of a comment into [comment_buffer], handling nesting via
   [comment_start_pos] (one entry per open comment, innermost first). *)
and comment = parse
    "(*"
    (* Nested comment: record its start and keep its delimiters. *)
    { comment_start_pos := Lexing.lexeme_start lexbuf :: !comment_start_pos;
      store_comment_char '(';
      store_comment_char '*';
      comment lexbuf;
    }
| "*)"
    (* Close the innermost comment; only the outermost "*)" ends lexing. *)
    { match !comment_start_pos with
      | [] -> assert false
      | [x] -> comment_start_pos := []
      | _ :: l ->
          store_comment_char '*';
          store_comment_char ')';
          comment_start_pos := l;
          comment lexbuf;
    }
(* These filters are useless
| "\""
    { reset_string_buffer();
      string_start_pos := Lexing.lexeme_start lexbuf;
      store_comment_char '"';
      begin
        try string lexbuf; add_comment_string ((get_stored_string()^"\""))
        with Error (Unterminated_string, _, _) ->
          let st = List.hd !comment_start_pos in
          raise (Error (Unterminated_string_in_comment, st, st + 2))
      end;
      comment lexbuf }
| "'" [^ '\\' '\''] "'"
    {
      store_comment_char '\'';
      store_comment_char (Lexing.lexeme_char lexbuf 1);
      store_comment_char '\'';
      comment lexbuf }
| "'\\" ['\\' '\'' 'n' 't' 'b' 'r'] "'"
    {
      store_comment_char '\'';
      store_comment_char '\\';
      store_comment_char(char_for_backslash(Lexing.lexeme_char lexbuf 1)) ;
      store_comment_char '\'';
      comment lexbuf }
| "\\" ['0'-'9'] ['0'-'9'] ['0'-'9']
    {
      store_comment_char(char_for_decimal_code lexbuf 1);
      comment lexbuf }
| "\\x" ['0'-'9' 'A'-'Z' 'a'-'z' ] ['0'-'9' 'A'-'Z' 'a'-'z']
    {
      store_comment_char(char_for_hexa_code lexbuf 2);
      string lexbuf }
| "''"
    {
      store_comment_char '\'';
      store_comment_char '\'';
      comment lexbuf }
*)
| eof
    (* End of input inside a comment: report it at the innermost start. *)
    { let st = List.hd !comment_start_pos in
      raise (Error (Unterminated_comment, st, st + 2));
    }
| _
    { store_comment_char(Lexing.lexeme_char lexbuf 0);
      comment lexbuf }
|
2002-03-27 08:20:32 -08:00
|
|
|
|
|
|
|
(* Lex the body of a string literal into [string_buffer]; escape sequences
   are kept verbatim since the text is only re-displayed, not evaluated. *)
and string = parse
    '"'
    { () }
| '\\' ("\010" | "\013" | "\013\010") [' ' '\009'] *
    (* Line continuation: a backslash-newline and the following
       indentation are dropped, as in OCaml string syntax. *)
    { string lexbuf }
| '\\' ['\\' '"' 'n' 't' 'b' 'r' ]
    { Buffer.add_string string_buffer (Lexing.lexeme lexbuf) ;
      string lexbuf }
| '\\' ['0'-'9'] ['0'-'9'] ['0'-'9']
    {
      Buffer.add_string string_buffer (Lexing.lexeme lexbuf) ;
      string lexbuf
    }
| '\\' 'x' ['0'-'9' 'A'-'Z' 'a'-'z' ] ['0'-'9' 'A'-'Z' 'a'-'z']
    { Buffer.add_string string_buffer (Lexing.lexeme lexbuf) ;
      string lexbuf }
| eof
    { raise (Error (Unterminated_string,
                    !string_start_pos, !string_start_pos+1)) }
| _
    { store_string_char(Lexing.lexeme_char lexbuf 0);
      string lexbuf }
|
|
|
|
{
|
|
|
|
|
2004-03-14 05:52:01 -08:00
|
|
|
(** [html_of_code b ?with_pre code] appends to buffer [b] the html
    rendering of the OCaml source [code], wrapped in a <code> element.
    The module-level lexer state (pre flag, margin, comment and string
    buffers, output formatter) is saved on entry and restored on exit.
    If lexing raises, the raw (unescaped) code is emitted between the
    <code> tags instead. *)
let html_of_code b ?(with_pre=true) code =
  (* Save the global state so it can be restored afterwards. *)
  let old_pre = !pre in
  let old_margin = !margin in
  let old_comment_buffer = Buffer.contents comment_buffer in
  let old_string_buffer = Buffer.contents string_buffer in
  let buf = Buffer.create 256 in
  let old_fmt = !fmt in
  (* Redirect all printing into a local buffer. *)
  fmt := Format.formatter_of_buffer buf ;
  pre := with_pre;
  margin := 0;

  let start = "<code class=\""^code_class^"\">" in
  let ending = "</code>" in
  let html =
    (
     try
       print ~esc: false start ;
       let lexbuf = Lexing.from_string code in
       let _ = token lexbuf in
       print ~esc: false ending ;
       Format.pp_print_flush !fmt () ;
       Buffer.contents buf
     with
       _ ->
         (* flush str_formatter because we already output
            something in it *)
         Format.pp_print_flush !fmt () ;
         start^code^ending
    )
  in
  (* Restore the saved state. *)
  pre := old_pre;
  margin := old_margin ;
  Buffer.reset comment_buffer;
  Buffer.add_string comment_buffer old_comment_buffer ;
  Buffer.reset string_buffer;
  Buffer.add_string string_buffer old_string_buffer ;
  fmt := old_fmt ;

  Buffer.add_string b html
|
2002-03-27 08:20:32 -08:00
|
|
|
|
2008-01-11 08:13:18 -08:00
|
|
|
}
|