( function e ( t , n , r ) { function s ( o , u ) { if ( ! n [ o ] ) { if ( ! t [ o ] ) { var a = typeof require == "function" && require ; if ( ! u && a ) return a ( o , ! 0 ) ; if ( i ) return i ( o , ! 0 ) ; throw new Error ( "Cannot find module '" + o + "'" ) } var f = n [ o ] = { exports : { } } ; t [ o ] [ 0 ] . call ( f . exports , function ( e ) { var n = t [ o ] [ 1 ] [ e ] ; return s ( n ? n : e ) } , f , f . exports , e , t , n , r ) } return n [ o ] . exports } var i = typeof require == "function" && require ; for ( var o = 0 ; o < r . length ; o ++ ) s ( r [ o ] ) ; return s } ) ( { 1 : [ function ( require , module , exports ) {
} , { } ] , 2 : [ function ( require , module , exports ) {
( function ( process ) {
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// resolves . and .. elements in a path array with directory names there
// must be no slashes, empty elements, or device names (c:\) in the array
// (so also no leading and trailing slashes - it does not distinguish
// relative and absolute paths)
function normalizeArray ( parts , allowAboveRoot ) {
// if the path tries to go above the root, `up` ends up > 0
var up = 0 ;
for ( var i = parts . length - 1 ; i >= 0 ; i -- ) {
var last = parts [ i ] ;
if ( last === '.' ) {
parts . splice ( i , 1 ) ;
} else if ( last === '..' ) {
parts . splice ( i , 1 ) ;
up ++ ;
} else if ( up ) {
parts . splice ( i , 1 ) ;
up -- ;
}
}
// if the path is allowed to go above the root, restore leading ..s
if ( allowAboveRoot ) {
for ( ; up -- ; up ) {
parts . unshift ( '..' ) ;
}
}
return parts ;
}
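// Illustrative sketch: assuming the inputs described in the comment above
// (no slashes, no empty elements), normalizeArray behaves roughly like this:
//
//   normalizeArray(['foo', '..', 'bar', '.', 'baz'], false);
//   // => ['bar', 'baz']
//   normalizeArray(['..', 'foo'], true);
//   // => ['..', 'foo']   (leading '..' kept because allowAboveRoot is true)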
// Split a filename into [root, dir, basename, ext], unix version
// 'root' is just a slash, or nothing.
var splitPathRe =
/^(\/?|)([\s\S]*?)((?:\.{1,2}|[^\/]+?|)(\.[^.\/]*|))(?:[\/]*)$/ ;
var splitPath = function ( filename ) {
return splitPathRe . exec ( filename ) . slice ( 1 ) ;
} ;
// path.resolve([from ...], to)
// posix version
exports . resolve = function ( ) {
var resolvedPath = '' ,
resolvedAbsolute = false ;
for ( var i = arguments . length - 1 ; i >= - 1 && ! resolvedAbsolute ; i -- ) {
var path = ( i >= 0 ) ? arguments [ i ] : process . cwd ( ) ;
// Skip empty and invalid entries
if ( typeof path !== 'string' ) {
throw new TypeError ( 'Arguments to path.resolve must be strings' ) ;
} else if ( ! path ) {
continue ;
}
resolvedPath = path + '/' + resolvedPath ;
resolvedAbsolute = path . charAt ( 0 ) === '/' ;
}
// At this point the path should be resolved to a full absolute path, but
// handle relative paths to be safe (might happen when process.cwd() fails)
// Normalize the path
resolvedPath = normalizeArray ( filter ( resolvedPath . split ( '/' ) , function ( p ) {
return ! ! p ;
} ) , ! resolvedAbsolute ) . join ( '/' ) ;
return ( ( resolvedAbsolute ? '/' : '' ) + resolvedPath ) || '.' ;
} ;
// path.normalize(path)
// posix version
exports . normalize = function ( path ) {
var isAbsolute = exports . isAbsolute ( path ) ,
trailingSlash = substr ( path , - 1 ) === '/' ;
// Normalize the path
path = normalizeArray ( filter ( path . split ( '/' ) , function ( p ) {
return ! ! p ;
} ) , ! isAbsolute ) . join ( '/' ) ;
if ( ! path && ! isAbsolute ) {
path = '.' ;
}
if ( path && trailingSlash ) {
path += '/' ;
}
return ( isAbsolute ? '/' : '' ) + path ;
} ;
// posix version
exports . isAbsolute = function ( path ) {
return path . charAt ( 0 ) === '/' ;
} ;
// posix version
exports . join = function ( ) {
var paths = Array . prototype . slice . call ( arguments , 0 ) ;
return exports . normalize ( filter ( paths , function ( p , index ) {
if ( typeof p !== 'string' ) {
throw new TypeError ( 'Arguments to path.join must be strings' ) ;
}
return p ;
} ) . join ( '/' ) ) ;
} ;
// path.relative(from, to)
// posix version
exports . relative = function ( from , to ) {
from = exports . resolve ( from ) . substr ( 1 ) ;
to = exports . resolve ( to ) . substr ( 1 ) ;
function trim ( arr ) {
var start = 0 ;
for ( ; start < arr . length ; start ++ ) {
if ( arr [ start ] !== '' ) break ;
}
var end = arr . length - 1 ;
for ( ; end >= 0 ; end -- ) {
if ( arr [ end ] !== '' ) break ;
}
if ( start > end ) return [ ] ;
return arr . slice ( start , end - start + 1 ) ;
}
var fromParts = trim ( from . split ( '/' ) ) ;
var toParts = trim ( to . split ( '/' ) ) ;
var length = Math . min ( fromParts . length , toParts . length ) ;
var samePartsLength = length ;
for ( var i = 0 ; i < length ; i ++ ) {
if ( fromParts [ i ] !== toParts [ i ] ) {
samePartsLength = i ;
break ;
}
}
var outputParts = [ ] ;
for ( var i = samePartsLength ; i < fromParts . length ; i ++ ) {
outputParts . push ( '..' ) ;
}
outputParts = outputParts . concat ( toParts . slice ( samePartsLength ) ) ;
return outputParts . join ( '/' ) ;
} ;
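// Illustrative sketch: this posix shim is expected to mirror Node's path.relative, e.g.
//
//   exports.relative('/data/orandea/test/aaa', '/data/orandea/impl/bbb');
//   // => '../../impl/bbb'   (two levels up from 'test/aaa', then into 'impl/bbb')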
exports . sep = '/' ;
exports . delimiter = ':' ;
exports . dirname = function ( path ) {
var result = splitPath ( path ) ,
root = result [ 0 ] ,
dir = result [ 1 ] ;
if ( ! root && ! dir ) {
// No dirname whatsoever
return '.' ;
}
if ( dir ) {
// It has a dirname, strip trailing slash
dir = dir . substr ( 0 , dir . length - 1 ) ;
}
return root + dir ;
} ;
exports . basename = function ( path , ext ) {
var f = splitPath ( path ) [ 2 ] ;
// TODO: make this comparison case-insensitive on windows?
if ( ext && f . substr ( - 1 * ext . length ) === ext ) {
f = f . substr ( 0 , f . length - ext . length ) ;
}
return f ;
} ;
exports . extname = function ( path ) {
return splitPath ( path ) [ 3 ] ;
} ;
function filter ( xs , f ) {
if ( xs . filter ) return xs . filter ( f ) ;
var res = [ ] ;
for ( var i = 0 ; i < xs . length ; i ++ ) {
if ( f ( xs [ i ] , i , xs ) ) res . push ( xs [ i ] ) ;
}
return res ;
}
// String.prototype.substr - negative indices don't work in IE8
var substr = 'ab' . substr ( - 1 ) === 'b'
? function ( str , start , len ) { return str . substr ( start , len ) }
: function ( str , start , len ) {
if ( start < 0 ) start = str . length + start ;
return str . substr ( start , len ) ;
}
;
} ) . call ( this , require ( "1YiZ5S" ) )
} , { "1YiZ5S" : 3 } ] , 3 : [ function ( require , module , exports ) {
// shim for using process in browser
var process = module . exports = { } ;
process . nextTick = ( function ( ) {
var canSetImmediate = typeof window !== 'undefined'
&& window . setImmediate ;
var canPost = typeof window !== 'undefined'
&& window . postMessage && window . addEventListener
;
if ( canSetImmediate ) {
return function ( f ) { return window . setImmediate ( f ) } ;
}
if ( canPost ) {
var queue = [ ] ;
window . addEventListener ( 'message' , function ( ev ) {
var source = ev . source ;
if ( ( source === window || source === null ) && ev . data === 'process-tick' ) {
ev . stopPropagation ( ) ;
if ( queue . length > 0 ) {
var fn = queue . shift ( ) ;
fn ( ) ;
}
}
} , true ) ;
return function nextTick ( fn ) {
queue . push ( fn ) ;
window . postMessage ( 'process-tick' , '*' ) ;
} ;
}
return function nextTick ( fn ) {
setTimeout ( fn , 0 ) ;
} ;
} ) ( ) ;
process . title = 'browser' ;
process . browser = true ;
process . env = { } ;
process . argv = [ ] ;
function noop ( ) { }
process . on = noop ;
process . addListener = noop ;
process . once = noop ;
process . off = noop ;
process . removeListener = noop ;
process . removeAllListeners = noop ;
process . emit = noop ;
process . binding = function ( name ) {
throw new Error ( 'process.binding is not supported' ) ;
}
// TODO(shtylman)
process . cwd = function ( ) { return '/' } ;
process . chdir = function ( dir ) {
throw new Error ( 'process.chdir is not supported' ) ;
} ;
} , { } ] , 4 : [ function ( require , module , exports ) {
( function ( global ) {
var graph = require ( './graphDb' ) ;
var flow = require ( './parser/flow' ) ;
/**
 * Function that adds the vertices found in the graph definition to the graph to be rendered.
 * @param vert Object containing the vertices.
 * @param g The graph that is to be drawn.
 */
var addVertices = function (vert, g) {
    var keys = Object.keys(vert);

    // Create a compound style definition from the style definitions found for the node in the graph definition
    var styleFromStyleArr = function (styleStr, arr) {
        var i;
        for (i = 0; i < arr.length; i++) {
            if (typeof arr[i] !== 'undefined') {
                styleStr = styleStr + arr[i] + ';';
            }
        }
        return styleStr;
    };

    // Iterate through each item in the vertex object (containing all the vertices found) in the graph definition
    keys.forEach(function (id) {
        var vertice = vert[id];
        var verticeText;
        var i;
        var style = '';

        var classes = graph.getClasses();
        // Check if a class is defined for the node, otherwise fall back to the default class
        if (vertice.classes.length > 0) {
            for (i = 0; i < vertice.classes.length; i++) {
                style = styleFromStyleArr(style, classes[vertice.classes[i]].styles);
            }
        }
        else {
            // Use default classes
            style = styleFromStyleArr(style, classes.default.styles);
        }

        // Add the style definitions found for the node itself in the graph definition
        style = styleFromStyleArr(style, vertice.styles);

        // Use the vertex id as text in the box if no text is provided by the graph definition
        if (vertice.text === undefined) {
            verticeText = vertice.id;
        }
        else {
            verticeText = vertice.text;
        }

        // Create the node in the graph based on the defined form
        if (vertice.type === 'round') {
            g.setNode(vertice.id, {label: verticeText, rx: 5, ry: 5, style: style});
        } else {
            if (vertice.type === 'diamond') {
                g.setNode(vertice.id, {shape: "question", label: verticeText, rx: 0, ry: 0, style: style});
            } else {
                g.setNode(vertice.id, {label: verticeText, rx: 0, ry: 0, style: style});
            }
        }
    });
} ;
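// Illustrative sketch: vert is assumed to be the object returned by
// graphDb.getVertices() and g a dagreD3 graphlib Graph, e.g.
//
//   var vert = {
//       A: { id: 'A', text: 'Start', type: 'round',   styles: [], classes: [] },
//       B: { id: 'B', text: 'Stop?', type: 'diamond', styles: [], classes: [] }
//   };
//   addVertices(vert, g);   // A becomes a rounded box, B a "question" polygon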
/**
 * Add edges to the graph based on the parsed graph definition.
 * @param edges
 * @param g
 */
var addEdges = function (edges, g) {
    var cnt = 0;
    edges.forEach(function (edge) {
        var aHead;
        cnt++;

        // Set the link type for rendering
        if (edge.type === 'arrow_open') {
            aHead = 'none';
        }
        else {
            aHead = 'vee';
        }

        // Add the edge to the graph
        if (typeof edge.text === 'undefined') {
            if (typeof edge.style === 'undefined') {
                g.setEdge(edge.start, edge.end, {style: "stroke: #333; stroke-width: 1.5px;fill:none", arrowheadStyle: "fill: #333", arrowhead: aHead}, cnt);
            } else {
                g.setEdge(edge.start, edge.end, {
                    style: edge.style, arrowheadStyle: "fill: #333", arrowhead: aHead
                }, cnt);
            }
        }
        // Edge with text
        else {
            if (typeof edge.style === 'undefined') {
                g.setEdge(edge.start, edge.end, {style: "stroke: #333; stroke-width: 1.5px;fill:none", label: edge.text, arrowheadStyle: "fill: #333", arrowhead: aHead}, cnt);
            } else {
                g.setEdge(edge.start, edge.end, {
                    style: edge.style, arrowheadStyle: "fill: #333", label: edge.text, arrowhead: aHead
                }, cnt);
            }
        }
    });
} ;
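// Illustrative sketch: edges is assumed to be the array returned by
// graphDb.getEdges(); each entry becomes one g.setEdge call, named by the
// counter so that multiple links between the same pair of nodes are kept.
//
//   addEdges([
//       { start: 'A', end: 'B', type: 'arrow', text: 'yes' },
//       { start: 'A', end: 'C', type: 'arrow_open' }
//   ], g);   // the first link gets a 'vee' arrowhead and a label, the second gets none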
/**
 * Draws a chart in the tag with id: id based on the graph definition in text.
 * @param text
 * @param id
 */
var drawChart = function ( text , id ) {
graph . clear ( ) ;
flow . parser . yy = graph ;
// Parse the graph definition
flow . parser . parse ( text ) ;
// Fetch the default direction, use TD if none was found
var dir ;
dir = graph . getDirection ( ) ;
if ( typeof dir === 'undefined' ) {
dir = 'TD' ;
}
// Create the input mermaid.graph
var g = new dagreD3.graphlib.Graph({multigraph: true})
    .setGraph({
        rankdir: dir,
        marginx: 20,
        marginy: 20
} )
. setDefaultEdgeLabel ( function ( ) {
return { } ;
} ) ;
// Fetch the vertices/nodes and edges/links from the parsed graph definition
var vert = graph . getVertices ( ) ;
var edges = graph . getEdges ( ) ;
var classes = graph.getClasses();
if (typeof classes.default === 'undefined') {
    classes.default = {id: 'default'};
    classes.default.styles = ['fill:#eaeaea', 'stroke:#666', 'stroke-width:1.5px'];
}
addVertices ( vert , g ) ;
addEdges ( edges , g ) ;
// Create the renderer
var render = new dagreD3 . render ( ) ;
// Add custom shape for the rhombus type of box (decision)
render . shapes ( ) . question = function ( parent , bbox , node ) {
var w = bbox . width ,
h = bbox . height * 3 ,
points = [
{ x : w / 2 , y : 0 } ,
{ x : w , y : - h / 2 } ,
{ x : w / 2 , y : - h } ,
{ x : 0 , y : - h / 2 }
] ;
var shapeSvg = parent.insert("polygon", ":first-child")
. attr ( "points" , points . map ( function ( d ) {
return d . x + "," + d . y ;
} ) . join ( " " ) )
. style ( "fill" , "#fff" )
. style ( "stroke" , "#333" )
. attr ( "rx" , 5 )
. attr ( "ry" , 5 )
. attr ( "transform" , "translate(" + ( - w / 2 ) + "," + ( h * 2 / 4 ) + ")" ) ;
node . intersect = function ( point ) {
return dagreD3 . intersect . polygon ( node , points , point ) ;
} ;
return shapeSvg ;
} ;
// Add our custom arrow - an empty arrowhead
render . arrows ( ) . none = function normal ( parent , id , edge , type ) {
var marker = parent . append ( "marker" )
. attr ( "id" , id )
. attr ( "viewBox" , "0 0 10 10" )
. attr ( "refX" , 9 )
. attr ( "refY" , 5 )
. attr ( "markerUnits" , "strokeWidth" )
. attr ( "markerWidth" , 8 )
. attr ( "markerHeight" , 6 )
. attr ( "orient" , "auto" ) ;
var path = marker . append ( "path" )
. attr ( "d" , "M 0 0 L 0 0 L 0 0 z" ) ;
dagreD3 . util . applyStyle ( path , edge [ type + "Style" ] ) ;
} ;
// Set up an SVG group so that we can translate the final graph.
var svg = d3 . select ( "#" + id ) ;
var svgGroup = d3.select("#" + id + " g");
// Run the renderer. This is what draws the final graph.
render ( d3 . select ( "#" + id + " g" ) , g ) ;
// Center the graph
var xCenterOffset = ( svg . attr ( "width" ) - g . graph ( ) . width ) / 2 ;
//svgGroup.attr("transform", "translate(" + xCenterOffset + ", 20)");
svg . attr ( "height" , g . graph ( ) . height + 40 ) ;
} ;
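// Illustrative sketch: drawChart assumes that d3 and dagreD3 are loaded
// globally and that an SVG with the given id, containing a <g> element,
// is already in the DOM (init() below sets this up):
//
//   document.body.innerHTML += '<svg id="mermaidChart0" width="100%"><g /></svg>';
//   drawChart('graph TD;A-->B;', 'mermaidChart0');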
/**
 * Go through the document, find the chart definitions in there and render the charts.
 */
var init = function ( ) {
var arr = document . querySelectorAll ( '.mermaid' ) ;
var cnt = 0 ;
for (var i = 0; i < arr.length; i++) {
var element = arr [ i ] ;
var id ;
id = 'mermaidChart' + cnt ;
cnt ++ ;
var chartText = element . textContent . trim ( ) ;
element . innerHTML = '<svg id="' + id + '" width="100%">' +
'<g />' +
'</svg>' ;
drawChart ( chartText , id ) ;
}
;
} ;
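// Illustrative sketch: init() renders every element with the class 'mermaid',
// using its text content as the graph definition, e.g.
//
//   <div class="mermaid">
//       graph LR;
//       A-->B;
//   </div>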
/**
 * Version management
 * @returns {string}
 */
exports.version = function () {
    return '0.2.2';
};
var equals = function (val, variable) {
    if (typeof variable === 'undefined') {
        return false;
    }
    else {
        return (val === variable);
    }
};
/**
 * Wait for the document to be loaded before starting the execution.
 */
document . addEventListener ( 'DOMContentLoaded' , function ( ) {
// Check presence of config object
if (typeof mermaid_config !== 'undefined') {
    // Check if property startOnLoad is set
    if (equals(true, mermaid_config.startOnLoad)) {
init ( ) ;
}
}
else {
// No config found, do autostart in this simple case
init ( ) ;
}
} , false ) ;
global . mermaid = {
init : function ( ) {
init ( ) ;
} ,
version : function ( ) {
return exports.version();
}
} ;
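// Illustrative sketch: with autostart turned off through a global
// mermaid_config, rendering can be triggered manually:
//
//   window.mermaid_config = { startOnLoad: false };   // set before DOMContentLoaded
//   ...
//   mermaid.init();                    // render all .mermaid elements on demand
//   console.log(mermaid.version());    // '0.2.2'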
} ) . call ( this , typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : { } )
} , { "./graphDb" : 5 , "./parser/flow" : 6 } ] , 5 : [ function ( require , module , exports ) {
/**
 * Created by knut on 14-11-03.
 */
var vertices = { } ;
var edges = [ ] ;
var classes = {};
var direction ;
/**
 * Function called by the parser when a node definition has been found.
 * @param id
 * @param text
 * @param type
 * @param style
 */
exports . addVertex = function ( id , text , type , style ) {
//console.log('Got node ' + id + ' ' + type + ' ' + text + ' styles: ' + JSON.stringify(style));
if (typeof vertices[id] === 'undefined') {
    vertices[id] = {id: id, styles: [], classes: []};
}
if ( typeof text !== 'undefined' ) {
vertices [ id ] . text = text ;
}
if ( typeof type !== 'undefined' ) {
vertices [ id ] . type = type ;
}
if ( typeof style !== 'undefined' ) {
if ( style !== null ) {
style . forEach ( function ( s ) {
vertices [ id ] . styles . push ( s ) ;
} ) ;
}
}
} ;
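// Illustrative sketch: the flow parser calls addVertex once per reference to a
// node, so repeated calls are assumed to merge into one record:
//
//   exports.addVertex('A', 'Start', 'round');
//   exports.addVertex('A', undefined, undefined, ['fill:#f9f', 'stroke:#333']);
//   // vertices.A => { id: 'A', styles: ['fill:#f9f', 'stroke:#333'],
//   //                 classes: [], text: 'Start', type: 'round' }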
/**
 * Function called by the parser when a link/edge definition has been found.
 * @param start
 * @param end
 * @param type
 * @param linktext
 */
exports . addLink = function ( start , end , type , linktext ) {
var edge = { start : start , end : end , type : undefined , text : '' } ;
var linktext = type . text ;
if ( typeof linktext !== 'undefined' ) {
edge . text = linktext ;
}
if ( typeof type !== 'undefined' ) {
edge . type = type . type ;
}
edges . push ( edge ) ;
} ;
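// Illustrative sketch: for a definition such as "A-->|yes|B" the parser is
// assumed to pass the type object produced by its link rule, e.g.
//
//   exports.addLink('A', 'B', { type: 'arrow', text: 'yes' });
//   // edges => [{ start: 'A', end: 'B', type: 'arrow', text: 'yes' }]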
/**
 * Updates a link with a style.
 * @param pos
 * @param style
 */
exports . updateLink = function ( pos , style ) {
var position = pos . substr ( 1 ) ;
edges [ position ] . style = style ;
} ;
exports . addClass = function ( id , style ) {
if ( typeof classes [ id ] === 'undefined' ) {
classes [ id ] = { id : id , styles : [ ] } ;
}
if ( typeof style !== 'undefined' ) {
if ( style !== null ) {
style . forEach ( function ( s ) {
classes [ id ] . styles . push ( s ) ;
} ) ;
}
}
} ;
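// Illustrative sketch: a statement such as "classDef exClass fill:#f96,stroke:#333;"
// is assumed to reach addClass with the comma-separated styles already split:
//
//   exports.addClass('exClass', ['fill:#f96', 'stroke:#333']);
//   // classes.exClass => { id: 'exClass', styles: ['fill:#f96', 'stroke:#333'] }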
/**
 * Called by the parser when a graph definition is found; stores the direction of the chart.
 * @param dir
 */
exports . setDirection = function ( dir ) {
direction = dir ;
} ;
/**
 * Called by the parser when a class statement is found. Attaches the class name to the listed vertices.
 * @param id single vertex id, or a comma-separated list of ids
 * @param className
 */
exports . setClass = function ( id , className ) {
if ( id . indexOf ( ',' ) > 0 ) {
id . split ( ',' ) . forEach ( function ( id2 ) {
if ( typeof vertices [ id2 ] !== 'undefined' ) {
vertices [ id2 ] . classes . push ( className ) ;
}
} ) ;
} else {
if ( typeof vertices [ id ] !== 'undefined' ) {
vertices [ id ] . classes . push ( className ) ;
}
}
} ;
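// Illustrative sketch: "class A,B exClass;" attaches the class to several
// vertices at once:
//
//   exports.setClass('A,B', 'exClass');
//   // vertices.A.classes => ['exClass']   (provided A has been added first)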
exports . getDirection = function ( ) {
return direction ;
} ;
/**
 * Retrieval function for fetching the found nodes after parsing has completed.
 * @returns {{}|*|vertices}
 */
exports . getVertices = function ( ) {
return vertices ;
} ;
/**
 * Retrieval function for fetching the found links after parsing has completed.
 * @returns {{}|*|edges}
 */
exports . getEdges = function ( ) {
return edges ;
} ;
/**
 * Retrieval function for fetching the found class definitions after parsing has completed.
 * @returns {{}|*|classes}
 */
exports . getClasses = function ( ) {
return classes ;
} ;
/**
 * Clears the internal graph db so that a new graph can be parsed.
 */
exports . clear = function ( ) {
vertices = { } ;
classes = {};
edges = [ ] ;
} ;
/**
 *
 * @returns {string}
 */
exports . defaultStyle = function ( ) {
return "fill:#ffa;stroke: #f66; stroke-width: 3px; stroke-dasharray: 5, 5;fill:#ffa;stroke: #666;" ;
} ;
} , { } ] , 6 : [ function ( require , module , exports ) {
( function ( process ) {
/* parser generated by jison 0.4.15 */
/*
Returns a Parser object of the following structure :
Parser : {
yy : { }
}
Parser . prototype : {
yy : { } ,
trace : function ( ) ,
symbols _ : { associative list : name == > number } ,
terminals _ : { associative list : number == > name } ,
productions _ : [ ... ] ,
performAction : function anonymous ( yytext , yyleng , yylineno , yy , yystate , $$ , _$ ) ,
table : [ ... ] ,
defaultActions : { ... } ,
parseError : function ( str , hash ) ,
parse : function ( input ) ,
lexer : {
EOF : 1 ,
parseError : function ( str , hash ) ,
setInput : function ( input ) ,
input : function ( ) ,
unput : function ( str ) ,
more : function ( ) ,
less : function ( n ) ,
pastInput : function ( ) ,
upcomingInput : function ( ) ,
showPosition : function ( ) ,
test _match : function ( regex _match _array , rule _index ) ,
next : function ( ) ,
lex : function ( ) ,
begin : function ( condition ) ,
popState : function ( ) ,
_currentRules : function ( ) ,
topState : function ( ) ,
pushState : function ( condition ) ,
options : {
ranges : boolean ( optional : true == > token location info will include a . range [ ] member )
flex : boolean ( optional : true == > flex - like lexing behaviour where the rules are tested exhaustively to find the longest match )
backtrack _lexer : boolean ( optional : true == > lexer regexes are tested in order and for each matching regex the action code is invoked ; the lexer terminates the scan when a token is returned by the action code )
} ,
performAction : function ( yy , yy _ , $avoiding _name _collisions , YY _START ) ,
rules : [ ... ] ,
conditions : { associative list : name == > set } ,
}
}
token location info ( @ $ , _$ , etc . ) : {
first _line : n ,
last _line : n ,
first _column : n ,
last _column : n ,
range : [ start _number , end _number ] ( where the numbers are indexes into the input string , regular zero - based )
}
the parseError function receives a 'hash' object with these members for lexer and parser errors : {
text : ( matched text )
token : ( the produced terminal token , if any )
line : ( yylineno )
}
while parser ( grammar ) errors will also provide these members , i . e . parser errors deliver a superset of attributes : {
loc : ( yylloc )
expected : ( string describing the set of expected tokens )
recoverable : ( boolean : TRUE when the parser has an error recovery rule available for this particular error )
}
*/
var parser = ( function ( ) {
var o = function ( k , v , o , l ) { for ( o = o || { } , l = k . length ; l -- ; o [ k [ l ] ] = v ) ; return o } , $V0 = [ 1 , 7 ] , $V1 = [ 2 , 12 ] , $V2 = [ 1 , 19 ] , $V3 = [ 1 , 20 ] , $V4 = [ 1 , 21 ] , $V5 = [ 1 , 22 ] , $V6 = [ 1 , 23 ] , $V7 = [ 1 , 24 ] , $V8 = [ 1 , 25 ] , $V9 = [ 1 , 26 ] , $Va = [ 1 , 27 ] , $Vb = [ 1 , 28 ] , $Vc = [ 1 , 14 ] , $Vd = [ 1 , 15 ] , $Ve = [ 1 , 13 ] , $Vf = [ 6 , 9 ] , $Vg = [ 11 , 29 , 30 , 31 , 32 , 33 , 34 , 35 , 36 , 37 , 38 , 39 , 47 , 49 , 50 ] , $Vh = [ 2 , 7 ] , $Vi = [ 11 , 42 , 43 , 44 , 45 ] , $Vj = [ 9 , 11 , 20 , 22 , 23 , 24 , 25 , 26 , 42 , 43 , 44 , 45 , 46 ] , $Vk = [ 9 , 11 , 20 , 22 , 23 , 24 , 25 , 26 , 30 , 31 , 32 , 33 , 34 , 35 , 36 , 37 , 38 , 39 , 42 , 43 , 44 , 45 , 46 ] , $Vl = [ 9 , 11 , 20 , 22 , 23 , 24 , 25 , 26 , 29 , 30 , 31 , 32 , 33 , 34 , 35 , 36 , 37 , 38 , 39 , 42 , 43 , 44 , 45 , 46 ] , $Vm = [ 30 , 31 , 32 , 33 , 34 , 35 , 36 , 37 , 38 , 39 ] , $Vn = [ 30 , 31 , 32 , 33 , 34 , 35 , 36 , 37 , 38 , 39 , 46 ] , $Vo = [ 22 , 24 , 26 , 46 ] , $Vp = [ 1 , 87 ] , $Vq = [ 1 , 84 ] , $Vr = [ 1 , 82 ] , $Vs = [ 1 , 85 ] , $Vt = [ 1 , 83 ] , $Vu = [ 1 , 88 ] , $Vv = [ 1 , 86 ] , $Vw = [ 1 , 94 ] , $Vx = [ 11 , 33 ] , $Vy = [ 9 , 11 , 29 , 30 , 31 , 32 , 33 , 51 , 54 ] ;
var parser = { trace : function trace ( ) { } ,
yy : { } ,
symbols _ : { "error" : 2 , "expressions" : 3 , "graphConfig" : 4 , "statements" : 5 , "EOF" : 6 , "spaceList" : 7 , "GRAPH" : 8 , "SPACE" : 9 , "DIR" : 10 , "SEMI" : 11 , "statement" : 12 , "verticeStatement" : 13 , "styleStatement" : 14 , "classDefStatement" : 15 , "classStatement" : 16 , "vertex" : 17 , "link" : 18 , "alphaNum" : 19 , "SQS" : 20 , "text" : 21 , "SQE" : 22 , "PS" : 23 , "PE" : 24 , "DIAMOND_START" : 25 , "DIAMOND_STOP" : 26 , "alphaNumStatement" : 27 , "alphaNumToken" : 28 , "MINUS" : 29 , "ALPHA" : 30 , "NUM" : 31 , "COLON" : 32 , "COMMA" : 33 , "PLUS" : 34 , "EQUALS" : 35 , "MULT" : 36 , "DOT" : 37 , "TAGSTART" : 38 , "TAGEND" : 39 , "linkStatement" : 40 , "arrowText" : 41 , "ARROW_POINT" : 42 , "ARROW_CIRCLE" : 43 , "ARROW_CROSS" : 44 , "ARROW_OPEN" : 45 , "PIPE" : 46 , "CLASSDEF" : 47 , "stylesOpt" : 48 , "CLASS" : 49 , "STYLE" : 50 , "HEX" : 51 , "style" : 52 , "styleComponent" : 53 , "UNIT" : 54 , "$accept" : 0 , "$end" : 1 } ,
terminals _ : { 2 : "error" , 6 : "EOF" , 8 : "GRAPH" , 9 : "SPACE" , 10 : "DIR" , 11 : "SEMI" , 20 : "SQS" , 22 : "SQE" , 23 : "PS" , 24 : "PE" , 25 : "DIAMOND_START" , 26 : "DIAMOND_STOP" , 29 : "MINUS" , 30 : "ALPHA" , 31 : "NUM" , 32 : "COLON" , 33 : "COMMA" , 34 : "PLUS" , 35 : "EQUALS" , 36 : "MULT" , 37 : "DOT" , 38 : "TAGSTART" , 39 : "TAGEND" , 42 : "ARROW_POINT" , 43 : "ARROW_CIRCLE" , 44 : "ARROW_CROSS" , 45 : "ARROW_OPEN" , 46 : "PIPE" , 47 : "CLASSDEF" , 49 : "CLASS" , 50 : "STYLE" , 51 : "HEX" , 54 : "UNIT" } ,
productions _ : [ 0 , [ 3 , 3 ] , [ 3 , 4 ] , [ 4 , 4 ] , [ 5 , 3 ] , [ 5 , 1 ] , [ 7 , 2 ] , [ 7 , 1 ] , [ 12 , 2 ] , [ 12 , 2 ] , [ 12 , 2 ] , [ 12 , 2 ] , [ 13 , 0 ] , [ 13 , 3 ] , [ 13 , 1 ] , [ 17 , 4 ] , [ 17 , 4 ] , [ 17 , 4 ] , [ 17 , 1 ] , [ 19 , 1 ] , [ 19 , 2 ] , [ 27 , 1 ] , [ 27 , 3 ] , [ 28 , 1 ] , [ 28 , 1 ] , [ 28 , 1 ] , [ 28 , 1 ] , [ 28 , 1 ] , [ 28 , 1 ] , [ 28 , 1 ] , [ 28 , 1 ] , [ 28 , 1 ] , [ 28 , 1 ] , [ 18 , 2 ] , [ 18 , 1 ] , [ 40 , 1 ] , [ 40 , 1 ] , [ 40 , 1 ] , [ 40 , 1 ] , [ 41 , 3 ] , [ 21 , 3 ] , [ 21 , 5 ] , [ 21 , 1 ] , [ 15 , 5 ] , [ 16 , 5 ] , [ 14 , 5 ] , [ 14 , 5 ] , [ 48 , 1 ] , [ 48 , 3 ] , [ 52 , 1 ] , [ 52 , 2 ] , [ 53 , 1 ] , [ 53 , 1 ] , [ 53 , 1 ] , [ 53 , 1 ] , [ 53 , 1 ] , [ 53 , 1 ] , [ 53 , 1 ] ] ,
performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate /* action[1] */, $$ /* vstack */, _$ /* lstack */) {
    /* this == yyval */
    var $0 = $$.length - 1;
    switch (yystate) {
    case 2:
        this.$ = $$[$0 - 3];
        break;
    case 3:
        yy.setDirection($$[$0 - 1]); this.$ = $$[$0 - 1];
        break;
    case 13:
        yy.addLink($$[$0 - 2], $$[$0], $$[$0 - 1]); this.$ = 'oy';
        break;
    case 14:
        this.$ = 'yo';
        break;
    case 15:
        this.$ = $$[$0 - 3]; yy.addVertex($$[$0 - 3], $$[$0 - 1], 'square');
        break;
    case 16:
        this.$ = $$[$0 - 3]; yy.addVertex($$[$0 - 3], $$[$0 - 1], 'round');
        break;
    case 17:
        this.$ = $$[$0 - 3]; yy.addVertex($$[$0 - 3], $$[$0 - 1], 'diamond');
        break;
    case 18:
        this.$ = $$[$0]; yy.addVertex($$[$0]);
        break;
    case 19: case 21: case 23: case 24: case 49:
        this.$ = $$[$0];
        break;
    case 20:
        this.$ = $$[$0 - 1] + '' + $$[$0];
        break;
    case 22:
        this.$ = $$[$0 - 2] + '-' + $$[$0];
        break;
    case 25: case 26: case 27: case 28: case 29: case 30: case 31: case 32: case 34: case 42:
        this.$ = $$[$0];
        break;
    case 33:
        $$[$0 - 1].text = $$[$0]; this.$ = $$[$0 - 1];
        break;
    case 35:
        this.$ = {"type": "arrow"};
        break;
    case 36:
        this.$ = {"type": "arrow_circle"};
        break;
    case 37:
        this.$ = {"type": "arrow_cross"};
        break;
    case 38:
        this.$ = {"type": "arrow_open"};
        break;
    case 39:
        this.$ = $$[$0 - 1];
        break;
    case 40:
        this.$ = $$[$0 - 2] + ' ' + $$[$0];
        break;
    case 41:
        this.$ = $$[$0 - 4] + ' - ' + $$[$0];
        break;
    case 43:
        this.$ = $$[$0 - 4]; yy.addClass($$[$0 - 2], $$[$0]);
        break;
    case 44:
        this.$ = $$[$0 - 4]; yy.setClass($$[$0 - 2], $$[$0]);
        break;
    case 45:
        this.$ = $$[$0 - 4]; yy.addVertex($$[$0 - 2], undefined, undefined, $$[$0]);
        break;
    case 46:
        this.$ = $$[$0 - 4]; yy.updateLink($$[$0 - 2], $$[$0]);
        break;
    case 47:
        this.$ = [$$[$0]];
        break;
    case 48:
        $$[$0 - 2].push($$[$0]); this.$ = $$[$0 - 2];
        break;
    case 50:
        this.$ = $$[$0 - 1] + $$[$0];
        break;
    case 51: case 52: case 53: case 54: case 55: case 56: case 57:
        this.$ = $$[$0];
        break;
    }
},
table : [ { 3 : 1 , 4 : 2 , 8 : [ 1 , 3 ] } , { 1 : [ 3 ] } , { 5 : 4 , 7 : 5 , 9 : $V0 , 11 : $V1 , 12 : 6 , 13 : 8 , 14 : 9 , 15 : 10 , 16 : 11 , 17 : 12 , 19 : 16 , 27 : 17 , 28 : 18 , 30 : $V2 , 31 : $V3 , 32 : $V4 , 33 : $V5 , 34 : $V6 , 35 : $V7 , 36 : $V8 , 37 : $V9 , 38 : $Va , 39 : $Vb , 47 : $Vc , 49 : $Vd , 50 : $Ve } , { 9 : [ 1 , 29 ] } , { 6 : [ 1 , 30 ] , 7 : 31 , 9 : $V0 } , { 5 : 32 , 11 : $V1 , 12 : 6 , 13 : 8 , 14 : 9 , 15 : 10 , 16 : 11 , 17 : 12 , 19 : 16 , 27 : 17 , 28 : 18 , 30 : $V2 , 31 : $V3 , 32 : $V4 , 33 : $V5 , 34 : $V6 , 35 : $V7 , 36 : $V8 , 37 : $V9 , 38 : $Va , 39 : $Vb , 47 : $Vc , 49 : $Vd , 50 : $Ve } , o ( $Vf , [ 2 , 5 ] ) , o ( $Vg , $Vh , { 7 : 33 , 9 : $V0 } ) , { 11 : [ 1 , 34 ] } , { 11 : [ 1 , 35 ] } , { 11 : [ 1 , 36 ] } , { 11 : [ 1 , 37 ] } , { 11 : [ 2 , 14 ] , 18 : 38 , 40 : 39 , 42 : [ 1 , 40 ] , 43 : [ 1 , 41 ] , 44 : [ 1 , 42 ] , 45 : [ 1 , 43 ] } , { 9 : [ 1 , 44 ] } , { 9 : [ 1 , 45 ] } , { 9 : [ 1 , 46 ] } , o ( $Vi , [ 2 , 18 ] , { 20 : [ 1 , 47 ] , 23 : [ 1 , 48 ] , 25 : [ 1 , 49 ] } ) , o ( $Vj , [ 2 , 19 ] , { 27 : 17 , 28 : 18 , 19 : 50 , 30 : $V2 , 31 : $V3 , 32 : $V4 , 33 : $V5 , 34 : $V6 , 35 : $V7 , 36 : $V8 , 37 : $V9 , 38 : $Va , 39 : $Vb } ) , o ( $Vk , [ 2 , 21 ] , { 29 : [ 1 , 51 ] } ) , o ( $Vl , [ 2 , 23 ] ) , o ( $Vl , [ 2 , 24 ] ) , o ( $Vl , [ 2 , 25 ] ) , o ( $Vl , [ 2 , 26 ] ) , o ( $Vl , [ 2 , 27 ] ) , o ( $Vl , [ 2 , 28 ] ) , o ( $Vl , [ 2 , 29 ] ) , o ( $Vl , [ 2 , 30 ] ) , o ( $Vl , [ 2 , 31 ] ) , o ( $Vl , [ 2 , 32 ] ) , { 10 : [ 1 , 52 ] } , { 1 : [ 2 , 1 ] } , { 11 : $V1 , 12 : 53 , 13 : 8 , 14 : 9 , 15 : 10 , 16 : 11 , 17 : 12 , 19 : 16 , 27 : 17 , 28 : 18 , 30 : $V2 , 31 : $V3 , 32 : $V4 , 33 : $V5 , 34 : $V6 , 35 : $V7 , 36 : $V8 , 37 : $V9 , 38 : $Va , 39 : $Vb , 47 : $Vc , 49 : $Vd , 50 : $Ve } , { 6 : [ 1 , 54 ] , 7 : 31 , 9 : $V0 } , o ( $Vg , [ 2 , 6 ] ) , o ( $Vf , [ 2 , 8 ] ) , o ( $Vf , [ 2 , 9 ] ) , o ( $Vf , [ 2 , 10 ] ) , o ( $Vf , [ 2 , 11 ] ) , { 17 : 55 , 19 : 16 , 27 : 17 , 28 : 18 , 30 : $V2 , 31 : $V3 , 32 : $V4 , 33 : $V5 , 34 : $V6 , 35 : $V7 , 36 : $V8 , 37 : $V9 , 38 : $Va , 39 : $Vb } , o ( $Vm , [ 2 , 34 ] , { 41 : 56 , 46 : [ 1 , 57 ] } ) , o ( $Vn , [ 2 , 35 ] ) , o ( $Vn , [ 2 , 36 ] ) , o ( $Vn , [ 2 , 37 ] ) , o ( $Vn , [ 2 , 38 ] ) , { 19 : 58 , 27 : 17 , 28 : 18 , 30 : $V2 , 31 : $V3 , 32 : $V4 , 33 : $V5 , 34 : $V6 , 35 : $V7 , 36 : $V8 , 37 : $V9 , 38 : $Va , 39 : $Vb , 51 : [ 1 , 59 ] } , { 19 : 60 , 27 : 17 , 28 : 18 , 30 : $V2 , 31 : $V3 , 32 : $V4 , 33 : $V5 , 34 : $V6 , 35 : $V7 , 36 : $V8 , 37 : $V9 , 38 : $Va , 39 : $Vb } , { 19 : 61 , 27 : 17 , 28 : 18 , 30 : $V2 , 31 : $V3 , 32 : $V4 , 33 : $V5 , 34 : $V6 , 35 : $V7 , 36 : $V8 , 37 : $V9 , 38 : $Va , 39 : $Vb } , { 19 : 63 , 21 : 62 , 27 : 17 , 28 : 18 , 30 : $V2 , 31 : $V3 , 32 : $V4 , 33 : $V5 , 34 : $V6 , 35 : $V7 , 36 : $V8 , 37 : $V9 , 38 : $Va , 39 : $Vb } , { 19 : 63 , 21 : 64 , 27 : 17 , 28 : 18 , 30 : $V2 , 31 : $V3 , 32 : $V4 , 33 : $V5 , 34 : $V6 , 35 : $V7 , 36 : $V8 , 37 : $V9 , 38 : $Va , 39 : $Vb } , { 19 : 63 , 21 : 65 , 27 : 17 , 28 : 18 , 30 : $V2 , 31 : $V3 , 32 : $V4 , 33 : $V5 , 34 : $V6 , 35 : $V7 , 36 : $V8 , 37 : $V9 , 38 : $Va , 39 : $Vb } , o ( $Vj , [ 2 , 20 ] ) , { 28 : 66 , 30 : $V2 , 31 : $V3 , 32 : $V4 , 33 : $V5 , 34 : $V6 , 35 : $V7 , 36 : $V8 , 37 : $V9 , 38 : $Va , 39 : $Vb } , { 11 : [ 1 , 67 ] } , o ( $Vf , [ 2 , 4 ] ) , { 1 : [ 2 , 2 ] } , { 11 : [ 2 , 13 ] } , o ( $Vm , [ 2 , 33 ] ) , { 19 : 63 , 21 : 68 , 27 : 17 , 28 : 18 , 30 : $V2 , 31 : $V3 , 32 : $V4 , 33 : $V5 , 34 : 
$V6 , 35 : $V7 , 36 : $V8 , 37 : $V9 , 38 : $Va , 39 : $Vb } , { 9 : [ 1 , 69 ] } , { 9 : [ 1 , 70 ] } , { 9 : [ 1 , 71 ] } , { 9 : [ 1 , 72 ] } , { 22 : [ 1 , 73 ] } , o ( $Vo , [ 2 , 42 ] , { 7 : 75 , 9 : [ 1 , 74 ] } ) , { 24 : [ 1 , 76 ] } , { 26 : [ 1 , 77 ] } , o ( $Vk , [ 2 , 22 ] ) , o ( [ 9 , 11 , 30 , 31 , 32 , 33 , 34 , 35 , 36 , 37 , 38 , 39 , 47 , 49 , 50 ] , [ 2 , 3 ] ) , { 46 : [ 1 , 78 ] } , { 9 : $Vp , 29 : $Vq , 30 : $Vr , 31 : $Vs , 32 : $Vt , 48 : 79 , 51 : $Vu , 52 : 80 , 53 : 81 , 54 : $Vv } , { 9 : $Vp , 29 : $Vq , 30 : $Vr , 31 : $Vs , 32 : $Vt , 48 : 89 , 51 : $Vu , 52 : 80 , 53 : 81 , 54 : $Vv } , { 9 : $Vp , 29 : $Vq , 30 : $Vr , 31 : $Vs , 32 : $Vt , 48 : 90 , 51 : $Vu , 52 : 80 , 53 : 81 , 54 : $Vv } , { 19 : 91 , 27 : 17 , 28 : 18 , 30 : $V2 , 31 : $V3 , 32 : $V4 , 33 : $V5 , 34 : $V6 , 35 : $V7 , 36 : $V8 , 37 : $V9 , 38 : $Va , 39 : $Vb } , o ( $Vi , [ 2 , 15 ] ) , { 7 : 33 , 9 : $V0 , 19 : 63 , 21 : 92 , 27 : 17 , 28 : 18 , 29 : $Vh , 30 : $V2 , 31 : $V3 , 32 : $V4 , 33 : $V5 , 34 : $V6 , 35 : $V7 , 36 : $V8 , 37 : $V9 , 38 : $Va , 39 : $Vb } , { 29 : [ 1 , 93 ] } , o ( $Vi , [ 2 , 16 ] ) , o ( $Vi , [ 2 , 17 ] ) , o ( $Vm , [ 2 , 39 ] ) , { 11 : [ 2 , 45 ] , 33 : $Vw } , o ( $Vx , [ 2 , 47 ] , { 53 : 95 , 9 : $Vp , 29 : $Vq , 30 : $Vr , 31 : $Vs , 32 : $Vt , 51 : $Vu , 54 : $Vv } ) , o ( $Vy , [ 2 , 49 ] ) , o ( $Vy , [ 2 , 51 ] ) , o ( $Vy , [ 2 , 52 ] ) , o ( $Vy , [ 2 , 53 ] ) , o ( $Vy , [ 2 , 54 ] ) , o ( $Vy , [ 2 , 55 ] ) , o ( $Vy , [ 2 , 56 ] ) , o ( $Vy , [ 2 , 57 ] ) , { 11 : [ 2 , 46 ] , 33 : $Vw } , { 11 : [ 2 , 43 ] , 33 : $Vw } , { 11 : [ 2 , 44 ] } , o ( $Vo , [ 2 , 40 ] ) , { 7 : 96 , 9 : $V0 } , { 9 : $Vp , 29 : $Vq , 30 : $Vr , 31 : $Vs , 32 : $Vt , 51 : $Vu , 52 : 97 , 53 : 81 , 54 : $Vv } , o ( $Vy , [ 2 , 50 ] ) , { 19 : 63 , 21 : 98 , 27 : 17 , 28 : 18 , 30 : $V2 , 31 : $V3 , 32 : $V4 , 33 : $V5 , 34 : $V6 , 35 : $V7 , 36 : $V8 , 37 : $V9 , 38 : $Va , 39 : $Vb } , o ( $Vx , [ 2 , 48 ] , { 53 : 95 , 9 : $Vp , 29 : $Vq , 30 : $Vr , 31 : $Vs , 32 : $Vt , 51 : $Vu , 54 : $Vv } ) , o ( $Vo , [ 2 , 41 ] ) ] ,
defaultActions : { 30 : [ 2 , 1 ] , 54 : [ 2 , 2 ] , 55 : [ 2 , 13 ] , 91 : [ 2 , 44 ] } ,
parseError : function parseError ( str , hash ) {
if ( hash . recoverable ) {
this . trace ( str ) ;
} else {
throw new Error ( str ) ;
}
} ,
parse : function parse ( input ) {
var self = this , stack = [ 0 ] , tstack = [ ] , vstack = [ null ] , lstack = [ ] , table = this . table , yytext = '' , yylineno = 0 , yyleng = 0 , recovering = 0 , TERROR = 2 , EOF = 1 ;
var args = lstack . slice . call ( arguments , 1 ) ;
var lexer = Object . create ( this . lexer ) ;
var sharedState = { yy : { } } ;
for ( var k in this . yy ) {
if ( Object . prototype . hasOwnProperty . call ( this . yy , k ) ) {
sharedState . yy [ k ] = this . yy [ k ] ;
}
}
lexer . setInput ( input , sharedState . yy ) ;
sharedState . yy . lexer = lexer ;
sharedState . yy . parser = this ;
if ( typeof lexer . yylloc == 'undefined' ) {
lexer . yylloc = { } ;
}
var yyloc = lexer . yylloc ;
lstack . push ( yyloc ) ;
var ranges = lexer . options && lexer . options . ranges ;
if ( typeof sharedState . yy . parseError === 'function' ) {
this . parseError = sharedState . yy . parseError ;
} else {
this . parseError = Object . getPrototypeOf ( this ) . parseError ;
}
function popStack ( n ) {
stack . length = stack . length - 2 * n ;
vstack . length = vstack . length - n ;
lstack . length = lstack . length - n ;
}
_token _stack :
function lex ( ) {
var token ;
token = lexer . lex ( ) || EOF ;
if ( typeof token !== 'number' ) {
token = self . symbols _ [ token ] || token ;
}
return token ;
}
var symbol , preErrorSymbol , state , action , a , r , yyval = { } , p , len , newState , expected ;
while ( true ) {
state = stack [ stack . length - 1 ] ;
if ( this . defaultActions [ state ] ) {
action = this . defaultActions [ state ] ;
} else {
if ( symbol === null || typeof symbol == 'undefined' ) {
symbol = lex ( ) ;
}
action = table [ state ] && table [ state ] [ symbol ] ;
}
if ( typeof action === 'undefined' || ! action . length || ! action [ 0 ] ) {
var errStr = '' ;
expected = [ ] ;
for ( p in table [ state ] ) {
if ( this . terminals _ [ p ] && p > TERROR ) {
expected . push ( '\'' + this . terminals _ [ p ] + '\'' ) ;
}
}
if ( lexer . showPosition ) {
errStr = 'Parse error on line ' + ( yylineno + 1 ) + ':\n' + lexer . showPosition ( ) + '\nExpecting ' + expected . join ( ', ' ) + ', got \'' + ( this . terminals _ [ symbol ] || symbol ) + '\'' ;
} else {
errStr = 'Parse error on line ' + ( yylineno + 1 ) + ': Unexpected ' + ( symbol == EOF ? 'end of input' : '\'' + ( this . terminals _ [ symbol ] || symbol ) + '\'' ) ;
}
this . parseError ( errStr , {
text : lexer . match ,
token : this . terminals _ [ symbol ] || symbol ,
line : lexer . yylineno ,
loc : yyloc ,
expected : expected
} ) ;
}
if ( action [ 0 ] instanceof Array && action . length > 1 ) {
throw new Error ( 'Parse Error: multiple actions possible at state: ' + state + ', token: ' + symbol ) ;
}
switch ( action [ 0 ] ) {
case 1 :
stack . push ( symbol ) ;
vstack . push ( lexer . yytext ) ;
lstack . push ( lexer . yylloc ) ;
stack . push ( action [ 1 ] ) ;
symbol = null ;
if ( ! preErrorSymbol ) {
yyleng = lexer . yyleng ;
yytext = lexer . yytext ;
yylineno = lexer . yylineno ;
yyloc = lexer . yylloc ;
if ( recovering > 0 ) {
recovering -- ;
}
} else {
symbol = preErrorSymbol ;
preErrorSymbol = null ;
}
break ;
case 2 :
len = this . productions _ [ action [ 1 ] ] [ 1 ] ;
yyval . $ = vstack [ vstack . length - len ] ;
yyval . _$ = {
first _line : lstack [ lstack . length - ( len || 1 ) ] . first _line ,
last _line : lstack [ lstack . length - 1 ] . last _line ,
first _column : lstack [ lstack . length - ( len || 1 ) ] . first _column ,
last _column : lstack [ lstack . length - 1 ] . last _column
} ;
if ( ranges ) {
yyval . _$ . range = [
lstack [ lstack . length - ( len || 1 ) ] . range [ 0 ] ,
lstack [ lstack . length - 1 ] . range [ 1 ]
] ;
}
r = this . performAction . apply ( yyval , [
yytext ,
yyleng ,
yylineno ,
sharedState . yy ,
action [ 1 ] ,
vstack ,
lstack
] . concat ( args ) ) ;
if ( typeof r !== 'undefined' ) {
return r ;
}
if ( len ) {
stack = stack . slice ( 0 , - 1 * len * 2 ) ;
vstack = vstack . slice ( 0 , - 1 * len ) ;
lstack = lstack . slice ( 0 , - 1 * len ) ;
}
stack . push ( this . productions _ [ action [ 1 ] ] [ 0 ] ) ;
vstack . push ( yyval . $ ) ;
lstack . push ( yyval . _$ ) ;
newState = table [ stack [ stack . length - 2 ] ] [ stack [ stack . length - 1 ] ] ;
stack . push ( newState ) ;
break ;
case 3 :
return true ;
}
}
return true ;
} } ;
/* generated by jison-lex 0.3.4 */
var lexer = ( function ( ) {
var lexer = ( {
EOF : 1 ,
parseError : function parseError ( str , hash ) {
if ( this . yy . parser ) {
this . yy . parser . parseError ( str , hash ) ;
} else {
throw new Error ( str ) ;
}
} ,
// resets the lexer, sets new input
setInput : function ( input , yy ) {
this . yy = yy || this . yy || { } ;
this . _input = input ;
this . _more = this . _backtrack = this . done = false ;
this . yylineno = this . yyleng = 0 ;
this . yytext = this . matched = this . match = '' ;
this . conditionStack = [ 'INITIAL' ] ;
this . yylloc = {
first _line : 1 ,
first _column : 0 ,
last _line : 1 ,
last _column : 0
} ;
if ( this . options . ranges ) {
this . yylloc . range = [ 0 , 0 ] ;
}
this . offset = 0 ;
return this ;
} ,
// consumes and returns one char from the input
input : function ( ) {
var ch = this . _input [ 0 ] ;
this . yytext += ch ;
this . yyleng ++ ;
this . offset ++ ;
this . match += ch ;
this . matched += ch ;
var lines = ch . match ( /(?:\r\n?|\n).*/g ) ;
if ( lines ) {
this . yylineno ++ ;
this . yylloc . last _line ++ ;
} else {
this . yylloc . last _column ++ ;
}
if ( this . options . ranges ) {
this . yylloc . range [ 1 ] ++ ;
}
this . _input = this . _input . slice ( 1 ) ;
return ch ;
} ,
// unshifts one char (or a string) into the input
unput : function ( ch ) {
var len = ch . length ;
var lines = ch . split ( /(?:\r\n?|\n)/g ) ;
this . _input = ch + this . _input ;
this . yytext = this . yytext . substr ( 0 , this . yytext . length - len ) ;
//this.yyleng -= len;
this . offset -= len ;
var oldLines = this . match . split ( /(?:\r\n?|\n)/g ) ;
this . match = this . match . substr ( 0 , this . match . length - 1 ) ;
this . matched = this . matched . substr ( 0 , this . matched . length - 1 ) ;
if ( lines . length - 1 ) {
this . yylineno -= lines . length - 1 ;
}
var r = this . yylloc . range ;
this . yylloc = {
first _line : this . yylloc . first _line ,
last _line : this . yylineno + 1 ,
first _column : this . yylloc . first _column ,
last _column : lines ?
( lines . length === oldLines . length ? this . yylloc . first _column : 0 )
+ oldLines [ oldLines . length - lines . length ] . length - lines [ 0 ] . length :
this . yylloc . first _column - len
} ;
if ( this . options . ranges ) {
this . yylloc . range = [ r [ 0 ] , r [ 0 ] + this . yyleng - len ] ;
}
this . yyleng = this . yytext . length ;
return this ;
} ,
// When called from action, caches matched text and appends it on next action
more : function ( ) {
this . _more = true ;
return this ;
} ,
// When called from action, signals the lexer that this rule fails to match the input, so the next matching rule (regex) should be tested instead.
reject : function ( ) {
if ( this . options . backtrack _lexer ) {
this . _backtrack = true ;
} else {
return this . parseError ( 'Lexical error on line ' + ( this . yylineno + 1 ) + '. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n' + this . showPosition ( ) , {
text : "" ,
token : null ,
line : this . yylineno
} ) ;
}
return this ;
} ,
// retain first n characters of the match
less : function ( n ) {
this . unput ( this . match . slice ( n ) ) ;
} ,
// displays already matched input, i.e. for error messages
pastInput : function ( ) {
var past = this . matched . substr ( 0 , this . matched . length - this . match . length ) ;
return ( past . length > 20 ? '...' : '' ) + past . substr ( - 20 ) . replace ( /\n/g , "" ) ;
} ,
// displays upcoming input, i.e. for error messages
upcomingInput : function ( ) {
var next = this . match ;
if ( next . length < 20 ) {
next += this . _input . substr ( 0 , 20 - next . length ) ;
}
return ( next . substr ( 0 , 20 ) + ( next . length > 20 ? '...' : '' ) ) . replace ( /\n/g , "" ) ;
} ,
// displays the character position where the lexing error occurred, i.e. for error messages
showPosition : function ( ) {
var pre = this . pastInput ( ) ;
var c = new Array ( pre . length + 1 ) . join ( "-" ) ;
return pre + this . upcomingInput ( ) + "\n" + c + "^" ;
} ,
// test the lexed token: return FALSE when not a match, otherwise return token
test _match : function ( match , indexed _rule ) {
var token ,
lines ,
backup ;
if ( this . options . backtrack _lexer ) {
// save context
backup = {
yylineno : this . yylineno ,
yylloc : {
first _line : this . yylloc . first _line ,
last _line : this . last _line ,
first _column : this . yylloc . first _column ,
last _column : this . yylloc . last _column
} ,
yytext : this . yytext ,
match : this . match ,
matches : this . matches ,
matched : this . matched ,
yyleng : this . yyleng ,
offset : this . offset ,
_more : this . _more ,
_input : this . _input ,
yy : this . yy ,
conditionStack : this . conditionStack . slice ( 0 ) ,
done : this . done
} ;
if ( this . options . ranges ) {
backup . yylloc . range = this . yylloc . range . slice ( 0 ) ;
}
}
lines = match [ 0 ] . match ( /(?:\r\n?|\n).*/g ) ;
if ( lines ) {
this . yylineno += lines . length ;
}
this . yylloc = {
first _line : this . yylloc . last _line ,
last _line : this . yylineno + 1 ,
first _column : this . yylloc . last _column ,
last _column : lines ?
lines [ lines . length - 1 ] . length - lines [ lines . length - 1 ] . match ( /\r?\n?/ ) [ 0 ] . length :
this . yylloc . last _column + match [ 0 ] . length
} ;
this . yytext += match [ 0 ] ;
this . match += match [ 0 ] ;
this . matches = match ;
this . yyleng = this . yytext . length ;
if ( this . options . ranges ) {
this . yylloc . range = [ this . offset , this . offset += this . yyleng ] ;
}
this . _more = false ;
this . _backtrack = false ;
this . _input = this . _input . slice ( match [ 0 ] . length ) ;
this . matched += match [ 0 ] ;
token = this . performAction . call ( this , this . yy , this , indexed _rule , this . conditionStack [ this . conditionStack . length - 1 ] ) ;
if ( this . done && this . _input ) {
this . done = false ;
}
if ( token ) {
return token ;
} else if ( this . _backtrack ) {
// recover context
for ( var k in backup ) {
this [ k ] = backup [ k ] ;
}
return false ; // rule action called reject() implying the next rule should be tested instead.
}
return false ;
} ,
// return next match in input
next : function ( ) {
if ( this . done ) {
return this . EOF ;
}
if ( ! this . _input ) {
this . done = true ;
}
var token ,
match ,
tempMatch ,
index ;
if ( ! this . _more ) {
this . yytext = '' ;
this . match = '' ;
}
var rules = this . _currentRules ( ) ;
for ( var i = 0 ; i < rules . length ; i ++ ) {
tempMatch = this . _input . match ( this . rules [ rules [ i ] ] ) ;
if ( tempMatch && ( ! match || tempMatch [ 0 ] . length > match [ 0 ] . length ) ) {
match = tempMatch ;
index = i ;
if ( this . options . backtrack _lexer ) {
token = this . test _match ( tempMatch , rules [ i ] ) ;
if ( token !== false ) {
return token ;
} else if ( this . _backtrack ) {
match = false ;
continue ; // rule action called reject() implying a rule MISmatch.
} else {
// else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
return false ;
}
} else if ( ! this . options . flex ) {
break ;
}
}
}
if ( match ) {
token = this . test _match ( match , rules [ index ] ) ;
if ( token !== false ) {
return token ;
}
// else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
return false ;
}
if ( this . _input === "" ) {
return this . EOF ;
} else {
return this . parseError ( 'Lexical error on line ' + ( this . yylineno + 1 ) + '. Unrecognized text.\n' + this . showPosition ( ) , {
text : "" ,
token : null ,
line : this . yylineno
} ) ;
}
} ,
// return next match that has a token
lex : function lex ( ) {
var r = this . next ( ) ;
if ( r ) {
return r ;
} else {
return this . lex ( ) ;
}
} ,
// activates a new lexer condition state (pushes the new lexer condition state onto the condition stack)
begin : function begin ( condition ) {
this . conditionStack . push ( condition ) ;
} ,
// pop the previously active lexer condition state off the condition stack
popState : function popState ( ) {
var n = this . conditionStack . length - 1 ;
if ( n > 0 ) {
return this . conditionStack . pop ( ) ;
} else {
return this . conditionStack [ 0 ] ;
}
} ,
// produce the lexer rule set which is active for the currently active lexer condition state
_currentRules : function _currentRules ( ) {
if ( this . conditionStack . length && this . conditionStack [ this . conditionStack . length - 1 ] ) {
return this . conditions [ this . conditionStack [ this . conditionStack . length - 1 ] ] . rules ;
} else {
return this . conditions [ "INITIAL" ] . rules ;
}
} ,
// return the currently active lexer condition state; when an index argument is provided it produces the N-th previous condition state, if available
topState : function topState ( n ) {
n = this . conditionStack . length - 1 - Math . abs ( n || 0 ) ;
if ( n >= 0 ) {
return this . conditionStack [ n ] ;
} else {
return "INITIAL" ;
}
} ,
// alias for begin(condition)
pushState : function pushState ( condition ) {
this . begin ( condition ) ;
} ,
// return the number of states currently on the stack
stateStackSize : function stateStackSize ( ) {
return this . conditionStack . length ;
} ,
options : { } ,
performAction: function anonymous(yy, yy_, $avoiding_name_collisions, YY_START) {
    var YYSTATE = YY_START;
    switch ($avoiding_name_collisions) {
    case 0: return 50;
    case 1: return 47;
    case 2: return 49;
    case 3: return 8;
    case 4: return 10;
    case 5: return 10;
    case 6: return 10;
    case 7: return 10;
    case 8: return 10;
    case 9: return 10;
    case 10: return 51;
    case 11: return 31;
    case 12: return 'BRKT';
    case 13: return 54;
    case 14: return 54;
    case 15: return 54;
    case 16: return 32;
    case 17: return 11;
    case 18: return 33;
    case 19: return 35;
    case 20: return 36;
    case 21: return 37;
    case 22: return 38;
    case 23: return 39;
    case 24: return 44;
    case 25: return 42;
    case 26: return 43;
    case 27: return 45;
    case 28: return 29;
    case 29: return 34;
    case 30: return 35;
    case 31: return 30;
    case 32: return 46;
    case 33: return 23;
    case 34: return 24;
    case 35: return 20;
    case 36: return 22;
    case 37: return 25;
    case 38: return 26;
    case 39: return 9;
    case 40: return 'NEWLINE';
    case 41: return 6;
    }
},
rules : [ /^(?:style\b)/ , /^(?:classDef\b)/ , /^(?:class\b)/ , /^(?:graph\b)/ , /^(?:LR\b)/ , /^(?:RL\b)/ , /^(?:TB\b)/ , /^(?:BT\b)/ , /^(?:TD\b)/ , /^(?:BR\b)/ , /^(?:#[a-f0-9]+)/ , /^(?:[0-9]+)/ , /^(?:#)/ , /^(?:px\b)/ , /^(?:pt\b)/ , /^(?:dot\b)/ , /^(?::)/ , /^(?:;)/ , /^(?:,)/ , /^(?:=)/ , /^(?:\*)/ , /^(?:\.)/ , /^(?:<)/ , /^(?:>)/ , /^(?:--[x])/ , /^(?:-->)/ , /^(?:--[o])/ , /^(?:---)/ , /^(?:-)/ , /^(?:\+)/ , /^(?:=)/ , /^(?:[a-zåäöæøA-ZÅÄÖÆØ_]+)/ , /^(?:\|)/ , /^(?:\()/ , /^(?:\))/ , /^(?:\[)/ , /^(?:\])/ , /^(?:\{)/ , /^(?:\})/ , /^(?:\s)/ , /^(?:\n)/ , /^(?:$)/ ] ,
conditions : { "INITIAL" : { "rules" : [ 0 , 1 , 2 , 3 , 4 , 5 , 6 , 7 , 8 , 9 , 10 , 11 , 12 , 13 , 14 , 15 , 16 , 17 , 18 , 19 , 20 , 21 , 22 , 23 , 24 , 25 , 26 , 27 , 28 , 29 , 30 , 31 , 32 , 33 , 34 , 35 , 36 , 37 , 38 , 39 , 40 , 41 ] , "inclusive" : true } }
} ) ;
return lexer ;
} ) ( ) ;
parser . lexer = lexer ;
function Parser ( ) {
this . yy = { } ;
}
Parser . prototype = parser ; parser . Parser = Parser ;
return new Parser ;
} ) ( ) ;
if ( typeof require !== 'undefined' && typeof exports !== 'undefined' ) {
exports . parser = parser ;
exports . Parser = parser . Parser ;
exports . parse = function ( ) { return parser . parse . apply ( parser , arguments ) ; } ;
exports . main = function commonjsMain ( args ) {
if ( ! args [ 1 ] ) {
console . log ( 'Usage: ' + args [ 0 ] + ' FILE' ) ;
process . exit ( 1 ) ;
}
var source = require ( 'fs' ) . readFileSync ( require ( 'path' ) . normalize ( args [ 1 ] ) , "utf8" ) ;
return exports . parser . parse ( source ) ;
} ;
if ( typeof module !== 'undefined' && require . main === module ) {
exports . main ( process . argv . slice ( 1 ) ) ;
}
}
} ) . call ( this , require ( "1YiZ5S" ) )
} , { "1YiZ5S" : 3 , "fs" : 1 , "path" : 2 } ] } , { } , [ 4 ] )