Mirror of https://github.com/mermaid-js/mermaid.git (synced 2025-01-28 07:03:17 +08:00)

Commit 07de090723 (parent 3809732e48)

fix: move from objects to Maps
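The diff below converts several diagram databases (flowchart, block, class and ER) from plain objects used as dictionaries to `Map` instances. As a rough sketch of the recurring idiom — simplified names, not code taken verbatim from this commit:

```typescript
// Illustrative sketch only: the real commit applies this pattern across several diagram DBs.
interface ClassDef {
  id: string;
  styles: string[];
  textStyles: string[];
}

// Before: a plain object keyed by id
//   let classes: Record<string, ClassDef> = {};
//   if (classes[id] === undefined) { classes[id] = { id, styles: [], textStyles: [] }; }
//   const found = classes[id];

// After: a Map with explicit has/get/set, plus size and keys() for iteration
let classes = new Map<string, ClassDef>();

function addStyleClass(id: string, styleAttributes = ''): void {
  if (!classes.has(id)) {
    classes.set(id, { id, styles: [], textStyles: [] });
  }
  const foundClass = classes.get(id)!; // non-null: ensured by the set() above
  for (const attrib of styleAttributes.split(',')) {
    const style = attrib.trim().replace(/;$/, ''); // drop any trailing ;
    if (style) {
      foundClass.styles.push(style);
    }
  }
}

// The lookups change mechanically:
//   Object.keys(obj)        -> [...map.keys()]
//   Object.values(obj)      -> [...map.values()]
//   Object.keys(obj).length -> map.size
//   obj.hasOwnProperty(key) -> map.has(key)
```

The tests follow the same mechanical substitutions, e.g. `expect(entities.size).toBe(2)` instead of `expect(Object.keys(entities).length).toBe(2)`.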
@@ -194,7 +194,7 @@ the string with all the user styles
 | :---------- | :-------------------------------------------------------- |
 | `config` | `MermaidConfig` |
 | `graphType` | `string` |
-| `classDefs` | `undefined` \| `Record`<`string`, `DiagramStyleClassDef`> |
+| `classDefs` | `undefined` \| `Map`<`string`, `DiagramStyleClassDef`> |
 | `svgId` | `string` |

 #### Returns
@@ -41,12 +41,12 @@ let nodeDb = {};
 export const addVertices = async function (vert, svgId, root, doc, diagObj, parentLookupDb, graph) {
   const svg = root.select(`[id="${svgId}"]`);
   const nodes = svg.insert('g').attr('class', 'nodes');
-  const keys = Object.keys(vert);
+  const keys = [...vert.keys()];

   // Iterate through each item in the vertex object (containing all the vertices found) in the graph definition
   await Promise.all(
     keys.map(async function (id) {
-      const vertex = vert[id];
+      const vertex = vert.get(id);

       /**
        * Variable for storing the classes for the vertex
@@ -595,7 +595,7 @@ const addMarkersToEdge = function (svgPath, edgeData, diagramType, arrowMarkerAb
 *
 * @param text
 * @param diagObj
- * @returns {Record<string, import('../../mermaid/src/diagram-api/types.js').DiagramStyleClassDef>} ClassDef styles
+ * @returns {Map<string, import('../../mermaid/src/diagram-api/types.js').DiagramStyleClassDef>} ClassDef styles
 */
 export const getClasses = function (text, diagObj) {
   log.info('Extracting classes');
@@ -70,7 +70,7 @@ export interface DiagramRenderer {
   getClasses?: (
     text: string,
     diagram: Pick<DiagramDefinition, 'db'>
-  ) => Record<string, DiagramStyleClassDef>;
+  ) => Map<string, DiagramStyleClassDef>;
 }

 export interface DiagramDefinition {
@@ -6,16 +6,16 @@ import { clear as commonClear } from '../common/commonDb.js';
 import type { Block, ClassDef } from './blockTypes.js';

 // Initialize the node database for simple lookups
-let blockDatabase: Record<string, Block> = {};
+let blockDatabase: Map<string, Block> = new Map();
 let edgeList: Block[] = [];
-let edgeCount: Record<string, number> = {};
+let edgeCount: Map<string, number> = new Map();

 const COLOR_KEYWORD = 'color';
 const FILL_KEYWORD = 'fill';
 const BG_FILL = 'bgFill';
 const STYLECLASS_SEP = ',';

-let classes = {} as Record<string, ClassDef>;
+let classes: Map<string, ClassDef> = new Map();

 /**
  * Called when the parser comes across a (style) class definition
@@ -26,10 +26,10 @@ let classes = {} as Record<string, ClassDef>;
 */
 export const addStyleClass = function (id: string, styleAttributes = '') {
   // create a new style class object with this id
-  if (classes[id] === undefined) {
-    classes[id] = { id: id, styles: [], textStyles: [] }; // This is a classDef
+  if (!classes.has(id)) {
+    classes.set(id, { id: id, styles: [], textStyles: [] }); // This is a classDef
   }
-  const foundClass = classes[id];
+  const foundClass = classes.get(id)!;
   if (styleAttributes !== undefined && styleAttributes !== null) {
     styleAttributes.split(STYLECLASS_SEP).forEach((attrib) => {
       // remove any trailing ;
@@ -54,7 +54,7 @@ export const addStyleClass = function (id: string, styleAttributes = '') {
 * @param styles - the string with 1 or more style attributes (each separated by a comma)
 */
 export const addStyle2Node = function (id: string, styles = '') {
-  const foundBlock = blockDatabase[id];
+  const foundBlock = blockDatabase.get(id)!;
   if (styles !== undefined && styles !== null) {
     foundBlock.styles = styles.split(STYLECLASS_SEP);
   }
@@ -70,16 +70,16 @@ export const addStyle2Node = function (id: string, styles = '') {
 */
 export const setCssClass = function (itemIds: string, cssClassName: string) {
   itemIds.split(',').forEach(function (id: string) {
-    let foundBlock = blockDatabase[id];
+    let foundBlock = blockDatabase.get(id);
     if (foundBlock === undefined) {
       const trimmedId = id.trim();
-      blockDatabase[trimmedId] = { id: trimmedId, type: 'na', children: [] } as Block;
-      foundBlock = blockDatabase[trimmedId];
+      blockDatabase.set(trimmedId, { id: trimmedId, type: 'na', children: [] } as Block);
+      foundBlock = blockDatabase.get(trimmedId);
     }
-    if (!foundBlock.classes) {
-      foundBlock.classes = [];
+    if (!foundBlock!.classes) {
+      foundBlock!.classes = [];
     }
-    foundBlock.classes.push(cssClassName);
+    foundBlock!.classes.push(cssClassName);
   });
 };

@@ -104,12 +104,12 @@ const populateBlockDatabase = (_blockList: Block[] | Block[][], parent: Block):
     if (block.type === 'column-setting') {
       parent.columns = block.columns || -1;
     } else if (block.type === 'edge') {
-      if (edgeCount[block.id]) {
-        edgeCount[block.id]++;
+      if (edgeCount.has(block.id)) {
+        edgeCount.set(block.id, edgeCount.get(block.id)! + 1);
       } else {
-        edgeCount[block.id] = 1;
+        edgeCount.set(block.id, 1);
       }
-      block.id = edgeCount[block.id] + '-' + block.id;
+      block.id = edgeCount.get(block.id)! + '-' + block.id;
       edgeList.push(block);
     } else {
       if (!block.label) {
@@ -120,16 +120,17 @@ const populateBlockDatabase = (_blockList: Block[] | Block[][], parent: Block):
           block.label = block.id;
         }
       }
-      const newBlock = !blockDatabase[block.id];
-      if (newBlock) {
-        blockDatabase[block.id] = block;
+      const isCreatingBlock = !blockDatabase.has(block.id);
+
+      if (isCreatingBlock) {
+        blockDatabase.set(block.id, block);
       } else {
         // Add newer relevant data to aggregated node
         if (block.type !== 'na') {
-          blockDatabase[block.id].type = block.type;
+          blockDatabase.get(block.id)!.type = block.type;
         }
         if (block.label !== block.id) {
-          blockDatabase[block.id].label = block.label;
+          blockDatabase.get(block.id)!.label = block.label;
         }
       }

@@ -142,10 +143,10 @@ const populateBlockDatabase = (_blockList: Block[] | Block[][], parent: Block):
         for (let j = 0; j < w; j++) {
           const newBlock = clone(block);
           newBlock.id = newBlock.id + '-' + j;
-          blockDatabase[newBlock.id] = newBlock;
+          blockDatabase.set(newBlock.id, newBlock);
           children.push(newBlock);
         }
-      } else if (newBlock) {
+      } else if (isCreatingBlock) {
         children.push(block);
       }
     }
@@ -160,12 +161,12 @@ const clear = (): void => {
   log.debug('Clear called');
   commonClear();
   rootBlock = { id: 'root', type: 'composite', children: [], columns: -1 } as Block;
-  blockDatabase = { root: rootBlock };
+  blockDatabase = new Map([['root', rootBlock]]);
   blocks = [] as Block[];
-  classes = {} as Record<string, ClassDef>;
+  classes = new Map();

   edgeList = [];
-  edgeCount = {};
+  edgeCount = new Map();
 };

 export function typeStr2Type(typeStr: string) {
@@ -241,7 +242,7 @@ const setHierarchy = (block: Block[]): void => {
 };

 const getColumns = (blockId: string): number => {
-  const block = blockDatabase[blockId];
+  const block = blockDatabase.get(blockId);
   if (!block) {
     return -1;
   }
@@ -259,7 +260,7 @@ const getColumns = (blockId: string): number => {
 * @returns
 */
 const getBlocksFlat = () => {
-  return [...Object.values(blockDatabase)];
+  return [...blockDatabase.values()];
 };
 /**
  * Returns the the hierarchy of blocks
@@ -273,11 +274,11 @@ const getEdges = () => {
   return edgeList;
 };
 const getBlock = (id: string) => {
-  return blockDatabase[id];
+  return blockDatabase.get(id);
 };

 const setBlock = (block: Block) => {
-  blockDatabase[block.id] = block;
+  blockDatabase.set(block.id, block);
 };

 const getLogger = () => console;
@@ -388,7 +388,7 @@ describe('Block diagram', function () {
     const mc = blocks[0];
     expect(mc.classes).toContain('black');
     const classes = db.getClasses();
-    const black = classes.black;
+    const black = classes.get('black')!;
     expect(black.id).toBe('black');
     expect(black.styles[0]).toEqual('color:#ffffff');
   });
@@ -26,10 +26,10 @@ import type {
 const MERMAID_DOM_ID_PREFIX = 'classId-';

 let relations: ClassRelation[] = [];
-let classes: ClassMap = {};
+let classes: Map<string, ClassNode> = new Map();
 let notes: ClassNote[] = [];
 let classCounter = 0;
-let namespaces: NamespaceMap = {};
+let namespaces: Map<string, NamespaceNode> = new Map();
 let namespaceCounter = 0;

 let functions: any[] = [];
@@ -57,7 +57,7 @@ export const setClassLabel = function (_id: string, label: string) {
   }

   const { className } = splitClassNameAndType(id);
-  classes[className].label = label;
+  classes.get(className)!.label = label;
 };

 /**
@@ -70,13 +70,13 @@ export const addClass = function (_id: string) {
   const id = common.sanitizeText(_id, getConfig());
   const { className, type } = splitClassNameAndType(id);
   // Only add class if not exists
-  if (Object.hasOwn(classes, className)) {
+  if (classes.has(className)) {
     return;
   }
   // alert('Adding class: ' + className);
   const name = common.sanitizeText(className, getConfig());
   // alert('Adding class after: ' + name);
-  classes[name] = {
+  classes.set(name, {
     id: name,
     type: type,
     label: name,
@@ -86,7 +86,7 @@ export const addClass = function (_id: string) {
     annotations: [],
     styles: [],
     domId: MERMAID_DOM_ID_PREFIX + name + '-' + classCounter,
-  } as ClassNode;
+  } as ClassNode);

   classCounter++;
 };
@@ -99,25 +99,25 @@ export const addClass = function (_id: string) {
 */
 export const lookUpDomId = function (_id: string): string {
   const id = common.sanitizeText(_id, getConfig());
-  if (id in classes) {
-    return classes[id].domId;
+  if (classes.has(id)) {
+    return classes.get(id)!.domId;
   }
   throw new Error('Class not found: ' + id);
 };

 export const clear = function () {
   relations = [];
-  classes = {};
+  classes = new Map();
   notes = [];
   functions = [];
   functions.push(setupToolTips);
-  namespaces = {};
+  namespaces = new Map();
   namespaceCounter = 0;
   commonClear();
 };

 export const getClass = function (id: string): ClassNode {
-  return classes[id];
+  return classes.get(id)!;
 };

 export const getClasses = function (): ClassMap {
@@ -157,7 +157,7 @@ export const addRelation = function (relation: ClassRelation) {
 */
 export const addAnnotation = function (className: string, annotation: string) {
   const validatedClassName = splitClassNameAndType(className).className;
-  classes[validatedClassName].annotations.push(annotation);
+  classes.get(validatedClassName)!.annotations.push(annotation);
 };

 /**
@@ -173,7 +173,7 @@ export const addMember = function (className: string, member: string) {
   addClass(className);

   const validatedClassName = splitClassNameAndType(className).className;
-  const theClass = classes[validatedClassName];
+  const theClass = classes.get(validatedClassName)!;

   if (typeof member === 'string') {
     // Member can contain white spaces, we trim them out
@@ -226,8 +226,8 @@ export const setCssClass = function (ids: string, className: string) {
     if (_id[0].match(/\d/)) {
       id = MERMAID_DOM_ID_PREFIX + id;
     }
-    if (classes[id] !== undefined) {
-      classes[id].cssClasses.push(className);
+    if (classes.has(id)) {
+      classes.get(id)!.cssClasses.push(className);
     }
   });
 };
@@ -241,17 +241,17 @@ export const setCssClass = function (ids: string, className: string) {
 const setTooltip = function (ids: string, tooltip?: string) {
   ids.split(',').forEach(function (id) {
     if (tooltip !== undefined) {
-      classes[id].tooltip = sanitizeText(tooltip);
+      classes.get(id)!.tooltip = sanitizeText(tooltip);
     }
   });
 };

 export const getTooltip = function (id: string, namespace?: string) {
-  if (namespace) {
-    return namespaces[namespace].classes[id].tooltip;
+  if (namespace && namespaces.has(namespace)) {
+    return namespaces.get(namespace)!.classes.get(id)!.tooltip;
   }

-  return classes[id].tooltip;
+  return classes.get(id)!.tooltip;
 };

 /**
@@ -268,14 +268,15 @@ export const setLink = function (ids: string, linkStr: string, target: string) {
     if (_id[0].match(/\d/)) {
       id = MERMAID_DOM_ID_PREFIX + id;
     }
-    if (classes[id] !== undefined) {
-      classes[id].link = utils.formatUrl(linkStr, config);
+    if (classes.has(id)) {
+      const theClass = classes.get(id)!;
+      theClass.link = utils.formatUrl(linkStr, config);
       if (config.securityLevel === 'sandbox') {
-        classes[id].linkTarget = '_top';
+        theClass.linkTarget = '_top';
       } else if (typeof target === 'string') {
-        classes[id].linkTarget = sanitizeText(target);
+        theClass.linkTarget = sanitizeText(target);
       } else {
-        classes[id].linkTarget = '_blank';
+        theClass.linkTarget = '_blank';
       }
     }
   });
@@ -292,7 +293,7 @@ export const setLink = function (ids: string, linkStr: string, target: string) {
 export const setClickEvent = function (ids: string, functionName: string, functionArgs: string) {
   ids.split(',').forEach(function (id) {
     setClickFunc(id, functionName, functionArgs);
-    classes[id].haveCallback = true;
+    classes.get(id)!.haveCallback = true;
   });
   setCssClass(ids, 'clickable');
 };
@@ -308,7 +309,7 @@ const setClickFunc = function (_domId: string, functionName: string, functionArg
   }

   const id = domId;
-  if (classes[id] !== undefined) {
+  if (classes.has(id)) {
     const elemId = lookUpDomId(id);
     let argList: string[] = [];
     if (typeof functionArgs === 'string') {
@@ -417,22 +418,22 @@ const setDirection = (dir: string) => {
 * @public
 */
 export const addNamespace = function (id: string) {
-  if (namespaces[id] !== undefined) {
+  if (namespaces.has(id)) {
     return;
   }

-  namespaces[id] = {
+  namespaces.set(id, {
     id: id,
-    classes: {},
+    classes: new Map(),
     children: {},
     domId: MERMAID_DOM_ID_PREFIX + id + '-' + namespaceCounter,
-  } as NamespaceNode;
+  } as NamespaceNode);

   namespaceCounter++;
 };

 const getNamespace = function (name: string): NamespaceNode {
-  return namespaces[name];
+  return namespaces.get(name)!;
 };

 const getNamespaces = function (): NamespaceMap {
@@ -447,18 +448,18 @@ const getNamespaces = function (): NamespaceMap {
 * @public
 */
 export const addClassesToNamespace = function (id: string, classNames: string[]) {
-  if (namespaces[id] === undefined) {
+  if (!namespaces.has(id)) {
     return;
   }
   for (const name of classNames) {
     const { className } = splitClassNameAndType(name);
-    classes[className].parent = id;
-    namespaces[id].classes[className] = classes[className];
+    classes.get(className)!.parent = id;
+    namespaces.get(id)!.classes.set(className, classes.get(className)!);
   }
 };

 export const setCssStyle = function (id: string, styles: string[]) {
-  const thisClass = classes[id];
+  const thisClass = classes.get(id);
   if (!styles || !thisClass) {
     return;
   }
@@ -2,6 +2,7 @@
 import { parser } from './parser/classDiagram.jison';
 import classDb from './classDb.js';
 import { vi, describe, it, expect } from 'vitest';
+import type { ClassMap, NamespaceNode } from './classTypes.js';
 const spyOn = vi.spyOn;

 const staticCssStyle = 'text-decoration:underline;';
@@ -392,8 +393,8 @@ class C13["With Città foreign language"]
 Student "1" --o "1" IdCard : carries
 Student "1" --o "1" Bike : rides`);

-    expect(Object.keys(classDb.getClasses()).length).toBe(3);
-    expect(classDb.getClasses().Student).toMatchInlineSnapshot(`
+    expect(classDb.getClasses().size).toBe(3);
+    expect(classDb.getClasses().get('Student')).toMatchInlineSnapshot(`
       {
         "annotations": [],
         "cssClasses": [],
@@ -1539,12 +1540,12 @@ class Class2
 }`;
       parser.parse(str);

-      const testNamespace = parser.yy.getNamespace('Namespace1');
-      const testClasses = parser.yy.getClasses();
-      expect(Object.keys(testNamespace.classes).length).toBe(2);
+      const testNamespace: NamespaceNode = parser.yy.getNamespace('Namespace1');
+      const testClasses: ClassMap = parser.yy.getClasses();
+      expect(testNamespace.classes.size).toBe(2);
       expect(Object.keys(testNamespace.children).length).toBe(0);
-      expect(testNamespace.classes['Class1'].id).toBe('Class1');
-      expect(Object.keys(testClasses).length).toBe(2);
+      expect(testNamespace.classes.get('Class1')?.id).toBe('Class1');
+      expect(testClasses.size).toBe(2);
     });

     it('should add relations between classes of different namespaces', function () {
@@ -1573,25 +1574,25 @@ class Class2
       const testNamespaceB = parser.yy.getNamespace('B');
       const testClasses = parser.yy.getClasses();
       const testRelations = parser.yy.getRelations();
-      expect(Object.keys(testNamespaceA.classes).length).toBe(2);
-      expect(testNamespaceA.classes['A1'].members[0].getDisplayDetails().displayText).toBe(
+      expect(testNamespaceA.classes.size).toBe(2);
+      expect(testNamespaceA.classes.get('A1').members[0].getDisplayDetails().displayText).toBe(
        '+foo : string'
       );
-      expect(testNamespaceA.classes['A2'].members[0].getDisplayDetails().displayText).toBe(
+      expect(testNamespaceA.classes.get('A2').members[0].getDisplayDetails().displayText).toBe(
        '+bar : int'
       );
-      expect(Object.keys(testNamespaceB.classes).length).toBe(2);
-      expect(testNamespaceB.classes['B1'].members[0].getDisplayDetails().displayText).toBe(
+      expect(testNamespaceB.classes.size).toBe(2);
+      expect(testNamespaceB.classes.get('B1').members[0].getDisplayDetails().displayText).toBe(
        '+foo : bool'
       );
-      expect(testNamespaceB.classes['B2'].members[0].getDisplayDetails().displayText).toBe(
+      expect(testNamespaceB.classes.get('B2').members[0].getDisplayDetails().displayText).toBe(
        '+bar : float'
       );
-      expect(Object.keys(testClasses).length).toBe(4);
-      expect(testClasses['A1'].parent).toBe('A');
-      expect(testClasses['A2'].parent).toBe('A');
-      expect(testClasses['B1'].parent).toBe('B');
-      expect(testClasses['B2'].parent).toBe('B');
+      expect(testClasses.size).toBe(4);
+      expect(testClasses.get('A1').parent).toBe('A');
+      expect(testClasses.get('A2').parent).toBe('A');
+      expect(testClasses.get('B1').parent).toBe('B');
+      expect(testClasses.get('B2').parent).toBe('B');
       expect(testRelations[0].id1).toBe('A1');
       expect(testRelations[0].id2).toBe('B1');
       expect(testRelations[1].id1).toBe('A2');
@@ -44,13 +44,13 @@ export const addNamespaces = function (
   _id: string,
   diagObj: any
 ) {
-  const keys = Object.keys(namespaces);
+  const keys = [...namespaces.keys()];
   log.info('keys:', keys);
   log.info(namespaces);

   // Iterate through each item in the vertex object (containing all the vertices found) in the graph definition
   keys.forEach(function (id) {
-    const vertex = namespaces[id];
+    const vertex = namespaces.get(id)!;

     // parent node must be one of [rect, roundedWithTitle, noteGroup, divider]
     const shape = 'rect';
@@ -89,15 +89,15 @@ export const addClasses = function (
   diagObj: any,
   parent?: string
 ) {
-  const keys = Object.keys(classes);
+  const keys = [...classes.keys()];
   log.info('keys:', keys);
   log.info(classes);

   // Iterate through each item in the vertex object (containing all the vertices found) in the graph definition
   keys
-    .filter((id) => classes[id].parent == parent)
+    .filter((id) => classes.get(id)!.parent == parent)
     .forEach(function (id) {
-      const vertex = classes[id];
+      const vertex = classes.get(id)!;

       /**
        * Variable for storing the classes for the vertex
@@ -346,7 +346,7 @@ export const draw = async function (text: string, id: string, _version: string,
   }
   const root =
     securityLevel === 'sandbox'
-      ? select(sandboxElement.nodes()[0].contentDocument.body)
+      ? select(sandboxElement!.nodes()[0]!.contentDocument.body)
       : select('body');
   const svg = root.select(`[id="${id}"]`);

@@ -366,7 +366,8 @@ export const draw = async function (text: string, id: string, _version: string,

   // Add label rects for non html labels
   if (!conf?.htmlLabels) {
-    const doc = securityLevel === 'sandbox' ? sandboxElement.nodes()[0].contentDocument : document;
+    const doc =
+      securityLevel === 'sandbox' ? sandboxElement!.nodes()[0]!.contentDocument : document;
     const labels = doc.querySelectorAll('[id="' + id + '"] .edgeLabel .label');
     for (const label of labels) {
       // Get dimensions of label
@@ -175,10 +175,10 @@ export const draw = function (text, id, _version, diagObj) {
   });

   const classes = diagObj.db.getClasses();
-  const keys = Object.keys(classes);
+  const keys = [...classes.keys()];

   for (const key of keys) {
-    const classDef = classes[key];
+    const classDef = classes.get(key);
     const node = svgDraw.drawClass(diagram, classDef, conf, diagObj);
     idCache[node.id] = node;

@@ -161,5 +161,5 @@ export interface NamespaceNode {
   children: NamespaceMap;
 }

-export type ClassMap = Record<string, ClassNode>;
-export type NamespaceMap = Record<string, NamespaceNode>;
+export type ClassMap = Map<string, ClassNode>;
+export type NamespaceMap = Map<string, NamespaceNode>;
@@ -11,7 +11,7 @@ import {
   getDiagramTitle,
 } from '../common/commonDb.js';

-let entities = {};
+let entities = new Map();
 let relationships = [];

 const Cardinality = {
@@ -28,15 +28,15 @@ const Identification = {
 };

 const addEntity = function (name, alias = undefined) {
-  if (entities[name] === undefined) {
-    entities[name] = { attributes: [], alias: alias };
+  if (!entities.has(name)) {
+    entities.set(name, { attributes: [], alias: alias });
     log.info('Added new entity :', name);
-  } else if (entities[name] && !entities[name].alias && alias) {
-    entities[name].alias = alias;
+  } else if (entities.has(name) && !entities.get(name).alias && alias) {
+    entities.get(name).alias = alias;
     log.info(`Add alias '${alias}' to entity '${name}'`);
   }

-  return entities[name];
+  return entities.get(name);
 };

 const getEntities = () => entities;
@@ -75,7 +75,7 @@ const addRelationship = function (entA, rolA, entB, rSpec) {
 const getRelationships = () => relationships;

 const clear = function () {
-  entities = {};
+  entities = new Map();
   relationships = [];
   commonClear();
 };
@@ -301,7 +301,7 @@ const drawAttributes = (groupNode, entityTextNode, attributes) => {
 * @returns {object} The first entity that was inserted
 */
 const drawEntities = function (svgNode, entities, graph) {
-  const keys = Object.keys(entities);
+  const keys = [...entities.keys()];
   let firstOne;

   keys.forEach(function (entityName) {
@@ -326,12 +326,12 @@ const drawEntities = function (svgNode, entities, graph) {
       .style('text-anchor', 'middle')
       .style('font-family', getConfig().fontFamily)
       .style('font-size', conf.fontSize + 'px')
-      .text(entities[entityName].alias ?? entityName);
+      .text(entities.get(entityName).alias ?? entityName);

     const { width: entityWidth, height: entityHeight } = drawAttributes(
       groupNode,
       textNode,
-      entities[entityName].attributes
+      entities.get(entityName).attributes
     );

     // Draw the rectangle - insert it before the text so that the text is not obscured
@ -17,7 +17,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
const line2 = 'MAINLAND';
|
||||
erDiagram.parser.parse(`erDiagram\n${line1}\n${line2}`);
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(erDb.getRelationships().length).toBe(0);
|
||||
});
|
||||
|
||||
@ -27,7 +27,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
expect(() => {
|
||||
erDiagram.parser.parse(`erDiagram\n ${name}\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(name)).toBe(false);
|
||||
expect(entities.has(name)).toBe(false);
|
||||
}).toThrow();
|
||||
});
|
||||
describe('has non A-Za-z0-9_- chars', function () {
|
||||
@ -47,7 +47,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
expect(() => {
|
||||
erDiagram.parser.parse(`erDiagram\n ${name}\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(name)).toBe(false);
|
||||
expect(entities.has(name)).toBe(false);
|
||||
}).toThrow();
|
||||
});
|
||||
|
||||
@ -55,21 +55,21 @@ describe('when parsing ER diagram it...', function () {
|
||||
const name = singleOccurrence;
|
||||
erDiagram.parser.parse(`erDiagram\n "${name}"\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(name)).toBe(true);
|
||||
expect(entities.has(name)).toBe(true);
|
||||
});
|
||||
|
||||
it(`"${repeatedOccurrence}" repeated occurrence`, function () {
|
||||
const name = repeatedOccurrence;
|
||||
erDiagram.parser.parse(`erDiagram\n "${name}"\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(name)).toBe(true);
|
||||
expect(entities.has(name)).toBe(true);
|
||||
});
|
||||
|
||||
it(`"${singleOccurrence}" ends with`, function () {
|
||||
const name = endsWith;
|
||||
erDiagram.parser.parse(`erDiagram\n "${name}"\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(name)).toBe(true);
|
||||
expect(entities.has(name)).toBe(true);
|
||||
});
|
||||
|
||||
it(`"${cannontStartWith}" cannot start with the character`, function () {
|
||||
@ -77,7 +77,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
expect(() => {
|
||||
erDiagram.parser.parse(`erDiagram\n "${name}"\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(name)).toBe(false);
|
||||
expect(entities.has(name)).toBe(false);
|
||||
}).toThrow();
|
||||
});
|
||||
});
|
||||
@ -88,7 +88,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
const name = 'a' + allCombined;
|
||||
erDiagram.parser.parse(`erDiagram\n "${name}"\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(name)).toBe(true);
|
||||
expect(entities.has(name)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
@ -96,14 +96,14 @@ describe('when parsing ER diagram it...', function () {
|
||||
expect(() => {
|
||||
erDiagram.parser.parse(`erDiagram\n "Blo%rf"\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(name)).toBe(false);
|
||||
expect(entities.has(name)).toBe(false);
|
||||
}).toThrow();
|
||||
});
|
||||
it('cannot contain \\ because it could start and escape code', function () {
|
||||
expect(() => {
|
||||
erDiagram.parser.parse(`erDiagram\n "Blo\\rf"\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(name)).toBe(false);
|
||||
expect(entities.has(name)).toBe(false);
|
||||
}).toThrow();
|
||||
});
|
||||
|
||||
@ -114,7 +114,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
expect(() => {
|
||||
erDiagram.parser.parse(`erDiagram\n "${badName}"\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(badName)).toBe(false);
|
||||
expect(entities.has(badName)).toBe(false);
|
||||
}).toThrow();
|
||||
});
|
||||
});
|
||||
@ -124,14 +124,14 @@ describe('when parsing ER diagram it...', function () {
|
||||
const beyondEnglishName = 'DUCK-àáâäæãåā';
|
||||
erDiagram.parser.parse(`erDiagram\n${beyondEnglishName}\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(beyondEnglishName)).toBe(true);
|
||||
expect(entities.has(beyondEnglishName)).toBe(true);
|
||||
});
|
||||
|
||||
it('can contain - _ without needing ""', function () {
|
||||
const hyphensUnderscore = 'DUCK-BILLED_PLATYPUS';
|
||||
erDiagram.parser.parse(`erDiagram\n${hyphensUnderscore}\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(hyphensUnderscore)).toBe(true);
|
||||
expect(entities.has(hyphensUnderscore)).toBe(true);
|
||||
});
|
||||
|
||||
it('can have an alias', function () {
|
||||
@ -139,8 +139,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
const alias = 'bar';
|
||||
erDiagram.parser.parse(`erDiagram\n${entity}["${alias}"]\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(entity)).toBe(true);
|
||||
expect(entities[entity].alias).toBe(alias);
|
||||
expect(entities.has(entity)).toBe(true);
|
||||
expect(entities.get(entity).alias).toBe(alias);
|
||||
});
|
||||
|
||||
it('can have an alias even if the relationship is defined before class', function () {
|
||||
@ -151,10 +151,10 @@ describe('when parsing ER diagram it...', function () {
|
||||
`erDiagram\n${firstEntity} ||--o| ${secondEntity} : rel\nclass ${firstEntity}["${alias}"]\n`
|
||||
);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(firstEntity)).toBe(true);
|
||||
expect(entities.hasOwnProperty(secondEntity)).toBe(true);
|
||||
expect(entities[firstEntity].alias).toBe(alias);
|
||||
expect(entities[secondEntity].alias).toBeUndefined();
|
||||
expect(entities.has(firstEntity)).toBe(true);
|
||||
expect(entities.has(secondEntity)).toBe(true);
|
||||
expect(entities.get(firstEntity).alias).toBe(alias);
|
||||
expect(entities.get(secondEntity).alias).toBeUndefined();
|
||||
});
|
||||
|
||||
it('can have an alias even if the relationship is defined after class', function () {
|
||||
@ -165,17 +165,17 @@ describe('when parsing ER diagram it...', function () {
|
||||
`erDiagram\nclass ${firstEntity}["${alias}"]\n${firstEntity} ||--o| ${secondEntity} : rel\n`
|
||||
);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(firstEntity)).toBe(true);
|
||||
expect(entities.hasOwnProperty(secondEntity)).toBe(true);
|
||||
expect(entities[firstEntity].alias).toBe(alias);
|
||||
expect(entities[secondEntity].alias).toBeUndefined();
|
||||
expect(entities.has(firstEntity)).toBe(true);
|
||||
expect(entities.has(secondEntity)).toBe(true);
|
||||
expect(entities.get(firstEntity).alias).toBe(alias);
|
||||
expect(entities.get(secondEntity).alias).toBeUndefined();
|
||||
});
|
||||
|
||||
it('can start with an underscore', function () {
|
||||
const entity = '_foo';
|
||||
erDiagram.parser.parse(`erDiagram\n${entity}\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(entity)).toBe(true);
|
||||
expect(entities.has(entity)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
@ -191,11 +191,11 @@ describe('when parsing ER diagram it...', function () {
|
||||
);
|
||||
const entities = erDb.getEntities();
|
||||
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(3);
|
||||
expect(entities[entity].attributes[0].attributeName).toBe('myBookTitle');
|
||||
expect(entities[entity].attributes[1].attributeName).toBe('MYBOOKSUBTITLE_1');
|
||||
expect(entities[entity].attributes[2].attributeName).toBe('author-ref[name](1)');
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(3);
|
||||
expect(entities.get(entity).attributes[0].attributeName).toBe('myBookTitle');
|
||||
expect(entities.get(entity).attributes[1].attributeName).toBe('MYBOOKSUBTITLE_1');
|
||||
expect(entities.get(entity).attributes[2].attributeName).toBe('author-ref[name](1)');
|
||||
});
|
||||
|
||||
it('should allow asterisk at the start of attribute name', function () {
|
||||
@ -204,8 +204,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity}{\n${attribute}}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(1);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(1);
|
||||
});
|
||||
|
||||
it('should allow asterisks at the start of attribute declared with type and name', () => {
|
||||
@ -214,8 +214,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n${attribute}}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(1);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(1);
|
||||
});
|
||||
|
||||
it('should not allow leading numbers, dashes or brackets', function () {
|
||||
@ -236,8 +236,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n${attribute}\n}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(1);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(1);
|
||||
});
|
||||
|
||||
it('should allow an entity with a single attribute to be defined with a key', function () {
|
||||
@ -246,8 +246,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n${attribute}\n}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(1);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(1);
|
||||
});
|
||||
|
||||
it('should allow an entity with a single attribute to be defined with a comment', function () {
|
||||
@ -256,9 +256,9 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n${attribute}\n}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(1);
|
||||
expect(entities[entity].attributes[0].attributeComment).toBe('comment');
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(1);
|
||||
expect(entities.get(entity).attributes[0].attributeComment).toBe('comment');
|
||||
});
|
||||
|
||||
it('should allow an entity with a single attribute to be defined with a key and a comment', function () {
|
||||
@ -267,8 +267,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n${attribute}\n}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(1);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(1);
|
||||
});
|
||||
|
||||
it('should allow an entity with attribute starting with fk, pk or uk and a comment', function () {
|
||||
@ -282,7 +282,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
`erDiagram\n${entity} {\n${attribute1} \n\n${attribute2}\n${attribute3}\n${attribute4}\n}`
|
||||
);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities[entity].attributes.length).toBe(4);
|
||||
expect(entities.get(entity).attributes.length).toBe(4);
|
||||
});
|
||||
|
||||
it('should allow an entity with attributes that have many constraints and comments', function () {
|
||||
@ -297,14 +297,14 @@ describe('when parsing ER diagram it...', function () {
|
||||
`erDiagram\n${entity} {\n${attribute1}\n${attribute2}\n${attribute3}\n${attribute4}\n${attribute5}\n}`
|
||||
);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities[entity].attributes[0].attributeKeyTypeList).toEqual(['PK', 'FK']);
|
||||
expect(entities[entity].attributes[0].attributeComment).toBe('comment1');
|
||||
expect(entities[entity].attributes[1].attributeKeyTypeList).toEqual(['PK', 'UK', 'FK']);
|
||||
expect(entities[entity].attributes[2].attributeKeyTypeList).toEqual(['PK', 'UK']);
|
||||
expect(entities[entity].attributes[2].attributeComment).toBe('comment3');
|
||||
expect(entities[entity].attributes[3].attributeKeyTypeList).toBeUndefined();
|
||||
expect(entities[entity].attributes[4].attributeKeyTypeList).toBeUndefined();
|
||||
expect(entities[entity].attributes[4].attributeComment).toBe('comment5');
|
||||
expect(entities.get(entity).attributes[0].attributeKeyTypeList).toEqual(['PK', 'FK']);
|
||||
expect(entities.get(entity).attributes[0].attributeComment).toBe('comment1');
|
||||
expect(entities.get(entity).attributes[1].attributeKeyTypeList).toEqual(['PK', 'UK', 'FK']);
|
||||
expect(entities.get(entity).attributes[2].attributeKeyTypeList).toEqual(['PK', 'UK']);
|
||||
expect(entities.get(entity).attributes[2].attributeComment).toBe('comment3');
|
||||
expect(entities.get(entity).attributes[3].attributeKeyTypeList).toBeUndefined();
|
||||
expect(entities.get(entity).attributes[4].attributeKeyTypeList).toBeUndefined();
|
||||
expect(entities.get(entity).attributes[4].attributeComment).toBe('comment5');
|
||||
});
|
||||
|
||||
it('should allow an entity with attribute that has a generic type', function () {
|
||||
@ -317,8 +317,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
`erDiagram\n${entity} {\n${attribute1}\n${attribute2}\n${attribute3}\n}`
|
||||
);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(3);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(3);
|
||||
});
|
||||
|
||||
it('should allow an entity with attribute that is an array', function () {
|
||||
@ -328,8 +328,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n${attribute1}\n${attribute2}\n}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(2);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(2);
|
||||
});
|
||||
|
||||
it('should allow an entity with attribute that is a limited length string', function () {
|
||||
@ -339,10 +339,10 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n${attribute1}\n${attribute2}\n}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(2);
|
||||
expect(entities[entity].attributes[0].attributeType).toBe('character(10)');
|
||||
expect(entities[entity].attributes[1].attributeType).toBe('varchar(5)');
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(2);
|
||||
expect(entities.get(entity).attributes[0].attributeType).toBe('character(10)');
|
||||
expect(entities.get(entity).attributes[1].attributeType).toBe('varchar(5)');
|
||||
});
|
||||
|
||||
it('should allow an entity with multiple attributes to be defined', function () {
|
||||
@ -355,7 +355,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
`erDiagram\n${entity} {\n${attribute1}\n${attribute2}\n${attribute3}\n}`
|
||||
);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities[entity].attributes.length).toBe(3);
|
||||
expect(entities.get(entity).attributes.length).toBe(3);
|
||||
});
|
||||
|
||||
it('should allow attribute definitions to be split into multiple blocks', function () {
|
||||
@ -368,7 +368,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
`erDiagram\n${entity} {\n${attribute1}\n}\n${entity} {\n${attribute2}\n${attribute3}\n}`
|
||||
);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities[entity].attributes.length).toBe(3);
|
||||
expect(entities.get(entity).attributes.length).toBe(3);
|
||||
});
|
||||
|
||||
it('should allow an empty attribute block', function () {
|
||||
@ -376,8 +376,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty('BOOK')).toBe(true);
|
||||
expect(entities[entity].attributes.length).toBe(0);
|
||||
expect(entities.has('BOOK')).toBe(true);
|
||||
expect(entities.get(entity).attributes.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should allow an attribute block to start immediately after the entity name', function () {
|
||||
@ -385,8 +385,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity}{}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty('BOOK')).toBe(true);
|
||||
expect(entities[entity].attributes.length).toBe(0);
|
||||
expect(entities.has('BOOK')).toBe(true);
|
||||
expect(entities.get(entity).attributes.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should allow an attribute block to be separated from the entity name by spaces', function () {
|
||||
@ -394,8 +394,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty('BOOK')).toBe(true);
|
||||
expect(entities[entity].attributes.length).toBe(0);
|
||||
expect(entities.has('BOOK')).toBe(true);
|
||||
expect(entities.get(entity).attributes.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should allow whitespace before and after attribute definitions', function () {
|
||||
@ -404,8 +404,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n \n\n ${attribute}\n\n \n}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(1);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(1);
|
||||
});
|
||||
|
||||
it('should allow no whitespace before and after attribute definitions', function () {
|
||||
@ -414,8 +414,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity}{${attribute}}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(1);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(1);
|
||||
});
|
||||
|
||||
it('should associate two entities correctly', function () {
|
||||
@ -423,8 +423,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
const entities = erDb.getEntities();
|
||||
const relationships = erDb.getRelationships();
|
||||
|
||||
expect(entities.hasOwnProperty('CAR')).toBe(true);
|
||||
expect(entities.hasOwnProperty('DRIVER')).toBe(true);
|
||||
expect(entities.has('CAR')).toBe(true);
|
||||
expect(entities.has('DRIVER')).toBe(true);
|
||||
expect(relationships.length).toBe(1);
|
||||
expect(relationships[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
expect(relationships[0].relSpec.cardB).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
@ -437,7 +437,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse(`erDiagram\n${line1}\n${line2}`);
|
||||
const entities = erDb.getEntities();
|
||||
|
||||
expect(Object.keys(entities).length).toBe(3);
|
||||
expect(entities.size).toBe(3);
|
||||
});
|
||||
|
||||
it('should create the role specified', function () {
|
||||
@ -451,7 +451,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
it('should allow recursive relationships', function () {
|
||||
erDiagram.parser.parse('erDiagram\nNODE ||--o{ NODE : "leads to"');
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(1);
|
||||
expect(erDb.getEntities().size).toBe(1);
|
||||
});
|
||||
|
||||
describe('accessible title and description', () => {
|
||||
@ -491,7 +491,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
const entities = erDb.getEntities();
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(entities).length).toBe(2);
|
||||
expect(entities.size).toBe(2);
|
||||
expect(rels.length).toBe(2);
|
||||
});
|
||||
|
||||
@ -507,7 +507,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA ||--|{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
@ -517,7 +517,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA ||..o{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
@ -527,7 +527,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA |o..o{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
@ -537,7 +537,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA |o--|{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
@ -547,7 +547,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA }|--|| B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
@ -557,7 +557,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA }o--|| B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
@ -567,7 +567,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA }o..o| B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
@ -577,7 +577,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA }|..o| B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
@ -587,7 +587,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA |o..|| B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
@ -597,7 +597,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA ||..|| B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
@ -607,7 +607,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA ||--o| B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
@ -617,7 +617,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA |o..o| B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
@ -627,7 +627,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA }o--o{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
@ -637,7 +637,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA }|..|{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
@ -647,7 +647,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA }o--|{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
@ -657,7 +657,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA }|..o{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
@ -667,7 +667,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA one or zero to many B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
@ -677,7 +677,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA one or many optionally to zero or one B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
@ -687,7 +687,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA zero or more to zero or many B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
@ -697,7 +697,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA many(0) to many(1) B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
@ -707,7 +707,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA many optionally to one B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
@ -717,7 +717,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA only one optionally to 1+ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
@ -727,7 +727,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA 0+ optionally to 1 B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
@ -786,7 +786,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
it('should represent parent-child relationship correctly', function () {
|
||||
erDiagram.parser.parse('erDiagram\nPROJECT u--o{ TEAM_MEMBER : "parent"');
|
||||
const rels = erDb.getRelationships();
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.MD_PARENT);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
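For reference (not part of the diff): the ER assertions above change because erDb.getEntities() now returns a Map, so the entity count is read from .size instead of Object.keys(...).length. A minimal sketch of that pattern, using a hypothetical, simplified stand-in for the entity store:

    // Hypothetical, simplified stand-in for the ER database's entity store.
    interface EntityLike {
      id: string;
    }

    const entities = new Map<string, EntityLike>();
    entities.set('A', { id: 'A' });
    entities.set('B', { id: 'B' });

    // Before: Object.keys(entities).length with a plain-object store.
    // After: the Map tracks its own size.
    console.log(entities.size); // 2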
|
@@ -51,17 +51,17 @@ describe('flow db addClass', () => {
flowDb.addClass('a,b', ['stroke-width: 8px']);
const classes = flowDb.getClasses();

expect(classes.hasOwnProperty('a')).toBe(true);
expect(classes.hasOwnProperty('b')).toBe(true);
expect(classes['a']['styles']).toEqual(['stroke-width: 8px']);
expect(classes['b']['styles']).toEqual(['stroke-width: 8px']);
expect(classes.has('a')).toBe(true);
expect(classes.has('b')).toBe(true);
expect(classes.get('a')?.['styles']).toEqual(['stroke-width: 8px']);
expect(classes.get('b')?.['styles']).toEqual(['stroke-width: 8px']);
});

it('should detect single class', () => {
flowDb.addClass('a', ['stroke-width: 8px']);
const classes = flowDb.getClasses();

expect(classes.hasOwnProperty('a')).toBe(true);
expect(classes['a']['styles']).toEqual(['stroke-width: 8px']);
expect(classes.has('a')).toBe(true);
expect(classes.get('a')?.['styles']).toEqual(['stroke-width: 8px']);
});
});
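For reference (not part of the diff): with getClasses() returning a Map, the specs swap object property checks for Map methods. A minimal sketch of the two assertion styles, assuming a vitest-style expect and a hypothetical, simplified classDef shape:

    import { expect } from 'vitest'; // assumption: vitest is the test runner

    // Hypothetical, simplified shape of a flowchart classDef entry.
    interface FlowClassLike {
      id: string;
      styles: string[];
    }

    const classes = new Map<string, FlowClassLike>();
    classes.set('a', { id: 'a', styles: ['stroke-width: 8px'] });

    // Old object-based assertions (do not apply to a Map):
    //   classes.hasOwnProperty('a');  classes['a'].styles
    // New Map-based assertions; get() may return undefined, hence optional chaining:
    expect(classes.has('a')).toBe(true);
    expect(classes.get('a')?.styles).toEqual(['stroke-width: 8px']);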
|
@@ -17,12 +17,12 @@ import type { FlowVertex, FlowClass, FlowSubGraph, FlowText, FlowEdge, FlowLink
const MERMAID_DOM_ID_PREFIX = 'flowchart-';
let vertexCounter = 0;
let config = getConfig();
let vertices: Record<string, FlowVertex> = {};
let vertices: Map<string, FlowVertex> = new Map();
let edges: FlowEdge[] & { defaultInterpolate?: string; defaultStyle?: string[] } = [];
let classes: Record<string, FlowClass> = {};
let classes: Map<string, FlowClass> = new Map();
let subGraphs: FlowSubGraph[] = [];
let subGraphLookup: Record<string, FlowSubGraph> = {};
let tooltips: Record<string, string> = {};
let subGraphLookup: Map<string, FlowSubGraph> = new Map();
let tooltips: Map<string, string> = new Map();
let subCount = 0;
let firstGraphFlag = true;
let direction: string;
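For reference (not part of the diff): the keyed lookups move from Record<string, T> objects to Map<string, T>. A Map accepts any string id without colliding with Object.prototype members (for example a node literally named "constructor"), exposes size, and iterates in insertion order. A minimal sketch with a hypothetical, trimmed-down vertex type:

    // Hypothetical, trimmed-down vertex shape; the real FlowVertex has more fields.
    interface VertexLike {
      id: string;
      domId: string;
      styles: string[];
      classes: string[];
    }

    const vertices = new Map<string, VertexLike>();

    // Ids that are risky as plain-object keys are ordinary Map keys.
    vertices.set('constructor', {
      id: 'constructor',
      domId: 'flowchart-constructor-0',
      styles: [],
      classes: [],
    });

    console.log(vertices.has('toString')); // false - no prototype leakage
    console.log(vertices.size); // 1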
@@ -40,10 +40,10 @@ const sanitizeText = (txt: string) => common.sanitizeText(txt, config);
* @param id - id of the node
*/
export const lookUpDomId = function (id: string) {
const vertexKeys = Object.keys(vertices);
const vertexKeys = vertices.keys();
for (const vertexKey of vertexKeys) {
if (vertices[vertexKey].id === id) {
return vertices[vertexKey].domId;
if (vertices.get(vertexKey)!.id === id) {
return vertices.get(vertexKey)!.domId;
}
}
return id;
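For reference (not part of the diff): vertices.keys() returns an iterator, which for...of consumes directly, so the loop body only needs get(). Iterating entries is an alternative that avoids the repeated lookups; a minimal, self-contained sketch:

    // Minimal sketch: resolving a DOM id by scanning a Map of vertices.
    const vertices = new Map<string, { id: string; domId: string }>();
    vertices.set('A', { id: 'A', domId: 'flowchart-A-0' });

    function lookUp(id: string): string {
      // Iterating the Map yields [key, value] pairs in insertion order,
      // so no extra get(key) lookup is needed inside the loop.
      for (const [, vertex] of vertices) {
        if (vertex.id === id) {
          return vertex.domId;
        }
      }
      return id;
    }

    console.log(lookUp('A')); // 'flowchart-A-0'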
@@ -67,50 +67,52 @@ export const addVertex = function (
}
let txt;

if (vertices[id] === undefined) {
vertices[id] = {
if (!vertices.has(id)) {
vertices.set(id, {
id,
labelType: 'text',
domId: MERMAID_DOM_ID_PREFIX + id + '-' + vertexCounter,
styles: [],
classes: [],
};
});
}
vertexCounter++;
const vertex = vertices.get(id)!;

if (textObj !== undefined) {
config = getConfig();
txt = sanitizeText(textObj.text.trim());
vertices[id].labelType = textObj.type;
vertex.labelType = textObj.type;
// strip quotes if string starts and ends with a quote
if (txt[0] === '"' && txt[txt.length - 1] === '"') {
txt = txt.substring(1, txt.length - 1);
}
vertices[id].text = txt;
vertex.text = txt;
} else {
if (vertices[id].text === undefined) {
vertices[id].text = id;
if (vertex.text === undefined) {
vertex.text = id;
}
}
if (type !== undefined) {
vertices[id].type = type;
vertex.type = type;
}
if (style !== undefined && style !== null) {
style.forEach(function (s) {
vertices[id].styles.push(s);
vertex.styles.push(s);
});
}
if (classes !== undefined && classes !== null) {
classes.forEach(function (s) {
vertices[id].classes.push(s);
vertex.classes.push(s);
});
}
if (dir !== undefined) {
vertices[id].dir = dir;
vertex.dir = dir;
}
if (vertices[id].props === undefined) {
vertices[id].props = props;
if (vertex.props === undefined) {
vertex.props = props;
} else if (props !== undefined) {
Object.assign(vertices[id].props, props);
Object.assign(vertex.props, props);
}
};

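For reference (not part of the diff): addVertex now follows a get-or-create pattern - seed the entry when the key is missing, then work through a single vertex reference. The non-null assertion (!) is justified only because the set() immediately above guarantees the key exists. A minimal sketch with a hypothetical record type:

    interface NodeRecord {
      id: string;
      text?: string;
      styles: string[];
    }

    const nodes = new Map<string, NodeRecord>();

    function upsertNode(id: string, text?: string): NodeRecord {
      if (!nodes.has(id)) {
        // Create the entry once; later calls only enrich it.
        nodes.set(id, { id, styles: [] });
      }
      const node = nodes.get(id)!; // `!` is safe because of the set() above
      if (text !== undefined) {
        node.text = text;
      }
      return node;
    }

    upsertNode('A');
    upsertNode('A', 'Start');
    console.log(nodes.get('A')?.text); // 'Start'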
@@ -208,17 +210,17 @@ export const updateLink = function (positions: ('default' | number)[], style: st

export const addClass = function (ids: string, style: string[]) {
ids.split(',').forEach(function (id) {
if (classes[id] === undefined) {
classes[id] = { id, styles: [], textStyles: [] };
if (!classes.has(id)) {
classes.set(id, { id, styles: [], textStyles: [] });
}

if (style !== undefined && style !== null) {
style.forEach(function (s) {
if (s.match('color')) {
const newStyle = s.replace('fill', 'bgFill').replace('color', 'fill');
classes[id].textStyles.push(newStyle);
classes.get(id)!.textStyles.push(newStyle);
}
classes[id].styles.push(s);
classes.get(id)!.styles.push(s);
});
}
});
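For reference (not part of the diff): the style handling itself is unchanged - a style containing "color" is mirrored into textStyles with fill rewritten to bgFill and color rewritten to fill. A small worked example of that existing rule, shown against a hypothetical class entry:

    const exClass = { id: 'exClass', styles: [] as string[], textStyles: [] as string[] };

    for (const s of ['fill:#bbb', 'color:#ffffff']) {
      if (s.match('color')) {
        // 'color:#ffffff' has no 'fill' substring, so only the second replace fires,
        // and textStyles receives 'fill:#ffffff'.
        exClass.textStyles.push(s.replace('fill', 'bgFill').replace('color', 'fill'));
      }
      exClass.styles.push(s);
    }

    console.log(exClass.styles);     // ['fill:#bbb', 'color:#ffffff']
    console.log(exClass.textStyles); // ['fill:#ffffff']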
@@ -255,11 +257,11 @@ export const setDirection = function (dir: string) {
*/
export const setClass = function (ids: string, className: string) {
for (const id of ids.split(',')) {
if (vertices[id]) {
vertices[id].classes.push(className);
if (vertices.has(id)) {
vertices.get(id)!.classes.push(className);
}
if (subGraphLookup[id]) {
subGraphLookup[id].classes.push(className);
if (subGraphLookup.has(id)) {
subGraphLookup.get(id)!.classes.push(className);
}
}
};
@@ -270,7 +272,7 @@ const setTooltip = function (ids: string, tooltip: string) {
}
tooltip = sanitizeText(tooltip);
for (const id of ids.split(',')) {
tooltips[version === 'gen-1' ? lookUpDomId(id) : id] = tooltip;
tooltips.set(version === 'gen-1' ? lookUpDomId(id) : id, tooltip);
}
};

@@ -303,8 +305,8 @@ const setClickFun = function (id: string, functionName: string, functionArgs: st
argList.push(id);
}

if (vertices[id] !== undefined) {
vertices[id].haveCallback = true;
if (vertices.has(id)) {
vertices.get(id)!.haveCallback = true;
funs.push(function () {
const elem = document.querySelector(`[id="${domId}"]`);
if (elem !== null) {
@@ -329,17 +331,18 @@ const setClickFun = function (id: string, functionName: string, functionArgs: st
*/
export const setLink = function (ids: string, linkStr: string, target: string) {
ids.split(',').forEach(function (id) {
if (vertices[id] !== undefined) {
vertices[id].link = utils.formatUrl(linkStr, config);
vertices[id].linkTarget = target;
const vertex = vertices.get(id)!;
if (vertex !== undefined) {
vertex.link = utils.formatUrl(linkStr, config);
vertex.linkTarget = target;
}
});
setClass(ids, 'clickable');
};

export const getTooltip = function (id: string) {
if (tooltips.hasOwnProperty(id)) {
return tooltips[id];
if (tooltips.has(id)) {
return tooltips.get(id)!;
}
return undefined;
};
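For reference (not part of the diff): the tooltip registry follows the same conversion - set() replaces bracket assignment, and has()/get() replace the hasOwnProperty guard; get() already returns undefined on a miss. A minimal sketch:

    const tooltips = new Map<string, string>();

    tooltips.set('flowchart-A-0', 'Click for details');

    function getTooltip(id: string): string | undefined {
      // Map.get() returns undefined for missing keys, so a has() guard is optional here.
      return tooltips.get(id);
    }

    console.log(getTooltip('flowchart-A-0')); // 'Click for details'
    console.log(getTooltip('missing'));       // undefined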
@@ -435,14 +438,14 @@ funs.push(setupToolTips);
*
*/
export const clear = function (ver = 'gen-1') {
vertices = {};
classes = {};
vertices = new Map();
classes = new Map();
edges = [];
funs = [setupToolTips];
subGraphs = [];
subGraphLookup = {};
subGraphLookup = new Map();
subCount = 0;
tooltips = {};
tooltips = new Map();
firstGraphFlag = true;
version = ver;
config = getConfig();
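For reference (not part of the diff): clear() resets state by assigning fresh Map instances, mirroring the old "= {}" assignments; calling .clear() on the existing instances would be an equivalent alternative. A minimal sketch:

    let vertices = new Map<string, { id: string }>();
    let tooltips = new Map<string, string>();

    function clearAll(): void {
      // New instances, as in the diff; the old references become garbage.
      vertices = new Map();
      tooltips = new Map();
      // Equivalent alternative that keeps the same instances:
      // vertices.clear(); tooltips.clear();
    }

    vertices.set('A', { id: 'A' });
    clearAll();
    console.log(vertices.size); // 0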
@@ -516,7 +519,7 @@ export const addSubGraph = function (
// Remove the members in the new subgraph if they already belong to another subgraph
subGraph.nodes = makeUniq(subGraph, subGraphs).nodes;
subGraphs.push(subGraph);
subGraphLookup[id] = subGraph;
subGraphLookup.set(id, subGraph);
return id;
};

|
@@ -29,11 +29,11 @@ export const setConf = function (cnf) {
*/
export const addVertices = async function (vert, g, svgId, root, doc, diagObj) {
const svg = root.select(`[id="${svgId}"]`);
const keys = Object.keys(vert);
const keys = vert.keys();

// Iterate through each item in the vertex object (containing all the vertices found) in the graph definition
for (const id of keys) {
const vertex = vert[id];
const vertex = vert.get(id);

/**
* Variable for storing the classes for the vertex
@@ -336,7 +336,7 @@ export const addEdges = async function (edges, g, diagObj) {
*
* @param text
* @param diagObj
* @returns {Record<string, import('../../diagram-api/types.js').DiagramStyleClassDef>} ClassDef styles
* @returns {Map<string, import('../../diagram-api/types.js').DiagramStyleClassDef>} ClassDef styles
*/
export const getClasses = function (text, diagObj) {
return diagObj.db.getClasses();
@@ -463,9 +463,9 @@ export const draw = async function (text, id, _version, diagObj) {
}

// If node has a link, wrap it in an anchor SVG object.
const keys = Object.keys(vert);
keys.forEach(function (key) {
const vertex = vert[key];
const keys = [...vert.keys()];
keys.forEach((key) => {
const vertex = vert.get(key);

if (vertex.link) {
const node = select('#' + id + ' [id="' + key + '"]');
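For reference (not part of the diff): note the two shapes of keys() above - for...of can consume the raw iterator directly, while forEach needs an array, hence the [...vert.keys()] spread in the draw step. A minimal sketch of the difference:

    const vert = new Map<string, { id: string }>([
      ['A', { id: 'A' }],
      ['B', { id: 'B' }],
    ]);

    // Iterator form: fine with for...of.
    for (const id of vert.keys()) {
      console.log(id);
    }

    // Array form: needed for array methods such as forEach or map.
    [...vert.keys()].forEach((id) => console.log(id));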
|
@@ -265,7 +265,7 @@ export const addEdges = async function (edges, g, diagObj) {
*
* @param text
* @param diagObj
* @returns {Record<string, import('../../diagram-api/types.js').DiagramStyleClassDef>} ClassDef styles
* @returns {Map<string, import('../../diagram-api/types.js').DiagramStyleClassDef>} ClassDef styles
*/
export const getClasses = function (text, diagObj) {
log.info('Extracting classes');
@@ -452,9 +452,9 @@ export const draw = async function (text, id, _version, diagObj) {
setupGraphViewbox(g, svg, conf.diagramPadding, conf.useMaxWidth);

// If node has a link, wrap it in an anchor SVG object.
const keys = Object.keys(vert);
const keys = [...vert.keys()];
keys.forEach(function (key) {
const vertex = vert[key];
const vertex = vert.get(key);

if (vertex.link) {
const node = root.select('#' + id + ' [id="' + diagObj.db.lookUpDomId(key) + '"]');
|
@ -18,8 +18,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -38,8 +38,8 @@ describe('[Arrows] when parsing', () => {
|
||||
|
||||
expect(direction).toBe('LR');
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -58,8 +58,8 @@ describe('[Arrows] when parsing', () => {
|
||||
|
||||
expect(direction).toBe('RL');
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -78,8 +78,8 @@ describe('[Arrows] when parsing', () => {
|
||||
|
||||
expect(direction).toBe('BT');
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -99,8 +99,8 @@ describe('[Arrows] when parsing', () => {
|
||||
|
||||
expect(direction).toBe('TB');
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -116,8 +116,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -133,8 +133,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -150,8 +150,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(2);
|
||||
expect(edges[1].start).toBe('B');
|
||||
expect(edges[1].end).toBe('C');
|
||||
@ -169,8 +169,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -186,8 +186,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -203,8 +203,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -220,8 +220,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -237,8 +237,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -254,8 +254,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
|
@ -19,8 +19,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -34,8 +34,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -49,8 +49,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -64,8 +64,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -79,8 +79,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -94,8 +94,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -109,8 +109,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -124,8 +124,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -143,8 +143,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
|
@ -48,8 +48,6 @@ describe('[Edges] when parsing', () => {
|
||||
|
||||
it('should handle open ended edges', function () {
|
||||
const res = flow.parser.parse('graph TD;A---B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges[0].type).toBe('arrow_open');
|
||||
@ -57,8 +55,6 @@ describe('[Edges] when parsing', () => {
|
||||
|
||||
it('should handle cross ended edges', function () {
|
||||
const res = flow.parser.parse('graph TD;A--xB;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges[0].type).toBe('arrow_cross');
|
||||
@ -66,8 +62,6 @@ describe('[Edges] when parsing', () => {
|
||||
|
||||
it('should handle open ended edges', function () {
|
||||
const res = flow.parser.parse('graph TD;A--oB;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges[0].type).toBe('arrow_circle');
|
||||
@ -81,8 +75,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -99,8 +93,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -119,8 +113,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -139,8 +133,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(2);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -164,8 +158,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -183,8 +177,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -202,8 +196,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -221,8 +215,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -240,8 +234,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -259,8 +253,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -278,8 +272,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -297,8 +291,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -316,8 +310,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -335,8 +329,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -354,8 +348,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -373,8 +367,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -392,8 +386,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -411,8 +405,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -430,8 +424,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -449,8 +443,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -468,8 +462,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -487,8 +481,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
|
@ -23,7 +23,7 @@ describe('[Text] when parsing', () => {
|
||||
|
||||
expect(edges[0].type).toBe('arrow_point');
|
||||
expect(edges.length).toBe(47917);
|
||||
expect(Object.keys(vert).length).toBe(2);
|
||||
expect(vert.size).toBe(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -19,12 +19,12 @@ A["\`The cat in **the** hat\`"]-- "\`The *bat* in the chat\`" -->B["The dog in t
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['A'].text).toBe('The cat in **the** hat');
|
||||
expect(vert['A'].labelType).toBe('markdown');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert['B'].text).toBe('The dog in the hog');
|
||||
expect(vert['B'].labelType).toBe('string');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('A').text).toBe('The cat in **the** hat');
|
||||
expect(vert.get('A').labelType).toBe('markdown');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(vert.get('B').text).toBe('The dog in the hog');
|
||||
expect(vert.get('B').labelType).toBe('string');
|
||||
expect(edges.length).toBe(2);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
|
@ -43,7 +43,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['A'].styles.length).toBe(0);
|
||||
expect(vert.get('A').styles.length).toBe(0);
|
||||
});
|
||||
it('should handle a single node with white space after it (SN1)', function () {
|
||||
// Silly but syntactically correct
|
||||
@ -53,7 +53,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['A'].styles.length).toBe(0);
|
||||
expect(vert.get('A').styles.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle a single square node', function () {
|
||||
@ -64,8 +64,8 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].styles.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('square');
|
||||
expect(vert.get('a').styles.length).toBe(0);
|
||||
expect(vert.get('a').type).toBe('square');
|
||||
});
|
||||
|
||||
it('should handle a single round square node', function () {
|
||||
@ -76,8 +76,8 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].styles.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('square');
|
||||
expect(vert.get('a').styles.length).toBe(0);
|
||||
expect(vert.get('a').type).toBe('square');
|
||||
});
|
||||
|
||||
it('should handle a single circle node', function () {
|
||||
@ -88,7 +88,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('circle');
|
||||
expect(vert.get('a').type).toBe('circle');
|
||||
});
|
||||
|
||||
it('should handle a single round node', function () {
|
||||
@ -99,7 +99,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('round');
|
||||
expect(vert.get('a').type).toBe('round');
|
||||
});
|
||||
|
||||
it('should handle a single odd node', function () {
|
||||
@ -110,7 +110,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('odd');
|
||||
expect(vert.get('a').type).toBe('odd');
|
||||
});
|
||||
|
||||
it('should handle a single diamond node', function () {
|
||||
@ -121,7 +121,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('diamond');
|
||||
expect(vert.get('a').type).toBe('diamond');
|
||||
});
|
||||
|
||||
it('should handle a single diamond node with whitespace after it', function () {
|
||||
@ -132,7 +132,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('diamond');
|
||||
expect(vert.get('a').type).toBe('diamond');
|
||||
});
|
||||
|
||||
it('should handle a single diamond node with html in it (SN3)', function () {
|
||||
@ -143,8 +143,8 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('diamond');
|
||||
expect(vert['a'].text).toBe('A <br> end');
|
||||
expect(vert.get('a').type).toBe('diamond');
|
||||
expect(vert.get('a').text).toBe('A <br> end');
|
||||
});
|
||||
|
||||
it('should handle a single hexagon node', function () {
|
||||
@ -155,7 +155,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('hexagon');
|
||||
expect(vert.get('a').type).toBe('hexagon');
|
||||
});
|
||||
|
||||
it('should handle a single hexagon node with html in it', function () {
|
||||
@ -166,8 +166,8 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('hexagon');
|
||||
expect(vert['a'].text).toBe('A <br> end');
|
||||
expect(vert.get('a').type).toBe('hexagon');
|
||||
expect(vert.get('a').text).toBe('A <br> end');
|
||||
});
|
||||
|
||||
it('should handle a single round node with html in it', function () {
|
||||
@ -178,8 +178,8 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('round');
|
||||
expect(vert['a'].text).toBe('A <br> end');
|
||||
expect(vert.get('a').type).toBe('round');
|
||||
expect(vert.get('a').text).toBe('A <br> end');
|
||||
});
|
||||
|
||||
it('should handle a single double circle node', function () {
|
||||
@ -190,7 +190,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('doublecircle');
|
||||
expect(vert.get('a').type).toBe('doublecircle');
|
||||
});
|
||||
|
||||
it('should handle a single double circle node with whitespace after it', function () {
|
||||
@ -201,7 +201,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('doublecircle');
|
||||
expect(vert.get('a').type).toBe('doublecircle');
|
||||
});
|
||||
|
||||
it('should handle a single double circle node with html in it (SN3)', function () {
|
||||
@ -212,8 +212,8 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('doublecircle');
|
||||
expect(vert['a'].text).toBe('A <br> end');
|
||||
expect(vert.get('a').type).toBe('doublecircle');
|
||||
expect(vert.get('a').text).toBe('A <br> end');
|
||||
});
|
||||
|
||||
it('should handle a single node with alphanumerics starting on a char', function () {
|
||||
@ -224,7 +224,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['id1'].styles.length).toBe(0);
|
||||
expect(vert.get('id1').styles.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle a single node with a single digit', function () {
|
||||
@ -235,7 +235,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['1'].text).toBe('1');
|
||||
expect(vert.get('1').text).toBe('1');
|
||||
});
|
||||
|
||||
it('should handle a single node with a single digit in a subgraph', function () {
|
||||
@ -247,7 +247,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['1'].text).toBe('1');
|
||||
expect(vert.get('1').text).toBe('1');
|
||||
});
|
||||
|
||||
it('should handle a single node with alphanumerics starting on a num', function () {
|
||||
@ -258,7 +258,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['1id'].styles.length).toBe(0);
|
||||
expect(vert.get('1id').styles.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle a single node with alphanumerics containing a minus sign', function () {
|
||||
@ -269,7 +269,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['i-d'].styles.length).toBe(0);
|
||||
expect(vert.get('i-d').styles.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle a single node with alphanumerics containing a underscore sign', function () {
|
||||
@ -280,33 +280,33 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['i_d'].styles.length).toBe(0);
|
||||
expect(vert.get('i_d').styles.length).toBe(0);
|
||||
});
|
||||
|
||||
it.each(keywords)('should handle keywords between dashes "-"', function (keyword) {
|
||||
const res = flow.parser.parse(`graph TD;a-${keyword}-node;`);
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
expect(vert[`a-${keyword}-node`].text).toBe(`a-${keyword}-node`);
|
||||
expect(vert.get(`a-${keyword}-node`).text).toBe(`a-${keyword}-node`);
|
||||
});
|
||||
|
||||
it.each(keywords)('should handle keywords between periods "."', function (keyword) {
|
||||
const res = flow.parser.parse(`graph TD;a.${keyword}.node;`);
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
expect(vert[`a.${keyword}.node`].text).toBe(`a.${keyword}.node`);
|
||||
expect(vert.get(`a.${keyword}.node`).text).toBe(`a.${keyword}.node`);
|
||||
});
|
||||
|
||||
it.each(keywords)('should handle keywords between underscores "_"', function (keyword) {
|
||||
const res = flow.parser.parse(`graph TD;a_${keyword}_node;`);
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
expect(vert[`a_${keyword}_node`].text).toBe(`a_${keyword}_node`);
|
||||
expect(vert.get(`a_${keyword}_node`).text).toBe(`a_${keyword}_node`);
|
||||
});
|
||||
|
||||
it.each(keywords)('should handle nodes ending in %s', function (keyword) {
|
||||
const res = flow.parser.parse(`graph TD;node_${keyword};node.${keyword};node-${keyword};`);
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
expect(vert[`node_${keyword}`].text).toBe(`node_${keyword}`);
|
||||
expect(vert[`node.${keyword}`].text).toBe(`node.${keyword}`);
|
||||
expect(vert[`node-${keyword}`].text).toBe(`node-${keyword}`);
|
||||
expect(vert.get(`node_${keyword}`).text).toBe(`node_${keyword}`);
|
||||
expect(vert.get(`node.${keyword}`).text).toBe(`node.${keyword}`);
|
||||
expect(vert.get(`node-${keyword}`).text).toBe(`node-${keyword}`);
|
||||
});
|
||||
|
||||
const errorKeywords = [
|
||||
@ -337,9 +337,9 @@ describe('[Singlenodes] when parsing', () => {
|
||||
it.each(workingKeywords)('should parse node beginning with %s', function (keyword) {
|
||||
flow.parser.parse(`graph TD; ${keyword}.node;${keyword}-node;${keyword}/node;`);
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
expect(vert[`${keyword}.node`].text).toBe(`${keyword}.node`);
|
||||
expect(vert[`${keyword}-node`].text).toBe(`${keyword}-node`);
|
||||
expect(vert[`${keyword}/node`].text).toBe(`${keyword}/node`);
|
||||
expect(vert.get(`${keyword}.node`).text).toBe(`${keyword}.node`);
|
||||
expect(vert.get(`${keyword}-node`).text).toBe(`${keyword}-node`);
|
||||
expect(vert.get(`${keyword}/node`).text).toBe(`${keyword}/node`);
|
||||
});
|
||||
|
||||
it.each(specialChars)(
|
||||
@ -347,7 +347,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
function (specialChar) {
|
||||
flow.parser.parse(`graph TD; ${specialChar} --> A`);
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
expect(vert[`${specialChar}`].text).toBe(`${specialChar}`);
|
||||
expect(vert.get(`${specialChar}`).text).toBe(`${specialChar}`);
|
||||
}
|
||||
);
|
||||
|
||||
@ -356,7 +356,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
function (specialChar) {
|
||||
flow.parser.parse(`graph TD; ${specialChar}node --> A`);
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
expect(vert[`${specialChar}node`].text).toBe(`${specialChar}node`);
|
||||
expect(vert.get(`${specialChar}node`).text).toBe(`${specialChar}node`);
|
||||
}
|
||||
);
|
||||
|
||||
@ -365,7 +365,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
function (specialChar) {
|
||||
flow.parser.parse(`graph TD; node${specialChar} --> A`);
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
expect(vert[`node${specialChar}`].text).toBe(`node${specialChar}`);
|
||||
expect(vert.get(`node${specialChar}`).text).toBe(`node${specialChar}`);
|
||||
}
|
||||
);
|
||||
});
|
||||
|
@ -20,10 +20,8 @@ describe('[Style] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
const style = vert['Q'].styles[0];
|
||||
|
||||
expect(vert['Q'].styles.length).toBe(1);
|
||||
expect(vert['Q'].styles[0]).toBe('background:#fff');
|
||||
expect(vert.get('Q').styles.length).toBe(1);
|
||||
expect(vert.get('Q').styles[0]).toBe('background:#fff');
|
||||
});
|
||||
|
||||
it('should handle multiple styles for a vortex', function () {
|
||||
@ -32,9 +30,9 @@ describe('[Style] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['R'].styles.length).toBe(2);
|
||||
expect(vert['R'].styles[0]).toBe('background:#fff');
|
||||
expect(vert['R'].styles[1]).toBe('border:1px solid red');
|
||||
expect(vert.get('R').styles.length).toBe(2);
|
||||
expect(vert.get('R').styles[0]).toBe('background:#fff');
|
||||
expect(vert.get('R').styles[1]).toBe('border:1px solid red');
|
||||
});
|
||||
|
||||
it('should handle multiple styles in a graph', function () {
|
||||
@ -45,11 +43,11 @@ describe('[Style] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['S'].styles.length).toBe(1);
|
||||
expect(vert['T'].styles.length).toBe(2);
|
||||
expect(vert['S'].styles[0]).toBe('background:#aaa');
|
||||
expect(vert['T'].styles[0]).toBe('background:#bbb');
|
||||
expect(vert['T'].styles[1]).toBe('border:1px solid red');
|
||||
expect(vert.get('S').styles.length).toBe(1);
|
||||
expect(vert.get('T').styles.length).toBe(2);
|
||||
expect(vert.get('S').styles[0]).toBe('background:#aaa');
|
||||
expect(vert.get('T').styles[0]).toBe('background:#bbb');
|
||||
expect(vert.get('T').styles[1]).toBe('border:1px solid red');
|
||||
});
|
||||
|
||||
it('should handle styles and graph definitions in a graph', function () {
|
||||
@ -60,11 +58,11 @@ describe('[Style] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['S'].styles.length).toBe(1);
|
||||
expect(vert['T'].styles.length).toBe(2);
|
||||
expect(vert['S'].styles[0]).toBe('background:#aaa');
|
||||
expect(vert['T'].styles[0]).toBe('background:#bbb');
|
||||
expect(vert['T'].styles[1]).toBe('border:1px solid red');
|
||||
expect(vert.get('S').styles.length).toBe(1);
|
||||
expect(vert.get('T').styles.length).toBe(2);
|
||||
expect(vert.get('S').styles[0]).toBe('background:#aaa');
|
||||
expect(vert.get('T').styles[0]).toBe('background:#bbb');
|
||||
expect(vert.get('T').styles[1]).toBe('border:1px solid red');
|
||||
});
|
||||
|
||||
it('should handle styles and graph definitions in a graph', function () {
|
||||
@ -73,9 +71,9 @@ describe('[Style] when parsing', () => {
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
|
||||
expect(vert['T'].styles.length).toBe(2);
|
||||
expect(vert['T'].styles[0]).toBe('background:#bbb');
|
||||
expect(vert['T'].styles[1]).toBe('border:1px solid red');
|
||||
expect(vert.get('T').styles.length).toBe(2);
|
||||
expect(vert.get('T').styles[0]).toBe('background:#bbb');
|
||||
expect(vert.get('T').styles[1]).toBe('border:1px solid red');
|
||||
});
|
||||
|
||||
it('should keep node label text (if already defined) when a style is applied', function () {
|
||||
@ -85,10 +83,10 @@ describe('[Style] when parsing', () => {
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
|
||||
expect(vert['A'].text).toBe('');
|
||||
expect(vert['B'].text).toBe('Test');
|
||||
expect(vert['C'].text).toBe('C');
|
||||
expect(vert['D'].text).toBe('D');
|
||||
expect(vert.get('A').text).toBe('');
|
||||
expect(vert.get('B').text).toBe('Test');
|
||||
expect(vert.get('C').text).toBe('C');
|
||||
expect(vert.get('D').text).toBe('D');
|
||||
});
|
||||
|
||||
it('should be possible to declare a class', function () {
|
||||
@ -99,9 +97,9 @@ describe('[Style] when parsing', () => {
|
||||
|
||||
const classes = flow.parser.yy.getClasses();
|
||||
|
||||
expect(classes['exClass'].styles.length).toBe(2);
|
||||
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
|
||||
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
|
||||
expect(classes.get('exClass').styles.length).toBe(2);
|
||||
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
|
||||
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
|
||||
});
|
||||
|
||||
it('should be possible to declare multiple classes', function () {
|
||||
@ -111,13 +109,13 @@ describe('[Style] when parsing', () => {
|
||||
|
||||
const classes = flow.parser.yy.getClasses();
|
||||
|
||||
expect(classes['firstClass'].styles.length).toBe(2);
|
||||
expect(classes['firstClass'].styles[0]).toBe('background:#bbb');
|
||||
expect(classes['firstClass'].styles[1]).toBe('border:1px solid red');
|
||||
expect(classes.get('firstClass').styles.length).toBe(2);
|
||||
expect(classes.get('firstClass').styles[0]).toBe('background:#bbb');
|
||||
expect(classes.get('firstClass').styles[1]).toBe('border:1px solid red');
|
||||
|
||||
expect(classes['secondClass'].styles.length).toBe(2);
|
||||
expect(classes['secondClass'].styles[0]).toBe('background:#bbb');
|
||||
expect(classes['secondClass'].styles[1]).toBe('border:1px solid red');
|
||||
expect(classes.get('secondClass').styles.length).toBe(2);
|
||||
expect(classes.get('secondClass').styles[0]).toBe('background:#bbb');
|
||||
expect(classes.get('secondClass').styles[1]).toBe('border:1px solid red');
|
||||
});
|
||||
|
||||
it('should be possible to declare a class with a dot in the style', function () {
|
||||
@ -128,9 +126,9 @@ describe('[Style] when parsing', () => {
|
||||
|
||||
const classes = flow.parser.yy.getClasses();
|
||||
|
||||
expect(classes['exClass'].styles.length).toBe(2);
|
||||
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
|
||||
expect(classes['exClass'].styles[1]).toBe('border:1.5px solid red');
|
||||
expect(classes.get('exClass').styles.length).toBe(2);
|
||||
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
|
||||
expect(classes.get('exClass').styles[1]).toBe('border:1.5px solid red');
|
||||
});
|
||||
it('should be possible to declare a class with a space in the style', function () {
|
||||
const res = flow.parser.parse(
|
||||
@ -140,9 +138,9 @@ describe('[Style] when parsing', () => {
|
||||
|
||||
const classes = flow.parser.yy.getClasses();
|
||||
|
||||
expect(classes['exClass'].styles.length).toBe(2);
|
||||
expect(classes['exClass'].styles[0]).toBe('background: #bbb');
|
||||
expect(classes['exClass'].styles[1]).toBe('border:1.5px solid red');
|
||||
expect(classes.get('exClass').styles.length).toBe(2);
|
||||
expect(classes.get('exClass').styles[0]).toBe('background: #bbb');
|
||||
expect(classes.get('exClass').styles[1]).toBe('border:1.5px solid red');
|
||||
});
|
||||
it('should be possible to apply a class to a vertex', function () {
|
||||
let statement = '';
|
||||
@ -156,9 +154,9 @@ describe('[Style] when parsing', () => {
|
||||
|
||||
const classes = flow.parser.yy.getClasses();
|
||||
|
||||
expect(classes['exClass'].styles.length).toBe(2);
|
||||
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
|
||||
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
|
||||
expect(classes.get('exClass').styles.length).toBe(2);
|
||||
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
|
||||
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
|
||||
});
|
||||
it('should be possible to apply a class to a vertex with an id containing _', function () {
|
||||
let statement = '';
|
||||
@ -172,9 +170,9 @@ describe('[Style] when parsing', () => {
|
||||
|
||||
const classes = flow.parser.yy.getClasses();
|
||||
|
||||
expect(classes['exClass'].styles.length).toBe(2);
|
||||
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
|
||||
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
|
||||
expect(classes.get('exClass').styles.length).toBe(2);
|
||||
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
|
||||
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
|
||||
});
|
||||
it('should be possible to apply a class to a vertex directly', function () {
|
||||
let statement = '';
|
||||
@ -187,10 +185,10 @@ describe('[Style] when parsing', () => {
|
||||
const vertices = flow.parser.yy.getVertices();
|
||||
const classes = flow.parser.yy.getClasses();
|
||||
|
||||
expect(classes['exClass'].styles.length).toBe(2);
|
||||
expect(vertices['b'].classes[0]).toBe('exClass');
|
||||
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
|
||||
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
|
||||
expect(classes.get('exClass').styles.length).toBe(2);
|
||||
expect(vertices.get('b').classes[0]).toBe('exClass');
|
||||
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
|
||||
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
|
||||
});
|
||||
|
||||
it('should be possible to apply a class to a vertex directly : usecase A[text].class ', function () {
|
||||
@ -204,10 +202,10 @@ describe('[Style] when parsing', () => {
|
||||
const vertices = flow.parser.yy.getVertices();
|
||||
const classes = flow.parser.yy.getClasses();
|
||||
|
||||
expect(classes['exClass'].styles.length).toBe(2);
|
||||
expect(vertices['b'].classes[0]).toBe('exClass');
|
||||
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
|
||||
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
|
||||
expect(classes.get('exClass').styles.length).toBe(2);
|
||||
expect(vertices.get('b').classes[0]).toBe('exClass');
|
||||
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
|
||||
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
|
||||
});
|
||||
|
||||
it('should be possible to apply a class to a vertex directly : usecase A[text].class-->B[test2] ', function () {
|
||||
@ -221,10 +219,10 @@ describe('[Style] when parsing', () => {
|
||||
const vertices = flow.parser.yy.getVertices();
|
||||
const classes = flow.parser.yy.getClasses();
|
||||
|
||||
expect(classes['exClass'].styles.length).toBe(2);
|
||||
expect(vertices['A'].classes[0]).toBe('exClass');
|
||||
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
|
||||
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
|
||||
expect(classes.get('exClass').styles.length).toBe(2);
|
||||
expect(vertices.get('A').classes[0]).toBe('exClass');
|
||||
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
|
||||
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
|
||||
});
|
||||
|
||||
it('should be possible to apply a class to a vertex directly 2', function () {
|
||||
@ -238,10 +236,10 @@ describe('[Style] when parsing', () => {
|
||||
const vertices = flow.parser.yy.getVertices();
|
||||
const classes = flow.parser.yy.getClasses();
|
||||
|
||||
expect(classes['exClass'].styles.length).toBe(2);
|
||||
expect(vertices['b'].classes[0]).toBe('exClass');
|
||||
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
|
||||
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
|
||||
expect(classes.get('exClass').styles.length).toBe(2);
|
||||
expect(vertices.get('b').classes[0]).toBe('exClass');
|
||||
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
|
||||
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
|
||||
});
|
||||
it('should be possible to apply a class to a comma separated list of vertices', function () {
|
||||
let statement = '';
|
||||
@ -256,11 +254,11 @@ describe('[Style] when parsing', () => {
|
||||
const classes = flow.parser.yy.getClasses();
|
||||
const vertices = flow.parser.yy.getVertices();
|
||||
|
||||
expect(classes['exClass'].styles.length).toBe(2);
|
||||
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
|
||||
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
|
||||
expect(vertices['a'].classes[0]).toBe('exClass');
|
||||
expect(vertices['b'].classes[0]).toBe('exClass');
|
||||
expect(classes.get('exClass').styles.length).toBe(2);
|
||||
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
|
||||
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
|
||||
expect(vertices.get('a').classes[0]).toBe('exClass');
|
||||
expect(vertices.get('b').classes[0]).toBe('exClass');
|
||||
});
|
||||
|
||||
it('should handle style definitions with more then 1 digit in a row', function () {
|
||||
@ -364,9 +362,9 @@ describe('[Style] when parsing', () => {
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
|
||||
expect(vert['A'].classes.length).toBe(0);
|
||||
expect(vert['B'].classes[0]).toBe('C1');
|
||||
expect(vert['D'].classes[0]).toBe('C1');
|
||||
expect(vert['E'].classes[0]).toBe('C2');
|
||||
expect(vert.get('A').classes.length).toBe(0);
|
||||
expect(vert.get('B').classes[0]).toBe('C1');
|
||||
expect(vert.get('D').classes[0]).toBe('C1');
|
||||
expect(vert.get('E').classes[0]).toBe('C2');
|
||||
});
|
||||
});
|
||||
|
@ -113,7 +113,7 @@ describe('[Text] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges[0].type).toBe('arrow_cross');
|
||||
expect(vert['v'].text).toBe('my text');
|
||||
expect(vert.get('v').text).toBe('my text');
|
||||
});
|
||||
it('should handle v in node ids v at end', function () {
|
||||
// v at end
|
||||
@ -123,7 +123,7 @@ describe('[Text] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges[0].type).toBe('arrow_cross');
|
||||
expect(vert['csv'].text).toBe('my text');
|
||||
expect(vert.get('csv').text).toBe('my text');
|
||||
});
|
||||
it('should handle v in node ids v in middle', function () {
|
||||
// v in middle
|
||||
@ -133,7 +133,7 @@ describe('[Text] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges[0].type).toBe('arrow_cross');
|
||||
expect(vert['ava'].text).toBe('my text');
|
||||
expect(vert.get('ava').text).toBe('my text');
|
||||
});
|
||||
it('should handle v in node ids, v at start', function () {
|
||||
// v at start
|
||||
@ -143,7 +143,7 @@ describe('[Text] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges[0].type).toBe('arrow_cross');
|
||||
expect(vert['va'].text).toBe('my text');
|
||||
expect(vert.get('va').text).toBe('my text');
|
||||
});
|
||||
it('should handle keywords', function () {
|
||||
const res = flow.parser.parse('graph TD;A--x|text including graph space|B;');
|
||||
@ -157,7 +157,7 @@ describe('[Text] when parsing', () => {
|
||||
const res = flow.parser.parse('graph TD;V-->a[v]');
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
expect(vert['a'].text).toBe('v');
|
||||
expect(vert.get('a').text).toBe('v');
|
||||
});
|
||||
it('should handle quoted text', function () {
|
||||
const res = flow.parser.parse('graph TD;V-- "test string()" -->a[v]');
|
||||
@ -302,8 +302,8 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['C'].type).toBe('round');
|
||||
expect(vert['C'].text).toBe('Chimpansen hoppar');
|
||||
expect(vert.get('C').type).toBe('round');
|
||||
expect(vert.get('C').text).toBe('Chimpansen hoppar');
|
||||
});
|
||||
|
||||
const keywords = [
|
||||
@ -353,8 +353,8 @@ describe('[Text] when parsing', () => {
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
expect(vert['B'].type).toBe(`${shape.name}`);
|
||||
expect(vert['B'].text).toBe(`This node has a ${keyword} as text`);
|
||||
expect(vert.get('B').type).toBe(`${shape.name}`);
|
||||
expect(vert.get('B').text).toBe(`This node has a ${keyword} as text`);
|
||||
});
|
||||
});
|
||||
|
||||
@ -365,24 +365,24 @@ describe('[Text] when parsing', () => {
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
expect(vert['B'].type).toBe('rect');
|
||||
expect(vert['B'].text).toBe(`This node has a ${keyword} as text`);
|
||||
expect(vert.get('B').type).toBe('rect');
|
||||
expect(vert.get('B').text).toBe(`This node has a ${keyword} as text`);
|
||||
});
|
||||
|
||||
it('should handle edge case for odd vertex with node id ending with minus', function () {
|
||||
const res = flow.parser.parse('graph TD;A_node-->odd->Vertex Text];');
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
|
||||
expect(vert['odd-'].type).toBe('odd');
|
||||
expect(vert['odd-'].text).toBe('Vertex Text');
|
||||
expect(vert.get('odd-').type).toBe('odd');
|
||||
expect(vert.get('odd-').text).toBe('Vertex Text');
|
||||
});
|
||||
it('should allow forward slashes in lean_right vertices', function () {
|
||||
const rest = flow.parser.parse(`graph TD;A_node-->B[/This node has a / as text/];`);
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
expect(vert['B'].type).toBe('lean_right');
|
||||
expect(vert['B'].text).toBe(`This node has a / as text`);
|
||||
expect(vert.get('B').type).toBe('lean_right');
|
||||
expect(vert.get('B').text).toBe(`This node has a / as text`);
|
||||
});
|
||||
|
||||
it('should allow back slashes in lean_left vertices', function () {
|
||||
@ -390,8 +390,8 @@ describe('[Text] when parsing', () => {
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
expect(vert['B'].type).toBe('lean_left');
|
||||
expect(vert['B'].text).toBe(`This node has a \\ as text`);
|
||||
expect(vert.get('B').type).toBe('lean_left');
|
||||
expect(vert.get('B').text).toBe(`This node has a \\ as text`);
|
||||
});
|
||||
|
||||
it('should handle åäö and minus', function () {
|
||||
@ -400,8 +400,8 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['C'].type).toBe('diamond');
|
||||
expect(vert['C'].text).toBe('Chimpansen hoppar åäö-ÅÄÖ');
|
||||
expect(vert.get('C').type).toBe('diamond');
|
||||
expect(vert.get('C').text).toBe('Chimpansen hoppar åäö-ÅÄÖ');
|
||||
});
|
||||
|
||||
it('should handle with åäö, minus and space and br', function () {
|
||||
@ -410,8 +410,8 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['C'].type).toBe('round');
|
||||
expect(vert['C'].text).toBe('Chimpansen hoppar åäö <br> - ÅÄÖ');
|
||||
expect(vert.get('C').type).toBe('round');
|
||||
expect(vert.get('C').text).toBe('Chimpansen hoppar åäö <br> - ÅÄÖ');
|
||||
});
|
||||
// it.skip('should handle åäö, minus and space and br',function(){
|
||||
// const res = flow.parser.parse('graph TD; A[Object(foo,bar)]-->B(Thing);');
|
||||
@ -419,22 +419,22 @@ describe('[Text] when parsing', () => {
|
||||
// const vert = flow.parser.yy.getVertices();
|
||||
// const edges = flow.parser.yy.getEdges();
|
||||
//
|
||||
// expect(vert['C'].type).toBe('round');
|
||||
// expect(vert['C'].text).toBe(' A[Object(foo,bar)]-->B(Thing);');
|
||||
// expect(vert.get('C').type).toBe('round');
|
||||
// expect(vert.get('C').text).toBe(' A[Object(foo,bar)]-->B(Thing);');
|
||||
// });
|
||||
it('should handle unicode chars', function () {
|
||||
const res = flow.parser.parse('graph TD;A-->C(Начало);');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
|
||||
expect(vert['C'].text).toBe('Начало');
|
||||
expect(vert.get('C').text).toBe('Начало');
|
||||
});
|
||||
it('should handle backslask', function () {
|
||||
const res = flow.parser.parse('graph TD;A-->C(c:\\windows);');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
|
||||
expect(vert['C'].text).toBe('c:\\windows');
|
||||
expect(vert.get('C').text).toBe('c:\\windows');
|
||||
});
|
||||
it('should handle CAPS', function () {
|
||||
const res = flow.parser.parse('graph TD;A-->C(some CAPS);');
|
||||
@ -442,8 +442,8 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['C'].type).toBe('round');
|
||||
expect(vert['C'].text).toBe('some CAPS');
|
||||
expect(vert.get('C').type).toBe('round');
|
||||
expect(vert.get('C').text).toBe('some CAPS');
|
||||
});
|
||||
it('should handle directions', function () {
|
||||
const res = flow.parser.parse('graph TD;A-->C(some URL);');
|
||||
@ -451,8 +451,8 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['C'].type).toBe('round');
|
||||
expect(vert['C'].text).toBe('some URL');
|
||||
expect(vert.get('C').type).toBe('round');
|
||||
expect(vert.get('C').text).toBe('some URL');
|
||||
});
|
||||
});
|
||||
|
||||
@ -464,9 +464,9 @@ describe('[Text] when parsing', () => {
|
||||
|
||||
expect(edges[0].type).toBe('arrow_circle');
|
||||
expect(edges[1].type).toBe('arrow_point');
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert['C'].id).toBe('C');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(vert.get('C').id).toBe('C');
|
||||
expect(edges.length).toBe(2);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -482,8 +482,8 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].type).toBe('square');
|
||||
expect(vert['A'].text).toBe('chimpansen hoppar');
|
||||
expect(vert.get('A').type).toBe('square');
|
||||
expect(vert.get('A').text).toBe('chimpansen hoppar');
|
||||
});
|
||||
|
||||
it('should handle text in vertices with space with spaces between vertices and link', function () {
|
||||
@ -492,8 +492,8 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].type).toBe('square');
|
||||
expect(vert['A'].text).toBe('chimpansen hoppar');
|
||||
expect(vert.get('A').type).toBe('square');
|
||||
expect(vert.get('A').text).toBe('chimpansen hoppar');
|
||||
});
|
||||
it('should handle text including _ in vertices', function () {
|
||||
const res = flow.parser.parse('graph TD;A[chimpansen_hoppar] --> C;');
|
||||
@ -501,8 +501,8 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].type).toBe('square');
|
||||
expect(vert['A'].text).toBe('chimpansen_hoppar');
|
||||
expect(vert.get('A').type).toBe('square');
|
||||
expect(vert.get('A').text).toBe('chimpansen_hoppar');
|
||||
});
|
||||
|
||||
it('should handle quoted text in vertices ', function () {
|
||||
@ -511,8 +511,8 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].type).toBe('square');
|
||||
expect(vert['A'].text).toBe('chimpansen hoppar ()[]');
|
||||
expect(vert.get('A').type).toBe('square');
|
||||
expect(vert.get('A').text).toBe('chimpansen hoppar ()[]');
|
||||
});
|
||||
|
||||
it('should handle text in circle vertices with space', function () {
|
||||
@ -521,8 +521,8 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].type).toBe('circle');
|
||||
expect(vert['A'].text).toBe('chimpansen hoppar');
|
||||
expect(vert.get('A').type).toBe('circle');
|
||||
expect(vert.get('A').text).toBe('chimpansen hoppar');
|
||||
});
|
||||
|
||||
it('should handle text in ellipse vertices', function () {
|
||||
@ -531,8 +531,8 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].type).toBe('ellipse');
|
||||
expect(vert['A'].text).toBe('this is an ellipse');
|
||||
expect(vert.get('A').type).toBe('ellipse');
|
||||
expect(vert.get('A').text).toBe('this is an ellipse');
|
||||
});
|
||||
|
||||
it('should not freeze when ellipse text has a `(`', function () {
|
||||
@ -545,8 +545,8 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].type).toBe('round');
|
||||
expect(vert['A'].text).toBe('chimpansen hoppar');
|
||||
expect(vert.get('A').type).toBe('round');
|
||||
expect(vert.get('A').text).toBe('chimpansen hoppar');
|
||||
});
|
||||
|
||||
it('should handle text in with ?', function () {
|
||||
@ -555,7 +555,7 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].text).toBe('?');
|
||||
expect(vert.get('A').text).toBe('?');
|
||||
expect(edges[0].text).toBe('?');
|
||||
});
|
||||
it('should handle text in with éèêàçô', function () {
|
||||
@ -564,7 +564,7 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].text).toBe('éèêàçô');
|
||||
expect(vert.get('A').text).toBe('éèêàçô');
|
||||
expect(edges[0].text).toBe('éèêàçô');
|
||||
});
|
||||
|
||||
@ -574,7 +574,7 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].text).toBe(',.?!+-*');
|
||||
expect(vert.get('A').text).toBe(',.?!+-*');
|
||||
expect(edges[0].text).toBe(',.?!+-*');
|
||||
});
|
||||
|
||||
|
@ -22,9 +22,9 @@ describe('when parsing flowcharts', function () {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert['C'].id).toBe('C');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(vert.get('C').id).toBe('C');
|
||||
expect(edges.length).toBe(2);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -44,9 +44,9 @@ describe('when parsing flowcharts', function () {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert['C'].id).toBe('C');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(vert.get('C').id).toBe('C');
|
||||
expect(edges.length).toBe(2);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('C');
|
||||
@ -66,9 +66,9 @@ describe('when parsing flowcharts', function () {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert['C'].id).toBe('C');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(vert.get('C').id).toBe('C');
|
||||
expect(edges.length).toBe(2);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -88,10 +88,10 @@ describe('when parsing flowcharts', function () {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert['C'].id).toBe('C');
|
||||
expect(vert['D'].id).toBe('D');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(vert.get('C').id).toBe('C');
|
||||
expect(vert.get('D').id).toBe('D');
|
||||
expect(edges.length).toBe(4);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('C');
|
||||
@ -119,10 +119,10 @@ describe('when parsing flowcharts', function () {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert['C'].id).toBe('C');
|
||||
expect(vert['D'].id).toBe('D');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(vert.get('C').id).toBe('C');
|
||||
expect(vert.get('D').id).toBe('D');
|
||||
expect(edges.length).toBe(4);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('C');
|
||||
@ -150,11 +150,11 @@ describe('when parsing flowcharts', function () {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert['B2'].id).toBe('B2');
|
||||
expect(vert['C'].id).toBe('C');
|
||||
expect(vert['D2'].id).toBe('D2');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(vert.get('B2').id).toBe('B2');
|
||||
expect(vert.get('C').id).toBe('C');
|
||||
expect(vert.get('D2').id).toBe('D2');
|
||||
expect(edges.length).toBe(6);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -193,14 +193,14 @@ describe('when parsing flowcharts', function () {
|
||||
|
||||
const classes = flow.parser.yy.getClasses();
|
||||
|
||||
expect(classes['exClass'].styles.length).toBe(2);
|
||||
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
|
||||
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert['B'].classes[0]).toBe('exClass');
|
||||
expect(vert['C'].id).toBe('C');
|
||||
expect(vert['D'].id).toBe('D');
|
||||
expect(classes.get('exClass').styles.length).toBe(2);
|
||||
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
|
||||
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(vert.get('B').classes[0]).toBe('exClass');
|
||||
expect(vert.get('C').id).toBe('C');
|
||||
expect(vert.get('D').id).toBe('D');
|
||||
expect(edges.length).toBe(4);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
|
@ -19,8 +19,8 @@ describe('parsing a flow chart', function () {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(2);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -34,8 +34,8 @@ describe('parsing a flow chart', function () {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['endpoint'].id).toBe('endpoint');
|
||||
expect(vert['sender'].id).toBe('sender');
|
||||
expect(vert.get('endpoint').id).toBe('endpoint');
|
||||
expect(vert.get('sender').id).toBe('sender');
|
||||
expect(edges[0].start).toBe('endpoint');
|
||||
expect(edges[0].end).toBe('sender');
|
||||
});
|
||||
@ -46,8 +46,8 @@ describe('parsing a flow chart', function () {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['blend'].id).toBe('blend');
|
||||
expect(vert['monograph'].id).toBe('monograph');
|
||||
expect(vert.get('blend').id).toBe('blend');
|
||||
expect(vert.get('monograph').id).toBe('monograph');
|
||||
expect(edges[0].start).toBe('blend');
|
||||
expect(edges[0].end).toBe('monograph');
|
||||
});
|
||||
@ -58,8 +58,8 @@ describe('parsing a flow chart', function () {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['default'].id).toBe('default');
|
||||
expect(vert['monograph'].id).toBe('monograph');
|
||||
expect(vert.get('default').id).toBe('default');
|
||||
expect(vert.get('monograph').id).toBe('monograph');
|
||||
expect(edges[0].start).toBe('default');
|
||||
expect(edges[0].end).toBe('monograph');
|
||||
});
|
||||
@ -71,12 +71,12 @@ describe('parsing a flow chart', function () {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
if (result) {
|
||||
expect(vert['A'].text).toBe(result);
|
||||
expect(vert.get('A').text).toBe(result);
|
||||
} else {
|
||||
expect(vert['A'].text).toBe(char);
|
||||
expect(vert.get('A').text).toBe(char);
|
||||
}
|
||||
flow.parser.yy.clear();
|
||||
};
|
||||
@ -135,7 +135,7 @@ describe('parsing a flow chart', function () {
|
||||
const res = flow.parser.parse(statement);
|
||||
const vertices = flow.parser.yy.getVertices();
|
||||
const classes = flow.parser.yy.getClasses();
|
||||
expect(vertices['node1TB'].id).toBe('node1TB');
|
||||
expect(vertices.get('node1TB').id).toBe('node1TB');
|
||||
});
|
||||
|
||||
it('should be possible to use direction in node ids', function () {
|
||||
@ -145,7 +145,7 @@ describe('parsing a flow chart', function () {
|
||||
const res = flow.parser.parse(statement);
|
||||
const vertices = flow.parser.yy.getVertices();
|
||||
const classes = flow.parser.yy.getClasses();
|
||||
expect(vertices['A'].id).toBe('A');
|
||||
expect(vertices.get('A').id).toBe('A');
|
||||
});
|
||||
|
||||
it('should be possible to use numbers as labels', function () {
|
||||
@ -154,8 +154,8 @@ describe('parsing a flow chart', function () {
|
||||
statement = statement + 'graph TB;subgraph "number as labels";1;end;';
|
||||
const res = flow.parser.parse(statement);
|
||||
const vertices = flow.parser.yy.getVertices();
|
||||
const classes = flow.parser.yy.getClasses();
|
||||
expect(vertices['1'].id).toBe('1');
|
||||
|
||||
expect(vertices.get('1').id).toBe('1');
|
||||
});
|
||||
|
||||
it('should add accTitle and accDescr to flow chart', function () {

@ -28,7 +28,7 @@ let tickInterval = undefined;
let todayMarker = '';
let includes = [];
let excludes = [];
let links = {};
let links = new Map();
let sections = [];
let tasks = [];
let currentSection = '';
@ -62,7 +62,7 @@ export const clear = function () {
inclusiveEndDates = false;
topAxis = false;
lastOrder = 0;
links = {};
links = new Map();
commonClear();
weekday = 'sunday';
weekend = 'saturday';
@ -639,7 +639,7 @@ export const setLink = function (ids, _linkStr) {
pushFun(id, () => {
window.open(linkStr, '_self');
});
links[id] = linkStr;
links.set(id, linkStr);
}
});
setClass(ids, 'clickable');

@ -475,14 +475,14 @@ export const draw = function (text, id, version, diagObj) {

rectangles
.filter(function (d) {
return links[d.id] !== undefined;
return links.has(d.id);
})
.each(function (o) {
var taskRect = doc.querySelector('#' + o.id);
var taskText = doc.querySelector('#' + o.id + '-text');
const oldParent = taskRect.parentNode;
var Link = doc.createElement('a');
Link.setAttribute('xlink:href', links[o.id]);
Link.setAttribute('xlink:href', links.get(o.id));
Link.setAttribute('target', '_top');
oldParent.appendChild(Link);
Link.appendChild(taskRect);
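
The Gantt changes above store per-task click links in a Map: `setLink` writes with `links.set(id, linkStr)` and the renderer filters task rectangles with `links.has(d.id)` before reading the URL back with `links.get(o.id)`. A minimal sketch of that lookup pattern, with made-up task ids and URLs (not code from this commit):

const links = new Map();

function setLink(ids, linkStr) {
  // one link string may apply to several comma-separated task ids
  for (const id of ids.split(',')) {
    links.set(id.trim(), linkStr);
  }
}

function linkFor(taskId) {
  // Map#has replaces the `links[d.id] !== undefined` check and cannot
  // collide with inherited object keys such as 'constructor'
  return links.has(taskId) ? links.get(taskId) : null;
}

setLink('task1,task2', 'https://example.com/details');
console.log(linkFor('task1')); // https://example.com/details
console.log(linkFor('task3')); // null
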

@ -14,12 +14,12 @@ import {

let mainBranchName = getConfig().gitGraph.mainBranchName;
let mainBranchOrder = getConfig().gitGraph.mainBranchOrder;
let commits = {};
let commits = new Map();
let head = null;
let branchesConfig = {};
branchesConfig[mainBranchName] = { name: mainBranchName, order: mainBranchOrder };
let branches = {};
branches[mainBranchName] = head;
let branchesConfig = new Map();
branchesConfig.set(mainBranchName, { name: mainBranchName, order: mainBranchOrder });
let branches = new Map();
branches.set(mainBranchName, head);
let curBranch = mainBranchName;
let direction = 'LR';
let seq = 0;
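
The gitGraph state above now keeps `commits`, `branches`, and `branchesConfig` as Maps. The hunks that follow write with `commits.set(commit.id, commit)` and `branches.set(curBranch, commit.id)`, test existence with `branches.has(name)`, and resolve the current tip via `commits.get(branches.get(curBranch))`. A self-contained sketch of that bookkeeping with made-up commit ids (an illustration, not the library's implementation):

const commits = new Map(); // commit id -> commit object
const branches = new Map(); // branch name -> id of the branch tip

let curBranch = 'main';
branches.set(curBranch, null);

function commit(id, message) {
  const parent = branches.get(curBranch);
  const c = { id, message, parents: parent ? [parent] : [], branch: curBranch };
  commits.set(c.id, c);
  branches.set(curBranch, c.id);
  return c;
}

function checkout(name) {
  if (!branches.has(name)) {
    // a new branch starts at the current branch tip
    branches.set(name, branches.get(curBranch));
  }
  curBranch = name;
}

commit('c1', 'first');
checkout('develop');
commit('c2', 'second');
console.log(commits.get(branches.get('develop')).message); // 'second'
console.log(commits.size); // 2
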
@ -46,11 +46,11 @@ function getId() {
|
||||
// if (Array.isArray(otherCommit.parent)) {
|
||||
// log.debug('In merge commit:', otherCommit.parent);
|
||||
// return (
|
||||
// isFastForwardable(currentCommit, commits[otherCommit.parent[0]]) ||
|
||||
// isFastForwardable(currentCommit, commits[otherCommit.parent[1]])
|
||||
// isFastForwardable(currentCommit, commits.get(otherCommit.parent[0])) ||
|
||||
// isFastForwardable(currentCommit, commits.get(otherCommit.parent[1]))
|
||||
// );
|
||||
// } else {
|
||||
// otherCommit = commits[otherCommit.parent];
|
||||
// otherCommit = commits.get(otherCommit.parent);
|
||||
// }
|
||||
// }
|
||||
// log.debug(currentCommit.id, otherCommit.id);
|
||||
@ -118,16 +118,16 @@ export const commit = function (msg, id, type, tag) {
|
||||
branch: curBranch,
|
||||
};
|
||||
head = commit;
|
||||
commits[commit.id] = commit;
|
||||
branches[curBranch] = commit.id;
|
||||
commits.set(commit.id, commit);
|
||||
branches.set(curBranch, commit.id);
|
||||
log.debug('in pushCommit ' + commit.id);
|
||||
};
|
||||
|
||||
export const branch = function (name, order) {
|
||||
name = common.sanitizeText(name, getConfig());
|
||||
if (branches[name] === undefined) {
|
||||
branches[name] = head != null ? head.id : null;
|
||||
branchesConfig[name] = { name, order: order ? parseInt(order, 10) : null };
|
||||
if (!branches.has(name)) {
|
||||
branches.set(name, head != null ? head.id : null);
|
||||
branchesConfig.set(name, { name, order: order ? parseInt(order, 10) : null });
|
||||
checkout(name);
|
||||
log.debug('in createBranch');
|
||||
} else {
|
||||
@ -151,8 +151,8 @@ export const merge = function (otherBranch, custom_id, override_type, custom_tag
|
||||
otherBranch = common.sanitizeText(otherBranch, getConfig());
|
||||
custom_id = common.sanitizeText(custom_id, getConfig());
|
||||
|
||||
const currentCommit = commits[branches[curBranch]];
|
||||
const otherCommit = commits[branches[otherBranch]];
|
||||
const currentCommit = commits.get(branches.get(curBranch));
|
||||
const otherCommit = commits.get(branches.get(otherBranch));
|
||||
if (curBranch === otherBranch) {
|
||||
let error = new Error('Incorrect usage of "merge". Cannot merge a branch to itself');
|
||||
error.hash = {
|
||||
@ -175,7 +175,7 @@ export const merge = function (otherBranch, custom_id, override_type, custom_tag
|
||||
expected: ['commit'],
|
||||
};
|
||||
throw error;
|
||||
} else if (branches[otherBranch] === undefined) {
|
||||
} else if (!branches.has(otherBranch)) {
|
||||
let error = new Error(
|
||||
'Incorrect usage of "merge". Branch to be merged (' + otherBranch + ') does not exist'
|
||||
);
|
||||
@ -209,7 +209,7 @@ export const merge = function (otherBranch, custom_id, override_type, custom_tag
|
||||
expected: ['branch abc'],
|
||||
};
|
||||
throw error;
|
||||
} else if (custom_id && commits[custom_id] !== undefined) {
|
||||
} else if (custom_id && commits.has(custom_id)) {
|
||||
let error = new Error(
|
||||
'Incorrect usage of "merge". Commit with id:' +
|
||||
custom_id +
|
||||
@ -232,15 +232,15 @@ export const merge = function (otherBranch, custom_id, override_type, custom_tag
|
||||
// return;
|
||||
// }
|
||||
// if (isFastForwardable(currentCommit, otherCommit)) {
|
||||
// branches[curBranch] = branches[otherBranch];
|
||||
// head = commits[branches[curBranch]];
|
||||
// branches.set(curBranch, branches.get(otherBranch));
|
||||
// head = commits.get(branches.get(curBranch));
|
||||
// } else {
|
||||
// create merge commit
|
||||
const commit = {
|
||||
id: custom_id ? custom_id : seq + '-' + getId(),
|
||||
message: 'merged branch ' + otherBranch + ' into ' + curBranch,
|
||||
seq: seq++,
|
||||
parents: [head == null ? null : head.id, branches[otherBranch]],
|
||||
parents: [head == null ? null : head.id, branches.get(otherBranch)],
|
||||
branch: curBranch,
|
||||
type: commitType.MERGE,
|
||||
customType: override_type,
|
||||
@ -248,8 +248,8 @@ export const merge = function (otherBranch, custom_id, override_type, custom_tag
|
||||
tag: custom_tag ? custom_tag : '',
|
||||
};
|
||||
head = commit;
|
||||
commits[commit.id] = commit;
|
||||
branches[curBranch] = commit.id;
|
||||
commits.set(commit.id, commit);
|
||||
branches.set(curBranch, commit.id);
|
||||
// }
|
||||
log.debug(branches);
|
||||
log.debug('in mergeBranch');
|
||||
@ -262,7 +262,7 @@ export const cherryPick = function (sourceId, targetId, tag, parentCommitId) {
|
||||
tag = common.sanitizeText(tag, getConfig());
|
||||
parentCommitId = common.sanitizeText(parentCommitId, getConfig());
|
||||
|
||||
if (!sourceId || commits[sourceId] === undefined) {
|
||||
if (!sourceId || !commits.has(sourceId)) {
|
||||
let error = new Error(
|
||||
'Incorrect usage of "cherryPick". Source commit id should exist and provided'
|
||||
);
|
||||
@ -275,7 +275,7 @@ export const cherryPick = function (sourceId, targetId, tag, parentCommitId) {
|
||||
};
|
||||
throw error;
|
||||
}
|
||||
let sourceCommit = commits[sourceId];
|
||||
let sourceCommit = commits.get(sourceId);
|
||||
let sourceCommitBranch = sourceCommit.branch;
|
||||
if (
|
||||
parentCommitId &&
|
||||
@ -292,7 +292,7 @@ export const cherryPick = function (sourceId, targetId, tag, parentCommitId) {
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
if (!targetId || commits[targetId] === undefined) {
|
||||
if (!targetId || !commits.has(targetId)) {
|
||||
// cherry-pick source commit to current branch
|
||||
|
||||
if (sourceCommitBranch === curBranch) {
|
||||
@ -308,7 +308,7 @@ export const cherryPick = function (sourceId, targetId, tag, parentCommitId) {
|
||||
};
|
||||
throw error;
|
||||
}
|
||||
const currentCommit = commits[branches[curBranch]];
|
||||
const currentCommit = commits.get(branches.get(curBranch));
|
||||
if (currentCommit === undefined || !currentCommit) {
|
||||
let error = new Error(
|
||||
'Incorrect usage of "cherry-pick". Current branch (' + curBranch + ')has no commits'
|
||||
@ -336,15 +336,15 @@ export const cherryPick = function (sourceId, targetId, tag, parentCommitId) {
|
||||
}`,
|
||||
};
|
||||
head = commit;
|
||||
commits[commit.id] = commit;
|
||||
branches[curBranch] = commit.id;
|
||||
commits.set(commit.id, commit);
|
||||
branches.set(curBranch, commit.id);
|
||||
log.debug(branches);
|
||||
log.debug('in cherryPick');
|
||||
}
|
||||
};
|
||||
export const checkout = function (branch) {
|
||||
branch = common.sanitizeText(branch, getConfig());
|
||||
if (branches[branch] === undefined) {
|
||||
if (!branches.has(branch)) {
|
||||
let error = new Error(
|
||||
'Trying to checkout branch which is not yet created. (Help try using "branch ' + branch + '")'
|
||||
);
|
||||
@ -360,8 +360,8 @@ export const checkout = function (branch) {
|
||||
//log.debug('in createBranch');
|
||||
} else {
|
||||
curBranch = branch;
|
||||
const id = branches[curBranch];
|
||||
head = commits[id];
|
||||
const id = branches.get(curBranch);
|
||||
head = commits.get(id);
|
||||
}
|
||||
};
|
||||
|
||||
@ -369,10 +369,10 @@ export const checkout = function (branch) {
|
||||
// log.debug('in reset', commitRef);
|
||||
// const ref = commitRef.split(':')[0];
|
||||
// let parentCount = parseInt(commitRef.split(':')[1]);
|
||||
// let commit = ref === 'HEAD' ? head : commits[branches[ref]];
|
||||
// let commit = ref === 'HEAD' ? head : commits.get(branches.get(ref));
|
||||
// log.debug(commit, parentCount);
|
||||
// while (parentCount > 0) {
|
||||
// commit = commits[commit.parent];
|
||||
// commit = commits.get(commit.parent);
|
||||
// parentCount--;
|
||||
// if (!commit) {
|
||||
// const err = 'Critical error - unique parent commit not found during reset';
|
||||
@ -416,19 +416,19 @@ function prettyPrintCommitHistory(commitArr) {
|
||||
});
|
||||
const label = [line, commit.id, commit.seq];
|
||||
for (let branch in branches) {
|
||||
if (branches[branch] === commit.id) {
|
||||
if (branches.get(branch) === commit.id) {
|
||||
label.push(branch);
|
||||
}
|
||||
}
|
||||
log.debug(label.join(' '));
|
||||
if (commit.parents && commit.parents.length == 2) {
|
||||
const newCommit = commits[commit.parents[0]];
|
||||
const newCommit = commits.get(commit.parents[0]);
|
||||
upsert(commitArr, commit, newCommit);
|
||||
commitArr.push(commits[commit.parents[1]]);
|
||||
commitArr.push(commits.get(commit.parents[1]));
|
||||
} else if (commit.parents.length == 0) {
|
||||
return;
|
||||
} else {
|
||||
const nextCommit = commits[commit.parents];
|
||||
const nextCommit = commits.get(commit.parents);
|
||||
upsert(commitArr, commit, nextCommit);
|
||||
}
|
||||
commitArr = uniqBy(commitArr, (c) => c.id);
|
||||
@ -442,21 +442,21 @@ export const prettyPrint = function () {
|
||||
};
|
||||
|
||||
export const clear = function () {
|
||||
commits = {};
|
||||
commits = new Map();
|
||||
head = null;
|
||||
let mainBranch = getConfig().gitGraph.mainBranchName;
|
||||
let mainBranchOrder = getConfig().gitGraph.mainBranchOrder;
|
||||
branches = {};
|
||||
branches[mainBranch] = null;
|
||||
branchesConfig = {};
|
||||
branchesConfig[mainBranch] = { name: mainBranch, order: mainBranchOrder };
|
||||
branches = new Map();
|
||||
branches.set(mainBranch, null);
|
||||
branchesConfig = new Map();
|
||||
branchesConfig.set(mainBranch, { name: mainBranch, order: mainBranchOrder });
|
||||
curBranch = mainBranch;
|
||||
seq = 0;
|
||||
commonClear();
|
||||
};
|
||||
|
||||
export const getBranchesAsObjArray = function () {
|
||||
const branchesArray = Object.values(branchesConfig)
|
||||
const branchesArray = [...branchesConfig.values()]
|
||||
.map((branchConfig, i) => {
|
||||
if (branchConfig.order !== null) {
|
||||
return branchConfig;
@ -479,8 +479,8 @@ export const getCommits = function () {
return commits;
};
export const getCommitsArray = function () {
const commitArr = Object.keys(commits).map(function (key) {
return commits[key];
const commitArr = [...commits.keys()].map(function (key) {
return commits.get(key);
});
commitArr.forEach(function (o) {
log.debug(o.id);
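
A side note on the `getCommitsArray` hunk above: spreading the keys and mapping each one back through `commits.get(key)` works, but a Map also exposes its values directly, so the same array can be built in one step. A sketch of the equivalent call (an observation, not a change made by this commit):

const commits = new Map([
  ['c1', { id: 'c1', message: 'first' }],
  ['c2', { id: 'c2', message: 'second' }],
]);

// what the hunk above does:
const viaKeys = [...commits.keys()].map(function (key) {
  return commits.get(key);
});
// equivalent, since a Map iterates its values in insertion order:
const viaValues = [...commits.values()];

console.log(viaKeys.map((c) => c.id)); // ['c1', 'c2']
console.log(viaValues.map((c) => c.id)); // ['c1', 'c2']
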
@ -12,10 +12,10 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph definition with empty options', function () {
|
||||
@ -25,10 +25,10 @@ describe('when parsing a gitGraph', function () {
|
||||
const commits = parser.yy.getCommits();
|
||||
|
||||
expect(parser.yy.getOptions()).toEqual({});
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph definition with valid options', function () {
|
||||
@ -37,10 +37,10 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(parser.yy.getOptions()['key']).toBe('value');
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
});
|
||||
|
||||
it('should not fail on a gitGraph with malformed json', function () {
|
||||
@ -48,10 +48,10 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle set direction top to bottom', function () {
|
||||
@ -60,10 +60,10 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('TB');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle set direction bottom to top', function () {
|
||||
@ -72,10 +72,10 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('BT');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
});
|
||||
|
||||
it('should checkout a branch', function () {
|
||||
@ -84,7 +84,7 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
|
||||
expect(Object.keys(commits).length).toBe(0);
|
||||
expect(commits.size).toBe(0);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('new');
|
||||
});
|
||||
|
||||
@ -94,7 +94,7 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
|
||||
expect(Object.keys(commits).length).toBe(0);
|
||||
expect(commits.size).toBe(0);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('new');
|
||||
});
|
||||
|
||||
@ -104,11 +104,11 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
|
||||
expect(Object.keys(commits).length).toBe(2);
|
||||
expect(commits.size).toBe(2);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('new');
|
||||
const branchCommit = parser.yy.getBranches()['new'];
|
||||
const branchCommit = parser.yy.getBranches().get('new');
|
||||
expect(branchCommit).not.toBeNull();
|
||||
expect(commits[branchCommit].parent).not.toBeNull();
|
||||
expect(commits.get(branchCommit).parent).not.toBeNull();
|
||||
});
it('should handle commit with args', function () {
const str = 'gitGraph:\n' + 'commit "a commit"\n';
@ -116,9 +116,9 @@ describe('when parsing a gitGraph', function () {
parser.parse(str);
const commits = parser.yy.getCommits();

expect(Object.keys(commits).length).toBe(1);
const key = Object.keys(commits)[0];
expect(commits[key].message).toBe('a commit');
expect(commits.size).toBe(1);
const key = commits.keys().next().value;
expect(commits.get(key).message).toBe('a commit');
expect(parser.yy.getCurrentBranch()).toBe('main');
});
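
The test above replaces `Object.keys(commits)[0]` with `commits.keys().next().value`, which pulls the first entry from the Map's key iterator; because a Map iterates in insertion order, that is the id of the first commit recorded. A tiny sketch of the idiom with placeholder ids:

const commits = new Map();
commits.set('abc123', { message: 'a commit' });
commits.set('def456', { message: 'another commit' });

// first key in insertion order, i.e. the first commit added
const key = commits.keys().next().value;
console.log(key); // 'abc123'
console.log(commits.get(key).message); // 'a commit'
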
@ -136,10 +136,10 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(3);
|
||||
expect(commits.size).toBe(3);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('newbranch');
|
||||
expect(parser.yy.getBranches()['newbranch']).toEqual(parser.yy.getBranches()['main']);
|
||||
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches()['newbranch']);
|
||||
expect(parser.yy.getBranches().get('newbranch')).toEqual(parser.yy.getBranches().get('main'));
|
||||
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches().get('newbranch'));
|
||||
});
|
||||
|
||||
it.skip('reset can take an argument', function () {
|
||||
@ -155,9 +155,9 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(3);
|
||||
expect(commits.size).toBe(3);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('newbranch');
|
||||
const main = commits[parser.yy.getBranches()['main']];
|
||||
const main = commits.get(parser.yy.getBranches().get('main'));
|
||||
expect(parser.yy.getHead().id).toEqual(main.parent);
|
||||
});
|
||||
|
||||
@ -175,10 +175,10 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(4);
|
||||
expect(commits.size).toBe(4);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getBranches()['newbranch']).toEqual(parser.yy.getBranches()['main']);
|
||||
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches()['newbranch']);
|
||||
expect(parser.yy.getBranches().get('newbranch')).toEqual(parser.yy.getBranches().get('main'));
|
||||
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches().get('newbranch'));
|
||||
});
|
||||
|
||||
it('should handle cases when merge is a noop', function () {
|
||||
@ -194,10 +194,12 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(4);
|
||||
expect(commits.size).toBe(4);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('newbranch');
|
||||
expect(parser.yy.getBranches()['newbranch']).not.toEqual(parser.yy.getBranches()['main']);
|
||||
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches()['newbranch']);
|
||||
expect(parser.yy.getBranches().get('newbranch')).not.toEqual(
|
||||
parser.yy.getBranches().get('main')
|
||||
);
|
||||
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches().get('newbranch'));
|
||||
});
|
||||
|
||||
it('should handle merge with 2 parents', function () {
|
||||
@ -215,10 +217,12 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(5);
|
||||
expect(commits.size).toBe(5);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getBranches()['newbranch']).not.toEqual(parser.yy.getBranches()['main']);
|
||||
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches()['main']);
|
||||
expect(parser.yy.getBranches().get('newbranch')).not.toEqual(
|
||||
parser.yy.getBranches().get('main')
|
||||
);
|
||||
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches().get('main'));
|
||||
});
|
||||
|
||||
it.skip('should handle ff merge when history walk has two parents (merge commit)', function () {
|
||||
@ -239,10 +243,10 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(7);
|
||||
expect(commits.size).toBe(7);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('newbranch');
|
||||
expect(parser.yy.getBranches()['newbranch']).toEqual(parser.yy.getBranches()['main']);
|
||||
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches()['main']);
|
||||
expect(parser.yy.getBranches().get('newbranch')).toEqual(parser.yy.getBranches().get('main'));
|
||||
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches().get('main'));
|
||||
|
||||
parser.yy.prettyPrint();
|
||||
});
|
||||
|
@ -13,15 +13,15 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
//console.info(commits);
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('');
|
||||
expect(commits[key].id).not.toBeNull();
|
||||
expect(commits[key].tag).toBe('');
|
||||
expect(commits[key].type).toBe(0);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('');
|
||||
expect(commits.get(key).id).not.toBeNull();
|
||||
expect(commits.get(key).tag).toBe('');
|
||||
expect(commits.get(key).type).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom commit id only', function () {
|
||||
@ -30,15 +30,15 @@ describe('when parsing a gitGraph', function () {
|
||||
`;
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('');
|
||||
expect(commits[key].id).toBe('1111');
|
||||
expect(commits[key].tag).toBe('');
|
||||
expect(commits[key].type).toBe(0);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('');
|
||||
expect(commits.get(key).id).toBe('1111');
|
||||
expect(commits.get(key).tag).toBe('');
|
||||
expect(commits.get(key).type).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom commit tag only', function () {
|
||||
@ -48,15 +48,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('');
|
||||
expect(commits[key].id).not.toBeNull();
|
||||
expect(commits[key].tag).toBe('test');
|
||||
expect(commits[key].type).toBe(0);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('');
|
||||
expect(commits.get(key).id).not.toBeNull();
|
||||
expect(commits.get(key).tag).toBe('test');
|
||||
expect(commits.get(key).type).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom commit type HIGHLIGHT only', function () {
|
||||
@ -66,15 +66,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('');
|
||||
expect(commits[key].id).not.toBeNull();
|
||||
expect(commits[key].tag).toBe('');
|
||||
expect(commits[key].type).toBe(2);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('');
|
||||
expect(commits.get(key).id).not.toBeNull();
|
||||
expect(commits.get(key).tag).toBe('');
|
||||
expect(commits.get(key).type).toBe(2);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom commit type REVERSE only', function () {
|
||||
@ -84,15 +84,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('');
|
||||
expect(commits[key].id).not.toBeNull();
|
||||
expect(commits[key].tag).toBe('');
|
||||
expect(commits[key].type).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('');
|
||||
expect(commits.get(key).id).not.toBeNull();
|
||||
expect(commits.get(key).tag).toBe('');
|
||||
expect(commits.get(key).type).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom commit type NORMAL only', function () {
|
||||
@ -102,15 +102,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('');
|
||||
expect(commits[key].id).not.toBeNull();
|
||||
expect(commits[key].tag).toBe('');
|
||||
expect(commits[key].type).toBe(0);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('');
|
||||
expect(commits.get(key).id).not.toBeNull();
|
||||
expect(commits.get(key).tag).toBe('');
|
||||
expect(commits.get(key).type).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom commit msg only', function () {
|
||||
@ -120,15 +120,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('test commit');
|
||||
expect(commits[key].id).not.toBeNull();
|
||||
expect(commits[key].tag).toBe('');
|
||||
expect(commits[key].type).toBe(0);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('test commit');
|
||||
expect(commits.get(key).id).not.toBeNull();
|
||||
expect(commits.get(key).tag).toBe('');
|
||||
expect(commits.get(key).type).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom commit "msg:" key only', function () {
|
||||
@ -138,15 +138,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('test commit');
|
||||
expect(commits[key].id).not.toBeNull();
|
||||
expect(commits[key].tag).toBe('');
|
||||
expect(commits[key].type).toBe(0);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('test commit');
|
||||
expect(commits.get(key).id).not.toBeNull();
|
||||
expect(commits.get(key).tag).toBe('');
|
||||
expect(commits.get(key).type).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom commit id, tag only', function () {
|
||||
@ -156,15 +156,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('');
|
||||
expect(commits[key].id).toBe('1111');
|
||||
expect(commits[key].tag).toBe('test tag');
|
||||
expect(commits[key].type).toBe(0);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('');
|
||||
expect(commits.get(key).id).toBe('1111');
|
||||
expect(commits.get(key).tag).toBe('test tag');
|
||||
expect(commits.get(key).type).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom commit type, tag only', function () {
|
||||
@ -174,15 +174,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('');
|
||||
expect(commits[key].id).not.toBeNull();
|
||||
expect(commits[key].tag).toBe('test tag');
|
||||
expect(commits[key].type).toBe(2);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('');
|
||||
expect(commits.get(key).id).not.toBeNull();
|
||||
expect(commits.get(key).tag).toBe('test tag');
|
||||
expect(commits.get(key).type).toBe(2);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom commit tag and type only', function () {
|
||||
@ -192,15 +192,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('');
|
||||
expect(commits[key].id).not.toBeNull();
|
||||
expect(commits[key].tag).toBe('test tag');
|
||||
expect(commits[key].type).toBe(2);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('');
|
||||
expect(commits.get(key).id).not.toBeNull();
|
||||
expect(commits.get(key).tag).toBe('test tag');
|
||||
expect(commits.get(key).type).toBe(2);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom commit id, type and tag only', function () {
|
||||
@ -210,15 +210,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('');
|
||||
expect(commits[key].id).toBe('1111');
|
||||
expect(commits[key].tag).toBe('test tag');
|
||||
expect(commits[key].type).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('');
|
||||
expect(commits.get(key).id).toBe('1111');
|
||||
expect(commits.get(key).tag).toBe('test tag');
|
||||
expect(commits.get(key).type).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom commit id, type, tag and msg', function () {
|
||||
@ -228,15 +228,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('test msg');
|
||||
expect(commits[key].id).toBe('1111');
|
||||
expect(commits[key].tag).toBe('test tag');
|
||||
expect(commits[key].type).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('test msg');
|
||||
expect(commits.get(key).id).toBe('1111');
|
||||
expect(commits.get(key).tag).toBe('test tag');
|
||||
expect(commits.get(key).type).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom type,tag, msg, commit id,', function () {
|
||||
@ -247,15 +247,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('test msg');
|
||||
expect(commits[key].id).toBe('1111');
|
||||
expect(commits[key].tag).toBe('test tag');
|
||||
expect(commits[key].type).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('test msg');
|
||||
expect(commits.get(key).id).toBe('1111');
|
||||
expect(commits.get(key).tag).toBe('test tag');
|
||||
expect(commits.get(key).type).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom tag, msg, commit id, type,', function () {
|
||||
@ -265,15 +265,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('test msg');
|
||||
expect(commits[key].id).toBe('1111');
|
||||
expect(commits[key].tag).toBe('test tag');
|
||||
expect(commits[key].type).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('test msg');
|
||||
expect(commits.get(key).id).toBe('1111');
|
||||
expect(commits.get(key).tag).toBe('test tag');
|
||||
expect(commits.get(key).type).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom msg, commit id, type,tag', function () {
|
||||
@ -283,15 +283,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('test msg');
|
||||
expect(commits[key].id).toBe('1111');
|
||||
expect(commits[key].tag).toBe('test tag');
|
||||
expect(commits[key].type).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('test msg');
|
||||
expect(commits.get(key).id).toBe('1111');
|
||||
expect(commits.get(key).tag).toBe('test tag');
|
||||
expect(commits.get(key).type).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle 3 straight commits', function () {
|
||||
@ -303,10 +303,10 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(3);
|
||||
expect(commits.size).toBe(3);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle new branch creation', function () {
|
||||
@ -317,10 +317,10 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('testBranch');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
|
||||
expect(parser.yy.getBranches().size).toBe(2);
|
||||
});
|
||||
|
||||
it('should allow quoted branch names', function () {
|
||||
@ -335,16 +335,16 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(3);
|
||||
expect(commits.size).toBe(3);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
|
||||
const commit1 = Object.keys(commits)[0];
|
||||
const commit2 = Object.keys(commits)[1];
|
||||
const commit3 = Object.keys(commits)[2];
|
||||
expect(commits[commit1].branch).toBe('main');
|
||||
expect(commits[commit2].branch).toBe('branch');
|
||||
expect(commits[commit3].branch).toBe('main');
|
||||
expect(parser.yy.getBranches().size).toBe(2);
|
||||
const commit1 = commits.keys().next().value;
|
||||
const commit2 = [...commits.keys()][1];
|
||||
const commit3 = [...commits.keys()][2];
|
||||
expect(commits.get(commit1).branch).toBe('main');
|
||||
expect(commits.get(commit2).branch).toBe('branch');
|
||||
expect(commits.get(commit3).branch).toBe('main');
|
||||
expect(parser.yy.getBranchesAsObjArray()).toStrictEqual([{ name: 'main' }, { name: 'branch' }]);
|
||||
});
|
||||
|
||||
@ -356,10 +356,10 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('azAZ_-./test');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
|
||||
expect(parser.yy.getBranches().size).toBe(2);
|
||||
});
|
||||
|
||||
it('should allow branch names starting with numbers', function () {
|
||||
@ -371,10 +371,10 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('1.0.1');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
|
||||
expect(parser.yy.getBranches().size).toBe(2);
|
||||
});
|
||||
|
||||
it('should allow branch names starting with unusual prefixes', function () {
|
||||
@ -392,11 +392,11 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('A');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(7);
|
||||
expect(Object.keys(parser.yy.getBranches())).toEqual(
|
||||
expect(parser.yy.getBranches().size).toBe(7);
|
||||
expect([...parser.yy.getBranches().keys()]).toEqual(
|
||||
expect.arrayContaining([
|
||||
'branch01',
|
||||
'checkout02',
|
||||
@ -417,10 +417,10 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('testBranch');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
|
||||
expect(parser.yy.getBranches().size).toBe(2);
|
||||
});
|
||||
it('should handle new branch checkout with order', function () {
|
||||
const str = `gitGraph:
|
||||
@ -432,9 +432,9 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('test3');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(4);
|
||||
expect(parser.yy.getBranches().size).toBe(4);
|
||||
expect(parser.yy.getBranchesAsObjArray()).toStrictEqual([
|
||||
{ name: 'main' },
|
||||
{ name: 'test3' },
|
||||
@ -452,9 +452,9 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('test3');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(4);
|
||||
expect(parser.yy.getBranches().size).toBe(4);
|
||||
expect(parser.yy.getBranchesAsObjArray()).toStrictEqual([
|
||||
{ name: 'main' },
|
||||
{ name: 'test2' },
|
||||
@ -473,16 +473,16 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(2);
|
||||
expect(commits.size).toBe(2);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('testBranch');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
|
||||
const commit1 = Object.keys(commits)[0];
|
||||
const commit2 = Object.keys(commits)[1];
|
||||
expect(commits[commit1].branch).toBe('main');
|
||||
expect(commits[commit1].parents).toStrictEqual([]);
|
||||
expect(commits[commit2].branch).toBe('testBranch');
|
||||
expect(commits[commit2].parents).toStrictEqual([commit1]);
|
||||
expect(parser.yy.getBranches().size).toBe(2);
|
||||
const commit1 = commits.keys().next().value;
|
||||
const commit2 = [...commits.keys()][1];
|
||||
expect(commits.get(commit1).branch).toBe('main');
|
||||
expect(commits.get(commit1).parents).toStrictEqual([]);
|
||||
expect(commits.get(commit2).branch).toBe('testBranch');
|
||||
expect(commits.get(commit2).parents).toStrictEqual([commit1]);
|
||||
});
|
||||
|
||||
it('should handle new branch checkout & commit and merge', function () {
|
||||
@ -498,22 +498,25 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(4);
|
||||
expect(commits.size).toBe(4);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
|
||||
const commit1 = Object.keys(commits)[0];
|
||||
const commit2 = Object.keys(commits)[1];
|
||||
const commit3 = Object.keys(commits)[2];
|
||||
const commit4 = Object.keys(commits)[3];
|
||||
expect(commits[commit1].branch).toBe('main');
|
||||
expect(commits[commit1].parents).toStrictEqual([]);
|
||||
expect(commits[commit2].branch).toBe('testBranch');
|
||||
expect(commits[commit2].parents).toStrictEqual([commits[commit1].id]);
|
||||
expect(commits[commit3].branch).toBe('testBranch');
|
||||
expect(commits[commit3].parents).toStrictEqual([commits[commit2].id]);
|
||||
expect(commits[commit4].branch).toBe('main');
|
||||
expect(commits[commit4].parents).toStrictEqual([commits[commit1].id, commits[commit3].id]);
|
||||
expect(parser.yy.getBranches().size).toBe(2);
|
||||
const commit1 = commits.keys().next().value;
|
||||
const commit2 = [...commits.keys()][1];
|
||||
const commit3 = [...commits.keys()][2];
|
||||
const commit4 = [...commits.keys()][3];
|
||||
expect(commits.get(commit1).branch).toBe('main');
|
||||
expect(commits.get(commit1).parents).toStrictEqual([]);
|
||||
expect(commits.get(commit2).branch).toBe('testBranch');
|
||||
expect(commits.get(commit2).parents).toStrictEqual([commits.get(commit1).id]);
|
||||
expect(commits.get(commit3).branch).toBe('testBranch');
|
||||
expect(commits.get(commit3).parents).toStrictEqual([commits.get(commit2).id]);
|
||||
expect(commits.get(commit4).branch).toBe('main');
|
||||
expect(commits.get(commit4).parents).toStrictEqual([
|
||||
commits.get(commit1).id,
|
||||
commits.get(commit3).id,
|
||||
]);
|
||||
expect(parser.yy.getBranchesAsObjArray()).toStrictEqual([
|
||||
{ name: 'main' },
|
||||
{ name: 'testBranch' },
|
||||
@ -529,10 +532,10 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('testBranch');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
|
||||
expect(parser.yy.getBranches().size).toBe(2);
|
||||
});
|
||||
|
||||
it('should handle new branch switch & commit', function () {
|
||||
@ -545,16 +548,16 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(2);
|
||||
expect(commits.size).toBe(2);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('testBranch');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
|
||||
const commit1 = Object.keys(commits)[0];
|
||||
const commit2 = Object.keys(commits)[1];
|
||||
expect(commits[commit1].branch).toBe('main');
|
||||
expect(commits[commit1].parents).toStrictEqual([]);
|
||||
expect(commits[commit2].branch).toBe('testBranch');
|
||||
expect(commits[commit2].parents).toStrictEqual([commit1]);
|
||||
expect(parser.yy.getBranches().size).toBe(2);
|
||||
const commit1 = commits.keys().next().value;
|
||||
const commit2 = [...commits.keys()][1];
|
||||
expect(commits.get(commit1).branch).toBe('main');
|
||||
expect(commits.get(commit1).parents).toStrictEqual([]);
|
||||
expect(commits.get(commit2).branch).toBe('testBranch');
|
||||
expect(commits.get(commit2).parents).toStrictEqual([commit1]);
|
||||
});
|
||||
|
||||
it('should handle new branch switch & commit and merge', function () {
|
||||
@ -570,22 +573,25 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(4);
|
||||
expect(commits.size).toBe(4);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
|
||||
const commit1 = Object.keys(commits)[0];
|
||||
const commit2 = Object.keys(commits)[1];
|
||||
const commit3 = Object.keys(commits)[2];
|
||||
const commit4 = Object.keys(commits)[3];
|
||||
expect(commits[commit1].branch).toBe('main');
|
||||
expect(commits[commit1].parents).toStrictEqual([]);
|
||||
expect(commits[commit2].branch).toBe('testBranch');
|
||||
expect(commits[commit2].parents).toStrictEqual([commits[commit1].id]);
|
||||
expect(commits[commit3].branch).toBe('testBranch');
|
||||
expect(commits[commit3].parents).toStrictEqual([commits[commit2].id]);
|
||||
expect(commits[commit4].branch).toBe('main');
|
||||
expect(commits[commit4].parents).toStrictEqual([commits[commit1].id, commits[commit3].id]);
|
||||
expect(parser.yy.getBranches().size).toBe(2);
|
||||
const commit1 = commits.keys().next().value;
|
||||
const commit2 = [...commits.keys()][1];
|
||||
const commit3 = [...commits.keys()][2];
|
||||
const commit4 = [...commits.keys()][3];
|
||||
expect(commits.get(commit1).branch).toBe('main');
|
||||
expect(commits.get(commit1).parents).toStrictEqual([]);
|
||||
expect(commits.get(commit2).branch).toBe('testBranch');
|
||||
expect(commits.get(commit2).parents).toStrictEqual([commits.get(commit1).id]);
|
||||
expect(commits.get(commit3).branch).toBe('testBranch');
|
||||
expect(commits.get(commit3).parents).toStrictEqual([commits.get(commit2).id]);
|
||||
expect(commits.get(commit4).branch).toBe('main');
|
||||
expect(commits.get(commit4).parents).toStrictEqual([
|
||||
commits.get(commit1).id,
|
||||
commits.get(commit3).id,
|
||||
]);
|
||||
expect(parser.yy.getBranchesAsObjArray()).toStrictEqual([
|
||||
{ name: 'main' },
|
||||
{ name: 'testBranch' },
|
||||
@ -604,23 +610,26 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(3);
|
||||
expect(commits.size).toBe(3);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
|
||||
const commit1 = Object.keys(commits)[0];
|
||||
const commit2 = Object.keys(commits)[1];
|
||||
const commit3 = Object.keys(commits)[2];
|
||||
expect(parser.yy.getBranches().size).toBe(2);
|
||||
const commit1 = commits.keys().next().value;
|
||||
const commit2 = [...commits.keys()][1];
|
||||
const commit3 = [...commits.keys()][2];
|
||||
|
||||
expect(commits[commit1].branch).toBe('main');
|
||||
expect(commits[commit1].parents).toStrictEqual([]);
|
||||
expect(commits.get(commit1).branch).toBe('main');
|
||||
expect(commits.get(commit1).parents).toStrictEqual([]);
|
||||
|
||||
expect(commits[commit2].branch).toBe('testBranch');
|
||||
expect(commits[commit2].parents).toStrictEqual([commits[commit1].id]);
|
||||
expect(commits.get(commit2).branch).toBe('testBranch');
|
||||
expect(commits.get(commit2).parents).toStrictEqual([commits.get(commit1).id]);
|
||||
|
||||
expect(commits[commit3].branch).toBe('main');
|
||||
expect(commits[commit3].parents).toStrictEqual([commits[commit1].id, commits[commit2].id]);
|
||||
expect(commits[commit3].tag).toBe('merge-tag');
|
||||
expect(commits.get(commit3).branch).toBe('main');
|
||||
expect(commits.get(commit3).parents).toStrictEqual([
|
||||
commits.get(commit1).id,
|
||||
commits.get(commit2).id,
|
||||
]);
|
||||
expect(commits.get(commit3).tag).toBe('merge-tag');
|
||||
expect(parser.yy.getBranchesAsObjArray()).toStrictEqual([
|
||||
{ name: 'main' },
|
||||
{ name: 'testBranch' },
|
||||
@ -652,7 +661,7 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(7);
expect(commits.size).toBe(7);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');

@ -665,7 +674,7 @@ describe('when parsing a gitGraph', function () {
testBranch2Merge,
testBranch3Commit,
testBranch3Merge,
] = Object.values(commits);
] = [...commits.values()];

expect(mainCommit.branch).toBe('main');
expect(mainCommit.parents).toStrictEqual([]);
@ -708,9 +717,9 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
const cherryPickCommitID = Object.keys(commits)[2];
expect(commits[cherryPickCommitID].tag).toBe('cherry-pick:A');
expect(commits[cherryPickCommitID].branch).toBe('main');
const cherryPickCommitID = [...commits.keys()][2];
expect(commits.get(cherryPickCommitID).tag).toBe('cherry-pick:A');
expect(commits.get(cherryPickCommitID).branch).toBe('main');
});

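The destructuring in the hunk above relies on Map iteration following insertion order, so spreading commits.values() yields the commits in the order they were added, just as Object.values() did for the string-keyed object. A tiny illustration with made-up commit ids:

    const commits = new Map<string, { branch: string }>([
      ['A', { branch: 'main' }],
      ['B', { branch: 'testBranch' }],
    ]);

    // Positional destructuring still matches the order the commits were added in.
    const [first, second] = [...commits.values()];
    console.assert(first.branch === 'main' && second.branch === 'testBranch');
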
it('should support cherry-picking commits with custom tag', function () {
|
||||
@ -724,9 +733,9 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
const cherryPickCommitID = Object.keys(commits)[2];
|
||||
expect(commits[cherryPickCommitID].tag).toBe('MyTag');
|
||||
expect(commits[cherryPickCommitID].branch).toBe('main');
|
||||
const cherryPickCommitID = [...commits.keys()][2];
|
||||
expect(commits.get(cherryPickCommitID).tag).toBe('MyTag');
|
||||
expect(commits.get(cherryPickCommitID).branch).toBe('main');
|
||||
});
|
||||
|
||||
it('should support cherry-picking commits with no tag', function () {
|
||||
@ -740,9 +749,9 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
const cherryPickCommitID = Object.keys(commits)[2];
|
||||
expect(commits[cherryPickCommitID].tag).toBe('');
|
||||
expect(commits[cherryPickCommitID].branch).toBe('main');
|
||||
const cherryPickCommitID = [...commits.keys()][2];
|
||||
expect(commits.get(cherryPickCommitID).tag).toBe('');
|
||||
expect(commits.get(cherryPickCommitID).branch).toBe('main');
|
||||
});
|
||||
|
||||
it('should support cherry-picking of merge commits', function () {
|
||||
@ -761,9 +770,9 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
const cherryPickCommitID = Object.keys(commits)[4];
|
||||
expect(commits[cherryPickCommitID].tag).toBe('cherry-pick:M|parent:B');
|
||||
expect(commits[cherryPickCommitID].branch).toBe('release');
|
||||
const cherryPickCommitID = [...commits.keys()][4];
|
||||
expect(commits.get(cherryPickCommitID).tag).toBe('cherry-pick:M|parent:B');
|
||||
expect(commits.get(cherryPickCommitID).branch).toBe('release');
|
||||
});
|
||||
|
||||
it('should support cherry-picking of merge commits with tag', function () {
|
||||
@ -782,9 +791,9 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
const cherryPickCommitID = Object.keys(commits)[4];
|
||||
expect(commits[cherryPickCommitID].tag).toBe('v1.0');
|
||||
expect(commits[cherryPickCommitID].branch).toBe('release');
|
||||
const cherryPickCommitID = [...commits.keys()][4];
|
||||
expect(commits.get(cherryPickCommitID).tag).toBe('v1.0');
|
||||
expect(commits.get(cherryPickCommitID).branch).toBe('release');
|
||||
});
|
||||
|
||||
it('should support cherry-picking of merge commits with additional commit', function () {
|
||||
@ -805,9 +814,9 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
const cherryPickCommitID = Object.keys(commits)[5];
|
||||
expect(commits[cherryPickCommitID].tag).toBe('v2.1:ZERO');
|
||||
expect(commits[cherryPickCommitID].branch).toBe('release');
|
||||
const cherryPickCommitID = [...commits.keys()][5];
|
||||
expect(commits.get(cherryPickCommitID).tag).toBe('v2.1:ZERO');
|
||||
expect(commits.get(cherryPickCommitID).branch).toBe('release');
|
||||
});
|
||||
|
||||
it('should support cherry-picking of merge commits with empty tag', function () {
|
||||
@ -829,11 +838,11 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
const cherryPickCommitID = Object.keys(commits)[5];
|
||||
const cherryPickCommitID2 = Object.keys(commits)[7];
|
||||
expect(commits[cherryPickCommitID].tag).toBe('');
|
||||
expect(commits[cherryPickCommitID2].tag).toBe('');
|
||||
expect(commits[cherryPickCommitID].branch).toBe('release');
|
||||
const cherryPickCommitID = [...commits.keys()][5];
|
||||
const cherryPickCommitID2 = [...commits.keys()][7];
|
||||
expect(commits.get(cherryPickCommitID).tag).toBe('');
|
||||
expect(commits.get(cherryPickCommitID2).tag).toBe('');
|
||||
expect(commits.get(cherryPickCommitID).branch).toBe('release');
|
||||
});
|
||||
|
||||
it('should fail cherry-picking of merge commits if the parent of merge commits is not specified', function () {
|
||||
|
@ -129,7 +129,7 @@ const setParallelBTPos = (sortedKeys, commits, defaultPos, commitStep, layoutOff
let maxPosition = defaultPos;
let roots = [];
sortedKeys.forEach((key) => {
const commit = commits[key];
const commit = commits.get(key);
if (commit.parents.length) {
const closestParent = findClosestParent(commit.parents);
curPos = commitPos[closestParent].y + commitStep;
@ -151,7 +151,7 @@ const setParallelBTPos = (sortedKeys, commits, defaultPos, commitStep, layoutOff
commitPos[commit.id] = { x: x, y: y };
});
sortedKeys.forEach((key) => {
const commit = commits[key];
const commit = commits.get(key);
if (commit.parents.length) {
const closestParent = findClosestParentBT(commit.parents);
curPos = commitPos[closestParent].y - commitStep;
@ -183,18 +183,18 @@ const drawCommits = (svg, commits, modifyGraph) => {
if (dir === 'TB' || dir === 'BT') {
pos = defaultPos;
}
const keys = Object.keys(commits);
const keys = [...commits.keys()];
const isParallelCommits = gitGraphConfig.parallelCommits;
const layoutOffset = 10;
const commitStep = 40;
let sortedKeys =
dir !== 'BT' || (dir === 'BT' && isParallelCommits)
? keys.sort((a, b) => {
return commits[a].seq - commits[b].seq;
return commits.get(a).seq - commits.get(b).seq;
})
: keys
.sort((a, b) => {
return commits[a].seq - commits[b].seq;
return commits.get(a).seq - commits.get(b).seq;
})
.reverse();

@ -203,7 +203,7 @@ const drawCommits = (svg, commits, modifyGraph) => {
sortedKeys = sortedKeys.reverse();
}
sortedKeys.forEach((key) => {
const commit = commits[key];
const commit = commits.get(key);
if (isParallelCommits) {
if (commit.parents.length) {
const closestParent =
@ -712,11 +712,11 @@ const drawArrow = (svg, commitA, commitB, allCommits) => {

const drawArrows = (svg, commits) => {
const gArrows = svg.append('g').attr('class', 'commit-arrows');
Object.keys(commits).forEach((key) => {
const commit = commits[key];
[...commits.keys()].forEach((key) => {
const commit = commits.get(key);
if (commit.parents && commit.parents.length > 0) {
commit.parents.forEach((parent) => {
drawArrow(gArrows, commits[parent], commit, commits);
drawArrow(gArrows, commits.get(parent), commit, commits);
});
}
});

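Every renderer hunk above follows the same conversion: collect keys with [...commits.keys()] instead of Object.keys(commits), then read entries with commits.get(key) instead of indexing. A small sketch of the sort used in drawCommits, with a hypothetical sortCommitKeys helper and a simplified commit shape standing in for the real one:

    interface CommitLike {
      seq: number;
    }

    // Order keys by commit sequence, mirroring the ternary in drawCommits.
    const sortCommitKeys = (commits: Map<string, CommitLike>, reverse = false): string[] => {
      const keys = [...commits.keys()].sort((a, b) => commits.get(a)!.seq - commits.get(b)!.seq);
      return reverse ? keys.reverse() : keys;
    };

    const demo = new Map<string, CommitLike>([
      ['c2', { seq: 2 }],
      ['c1', { seq: 1 }],
    ]);
    console.assert(sortCommitKeys(demo).join(',') === 'c1,c2');
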
@ -16,7 +16,7 @@ describe('pie', () => {
`);

const sections = db.getSections();
expect(sections['ash']).toBe(100);
expect(sections.get('ash')).toBe(100);
});

it('should handle simple pie', async () => {
@ -26,8 +26,8 @@ describe('pie', () => {
`);

const sections = db.getSections();
expect(sections['ash']).toBe(60);
expect(sections['bat']).toBe(40);
expect(sections.get('ash')).toBe(60);
expect(sections.get('bat')).toBe(40);
});

it('should handle simple pie with showData', async () => {
@ -39,8 +39,8 @@ describe('pie', () => {
expect(db.getShowData()).toBeTruthy();

const sections = db.getSections();
expect(sections['ash']).toBe(60);
expect(sections['bat']).toBe(40);
expect(sections.get('ash')).toBe(60);
expect(sections.get('bat')).toBe(40);
});

it('should handle simple pie with comments', async () => {
@ -51,8 +51,8 @@ describe('pie', () => {
`);

const sections = db.getSections();
expect(sections['ash']).toBe(60);
expect(sections['bat']).toBe(40);
expect(sections.get('ash')).toBe(60);
expect(sections.get('bat')).toBe(40);
});

it('should handle simple pie with a title', async () => {
@ -64,8 +64,8 @@ describe('pie', () => {
expect(db.getDiagramTitle()).toBe('a 60/40 pie');

const sections = db.getSections();
expect(sections['ash']).toBe(60);
expect(sections['bat']).toBe(40);
expect(sections.get('ash')).toBe(60);
expect(sections.get('bat')).toBe(40);
});

it('should handle simple pie with an acc title (accTitle)', async () => {
@ -80,8 +80,8 @@ describe('pie', () => {
expect(db.getAccTitle()).toBe('a neat acc title');

const sections = db.getSections();
expect(sections['ash']).toBe(60);
expect(sections['bat']).toBe(40);
expect(sections.get('ash')).toBe(60);
expect(sections.get('bat')).toBe(40);
});

it('should handle simple pie with an acc description (accDescr)', async () => {
@ -96,8 +96,8 @@ describe('pie', () => {
expect(db.getAccDescription()).toBe('a neat description');

const sections = db.getSections();
expect(sections['ash']).toBe(60);
expect(sections['bat']).toBe(40);
expect(sections.get('ash')).toBe(60);
expect(sections.get('bat')).toBe(40);
});

it('should handle simple pie with a multiline acc description (accDescr)', async () => {
@ -115,8 +115,8 @@ describe('pie', () => {
expect(db.getAccDescription()).toBe('a neat description\non multiple lines');

const sections = db.getSections();
expect(sections['ash']).toBe(60);
expect(sections['bat']).toBe(40);
expect(sections.get('ash')).toBe(60);
expect(sections.get('bat')).toBe(40);
});

it('should handle simple pie with positive decimal', async () => {
@ -126,8 +126,8 @@ describe('pie', () => {
`);

const sections = db.getSections();
expect(sections['ash']).toBe(60.67);
expect(sections['bat']).toBe(40);
expect(sections.get('ash')).toBe(60.67);
expect(sections.get('bat')).toBe(40);
});

it('should handle simple pie with negative decimal', () => {

@ -16,7 +16,7 @@ import DEFAULT_CONFIG from '../../defaultConfig.js';
export const DEFAULT_PIE_CONFIG: Required<PieDiagramConfig> = DEFAULT_CONFIG.pie;

export const DEFAULT_PIE_DB: RequiredDeep<PieFields> = {
sections: {},
sections: new Map(),
showData: false,
config: DEFAULT_PIE_CONFIG,
} as const;
@ -28,14 +28,14 @@ const config: Required<PieDiagramConfig> = structuredClone(DEFAULT_PIE_CONFIG);
const getConfig = (): Required<PieDiagramConfig> => structuredClone(config);

const clear = (): void => {
sections = structuredClone(DEFAULT_PIE_DB.sections);
sections = new Map();
showData = DEFAULT_PIE_DB.showData;
commonClear();
};

const addSection = ({ label, value }: D3Section): void => {
if (sections[label] === undefined) {
sections[label] = value;
if (!sections.has(label)) {
sections.set(label, value);
log.debug(`added new section: ${label}, with value: ${value}`);
}
};

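A condensed sketch of the new pieDb behaviour above, assuming only that sections is a Map<string, number>; resetting with new Map() in clear() also sidesteps cloning the shared default object:

    const sections = new Map<string, number>();

    // has()/set() replace the old `sections[label] === undefined` guard and assignment.
    const addSection = (label: string, value: number): void => {
      if (!sections.has(label)) {
        sections.set(label, value);
      }
    };

    addSection('ash', 60);
    addSection('ash', 100); // ignored: the first value for a label wins
    console.assert(sections.get('ash') === 60);
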
@ -11,7 +11,7 @@ import { selectSvgElement } from '../../rendering-util/selectSvgElement.js';

const createPieArcs = (sections: Sections): d3.PieArcDatum<D3Section>[] => {
// Compute the position of each group on the pie:
const pieData: D3Section[] = Object.entries(sections)
const pieData: D3Section[] = [...sections.entries()]
.map((element: [string, number]): D3Section => {
return {
label: element[0],
@ -105,8 +105,8 @@ export const draw: DrawDefinition = (text, id, _version, diagObj) => {
.attr('class', 'pieCircle');

let sum = 0;
Object.keys(sections).forEach((key: string): void => {
sum += sections[key];
[...sections.keys()].forEach((key: string): void => {
sum += sections.get(key)!;
});
// Now add the percentage.
// Use the centroid method to get the best coordinates.
@ -34,7 +34,7 @@ export interface PieStyleOptions {
pieOpacity: string;
}

export type Sections = Record<string, number>;
export type Sections = Map<string, number>;

export interface D3Section {
label: string;

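With Sections now an alias for Map<string, number>, the renderer reads values through get() while iterating keys; summing straight over the values iterator is an equivalent (hypothetical) shortcut:

    type Sections = Map<string, number>;

    const sections: Sections = new Map([
      ['ash', 60],
      ['bat', 40],
    ]);

    // Pattern from the draw() hunk: iterate keys and read each value with get().
    let sum = 0;
    [...sections.keys()].forEach((key) => {
      sum += sections.get(key)!;
    });

    // Same total straight from the values iterator.
    const total = [...sections.values()].reduce((acc, v) => acc + v, 0);
    console.assert(sum === 100 && total === 100);
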
@ -33,14 +33,14 @@ describe('when parsing requirement diagram it...', function () {

reqDiagram.parser.parse(doc);

expect(Object.keys(requirementDb.getRequirements()).length).toBe(1);
expect(requirementDb.getRequirements().size).toBe(1);

let foundReq = requirementDb.getRequirements()[expectedName];
let foundReq = requirementDb.getRequirements().get(expectedName);
expect(foundReq).toBeDefined();
expect(foundReq.id).toBe(expectedId);
expect(foundReq.text).toBe(expectedText);

expect(Object.keys(requirementDb.getElements()).length).toBe(0);
expect(requirementDb.getElements().size).toBe(0);
expect(Object.keys(requirementDb.getRelationships()).length).toBe(0);
});

@ -61,10 +61,10 @@ describe('when parsing requirement diagram it...', function () {

reqDiagram.parser.parse(doc);

expect(Object.keys(requirementDb.getRequirements()).length).toBe(0);
expect(Object.keys(requirementDb.getElements()).length).toBe(1);
expect(requirementDb.getRequirements().size).toBe(0);
expect(requirementDb.getElements().size).toBe(1);

let foundElement = requirementDb.getElements()[expectedName];
let foundElement = requirementDb.getElements().get(expectedName);
expect(foundElement).toBeDefined();
expect(foundElement.type).toBe(expectedType);
expect(foundElement.docRef).toBe(expectedDocRef);
@ -121,8 +121,8 @@ line 2`;

reqDiagram.parser.parse(doc);

expect(Object.keys(requirementDb.getRequirements()).length).toBe(0);
expect(Object.keys(requirementDb.getElements()).length).toBe(0);
expect(requirementDb.getRequirements().size).toBe(0);
expect(requirementDb.getElements().size).toBe(0);
expect(Object.keys(requirementDb.getRelationships()).length).toBe(1);

let foundRelationship = requirementDb.getRelationships()[0];
@ -152,7 +152,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.type).toBe(expectedType);
|
||||
});
|
||||
@ -179,7 +179,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.type).toBe(expectedType);
|
||||
});
|
||||
@ -206,7 +206,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.type).toBe(expectedType);
|
||||
});
|
||||
@ -233,7 +233,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.type).toBe(expectedType);
|
||||
});
|
||||
@ -260,7 +260,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.type).toBe(expectedType);
|
||||
});
|
||||
@ -287,7 +287,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.type).toBe(expectedType);
|
||||
});
|
||||
@ -314,7 +314,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.risk).toBe(expectedRisk);
|
||||
});
|
||||
@ -341,7 +341,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.risk).toBe(expectedRisk);
|
||||
});
|
||||
@ -368,7 +368,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.risk).toBe(expectedRisk);
|
||||
});
|
||||
@ -395,7 +395,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.verifyMethod).toBe(expectedVerifyMethod);
|
||||
});
|
||||
@ -422,7 +422,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.verifyMethod).toBe(expectedVerifyMethod);
|
||||
});
|
||||
@ -449,7 +449,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.verifyMethod).toBe(expectedVerifyMethod);
|
||||
});
|
||||
@ -476,7 +476,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.verifyMethod).toBe(expectedVerifyMethod);
|
||||
});
|
||||
|
@ -11,9 +11,9 @@ import {

let relations = [];
let latestRequirement = {};
let requirements = {};
let requirements = new Map();
let latestElement = {};
let elements = {};
let elements = new Map();

const RequirementType = {
REQUIREMENT: 'Requirement',
@ -48,8 +48,8 @@ const Relationships = {
};

const addRequirement = (name, type) => {
if (requirements[name] === undefined) {
requirements[name] = {
if (!requirements.has(name)) {
requirements.set(name, {
name,
type,

@ -57,11 +57,11 @@ const addRequirement = (name, type) => {
text: latestRequirement.text,
risk: latestRequirement.risk,
verifyMethod: latestRequirement.verifyMethod,
};
});
}
latestRequirement = {};

return requirements[name];
return requirements.get(name);
};

const getRequirements = () => requirements;
@ -91,18 +91,17 @@ const setNewReqVerifyMethod = (verifyMethod) => {
};

const addElement = (name) => {
if (elements[name] === undefined) {
elements[name] = {
if (!elements.has(name)) {
elements.set(name, {
name,

type: latestElement.type,
docRef: latestElement.docRef,
};
});
log.info('Added new requirement: ', name);
}
latestElement = {};

return elements[name];
return elements.get(name);
};

const getElements = () => elements;
@ -132,9 +131,9 @@ const getRelationships = () => relations;
const clear = () => {
relations = [];
latestRequirement = {};
requirements = {};
requirements = new Map();
latestElement = {};
elements = {};
elements = new Map();
commonClear();
};

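addRequirement and addElement above share one upsert shape: guard with has(), insert with set(), and hand back the stored entry with get(). A trimmed sketch with the record reduced to a name and type:

    interface Requirement {
      name: string;
      type: string;
    }

    const requirements = new Map<string, Requirement>();

    const addRequirement = (name: string, type: string): Requirement => {
      if (!requirements.has(name)) {
        requirements.set(name, { name, type });
      }
      // get() is always defined here because the has()/set() branch ran first.
      return requirements.get(name)!;
    };

    console.assert(addRequirement('req1', 'Requirement').name === 'req1');
    console.assert(requirements.size === 1);
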
@ -192,8 +192,8 @@ const drawRelationshipFromLayout = function (svg, rel, g, insert, diagObj) {
};

export const drawReqs = (reqs, graph, svgNode) => {
Object.keys(reqs).forEach((reqName) => {
let req = reqs[reqName];
[...reqs.keys()].forEach((reqName) => {
let req = reqs.get(reqName);
reqName = elementString(reqName);
log.info('Added new requirement: ', reqName);

@ -237,8 +237,8 @@ export const drawReqs = (reqs, graph, svgNode) => {
};

export const drawElements = (els, graph, svgNode) => {
Object.keys(els).forEach((elName) => {
let el = els[elName];
[...els.keys()].forEach((elName) => {
let el = els.get(elName);
const id = elementString(elName);

const groupNode = svgNode.append('g').attr('id', id);

@ -15,12 +15,12 @@ let links: SankeyLink[] = [];
// Array of nodes guarantees their order
let nodes: SankeyNode[] = [];
// We also have to track nodes uniqueness (by ID)
let nodesMap: Record<string, SankeyNode> = {};
let nodesMap: Map<string, SankeyNode> = new Map();

const clear = (): void => {
links = [];
nodes = [];
nodesMap = {};
nodesMap = new Map();
commonClear();
};

@ -48,11 +48,11 @@ class SankeyNode {
const findOrCreateNode = (ID: string): SankeyNode => {
ID = common.sanitizeText(ID, getConfig());

if (!nodesMap[ID]) {
nodesMap[ID] = new SankeyNode(ID);
nodes.push(nodesMap[ID]);
if (!nodesMap.has(ID)) {
nodesMap.set(ID, new SankeyNode(ID));
nodes.push(nodesMap.get(ID)!);
}
return nodesMap[ID];
return nodesMap.get(ID)!;
};

const getNodes = () => nodes;

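findOrCreateNode keeps the ordered nodes array and the nodesMap uniqueness index in sync; the non-null assertions are safe because the has()/set() branch always runs first. A self-contained sketch with a minimal node class:

    class SankeyNode {
      constructor(public ID: string) {}
    }

    const nodes: SankeyNode[] = [];
    const nodesMap: Map<string, SankeyNode> = new Map();

    const findOrCreateNode = (ID: string): SankeyNode => {
      if (!nodesMap.has(ID)) {
        nodesMap.set(ID, new SankeyNode(ID));
        nodes.push(nodesMap.get(ID)!);
      }
      return nodesMap.get(ID)!;
    };

    console.assert(findOrCreateNode('a') === findOrCreateNode('a'));
    console.assert(nodes.length === 1);
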
@ -15,9 +15,9 @@ import type { Actor, AddMessageParams, Box, Message, Note } from './types.js';

interface SequenceState {
prevActor?: string;
actors: Record<string, Actor>;
createdActors: Record<string, number>;
destroyedActors: Record<string, number>;
actors: Map<string, Actor>;
createdActors: Map<string, number>;
destroyedActors: Map<string, number>;
boxes: Box[];
messages: Message[];
notes: Note[];
@ -30,9 +30,9 @@ interface SequenceState {

const state = new ImperativeState<SequenceState>(() => ({
prevActor: undefined,
actors: {},
createdActors: {},
destroyedActors: {},
actors: new Map(),
createdActors: new Map(),
destroyedActors: new Map(),
boxes: [],
messages: [],
notes: [],
@ -60,7 +60,7 @@ export const addActor = function (
type: string
) {
let assignedBox = state.records.currentBox;
const old = state.records.actors[id];
const old = state.records.actors.get(id);
if (old) {
// If already set and trying to set to a new one throw error
if (state.records.currentBox && old.box && state.records.currentBox !== old.box) {
@ -87,7 +87,7 @@ export const addActor = function (
description = { text: name, wrap: null, type };
}

state.records.actors[id] = {
state.records.actors.set(id, {
box: assignedBox,
name: name,
description: description.text,
@ -98,9 +98,9 @@ export const addActor = function (
actorCnt: null,
rectData: null,
type: type ?? 'participant',
};
if (state.records.prevActor && state.records.actors[state.records.prevActor]) {
state.records.actors[state.records.prevActor].nextActor = id;
});
if (state.records.prevActor && state.records.actors.has(state.records.prevActor)) {
state.records.actors.get(state.records.prevActor)!.nextActor = id;
}

if (state.records.currentBox) {
@ -210,10 +210,10 @@ export const getDestroyedActors = function () {
return state.records.destroyedActors;
};
export const getActor = function (id: string) {
return state.records.actors[id];
return state.records.actors.get(id)!;
};
export const getActorKeys = function () {
return Object.keys(state.records.actors);
return [...state.records.actors.keys()];
};
export const enableSequenceNumbers = function () {
state.records.sequenceNumbersEnabled = true;
@ -502,18 +502,18 @@ export const apply = function (param: any | AddMessageParams | AddMessageParams[
addActor(param.actor, param.actor, param.description, param.draw);
break;
case 'createParticipant':
if (state.records.actors[param.actor]) {
if (state.records.actors.has(param.actor)) {
throw new Error(
"It is not possible to have actors with the same id, even if one is destroyed before the next is created. Use 'AS' aliases to simulate the behavior"
);
}
state.records.lastCreated = param.actor;
addActor(param.actor, param.actor, param.description, param.draw);
state.records.createdActors[param.actor] = state.records.messages.length;
state.records.createdActors.set(param.actor, state.records.messages.length);
break;
case 'destroyParticipant':
state.records.lastDestroyed = param.actor;
state.records.destroyedActors[param.actor] = state.records.messages.length;
state.records.destroyedActors.set(param.actor, state.records.messages.length);
break;
case 'activeStart':
addSignal(param.actor, undefined, undefined, param.signalType);

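In sequenceDb the actors, createdActors and destroyedActors records become Maps inside the ImperativeState factory, so lookups switch to get()/has(), with a non-null assertion once existence has been checked. A reduced sketch of the prevActor linking, with a stripped-down actor shape standing in for the real Actor type:

    interface ActorLike {
      name: string;
      nextActor?: string;
    }

    const actors = new Map<string, ActorLike>();
    let prevActor: string | undefined;

    const addActor = (id: string, name: string): void => {
      actors.set(id, { name });
      if (prevActor && actors.has(prevActor)) {
        actors.get(prevActor)!.nextActor = id;
      }
      prevActor = id;
    };

    addActor('Alice', 'Alice');
    addActor('Bob', 'Bob');
    console.assert(actors.get('Alice')!.nextActor === 'Bob');
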
@ -192,8 +192,8 @@ Bob-->Alice: I am good thanks!`;

await mermaidAPI.parse(str);
const actors = diagram.db.getActors();
expect(actors.Alice.description).toBe('Alice');
actors.Bob.description = 'Bob';
expect(actors.get('Alice').description).toBe('Alice');
actors.get('Bob').description = 'Bob';

const messages = diagram.db.getMessages();

@ -235,8 +235,8 @@ Bob-->Alice: I am good thanks!`;

await mermaidAPI.parse(str);
const actors = diagram.db.getActors();
expect(actors.Alice.description).toBe('Alice');
actors.Bob.description = 'Bob';
expect(actors.get('Alice').description).toBe('Alice');
actors.get('Bob').description = 'Bob';

expect(diagram.db.getAccDescription()).toBe('');
const messages = diagram.db.getMessages();
@ -258,8 +258,8 @@ Bob-->Alice: I am good thanks!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
expect(diagram.db.getAccDescription()).toBe('');
|
||||
const messages = diagram.db.getMessages();
|
||||
@ -311,8 +311,8 @@ Bob-->Alice: I am good thanks!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -328,8 +328,8 @@ Bob-->Alice-in-Wonderland:I am good thanks!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors['Alice-in-Wonderland'].description).toBe('Alice-in-Wonderland');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice-in-Wonderland').description).toBe('Alice-in-Wonderland');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -348,9 +348,9 @@ Bob-->Alice-in-Wonderland:I am good thanks!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(Object.keys(actors)).toEqual(['Alice-in-Wonderland', 'Bob']);
|
||||
expect(actors['Alice-in-Wonderland'].description).toBe('Alice-in-Wonderland');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect([...actors.keys()].sort()).toEqual(['Alice-in-Wonderland', 'Bob']);
|
||||
expect(actors.get('Alice-in-Wonderland').description).toBe('Alice-in-Wonderland');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -371,9 +371,9 @@ B-->A: I am good thanks!`;
|
||||
|
||||
const actors = diagram.db.getActors();
|
||||
|
||||
expect(Object.keys(actors)).toEqual(['A', 'B']);
|
||||
expect(actors.A.description).toBe('Alice');
|
||||
expect(actors.B.description).toBe('Bob');
|
||||
expect([...actors.keys()].sort()).toEqual(['A', 'B']);
|
||||
expect(actors.get('A').description).toBe('Alice');
|
||||
expect(actors.get('B').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
expect(messages.length).toBe(2);
|
||||
@ -396,12 +396,12 @@ sequenceDiagram
|
||||
await mermaidAPI.parse(str);
|
||||
|
||||
const actors = diagram.db.getActors();
|
||||
expect(Object.keys(actors)).toEqual(['Alice', 'Bob', 'John', 'Mandy', 'Joan']);
|
||||
expect(actors.Alice.description).toBe('Alice2');
|
||||
expect(actors.Alice.type).toBe('actor');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.John.type).toBe('participant');
|
||||
expect(actors.Joan.type).toBe('participant');
|
||||
expect([...actors.keys()]).toEqual(['Alice', 'Bob', 'John', 'Mandy', 'Joan']);
|
||||
expect(actors.get('Alice').description).toBe('Alice2');
|
||||
expect(actors.get('Alice').type).toBe('actor');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
expect(actors.get('John').type).toBe('participant');
|
||||
expect(actors.get('Joan').type).toBe('participant');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
expect(messages.length).toBe(5);
|
||||
@ -419,9 +419,9 @@ B-->A: I am good thanks!`;
|
||||
await mermaidAPI.parse(str);
|
||||
|
||||
const actors = diagram.db.getActors();
|
||||
expect(Object.keys(actors)).toEqual(['A', 'B']);
|
||||
expect(actors.A.description).toBe('Alice');
|
||||
expect(actors.B.description).toBe('Bob');
|
||||
expect([...actors.keys()]).toEqual(['A', 'B']);
|
||||
expect(actors.get('A').description).toBe('Alice');
|
||||
expect(actors.get('B').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
expect(messages.length).toBe(2);
|
||||
@ -435,8 +435,8 @@ Alice-xBob:Hello Bob, how are you?`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -450,8 +450,8 @@ Alice--xBob:Hello Bob, how are you?`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -465,8 +465,8 @@ Alice-)Bob:Hello Bob, how are you?`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -480,8 +480,8 @@ Alice--)Bob:Hello Bob, how are you?`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -495,8 +495,8 @@ Alice->>Bob:Hello Bob, how are you?`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -508,8 +508,8 @@ Alice->>Bob:Hello Bob, how are you?`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -526,8 +526,8 @@ deactivate Bob`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -547,8 +547,8 @@ deactivate Bob`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -571,8 +571,8 @@ deactivate Bob`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -624,8 +624,8 @@ deactivate Bob`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -645,8 +645,8 @@ deactivate Bob`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -660,8 +660,8 @@ sequenceDiagram;Alice->Bob: Hello Bob, how are you?;Note right of Bob: Bob think
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -680,8 +680,8 @@ Bob-->Alice: I am good thanks!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -700,8 +700,8 @@ Bob-->Alice: I am good thanks!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -725,8 +725,8 @@ Bob-->John: Jolly good!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -754,10 +754,10 @@ note right of 1: multiline<br \t/>text
|
||||
await mermaidAPI.parse(str);
|
||||
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors['1'].description).toBe('multiline<br>text');
|
||||
expect(actors['2'].description).toBe('multiline<br/>text');
|
||||
expect(actors['3'].description).toBe('multiline<br />text');
|
||||
expect(actors['4'].description).toBe('multiline<br \t/>text');
|
||||
expect(actors.get('1').description).toBe('multiline<br>text');
|
||||
expect(actors.get('2').description).toBe('multiline<br/>text');
|
||||
expect(actors.get('3').description).toBe('multiline<br />text');
|
||||
expect(actors.get('4').description).toBe('multiline<br \t/>text');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
expect(messages[0].message).toBe('multiline<br>text');
|
||||
@ -893,8 +893,8 @@ end`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -915,8 +915,8 @@ end`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
expect(messages[1].type).toEqual(diagram.db.LINETYPE.RECT_START);
|
||||
@ -940,8 +940,8 @@ end`;
|
||||
`;
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
expect(messages[1].type).toEqual(diagram.db.LINETYPE.RECT_START);
|
||||
@ -967,8 +967,8 @@ end`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -993,8 +993,8 @@ end`;
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -1039,8 +1039,8 @@ sequenceDiagram
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
|
||||
expect(actors.Service.description).toBe('Service');
|
||||
expect(actors.DB.description).toBe('DB');
|
||||
expect(actors.get('Service').description).toBe('Service');
|
||||
expect(actors.get('DB').description).toBe('DB');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -1063,8 +1063,8 @@ sequenceDiagram
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
|
||||
expect(actors.Service.description).toBe('Service');
|
||||
expect(actors.DB.description).toBe('DB');
|
||||
expect(actors.get('Service').description).toBe('Service');
|
||||
expect(actors.get('DB').description).toBe('DB');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -1090,8 +1090,8 @@ sequenceDiagram
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
|
||||
expect(actors.Consumer.description).toBe('Consumer');
|
||||
expect(actors.API.description).toBe('API');
|
||||
expect(actors.get('Consumer').description).toBe('Consumer');
|
||||
expect(actors.get('API').description).toBe('API');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -1120,8 +1120,8 @@ end`;
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -1142,8 +1142,8 @@ end`;
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -1309,15 +1309,15 @@ link a: Tests @ https://tests.contoso.com/?svc=alice@contoso.com
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.a.links['Repo']).toBe('https://repo.contoso.com/');
|
||||
expect(actors.b.links['Repo']).toBe(undefined);
|
||||
expect(actors.a.links['Dashboard']).toBe('https://dashboard.contoso.com/');
|
||||
expect(actors.b.links['Dashboard']).toBe('https://dashboard.contoso.com/');
|
||||
expect(actors.a.links['On-Call']).toBe('https://oncall.contoso.com/?svc=alice');
|
||||
expect(actors.c.links['Dashboard']).toBe(undefined);
|
||||
expect(actors.a.links['Endpoint']).toBe('https://alice.contoso.com');
|
||||
expect(actors.a.links['Swagger']).toBe('https://swagger.contoso.com');
|
||||
expect(actors.a.links['Tests']).toBe('https://tests.contoso.com/?svc=alice@contoso.com');
|
||||
expect(actors.get('a').links['Repo']).toBe('https://repo.contoso.com/');
|
||||
expect(actors.get('b').links['Repo']).toBe(undefined);
|
||||
expect(actors.get('a').links['Dashboard']).toBe('https://dashboard.contoso.com/');
|
||||
expect(actors.get('b').links['Dashboard']).toBe('https://dashboard.contoso.com/');
|
||||
expect(actors.get('a').links['On-Call']).toBe('https://oncall.contoso.com/?svc=alice');
|
||||
expect(actors.get('c').links['Dashboard']).toBe(undefined);
|
||||
expect(actors.get('a').links['Endpoint']).toBe('https://alice.contoso.com');
|
||||
expect(actors.get('a').links['Swagger']).toBe('https://swagger.contoso.com');
|
||||
expect(actors.get('a').links['Tests']).toBe('https://tests.contoso.com/?svc=alice@contoso.com');
|
||||
});
|
||||
|
||||
it('should handle properties EXPERIMENTAL: USE WITH CAUTION', async () => {
|
||||
@ -1333,11 +1333,11 @@ properties b: {"class": "external-service-actor", "icon": "@computer"}
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.a.properties['class']).toBe('internal-service-actor');
|
||||
expect(actors.b.properties['class']).toBe('external-service-actor');
|
||||
expect(actors.a.properties['icon']).toBe('@clock');
|
||||
expect(actors.b.properties['icon']).toBe('@computer');
|
||||
expect(actors.c.properties['class']).toBe(undefined);
|
||||
expect(actors.get('a').properties['class']).toBe('internal-service-actor');
|
||||
expect(actors.get('b').properties['class']).toBe('external-service-actor');
|
||||
expect(actors.get('a').properties['icon']).toBe('@clock');
|
||||
expect(actors.get('b').properties['icon']).toBe('@computer');
|
||||
expect(actors.get('c').properties['class']).toBe(undefined);
|
||||
});
|
||||
|
||||
it('should handle box', async () => {
|
||||
@ -1423,14 +1423,14 @@ link a: Tests @ https://tests.contoso.com/?svc=alice@contoso.com
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
const createdActors = diagram.db.getCreatedActors();
|
||||
expect(actors['c'].name).toEqual('c');
|
||||
expect(actors['c'].description).toEqual('c');
|
||||
expect(actors['c'].type).toEqual('participant');
|
||||
expect(createdActors['c']).toEqual(1);
|
||||
expect(actors['d'].name).toEqual('d');
|
||||
expect(actors['d'].description).toEqual('Donald');
|
||||
expect(actors['d'].type).toEqual('actor');
|
||||
expect(createdActors['d']).toEqual(3);
|
||||
expect(actors.get('c').name).toEqual('c');
|
||||
expect(actors.get('c').description).toEqual('c');
|
||||
expect(actors.get('c').type).toEqual('participant');
|
||||
expect(createdActors.get('c')).toEqual(1);
|
||||
expect(actors.get('d').name).toEqual('d');
|
||||
expect(actors.get('d').description).toEqual('Donald');
|
||||
expect(actors.get('d').type).toEqual('actor');
|
||||
expect(createdActors.get('d')).toEqual(3);
|
||||
});
|
||||
it('should handle simple actor destruction', async () => {
|
||||
const str = `
|
||||
@ -1445,8 +1445,8 @@ link a: Tests @ https://tests.contoso.com/?svc=alice@contoso.com
|
||||
`;
|
||||
await mermaidAPI.parse(str);
|
||||
const destroyedActors = diagram.db.getDestroyedActors();
|
||||
expect(destroyedActors['a']).toEqual(1);
|
||||
expect(destroyedActors['c']).toEqual(3);
|
||||
expect(destroyedActors.get('a')).toEqual(1);
|
||||
expect(destroyedActors.get('c')).toEqual(3);
|
||||
});
|
||||
it('should handle the creation and destruction of the same actor', async () => {
|
||||
const str = `
|
||||
@ -1461,8 +1461,8 @@ link a: Tests @ https://tests.contoso.com/?svc=alice@contoso.com
|
||||
await mermaidAPI.parse(str);
|
||||
const createdActors = diagram.db.getCreatedActors();
|
||||
const destroyedActors = diagram.db.getDestroyedActors();
|
||||
expect(createdActors['c']).toEqual(1);
|
||||
expect(destroyedActors['c']).toEqual(3);
|
||||
expect(createdActors.get('c')).toEqual(1);
|
||||
expect(destroyedActors.get('c')).toEqual(3);
|
||||
});
|
||||
});
|
||||
describe('when checking the bounds in a sequenceDiagram', function () {
|
||||
@ -1668,7 +1668,7 @@ participant Alice
|
||||
await mermaidAPI.parse(str);
|
||||
|
||||
const actors = diagram.db.getActors();
|
||||
expect(Object.keys(actors)).toEqual(['Alice']);
|
||||
expect([...actors.keys()]).toEqual(['Alice']);
|
||||
});
|
||||
it('should handle one actor and a centered note', async () => {
|
||||
const str = `
|
||||

@ -144,7 +144,7 @@ export const bounds = {
this.updateBounds(_startx, _starty, _stopx, _stopy);
},
newActivation: function (message, diagram, actors) {
const actorRect = actors[message.from.actor];
const actorRect = actors.get(message.from.actor);
const stackedSize = actorActivations(message.from.actor).length || 0;
const x = actorRect.x + actorRect.width / 2 + ((stackedSize - 1) * conf.activationWidth) / 2;
this.activations.push({
@ -488,7 +488,7 @@ const drawMessage = async function (diagram, msgModel, lineStartY: number, diagO
const addActorRenderingData = async function (
diagram,
actors,
createdActors,
createdActors: Map<string, any>,
actorKeys,
verticalPos,
messages,
@ -500,7 +500,7 @@ const addActorRenderingData = async function (
let maxHeight = 0;

for (const actorKey of actorKeys) {
const actor = actors[actorKey];
const actor = actors.get(actorKey);
const box = actor.box;

// end of box
@ -528,7 +528,7 @@ const addActorRenderingData = async function (
maxHeight = common.getMax(maxHeight, actor.height);

// if the actor is created by a message, widen margin
if (createdActors[actor.name]) {
if (createdActors.get(actor.name)) {
prevMargin += actor.width / 2;
}

@ -558,7 +558,7 @@ const addActorRenderingData = async function (
export const drawActors = async function (diagram, actors, actorKeys, isFooter) {
if (!isFooter) {
for (const actorKey of actorKeys) {
const actor = actors[actorKey];
const actor = actors.get(actorKey);
// Draw the box with the attached line
await svgDraw.drawActor(diagram, actor, conf, false);
}
@ -566,7 +566,7 @@ export const drawActors = async function (diagram, actors, actorKeys, isFooter)
let maxHeight = 0;
bounds.bumpVerticalPos(conf.boxMargin * 2);
for (const actorKey of actorKeys) {
const actor = actors[actorKey];
const actor = actors.get(actorKey);
if (!actor.stopy) {
actor.stopy = bounds.getVerticalPos();
}
@ -581,7 +581,7 @@ export const drawActorsPopup = function (diagram, actors, actorKeys, doc) {
let maxHeight = 0;
let maxWidth = 0;
for (const actorKey of actorKeys) {
const actor = actors[actorKey];
const actor = actors.get(actorKey);
const minMenuWidth = getRequiredPopupWidth(actor);
const menuDimensions = svgDraw.drawPopup(
diagram,
@ -624,7 +624,7 @@ const actorActivations = function (actor) {

const activationBounds = function (actor, actors) {
// handle multiple stacked activations for same actor
const actorObj = actors[actor];
const actorObj = actors.get(actor);
const activations = actorActivations(actor);

const left = activations.reduce(
@ -682,7 +682,7 @@ function adjustCreatedDestroyedData(
destroyedActors
) {
function receiverAdjustment(actor, adjustment) {
if (actor.x < actors[msg.from].x) {
if (actor.x < actors.get(msg.from).x) {
bounds.insert(
msgModel.stopx - adjustment,
msgModel.starty,
@ -702,7 +702,7 @@ function adjustCreatedDestroyedData(
}

function senderAdjustment(actor, adjustment) {
if (actor.x < actors[msg.to].x) {
if (actor.x < actors.get(msg.to).x) {
bounds.insert(
msgModel.startx - adjustment,
msgModel.starty,
@ -722,16 +722,16 @@ function adjustCreatedDestroyedData(
}

// if it is a create message
if (createdActors[msg.to] == index) {
const actor = actors[msg.to];
if (createdActors.get(msg.to) == index) {
const actor = actors.get(msg.to);
const adjustment = actor.type == 'actor' ? ACTOR_TYPE_WIDTH / 2 + 3 : actor.width / 2 + 3;
receiverAdjustment(actor, adjustment);
actor.starty = lineStartY - actor.height / 2;
bounds.bumpVerticalPos(actor.height / 2);
}
// if it is a destroy sender message
else if (destroyedActors[msg.from] == index) {
const actor = actors[msg.from];
else if (destroyedActors.get(msg.from) == index) {
const actor = actors.get(msg.from);
if (conf.mirrorActors) {
const adjustment = actor.type == 'actor' ? ACTOR_TYPE_WIDTH / 2 : actor.width / 2;
senderAdjustment(actor, adjustment);
@ -740,8 +740,8 @@ function adjustCreatedDestroyedData(
bounds.bumpVerticalPos(actor.height / 2);
}
// if it is a destroy receiver message
else if (destroyedActors[msg.to] == index) {
const actor = actors[msg.to];
else if (destroyedActors.get(msg.to) == index) {
const actor = actors.get(msg.to);
if (conf.mirrorActors) {
const adjustment = actor.type == 'actor' ? ACTOR_TYPE_WIDTH / 2 + 3 : actor.width / 2 + 3;
receiverAdjustment(actor, adjustment);
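The renderer hunks above all apply the same mechanical translation from plain-object lookups to Map methods. A minimal sketch of the equivalences involved, assuming a deliberately abbreviated Actor shape for illustration only:

interface Actor {
  name: string;
  description: string;
}

// Before: a plain object keyed by actor id
const actorsObj: Record<string, Actor> = {};
actorsObj['Alice'] = { name: 'Alice', description: 'Alice' };
const objIds = Object.keys(actorsObj);              // enumerate ids
const objAlice = actorsObj['Alice'];                // lookup
const objHasBob = actorsObj['Bob'] !== undefined;   // existence check

// After: a Map keyed by actor id, as used throughout this commit
const actors = new Map<string, Actor>();
actors.set('Alice', { name: 'Alice', description: 'Alice' });
const ids = [...actors.keys()];                     // enumerate ids
const alice = actors.get('Alice');                  // lookup (undefined if absent)
const hasBob = actors.has('Bob');                   // existence check

The same substitutions recur in the remaining hunks and in the test expectations: Object.keys(x) becomes [...x.keys()], bracket access becomes get(), and === undefined guards become has().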
@ -1132,15 +1132,15 @@ export const draw = async function (_text: string, id: string, _version: string,
|
||||
* @returns The max message width of each actor.
|
||||
*/
|
||||
async function getMaxMessageWidthPerActor(
|
||||
actors: { [id: string]: any },
|
||||
actors: Map<string, any>,
|
||||
messages: any[],
|
||||
diagObj: Diagram
|
||||
): Promise<{ [id: string]: number }> {
|
||||
const maxMessageWidthPerActor = {};
|
||||
|
||||
for (const msg of messages) {
|
||||
if (actors[msg.to] && actors[msg.from]) {
|
||||
const actor = actors[msg.to];
|
||||
if (actors.get(msg.to) && actors.get(msg.from)) {
|
||||
const actor = actors.get(msg.to);
|
||||
|
||||
// If this is the first actor, and the message is left of it, no need to calculate the margin
|
||||
if (msg.placement === diagObj.db.PLACEMENT.LEFTOF && !actor.prevActor) {
|
||||
@ -1258,13 +1258,13 @@ const getRequiredPopupWidth = function (actor) {
|
||||
* @param boxes - The boxes around the actors if any
|
||||
*/
|
||||
async function calculateActorMargins(
|
||||
actors: { [id: string]: any },
|
||||
actors: Map<string, any>,
|
||||
actorToMessageWidth: Awaited<ReturnType<typeof getMaxMessageWidthPerActor>>,
|
||||
boxes
|
||||
) {
|
||||
let maxHeight = 0;
|
||||
for (const prop of Object.keys(actors)) {
|
||||
const actor = actors[prop];
|
||||
for (const prop of actors.keys()) {
|
||||
const actor = actors.get(prop);
|
||||
if (actor.wrap) {
|
||||
actor.description = utils.wrapLabel(
|
||||
actor.description,
|
||||
@ -1285,13 +1285,13 @@ async function calculateActorMargins(
|
||||
}
|
||||
|
||||
for (const actorKey in actorToMessageWidth) {
|
||||
const actor = actors[actorKey];
|
||||
const actor = actors.get(actorKey);
|
||||
|
||||
if (!actor) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const nextActor = actors[actor.nextActor];
|
||||
const nextActor = actors.get(actor.nextActor);
|
||||
|
||||
// No need to space out an actor that doesn't have a next link
|
||||
if (!nextActor) {
|
||||
@ -1311,7 +1311,7 @@ async function calculateActorMargins(
|
||||
boxes.forEach((box) => {
|
||||
const textFont = messageFont(conf);
|
||||
let totalWidth = box.actorKeys.reduce((total, aKey) => {
|
||||
return (total += actors[aKey].width + (actors[aKey].margin || 0));
|
||||
return (total += actors.get(aKey).width + (actors.get(aKey).margin || 0));
|
||||
}, 0);
|
||||
|
||||
totalWidth -= 2 * conf.boxTextMargin;
|
||||
@ -1334,8 +1334,10 @@ async function calculateActorMargins(
}

const buildNoteModel = async function (msg, actors, diagObj) {
const startx = actors[msg.from].x;
const stopx = actors[msg.to].x;
const fromActor = actors.get(msg.from);
const toActor = actors.get(msg.to);
const startx = fromActor.x;
const stopx = toActor.x;
const shouldWrap = msg.wrap && msg.message;

let textDimensions: { width: number; height: number; lineHeight?: number } = hasKatex(msg.message)
@ -1349,7 +1351,7 @@ const buildNoteModel = async function (msg, actors, diagObj) {
? conf.width
: common.getMax(conf.width, textDimensions.width + 2 * conf.noteMargin),
height: 0,
startx: actors[msg.from].x,
startx: fromActor.x,
stopx: 0,
starty: 0,
stopy: 0,
@ -1359,45 +1361,36 @@ const buildNoteModel = async function (msg, actors, diagObj) {
|
||||
noteModel.width = shouldWrap
|
||||
? common.getMax(conf.width, textDimensions.width)
|
||||
: common.getMax(
|
||||
actors[msg.from].width / 2 + actors[msg.to].width / 2,
|
||||
fromActor.width / 2 + toActor.width / 2,
|
||||
textDimensions.width + 2 * conf.noteMargin
|
||||
);
|
||||
noteModel.startx = startx + (actors[msg.from].width + conf.actorMargin) / 2;
|
||||
noteModel.startx = startx + (fromActor.width + conf.actorMargin) / 2;
|
||||
} else if (msg.placement === diagObj.db.PLACEMENT.LEFTOF) {
|
||||
noteModel.width = shouldWrap
|
||||
? common.getMax(conf.width, textDimensions.width + 2 * conf.noteMargin)
|
||||
: common.getMax(
|
||||
actors[msg.from].width / 2 + actors[msg.to].width / 2,
|
||||
fromActor.width / 2 + toActor.width / 2,
|
||||
textDimensions.width + 2 * conf.noteMargin
|
||||
);
|
||||
noteModel.startx = startx - noteModel.width + (actors[msg.from].width - conf.actorMargin) / 2;
|
||||
noteModel.startx = startx - noteModel.width + (fromActor.width - conf.actorMargin) / 2;
|
||||
} else if (msg.to === msg.from) {
|
||||
textDimensions = utils.calculateTextDimensions(
|
||||
shouldWrap
|
||||
? utils.wrapLabel(
|
||||
msg.message,
|
||||
common.getMax(conf.width, actors[msg.from].width),
|
||||
noteFont(conf)
|
||||
)
|
||||
? utils.wrapLabel(msg.message, common.getMax(conf.width, fromActor.width), noteFont(conf))
|
||||
: msg.message,
|
||||
noteFont(conf)
|
||||
);
|
||||
noteModel.width = shouldWrap
|
||||
? common.getMax(conf.width, actors[msg.from].width)
|
||||
: common.getMax(
|
||||
actors[msg.from].width,
|
||||
conf.width,
|
||||
textDimensions.width + 2 * conf.noteMargin
|
||||
);
|
||||
noteModel.startx = startx + (actors[msg.from].width - noteModel.width) / 2;
|
||||
? common.getMax(conf.width, fromActor.width)
|
||||
: common.getMax(fromActor.width, conf.width, textDimensions.width + 2 * conf.noteMargin);
|
||||
noteModel.startx = startx + (fromActor.width - noteModel.width) / 2;
|
||||
} else {
|
||||
noteModel.width =
|
||||
Math.abs(startx + actors[msg.from].width / 2 - (stopx + actors[msg.to].width / 2)) +
|
||||
conf.actorMargin;
|
||||
Math.abs(startx + fromActor.width / 2 - (stopx + toActor.width / 2)) + conf.actorMargin;
|
||||
noteModel.startx =
|
||||
startx < stopx
|
||||
? startx + actors[msg.from].width / 2 - conf.actorMargin / 2
|
||||
: stopx + actors[msg.to].width / 2 - conf.actorMargin / 2;
|
||||
? startx + fromActor.width / 2 - conf.actorMargin / 2
|
||||
: stopx + toActor.width / 2 - conf.actorMargin / 2;
|
||||
}
|
||||
if (shouldWrap) {
|
||||
noteModel.message = utils.wrapLabel(
|
||||
@ -1545,7 +1538,7 @@ const calculateLoopBounds = async function (messages, actors, _maxWidthPerActor,
|
||||
break;
|
||||
case diagObj.db.LINETYPE.ACTIVE_START:
|
||||
{
|
||||
const actorRect = actors[msg.from ? msg.from.actor : msg.to.actor];
|
||||
const actorRect = actors.get(msg.from ? msg.from.actor : msg.to.actor);
|
||||
const stackedSize = actorActivations(msg.from ? msg.from.actor : msg.to.actor).length;
|
||||
const x =
|
||||
actorRect.x + actorRect.width / 2 + ((stackedSize - 1) * conf.activationWidth) / 2;
|
||||
@ -1585,8 +1578,8 @@ const calculateLoopBounds = async function (messages, actors, _maxWidthPerActor,
|
||||
stack.forEach((stk) => {
|
||||
current = stk;
|
||||
if (msgModel.startx === msgModel.stopx) {
|
||||
const from = actors[msg.from];
|
||||
const to = actors[msg.to];
|
||||
const from = actors.get(msg.from);
|
||||
const to = actors.get(msg.to);
|
||||
current.from = common.getMin(
|
||||
from.x - msgModel.width / 2,
|
||||
from.x - from.width / 2,
|
||||
|
@ -307,7 +307,7 @@ export const fixLifeLineHeights = (diagram, actors, actorKeys, conf) => {
|
||||
return;
|
||||
}
|
||||
actorKeys.forEach((actorKey) => {
|
||||
const actor = actors[actorKey];
|
||||
const actor = actors.get(actorKey);
|
||||
const actorDOM = diagram.select('#actor' + actor.actorCnt);
|
||||
if (!conf.mirrorActors && actor.stopy) {
|
||||
actorDOM.attr('y2', actor.stopy + actor.height / 2);
|
||||
|
@ -21,8 +21,8 @@ describe('state parser can parse...', () => {
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
|
||||
const states = stateDiagram.parser.yy.getStates();
|
||||
expect(states['namedState1']).not.toBeUndefined();
|
||||
expect(states['namedState1'].descriptions.join(' ')).toEqual('Small State 1');
|
||||
expect(states.get('namedState1')).not.toBeUndefined();
|
||||
expect(states.get('namedState1').descriptions.join(' ')).toEqual('Small State 1');
|
||||
});
|
||||
});
|
||||
|
||||
@ -34,8 +34,8 @@ describe('state parser can parse...', () => {
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
|
||||
const states = stateDiagram.parser.yy.getStates();
|
||||
expect(states['namedState1']).not.toBeUndefined();
|
||||
expect(states['namedState1'].descriptions.join(' ')).toEqual('Small State 1');
|
||||
expect(states.get('namedState1')).not.toBeUndefined();
|
||||
expect(states.get('namedState1').descriptions.join(' ')).toEqual('Small State 1');
|
||||
});
|
||||
|
||||
it('no spaces before and after the colon', () => {
|
||||
@ -45,8 +45,8 @@ describe('state parser can parse...', () => {
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
|
||||
const states = stateDiagram.parser.yy.getStates();
|
||||
expect(states['namedState1']).not.toBeUndefined();
|
||||
expect(states['namedState1'].descriptions.join(' ')).toEqual('Small State 1');
|
||||
expect(states.get('namedState1')).not.toBeUndefined();
|
||||
expect(states.get('namedState1').descriptions.join(' ')).toEqual('Small State 1');
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -62,8 +62,8 @@ describe('state parser can parse...', () => {
|
||||
stateDiagram.parser.parse(diagramText);
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
const states = stateDiagram.parser.yy.getStates();
|
||||
expect(states['assemble']).not.toBeUndefined();
|
||||
expect(states['assemblies']).not.toBeUndefined();
|
||||
expect(states.get('assemble')).not.toBeUndefined();
|
||||
expect(states.get('assemblies')).not.toBeUndefined();
|
||||
});
|
||||
|
||||
it('state "as" as as', function () {
|
||||
@ -73,8 +73,8 @@ describe('state parser can parse...', () => {
|
||||
stateDiagram.parser.parse(diagramText);
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
const states = stateDiagram.parser.yy.getStates();
|
||||
expect(states['as']).not.toBeUndefined();
|
||||
expect(states['as'].descriptions.join(' ')).toEqual('as');
|
||||
expect(states.get('as')).not.toBeUndefined();
|
||||
expect(states.get('as').descriptions.join(' ')).toEqual('as');
|
||||
});
|
||||
});
|
||||
|
||||
@ -99,12 +99,12 @@ describe('state parser can parse...', () => {
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
|
||||
const states = stateDiagram.parser.yy.getStates();
|
||||
expect(states['namedState1']).not.toBeUndefined();
|
||||
expect(states['bigState1']).not.toBeUndefined();
|
||||
expect(states['bigState1'].doc[0].id).toEqual('bigState1InternalState');
|
||||
expect(states['namedState2']).not.toBeUndefined();
|
||||
expect(states['bigState2']).not.toBeUndefined();
|
||||
expect(states['bigState2'].doc[0].id).toEqual('bigState2InternalState');
|
||||
expect(states.get('namedState1')).not.toBeUndefined();
|
||||
expect(states.get('bigState1')).not.toBeUndefined();
|
||||
expect(states.get('bigState1').doc[0].id).toEqual('bigState1InternalState');
|
||||
expect(states.get('namedState2')).not.toBeUndefined();
|
||||
expect(states.get('bigState2')).not.toBeUndefined();
|
||||
expect(states.get('bigState2').doc[0].id).toEqual('bigState2InternalState');
|
||||
const relationships = stateDiagram.parser.yy.getRelations();
|
||||
expect(relationships[0].id1).toEqual('namedState1');
|
||||
expect(relationships[0].id2).toEqual('bigState1');
|
||||
@ -123,11 +123,11 @@ describe('state parser can parse...', () => {
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
|
||||
const states = stateDiagram.parser.yy.getStates();
|
||||
expect(states['bigState1']).not.toBeUndefined();
|
||||
expect(states['bigState1'].doc[0].id).toEqual('inner1');
|
||||
expect(states['bigState1'].doc[0].description).toEqual('inner state 1');
|
||||
expect(states['bigState1'].doc[1].id).toEqual('inner2');
|
||||
expect(states['bigState1'].doc[1].description).toEqual('inner state 2');
|
||||
expect(states.get('bigState1')).not.toBeUndefined();
|
||||
expect(states.get('bigState1').doc[0].id).toEqual('inner1');
|
||||
expect(states.get('bigState1').doc[0].description).toEqual('inner state 1');
|
||||
expect(states.get('bigState1').doc[1].id).toEqual('inner2');
|
||||
expect(states.get('bigState1').doc[1].description).toEqual('inner state 2');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -19,8 +19,8 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
|
||||
const styleClasses = stateDb.getClasses();
|
||||
expect(styleClasses['exampleClass'].styles.length).toEqual(1);
|
||||
expect(styleClasses['exampleClass'].styles[0]).toEqual('background:#bbb');
|
||||
expect(styleClasses.get('exampleClass').styles.length).toEqual(1);
|
||||
expect(styleClasses.get('exampleClass').styles[0]).toEqual('background:#bbb');
|
||||
});
|
||||
|
||||
it('can define multiple attributes separated by commas', function () {
|
||||
@ -30,10 +30,10 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
|
||||
const styleClasses = stateDb.getClasses();
|
||||
expect(styleClasses['exampleClass'].styles.length).toEqual(3);
|
||||
expect(styleClasses['exampleClass'].styles[0]).toEqual('background:#bbb');
|
||||
expect(styleClasses['exampleClass'].styles[1]).toEqual('font-weight:bold');
|
||||
expect(styleClasses['exampleClass'].styles[2]).toEqual('font-style:italic');
|
||||
expect(styleClasses.get('exampleClass').styles.length).toEqual(3);
|
||||
expect(styleClasses.get('exampleClass').styles[0]).toEqual('background:#bbb');
|
||||
expect(styleClasses.get('exampleClass').styles[1]).toEqual('font-weight:bold');
|
||||
expect(styleClasses.get('exampleClass').styles[2]).toEqual('font-style:italic');
|
||||
});
|
||||
|
||||
// need to look at what the lexer is doing
|
||||
@ -44,9 +44,9 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
|
||||
const classes = stateDiagram.parser.yy.getClasses();
|
||||
expect(classes['exampleStyleClass'].styles.length).toBe(2);
|
||||
expect(classes['exampleStyleClass'].styles[0]).toBe('background:#bbb');
|
||||
expect(classes['exampleStyleClass'].styles[1]).toBe('border:1.5px solid red');
|
||||
expect(classes.get('exampleStyleClass').styles.length).toBe(2);
|
||||
expect(classes.get('exampleStyleClass').styles[0]).toBe('background:#bbb');
|
||||
expect(classes.get('exampleStyleClass').styles[1]).toBe('border:1.5px solid red');
|
||||
});
|
||||
|
||||
it('an attribute can have a space in the style', function () {
|
||||
@ -56,9 +56,9 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
|
||||
const classes = stateDiagram.parser.yy.getClasses();
|
||||
expect(classes['exampleStyleClass'].styles.length).toBe(2);
|
||||
expect(classes['exampleStyleClass'].styles[0]).toBe('background: #bbb');
|
||||
expect(classes['exampleStyleClass'].styles[1]).toBe('border:1.5px solid red');
|
||||
expect(classes.get('exampleStyleClass').styles.length).toBe(2);
|
||||
expect(classes.get('exampleStyleClass').styles[0]).toBe('background: #bbb');
|
||||
expect(classes.get('exampleStyleClass').styles[1]).toBe('border:1.5px solid red');
|
||||
});
|
||||
});
|
||||
|
||||
@ -74,9 +74,9 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
|
||||
const classes = stateDb.getClasses();
|
||||
expect(classes['exampleStyleClass'].styles.length).toEqual(2);
|
||||
expect(classes['exampleStyleClass'].styles[0]).toEqual('background:#bbb');
|
||||
expect(classes['exampleStyleClass'].styles[1]).toEqual('border:1px solid red');
|
||||
expect(classes.get('exampleStyleClass').styles.length).toEqual(2);
|
||||
expect(classes.get('exampleStyleClass').styles[0]).toEqual('background:#bbb');
|
||||
expect(classes.get('exampleStyleClass').styles[1]).toEqual('border:1px solid red');
|
||||
|
||||
const state_a = stateDb.getState('a');
|
||||
expect(state_a.classes.length).toEqual(1);
|
||||
@ -95,9 +95,9 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
|
||||
const classes = stateDiagram.parser.yy.getClasses();
|
||||
expect(classes['exampleStyleClass'].styles.length).toBe(2);
|
||||
expect(classes['exampleStyleClass'].styles[0]).toBe('background:#bbb');
|
||||
expect(classes['exampleStyleClass'].styles[1]).toBe('border:1px solid red');
|
||||
expect(classes.get('exampleStyleClass').styles.length).toBe(2);
|
||||
expect(classes.get('exampleStyleClass').styles[0]).toBe('background:#bbb');
|
||||
expect(classes.get('exampleStyleClass').styles[1]).toBe('border:1px solid red');
|
||||
|
||||
const state_a_a = stateDiagram.parser.yy.getState('a_a');
|
||||
expect(state_a_a.classes.length).toEqual(1);
|
||||
@ -117,11 +117,11 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
|
||||
const states = stateDiagram.parser.yy.getStates();
|
||||
const classes = stateDiagram.parser.yy.getClasses();
|
||||
|
||||
expect(classes['exampleStyleClass'].styles.length).toEqual(2);
|
||||
expect(classes['exampleStyleClass'].styles[0]).toEqual('background:#bbb');
|
||||
expect(classes['exampleStyleClass'].styles[1]).toEqual('border:1px solid red');
|
||||
expect(classes.get('exampleStyleClass').styles.length).toEqual(2);
|
||||
expect(classes.get('exampleStyleClass').styles[0]).toEqual('background:#bbb');
|
||||
expect(classes.get('exampleStyleClass').styles[1]).toEqual('border:1px solid red');
|
||||
|
||||
expect(states['b'].classes[0]).toEqual('exampleStyleClass');
|
||||
expect(states.get('b').classes[0]).toEqual('exampleStyleClass');
|
||||
});
|
||||
|
||||
it('can be applied to a [*] state', () => {
|
||||
@ -136,11 +136,11 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
|
||||
const states = stateDiagram.parser.yy.getStates();
|
||||
const classes = stateDiagram.parser.yy.getClasses();
|
||||
|
||||
expect(classes['exampleStyleClass'].styles.length).toEqual(2);
|
||||
expect(classes['exampleStyleClass'].styles[0]).toEqual('background:#bbb');
|
||||
expect(classes['exampleStyleClass'].styles[1]).toEqual('border:1px solid red');
|
||||
expect(classes.get('exampleStyleClass').styles.length).toEqual(2);
|
||||
expect(classes.get('exampleStyleClass').styles[0]).toEqual('background:#bbb');
|
||||
expect(classes.get('exampleStyleClass').styles[1]).toEqual('border:1px solid red');
|
||||
|
||||
expect(states['root_start'].classes[0]).toEqual('exampleStyleClass');
|
||||
expect(states.get('root_start').classes[0]).toEqual('exampleStyleClass');
|
||||
});
|
||||
|
||||
it('can be applied to a comma separated list of states', function () {
|
||||
@ -155,11 +155,11 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
|
||||
let classes = stateDiagram.parser.yy.getClasses();
|
||||
let states = stateDiagram.parser.yy.getStates();
|
||||
|
||||
expect(classes['exampleStyleClass'].styles.length).toEqual(2);
|
||||
expect(classes['exampleStyleClass'].styles[0]).toEqual('background:#bbb');
|
||||
expect(classes['exampleStyleClass'].styles[1]).toEqual('border:1px solid red');
|
||||
expect(states['a'].classes[0]).toEqual('exampleStyleClass');
|
||||
expect(states['b'].classes[0]).toEqual('exampleStyleClass');
|
||||
expect(classes.get('exampleStyleClass').styles.length).toEqual(2);
|
||||
expect(classes.get('exampleStyleClass').styles[0]).toEqual('background:#bbb');
|
||||
expect(classes.get('exampleStyleClass').styles[1]).toEqual('border:1px solid red');
|
||||
expect(states.get('a').classes[0]).toEqual('exampleStyleClass');
|
||||
expect(states.get('b').classes[0]).toEqual('exampleStyleClass');
|
||||
});
|
||||
|
||||
it('a comma separated list of states may or may not have spaces after commas', function () {
|
||||
@ -174,13 +174,13 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
|
||||
const classes = stateDiagram.parser.yy.getClasses();
|
||||
const states = stateDiagram.parser.yy.getStates();
|
||||
|
||||
expect(classes['exampleStyleClass'].styles.length).toEqual(2);
|
||||
expect(classes['exampleStyleClass'].styles[0]).toEqual('background:#bbb');
|
||||
expect(classes['exampleStyleClass'].styles[1]).toEqual('border:1px solid red');
|
||||
expect(classes.get('exampleStyleClass').styles.length).toEqual(2);
|
||||
expect(classes.get('exampleStyleClass').styles[0]).toEqual('background:#bbb');
|
||||
expect(classes.get('exampleStyleClass').styles[1]).toEqual('border:1px solid red');
|
||||
|
||||
const statesList = ['a', 'b', 'c', 'd', 'e'];
|
||||
statesList.forEach((stateId) => {
|
||||
expect(states[stateId].classes[0]).toEqual('exampleStyleClass');
|
||||
expect(states.get(stateId).classes[0]).toEqual('exampleStyleClass');
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -202,7 +202,7 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
|
||||
|
||||
const states = stateDiagram.parser.yy.getStates();
|
||||
|
||||
expect(states['Moving'].doc.length).toEqual(1);
|
||||
expect(states.get('Moving').doc.length).toEqual(1);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -37,10 +37,10 @@ const STYLECLASS_SEP = ',';
* In the future, this can be replaced with a class common to all diagrams.
* ClassDef information = { id: id, styles: [], textStyles: [] }
*
* @returns {{}}
* @returns {Map<string, any>}
*/
function newClassesList() {
return {};
return new Map();
}

let direction = DEFAULT_DIAGRAM_DIRECTION;
@ -49,8 +49,9 @@ let classes = newClassesList(); // style classes defined by a classDef

const newDoc = () => {
return {
/** @type {{ id1: string, id2: string, relationTitle: string }[]} */
relations: [],
states: {},
states: new Map(),
documents: {},
};
};
@ -217,9 +218,9 @@ export const addState = function (
) {
const trimmedId = id?.trim();
// add the state if needed
if (currentDocument.states[trimmedId] === undefined) {
if (!currentDocument.states.has(trimmedId)) {
log.info('Adding state ', trimmedId, descr);
currentDocument.states[trimmedId] = {
currentDocument.states.set(trimmedId, {
id: trimmedId,
descriptions: [],
type,
@ -228,16 +229,18 @@ export const addState = function (
classes: [],
styles: [],
textStyles: [],
};
});
} else {
if (!currentDocument.states[trimmedId].doc) {
currentDocument.states[trimmedId].doc = doc;
if (!currentDocument.states.get(trimmedId).doc) {
currentDocument.states.get(trimmedId).doc = doc;
}
if (!currentDocument.states[trimmedId].type) {
currentDocument.states[trimmedId].type = type;
if (!currentDocument.states.get(trimmedId).type) {
currentDocument.states.get(trimmedId).type = type;
}
}

const doc2 = currentDocument.states.get(trimmedId);

if (descr) {
log.info('Setting state description', trimmedId, descr);
if (typeof descr === 'string') {
@ -250,11 +253,8 @@ export const addState = function (
}

if (note) {
currentDocument.states[trimmedId].note = note;
currentDocument.states[trimmedId].note.text = common.sanitizeText(
currentDocument.states[trimmedId].note.text,
getConfig()
);
doc2.note = note;
doc2.note.text = common.sanitizeText(doc2.note.text, getConfig());
}

if (classes) {
@ -291,7 +291,7 @@ export const clear = function (saveCommon) {
};

export const getState = function (id) {
return currentDocument.states[id];
return currentDocument.states.get(id);
};

export const getStates = function () {
@ -429,7 +429,7 @@ export const addRelation = function (item1, item2, title) {
};

export const addDescription = function (id, descr) {
const theState = currentDocument.states[id];
const theState = currentDocument.states.get(id);
const _descr = descr.startsWith(':') ? descr.replace(':', '').trim() : descr;
theState.descriptions.push(common.sanitizeText(_descr, getConfig()));
};
@ -456,10 +456,10 @@ const getDividerId = () => {
*/
export const addStyleClass = function (id, styleAttributes = '') {
// create a new style class object with this id
if (classes[id] === undefined) {
classes[id] = { id: id, styles: [], textStyles: [] }; // This is a classDef
if (!classes.has(id)) {
classes.set(id, { id: id, styles: [], textStyles: [] }); // This is a classDef
}
const foundClass = classes[id];
const foundClass = classes.get(id);
if (styleAttributes !== undefined && styleAttributes !== null) {
styleAttributes.split(STYLECLASS_SEP).forEach((attrib) => {
// remove any trailing ;

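The stateDb hunks above reduce to one recurring pattern for the Map-backed stores: guard with has, initialize with set, then mutate the value returned by get. A condensed, self-contained sketch of that pattern follows; the attribute handling here is simplified relative to the real addStyleClass and is only meant to show the shape of the change:

interface ClassDef {
  id: string;
  styles: string[];
  textStyles: string[];
}

const classes = new Map<string, ClassDef>();

function addStyleClass(id: string, styleAttributes = ''): void {
  // create the classDef entry on first use
  if (!classes.has(id)) {
    classes.set(id, { id, styles: [], textStyles: [] });
  }
  const foundClass = classes.get(id)!;
  // split "background:#bbb,font-weight:bold" into individual attributes
  for (const attrib of styleAttributes.split(',')) {
    const fixedAttrib = attrib.trim().replace(/;+$/, ''); // drop any trailing ;
    if (fixedAttrib) {
      foundClass.styles.push(fixedAttrib);
    }
  }
}

addStyleClass('exampleClass', 'background:#bbb,font-weight:bold');
// classes.get('exampleClass')!.styles => ['background:#bbb', 'font-weight:bold']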
@ -12,10 +12,10 @@ describe('State Diagram stateDb', () => {
|
||||
|
||||
stateDb.addStyleClass(newStyleClassId, newStyleClassAttribs);
|
||||
const styleClasses = stateDb.getClasses();
|
||||
expect(styleClasses[newStyleClassId].id).toEqual(newStyleClassId);
|
||||
expect(styleClasses[newStyleClassId].styles.length).toEqual(2);
|
||||
expect(styleClasses[newStyleClassId].styles[0]).toEqual('font-weight:bold');
|
||||
expect(styleClasses[newStyleClassId].styles[1]).toEqual('border:blue');
|
||||
expect(styleClasses.get(newStyleClassId).id).toEqual(newStyleClassId);
|
||||
expect(styleClasses.get(newStyleClassId).styles.length).toEqual(2);
|
||||
expect(styleClasses.get(newStyleClassId).styles[0]).toEqual('font-weight:bold');
|
||||
expect(styleClasses.get(newStyleClassId).styles[1]).toEqual('border:blue');
|
||||
});
|
||||
});
|
||||
|
||||
@ -34,15 +34,15 @@ describe('State Diagram stateDb', () => {
|
||||
|
||||
stateDb.addDescription(testStateId, restOfTheDescription);
|
||||
let states = stateDb.getStates();
|
||||
expect(states[testStateId].descriptions[0]).toEqual(restOfTheDescription);
|
||||
expect(states.get(testStateId).descriptions[0]).toEqual(restOfTheDescription);
|
||||
|
||||
stateDb.addDescription(testStateId, oneLeadingColon);
|
||||
states = stateDb.getStates();
|
||||
expect(states[testStateId].descriptions[1]).toEqual(restOfTheDescription);
|
||||
expect(states.get(testStateId).descriptions[1]).toEqual(restOfTheDescription);
|
||||
|
||||
stateDb.addDescription(testStateId, twoLeadingColons);
|
||||
states = stateDb.getStates();
|
||||
expect(states[testStateId].descriptions[2]).toEqual(`:${restOfTheDescription}`);
|
||||
expect(states.get(testStateId).descriptions[2]).toEqual(`:${restOfTheDescription}`);
|
||||
});
|
||||
|
||||
it('adds each description to the array of descriptions', () => {
|
||||
@ -51,10 +51,10 @@ describe('State Diagram stateDb', () => {
|
||||
stateDb.addDescription(testStateId, 'description 2');
|
||||
|
||||
let states = stateDb.getStates();
|
||||
expect(states[testStateId].descriptions.length).toEqual(3);
|
||||
expect(states[testStateId].descriptions[0]).toEqual('description 0');
|
||||
expect(states[testStateId].descriptions[1]).toEqual('description 1');
|
||||
expect(states[testStateId].descriptions[2]).toEqual('description 2');
|
||||
expect(states.get(testStateId).descriptions.length).toEqual(3);
|
||||
expect(states.get(testStateId).descriptions[0]).toEqual('description 0');
|
||||
expect(states.get(testStateId).descriptions[1]).toEqual('description 1');
|
||||
expect(states.get(testStateId).descriptions[2]).toEqual('description 2');
|
||||
});
|
||||
|
||||
it('sanitizes on the description', () => {
|
||||
@ -63,13 +63,13 @@ describe('State Diagram stateDb', () => {
|
||||
'desc outside the script <script>the description</script>'
|
||||
);
|
||||
let states = stateDb.getStates();
|
||||
expect(states[testStateId].descriptions[0]).toEqual('desc outside the script ');
|
||||
expect(states.get(testStateId).descriptions[0]).toEqual('desc outside the script ');
|
||||
});
|
||||
|
||||
it('adds the description to the state with the given id', () => {
|
||||
stateDb.addDescription(testStateId, 'the description');
|
||||
let states = stateDb.getStates();
|
||||
expect(states[testStateId].descriptions[0]).toEqual('the description');
|
||||
expect(states.get(testStateId).descriptions[0]).toEqual('the description');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -405,7 +405,7 @@ describe('state diagram V2, ', function () {
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
|
||||
const states = stateDb.getStates();
|
||||
expect(states['Active'].doc[0].id).toEqual('Idle');
|
||||
expect(states.get('Active').doc[0].id).toEqual('Idle');
|
||||
|
||||
const rels = stateDb.getRelations();
|
||||
const rel_Inactive_Idle = rels.find((rel) => rel.id1 === 'Inactive' && rel.id2 === 'Idle');
|
||||
|
@ -81,7 +81,7 @@ export const setConf = function (cnf) {
*
* @param {string} text - the diagram text to be parsed
* @param diagramObj
* @returns {Record<string, import('../../diagram-api/types.js').DiagramStyleClassDef>} ClassDef styles (a Map with keys = strings, values = )
* @returns {Map<string, import('../../diagram-api/types.js').DiagramStyleClassDef>} ClassDef styles (a Map with keys = strings, values = )
*/
export const getClasses = function (text, diagramObj) {
diagramObj.db.extract(diagramObj.db.getRootDocV2());
|
@ -295,7 +295,7 @@ describe('mermaidAPI', () => {
|
||||
expect(styles).toMatch(/^\ndefault(.*)/);
|
||||
});
|
||||
it('gets the fontFamily from the config', () => {
|
||||
const styles = createCssStyles(mocked_config_with_htmlLabels, {});
|
||||
const styles = createCssStyles(mocked_config_with_htmlLabels, new Map());
|
||||
expect(styles).toMatch(/(.*)\n:root { --mermaid-font-family: serif(.*)/);
|
||||
});
|
||||
it('gets the alt fontFamily from the config', () => {
|
||||
@ -375,7 +375,7 @@ describe('mermaidAPI', () => {
|
||||
// @todo TODO Can't figure out how to spy on the cssImportantStyles method.
|
||||
// That would be a much better approach than manually checking the result
|
||||
|
||||
const styles = createCssStyles(mocked_config, classDefs);
|
||||
const styles = createCssStyles(mocked_config, new Map(Object.entries(classDefs)));
|
||||
htmlElements.forEach((htmlElement) => {
|
||||
expect_styles_matchesHtmlElements(styles, htmlElement);
|
||||
});
|
||||
@ -413,7 +413,10 @@ describe('mermaidAPI', () => {
|
||||
it('creates CSS styles for every style and textStyle in every classDef', () => {
|
||||
// TODO Can't figure out how to spy on the cssImportantStyles method. That would be a much better approach than manually checking the result.
|
||||
|
||||
const styles = createCssStyles(mocked_config_no_htmlLabels, classDefs);
|
||||
const styles = createCssStyles(
|
||||
mocked_config_no_htmlLabels,
|
||||
new Map(Object.entries(classDefs))
|
||||
);
|
||||
htmlElements.forEach((htmlElement) => {
|
||||
expect_styles_matchesHtmlElements(styles, htmlElement);
|
||||
});
|
||||
@ -437,7 +440,7 @@ describe('mermaidAPI', () => {
|
||||
it('gets the css styles created', () => {
|
||||
// @todo TODO if a single function in the module can be mocked, do it for createCssStyles and mock the results.
|
||||
|
||||
createUserStyles(mockConfig, 'flowchart-v2', { classDef1 }, 'someId');
|
||||
createUserStyles(mockConfig, 'flowchart-v2', new Map([['classDef1', classDef1]]), 'someId');
|
||||
const expectedStyles =
|
||||
'\ndefault' +
|
||||
'\n.classDef1 > * { style1-1 !important; }' +
|
||||
@ -448,12 +451,12 @@ describe('mermaidAPI', () => {
|
||||
});
|
||||
|
||||
it('calls getStyles to get css for all graph, user css styles, and config theme variables', () => {
|
||||
createUserStyles(mockConfig, 'someDiagram', {}, 'someId');
|
||||
createUserStyles(mockConfig, 'someDiagram', new Map(), 'someId');
|
||||
expect(getStyles).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('returns the result of compiling, stringifying, and serializing the css code with stylis', () => {
|
||||
const result = createUserStyles(mockConfig, 'someDiagram', {}, 'someId');
|
||||
const result = createUserStyles(mockConfig, 'someDiagram', new Map(), 'someId');
|
||||
expect(compile).toHaveBeenCalled();
|
||||
expect(serialize).toHaveBeenCalled();
|
||||
expect(result).toEqual('stylis serialized css');
|
||||

@ -152,7 +152,7 @@ export const cssImportantStyles = (
*/
export const createCssStyles = (
config: MermaidConfig,
classDefs: Record<string, DiagramStyleClassDef> | null | undefined = {}
classDefs: Map<string, DiagramStyleClassDef> | null | undefined = new Map()
): string => {
let cssStyles = '';

@ -171,7 +171,7 @@ export const createCssStyles = (
}

// classDefs defined in the diagram text
if (!isEmpty(classDefs)) {
if (!isEmpty(classDefs) && classDefs instanceof Map) {
const htmlLabels = config.htmlLabels || config.flowchart?.htmlLabels; // TODO why specifically check the Flowchart diagram config?

const cssHtmlElements = ['> *', 'span']; // TODO make a constant
@ -180,8 +180,8 @@ export const createCssStyles = (
const cssElements = htmlLabels ? cssHtmlElements : cssShapeElements;

// create the CSS styles needed for each styleClass definition and css element
for (const classId in classDefs) {
const styleClassDef = classDefs[classId];
for (const classId of classDefs!.keys()) {
const styleClassDef = classDefs.get(classId)!;
// create the css styles for each cssElement and the styles (only if there are styles)
if (!isEmpty(styleClassDef.styles)) {
cssElements.forEach((cssElement) => {
@ -200,7 +200,7 @@ export const createCssStyles = (
export const createUserStyles = (
config: MermaidConfig,
graphType: string,
classDefs: Record<string, DiagramStyleClassDef> | undefined,
classDefs: Map<string, DiagramStyleClassDef> | undefined,
svgId: string
): string => {
const userCSSstyles = createCssStyles(config, classDefs);
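With classDefs carried as a Map, the style generation above walks keys() and get() rather than a for...in loop over object properties. A minimal sketch of that loop, using a simplified stand-in for the real cssImportantStyles helper (not the actual implementation):

interface DiagramStyleClassDef {
  id: string;
  styles?: string[];
  textStyles?: string[];
}

// simplified stand-in for cssImportantStyles
const importantStyles = (classId: string, element: string, styles: string[] = []): string =>
  `.${classId} ${element} { ${styles.join(' !important; ')} !important; }`;

function classDefStyles(
  classDefs: Map<string, DiagramStyleClassDef>,
  cssElements: string[]
): string {
  let cssStyles = '';
  for (const classId of classDefs.keys()) {
    const styleClassDef = classDefs.get(classId)!;
    // emit one rule per css element, only when the classDef actually has styles
    if (styleClassDef.styles?.length) {
      for (const cssElement of cssElements) {
        cssStyles += importantStyles(classId, cssElement, styleClassDef.styles) + '\n';
      }
    }
  }
  return cssStyles;
}

// e.g. classDefStyles(new Map([['classDef1', { id: 'classDef1', styles: ['fill:red'] }]]), ['> *', 'span'])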