mirror of
https://github.com/mermaid-js/mermaid.git
synced 2025-01-14 06:43:25 +08:00
Merge branch 'develop' into 5237-unified-layout-common-renderer
* develop: (50 commits)
  Update pnpm to v9
  chore: Fix link
  Update docs
  fixup, docs: update plugin's name
  Update docs
  Changed Mermaid Chart GPT plugin's name
  Updated Mermaid Chart ChatGPT plugin link
  chore(deps): update all patch dependencies
  run the pnpm --filter mermaid run docs:build command
  Update flowchart.md
  chore: address @Yokozuna59's code reviews
  Lint fix
  Fix for proper handling of block-diagram labels
  Fix for proper handling of block-diagram labels
  docs: Add blog post - Documentation Software (#5519)
  refactor: remove non-null assertion operator
  chore: remove unrefSubSchemas
  chore: Build config types before building types
  chore: Ignore `vite.config.ts.timestamp-`
  chore(deps): update all patch dependencies
  ...
This commit is contained in: commit c10ab23387
.gitignore (vendored)
@@ -48,6 +48,7 @@ demos/dev/**
!/demos/dev/example.html
!/demos/dev/reload.js
tsx-0/**
vite.config.ts.timestamp-*

# autogenereated by langium-cli
generated/
@@ -137,4 +137,9 @@ describe('XSS', () => {
cy.wait(1000);
cy.get('#the-malware').should('not.exist');
});
it('should sanitize backticks block diagram labels properly', () => {
cy.visit('http://localhost:9000/xss25.html');
cy.wait(1000);
cy.get('#the-malware').should('not.exist');
});
});
cypress/platform/xss25.html (new file, 108 lines)
@@ -0,0 +1,108 @@
<html>
<head>
<link href="https://fonts.googleapis.com/css?family=Montserrat&display=swap" rel="stylesheet" />
<link href="https://unpkg.com/tailwindcss@^1.0/dist/tailwind.min.css" rel="stylesheet" />
<link
rel="stylesheet"
href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.7.0/css/font-awesome.min.css"
/>
<link
href="https://fonts.googleapis.com/css?family=Noto+Sans+SC&display=swap"
rel="stylesheet"
/>
<style>
body {
/* background: rgb(221, 208, 208); */
/* background:#333; */
font-family: 'Arial';
/* font-size: 18px !important; */
}
h1 {
color: grey;
}
.mermaid2 {
display: none;
}
.mermaid svg {
/* font-size: 18px !important; */
}
.malware {
position: fixed;
bottom: 0;
left: 0;
right: 0;
height: 150px;
background: red;
color: black;
display: flex;
display: flex;
justify-content: center;
align-items: center;
font-family: monospace;
font-size: 72px;
}
</style>
<script>
function xssAttack() {
const div = document.createElement('div');
div.id = 'the-malware';
div.className = 'malware';
div.innerHTML = 'XSS Succeeded';
document.getElementsByTagName('body')[0].appendChild(div);
throw new Error('XSS Succeeded');
}
</script>
</head>
<body>
<div>Security check</div>
<div class="flex">
<div id="diagram" class="mermaid"></div>
<div id="res" class=""></div>
</div>
<script type="module">
import mermaid from './mermaid.esm.mjs';
mermaid.parseError = function (err, hash) {
// console.error('Mermaid error: ', err);
};
mermaid.initialize({
theme: 'forest',
arrowMarkerAbsolute: true,
// themeCSS: '.edgePath .path {stroke: red;} .arrowheadPath {fill: red;}',
logLevel: 0,
state: {
defaultRenderer: 'dagre-wrapper',
},
flowchart: {
// defaultRenderer: 'dagre-wrapper',
nodeSpacing: 10,
curve: 'cardinal',
htmlLabels: true,
},
htmlLabels: false,
// gantt: { axisFormat: '%m/%d/%Y' },
sequence: { actorFontFamily: 'courier', actorMargin: 50, showSequenceNumbers: false },
// sequenceDiagram: { actorMargin: 300 } // deprecated
// fontFamily: '"times", sans-serif',
// fontFamily: 'courier',
fontSize: 18,
curve: 'basis',
securityLevel: 'strict',
startOnLoad: false,
secure: ['secure', 'securityLevel', 'startOnLoad', 'maxTextSize'],
// themeVariables: {relationLabelColor: 'red'}
});
function callback() {
alert('It worked');
}

let diagram = 'block-beta\n';
diagram += '`A-- "X<img src=x on';
diagram += 'error=xssAttack()>" -->B';

console.log(diagram);
// document.querySelector('#diagram').innerHTML = diagram;
const { svg } = await mermaid.render('diagram', diagram);
document.querySelector('#res').innerHTML = svg;
</script>
</body>
</html>
docs/config/img/mathMLDifferences.png (new binary file, 15 KiB — not shown)
@@ -84,3 +84,13 @@ Example with legacy mode enabled (the latest version of KaTeX's stylesheet can b
</body>
</html>
```

## Handling Rendering Differences

Due to differences between default fonts across operating systems and browser's MathML implementations, inconsistent results can be seen across platforms. If having consistent results are important, or the most optimal rendered results are desired, `forceLegacyMathML` can be enabled in the config.

This option will always use KaTeX's stylesheet instead of only when MathML is not supported (as with `legacyMathML`). Note that only `forceLegacyMathML` needs to be set.

If including KaTeX's stylesheet is not a concern, enabling this option is recommended to avoid scenarios where no MathML implementation within a browser provides the desired output (as seen below).

![Image showing differences between Browsers](img/mathMLDifferences.png)
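For reference, a minimal usage sketch (not part of this diff) of enabling the new option, assuming the standard `mermaid.initialize` entry point and that KaTeX's stylesheet is already included on the page:

```js
// Hypothetical sketch: always use KaTeX's HTML + CSS output for math,
// instead of relying on the browser's MathML implementation.
import mermaid from 'mermaid';

mermaid.initialize({
  startOnLoad: true,
  forceLegacyMathML: true, // takes precedence, so legacyMathML does not need to be set
});
```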
@@ -98,7 +98,7 @@ mermaid.initialize(config);

#### Defined in

[mermaidAPI.ts:635](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/mermaidAPI.ts#L635)
[mermaidAPI.ts:634](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/mermaidAPI.ts#L634)

## Functions

@@ -129,7 +129,7 @@ Return the last node appended

#### Defined in

[mermaidAPI.ts:277](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/mermaidAPI.ts#L277)
[mermaidAPI.ts:276](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/mermaidAPI.ts#L276)

---

@@ -155,7 +155,7 @@ the cleaned up svgCode

#### Defined in

[mermaidAPI.ts:223](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/mermaidAPI.ts#L223)
[mermaidAPI.ts:222](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/mermaidAPI.ts#L222)

---

@@ -167,10 +167,10 @@ Create the user styles

#### Parameters

| Name | Type | Description |
| :--- | :--- | :--- |
| `config` | `MermaidConfig` | configuration that has style and theme settings to use |
| `classDefs` | `undefined` \| `null` \| `Record`<`string`, `DiagramStyleClassDef`> | the classDefs in the diagram text. Might be null if none were defined. Usually is the result of a call to getClasses(...) |
| Name | Type | Description |
| :--- | :--- | :--- |
| `config` | `MermaidConfig` | configuration that has style and theme settings to use |
| `classDefs` | `undefined` \| `null` \| `Map`<`string`, `DiagramStyleClassDef`> | the classDefs in the diagram text. Might be null if none were defined. Usually is the result of a call to getClasses(...) |

#### Returns

@@ -190,12 +190,12 @@ the string with all the user styles

#### Parameters

| Name | Type |
| :--- | :--- |
| `config` | `MermaidConfig` |
| `graphType` | `string` |
| `classDefs` | `undefined` \| `Record`<`string`, `DiagramStyleClassDef`> |
| `svgId` | `string` |
| Name | Type |
| :--- | :--- |
| `config` | `MermaidConfig` |
| `graphType` | `string` |
| `classDefs` | `undefined` \| `Map`<`string`, `DiagramStyleClassDef`> |
| `svgId` | `string` |

#### Returns

@@ -203,7 +203,7 @@ the string with all the user styles

#### Defined in

[mermaidAPI.ts:200](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/mermaidAPI.ts#L200)
[mermaidAPI.ts:199](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/mermaidAPI.ts#L199)

---

@@ -256,7 +256,7 @@ Put the svgCode into an iFrame. Return the iFrame code

#### Defined in

[mermaidAPI.ts:254](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/mermaidAPI.ts#L254)
[mermaidAPI.ts:253](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/mermaidAPI.ts#L253)

---

@@ -281,4 +281,4 @@ Remove any existing elements from the given document

#### Defined in

[mermaidAPI.ts:327](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/mermaidAPI.ts#L327)
[mermaidAPI.ts:326](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/mermaidAPI.ts#L326)
@@ -240,6 +240,8 @@ Communication tools and platforms

### Other

- [Astro](https://astro.build/)
- [Adding diagrams to your Astro site with MermaidJS and Playwright](https://agramont.net/blog/diagraming-with-mermaidjs-astro/)
- [Bisheng](https://www.npmjs.com/package/bisheng)
- [bisheng-plugin-mermaid](https://github.com/yct21/bisheng-plugin-mermaid)
- [Blazorade Mermaid: Render Mermaid diagrams in Blazor applications](https://github.com/Blazorade/Blazorade-Mermaid/wiki)

@@ -249,6 +251,7 @@ Communication tools and platforms
- [Jekyll](https://jekyllrb.com/)
- [jekyll-mermaid](https://rubygems.org/gems/jekyll-mermaid)
- [jekyll-mermaid-diagrams](https://github.com/fuzhibo/jekyll-mermaid-diagrams)
- [MarkChart: Preview Mermaid diagrams on macOS](https://markchart.app/)
- [mermaid-isomorphic](https://github.com/remcohaszing/mermaid-isomorphic)
- [mermaid-server: Generate diagrams using a HTTP request](https://github.com/TomWright/mermaid-server)
- [NiceGUI: Let any browser be the frontend of your Python code](https://nicegui.io) ✅
@@ -6,6 +6,10 @@

# Mermaid Chart

The Future of Diagramming & Visual Collaboration

Try the Ultimate AI, Mermaid, and Visual Diagramming Suite by creating an account at [Mermaid Chart](https://www.mermaidchart.com/app/sign-up).

<br />

<a href="https://www.producthunt.com/posts/mermaid-chart?utm_source=badge-featured&utm_medium=badge&utm_souce=badge-mermaid-chart" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?post_id=416671&theme=light" alt="Mermaid Chart - A smarter way to create diagrams | Product Hunt" style="width: 250px; height: 54px;" width="250" height="54" /></a>

@@ -18,22 +22,26 @@

- **Editor** - A web based editor for creating and editing Mermaid diagrams.

- **Presentation** - A presentation mode for viewing Mermaid diagrams in a slideshow format.
- **Visual Editor** - The Visual Editor enables users of all skill levels to create diagrams easily and efficiently, with both GUI and code-based editing options.

- **Collaboration** - A web based collaboration feature for multi-user editing on Mermaid diagrams in real-time (Pro plan).
- **AI Chat** - Use our embedded AI Chat to generate diagrams from natural language descriptions.

- **Plugins** - A plugin system for extending the functionality of Mermaid.

Plugins are available for:
Official Mermaid Chart plugins:

- [ChatGPT](https://docs.mermaidchart.com/plugins/chatgpt)
- [Mermaid Chart GPT](https://chat.openai.com/g/g-1IRFKwq4G-mermaid-chart)
- [Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=MermaidChart.vscode-mermaid-chart)
- [JetBrains IDE](https://plugins.jetbrains.com/plugin/23043-mermaid-chart)
- [Microsoft PowerPoint and Word](https://appsource.microsoft.com/en-us/product/office/WA200006214?tab=Overview)
- [Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=MermaidChart.vscode-mermaid-chart)

- **AI diagramming** - A feature for generating Mermaid diagrams from text using AI (Pro plan).
Visit our [Plugins](https://www.mermaidchart.com/plugins) page for more information.

- **More** - To learn more, visit our [Product](https://www.mermaidchart.com/product) page.
- **Collaboration** - A web based collaboration feature for multi-user editing on Mermaid diagrams in real-time (Pro and Enterprise plans).

- **Comments** - Enhance collaboration by adding comments to diagrams.

- **Presentations** - A presentation mode for viewing Mermaid diagrams in a slideshow format.

## Plans

@@ -43,11 +51,9 @@

- **Enterprise** - A paid plan for enterprise use that includes all Pro features, and more.

## Access
To learn more, visit our [Pricing](https://mermaidchart.com/pricing) page.

Sign up for a free account at [Mermaid Chart](https://www.mermaidchart.com/app/sign-up).

Mermaid Chart is currently offering a 14-day free trial of our newly-launched Pro tier. To learn more, visit our [Pricing](https://mermaidchart.com/pricing) page.
Mermaid Chart is currently offering a 14-day free trial on our Pro and Enterprise tiers. Sign up for a free account at [Mermaid Chart](https://www.mermaidchart.com/app/sign-up).

## Mermaid JS contributions
@@ -157,7 +157,7 @@ For a list of Mermaid Plugins and Integrations, visit the [Integrations page](..

Mermaid Chart plugins are available for:

- [ChatGPT](https://docs.mermaidchart.com/plugins/chatgpt)
- [ChatGPT](https://docs.mermaidchart.com/plugins/mermaid-chart-gpt)
- [JetBrains IDE](https://docs.mermaidchart.com/plugins/jetbrains-ide)
- [Microsoft PowerPoint](https://docs.mermaidchart.com/plugins/microsoft-powerpoint)
- [Microsoft Word](https://docs.mermaidchart.com/plugins/microsoft-word)
@@ -6,6 +6,18 @@

# Blog

## [How to Choose the Right Documentation Software](https://www.mermaidchart.com/blog/posts/how-to-choose-the-right-documentation-software/)

7 May 2024 · 5 mins

How to Choose the Right Documentation Software. Reliable and efficient documentation software is crucial in the fast-paced world of software development.

## [AI in software diagramming: What trends will define the future?](https://www.mermaidchart.com/blog/posts/ai-in-software-diagramming/)

24 April 2024 · 5 mins

Artificial intelligence (AI) tools are changing the way developers work.

## [Mermaid Chart Unveils Visual Editor for Sequence Diagrams](https://www.mermaidchart.com/blog/posts/mermaid-chart-unveils-visual-editor-for-sequence-diagrams/)

8 April 2024 · 5 mins
@@ -881,7 +881,7 @@ Examples of tooltip usage below:

```html
<script>
  const callback = function () {
  window.callback = function () {
    alert('A callback was triggered');
  };
</script>

@@ -913,7 +913,7 @@ flowchart LR

> **Success** The tooltip functionality and the ability to link to urls are available from version 0.5.2.

?> Due to limitations with how Docsify handles JavaScript callback functions, an alternate working demo for the above code can be viewed at [this jsfiddle](https://jsfiddle.net/Ogglas/2o73vdez/7).
?> Due to limitations with how Docsify handles JavaScript callback functions, an alternate working demo for the above code can be viewed at [this jsfiddle](https://jsfiddle.net/yk4h7qou/2/).

Links are opened in the same browser tab/window by default. It is possible to change this by adding a link target to the click definition (`_self`, `_blank`, `_parent` and `_top` are supported):

@@ -957,7 +957,7 @@ Beginner's tip—a full example using interactive links in a html context:
</pre>

<script>
  const callback = function () {
  window.callback = function () {
    alert('A callback was triggered');
  };
  const config = {
@@ -4,7 +4,7 @@
"version": "10.2.4",
"description": "Markdownish syntax for generating flowcharts, sequence diagrams, class diagrams, gantt charts and git graphs.",
"type": "module",
"packageManager": "pnpm@8.15.7",
"packageManager": "pnpm@9.1.2+sha512.127dc83b9ea10c32be65d22a8efb4a65fb952e8fefbdfded39bdc3c97efc32d31b48b00420df2c1187ace28c921c902f0cb5a134a4d032b8b5295cbfa2c681e2",
"keywords": [
"diagram",
"markdown",

@@ -107,7 +107,7 @@
"jison": "^0.4.18",
"js-yaml": "^4.1.0",
"jsdom": "^24.0.0",
"langium-cli": "3.0.1",
"langium-cli": "3.0.3",
"lint-staged": "^15.2.2",
"markdown-table": "^3.0.3",
"nyc": "^15.1.0",
@@ -41,12 +41,12 @@ let nodeDb = {};
export const addVertices = async function (vert, svgId, root, doc, diagObj, parentLookupDb, graph) {
const svg = root.select(`[id="${svgId}"]`);
const nodes = svg.insert('g').attr('class', 'nodes');
const keys = Object.keys(vert);
const keys = [...vert.keys()];

// Iterate through each item in the vertex object (containing all the vertices found) in the graph definition
await Promise.all(
keys.map(async function (id) {
const vertex = vert[id];
const vertex = vert.get(id);

/**
* Variable for storing the classes for the vertex

@@ -595,7 +595,7 @@ const addMarkersToEdge = function (svgPath, edgeData, diagramType, arrowMarkerAb
*
* @param text
* @param diagObj
* @returns {Record<string, import('../../mermaid/src/diagram-api/types.js').DiagramStyleClassDef>} ClassDef styles
* @returns {Map<string, import('../../mermaid/src/diagram-api/types.js').DiagramStyleClassDef>} ClassDef styles
*/
export const getClasses = function (text, diagObj) {
log.info('Extracting classes');
@@ -10,18 +10,16 @@

/* eslint-disable no-console */

import { readFile, writeFile } from 'node:fs/promises';
import { join } from 'node:path';
import _Ajv2019, { type JSONSchemaType } from 'ajv/dist/2019.js';
import { JSON_SCHEMA, load } from 'js-yaml';
import { compile, type JSONSchema } from 'json-schema-to-typescript';
import assert from 'node:assert';
import { execFile } from 'node:child_process';
import { readFile, writeFile } from 'node:fs/promises';
import { join } from 'node:path';
import { promisify } from 'node:util';

import { load, JSON_SCHEMA } from 'js-yaml';
import { compile, type JSONSchema } from 'json-schema-to-typescript';
import prettier from 'prettier';

import _Ajv2019, { type JSONSchemaType } from 'ajv/dist/2019.js';

// Workaround for wrong AJV types, see
// https://github.com/ajv-validator/ajv/issues/2132#issuecomment-1290409907
const Ajv2019 = _Ajv2019 as unknown as typeof _Ajv2019.default;

@@ -34,28 +32,6 @@ const verifyOnly = process.argv.includes('--verify');
/** If `true`, automatically `git add` any changes (i.e. during `pnpm run pre-commit`)*/
const git = process.argv.includes('--git');

/**
* All of the keys in the mermaid config that have a mermaid diagram config.
*/
const MERMAID_CONFIG_DIAGRAM_KEYS = [
'flowchart',
'sequence',
'gantt',
'journey',
'class',
'state',
'er',
'pie',
'quadrantChart',
'xyChart',
'requirement',
'mindmap',
'timeline',
'gitGraph',
'c4',
'sankey',
];

/**
* Loads the MermaidConfig JSON schema YAML file.
*

@@ -148,53 +124,9 @@ async function generateTypescript(mermaidConfigSchema: JSONSchemaType<MermaidCon
return { ...schema, required: [] };
}

/**
* This is a temporary hack to control the order the types are generated in.
*
* By default, json-schema-to-typescript outputs the $defs in the order they
* are used, then any unused schemas at the end.
*
* **The only purpose of this function is to make the `git diff` simpler**
* **We should remove this later to simplify the code**
*
* @todo TODO: Remove this function in a future PR.
* @param schema - The input schema.
* @returns The schema with all `$ref`s removed.
*/
function unrefSubschemas(schema: JSONSchemaType<Record<string, any>>) {
return {
...schema,
properties: Object.fromEntries(
Object.entries(schema.properties).map(([key, propertySchema]) => {
if (MERMAID_CONFIG_DIAGRAM_KEYS.includes(key)) {
const { $ref, ...propertySchemaWithoutRef } = propertySchema as JSONSchemaType<unknown>;
if ($ref === undefined) {
throw Error(
`subSchema ${key} is in MERMAID_CONFIG_DIAGRAM_KEYS but does not have a $ref field`
);
}
const [
_root, // eslint-disable-line @typescript-eslint/no-unused-vars
_defs, // eslint-disable-line @typescript-eslint/no-unused-vars
defName,
] = $ref.split('/');
return [
key,
{
...propertySchemaWithoutRef,
tsType: defName,
},
];
}
return [key, propertySchema];
})
),
};
}

assert.ok(mermaidConfigSchema.$defs);
const modifiedSchema = {
...unrefSubschemas(removeRequired(mermaidConfigSchema)),
...removeRequired(mermaidConfigSchema),

$defs: Object.fromEntries(
Object.entries(mermaidConfigSchema.$defs).map(([key, subSchema]) => {
File diff suppressed because it is too large.
@@ -70,7 +70,7 @@ export interface DiagramRenderer {
getClasses?: (
text: string,
diagram: Pick<DiagramDefinition, 'db'>
) => Record<string, DiagramStyleClassDef>;
) => Map<string, DiagramStyleClassDef>;
}

export interface DiagramDefinition {
@@ -1,21 +1,26 @@
import clone from 'lodash-es/clone.js';
import * as configApi from '../../config.js';
import { getConfig } from '../../diagram-api/diagramAPI.js';
import type { DiagramDB } from '../../diagram-api/types.js';
import { log } from '../../logger.js';
import common from '../common/common.js';
import { clear as commonClear } from '../common/commonDb.js';
import type { Block, ClassDef } from './blockTypes.js';

// Initialize the node database for simple lookups
let blockDatabase: Record<string, Block> = {};
let blockDatabase: Map<string, Block> = new Map();
let edgeList: Block[] = [];
let edgeCount: Record<string, number> = {};
let edgeCount: Map<string, number> = new Map();

const COLOR_KEYWORD = 'color';
const FILL_KEYWORD = 'fill';
const BG_FILL = 'bgFill';
const STYLECLASS_SEP = ',';
const config = getConfig();

let classes = {} as Record<string, ClassDef>;
let classes: Map<string, ClassDef> = new Map();

const sanitizeText = (txt: string) => common.sanitizeText(txt, config);

/**
* Called when the parser comes across a (style) class definition

@@ -26,10 +31,11 @@ let classes = {} as Record<string, ClassDef>;
*/
export const addStyleClass = function (id: string, styleAttributes = '') {
// create a new style class object with this id
if (classes[id] === undefined) {
classes[id] = { id: id, styles: [], textStyles: [] }; // This is a classDef
let foundClass = classes.get(id);
if (!foundClass) {
foundClass = { id: id, styles: [], textStyles: [] };
classes.set(id, foundClass); // This is a classDef
}
const foundClass = classes[id];
if (styleAttributes !== undefined && styleAttributes !== null) {
styleAttributes.split(STYLECLASS_SEP).forEach((attrib) => {
// remove any trailing ;

@@ -54,7 +60,7 @@ export const addStyleClass = function (id: string, styleAttributes = '') {
* @param styles - the string with 1 or more style attributes (each separated by a comma)
*/
export const addStyle2Node = function (id: string, styles = '') {
const foundBlock = blockDatabase[id];
const foundBlock = blockDatabase.get(id)!;
if (styles !== undefined && styles !== null) {
foundBlock.styles = styles.split(STYLECLASS_SEP);
}

@@ -70,11 +76,11 @@ export const addStyle2Node = function (id: string, styles = '') {
*/
export const setCssClass = function (itemIds: string, cssClassName: string) {
itemIds.split(',').forEach(function (id: string) {
let foundBlock = blockDatabase[id];
let foundBlock = blockDatabase.get(id);
if (foundBlock === undefined) {
const trimmedId = id.trim();
blockDatabase[trimmedId] = { id: trimmedId, type: 'na', children: [] } as Block;
foundBlock = blockDatabase[trimmedId];
foundBlock = { id: trimmedId, type: 'na', children: [] } as Block;
blockDatabase.set(trimmedId, foundBlock);
}
if (!foundBlock.classes) {
foundBlock.classes = [];

@@ -87,6 +93,9 @@ const populateBlockDatabase = (_blockList: Block[] | Block[][], parent: Block):
const blockList = _blockList.flat();
const children = [];
for (const block of blockList) {
if (block.label) {
block.label = sanitizeText(block.label);
}
if (block.type === 'classDef') {
addStyleClass(block.id, block.css);
continue;

@@ -104,12 +113,9 @@ const populateBlockDatabase = (_blockList: Block[] | Block[][], parent: Block):
if (block.type === 'column-setting') {
parent.columns = block.columns || -1;
} else if (block.type === 'edge') {
if (edgeCount[block.id]) {
edgeCount[block.id]++;
} else {
edgeCount[block.id] = 1;
}
block.id = edgeCount[block.id] + '-' + block.id;
const count = (edgeCount.get(block.id) ?? 0) + 1;
edgeCount.set(block.id, count);
block.id = count + '-' + block.id;
edgeList.push(block);
} else {
if (!block.label) {

@@ -120,16 +126,17 @@ const populateBlockDatabase = (_blockList: Block[] | Block[][], parent: Block):
block.label = block.id;
}
}
const newBlock = !blockDatabase[block.id];
if (newBlock) {
blockDatabase[block.id] = block;
const existingBlock = blockDatabase.get(block.id);

if (existingBlock === undefined) {
blockDatabase.set(block.id, block);
} else {
// Add newer relevant data to aggregated node
if (block.type !== 'na') {
blockDatabase[block.id].type = block.type;
existingBlock.type = block.type;
}
if (block.label !== block.id) {
blockDatabase[block.id].label = block.label;
existingBlock.label = block.label;
}
}

@@ -142,10 +149,10 @@ const populateBlockDatabase = (_blockList: Block[] | Block[][], parent: Block):
for (let j = 0; j < w; j++) {
const newBlock = clone(block);
newBlock.id = newBlock.id + '-' + j;
blockDatabase[newBlock.id] = newBlock;
blockDatabase.set(newBlock.id, newBlock);
children.push(newBlock);
}
} else if (newBlock) {
} else if (existingBlock === undefined) {
children.push(block);
}
}

@@ -160,12 +167,12 @@ const clear = (): void => {
log.debug('Clear called');
commonClear();
rootBlock = { id: 'root', type: 'composite', children: [], columns: -1 } as Block;
blockDatabase = { root: rootBlock };
blockDatabase = new Map([['root', rootBlock]]);
blocks = [] as Block[];
classes = {} as Record<string, ClassDef>;
classes = new Map();

edgeList = [];
edgeCount = {};
edgeCount = new Map();
};

export function typeStr2Type(typeStr: string) {

@@ -241,7 +248,7 @@ const setHierarchy = (block: Block[]): void => {
};

const getColumns = (blockId: string): number => {
const block = blockDatabase[blockId];
const block = blockDatabase.get(blockId);
if (!block) {
return -1;
}

@@ -259,7 +266,7 @@ const getColumns = (blockId: string): number => {
* @returns
*/
const getBlocksFlat = () => {
return [...Object.values(blockDatabase)];
return [...blockDatabase.values()];
};
/**
* Returns the hierarchy of blocks

@@ -273,11 +280,11 @@ const getEdges = () => {
return edgeList;
};
const getBlock = (id: string) => {
return blockDatabase[id];
return blockDatabase.get(id);
};

const setBlock = (block: Block) => {
blockDatabase[block.id] = block;
blockDatabase.set(block.id, block);
};

const getLogger = () => console;
@@ -388,7 +388,7 @@ describe('Block diagram', function () {
const mc = blocks[0];
expect(mc.classes).toContain('black');
const classes = db.getClasses();
const black = classes.black;
const black = classes.get('black')!;
expect(black.id).toBe('black');
expect(black.styles[0]).toEqual('color:#ffffff');
});

@@ -406,4 +406,21 @@ columns 1
expect(B.styles).toContain('fill:#f9F');
});
});

describe('prototype properties', function () {
function validateProperty(prop: string) {
expect(() => block.parse(`block-beta\n${prop}`)).not.toThrow();
expect(() =>
block.parse(`block-beta\nA; classDef ${prop} color:#ffffff,fill:#000000; class A ${prop}`)
).not.toThrow();
}

it('should work with a __proto__ property', function () {
validateProperty('__proto__');
});

it('should work with a constructor property', function () {
validateProperty('constructor');
});
});
});
@@ -26,10 +26,10 @@ import type {
const MERMAID_DOM_ID_PREFIX = 'classId-';

let relations: ClassRelation[] = [];
let classes: ClassMap = {};
let classes: Map<string, ClassNode> = new Map();
let notes: ClassNote[] = [];
let classCounter = 0;
let namespaces: NamespaceMap = {};
let namespaces: Map<string, NamespaceNode> = new Map();
let namespaceCounter = 0;

let functions: any[] = [];

@@ -57,7 +57,7 @@ export const setClassLabel = function (_id: string, label: string) {
}

const { className } = splitClassNameAndType(id);
classes[className].label = label;
classes.get(className)!.label = label;
};

/**

@@ -70,13 +70,13 @@ export const addClass = function (_id: string) {
const id = common.sanitizeText(_id, getConfig());
const { className, type } = splitClassNameAndType(id);
// Only add class if not exists
if (Object.hasOwn(classes, className)) {
if (classes.has(className)) {
return;
}
// alert('Adding class: ' + className);
const name = common.sanitizeText(className, getConfig());
// alert('Adding class after: ' + name);
classes[name] = {
classes.set(name, {
id: name,
type: type,
label: name,

@@ -86,7 +86,7 @@ export const addClass = function (_id: string) {
annotations: [],
styles: [],
domId: MERMAID_DOM_ID_PREFIX + name + '-' + classCounter,
} as ClassNode;
} as ClassNode);

classCounter++;
};

@@ -99,25 +99,25 @@ export const addClass = function (_id: string) {
*/
export const lookUpDomId = function (_id: string): string {
const id = common.sanitizeText(_id, getConfig());
if (id in classes) {
return classes[id].domId;
if (classes.has(id)) {
return classes.get(id)!.domId;
}
throw new Error('Class not found: ' + id);
};

export const clear = function () {
relations = [];
classes = {};
classes = new Map();
notes = [];
functions = [];
functions.push(setupToolTips);
namespaces = {};
namespaces = new Map();
namespaceCounter = 0;
commonClear();
};

export const getClass = function (id: string): ClassNode {
return classes[id];
return classes.get(id)!;
};

export const getClasses = function (): ClassMap {

@@ -157,7 +157,7 @@ export const addRelation = function (relation: ClassRelation) {
*/
export const addAnnotation = function (className: string, annotation: string) {
const validatedClassName = splitClassNameAndType(className).className;
classes[validatedClassName].annotations.push(annotation);
classes.get(validatedClassName)!.annotations.push(annotation);
};

/**

@@ -173,7 +173,7 @@ export const addMember = function (className: string, member: string) {
addClass(className);

const validatedClassName = splitClassNameAndType(className).className;
const theClass = classes[validatedClassName];
const theClass = classes.get(validatedClassName)!;

if (typeof member === 'string') {
// Member can contain white spaces, we trim them out

@@ -226,8 +226,9 @@ export const setCssClass = function (ids: string, className: string) {
if (_id[0].match(/\d/)) {
id = MERMAID_DOM_ID_PREFIX + id;
}
if (classes[id] !== undefined) {
classes[id].cssClasses.push(className);
const classNode = classes.get(id);
if (classNode) {
classNode.cssClasses.push(className);
}
});
};

@@ -241,17 +242,17 @@ export const setCssClass = function (ids: string, className: string) {
const setTooltip = function (ids: string, tooltip?: string) {
ids.split(',').forEach(function (id) {
if (tooltip !== undefined) {
classes[id].tooltip = sanitizeText(tooltip);
classes.get(id)!.tooltip = sanitizeText(tooltip);
}
});
};

export const getTooltip = function (id: string, namespace?: string) {
if (namespace) {
return namespaces[namespace].classes[id].tooltip;
if (namespace && namespaces.has(namespace)) {
return namespaces.get(namespace)!.classes.get(id)!.tooltip;
}

return classes[id].tooltip;
return classes.get(id)!.tooltip;
};

/**

@@ -268,14 +269,15 @@ export const setLink = function (ids: string, linkStr: string, target: string) {
if (_id[0].match(/\d/)) {
id = MERMAID_DOM_ID_PREFIX + id;
}
if (classes[id] !== undefined) {
classes[id].link = utils.formatUrl(linkStr, config);
const theClass = classes.get(id);
if (theClass) {
theClass.link = utils.formatUrl(linkStr, config);
if (config.securityLevel === 'sandbox') {
classes[id].linkTarget = '_top';
theClass.linkTarget = '_top';
} else if (typeof target === 'string') {
classes[id].linkTarget = sanitizeText(target);
theClass.linkTarget = sanitizeText(target);
} else {
classes[id].linkTarget = '_blank';
theClass.linkTarget = '_blank';
}
}
});

@@ -292,7 +294,7 @@ export const setLink = function (ids: string, linkStr: string, target: string) {
export const setClickEvent = function (ids: string, functionName: string, functionArgs: string) {
ids.split(',').forEach(function (id) {
setClickFunc(id, functionName, functionArgs);
classes[id].haveCallback = true;
classes.get(id)!.haveCallback = true;
});
setCssClass(ids, 'clickable');
};

@@ -308,7 +310,7 @@ const setClickFunc = function (_domId: string, functionName: string, functionArg
}

const id = domId;
if (classes[id] !== undefined) {
if (classes.has(id)) {
const elemId = lookUpDomId(id);
let argList: string[] = [];
if (typeof functionArgs === 'string') {

@@ -417,22 +419,22 @@ const setDirection = (dir: string) => {
* @public
*/
export const addNamespace = function (id: string) {
if (namespaces[id] !== undefined) {
if (namespaces.has(id)) {
return;
}

namespaces[id] = {
namespaces.set(id, {
id: id,
classes: {},
classes: new Map(),
children: {},
domId: MERMAID_DOM_ID_PREFIX + id + '-' + namespaceCounter,
} as NamespaceNode;
} as NamespaceNode);

namespaceCounter++;
};

const getNamespace = function (name: string): NamespaceNode {
return namespaces[name];
return namespaces.get(name)!;
};

const getNamespaces = function (): NamespaceMap {

@@ -447,18 +449,18 @@ const getNamespaces = function (): NamespaceMap {
* @public
*/
export const addClassesToNamespace = function (id: string, classNames: string[]) {
if (namespaces[id] === undefined) {
if (!namespaces.has(id)) {
return;
}
for (const name of classNames) {
const { className } = splitClassNameAndType(name);
classes[className].parent = id;
namespaces[id].classes[className] = classes[className];
classes.get(className)!.parent = id;
namespaces.get(id)!.classes.set(className, classes.get(className)!);
}
};

export const setCssStyle = function (id: string, styles: string[]) {
const thisClass = classes[id];
const thisClass = classes.get(id);
if (!styles || !thisClass) {
return;
}
@@ -2,6 +2,7 @@
import { parser } from './parser/classDiagram.jison';
import classDb from './classDb.js';
import { vi, describe, it, expect } from 'vitest';
import type { ClassMap, NamespaceNode } from './classTypes.js';
const spyOn = vi.spyOn;

const staticCssStyle = 'text-decoration:underline;';

@@ -392,8 +393,8 @@ class C13["With Città foreign language"]
Student "1" --o "1" IdCard : carries
Student "1" --o "1" Bike : rides`);

expect(Object.keys(classDb.getClasses()).length).toBe(3);
expect(classDb.getClasses().Student).toMatchInlineSnapshot(`
expect(classDb.getClasses().size).toBe(3);
expect(classDb.getClasses().get('Student')).toMatchInlineSnapshot(`
{
"annotations": [],
"cssClasses": [],

@@ -1539,12 +1540,12 @@ class Class2
}`;
parser.parse(str);

const testNamespace = parser.yy.getNamespace('Namespace1');
const testClasses = parser.yy.getClasses();
expect(Object.keys(testNamespace.classes).length).toBe(2);
const testNamespace: NamespaceNode = parser.yy.getNamespace('Namespace1');
const testClasses: ClassMap = parser.yy.getClasses();
expect(testNamespace.classes.size).toBe(2);
expect(Object.keys(testNamespace.children).length).toBe(0);
expect(testNamespace.classes['Class1'].id).toBe('Class1');
expect(Object.keys(testClasses).length).toBe(2);
expect(testNamespace.classes.get('Class1')?.id).toBe('Class1');
expect(testClasses.size).toBe(2);
});

it('should add relations between classes of different namespaces', function () {

@@ -1573,25 +1574,25 @@ class Class2
const testNamespaceB = parser.yy.getNamespace('B');
const testClasses = parser.yy.getClasses();
const testRelations = parser.yy.getRelations();
expect(Object.keys(testNamespaceA.classes).length).toBe(2);
expect(testNamespaceA.classes['A1'].members[0].getDisplayDetails().displayText).toBe(
expect(testNamespaceA.classes.size).toBe(2);
expect(testNamespaceA.classes.get('A1').members[0].getDisplayDetails().displayText).toBe(
'+foo : string'
);
expect(testNamespaceA.classes['A2'].members[0].getDisplayDetails().displayText).toBe(
expect(testNamespaceA.classes.get('A2').members[0].getDisplayDetails().displayText).toBe(
'+bar : int'
);
expect(Object.keys(testNamespaceB.classes).length).toBe(2);
expect(testNamespaceB.classes['B1'].members[0].getDisplayDetails().displayText).toBe(
expect(testNamespaceB.classes.size).toBe(2);
expect(testNamespaceB.classes.get('B1').members[0].getDisplayDetails().displayText).toBe(
'+foo : bool'
);
expect(testNamespaceB.classes['B2'].members[0].getDisplayDetails().displayText).toBe(
expect(testNamespaceB.classes.get('B2').members[0].getDisplayDetails().displayText).toBe(
'+bar : float'
);
expect(Object.keys(testClasses).length).toBe(4);
expect(testClasses['A1'].parent).toBe('A');
expect(testClasses['A2'].parent).toBe('A');
expect(testClasses['B1'].parent).toBe('B');
expect(testClasses['B2'].parent).toBe('B');
expect(testClasses.size).toBe(4);
expect(testClasses.get('A1').parent).toBe('A');
expect(testClasses.get('A2').parent).toBe('A');
expect(testClasses.get('B1').parent).toBe('B');
expect(testClasses.get('B2').parent).toBe('B');
expect(testRelations[0].id1).toBe('A1');
expect(testRelations[0].id2).toBe('B1');
expect(testRelations[1].id1).toBe('A2');
@@ -4,7 +4,7 @@ import * as graphlib from 'dagre-d3-es/src/graphlib/index.js';
import { log } from '../../logger.js';
import { getConfig } from '../../diagram-api/diagramAPI.js';
import { render } from '../../dagre-wrapper/index.js';
import utils from '../../utils.js';
import utils, { getEdgeId } from '../../utils.js';
import { interpolateToCurve, getStylesFromArray } from '../../utils.js';
import { setupGraphViewbox } from '../../setupGraphViewbox.js';
import common from '../common/common.js';

@@ -44,14 +44,11 @@ export const addNamespaces = function (
_id: string,
diagObj: any
) {
const keys = Object.keys(namespaces);
log.info('keys:', keys);
log.info('keys:', [...namespaces.keys()]);
log.info(namespaces);

// Iterate through each item in the vertex object (containing all the vertices found) in the graph definition
keys.forEach(function (id) {
const vertex = namespaces[id];

namespaces.forEach(function (vertex) {
// parent node must be one of [rect, roundedWithTitle, noteGroup, divider]
const shape = 'rect';

@@ -89,16 +86,13 @@ export const addClasses = function (
diagObj: any,
parent?: string
) {
const keys = Object.keys(classes);
log.info('keys:', keys);
log.info('keys:', [...classes.keys()]);
log.info(classes);

// Iterate through each item in the vertex object (containing all the vertices found) in the graph definition
keys
.filter((id) => classes[id].parent == parent)
.forEach(function (id) {
const vertex = classes[id];

[...classes.values()]
.filter((vertex) => vertex.parent === parent)
.forEach(function (vertex) {
/**
* Variable for storing the classes for the vertex
*/

@@ -187,7 +181,7 @@ export const addNotes = function (
g.setNode(vertex.id, node);
log.info('setNode', node);

if (!vertex.class || !(vertex.class in classes)) {
if (!vertex.class || !classes.has(vertex.class)) {
return;
}
const edgeId = startEdgeId + i;

@@ -231,7 +225,10 @@ export const addRelations = function (relations: ClassRelation[], g: graphlib.Gr
//Set relationship style and line type
classes: 'relation',
pattern: edge.relation.lineType == 1 ? 'dashed' : 'solid',
id: `id_${edge.id1}_${edge.id2}_${cnt}`,
id: getEdgeId(edge.id1, edge.id2, {
prefix: 'id',
counter: cnt,
}),
// Set link type for rendering
arrowhead: edge.type === 'arrow_open' ? 'none' : 'normal',
//Set edge extra labels

@@ -346,7 +343,7 @@ export const draw = async function (text: string, id: string, _version: string,
}
const root =
securityLevel === 'sandbox'
? select(sandboxElement.nodes()[0].contentDocument.body)
? select(sandboxElement!.nodes()[0]!.contentDocument.body)
: select('body');
const svg = root.select(`[id="${id}"]`);

@@ -366,7 +363,8 @@ export const draw = async function (text: string, id: string, _version: string,

// Add label rects for non html labels
if (!conf?.htmlLabels) {
const doc = securityLevel === 'sandbox' ? sandboxElement.nodes()[0].contentDocument : document;
const doc =
securityLevel === 'sandbox' ? sandboxElement!.nodes()[0]!.contentDocument : document;
const labels = doc.querySelectorAll('[id="' + id + '"] .edgeLabel .label');
for (const label of labels) {
// Get dimensions of label
@@ -175,10 +175,10 @@ export const draw = function (text, id, _version, diagObj) {
});

const classes = diagObj.db.getClasses();
const keys = Object.keys(classes);
const keys = [...classes.keys()];

for (const key of keys) {
const classDef = classes[key];
const classDef = classes.get(key);
const node = svgDraw.drawClass(diagram, classDef, conf, diagObj);
idCache[node.id] = node;

@@ -216,7 +216,7 @@ export const draw = function (text, id, _version, diagObj) {
// metadata about the node. In this case we're going to add labels to each of
// our nodes.
g.setNode(node.id, node);
if (note.class && note.class in classes) {
if (note.class && classes.has(note.class)) {
g.setEdge(
note.id,
getGraphId(note.class),
@@ -161,5 +161,5 @@ export interface NamespaceNode {
children: NamespaceMap;
}

export type ClassMap = Record<string, ClassNode>;
export type NamespaceMap = Record<string, NamespaceNode>;
export type ClassMap = Map<string, ClassNode>;
export type NamespaceMap = Map<string, NamespaceNode>;
packages/mermaid/src/diagrams/class/parser/class.spec.js (new file, 16 lines)
@@ -0,0 +1,16 @@
import { parser } from './classDiagram.jison';
import classDb from '../classDb.js';

describe('class diagram', function () {
beforeEach(function () {
parser.yy = classDb;
parser.yy.clear();
});

describe('prototype properties', function () {
it.each(['__proto__', 'constructor'])('should work with a %s property', function (prop) {
expect(() => parser.parse(`classDiagram\nclass ${prop}`)).not.toThrow();
expect(() => parser.parse(`classDiagram\nnamespace ${prop} {\n\tclass A\n}`)).not.toThrow();
});
});
});
@@ -337,18 +337,20 @@ export const renderKatex = async (text: string, config: MermaidConfig): Promise<
return text;
}

if (!isMathMLSupported() && !config.legacyMathML) {
if (!(isMathMLSupported() || config.legacyMathML || config.forceLegacyMathML)) {
return text.replace(katexRegex, 'MathML is unsupported in this environment.');
}

const { default: katex } = await import('katex');
const outputMode =
config.forceLegacyMathML || (!isMathMLSupported() && config.legacyMathML)
? 'htmlAndMathml'
: 'mathml';
return text
.split(lineBreakRegex)
.map((line) =>
hasKatex(line)
? `<div style="display: flex; align-items: center; justify-content: center; white-space: nowrap;">
${line}
</div>`
? `<div style="display: flex; align-items: center; justify-content: center; white-space: nowrap;">${line}</div>`
: `<div>${line}</div>`
)
.join('')

@@ -357,7 +359,7 @@ export const renderKatex = async (text: string, config: MermaidConfig): Promise<
.renderToString(c, {
throwOnError: true,
displayMode: true,
output: isMathMLSupported() ? 'mathml' : 'htmlAndMathml',
output: outputMode,
})
.replace(/\n/g, ' ')
.replace(/<annotation.*<\/annotation>/g, '')
@@ -11,7 +11,7 @@ import {
getDiagramTitle,
} from '../common/commonDb.js';

let entities = {};
let entities = new Map();
let relationships = [];

const Cardinality = {

@@ -28,15 +28,15 @@ const Identification = {
};

const addEntity = function (name, alias = undefined) {
if (entities[name] === undefined) {
entities[name] = { attributes: [], alias: alias };
if (!entities.has(name)) {
entities.set(name, { attributes: [], alias: alias });
log.info('Added new entity :', name);
} else if (entities[name] && !entities[name].alias && alias) {
entities[name].alias = alias;
} else if (entities.has(name) && !entities.get(name).alias && alias) {
entities.get(name).alias = alias;
log.info(`Add alias '${alias}' to entity '${name}'`);
}

return entities[name];
return entities.get(name);
};

const getEntities = () => entities;

@@ -75,7 +75,7 @@ const addRelationship = function (entA, rolA, entB, rSpec) {
const getRelationships = () => relationships;

const clear = function () {
entities = {};
entities = new Map();
relationships = [];
commonClear();
};
@@ -296,12 +296,12 @@ const drawAttributes = (groupNode, entityTextNode, attributes) => {
* Use D3 to construct the svg elements for the entities
*
* @param svgNode The svg node that contains the diagram
* @param entities The entities to be drawn
* @param {Map<string, object>} entities The entities to be drawn
* @param graph The graph that contains the vertex and edge definitions post-layout
* @returns {object} The first entity that was inserted
*/
const drawEntities = function (svgNode, entities, graph) {
const keys = Object.keys(entities);
const keys = [...entities.keys()];
let firstOne;

keys.forEach(function (entityName) {

@@ -326,12 +326,12 @@ const drawEntities = function (svgNode, entities, graph) {
.style('text-anchor', 'middle')
.style('font-family', getConfig().fontFamily)
.style('font-size', conf.fontSize + 'px')
.text(entities[entityName].alias ?? entityName);
.text(entities.get(entityName).alias ?? entityName);

const { width: entityWidth, height: entityHeight } = drawAttributes(
groupNode,
textNode,
entities[entityName].attributes
entities.get(entityName).attributes
);

// Draw the rectangle - insert it before the text so that the text is not obscured
@@ -17,7 +17,7 @@ describe('when parsing ER diagram it...', function () {
const line2 = 'MAINLAND';
erDiagram.parser.parse(`erDiagram\n${line1}\n${line2}`);

expect(Object.keys(erDb.getEntities()).length).toBe(2);
expect(erDb.getEntities().size).toBe(2);
expect(erDb.getRelationships().length).toBe(0);
});

@@ -27,7 +27,7 @@ describe('when parsing ER diagram it...', function () {
expect(() => {
erDiagram.parser.parse(`erDiagram\n ${name}\n`);
const entities = erDb.getEntities();
expect(entities.hasOwnProperty(name)).toBe(false);
expect(entities.has(name)).toBe(false);
}).toThrow();
});
describe('has non A-Za-z0-9_- chars', function () {

@@ -47,7 +47,7 @@ describe('when parsing ER diagram it...', function () {
expect(() => {
erDiagram.parser.parse(`erDiagram\n ${name}\n`);
const entities = erDb.getEntities();
expect(entities.hasOwnProperty(name)).toBe(false);
expect(entities.has(name)).toBe(false);
}).toThrow();
});

@@ -55,21 +55,21 @@ describe('when parsing ER diagram it...', function () {
const name = singleOccurrence;
erDiagram.parser.parse(`erDiagram\n "${name}"\n`);
const entities = erDb.getEntities();
expect(entities.hasOwnProperty(name)).toBe(true);
expect(entities.has(name)).toBe(true);
});

it(`"${repeatedOccurrence}" repeated occurrence`, function () {
const name = repeatedOccurrence;
erDiagram.parser.parse(`erDiagram\n "${name}"\n`);
const entities = erDb.getEntities();
expect(entities.hasOwnProperty(name)).toBe(true);
expect(entities.has(name)).toBe(true);
});

it(`"${singleOccurrence}" ends with`, function () {
const name = endsWith;
erDiagram.parser.parse(`erDiagram\n "${name}"\n`);
const entities = erDb.getEntities();
expect(entities.hasOwnProperty(name)).toBe(true);
expect(entities.has(name)).toBe(true);
});

it(`"${cannontStartWith}" cannot start with the character`, function () {

@@ -77,7 +77,7 @@ describe('when parsing ER diagram it...', function () {
expect(() => {
erDiagram.parser.parse(`erDiagram\n "${name}"\n`);
const entities = erDb.getEntities();
expect(entities.hasOwnProperty(name)).toBe(false);
expect(entities.has(name)).toBe(false);
}).toThrow();
});
});

@@ -88,7 +88,7 @@ describe('when parsing ER diagram it...', function () {
const name = 'a' + allCombined;
erDiagram.parser.parse(`erDiagram\n "${name}"\n`);
const entities = erDb.getEntities();
expect(entities.hasOwnProperty(name)).toBe(true);
expect(entities.has(name)).toBe(true);
});
});

@@ -96,14 +96,14 @@ describe('when parsing ER diagram it...', function () {
expect(() => {
erDiagram.parser.parse(`erDiagram\n "Blo%rf"\n`);
const entities = erDb.getEntities();
expect(entities.hasOwnProperty(name)).toBe(false);
expect(entities.has(name)).toBe(false);
}).toThrow();
});
it('cannot contain \\ because it could start and escape code', function () {
expect(() => {
erDiagram.parser.parse(`erDiagram\n "Blo\\rf"\n`);
const entities = erDb.getEntities();
expect(entities.hasOwnProperty(name)).toBe(false);
expect(entities.has(name)).toBe(false);
}).toThrow();
});

@@ -114,7 +114,7 @@ describe('when parsing ER diagram it...', function () {
expect(() => {
erDiagram.parser.parse(`erDiagram\n "${badName}"\n`);
const entities = erDb.getEntities();
expect(entities.hasOwnProperty(badName)).toBe(false);
expect(entities.has(badName)).toBe(false);
}).toThrow();
});
});

@@ -124,14 +124,14 @@ describe('when parsing ER diagram it...', function () {
const beyondEnglishName = 'DUCK-àáâäæãåā';
erDiagram.parser.parse(`erDiagram\n${beyondEnglishName}\n`);
const entities = erDb.getEntities();
expect(entities.hasOwnProperty(beyondEnglishName)).toBe(true);
expect(entities.has(beyondEnglishName)).toBe(true);
});

it('can contain - _ without needing ""', function () {
const hyphensUnderscore = 'DUCK-BILLED_PLATYPUS';
erDiagram.parser.parse(`erDiagram\n${hyphensUnderscore}\n`);
const entities = erDb.getEntities();
expect(entities.hasOwnProperty(hyphensUnderscore)).toBe(true);
expect(entities.has(hyphensUnderscore)).toBe(true);
});

it('can have an alias', function () {

@@ -139,8 +139,8 @@ describe('when parsing ER diagram it...', function () {
const alias = 'bar';
erDiagram.parser.parse(`erDiagram\n${entity}["${alias}"]\n`);
const entities = erDb.getEntities();
expect(entities.hasOwnProperty(entity)).toBe(true);
expect(entities[entity].alias).toBe(alias);
expect(entities.has(entity)).toBe(true);
expect(entities.get(entity).alias).toBe(alias);
});

it('can have an alias even if the relationship is defined before class', function () {

@@ -151,10 +151,10 @@ describe('when parsing ER diagram it...', function () {
`erDiagram\n${firstEntity} ||--o| ${secondEntity} : rel\nclass ${firstEntity}["${alias}"]\n`
);
const entities = erDb.getEntities();
expect(entities.hasOwnProperty(firstEntity)).toBe(true);
expect(entities.hasOwnProperty(secondEntity)).toBe(true);
expect(entities[firstEntity].alias).toBe(alias);
expect(entities[secondEntity].alias).toBeUndefined();
expect(entities.has(firstEntity)).toBe(true);
expect(entities.has(secondEntity)).toBe(true);
expect(entities.get(firstEntity).alias).toBe(alias);
expect(entities.get(secondEntity).alias).toBeUndefined();
});

it('can have an alias even if the relationship is defined after class', function () {

@@ -165,17 +165,17 @@ describe('when parsing ER diagram it...', function () {
`erDiagram\nclass ${firstEntity}["${alias}"]\n${firstEntity} ||--o| ${secondEntity} : rel\n`
);
const entities = erDb.getEntities();
expect(entities.hasOwnProperty(firstEntity)).toBe(true);
expect(entities.hasOwnProperty(secondEntity)).toBe(true);
expect(entities[firstEntity].alias).toBe(alias);
expect(entities[secondEntity].alias).toBeUndefined();
expect(entities.has(firstEntity)).toBe(true);
expect(entities.has(secondEntity)).toBe(true);
expect(entities.get(firstEntity).alias).toBe(alias);
expect(entities.get(secondEntity).alias).toBeUndefined();
});

it('can start with an underscore', function () {
const entity = '_foo';
erDiagram.parser.parse(`erDiagram\n${entity}\n`);
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(entity)).toBe(true);
|
||||
expect(entities.has(entity)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
@ -191,11 +191,11 @@ describe('when parsing ER diagram it...', function () {
|
||||
);
|
||||
const entities = erDb.getEntities();
|
||||
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(3);
|
||||
expect(entities[entity].attributes[0].attributeName).toBe('myBookTitle');
|
||||
expect(entities[entity].attributes[1].attributeName).toBe('MYBOOKSUBTITLE_1');
|
||||
expect(entities[entity].attributes[2].attributeName).toBe('author-ref[name](1)');
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(3);
|
||||
expect(entities.get(entity).attributes[0].attributeName).toBe('myBookTitle');
|
||||
expect(entities.get(entity).attributes[1].attributeName).toBe('MYBOOKSUBTITLE_1');
|
||||
expect(entities.get(entity).attributes[2].attributeName).toBe('author-ref[name](1)');
|
||||
});
|
||||
|
||||
it('should allow asterisk at the start of attribute name', function () {
|
||||
@ -204,8 +204,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity}{\n${attribute}}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(1);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(1);
|
||||
});
|
||||
|
||||
it('should allow asterisks at the start of attribute declared with type and name', () => {
|
||||
@ -214,8 +214,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n${attribute}}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(1);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(1);
|
||||
});
|
||||
|
||||
it('should not allow leading numbers, dashes or brackets', function () {
|
||||
@ -236,8 +236,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n${attribute}\n}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(1);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(1);
|
||||
});
|
||||
|
||||
it('should allow an entity with a single attribute to be defined with a key', function () {
|
||||
@ -246,8 +246,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n${attribute}\n}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(1);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(1);
|
||||
});
|
||||
|
||||
it('should allow an entity with a single attribute to be defined with a comment', function () {
|
||||
@ -256,9 +256,9 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n${attribute}\n}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(1);
|
||||
expect(entities[entity].attributes[0].attributeComment).toBe('comment');
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(1);
|
||||
expect(entities.get(entity).attributes[0].attributeComment).toBe('comment');
|
||||
});
|
||||
|
||||
it('should allow an entity with a single attribute to be defined with a key and a comment', function () {
|
||||
@ -267,8 +267,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n${attribute}\n}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(1);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(1);
|
||||
});
|
||||
|
||||
it('should allow an entity with attribute starting with fk, pk or uk and a comment', function () {
|
||||
@ -282,7 +282,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
`erDiagram\n${entity} {\n${attribute1} \n\n${attribute2}\n${attribute3}\n${attribute4}\n}`
|
||||
);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities[entity].attributes.length).toBe(4);
|
||||
expect(entities.get(entity).attributes.length).toBe(4);
|
||||
});
|
||||
|
||||
it('should allow an entity with attributes that have many constraints and comments', function () {
|
||||
@ -297,14 +297,14 @@ describe('when parsing ER diagram it...', function () {
|
||||
`erDiagram\n${entity} {\n${attribute1}\n${attribute2}\n${attribute3}\n${attribute4}\n${attribute5}\n}`
|
||||
);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities[entity].attributes[0].attributeKeyTypeList).toEqual(['PK', 'FK']);
|
||||
expect(entities[entity].attributes[0].attributeComment).toBe('comment1');
|
||||
expect(entities[entity].attributes[1].attributeKeyTypeList).toEqual(['PK', 'UK', 'FK']);
|
||||
expect(entities[entity].attributes[2].attributeKeyTypeList).toEqual(['PK', 'UK']);
|
||||
expect(entities[entity].attributes[2].attributeComment).toBe('comment3');
|
||||
expect(entities[entity].attributes[3].attributeKeyTypeList).toBeUndefined();
|
||||
expect(entities[entity].attributes[4].attributeKeyTypeList).toBeUndefined();
|
||||
expect(entities[entity].attributes[4].attributeComment).toBe('comment5');
|
||||
expect(entities.get(entity).attributes[0].attributeKeyTypeList).toEqual(['PK', 'FK']);
|
||||
expect(entities.get(entity).attributes[0].attributeComment).toBe('comment1');
|
||||
expect(entities.get(entity).attributes[1].attributeKeyTypeList).toEqual(['PK', 'UK', 'FK']);
|
||||
expect(entities.get(entity).attributes[2].attributeKeyTypeList).toEqual(['PK', 'UK']);
|
||||
expect(entities.get(entity).attributes[2].attributeComment).toBe('comment3');
|
||||
expect(entities.get(entity).attributes[3].attributeKeyTypeList).toBeUndefined();
|
||||
expect(entities.get(entity).attributes[4].attributeKeyTypeList).toBeUndefined();
|
||||
expect(entities.get(entity).attributes[4].attributeComment).toBe('comment5');
|
||||
});
|
||||
|
||||
it('should allow an entity with attribute that has a generic type', function () {
|
||||
@ -317,8 +317,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
`erDiagram\n${entity} {\n${attribute1}\n${attribute2}\n${attribute3}\n}`
|
||||
);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(3);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(3);
|
||||
});
|
||||
|
||||
it('should allow an entity with attribute that is an array', function () {
|
||||
@ -328,8 +328,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n${attribute1}\n${attribute2}\n}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(2);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(2);
|
||||
});
|
||||
|
||||
it('should allow an entity with attribute that is a limited length string', function () {
|
||||
@ -339,10 +339,10 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n${attribute1}\n${attribute2}\n}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(2);
|
||||
expect(entities[entity].attributes[0].attributeType).toBe('character(10)');
|
||||
expect(entities[entity].attributes[1].attributeType).toBe('varchar(5)');
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(2);
|
||||
expect(entities.get(entity).attributes[0].attributeType).toBe('character(10)');
|
||||
expect(entities.get(entity).attributes[1].attributeType).toBe('varchar(5)');
|
||||
});
|
||||
|
||||
it('should allow an entity with multiple attributes to be defined', function () {
|
||||
@ -355,7 +355,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
`erDiagram\n${entity} {\n${attribute1}\n${attribute2}\n${attribute3}\n}`
|
||||
);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities[entity].attributes.length).toBe(3);
|
||||
expect(entities.get(entity).attributes.length).toBe(3);
|
||||
});
|
||||
|
||||
it('should allow attribute definitions to be split into multiple blocks', function () {
|
||||
@ -368,7 +368,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
`erDiagram\n${entity} {\n${attribute1}\n}\n${entity} {\n${attribute2}\n${attribute3}\n}`
|
||||
);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities[entity].attributes.length).toBe(3);
|
||||
expect(entities.get(entity).attributes.length).toBe(3);
|
||||
});
|
||||
|
||||
it('should allow an empty attribute block', function () {
|
||||
@ -376,8 +376,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty('BOOK')).toBe(true);
|
||||
expect(entities[entity].attributes.length).toBe(0);
|
||||
expect(entities.has('BOOK')).toBe(true);
|
||||
expect(entities.get(entity).attributes.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should allow an attribute block to start immediately after the entity name', function () {
|
||||
@ -385,8 +385,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity}{}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty('BOOK')).toBe(true);
|
||||
expect(entities[entity].attributes.length).toBe(0);
|
||||
expect(entities.has('BOOK')).toBe(true);
|
||||
expect(entities.get(entity).attributes.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should allow an attribute block to be separated from the entity name by spaces', function () {
|
||||
@ -394,8 +394,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty('BOOK')).toBe(true);
|
||||
expect(entities[entity].attributes.length).toBe(0);
|
||||
expect(entities.has('BOOK')).toBe(true);
|
||||
expect(entities.get(entity).attributes.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should allow whitespace before and after attribute definitions', function () {
|
||||
@ -404,8 +404,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n \n\n ${attribute}\n\n \n}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(1);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(1);
|
||||
});
|
||||
|
||||
it('should allow no whitespace before and after attribute definitions', function () {
|
||||
@ -414,8 +414,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity}{${attribute}}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(1);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(1);
|
||||
});
|
||||
|
||||
it('should associate two entities correctly', function () {
|
||||
@ -423,8 +423,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
const entities = erDb.getEntities();
|
||||
const relationships = erDb.getRelationships();
|
||||
|
||||
expect(entities.hasOwnProperty('CAR')).toBe(true);
|
||||
expect(entities.hasOwnProperty('DRIVER')).toBe(true);
|
||||
expect(entities.has('CAR')).toBe(true);
|
||||
expect(entities.has('DRIVER')).toBe(true);
|
||||
expect(relationships.length).toBe(1);
|
||||
expect(relationships[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
expect(relationships[0].relSpec.cardB).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
@ -437,7 +437,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse(`erDiagram\n${line1}\n${line2}`);
|
||||
const entities = erDb.getEntities();
|
||||
|
||||
expect(Object.keys(entities).length).toBe(3);
|
||||
expect(entities.size).toBe(3);
|
||||
});
|
||||
|
||||
it('should create the role specified', function () {
|
||||
@ -451,7 +451,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
it('should allow recursive relationships', function () {
|
||||
erDiagram.parser.parse('erDiagram\nNODE ||--o{ NODE : "leads to"');
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(1);
|
||||
expect(erDb.getEntities().size).toBe(1);
|
||||
});
|
||||
|
||||
describe('accessible title and description', () => {
|
||||
@ -491,7 +491,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
const entities = erDb.getEntities();
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(entities).length).toBe(2);
|
||||
expect(entities.size).toBe(2);
|
||||
expect(rels.length).toBe(2);
|
||||
});
|
||||
|
||||
@ -507,7 +507,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA ||--|{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
@ -517,7 +517,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA ||..o{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
@ -527,7 +527,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA |o..o{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
@ -537,7 +537,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA |o--|{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
@ -547,7 +547,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA }|--|| B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
@ -557,7 +557,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA }o--|| B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
@ -567,7 +567,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA }o..o| B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
@ -577,7 +577,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA }|..o| B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
@ -587,7 +587,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA |o..|| B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
@ -597,7 +597,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA ||..|| B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
@ -607,7 +607,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA ||--o| B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
@ -617,7 +617,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA |o..o| B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
@ -627,7 +627,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA }o--o{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
@ -637,7 +637,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA }|..|{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
@ -647,7 +647,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA }o--|{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
@ -657,7 +657,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA }|..o{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
@ -667,7 +667,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA one or zero to many B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
@ -677,7 +677,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA one or many optionally to zero or one B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
@ -687,7 +687,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA zero or more to zero or many B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
@ -697,7 +697,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA many(0) to many(1) B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
@ -707,7 +707,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA many optionally to one B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
@ -717,7 +717,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA only one optionally to 1+ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
@ -727,7 +727,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA 0+ optionally to 1 B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
@@ -786,10 +786,21 @@ describe('when parsing ER diagram it...', function () {
     it('should represent parent-child relationship correctly', function () {
       erDiagram.parser.parse('erDiagram\nPROJECT u--o{ TEAM_MEMBER : "parent"');
       const rels = erDb.getRelationships();
-      expect(Object.keys(erDb.getEntities()).length).toBe(2);
+      expect(erDb.getEntities().size).toBe(2);
       expect(rels.length).toBe(1);
       expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.MD_PARENT);
       expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
     });
   });
+
+  describe('prototype properties', function () {
+    it.each(['__proto__', 'constructor', 'prototype'])(
+      'should work with a %s property',
+      function (prop) {
+        expect(() =>
+          erDiagram.parser.parse(`erDiagram\n${prop} ||--|{ ORDER : place`)
+        ).not.toThrow();
+      }
+    );
+  });
 });
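The new `prototype properties` cases above point at part of the motivation for this migration: with a plain-object store, user-supplied entity names such as `__proto__` or `constructor` collide with inherited properties, whereas `Map` keys are ordinary values. A hedged sketch of the difference (not the parser's actual code):

```ts
// Sketch: why Map keys are safer than plain-object keys for user-supplied names.
const asRecord: Record<string, { id: string }> = {};
const name = '__proto__';

// With a plain object, assigning to '__proto__' does not create an own property;
// it hits the inherited setter, so the entry is silently lost.
asRecord[name] = { id: name };
console.log(Object.keys(asRecord).length); // 0

// With a Map, '__proto__' is just another key.
const asMap = new Map<string, { id: string }>();
asMap.set(name, { id: name });
console.log(asMap.size); // 1
console.log(asMap.get(name)?.id); // '__proto__'
```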
|
||||
|
@@ -51,17 +51,17 @@ describe('flow db addClass', () => {
     flowDb.addClass('a,b', ['stroke-width: 8px']);
     const classes = flowDb.getClasses();

-    expect(classes.hasOwnProperty('a')).toBe(true);
-    expect(classes.hasOwnProperty('b')).toBe(true);
-    expect(classes['a']['styles']).toEqual(['stroke-width: 8px']);
-    expect(classes['b']['styles']).toEqual(['stroke-width: 8px']);
+    expect(classes.has('a')).toBe(true);
+    expect(classes.has('b')).toBe(true);
+    expect(classes.get('a')?.styles).toEqual(['stroke-width: 8px']);
+    expect(classes.get('b')?.styles).toEqual(['stroke-width: 8px']);
   });

   it('should detect single class', () => {
     flowDb.addClass('a', ['stroke-width: 8px']);
     const classes = flowDb.getClasses();

-    expect(classes.hasOwnProperty('a')).toBe(true);
-    expect(classes['a']['styles']).toEqual(['stroke-width: 8px']);
+    expect(classes.has('a')).toBe(true);
+    expect(classes.get('a')?.styles).toEqual(['stroke-width: 8px']);
   });
 });
|
||||
|
@@ -17,12 +17,12 @@ import type { FlowVertex, FlowClass, FlowSubGraph, FlowText, FlowEdge, FlowLink
 const MERMAID_DOM_ID_PREFIX = 'flowchart-';
 let vertexCounter = 0;
 let config = getConfig();
-let vertices: Record<string, FlowVertex> = {};
+let vertices: Map<string, FlowVertex> = new Map();
 let edges: FlowEdge[] & { defaultInterpolate?: string; defaultStyle?: string[] } = [];
-let classes: Record<string, FlowClass> = {};
+let classes: Map<string, FlowClass> = new Map();
 let subGraphs: FlowSubGraph[] = [];
-let subGraphLookup: Record<string, FlowSubGraph> = {};
-let tooltips: Record<string, string> = {};
+let subGraphLookup: Map<string, FlowSubGraph> = new Map();
+let tooltips: Map<string, string> = new Map();
 let subCount = 0;
 let firstGraphFlag = true;
 let direction: string;
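The `addVertex` and `addClass` hunks further down all lean on the same get-or-create idiom once the stores are Maps: read the entry with `.get()`, create and `.set()` it when missing, then mutate the local reference instead of re-indexing the store. A condensed sketch of that idiom, with types simplified from the real `FlowVertex`:

```ts
// Sketch of the get-or-create idiom used after the Record -> Map migration.
interface VertexLike {
  id: string;
  styles: string[];
  classes: string[];
  text?: string;
}

const vertexStore = new Map<string, VertexLike>();

function upsertVertex(id: string, style?: string[]): VertexLike {
  let vertex = vertexStore.get(id);
  if (vertex === undefined) {
    vertex = { id, styles: [], classes: [] };
    vertexStore.set(id, vertex);
  }
  const v = vertex; // narrowed reference, safe to use inside callbacks
  style?.forEach((s) => v.styles.push(s));
  return v;
}

upsertVertex('A', ['fill:#f9f']);
console.log(vertexStore.get('A')?.styles); // ['fill:#f9f']
```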
|
||||
@@ -40,10 +40,9 @@ const sanitizeText = (txt: string) => common.sanitizeText(txt, config);
  * @param id - id of the node
  */
 export const lookUpDomId = function (id: string) {
-  const vertexKeys = Object.keys(vertices);
-  for (const vertexKey of vertexKeys) {
-    if (vertices[vertexKey].id === id) {
-      return vertices[vertexKey].domId;
+  for (const vertex of vertices.values()) {
+    if (vertex.id === id) {
+      return vertex.domId;
     }
   }
   return id;
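With the store as a Map, lookups that previously walked `Object.keys` can iterate values directly, as `lookUpDomId` now does above. A tiny sketch of the same traversal (the `domId` field name is taken from the diff, the rest is illustrative):

```ts
// Sketch: scanning Map values instead of Object.keys over a Record.
interface VertexRef {
  id: string;
  domId: string;
}

const vertexMap = new Map<string, VertexRef>([['A', { id: 'A', domId: 'flowchart-A-0' }]]);

function findDomId(id: string): string {
  for (const vertex of vertexMap.values()) {
    if (vertex.id === id) {
      return vertex.domId;
    }
  }
  return id; // fall back to the raw id, as the real lookUpDomId does
}

console.log(findDomId('A')); // 'flowchart-A-0'
console.log(findDomId('missing')); // 'missing'
```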
|
||||
@ -67,50 +66,53 @@ export const addVertex = function (
|
||||
}
|
||||
let txt;
|
||||
|
||||
if (vertices[id] === undefined) {
|
||||
vertices[id] = {
|
||||
let vertex = vertices.get(id);
|
||||
if (vertex === undefined) {
|
||||
vertex = {
|
||||
id,
|
||||
labelType: 'text',
|
||||
domId: MERMAID_DOM_ID_PREFIX + id + '-' + vertexCounter,
|
||||
styles: [],
|
||||
classes: [],
|
||||
};
|
||||
vertices.set(id, vertex);
|
||||
}
|
||||
vertexCounter++;
|
||||
|
||||
if (textObj !== undefined) {
|
||||
config = getConfig();
|
||||
txt = sanitizeText(textObj.text.trim());
|
||||
vertices[id].labelType = textObj.type;
|
||||
vertex.labelType = textObj.type;
|
||||
// strip quotes if string starts and ends with a quote
|
||||
if (txt[0] === '"' && txt[txt.length - 1] === '"') {
|
||||
txt = txt.substring(1, txt.length - 1);
|
||||
}
|
||||
vertices[id].text = txt;
|
||||
vertex.text = txt;
|
||||
} else {
|
||||
if (vertices[id].text === undefined) {
|
||||
vertices[id].text = id;
|
||||
if (vertex.text === undefined) {
|
||||
vertex.text = id;
|
||||
}
|
||||
}
|
||||
if (type !== undefined) {
|
||||
vertices[id].type = type;
|
||||
vertex.type = type;
|
||||
}
|
||||
if (style !== undefined && style !== null) {
|
||||
style.forEach(function (s) {
|
||||
vertices[id].styles.push(s);
|
||||
vertex.styles.push(s);
|
||||
});
|
||||
}
|
||||
if (classes !== undefined && classes !== null) {
|
||||
classes.forEach(function (s) {
|
||||
vertices[id].classes.push(s);
|
||||
vertex.classes.push(s);
|
||||
});
|
||||
}
|
||||
if (dir !== undefined) {
|
||||
vertices[id].dir = dir;
|
||||
vertex.dir = dir;
|
||||
}
|
||||
if (vertices[id].props === undefined) {
|
||||
vertices[id].props = props;
|
||||
if (vertex.props === undefined) {
|
||||
vertex.props = props;
|
||||
} else if (props !== undefined) {
|
||||
Object.assign(vertices[id].props, props);
|
||||
Object.assign(vertex.props, props);
|
||||
}
|
||||
};
|
||||
|
||||
@ -208,17 +210,19 @@ export const updateLink = function (positions: ('default' | number)[], style: st
|
||||
|
||||
export const addClass = function (ids: string, style: string[]) {
|
||||
ids.split(',').forEach(function (id) {
|
||||
if (classes[id] === undefined) {
|
||||
classes[id] = { id, styles: [], textStyles: [] };
|
||||
let classNode = classes.get(id);
|
||||
if (classNode === undefined) {
|
||||
classNode = { id, styles: [], textStyles: [] };
|
||||
classes.set(id, classNode);
|
||||
}
|
||||
|
||||
if (style !== undefined && style !== null) {
|
||||
style.forEach(function (s) {
|
||||
if (s.match('color')) {
|
||||
const newStyle = s.replace('fill', 'bgFill').replace('color', 'fill');
|
||||
classes[id].textStyles.push(newStyle);
|
||||
classNode.textStyles.push(newStyle);
|
||||
}
|
||||
classes[id].styles.push(s);
|
||||
classNode.styles.push(s);
|
||||
});
|
||||
}
|
||||
});
|
||||
@ -255,11 +259,13 @@ export const setDirection = function (dir: string) {
|
||||
*/
|
||||
export const setClass = function (ids: string, className: string) {
|
||||
for (const id of ids.split(',')) {
|
||||
if (vertices[id]) {
|
||||
vertices[id].classes.push(className);
|
||||
const vertex = vertices.get(id);
|
||||
if (vertex) {
|
||||
vertex.classes.push(className);
|
||||
}
|
||||
if (subGraphLookup[id]) {
|
||||
subGraphLookup[id].classes.push(className);
|
||||
const subGraph = subGraphLookup.get(id);
|
||||
if (subGraph) {
|
||||
subGraph.classes.push(className);
|
||||
}
|
||||
}
|
||||
};
|
||||
@ -270,7 +276,7 @@ const setTooltip = function (ids: string, tooltip: string) {
|
||||
}
|
||||
tooltip = sanitizeText(tooltip);
|
||||
for (const id of ids.split(',')) {
|
||||
tooltips[version === 'gen-1' ? lookUpDomId(id) : id] = tooltip;
|
||||
tooltips.set(version === 'gen-1' ? lookUpDomId(id) : id, tooltip);
|
||||
}
|
||||
};
|
||||
|
||||
@ -303,8 +309,9 @@ const setClickFun = function (id: string, functionName: string, functionArgs: st
|
||||
argList.push(id);
|
||||
}
|
||||
|
||||
if (vertices[id] !== undefined) {
|
||||
vertices[id].haveCallback = true;
|
||||
const vertex = vertices.get(id);
|
||||
if (vertex) {
|
||||
vertex.haveCallback = true;
|
||||
funs.push(function () {
|
||||
const elem = document.querySelector(`[id="${domId}"]`);
|
||||
if (elem !== null) {
|
||||
@ -329,19 +336,17 @@ const setClickFun = function (id: string, functionName: string, functionArgs: st
|
||||
*/
|
||||
export const setLink = function (ids: string, linkStr: string, target: string) {
|
||||
ids.split(',').forEach(function (id) {
|
||||
if (vertices[id] !== undefined) {
|
||||
vertices[id].link = utils.formatUrl(linkStr, config);
|
||||
vertices[id].linkTarget = target;
|
||||
const vertex = vertices.get(id);
|
||||
if (vertex !== undefined) {
|
||||
vertex.link = utils.formatUrl(linkStr, config);
|
||||
vertex.linkTarget = target;
|
||||
}
|
||||
});
|
||||
setClass(ids, 'clickable');
|
||||
};
|
||||
|
||||
 export const getTooltip = function (id: string) {
-  if (tooltips.hasOwnProperty(id)) {
-    return tooltips[id];
-  }
-  return undefined;
+  return tooltips.get(id);
 };
|
||||
|
||||
/**
|
||||
@@ -435,14 +440,14 @@ funs.push(setupToolTips);
  *
  */
 export const clear = function (ver = 'gen-1') {
-  vertices = {};
-  classes = {};
+  vertices = new Map();
+  classes = new Map();
   edges = [];
   funs = [setupToolTips];
   subGraphs = [];
-  subGraphLookup = {};
+  subGraphLookup = new Map();
   subCount = 0;
-  tooltips = {};
+  tooltips = new Map();
   firstGraphFlag = true;
   version = ver;
   config = getConfig();
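The `clear` hunk above swaps in fresh Map instances rather than empty object literals, and the accessors shrink to single `.get()`/`.set()` calls (see the `getTooltip` change further up). A small, self-contained sketch of the same reset-and-reuse pattern, with illustrative names only:

```ts
// Sketch: Map-backed diagram state that is re-created on every clear().
let tooltipStore = new Map<string, string>();

function setTooltipFor(id: string, tooltip: string): void {
  tooltipStore.set(id, tooltip);
}

function getTooltipFor(id: string): string | undefined {
  // Map.get already returns undefined for missing keys, so no hasOwnProperty guard is needed.
  return tooltipStore.get(id);
}

function clearState(): void {
  tooltipStore = new Map();
}

setTooltipFor('node1', 'A tooltip');
console.log(getTooltipFor('node1')); // 'A tooltip'
clearState();
console.log(getTooltipFor('node1')); // undefined
```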
|
||||
@@ -516,7 +521,7 @@ export const addSubGraph = function (
   // Remove the members in the new subgraph if they already belong to another subgraph
   subGraph.nodes = makeUniq(subGraph, subGraphs).nodes;
   subGraphs.push(subGraph);
-  subGraphLookup[id] = subGraph;
+  subGraphLookup.set(id, subGraph);
   return id;
 };
|
||||
|
||||
|
@@ -1,7 +1,7 @@
 import * as graphlib from 'dagre-d3-es/src/graphlib/index.js';
 import { select, curveLinear, selectAll } from 'd3';
 import { getConfig } from '../../diagram-api/diagramAPI.js';
-import utils from '../../utils.js';
+import utils, { getEdgeId } from '../../utils.js';
 import { render } from '../../dagre-wrapper/index.js';
 import { addHtmlLabel } from 'dagre-d3-es/src/dagre-js/label/add-html-label.js';
 import { log } from '../../logger.js';
|
||||
@ -31,12 +31,11 @@ export const addVertices = async function (vert, g, svgId, root, doc, diagObj) {
|
||||
const svg = root.select(`[id="${svgId}"]`);
|
||||
console.log('SVG:', svg, svg.node(), 'root:', root, root.node());
|
||||
|
||||
|
||||
const keys = Object.keys(vert);
|
||||
const keys = vert.keys();
|
||||
|
||||
// Iterate through each item in the vertex object (containing all the vertices found) in the graph definition
|
||||
for (const id of keys) {
|
||||
const vertex = vert[id];
|
||||
const vertex = vert.get(id);
|
||||
|
||||
/**
|
||||
* Variable for storing the classes for the vertex
|
||||
@ -213,7 +212,11 @@ export const addEdges = async function (edges, g, diagObj) {
|
||||
cnt++;
|
||||
|
||||
// Identify Link
|
||||
const linkIdBase = 'L-' + edge.start + '-' + edge.end;
|
||||
const linkIdBase = getEdgeId(edge.start, edge.end, {
|
||||
counter: cnt,
|
||||
prefix: 'L',
|
||||
});
|
||||
|
||||
// count the links from+to the same node to give unique id
|
||||
if (linkIdCnt[linkIdBase] === undefined) {
|
||||
linkIdCnt[linkIdBase] = 0;
|
||||
@ -222,7 +225,8 @@ export const addEdges = async function (edges, g, diagObj) {
|
||||
linkIdCnt[linkIdBase]++;
|
||||
log.info('abc78 new entry', linkIdBase, linkIdCnt[linkIdBase]);
|
||||
}
|
||||
let linkId = linkIdBase + '-' + linkIdCnt[linkIdBase];
|
||||
let linkId = `${linkIdBase}_${linkIdCnt[linkIdBase]}`;
|
||||
|
||||
log.info('abc78 new link id to be used is', linkIdBase, linkId, linkIdCnt[linkIdBase]);
|
||||
const linkNameStart = 'LS-' + edge.start;
|
||||
const linkNameEnd = 'LE-' + edge.end;
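The hunk above replaces the hand-built `'L-' + edge.start + '-' + edge.end` id with the shared `getEdgeId` helper (imported from utils.js earlier in this diff) and switches the per-link counter suffix from `-` to `_`. The real implementation lives in utils.js; the following is only a hedged sketch of a helper compatible with the call shape shown here, with `buildEdgeId` as an illustrative stand-in name:

```ts
// Hedged sketch of an edge-id helper compatible with the calls shown in this diff.
// Not the real getEdgeId; the exact output format is an assumption.
interface EdgeIdOptions {
  counter?: number;
  prefix?: string;
}

function buildEdgeId(
  fromId: string,
  toId: string,
  { counter = 0, prefix = '' }: EdgeIdOptions = {}
): string {
  // Joins the parts with '_' so the result stays a valid, collision-resistant DOM id.
  return `${prefix ? prefix + '_' : ''}${fromId}_${toId}_${counter}`;
}

console.log(buildEdgeId('A', 'B', { prefix: 'L', counter: 1 })); // 'L_A_B_1'
```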
|
||||
@ -339,7 +343,7 @@ export const addEdges = async function (edges, g, diagObj) {
|
||||
*
|
||||
* @param text
|
||||
* @param diagObj
|
||||
* @returns {Record<string, import('../../diagram-api/types.js').DiagramStyleClassDef>} ClassDef styles
|
||||
* @returns {Map<string, import('../../diagram-api/types.js').DiagramStyleClassDef>} ClassDef styles
|
||||
*/
|
||||
export const getClasses = function (text, diagObj) {
|
||||
return diagObj.db.getClasses();
|
||||
@ -466,9 +470,9 @@ export const draw = async function (text, id, _version, diagObj) {
|
||||
}
|
||||
|
||||
// If node has a link, wrap it in an anchor SVG object.
|
||||
const keys = Object.keys(vert);
|
||||
keys.forEach(function (key) {
|
||||
const vertex = vert[key];
|
||||
const keys = [...vert.keys()];
|
||||
keys.forEach((key) => {
|
||||
const vertex = vert.get(key);
|
||||
|
||||
if (vertex.link) {
|
||||
const node = select('#' + id + ' [id="' + key + '"]');
|
||||
|
@ -6,7 +6,7 @@ import { applyStyle } from 'dagre-d3-es/src/dagre-js/util.js';
|
||||
import { addHtmlLabel } from 'dagre-d3-es/src/dagre-js/label/add-html-label.js';
|
||||
import { log } from '../../logger.js';
|
||||
import common, { evaluate, renderKatex } from '../common/common.js';
|
||||
import { interpolateToCurve, getStylesFromArray } from '../../utils.js';
|
||||
import { interpolateToCurve, getStylesFromArray, getEdgeId } from '../../utils.js';
|
||||
import { setupGraphViewbox } from '../../setupGraphViewbox.js';
|
||||
import flowChartShapes from './flowChartShapes.js';
|
||||
import { replaceIconSubstring } from '../../rendering-util/createText.js';
|
||||
@ -175,7 +175,10 @@ export const addEdges = async function (edges, g, diagObj) {
|
||||
cnt++;
|
||||
|
||||
// Identify Link
|
||||
const linkId = 'L-' + edge.start + '-' + edge.end;
|
||||
const linkId = getEdgeId(edge.start, edge.end, {
|
||||
counter: cnt,
|
||||
prefix: 'L',
|
||||
});
|
||||
const linkNameStart = 'LS-' + edge.start;
|
||||
const linkNameEnd = 'LE-' + edge.end;
|
||||
|
||||
@ -265,7 +268,7 @@ export const addEdges = async function (edges, g, diagObj) {
|
||||
*
|
||||
* @param text
|
||||
* @param diagObj
|
||||
* @returns {Record<string, import('../../diagram-api/types.js').DiagramStyleClassDef>} ClassDef styles
|
||||
* @returns {Map<string, import('../../diagram-api/types.js').DiagramStyleClassDef>} ClassDef styles
|
||||
*/
|
||||
export const getClasses = function (text, diagObj) {
|
||||
log.info('Extracting classes');
|
||||
@ -452,9 +455,9 @@ export const draw = async function (text, id, _version, diagObj) {
|
||||
setupGraphViewbox(g, svg, conf.diagramPadding, conf.useMaxWidth);
|
||||
|
||||
// If node has a link, wrap it in an anchor SVG object.
|
||||
const keys = Object.keys(vert);
|
||||
const keys = [...vert.keys()];
|
||||
keys.forEach(function (key) {
|
||||
const vertex = vert[key];
|
||||
const vertex = vert.get(key);
|
||||
|
||||
if (vertex.link) {
|
||||
const node = root.select('#' + id + ' [id="' + diagObj.db.lookUpDomId(key) + '"]');
|
||||
|
@ -18,8 +18,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -38,8 +38,8 @@ describe('[Arrows] when parsing', () => {
|
||||
|
||||
expect(direction).toBe('LR');
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -58,8 +58,8 @@ describe('[Arrows] when parsing', () => {
|
||||
|
||||
expect(direction).toBe('RL');
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -78,8 +78,8 @@ describe('[Arrows] when parsing', () => {
|
||||
|
||||
expect(direction).toBe('BT');
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -99,8 +99,8 @@ describe('[Arrows] when parsing', () => {
|
||||
|
||||
expect(direction).toBe('TB');
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -116,8 +116,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -133,8 +133,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -150,8 +150,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(2);
|
||||
expect(edges[1].start).toBe('B');
|
||||
expect(edges[1].end).toBe('C');
|
||||
@ -169,8 +169,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -186,8 +186,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -203,8 +203,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -220,8 +220,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -237,8 +237,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -254,8 +254,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
|
@ -19,8 +19,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -34,8 +34,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -49,8 +49,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -64,8 +64,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -79,8 +79,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -94,8 +94,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -109,8 +109,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -124,8 +124,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -143,8 +143,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(edges.length).toBe(1);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -48,8 +48,6 @@ describe('[Edges] when parsing', () => {

it('should handle open ended edges', function () {
const res = flow.parser.parse('graph TD;A---B;');

const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(edges[0].type).toBe('arrow_open');

@ -57,8 +55,6 @@ describe('[Edges] when parsing', () => {

it('should handle cross ended edges', function () {
const res = flow.parser.parse('graph TD;A--xB;');

const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(edges[0].type).toBe('arrow_cross');

@ -66,8 +62,6 @@ describe('[Edges] when parsing', () => {

it('should handle open ended edges', function () {
const res = flow.parser.parse('graph TD;A--oB;');

const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(edges[0].type).toBe('arrow_circle');

@ -81,8 +75,8 @@ describe('[Edges] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(edges.length).toBe(1);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -99,8 +93,8 @@ describe('[Edges] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(edges.length).toBe(1);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -119,8 +113,8 @@ describe('[Edges] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(edges.length).toBe(1);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -139,8 +133,8 @@ describe('[Edges] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(edges.length).toBe(2);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -164,8 +158,8 @@ describe('[Edges] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(edges.length).toBe(1);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -183,8 +177,8 @@ describe('[Edges] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(edges.length).toBe(1);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -202,8 +196,8 @@ describe('[Edges] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(edges.length).toBe(1);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -221,8 +215,8 @@ describe('[Edges] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(edges.length).toBe(1);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -240,8 +234,8 @@ describe('[Edges] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(edges.length).toBe(1);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -259,8 +253,8 @@ describe('[Edges] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(edges.length).toBe(1);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -278,8 +272,8 @@ describe('[Edges] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(edges.length).toBe(1);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -297,8 +291,8 @@ describe('[Edges] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(edges.length).toBe(1);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -316,8 +310,8 @@ describe('[Edges] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(edges.length).toBe(1);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -335,8 +329,8 @@ describe('[Edges] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(edges.length).toBe(1);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -354,8 +348,8 @@ describe('[Edges] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(edges.length).toBe(1);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -373,8 +367,8 @@ describe('[Edges] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(edges.length).toBe(1);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -392,8 +386,8 @@ describe('[Edges] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(edges.length).toBe(1);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -411,8 +405,8 @@ describe('[Edges] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(edges.length).toBe(1);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -430,8 +424,8 @@ describe('[Edges] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(edges.length).toBe(1);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -449,8 +443,8 @@ describe('[Edges] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(edges.length).toBe(1);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -468,8 +462,8 @@ describe('[Edges] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(edges.length).toBe(1);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -487,8 +481,8 @@ describe('[Edges] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(edges.length).toBe(1);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');
@ -23,7 +23,7 @@ describe('[Text] when parsing', () => {

expect(edges[0].type).toBe('arrow_point');
expect(edges.length).toBe(47917);
expect(Object.keys(vert).length).toBe(2);
expect(vert.size).toBe(2);
});
});
});
@ -19,12 +19,12 @@ A["\`The cat in **the** hat\`"]-- "\`The *bat* in the chat\`" -->B["The dog in t
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['A'].text).toBe('The cat in **the** hat');
expect(vert['A'].labelType).toBe('markdown');
expect(vert['B'].id).toBe('B');
expect(vert['B'].text).toBe('The dog in the hog');
expect(vert['B'].labelType).toBe('string');
expect(vert.get('A').id).toBe('A');
expect(vert.get('A').text).toBe('The cat in **the** hat');
expect(vert.get('A').labelType).toBe('markdown');
expect(vert.get('B').id).toBe('B');
expect(vert.get('B').text).toBe('The dog in the hog');
expect(vert.get('B').labelType).toBe('string');
expect(edges.length).toBe(2);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');
@ -43,7 +43,7 @@ describe('[Singlenodes] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges.length).toBe(0);
expect(vert['A'].styles.length).toBe(0);
expect(vert.get('A').styles.length).toBe(0);
});
it('should handle a single node with white space after it (SN1)', function () {
// Silly but syntactically correct

@ -53,7 +53,7 @@ describe('[Singlenodes] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges.length).toBe(0);
expect(vert['A'].styles.length).toBe(0);
expect(vert.get('A').styles.length).toBe(0);
});

it('should handle a single square node', function () {

@ -64,8 +64,8 @@ describe('[Singlenodes] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges.length).toBe(0);
expect(vert['a'].styles.length).toBe(0);
expect(vert['a'].type).toBe('square');
expect(vert.get('a').styles.length).toBe(0);
expect(vert.get('a').type).toBe('square');
});

it('should handle a single round square node', function () {

@ -76,8 +76,8 @@ describe('[Singlenodes] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges.length).toBe(0);
expect(vert['a'].styles.length).toBe(0);
expect(vert['a'].type).toBe('square');
expect(vert.get('a').styles.length).toBe(0);
expect(vert.get('a').type).toBe('square');
});

it('should handle a single circle node', function () {

@ -88,7 +88,7 @@ describe('[Singlenodes] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges.length).toBe(0);
expect(vert['a'].type).toBe('circle');
expect(vert.get('a').type).toBe('circle');
});

it('should handle a single round node', function () {

@ -99,7 +99,7 @@ describe('[Singlenodes] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges.length).toBe(0);
expect(vert['a'].type).toBe('round');
expect(vert.get('a').type).toBe('round');
});

it('should handle a single odd node', function () {

@ -110,7 +110,7 @@ describe('[Singlenodes] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges.length).toBe(0);
expect(vert['a'].type).toBe('odd');
expect(vert.get('a').type).toBe('odd');
});

it('should handle a single diamond node', function () {

@ -121,7 +121,7 @@ describe('[Singlenodes] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges.length).toBe(0);
expect(vert['a'].type).toBe('diamond');
expect(vert.get('a').type).toBe('diamond');
});

it('should handle a single diamond node with whitespace after it', function () {

@ -132,7 +132,7 @@ describe('[Singlenodes] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges.length).toBe(0);
expect(vert['a'].type).toBe('diamond');
expect(vert.get('a').type).toBe('diamond');
});

it('should handle a single diamond node with html in it (SN3)', function () {

@ -143,8 +143,8 @@ describe('[Singlenodes] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges.length).toBe(0);
expect(vert['a'].type).toBe('diamond');
expect(vert['a'].text).toBe('A <br> end');
expect(vert.get('a').type).toBe('diamond');
expect(vert.get('a').text).toBe('A <br> end');
});

it('should handle a single hexagon node', function () {

@ -155,7 +155,7 @@ describe('[Singlenodes] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges.length).toBe(0);
expect(vert['a'].type).toBe('hexagon');
expect(vert.get('a').type).toBe('hexagon');
});

it('should handle a single hexagon node with html in it', function () {

@ -166,8 +166,8 @@ describe('[Singlenodes] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges.length).toBe(0);
expect(vert['a'].type).toBe('hexagon');
expect(vert['a'].text).toBe('A <br> end');
expect(vert.get('a').type).toBe('hexagon');
expect(vert.get('a').text).toBe('A <br> end');
});

it('should handle a single round node with html in it', function () {

@ -178,8 +178,8 @@ describe('[Singlenodes] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges.length).toBe(0);
expect(vert['a'].type).toBe('round');
expect(vert['a'].text).toBe('A <br> end');
expect(vert.get('a').type).toBe('round');
expect(vert.get('a').text).toBe('A <br> end');
});

it('should handle a single double circle node', function () {

@ -190,7 +190,7 @@ describe('[Singlenodes] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges.length).toBe(0);
expect(vert['a'].type).toBe('doublecircle');
expect(vert.get('a').type).toBe('doublecircle');
});

it('should handle a single double circle node with whitespace after it', function () {

@ -201,7 +201,7 @@ describe('[Singlenodes] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges.length).toBe(0);
expect(vert['a'].type).toBe('doublecircle');
expect(vert.get('a').type).toBe('doublecircle');
});

it('should handle a single double circle node with html in it (SN3)', function () {

@ -212,8 +212,8 @@ describe('[Singlenodes] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges.length).toBe(0);
expect(vert['a'].type).toBe('doublecircle');
expect(vert['a'].text).toBe('A <br> end');
expect(vert.get('a').type).toBe('doublecircle');
expect(vert.get('a').text).toBe('A <br> end');
});

it('should handle a single node with alphanumerics starting on a char', function () {

@ -224,7 +224,7 @@ describe('[Singlenodes] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges.length).toBe(0);
expect(vert['id1'].styles.length).toBe(0);
expect(vert.get('id1').styles.length).toBe(0);
});

it('should handle a single node with a single digit', function () {

@ -235,7 +235,7 @@ describe('[Singlenodes] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges.length).toBe(0);
expect(vert['1'].text).toBe('1');
expect(vert.get('1').text).toBe('1');
});

it('should handle a single node with a single digit in a subgraph', function () {

@ -247,7 +247,7 @@ describe('[Singlenodes] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges.length).toBe(0);
expect(vert['1'].text).toBe('1');
expect(vert.get('1').text).toBe('1');
});

it('should handle a single node with alphanumerics starting on a num', function () {

@ -258,7 +258,7 @@ describe('[Singlenodes] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges.length).toBe(0);
expect(vert['1id'].styles.length).toBe(0);
expect(vert.get('1id').styles.length).toBe(0);
});

it('should handle a single node with alphanumerics containing a minus sign', function () {

@ -269,7 +269,7 @@ describe('[Singlenodes] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges.length).toBe(0);
expect(vert['i-d'].styles.length).toBe(0);
expect(vert.get('i-d').styles.length).toBe(0);
});

it('should handle a single node with alphanumerics containing a underscore sign', function () {

@ -280,33 +280,33 @@ describe('[Singlenodes] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges.length).toBe(0);
expect(vert['i_d'].styles.length).toBe(0);
expect(vert.get('i_d').styles.length).toBe(0);
});

it.each(keywords)('should handle keywords between dashes "-"', function (keyword) {
const res = flow.parser.parse(`graph TD;a-${keyword}-node;`);
const vert = flow.parser.yy.getVertices();
expect(vert[`a-${keyword}-node`].text).toBe(`a-${keyword}-node`);
expect(vert.get(`a-${keyword}-node`).text).toBe(`a-${keyword}-node`);
});

it.each(keywords)('should handle keywords between periods "."', function (keyword) {
const res = flow.parser.parse(`graph TD;a.${keyword}.node;`);
const vert = flow.parser.yy.getVertices();
expect(vert[`a.${keyword}.node`].text).toBe(`a.${keyword}.node`);
expect(vert.get(`a.${keyword}.node`).text).toBe(`a.${keyword}.node`);
});

it.each(keywords)('should handle keywords between underscores "_"', function (keyword) {
const res = flow.parser.parse(`graph TD;a_${keyword}_node;`);
const vert = flow.parser.yy.getVertices();
expect(vert[`a_${keyword}_node`].text).toBe(`a_${keyword}_node`);
expect(vert.get(`a_${keyword}_node`).text).toBe(`a_${keyword}_node`);
});

it.each(keywords)('should handle nodes ending in %s', function (keyword) {
const res = flow.parser.parse(`graph TD;node_${keyword};node.${keyword};node-${keyword};`);
const vert = flow.parser.yy.getVertices();
expect(vert[`node_${keyword}`].text).toBe(`node_${keyword}`);
expect(vert[`node.${keyword}`].text).toBe(`node.${keyword}`);
expect(vert[`node-${keyword}`].text).toBe(`node-${keyword}`);
expect(vert.get(`node_${keyword}`).text).toBe(`node_${keyword}`);
expect(vert.get(`node.${keyword}`).text).toBe(`node.${keyword}`);
expect(vert.get(`node-${keyword}`).text).toBe(`node-${keyword}`);
});

const errorKeywords = [

@ -337,9 +337,9 @@ describe('[Singlenodes] when parsing', () => {
it.each(workingKeywords)('should parse node beginning with %s', function (keyword) {
flow.parser.parse(`graph TD; ${keyword}.node;${keyword}-node;${keyword}/node;`);
const vert = flow.parser.yy.getVertices();
expect(vert[`${keyword}.node`].text).toBe(`${keyword}.node`);
expect(vert[`${keyword}-node`].text).toBe(`${keyword}-node`);
expect(vert[`${keyword}/node`].text).toBe(`${keyword}/node`);
expect(vert.get(`${keyword}.node`).text).toBe(`${keyword}.node`);
expect(vert.get(`${keyword}-node`).text).toBe(`${keyword}-node`);
expect(vert.get(`${keyword}/node`).text).toBe(`${keyword}/node`);
});

it.each(specialChars)(

@ -347,7 +347,7 @@ describe('[Singlenodes] when parsing', () => {
function (specialChar) {
flow.parser.parse(`graph TD; ${specialChar} --> A`);
const vert = flow.parser.yy.getVertices();
expect(vert[`${specialChar}`].text).toBe(`${specialChar}`);
expect(vert.get(`${specialChar}`).text).toBe(`${specialChar}`);
}
);

@ -356,7 +356,7 @@ describe('[Singlenodes] when parsing', () => {
function (specialChar) {
flow.parser.parse(`graph TD; ${specialChar}node --> A`);
const vert = flow.parser.yy.getVertices();
expect(vert[`${specialChar}node`].text).toBe(`${specialChar}node`);
expect(vert.get(`${specialChar}node`).text).toBe(`${specialChar}node`);
}
);

@ -365,7 +365,7 @@ describe('[Singlenodes] when parsing', () => {
function (specialChar) {
flow.parser.parse(`graph TD; node${specialChar} --> A`);
const vert = flow.parser.yy.getVertices();
expect(vert[`node${specialChar}`].text).toBe(`node${specialChar}`);
expect(vert.get(`node${specialChar}`).text).toBe(`node${specialChar}`);
}
);
});
@ -20,10 +20,8 @@ describe('[Style] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

const style = vert['Q'].styles[0];

expect(vert['Q'].styles.length).toBe(1);
expect(vert['Q'].styles[0]).toBe('background:#fff');
expect(vert.get('Q').styles.length).toBe(1);
expect(vert.get('Q').styles[0]).toBe('background:#fff');
});

it('should handle multiple styles for a vortex', function () {

@ -32,9 +30,9 @@ describe('[Style] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['R'].styles.length).toBe(2);
expect(vert['R'].styles[0]).toBe('background:#fff');
expect(vert['R'].styles[1]).toBe('border:1px solid red');
expect(vert.get('R').styles.length).toBe(2);
expect(vert.get('R').styles[0]).toBe('background:#fff');
expect(vert.get('R').styles[1]).toBe('border:1px solid red');
});

it('should handle multiple styles in a graph', function () {

@ -45,11 +43,11 @@ describe('[Style] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['S'].styles.length).toBe(1);
expect(vert['T'].styles.length).toBe(2);
expect(vert['S'].styles[0]).toBe('background:#aaa');
expect(vert['T'].styles[0]).toBe('background:#bbb');
expect(vert['T'].styles[1]).toBe('border:1px solid red');
expect(vert.get('S').styles.length).toBe(1);
expect(vert.get('T').styles.length).toBe(2);
expect(vert.get('S').styles[0]).toBe('background:#aaa');
expect(vert.get('T').styles[0]).toBe('background:#bbb');
expect(vert.get('T').styles[1]).toBe('border:1px solid red');
});

it('should handle styles and graph definitions in a graph', function () {

@ -60,11 +58,11 @@ describe('[Style] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['S'].styles.length).toBe(1);
expect(vert['T'].styles.length).toBe(2);
expect(vert['S'].styles[0]).toBe('background:#aaa');
expect(vert['T'].styles[0]).toBe('background:#bbb');
expect(vert['T'].styles[1]).toBe('border:1px solid red');
expect(vert.get('S').styles.length).toBe(1);
expect(vert.get('T').styles.length).toBe(2);
expect(vert.get('S').styles[0]).toBe('background:#aaa');
expect(vert.get('T').styles[0]).toBe('background:#bbb');
expect(vert.get('T').styles[1]).toBe('border:1px solid red');
});

it('should handle styles and graph definitions in a graph', function () {

@ -73,9 +71,9 @@ describe('[Style] when parsing', () => {

const vert = flow.parser.yy.getVertices();

expect(vert['T'].styles.length).toBe(2);
expect(vert['T'].styles[0]).toBe('background:#bbb');
expect(vert['T'].styles[1]).toBe('border:1px solid red');
expect(vert.get('T').styles.length).toBe(2);
expect(vert.get('T').styles[0]).toBe('background:#bbb');
expect(vert.get('T').styles[1]).toBe('border:1px solid red');
});

it('should keep node label text (if already defined) when a style is applied', function () {

@ -85,10 +83,10 @@ describe('[Style] when parsing', () => {

const vert = flow.parser.yy.getVertices();

expect(vert['A'].text).toBe('');
expect(vert['B'].text).toBe('Test');
expect(vert['C'].text).toBe('C');
expect(vert['D'].text).toBe('D');
expect(vert.get('A').text).toBe('');
expect(vert.get('B').text).toBe('Test');
expect(vert.get('C').text).toBe('C');
expect(vert.get('D').text).toBe('D');
});

it('should be possible to declare a class', function () {

@ -99,9 +97,9 @@ describe('[Style] when parsing', () => {

const classes = flow.parser.yy.getClasses();

expect(classes['exClass'].styles.length).toBe(2);
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
expect(classes.get('exClass').styles.length).toBe(2);
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
});

it('should be possible to declare multiple classes', function () {

@ -111,13 +109,13 @@ describe('[Style] when parsing', () => {

const classes = flow.parser.yy.getClasses();

expect(classes['firstClass'].styles.length).toBe(2);
expect(classes['firstClass'].styles[0]).toBe('background:#bbb');
expect(classes['firstClass'].styles[1]).toBe('border:1px solid red');
expect(classes.get('firstClass').styles.length).toBe(2);
expect(classes.get('firstClass').styles[0]).toBe('background:#bbb');
expect(classes.get('firstClass').styles[1]).toBe('border:1px solid red');

expect(classes['secondClass'].styles.length).toBe(2);
expect(classes['secondClass'].styles[0]).toBe('background:#bbb');
expect(classes['secondClass'].styles[1]).toBe('border:1px solid red');
expect(classes.get('secondClass').styles.length).toBe(2);
expect(classes.get('secondClass').styles[0]).toBe('background:#bbb');
expect(classes.get('secondClass').styles[1]).toBe('border:1px solid red');
});

it('should be possible to declare a class with a dot in the style', function () {

@ -128,9 +126,9 @@ describe('[Style] when parsing', () => {

const classes = flow.parser.yy.getClasses();

expect(classes['exClass'].styles.length).toBe(2);
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
expect(classes['exClass'].styles[1]).toBe('border:1.5px solid red');
expect(classes.get('exClass').styles.length).toBe(2);
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
expect(classes.get('exClass').styles[1]).toBe('border:1.5px solid red');
});
it('should be possible to declare a class with a space in the style', function () {
const res = flow.parser.parse(

@ -140,9 +138,9 @@ describe('[Style] when parsing', () => {

const classes = flow.parser.yy.getClasses();

expect(classes['exClass'].styles.length).toBe(2);
expect(classes['exClass'].styles[0]).toBe('background: #bbb');
expect(classes['exClass'].styles[1]).toBe('border:1.5px solid red');
expect(classes.get('exClass').styles.length).toBe(2);
expect(classes.get('exClass').styles[0]).toBe('background: #bbb');
expect(classes.get('exClass').styles[1]).toBe('border:1.5px solid red');
});
it('should be possible to apply a class to a vertex', function () {
let statement = '';

@ -156,9 +154,9 @@ describe('[Style] when parsing', () => {

const classes = flow.parser.yy.getClasses();

expect(classes['exClass'].styles.length).toBe(2);
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
expect(classes.get('exClass').styles.length).toBe(2);
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
});
it('should be possible to apply a class to a vertex with an id containing _', function () {
let statement = '';

@ -172,9 +170,9 @@ describe('[Style] when parsing', () => {

const classes = flow.parser.yy.getClasses();

expect(classes['exClass'].styles.length).toBe(2);
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
expect(classes.get('exClass').styles.length).toBe(2);
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
});
it('should be possible to apply a class to a vertex directly', function () {
let statement = '';

@ -187,10 +185,10 @@ describe('[Style] when parsing', () => {
const vertices = flow.parser.yy.getVertices();
const classes = flow.parser.yy.getClasses();

expect(classes['exClass'].styles.length).toBe(2);
expect(vertices['b'].classes[0]).toBe('exClass');
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
expect(classes.get('exClass').styles.length).toBe(2);
expect(vertices.get('b').classes[0]).toBe('exClass');
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
});

it('should be possible to apply a class to a vertex directly : usecase A[text].class ', function () {

@ -204,10 +202,10 @@ describe('[Style] when parsing', () => {
const vertices = flow.parser.yy.getVertices();
const classes = flow.parser.yy.getClasses();

expect(classes['exClass'].styles.length).toBe(2);
expect(vertices['b'].classes[0]).toBe('exClass');
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
expect(classes.get('exClass').styles.length).toBe(2);
expect(vertices.get('b').classes[0]).toBe('exClass');
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
});

it('should be possible to apply a class to a vertex directly : usecase A[text].class-->B[test2] ', function () {

@ -221,10 +219,10 @@ describe('[Style] when parsing', () => {
const vertices = flow.parser.yy.getVertices();
const classes = flow.parser.yy.getClasses();

expect(classes['exClass'].styles.length).toBe(2);
expect(vertices['A'].classes[0]).toBe('exClass');
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
expect(classes.get('exClass').styles.length).toBe(2);
expect(vertices.get('A').classes[0]).toBe('exClass');
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
});

it('should be possible to apply a class to a vertex directly 2', function () {

@ -238,10 +236,10 @@ describe('[Style] when parsing', () => {
const vertices = flow.parser.yy.getVertices();
const classes = flow.parser.yy.getClasses();

expect(classes['exClass'].styles.length).toBe(2);
expect(vertices['b'].classes[0]).toBe('exClass');
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
expect(classes.get('exClass').styles.length).toBe(2);
expect(vertices.get('b').classes[0]).toBe('exClass');
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
});
it('should be possible to apply a class to a comma separated list of vertices', function () {
let statement = '';

@ -256,11 +254,11 @@ describe('[Style] when parsing', () => {
const classes = flow.parser.yy.getClasses();
const vertices = flow.parser.yy.getVertices();

expect(classes['exClass'].styles.length).toBe(2);
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
expect(vertices['a'].classes[0]).toBe('exClass');
expect(vertices['b'].classes[0]).toBe('exClass');
expect(classes.get('exClass').styles.length).toBe(2);
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
expect(vertices.get('a').classes[0]).toBe('exClass');
expect(vertices.get('b').classes[0]).toBe('exClass');
});

it('should handle style definitions with more then 1 digit in a row', function () {

@ -364,9 +362,9 @@ describe('[Style] when parsing', () => {

const vert = flow.parser.yy.getVertices();

expect(vert['A'].classes.length).toBe(0);
expect(vert['B'].classes[0]).toBe('C1');
expect(vert['D'].classes[0]).toBe('C1');
expect(vert['E'].classes[0]).toBe('C2');
expect(vert.get('A').classes.length).toBe(0);
expect(vert.get('B').classes[0]).toBe('C1');
expect(vert.get('D').classes[0]).toBe('C1');
expect(vert.get('E').classes[0]).toBe('C2');
});
});
@ -113,7 +113,7 @@ describe('[Text] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges[0].type).toBe('arrow_cross');
expect(vert['v'].text).toBe('my text');
expect(vert.get('v').text).toBe('my text');
});
it('should handle v in node ids v at end', function () {
// v at end

@ -123,7 +123,7 @@ describe('[Text] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges[0].type).toBe('arrow_cross');
expect(vert['csv'].text).toBe('my text');
expect(vert.get('csv').text).toBe('my text');
});
it('should handle v in node ids v in middle', function () {
// v in middle

@ -133,7 +133,7 @@ describe('[Text] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges[0].type).toBe('arrow_cross');
expect(vert['ava'].text).toBe('my text');
expect(vert.get('ava').text).toBe('my text');
});
it('should handle v in node ids, v at start', function () {
// v at start

@ -143,7 +143,7 @@ describe('[Text] when parsing', () => {
const edges = flow.parser.yy.getEdges();

expect(edges[0].type).toBe('arrow_cross');
expect(vert['va'].text).toBe('my text');
expect(vert.get('va').text).toBe('my text');
});
it('should handle keywords', function () {
const res = flow.parser.parse('graph TD;A--x|text including graph space|B;');

@ -157,7 +157,7 @@ describe('[Text] when parsing', () => {
const res = flow.parser.parse('graph TD;V-->a[v]');
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();
expect(vert['a'].text).toBe('v');
expect(vert.get('a').text).toBe('v');
});
it('should handle quoted text', function () {
const res = flow.parser.parse('graph TD;V-- "test string()" -->a[v]');

@ -302,8 +302,8 @@ describe('[Text] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['C'].type).toBe('round');
expect(vert['C'].text).toBe('Chimpansen hoppar');
expect(vert.get('C').type).toBe('round');
expect(vert.get('C').text).toBe('Chimpansen hoppar');
});

const keywords = [

@ -353,8 +353,8 @@ describe('[Text] when parsing', () => {

const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();
expect(vert['B'].type).toBe(`${shape.name}`);
expect(vert['B'].text).toBe(`This node has a ${keyword} as text`);
expect(vert.get('B').type).toBe(`${shape.name}`);
expect(vert.get('B').text).toBe(`This node has a ${keyword} as text`);
});
});

@ -365,24 +365,24 @@ describe('[Text] when parsing', () => {

const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();
expect(vert['B'].type).toBe('rect');
expect(vert['B'].text).toBe(`This node has a ${keyword} as text`);
expect(vert.get('B').type).toBe('rect');
expect(vert.get('B').text).toBe(`This node has a ${keyword} as text`);
});

it('should handle edge case for odd vertex with node id ending with minus', function () {
const res = flow.parser.parse('graph TD;A_node-->odd->Vertex Text];');
const vert = flow.parser.yy.getVertices();

expect(vert['odd-'].type).toBe('odd');
expect(vert['odd-'].text).toBe('Vertex Text');
expect(vert.get('odd-').type).toBe('odd');
expect(vert.get('odd-').text).toBe('Vertex Text');
});
it('should allow forward slashes in lean_right vertices', function () {
const rest = flow.parser.parse(`graph TD;A_node-->B[/This node has a / as text/];`);

const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();
expect(vert['B'].type).toBe('lean_right');
expect(vert['B'].text).toBe(`This node has a / as text`);
expect(vert.get('B').type).toBe('lean_right');
expect(vert.get('B').text).toBe(`This node has a / as text`);
});

it('should allow back slashes in lean_left vertices', function () {

@ -390,8 +390,8 @@ describe('[Text] when parsing', () => {

const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();
expect(vert['B'].type).toBe('lean_left');
expect(vert['B'].text).toBe(`This node has a \\ as text`);
expect(vert.get('B').type).toBe('lean_left');
expect(vert.get('B').text).toBe(`This node has a \\ as text`);
});

it('should handle åäö and minus', function () {

@ -400,8 +400,8 @@ describe('[Text] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['C'].type).toBe('diamond');
expect(vert['C'].text).toBe('Chimpansen hoppar åäö-ÅÄÖ');
expect(vert.get('C').type).toBe('diamond');
expect(vert.get('C').text).toBe('Chimpansen hoppar åäö-ÅÄÖ');
});

it('should handle with åäö, minus and space and br', function () {

@ -410,8 +410,8 @@ describe('[Text] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['C'].type).toBe('round');
expect(vert['C'].text).toBe('Chimpansen hoppar åäö <br> - ÅÄÖ');
expect(vert.get('C').type).toBe('round');
expect(vert.get('C').text).toBe('Chimpansen hoppar åäö <br> - ÅÄÖ');
});
// it.skip('should handle åäö, minus and space and br',function(){
//    const res = flow.parser.parse('graph TD; A[Object(foo,bar)]-->B(Thing);');

@ -419,22 +419,22 @@ describe('[Text] when parsing', () => {
//    const vert = flow.parser.yy.getVertices();
//    const edges = flow.parser.yy.getEdges();
//
//    expect(vert['C'].type).toBe('round');
//    expect(vert['C'].text).toBe(' A[Object(foo,bar)]-->B(Thing);');
//    expect(vert.get('C').type).toBe('round');
//    expect(vert.get('C').text).toBe(' A[Object(foo,bar)]-->B(Thing);');
// });
it('should handle unicode chars', function () {
const res = flow.parser.parse('graph TD;A-->C(Начало);');

const vert = flow.parser.yy.getVertices();

expect(vert['C'].text).toBe('Начало');
expect(vert.get('C').text).toBe('Начало');
});
it('should handle backslask', function () {
const res = flow.parser.parse('graph TD;A-->C(c:\\windows);');

const vert = flow.parser.yy.getVertices();

expect(vert['C'].text).toBe('c:\\windows');
expect(vert.get('C').text).toBe('c:\\windows');
});
it('should handle CAPS', function () {
const res = flow.parser.parse('graph TD;A-->C(some CAPS);');

@ -442,8 +442,8 @@ describe('[Text] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['C'].type).toBe('round');
expect(vert['C'].text).toBe('some CAPS');
expect(vert.get('C').type).toBe('round');
expect(vert.get('C').text).toBe('some CAPS');
});
it('should handle directions', function () {
const res = flow.parser.parse('graph TD;A-->C(some URL);');

@ -451,8 +451,8 @@ describe('[Text] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['C'].type).toBe('round');
expect(vert['C'].text).toBe('some URL');
expect(vert.get('C').type).toBe('round');
expect(vert.get('C').text).toBe('some URL');
});
});

@ -464,9 +464,9 @@ describe('[Text] when parsing', () => {

expect(edges[0].type).toBe('arrow_circle');
expect(edges[1].type).toBe('arrow_point');
expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert['C'].id).toBe('C');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(vert.get('C').id).toBe('C');
expect(edges.length).toBe(2);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -482,8 +482,8 @@ describe('[Text] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].type).toBe('square');
expect(vert['A'].text).toBe('chimpansen hoppar');
expect(vert.get('A').type).toBe('square');
expect(vert.get('A').text).toBe('chimpansen hoppar');
});

it('should handle text in vertices with space with spaces between vertices and link', function () {

@ -492,8 +492,8 @@ describe('[Text] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].type).toBe('square');
expect(vert['A'].text).toBe('chimpansen hoppar');
expect(vert.get('A').type).toBe('square');
expect(vert.get('A').text).toBe('chimpansen hoppar');
});
it('should handle text including _ in vertices', function () {
const res = flow.parser.parse('graph TD;A[chimpansen_hoppar] --> C;');

@ -501,8 +501,8 @@ describe('[Text] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].type).toBe('square');
expect(vert['A'].text).toBe('chimpansen_hoppar');
expect(vert.get('A').type).toBe('square');
expect(vert.get('A').text).toBe('chimpansen_hoppar');
});

it('should handle quoted text in vertices ', function () {

@ -511,8 +511,8 @@ describe('[Text] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].type).toBe('square');
expect(vert['A'].text).toBe('chimpansen hoppar ()[]');
expect(vert.get('A').type).toBe('square');
expect(vert.get('A').text).toBe('chimpansen hoppar ()[]');
});

it('should handle text in circle vertices with space', function () {

@ -521,8 +521,8 @@ describe('[Text] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].type).toBe('circle');
expect(vert['A'].text).toBe('chimpansen hoppar');
expect(vert.get('A').type).toBe('circle');
expect(vert.get('A').text).toBe('chimpansen hoppar');
});

it('should handle text in ellipse vertices', function () {

@ -531,8 +531,8 @@ describe('[Text] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].type).toBe('ellipse');
expect(vert['A'].text).toBe('this is an ellipse');
expect(vert.get('A').type).toBe('ellipse');
expect(vert.get('A').text).toBe('this is an ellipse');
});

it('should not freeze when ellipse text has a `(`', function () {

@ -545,8 +545,8 @@ describe('[Text] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].type).toBe('round');
expect(vert['A'].text).toBe('chimpansen hoppar');
expect(vert.get('A').type).toBe('round');
expect(vert.get('A').text).toBe('chimpansen hoppar');
});

it('should handle text in with ?', function () {

@ -555,7 +555,7 @@ describe('[Text] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].text).toBe('?');
expect(vert.get('A').text).toBe('?');
expect(edges[0].text).toBe('?');
});
it('should handle text in with éèêàçô', function () {

@ -564,7 +564,7 @@ describe('[Text] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].text).toBe('éèêàçô');
expect(vert.get('A').text).toBe('éèêàçô');
expect(edges[0].text).toBe('éèêàçô');
});

@ -574,7 +574,7 @@ describe('[Text] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].text).toBe(',.?!+-*');
expect(vert.get('A').text).toBe(',.?!+-*');
expect(edges[0].text).toBe(',.?!+-*');
});
@ -22,9 +22,9 @@ describe('when parsing flowcharts', function () {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert['C'].id).toBe('C');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(vert.get('C').id).toBe('C');
expect(edges.length).toBe(2);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -44,9 +44,9 @@ describe('when parsing flowcharts', function () {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert['C'].id).toBe('C');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(vert.get('C').id).toBe('C');
expect(edges.length).toBe(2);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('C');

@ -66,9 +66,9 @@ describe('when parsing flowcharts', function () {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert['C'].id).toBe('C');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(vert.get('C').id).toBe('C');
expect(edges.length).toBe(2);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -88,10 +88,10 @@ describe('when parsing flowcharts', function () {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert['C'].id).toBe('C');
expect(vert['D'].id).toBe('D');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(vert.get('C').id).toBe('C');
expect(vert.get('D').id).toBe('D');
expect(edges.length).toBe(4);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('C');

@ -119,10 +119,10 @@ describe('when parsing flowcharts', function () {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert['C'].id).toBe('C');
expect(vert['D'].id).toBe('D');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(vert.get('C').id).toBe('C');
expect(vert.get('D').id).toBe('D');
expect(edges.length).toBe(4);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('C');

@ -150,11 +150,11 @@ describe('when parsing flowcharts', function () {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert['B2'].id).toBe('B2');
expect(vert['C'].id).toBe('C');
expect(vert['D2'].id).toBe('D2');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(vert.get('B2').id).toBe('B2');
expect(vert.get('C').id).toBe('C');
expect(vert.get('D2').id).toBe('D2');
expect(edges.length).toBe(6);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');

@ -193,14 +193,14 @@ describe('when parsing flowcharts', function () {

const classes = flow.parser.yy.getClasses();

expect(classes['exClass'].styles.length).toBe(2);
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert['B'].classes[0]).toBe('exClass');
expect(vert['C'].id).toBe('C');
expect(vert['D'].id).toBe('D');
expect(classes.get('exClass').styles.length).toBe(2);
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
expect(vert.get('B').classes[0]).toBe('exClass');
expect(vert.get('C').id).toBe('C');
expect(vert.get('D').id).toBe('D');
expect(edges.length).toBe(4);
expect(edges[0].start).toBe('A');
expect(edges[0].end).toBe('B');
@ -19,8 +19,8 @@ describe('parsing a flow chart', function () {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(2);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@ -34,8 +34,8 @@ describe('parsing a flow chart', function () {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['endpoint'].id).toBe('endpoint');
|
||||
expect(vert['sender'].id).toBe('sender');
|
||||
expect(vert.get('endpoint').id).toBe('endpoint');
|
||||
expect(vert.get('sender').id).toBe('sender');
|
||||
expect(edges[0].start).toBe('endpoint');
|
||||
expect(edges[0].end).toBe('sender');
|
||||
});
|
||||
@ -46,8 +46,8 @@ describe('parsing a flow chart', function () {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['blend'].id).toBe('blend');
expect(vert['monograph'].id).toBe('monograph');
expect(vert.get('blend').id).toBe('blend');
expect(vert.get('monograph').id).toBe('monograph');
expect(edges[0].start).toBe('blend');
expect(edges[0].end).toBe('monograph');
});
@ -58,8 +58,8 @@ describe('parsing a flow chart', function () {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['default'].id).toBe('default');
expect(vert['monograph'].id).toBe('monograph');
expect(vert.get('default').id).toBe('default');
expect(vert.get('monograph').id).toBe('monograph');
expect(edges[0].start).toBe('default');
expect(edges[0].end).toBe('monograph');
});
@ -71,12 +71,12 @@ describe('parsing a flow chart', function () {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['A'].id).toBe('A');
expect(vert['B'].id).toBe('B');
expect(vert.get('A').id).toBe('A');
expect(vert.get('B').id).toBe('B');
if (result) {
expect(vert['A'].text).toBe(result);
expect(vert.get('A').text).toBe(result);
} else {
expect(vert['A'].text).toBe(char);
expect(vert.get('A').text).toBe(char);
}
flow.parser.yy.clear();
};
@ -135,7 +135,7 @@ describe('parsing a flow chart', function () {
const res = flow.parser.parse(statement);
const vertices = flow.parser.yy.getVertices();
const classes = flow.parser.yy.getClasses();
expect(vertices['node1TB'].id).toBe('node1TB');
expect(vertices.get('node1TB').id).toBe('node1TB');
});

it('should be possible to use direction in node ids', function () {
@ -145,7 +145,7 @@ describe('parsing a flow chart', function () {
const res = flow.parser.parse(statement);
const vertices = flow.parser.yy.getVertices();
const classes = flow.parser.yy.getClasses();
expect(vertices['A'].id).toBe('A');
expect(vertices.get('A').id).toBe('A');
});

it('should be possible to use numbers as labels', function () {
@ -154,8 +154,8 @@ describe('parsing a flow chart', function () {
statement = statement + 'graph TB;subgraph "number as labels";1;end;';
const res = flow.parser.parse(statement);
const vertices = flow.parser.yy.getVertices();
const classes = flow.parser.yy.getClasses();
expect(vertices['1'].id).toBe('1');

expect(vertices.get('1').id).toBe('1');
});

it('should add accTitle and accDescr to flow chart', function () {
@ -194,4 +194,47 @@ describe('parsing a flow chart', function () {
with a second line`
);
});

for (const unsafeProp of ['__proto__', 'constructor']) {
it(`should work with node id ${unsafeProp}`, function () {
const flowChart = `graph LR
${unsafeProp} --> A;`;

expect(() => {
flow.parser.parse(flowChart);
}).not.toThrow();
});

it(`should work with tooltip id ${unsafeProp}`, function () {
const flowChart = `graph LR
click ${unsafeProp} callback "${unsafeProp}";`;

expect(() => {
flow.parser.parse(flowChart);
}).not.toThrow();
});

it(`should work with class id ${unsafeProp}`, function () {
const flowChart = `graph LR
${unsafeProp} --> A;
classDef ${unsafeProp} color:#ffffff,fill:#000000;
class ${unsafeProp} ${unsafeProp};`;

expect(() => {
flow.parser.parse(flowChart);
}).not.toThrow();
});

it(`should work with subgraph id ${unsafeProp}`, function () {
const flowChart = `graph LR
${unsafeProp} --> A;
subgraph ${unsafeProp}
C --> D;
end;`;

expect(() => {
flow.parser.parse(flowChart);
}).not.toThrow();
});
}
});
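Note: the `__proto__`/`constructor` cases above exist because a plain object used as a vertex store collides with properties inherited from `Object.prototype`, while a `Map` treats such ids as ordinary keys. A minimal sketch of the difference (the `vertices` name here is illustrative, not the parser's actual variable):

// Plain object: 'constructor' is already reachable through the prototype chain.
const asObject = {};
console.log(asObject['constructor'] !== undefined); // true, inherited from Object.prototype

// Map: user-supplied ids such as '__proto__' are plain keys with no special meaning.
const vertices = new Map();
vertices.set('__proto__', { id: '__proto__' });
console.log(vertices.has('constructor')); // false
console.log(vertices.get('__proto__').id); // '__proto__'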
@ -28,7 +28,7 @@ let tickInterval = undefined;
let todayMarker = '';
let includes = [];
let excludes = [];
let links = {};
let links = new Map();
let sections = [];
let tasks = [];
let currentSection = '';
@ -62,7 +62,7 @@ export const clear = function () {
inclusiveEndDates = false;
topAxis = false;
lastOrder = 0;
links = {};
links = new Map();
commonClear();
weekday = 'sunday';
weekend = 'saturday';
@ -639,7 +639,7 @@ export const setLink = function (ids, _linkStr) {
pushFun(id, () => {
window.open(linkStr, '_self');
});
links[id] = linkStr;
links.set(id, linkStr);
}
});
setClass(ids, 'clickable');
@ -475,14 +475,14 @@ export const draw = function (text, id, version, diagObj) {

rectangles
.filter(function (d) {
return links[d.id] !== undefined;
return links.has(d.id);
})
.each(function (o) {
var taskRect = doc.querySelector('#' + o.id);
var taskText = doc.querySelector('#' + o.id + '-text');
const oldParent = taskRect.parentNode;
var Link = doc.createElement('a');
Link.setAttribute('xlink:href', links[o.id]);
Link.setAttribute('xlink:href', links.get(o.id));
Link.setAttribute('target', '_top');
oldParent.appendChild(Link);
Link.appendChild(taskRect);
@ -256,4 +256,15 @@ row2`;
expect(ganttDb.getWeekday()).toBe(day);
}
);

it.each(['__proto__', 'constructor'])('should allow for a link to %s id', (prop) => {
expect(() =>
parser.parse(`gantt
dateFormat YYYY-MM-DD
section Section
A task :${prop}, 2024-10-01, 3d
click ${prop} href "https://mermaid.js.org/"
`)
).not.toThrow();
});
});
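The gantt changes follow the same pattern: `links` becomes a `Map`, so `setLink` stores entries with `links.set(id, linkStr)` and the renderer checks membership with `links.has(d.id)` instead of comparing against `undefined`. A rough usage sketch (task id and URL are made up for illustration):

const links = new Map();
links.set('task1', 'https://mermaid.js.org/'); // mirrors the storage step in setLink
console.log(links.has('task1')); // true
console.log(links.get('task1')); // 'https://mermaid.js.org/'
console.log(links.has('__proto__')); // false, no prototype fallback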
@ -14,12 +14,12 @@ import {

let mainBranchName = getConfig().gitGraph.mainBranchName;
let mainBranchOrder = getConfig().gitGraph.mainBranchOrder;
let commits = {};
let commits = new Map();
let head = null;
let branchesConfig = {};
branchesConfig[mainBranchName] = { name: mainBranchName, order: mainBranchOrder };
let branches = {};
branches[mainBranchName] = head;
let branchesConfig = new Map();
branchesConfig.set(mainBranchName, { name: mainBranchName, order: mainBranchOrder });
let branches = new Map();
branches.set(mainBranchName, head);
let curBranch = mainBranchName;
let direction = 'LR';
let seq = 0;
@ -46,11 +46,11 @@ function getId() {
// if (Array.isArray(otherCommit.parent)) {
// log.debug('In merge commit:', otherCommit.parent);
// return (
// isFastForwardable(currentCommit, commits[otherCommit.parent[0]]) ||
// isFastForwardable(currentCommit, commits[otherCommit.parent[1]])
// isFastForwardable(currentCommit, commits.get(otherCommit.parent[0])) ||
// isFastForwardable(currentCommit, commits.get(otherCommit.parent[1]))
// );
// } else {
// otherCommit = commits[otherCommit.parent];
// otherCommit = commits.get(otherCommit.parent);
// }
// }
// log.debug(currentCommit.id, otherCommit.id);
@ -118,16 +118,16 @@ export const commit = function (msg, id, type, tag) {
branch: curBranch,
};
head = commit;
commits[commit.id] = commit;
branches[curBranch] = commit.id;
commits.set(commit.id, commit);
branches.set(curBranch, commit.id);
log.debug('in pushCommit ' + commit.id);
};

export const branch = function (name, order) {
name = common.sanitizeText(name, getConfig());
if (branches[name] === undefined) {
branches[name] = head != null ? head.id : null;
branchesConfig[name] = { name, order: order ? parseInt(order, 10) : null };
if (!branches.has(name)) {
branches.set(name, head != null ? head.id : null);
branchesConfig.set(name, { name, order: order ? parseInt(order, 10) : null });
checkout(name);
log.debug('in createBranch');
} else {
@ -151,8 +151,8 @@ export const merge = function (otherBranch, custom_id, override_type, custom_tag
otherBranch = common.sanitizeText(otherBranch, getConfig());
custom_id = common.sanitizeText(custom_id, getConfig());

const currentCommit = commits[branches[curBranch]];
const otherCommit = commits[branches[otherBranch]];
const currentCommit = commits.get(branches.get(curBranch));
const otherCommit = commits.get(branches.get(otherBranch));
if (curBranch === otherBranch) {
let error = new Error('Incorrect usage of "merge". Cannot merge a branch to itself');
error.hash = {
@ -175,7 +175,7 @@ export const merge = function (otherBranch, custom_id, override_type, custom_tag
expected: ['commit'],
};
throw error;
} else if (branches[otherBranch] === undefined) {
} else if (!branches.has(otherBranch)) {
let error = new Error(
'Incorrect usage of "merge". Branch to be merged (' + otherBranch + ') does not exist'
);
@ -209,7 +209,7 @@ export const merge = function (otherBranch, custom_id, override_type, custom_tag
expected: ['branch abc'],
};
throw error;
} else if (custom_id && commits[custom_id] !== undefined) {
} else if (custom_id && commits.has(custom_id)) {
let error = new Error(
'Incorrect usage of "merge". Commit with id:' +
custom_id +
@ -232,15 +232,15 @@ export const merge = function (otherBranch, custom_id, override_type, custom_tag
// return;
// }
// if (isFastForwardable(currentCommit, otherCommit)) {
// branches[curBranch] = branches[otherBranch];
// head = commits[branches[curBranch]];
// branches.set(curBranch, branches.get(otherBranch));
// head = commits.get(branches.get(curBranch));
// } else {
// create merge commit
const commit = {
id: custom_id ? custom_id : seq + '-' + getId(),
message: 'merged branch ' + otherBranch + ' into ' + curBranch,
seq: seq++,
parents: [head == null ? null : head.id, branches[otherBranch]],
parents: [head == null ? null : head.id, branches.get(otherBranch)],
branch: curBranch,
type: commitType.MERGE,
customType: override_type,
@ -248,8 +248,8 @@ export const merge = function (otherBranch, custom_id, override_type, custom_tag
tag: custom_tag ? custom_tag : '',
};
head = commit;
commits[commit.id] = commit;
branches[curBranch] = commit.id;
commits.set(commit.id, commit);
branches.set(curBranch, commit.id);
// }
log.debug(branches);
log.debug('in mergeBranch');
@ -262,7 +262,7 @@ export const cherryPick = function (sourceId, targetId, tag, parentCommitId) {
tag = common.sanitizeText(tag, getConfig());
parentCommitId = common.sanitizeText(parentCommitId, getConfig());

if (!sourceId || commits[sourceId] === undefined) {
if (!sourceId || !commits.has(sourceId)) {
let error = new Error(
'Incorrect usage of "cherryPick". Source commit id should exist and provided'
);
@ -275,7 +275,7 @@ export const cherryPick = function (sourceId, targetId, tag, parentCommitId) {
};
throw error;
}
let sourceCommit = commits[sourceId];
let sourceCommit = commits.get(sourceId);
let sourceCommitBranch = sourceCommit.branch;
if (
parentCommitId &&
@ -292,7 +292,7 @@ export const cherryPick = function (sourceId, targetId, tag, parentCommitId) {
);
throw error;
}
if (!targetId || commits[targetId] === undefined) {
if (!targetId || !commits.has(targetId)) {
// cherry-pick source commit to current branch

if (sourceCommitBranch === curBranch) {
@ -308,7 +308,7 @@ export const cherryPick = function (sourceId, targetId, tag, parentCommitId) {
};
throw error;
}
const currentCommit = commits[branches[curBranch]];
const currentCommit = commits.get(branches.get(curBranch));
if (currentCommit === undefined || !currentCommit) {
let error = new Error(
'Incorrect usage of "cherry-pick". Current branch (' + curBranch + ')has no commits'
@ -336,15 +336,15 @@ export const cherryPick = function (sourceId, targetId, tag, parentCommitId) {
}`,
};
head = commit;
commits[commit.id] = commit;
branches[curBranch] = commit.id;
commits.set(commit.id, commit);
branches.set(curBranch, commit.id);
log.debug(branches);
log.debug('in cherryPick');
}
};
export const checkout = function (branch) {
branch = common.sanitizeText(branch, getConfig());
if (branches[branch] === undefined) {
if (!branches.has(branch)) {
let error = new Error(
'Trying to checkout branch which is not yet created. (Help try using "branch ' + branch + '")'
);
@ -360,8 +360,8 @@ export const checkout = function (branch) {
//log.debug('in createBranch');
} else {
curBranch = branch;
const id = branches[curBranch];
head = commits[id];
const id = branches.get(curBranch);
head = commits.get(id);
}
};

@ -369,10 +369,10 @@ export const checkout = function (branch) {
// log.debug('in reset', commitRef);
// const ref = commitRef.split(':')[0];
// let parentCount = parseInt(commitRef.split(':')[1]);
// let commit = ref === 'HEAD' ? head : commits[branches[ref]];
// let commit = ref === 'HEAD' ? head : commits.get(branches.get(ref));
// log.debug(commit, parentCount);
// while (parentCount > 0) {
// commit = commits[commit.parent];
// commit = commits.get(commit.parent);
// parentCount--;
// if (!commit) {
// const err = 'Critical error - unique parent commit not found during reset';
@ -416,19 +416,19 @@ function prettyPrintCommitHistory(commitArr) {
});
const label = [line, commit.id, commit.seq];
for (let branch in branches) {
if (branches[branch] === commit.id) {
if (branches.get(branch) === commit.id) {
label.push(branch);
}
}
log.debug(label.join(' '));
if (commit.parents && commit.parents.length == 2) {
const newCommit = commits[commit.parents[0]];
const newCommit = commits.get(commit.parents[0]);
upsert(commitArr, commit, newCommit);
commitArr.push(commits[commit.parents[1]]);
commitArr.push(commits.get(commit.parents[1]));
} else if (commit.parents.length == 0) {
return;
} else {
const nextCommit = commits[commit.parents];
const nextCommit = commits.get(commit.parents);
upsert(commitArr, commit, nextCommit);
}
commitArr = uniqBy(commitArr, (c) => c.id);
@ -442,21 +442,21 @@ export const prettyPrint = function () {
};

export const clear = function () {
commits = {};
commits = new Map();
head = null;
let mainBranch = getConfig().gitGraph.mainBranchName;
let mainBranchOrder = getConfig().gitGraph.mainBranchOrder;
branches = {};
branches[mainBranch] = null;
branchesConfig = {};
branchesConfig[mainBranch] = { name: mainBranch, order: mainBranchOrder };
branches = new Map();
branches.set(mainBranch, null);
branchesConfig = new Map();
branchesConfig.set(mainBranch, { name: mainBranch, order: mainBranchOrder });
curBranch = mainBranch;
seq = 0;
commonClear();
};

export const getBranchesAsObjArray = function () {
const branchesArray = Object.values(branchesConfig)
const branchesArray = [...branchesConfig.values()]
.map((branchConfig, i) => {
if (branchConfig.order !== null) {
return branchConfig;
@ -479,9 +479,7 @@ export const getCommits = function () {
return commits;
};
export const getCommitsArray = function () {
const commitArr = Object.keys(commits).map(function (key) {
return commits[key];
});
const commitArr = [...commits.values()];
commitArr.forEach(function (o) {
log.debug(o.id);
});
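In gitGraphAst the `commits`, `branches`, and `branchesConfig` stores all become `Map`s: lookups go through `get`/`has`/`set`, counts come from `size`, and ordered traversal uses the insertion-ordered iterators (`keys()`, `values()`), which is what the updated specs below rely on. A small sketch of the access pattern (commit ids and the helper are made up, not the module's API):

const commits = new Map();
const branches = new Map([['main', null]]);
// Simplified stand-in for what commit()/merge() do with the Maps.
function addCommit(id, branch) {
commits.set(id, { id, branch });
branches.set(branch, id);
}
addCommit('c1', 'main');
addCommit('c2', 'develop');
console.log(commits.size); // 2
console.log([...commits.keys()]); // ['c1', 'c2'] (insertion order)
console.log(commits.get(branches.get('main')).id); // 'c1'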
@ -12,10 +12,10 @@ describe('when parsing a gitGraph', function () {
parser.parse(str);
const commits = parser.yy.getCommits();

expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
expect(parser.yy.getBranches().size).toBe(1);
});

it('should handle a gitGraph definition with empty options', function () {
@ -25,10 +25,10 @@ describe('when parsing a gitGraph', function () {
const commits = parser.yy.getCommits();

expect(parser.yy.getOptions()).toEqual({});
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
expect(parser.yy.getBranches().size).toBe(1);
});

it('should handle a gitGraph definition with valid options', function () {
@ -37,10 +37,10 @@ describe('when parsing a gitGraph', function () {
parser.parse(str);
const commits = parser.yy.getCommits();
expect(parser.yy.getOptions()['key']).toBe('value');
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
expect(parser.yy.getBranches().size).toBe(1);
});

it('should not fail on a gitGraph with malformed json', function () {
@ -48,10 +48,10 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
expect(parser.yy.getBranches().size).toBe(1);
});

it('should handle set direction top to bottom', function () {
@ -60,10 +60,10 @@ describe('when parsing a gitGraph', function () {
parser.parse(str);
const commits = parser.yy.getCommits();

expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('TB');
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
expect(parser.yy.getBranches().size).toBe(1);
});

it('should handle set direction bottom to top', function () {
@ -72,10 +72,10 @@ describe('when parsing a gitGraph', function () {
parser.parse(str);
const commits = parser.yy.getCommits();

expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('BT');
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
expect(parser.yy.getBranches().size).toBe(1);
});

it('should checkout a branch', function () {
@ -84,7 +84,7 @@ describe('when parsing a gitGraph', function () {
parser.parse(str);
const commits = parser.yy.getCommits();

expect(Object.keys(commits).length).toBe(0);
expect(commits.size).toBe(0);
expect(parser.yy.getCurrentBranch()).toBe('new');
});

@ -94,7 +94,7 @@ describe('when parsing a gitGraph', function () {
parser.parse(str);
const commits = parser.yy.getCommits();

expect(Object.keys(commits).length).toBe(0);
expect(commits.size).toBe(0);
expect(parser.yy.getCurrentBranch()).toBe('new');
});

@ -104,11 +104,11 @@ describe('when parsing a gitGraph', function () {
parser.parse(str);
const commits = parser.yy.getCommits();

expect(Object.keys(commits).length).toBe(2);
expect(commits.size).toBe(2);
expect(parser.yy.getCurrentBranch()).toBe('new');
const branchCommit = parser.yy.getBranches()['new'];
const branchCommit = parser.yy.getBranches().get('new');
expect(branchCommit).not.toBeNull();
expect(commits[branchCommit].parent).not.toBeNull();
expect(commits.get(branchCommit).parent).not.toBeNull();
});
it('should handle commit with args', function () {
const str = 'gitGraph:\n' + 'commit "a commit"\n';
@ -116,9 +116,9 @@ describe('when parsing a gitGraph', function () {
parser.parse(str);
const commits = parser.yy.getCommits();

expect(Object.keys(commits).length).toBe(1);
const key = Object.keys(commits)[0];
expect(commits[key].message).toBe('a commit');
expect(commits.size).toBe(1);
const key = commits.keys().next().value;
expect(commits.get(key).message).toBe('a commit');
expect(parser.yy.getCurrentBranch()).toBe('main');
});

@ -136,10 +136,10 @@ describe('when parsing a gitGraph', function () {
parser.parse(str);

const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(3);
expect(commits.size).toBe(3);
expect(parser.yy.getCurrentBranch()).toBe('newbranch');
expect(parser.yy.getBranches()['newbranch']).toEqual(parser.yy.getBranches()['main']);
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches()['newbranch']);
expect(parser.yy.getBranches().get('newbranch')).toEqual(parser.yy.getBranches().get('main'));
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches().get('newbranch'));
});

it.skip('reset can take an argument', function () {
@ -155,9 +155,9 @@ describe('when parsing a gitGraph', function () {
parser.parse(str);

const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(3);
expect(commits.size).toBe(3);
expect(parser.yy.getCurrentBranch()).toBe('newbranch');
const main = commits[parser.yy.getBranches()['main']];
const main = commits.get(parser.yy.getBranches().get('main'));
expect(parser.yy.getHead().id).toEqual(main.parent);
});

@ -175,10 +175,10 @@ describe('when parsing a gitGraph', function () {
parser.parse(str);

const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(4);
expect(commits.size).toBe(4);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getBranches()['newbranch']).toEqual(parser.yy.getBranches()['main']);
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches()['newbranch']);
expect(parser.yy.getBranches().get('newbranch')).toEqual(parser.yy.getBranches().get('main'));
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches().get('newbranch'));
});

it('should handle cases when merge is a noop', function () {
@ -194,10 +194,12 @@ describe('when parsing a gitGraph', function () {
parser.parse(str);

const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(4);
expect(commits.size).toBe(4);
expect(parser.yy.getCurrentBranch()).toBe('newbranch');
expect(parser.yy.getBranches()['newbranch']).not.toEqual(parser.yy.getBranches()['main']);
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches()['newbranch']);
expect(parser.yy.getBranches().get('newbranch')).not.toEqual(
parser.yy.getBranches().get('main')
);
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches().get('newbranch'));
});

it('should handle merge with 2 parents', function () {
@ -215,10 +217,12 @@ describe('when parsing a gitGraph', function () {
parser.parse(str);

const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(5);
expect(commits.size).toBe(5);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getBranches()['newbranch']).not.toEqual(parser.yy.getBranches()['main']);
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches()['main']);
expect(parser.yy.getBranches().get('newbranch')).not.toEqual(
parser.yy.getBranches().get('main')
);
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches().get('main'));
});

it.skip('should handle ff merge when history walk has two parents (merge commit)', function () {
@ -239,10 +243,10 @@ describe('when parsing a gitGraph', function () {
parser.parse(str);

const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(7);
expect(commits.size).toBe(7);
expect(parser.yy.getCurrentBranch()).toBe('newbranch');
expect(parser.yy.getBranches()['newbranch']).toEqual(parser.yy.getBranches()['main']);
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches()['main']);
expect(parser.yy.getBranches().get('newbranch')).toEqual(parser.yy.getBranches().get('main'));
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches().get('main'));

parser.yy.prettyPrint();
});
@ -13,15 +13,15 @@ describe('when parsing a gitGraph', function () {
parser.parse(str);
const commits = parser.yy.getCommits();
//console.info(commits);
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
const key = Object.keys(commits)[0];
expect(commits[key].message).toBe('');
expect(commits[key].id).not.toBeNull();
expect(commits[key].tag).toBe('');
expect(commits[key].type).toBe(0);
expect(parser.yy.getBranches().size).toBe(1);
const key = commits.keys().next().value;
expect(commits.get(key).message).toBe('');
expect(commits.get(key).id).not.toBeNull();
expect(commits.get(key).tag).toBe('');
expect(commits.get(key).type).toBe(0);
});

it('should handle a gitGraph commit with custom commit id only', function () {
@ -30,15 +30,15 @@ describe('when parsing a gitGraph', function () {
`;
parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
const key = Object.keys(commits)[0];
expect(commits[key].message).toBe('');
expect(commits[key].id).toBe('1111');
expect(commits[key].tag).toBe('');
expect(commits[key].type).toBe(0);
expect(parser.yy.getBranches().size).toBe(1);
const key = commits.keys().next().value;
expect(commits.get(key).message).toBe('');
expect(commits.get(key).id).toBe('1111');
expect(commits.get(key).tag).toBe('');
expect(commits.get(key).type).toBe(0);
});

it('should handle a gitGraph commit with custom commit tag only', function () {
@ -48,15 +48,15 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
const key = Object.keys(commits)[0];
expect(commits[key].message).toBe('');
expect(commits[key].id).not.toBeNull();
expect(commits[key].tag).toBe('test');
expect(commits[key].type).toBe(0);
expect(parser.yy.getBranches().size).toBe(1);
const key = commits.keys().next().value;
expect(commits.get(key).message).toBe('');
expect(commits.get(key).id).not.toBeNull();
expect(commits.get(key).tag).toBe('test');
expect(commits.get(key).type).toBe(0);
});

it('should handle a gitGraph commit with custom commit type HIGHLIGHT only', function () {
@ -66,15 +66,15 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
const key = Object.keys(commits)[0];
expect(commits[key].message).toBe('');
expect(commits[key].id).not.toBeNull();
expect(commits[key].tag).toBe('');
expect(commits[key].type).toBe(2);
expect(parser.yy.getBranches().size).toBe(1);
const key = commits.keys().next().value;
expect(commits.get(key).message).toBe('');
expect(commits.get(key).id).not.toBeNull();
expect(commits.get(key).tag).toBe('');
expect(commits.get(key).type).toBe(2);
});

it('should handle a gitGraph commit with custom commit type REVERSE only', function () {
@ -84,15 +84,15 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
const key = Object.keys(commits)[0];
expect(commits[key].message).toBe('');
expect(commits[key].id).not.toBeNull();
expect(commits[key].tag).toBe('');
expect(commits[key].type).toBe(1);
expect(parser.yy.getBranches().size).toBe(1);
const key = commits.keys().next().value;
expect(commits.get(key).message).toBe('');
expect(commits.get(key).id).not.toBeNull();
expect(commits.get(key).tag).toBe('');
expect(commits.get(key).type).toBe(1);
});

it('should handle a gitGraph commit with custom commit type NORMAL only', function () {
@ -102,15 +102,15 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
const key = Object.keys(commits)[0];
expect(commits[key].message).toBe('');
expect(commits[key].id).not.toBeNull();
expect(commits[key].tag).toBe('');
expect(commits[key].type).toBe(0);
expect(parser.yy.getBranches().size).toBe(1);
const key = commits.keys().next().value;
expect(commits.get(key).message).toBe('');
expect(commits.get(key).id).not.toBeNull();
expect(commits.get(key).tag).toBe('');
expect(commits.get(key).type).toBe(0);
});

it('should handle a gitGraph commit with custom commit msg only', function () {
@ -120,15 +120,15 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
const key = Object.keys(commits)[0];
expect(commits[key].message).toBe('test commit');
expect(commits[key].id).not.toBeNull();
expect(commits[key].tag).toBe('');
expect(commits[key].type).toBe(0);
expect(parser.yy.getBranches().size).toBe(1);
const key = commits.keys().next().value;
expect(commits.get(key).message).toBe('test commit');
expect(commits.get(key).id).not.toBeNull();
expect(commits.get(key).tag).toBe('');
expect(commits.get(key).type).toBe(0);
});

it('should handle a gitGraph commit with custom commit "msg:" key only', function () {
@ -138,15 +138,15 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
const key = Object.keys(commits)[0];
expect(commits[key].message).toBe('test commit');
expect(commits[key].id).not.toBeNull();
expect(commits[key].tag).toBe('');
expect(commits[key].type).toBe(0);
expect(parser.yy.getBranches().size).toBe(1);
const key = commits.keys().next().value;
expect(commits.get(key).message).toBe('test commit');
expect(commits.get(key).id).not.toBeNull();
expect(commits.get(key).tag).toBe('');
expect(commits.get(key).type).toBe(0);
});

it('should handle a gitGraph commit with custom commit id, tag only', function () {
@ -156,15 +156,15 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
const key = Object.keys(commits)[0];
expect(commits[key].message).toBe('');
expect(commits[key].id).toBe('1111');
expect(commits[key].tag).toBe('test tag');
expect(commits[key].type).toBe(0);
expect(parser.yy.getBranches().size).toBe(1);
const key = commits.keys().next().value;
expect(commits.get(key).message).toBe('');
expect(commits.get(key).id).toBe('1111');
expect(commits.get(key).tag).toBe('test tag');
expect(commits.get(key).type).toBe(0);
});

it('should handle a gitGraph commit with custom commit type, tag only', function () {
@ -174,15 +174,15 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
const key = Object.keys(commits)[0];
expect(commits[key].message).toBe('');
expect(commits[key].id).not.toBeNull();
expect(commits[key].tag).toBe('test tag');
expect(commits[key].type).toBe(2);
expect(parser.yy.getBranches().size).toBe(1);
const key = commits.keys().next().value;
expect(commits.get(key).message).toBe('');
expect(commits.get(key).id).not.toBeNull();
expect(commits.get(key).tag).toBe('test tag');
expect(commits.get(key).type).toBe(2);
});

it('should handle a gitGraph commit with custom commit tag and type only', function () {
@ -192,15 +192,15 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
const key = Object.keys(commits)[0];
expect(commits[key].message).toBe('');
expect(commits[key].id).not.toBeNull();
expect(commits[key].tag).toBe('test tag');
expect(commits[key].type).toBe(2);
expect(parser.yy.getBranches().size).toBe(1);
const key = commits.keys().next().value;
expect(commits.get(key).message).toBe('');
expect(commits.get(key).id).not.toBeNull();
expect(commits.get(key).tag).toBe('test tag');
expect(commits.get(key).type).toBe(2);
});

it('should handle a gitGraph commit with custom commit id, type and tag only', function () {
@ -210,15 +210,15 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
const key = Object.keys(commits)[0];
expect(commits[key].message).toBe('');
expect(commits[key].id).toBe('1111');
expect(commits[key].tag).toBe('test tag');
expect(commits[key].type).toBe(1);
expect(parser.yy.getBranches().size).toBe(1);
const key = commits.keys().next().value;
expect(commits.get(key).message).toBe('');
expect(commits.get(key).id).toBe('1111');
expect(commits.get(key).tag).toBe('test tag');
expect(commits.get(key).type).toBe(1);
});

it('should handle a gitGraph commit with custom commit id, type, tag and msg', function () {
@ -228,15 +228,15 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
const key = Object.keys(commits)[0];
expect(commits[key].message).toBe('test msg');
expect(commits[key].id).toBe('1111');
expect(commits[key].tag).toBe('test tag');
expect(commits[key].type).toBe(1);
expect(parser.yy.getBranches().size).toBe(1);
const key = commits.keys().next().value;
expect(commits.get(key).message).toBe('test msg');
expect(commits.get(key).id).toBe('1111');
expect(commits.get(key).tag).toBe('test tag');
expect(commits.get(key).type).toBe(1);
});

it('should handle a gitGraph commit with custom type,tag, msg, commit id,', function () {
@ -247,15 +247,15 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
const key = Object.keys(commits)[0];
expect(commits[key].message).toBe('test msg');
expect(commits[key].id).toBe('1111');
expect(commits[key].tag).toBe('test tag');
expect(commits[key].type).toBe(1);
expect(parser.yy.getBranches().size).toBe(1);
const key = commits.keys().next().value;
expect(commits.get(key).message).toBe('test msg');
expect(commits.get(key).id).toBe('1111');
expect(commits.get(key).tag).toBe('test tag');
expect(commits.get(key).type).toBe(1);
});

it('should handle a gitGraph commit with custom tag, msg, commit id, type,', function () {
@ -265,15 +265,15 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
const key = Object.keys(commits)[0];
expect(commits[key].message).toBe('test msg');
expect(commits[key].id).toBe('1111');
expect(commits[key].tag).toBe('test tag');
expect(commits[key].type).toBe(1);
expect(parser.yy.getBranches().size).toBe(1);
const key = commits.keys().next().value;
expect(commits.get(key).message).toBe('test msg');
expect(commits.get(key).id).toBe('1111');
expect(commits.get(key).tag).toBe('test tag');
expect(commits.get(key).type).toBe(1);
});

it('should handle a gitGraph commit with custom msg, commit id, type,tag', function () {
@ -283,15 +283,15 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
const key = Object.keys(commits)[0];
expect(commits[key].message).toBe('test msg');
expect(commits[key].id).toBe('1111');
expect(commits[key].tag).toBe('test tag');
expect(commits[key].type).toBe(1);
expect(parser.yy.getBranches().size).toBe(1);
const key = commits.keys().next().value;
expect(commits.get(key).message).toBe('test msg');
expect(commits.get(key).id).toBe('1111');
expect(commits.get(key).tag).toBe('test tag');
expect(commits.get(key).type).toBe(1);
});

it('should handle 3 straight commits', function () {
@ -303,10 +303,10 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(3);
expect(commits.size).toBe(3);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
expect(parser.yy.getBranches().size).toBe(1);
});

it('should handle new branch creation', function () {
@ -317,10 +317,10 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('testBranch');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
expect(parser.yy.getBranches().size).toBe(2);
});

it('should allow quoted branch names', function () {
@ -335,16 +335,14 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(3);
expect(commits.size).toBe(3);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
const commit1 = Object.keys(commits)[0];
const commit2 = Object.keys(commits)[1];
const commit3 = Object.keys(commits)[2];
expect(commits[commit1].branch).toBe('main');
expect(commits[commit2].branch).toBe('branch');
expect(commits[commit3].branch).toBe('main');
expect(parser.yy.getBranches().size).toBe(2);
const [commit1, commit2, commit3] = commits.keys();
expect(commits.get(commit1).branch).toBe('main');
expect(commits.get(commit2).branch).toBe('branch');
expect(commits.get(commit3).branch).toBe('main');
expect(parser.yy.getBranchesAsObjArray()).toStrictEqual([{ name: 'main' }, { name: 'branch' }]);
});

@ -356,10 +354,10 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('azAZ_-./test');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
expect(parser.yy.getBranches().size).toBe(2);
});

it('should allow branch names starting with numbers', function () {
@ -371,10 +369,10 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('1.0.1');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
expect(parser.yy.getBranches().size).toBe(2);
});

it('should allow branch names starting with unusual prefixes', function () {
@ -392,11 +390,11 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('A');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(7);
expect(Object.keys(parser.yy.getBranches())).toEqual(
expect(parser.yy.getBranches().size).toBe(7);
expect([...parser.yy.getBranches().keys()]).toEqual(
expect.arrayContaining([
'branch01',
'checkout02',
@ -417,10 +415,10 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('testBranch');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
expect(parser.yy.getBranches().size).toBe(2);
});
it('should handle new branch checkout with order', function () {
const str = `gitGraph:
@ -432,9 +430,9 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('test3');
expect(Object.keys(parser.yy.getBranches()).length).toBe(4);
expect(parser.yy.getBranches().size).toBe(4);
expect(parser.yy.getBranchesAsObjArray()).toStrictEqual([
{ name: 'main' },
{ name: 'test3' },
@ -452,9 +450,9 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('test3');
expect(Object.keys(parser.yy.getBranches()).length).toBe(4);
expect(parser.yy.getBranches().size).toBe(4);
expect(parser.yy.getBranchesAsObjArray()).toStrictEqual([
{ name: 'main' },
{ name: 'test2' },
@ -473,16 +471,15 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(2);
expect(commits.size).toBe(2);
expect(parser.yy.getCurrentBranch()).toBe('testBranch');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
const commit1 = Object.keys(commits)[0];
const commit2 = Object.keys(commits)[1];
expect(commits[commit1].branch).toBe('main');
expect(commits[commit1].parents).toStrictEqual([]);
expect(commits[commit2].branch).toBe('testBranch');
expect(commits[commit2].parents).toStrictEqual([commit1]);
expect(parser.yy.getBranches().size).toBe(2);
const [commit1, commit2] = commits.keys();
expect(commits.get(commit1).branch).toBe('main');
expect(commits.get(commit1).parents).toStrictEqual([]);
expect(commits.get(commit2).branch).toBe('testBranch');
expect(commits.get(commit2).parents).toStrictEqual([commit1]);
});

it('should handle new branch checkout & commit and merge', function () {
@ -498,22 +495,22 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(4);
expect(commits.size).toBe(4);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
const commit1 = Object.keys(commits)[0];
const commit2 = Object.keys(commits)[1];
const commit3 = Object.keys(commits)[2];
const commit4 = Object.keys(commits)[3];
expect(commits[commit1].branch).toBe('main');
expect(commits[commit1].parents).toStrictEqual([]);
expect(commits[commit2].branch).toBe('testBranch');
expect(commits[commit2].parents).toStrictEqual([commits[commit1].id]);
expect(commits[commit3].branch).toBe('testBranch');
expect(commits[commit3].parents).toStrictEqual([commits[commit2].id]);
expect(commits[commit4].branch).toBe('main');
expect(commits[commit4].parents).toStrictEqual([commits[commit1].id, commits[commit3].id]);
expect(parser.yy.getBranches().size).toBe(2);
const [commit1, commit2, commit3, commit4] = commits.keys();
expect(commits.get(commit1).branch).toBe('main');
expect(commits.get(commit1).parents).toStrictEqual([]);
expect(commits.get(commit2).branch).toBe('testBranch');
expect(commits.get(commit2).parents).toStrictEqual([commits.get(commit1).id]);
expect(commits.get(commit3).branch).toBe('testBranch');
expect(commits.get(commit3).parents).toStrictEqual([commits.get(commit2).id]);
expect(commits.get(commit4).branch).toBe('main');
expect(commits.get(commit4).parents).toStrictEqual([
commits.get(commit1).id,
commits.get(commit3).id,
]);
expect(parser.yy.getBranchesAsObjArray()).toStrictEqual([
{ name: 'main' },
{ name: 'testBranch' },
@ -529,10 +526,10 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(1);
expect(commits.size).toBe(1);
expect(parser.yy.getCurrentBranch()).toBe('testBranch');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
expect(parser.yy.getBranches().size).toBe(2);
});

it('should handle new branch switch & commit', function () {
@ -545,16 +542,15 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(2);
expect(commits.size).toBe(2);
expect(parser.yy.getCurrentBranch()).toBe('testBranch');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
const commit1 = Object.keys(commits)[0];
const commit2 = Object.keys(commits)[1];
expect(commits[commit1].branch).toBe('main');
expect(commits[commit1].parents).toStrictEqual([]);
expect(commits[commit2].branch).toBe('testBranch');
expect(commits[commit2].parents).toStrictEqual([commit1]);
expect(parser.yy.getBranches().size).toBe(2);
const [commit1, commit2] = commits.keys();
expect(commits.get(commit1).branch).toBe('main');
expect(commits.get(commit1).parents).toStrictEqual([]);
expect(commits.get(commit2).branch).toBe('testBranch');
expect(commits.get(commit2).parents).toStrictEqual([commit1]);
});

it('should handle new branch switch & commit and merge', function () {
@ -570,22 +566,22 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(4);
expect(commits.size).toBe(4);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
const commit1 = Object.keys(commits)[0];
const commit2 = Object.keys(commits)[1];
const commit3 = Object.keys(commits)[2];
const commit4 = Object.keys(commits)[3];
expect(commits[commit1].branch).toBe('main');
expect(commits[commit1].parents).toStrictEqual([]);
expect(commits[commit2].branch).toBe('testBranch');
expect(commits[commit2].parents).toStrictEqual([commits[commit1].id]);
expect(commits[commit3].branch).toBe('testBranch');
expect(commits[commit3].parents).toStrictEqual([commits[commit2].id]);
expect(commits[commit4].branch).toBe('main');
expect(commits[commit4].parents).toStrictEqual([commits[commit1].id, commits[commit3].id]);
expect(parser.yy.getBranches().size).toBe(2);
const [commit1, commit2, commit3, commit4] = commits.keys();
expect(commits.get(commit1).branch).toBe('main');
expect(commits.get(commit1).parents).toStrictEqual([]);
expect(commits.get(commit2).branch).toBe('testBranch');
expect(commits.get(commit2).parents).toStrictEqual([commits.get(commit1).id]);
expect(commits.get(commit3).branch).toBe('testBranch');
expect(commits.get(commit3).parents).toStrictEqual([commits.get(commit2).id]);
expect(commits.get(commit4).branch).toBe('main');
expect(commits.get(commit4).parents).toStrictEqual([
commits.get(commit1).id,
commits.get(commit3).id,
]);
expect(parser.yy.getBranchesAsObjArray()).toStrictEqual([
{ name: 'main' },
{ name: 'testBranch' },
@ -604,23 +600,23 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(3);
expect(commits.size).toBe(3);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
const commit1 = Object.keys(commits)[0];
const commit2 = Object.keys(commits)[1];
const commit3 = Object.keys(commits)[2];
expect(parser.yy.getBranches().size).toBe(2);
const [commit1, commit2, commit3] = commits.keys();
expect(commits.get(commit1).branch).toBe('main');
expect(commits.get(commit1).parents).toStrictEqual([]);

expect(commits[commit1].branch).toBe('main');
expect(commits[commit1].parents).toStrictEqual([]);
expect(commits.get(commit2).branch).toBe('testBranch');
expect(commits.get(commit2).parents).toStrictEqual([commits.get(commit1).id]);

expect(commits[commit2].branch).toBe('testBranch');
expect(commits[commit2].parents).toStrictEqual([commits[commit1].id]);

expect(commits[commit3].branch).toBe('main');
expect(commits[commit3].parents).toStrictEqual([commits[commit1].id, commits[commit2].id]);
expect(commits[commit3].tag).toBe('merge-tag');
expect(commits.get(commit3).branch).toBe('main');
expect(commits.get(commit3).parents).toStrictEqual([
commits.get(commit1).id,
commits.get(commit2).id,
]);
expect(commits.get(commit3).tag).toBe('merge-tag');
expect(parser.yy.getBranchesAsObjArray()).toStrictEqual([
{ name: 'main' },
{ name: 'testBranch' },
@ -652,7 +648,7 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
expect(Object.keys(commits).length).toBe(7);
expect(commits.size).toBe(7);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getDirection()).toBe('LR');

@ -665,7 +661,7 @@ describe('when parsing a gitGraph', function () {
testBranch2Merge,
testBranch3Commit,
testBranch3Merge,
] = Object.values(commits);
] = [...commits.values()];

expect(mainCommit.branch).toBe('main');
expect(mainCommit.parents).toStrictEqual([]);
@ -708,9 +704,9 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
const cherryPickCommitID = Object.keys(commits)[2];
expect(commits[cherryPickCommitID].tag).toBe('cherry-pick:A');
expect(commits[cherryPickCommitID].branch).toBe('main');
const cherryPickCommitID = [...commits.keys()][2];
expect(commits.get(cherryPickCommitID).tag).toBe('cherry-pick:A');
expect(commits.get(cherryPickCommitID).branch).toBe('main');
});

it('should support cherry-picking commits with custom tag', function () {
@ -724,9 +720,9 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
const cherryPickCommitID = Object.keys(commits)[2];
expect(commits[cherryPickCommitID].tag).toBe('MyTag');
expect(commits[cherryPickCommitID].branch).toBe('main');
const cherryPickCommitID = [...commits.keys()][2];
expect(commits.get(cherryPickCommitID).tag).toBe('MyTag');
expect(commits.get(cherryPickCommitID).branch).toBe('main');
});

it('should support cherry-picking commits with no tag', function () {
@ -740,9 +736,9 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
const cherryPickCommitID = Object.keys(commits)[2];
expect(commits[cherryPickCommitID].tag).toBe('');
expect(commits[cherryPickCommitID].branch).toBe('main');
const cherryPickCommitID = [...commits.keys()][2];
expect(commits.get(cherryPickCommitID).tag).toBe('');
expect(commits.get(cherryPickCommitID).branch).toBe('main');
});

it('should support cherry-picking of merge commits', function () {
@ -761,9 +757,9 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
const cherryPickCommitID = Object.keys(commits)[4];
expect(commits[cherryPickCommitID].tag).toBe('cherry-pick:M|parent:B');
expect(commits[cherryPickCommitID].branch).toBe('release');
const cherryPickCommitID = [...commits.keys()][4];
expect(commits.get(cherryPickCommitID).tag).toBe('cherry-pick:M|parent:B');
expect(commits.get(cherryPickCommitID).branch).toBe('release');
});

it('should support cherry-picking of merge commits with tag', function () {
@ -782,9 +778,9 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
const cherryPickCommitID = Object.keys(commits)[4];
expect(commits[cherryPickCommitID].tag).toBe('v1.0');
expect(commits[cherryPickCommitID].branch).toBe('release');
const cherryPickCommitID = [...commits.keys()][4];
expect(commits.get(cherryPickCommitID).tag).toBe('v1.0');
expect(commits.get(cherryPickCommitID).branch).toBe('release');
});

it('should support cherry-picking of merge commits with additional commit', function () {
@ -805,9 +801,9 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
const cherryPickCommitID = Object.keys(commits)[5];
expect(commits[cherryPickCommitID].tag).toBe('v2.1:ZERO');
expect(commits[cherryPickCommitID].branch).toBe('release');
const cherryPickCommitID = [...commits.keys()][5];
expect(commits.get(cherryPickCommitID).tag).toBe('v2.1:ZERO');
expect(commits.get(cherryPickCommitID).branch).toBe('release');
});

it('should support cherry-picking of merge commits with empty tag', function () {
@ -829,11 +825,11 @@ describe('when parsing a gitGraph', function () {

parser.parse(str);
const commits = parser.yy.getCommits();
const cherryPickCommitID = Object.keys(commits)[5];
const cherryPickCommitID2 = Object.keys(commits)[7];
expect(commits[cherryPickCommitID].tag).toBe('');
expect(commits[cherryPickCommitID2].tag).toBe('');
expect(commits[cherryPickCommitID].branch).toBe('release');
const cherryPickCommitID = [...commits.keys()][5];
const cherryPickCommitID2 = [...commits.keys()][7];
expect(commits.get(cherryPickCommitID).tag).toBe('');
expect(commits.get(cherryPickCommitID2).tag).toBe('');
expect(commits.get(cherryPickCommitID).branch).toBe('release');
});

it('should fail cherry-picking of merge commits if the parent of merge commits is not specified', function () {
@ -1086,4 +1082,26 @@ describe('when parsing a gitGraph', function () {
expect(parser.yy.getAccDescription()).toBe('This is a description\nusing multiple lines');
});
});
describe('unsafe properties', () => {
|
||||
for (const prop of ['__proto__', 'constructor']) {
|
||||
it(`should work with custom commit id or branch name ${prop}`, () => {
|
||||
const str = `gitGraph
|
||||
commit id:"${prop}"
|
||||
branch ${prop}
|
||||
checkout ${prop}
|
||||
commit
|
||||
checkout main
|
||||
merge ${prop}
|
||||
`;
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(commits.size).toBe(3);
|
||||
expect(commits.keys().next().value).toBe(prop);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getBranches().size).toBe(2);
|
||||
expect(parser.yy.getBranchesAsObjArray()[1].name).toBe(prop);
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
|
@ -3,7 +3,12 @@ import { getConfig, setupGraphViewbox } from '../../diagram-api/diagramAPI.js';
|
||||
import { log } from '../../logger.js';
|
||||
import utils from '../../utils.js';
|
||||
|
||||
let allCommitsDict = {};
|
||||
/**
|
||||
* @typedef {Map<string, { id: string, message: string, seq: number, type: number, tag: string, parents: string[], branch: string }>} CommitMap
|
||||
*/
|
||||
|
||||
/** @type {CommitMap} */
|
||||
let allCommitsDict = new Map();
|
||||
|
||||
const commitType = {
|
||||
NORMAL: 0,
|
||||
@ -22,9 +27,9 @@ let maxPos = 0;
|
||||
let dir = 'LR';
|
||||
let defaultPos = 30;
|
||||
const clear = () => {
|
||||
branchPos = {};
|
||||
commitPos = {};
|
||||
allCommitsDict = {};
|
||||
branchPos = new Map();
|
||||
commitPos = new Map();
|
||||
allCommitsDict = new Map();
|
||||
maxPos = 0;
|
||||
lanes = [];
|
||||
dir = 'LR';
|
||||
@ -78,7 +83,8 @@ const findClosestParent = (parents) => {
|
||||
let maxPosition = 0;
|
||||
|
||||
parents.forEach((parent) => {
|
||||
const parentPosition = dir === 'TB' || dir === 'BT' ? commitPos[parent].y : commitPos[parent].x;
|
||||
const parentPosition =
|
||||
dir === 'TB' || dir === 'BT' ? commitPos.get(parent).y : commitPos.get(parent).x;
|
||||
if (parentPosition >= maxPosition) {
|
||||
closestParent = parent;
|
||||
maxPosition = parentPosition;
|
||||
@ -101,7 +107,7 @@ const findClosestParentBT = (parents) => {
|
||||
let maxPosition = Infinity;
|
||||
|
||||
parents.forEach((parent) => {
|
||||
const parentPosition = commitPos[parent].y;
|
||||
const parentPosition = commitPos.get(parent).y;
|
||||
if (parentPosition <= maxPosition) {
|
||||
closestParent = parent;
|
||||
maxPosition = parentPosition;
|
||||
@ -119,7 +125,7 @@ const findClosestParentBT = (parents) => {
|
||||
* of the remaining commits.
|
||||
*
|
||||
* @param {any} sortedKeys
|
||||
* @param {any} commits
|
||||
* @param {CommitMap} commits
|
||||
* @param {any} defaultPos
|
||||
* @param {any} commitStep
|
||||
* @param {any} layoutOffset
|
||||
@ -129,38 +135,38 @@ const setParallelBTPos = (sortedKeys, commits, defaultPos, commitStep, layoutOff
|
||||
let maxPosition = defaultPos;
|
||||
let roots = [];
|
||||
sortedKeys.forEach((key) => {
|
||||
const commit = commits[key];
|
||||
const commit = commits.get(key);
|
||||
if (commit.parents.length) {
|
||||
const closestParent = findClosestParent(commit.parents);
|
||||
curPos = commitPos[closestParent].y + commitStep;
|
||||
curPos = commitPos.get(closestParent).y + commitStep;
|
||||
if (curPos >= maxPosition) {
|
||||
maxPosition = curPos;
|
||||
}
|
||||
} else {
|
||||
roots.push(commit);
|
||||
}
|
||||
const x = branchPos[commit.branch].pos;
|
||||
const x = branchPos.get(commit.branch).pos;
|
||||
const y = curPos + layoutOffset;
|
||||
commitPos[commit.id] = { x: x, y: y };
|
||||
commitPos.set(commit.id, { x: x, y: y });
|
||||
});
|
||||
curPos = maxPosition;
|
||||
roots.forEach((commit) => {
|
||||
const posWithOffset = curPos + defaultPos;
|
||||
const y = posWithOffset;
|
||||
const x = branchPos[commit.branch].pos;
|
||||
commitPos[commit.id] = { x: x, y: y };
|
||||
const x = branchPos.get(commit.branch).pos;
|
||||
commitPos.set(commit.id, { x: x, y: y });
|
||||
});
|
||||
sortedKeys.forEach((key) => {
|
||||
const commit = commits[key];
|
||||
const commit = commits.get(key);
|
||||
if (commit.parents.length) {
|
||||
const closestParent = findClosestParentBT(commit.parents);
|
||||
curPos = commitPos[closestParent].y - commitStep;
|
||||
curPos = commitPos.get(closestParent).y - commitStep;
|
||||
if (curPos <= maxPosition) {
|
||||
maxPosition = curPos;
|
||||
}
|
||||
const x = branchPos[commit.branch].pos;
|
||||
const x = branchPos.get(commit.branch).pos;
|
||||
const y = curPos - layoutOffset;
|
||||
commitPos[commit.id] = { x: x, y: y };
|
||||
commitPos.set(commit.id, { x: x, y: y });
|
||||
}
|
||||
});
|
||||
};
|
||||
@ -171,7 +177,7 @@ const setParallelBTPos = (sortedKeys, commits, defaultPos, commitStep, layoutOff
|
||||
* vertical layering correct in the graph.
|
||||
*
|
||||
* @param {any} svg
|
||||
* @param {any} commits
|
||||
* @param {CommitMap} commits
|
||||
* @param {any} modifyGraph
|
||||
*/
|
||||
const drawCommits = (svg, commits, modifyGraph) => {
|
||||
@ -183,18 +189,18 @@ const drawCommits = (svg, commits, modifyGraph) => {
|
||||
if (dir === 'TB' || dir === 'BT') {
|
||||
pos = defaultPos;
|
||||
}
|
||||
const keys = Object.keys(commits);
|
||||
const keys = [...commits.keys()];
|
||||
const isParallelCommits = gitGraphConfig.parallelCommits;
|
||||
const layoutOffset = 10;
|
||||
const commitStep = 40;
|
||||
let sortedKeys =
|
||||
dir !== 'BT' || (dir === 'BT' && isParallelCommits)
|
||||
? keys.sort((a, b) => {
|
||||
return commits[a].seq - commits[b].seq;
|
||||
return commits.get(a).seq - commits.get(b).seq;
|
||||
})
|
||||
: keys
|
||||
.sort((a, b) => {
|
||||
return commits[a].seq - commits[b].seq;
|
||||
return commits.get(a).seq - commits.get(b).seq;
|
||||
})
|
||||
.reverse();
|
||||
|
||||
@ -203,31 +209,31 @@ const drawCommits = (svg, commits, modifyGraph) => {
|
||||
sortedKeys = sortedKeys.reverse();
|
||||
}
|
||||
sortedKeys.forEach((key) => {
|
||||
const commit = commits[key];
|
||||
const commit = commits.get(key);
|
||||
if (isParallelCommits) {
|
||||
if (commit.parents.length) {
|
||||
const closestParent =
|
||||
dir === 'BT' ? findClosestParentBT(commit.parents) : findClosestParent(commit.parents);
|
||||
if (dir === 'TB') {
|
||||
pos = commitPos[closestParent].y + commitStep;
|
||||
pos = commitPos.get(closestParent).y + commitStep;
|
||||
} else if (dir === 'BT') {
|
||||
pos = commitPos[key].y - commitStep;
|
||||
pos = commitPos.get(key).y - commitStep;
|
||||
} else {
|
||||
pos = commitPos[closestParent].x + commitStep;
|
||||
pos = commitPos.get(closestParent).x + commitStep;
|
||||
}
|
||||
} else {
|
||||
if (dir === 'TB') {
|
||||
pos = defaultPos;
|
||||
} else if (dir === 'BT') {
|
||||
pos = commitPos[key].y - commitStep;
|
||||
pos = commitPos.get(key).y - commitStep;
|
||||
} else {
|
||||
pos = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
const posWithOffset = dir === 'BT' && isParallelCommits ? pos : pos + layoutOffset;
|
||||
const y = dir === 'TB' || dir === 'BT' ? posWithOffset : branchPos[commit.branch].pos;
|
||||
const x = dir === 'TB' || dir === 'BT' ? branchPos[commit.branch].pos : posWithOffset;
|
||||
const y = dir === 'TB' || dir === 'BT' ? posWithOffset : branchPos.get(commit.branch).pos;
|
||||
const x = dir === 'TB' || dir === 'BT' ? branchPos.get(commit.branch).pos : posWithOffset;
|
||||
|
||||
// Don't draw the commits now but calculate the positioning which is used by the branch lines etc.
|
||||
if (modifyGraph) {
|
||||
@ -265,7 +271,7 @@ const drawCommits = (svg, commits, modifyGraph) => {
|
||||
circle.attr(
|
||||
'class',
|
||||
`commit ${commit.id} commit-highlight${
|
||||
branchPos[commit.branch].index % THEME_COLOR_LIMIT
|
||||
branchPos.get(commit.branch).index % THEME_COLOR_LIMIT
|
||||
} ${typeClass}-outer`
|
||||
);
|
||||
gBullets
|
||||
@ -277,7 +283,7 @@ const drawCommits = (svg, commits, modifyGraph) => {
|
||||
.attr(
|
||||
'class',
|
||||
`commit ${commit.id} commit${
|
||||
branchPos[commit.branch].index % THEME_COLOR_LIMIT
|
||||
branchPos.get(commit.branch).index % THEME_COLOR_LIMIT
|
||||
} ${typeClass}-inner`
|
||||
);
|
||||
} else if (commitSymbolType === commitType.CHERRY_PICK) {
|
||||
@ -324,7 +330,7 @@ const drawCommits = (svg, commits, modifyGraph) => {
|
||||
circle.attr('r', commit.type === commitType.MERGE ? 9 : 10);
|
||||
circle.attr(
|
||||
'class',
|
||||
`commit ${commit.id} commit${branchPos[commit.branch].index % THEME_COLOR_LIMIT}`
|
||||
`commit ${commit.id} commit${branchPos.get(commit.branch).index % THEME_COLOR_LIMIT}`
|
||||
);
|
||||
if (commitSymbolType === commitType.MERGE) {
|
||||
const circle2 = gBullets.append('circle');
|
||||
@ -334,7 +340,7 @@ const drawCommits = (svg, commits, modifyGraph) => {
|
||||
circle2.attr(
|
||||
'class',
|
||||
`commit ${typeClass} ${commit.id} commit${
|
||||
branchPos[commit.branch].index % THEME_COLOR_LIMIT
|
||||
branchPos.get(commit.branch).index % THEME_COLOR_LIMIT
|
||||
}`
|
||||
);
|
||||
}
|
||||
@ -345,16 +351,16 @@ const drawCommits = (svg, commits, modifyGraph) => {
|
||||
.attr(
|
||||
'class',
|
||||
`commit ${typeClass} ${commit.id} commit${
|
||||
branchPos[commit.branch].index % THEME_COLOR_LIMIT
|
||||
branchPos.get(commit.branch).index % THEME_COLOR_LIMIT
|
||||
}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (dir === 'TB' || dir === 'BT') {
|
||||
commitPos[commit.id] = { x: x, y: posWithOffset };
|
||||
commitPos.set(commit.id, { x: x, y: posWithOffset });
|
||||
} else {
|
||||
commitPos[commit.id] = { x: posWithOffset, y: y };
|
||||
commitPos.set(commit.id, { x: posWithOffset, y: y });
|
||||
}
|
||||
|
||||
// The first iteration over the commits are for positioning purposes, this
|
||||
@ -482,7 +488,7 @@ const drawCommits = (svg, commits, modifyGraph) => {
|
||||
* @param {any} commitB
|
||||
* @param p1
|
||||
* @param p2
|
||||
* @param allCommits
|
||||
* @param {CommitMap} allCommits
|
||||
* @returns {boolean}
|
||||
* If there are commits between
|
||||
* commitA's x-position
|
||||
@ -496,7 +502,7 @@ const shouldRerouteArrow = (commitA, commitB, p1, p2, allCommits) => {
|
||||
const branchToGetCurve = commitBIsFurthest ? commitB.branch : commitA.branch;
|
||||
const isOnBranchToGetCurve = (x) => x.branch === branchToGetCurve;
|
||||
const isBetweenCommits = (x) => x.seq > commitA.seq && x.seq < commitB.seq;
|
||||
return Object.values(allCommits).some((commitX) => {
|
||||
return [...allCommits.values()].some((commitX) => {
|
||||
return isBetweenCommits(commitX) && isOnBranchToGetCurve(commitX);
|
||||
});
|
||||
};
|
||||
@ -531,11 +537,11 @@ const findLane = (y1, y2, depth = 0) => {
|
||||
* @param {any} svg
|
||||
* @param {any} commitA
|
||||
* @param {any} commitB
|
||||
* @param {any} allCommits
|
||||
* @param {CommitMap} allCommits
|
||||
*/
|
||||
const drawArrow = (svg, commitA, commitB, allCommits) => {
|
||||
const p1 = commitPos[commitA.id]; // arrowStart
|
||||
const p2 = commitPos[commitB.id]; // arrowEnd
|
||||
const p1 = commitPos.get(commitA.id); // arrowStart
|
||||
const p2 = commitPos.get(commitB.id); // arrowEnd
|
||||
const arrowNeedsRerouting = shouldRerouteArrow(commitA, commitB, p1, p2, allCommits);
|
||||
// log.debug('drawArrow', p1, p2, arrowNeedsRerouting, commitA.id, commitB.id);
|
||||
|
||||
@ -545,9 +551,9 @@ const drawArrow = (svg, commitA, commitB, allCommits) => {
|
||||
let arc2 = '';
|
||||
let radius = 0;
|
||||
let offset = 0;
|
||||
let colorClassNum = branchPos[commitB.branch].index;
|
||||
let colorClassNum = branchPos.get(commitB.branch).index;
|
||||
if (commitB.type === commitType.MERGE && commitA.id !== commitB.parents[0]) {
|
||||
colorClassNum = branchPos[commitA.branch].index;
|
||||
colorClassNum = branchPos.get(commitA.branch).index;
|
||||
}
|
||||
|
||||
let lineDef;
|
||||
@ -570,7 +576,7 @@ const drawArrow = (svg, commitA, commitB, allCommits) => {
|
||||
} else {
|
||||
// Source commit is on branch position right of destination commit
|
||||
// so render arrow leftward with colour of source branch
|
||||
colorClassNum = branchPos[commitA.branch].index;
|
||||
colorClassNum = branchPos.get(commitA.branch).index;
|
||||
lineDef = `M ${p1.x} ${p1.y} L ${lineX + radius} ${p1.y} ${arc} ${lineX} ${
|
||||
p1.y + offset
|
||||
} L ${lineX} ${p2.y - radius} ${arc2} ${lineX - offset} ${p2.y} L ${p2.x} ${p2.y}`;
|
||||
@ -585,7 +591,7 @@ const drawArrow = (svg, commitA, commitB, allCommits) => {
|
||||
} else {
|
||||
// Source commit is on branch position right of destination commit
|
||||
// so render arrow leftward with colour of source branch
|
||||
colorClassNum = branchPos[commitA.branch].index;
|
||||
colorClassNum = branchPos.get(commitA.branch).index;
|
||||
lineDef = `M ${p1.x} ${p1.y} L ${lineX + radius} ${p1.y} ${arc2} ${lineX} ${
|
||||
p1.y - offset
|
||||
} L ${lineX} ${p2.y + radius} ${arc} ${lineX - offset} ${p2.y} L ${p2.x} ${p2.y}`;
|
||||
@ -600,7 +606,7 @@ const drawArrow = (svg, commitA, commitB, allCommits) => {
|
||||
} else {
|
||||
// Source commit is on branch positioned below destination commit
|
||||
// so render arrow upward with colour of source branch
|
||||
colorClassNum = branchPos[commitA.branch].index;
|
||||
colorClassNum = branchPos.get(commitA.branch).index;
|
||||
lineDef = `M ${p1.x} ${p1.y} L ${p1.x} ${lineY + radius} ${arc2} ${
|
||||
p1.x + offset
|
||||
} ${lineY} L ${p2.x - radius} ${lineY} ${arc} ${p2.x} ${lineY - offset} L ${p2.x} ${p2.y}`;
|
||||
@ -710,13 +716,17 @@ const drawArrow = (svg, commitA, commitB, allCommits) => {
|
||||
.attr('class', 'arrow arrow' + (colorClassNum % THEME_COLOR_LIMIT));
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {*} svg
|
||||
* @param {CommitMap} commits
|
||||
*/
|
||||
const drawArrows = (svg, commits) => {
|
||||
const gArrows = svg.append('g').attr('class', 'commit-arrows');
|
||||
Object.keys(commits).forEach((key) => {
|
||||
const commit = commits[key];
|
||||
[...commits.keys()].forEach((key) => {
|
||||
const commit = commits.get(key);
|
||||
if (commit.parents && commit.parents.length > 0) {
|
||||
commit.parents.forEach((parent) => {
|
||||
drawArrow(gArrows, commits[parent], commit, commits);
|
||||
drawArrow(gArrows, commits.get(parent), commit, commits);
|
||||
});
|
||||
}
|
||||
});
|
||||
@ -734,7 +744,7 @@ const drawBranches = (svg, branches) => {
|
||||
branches.forEach((branch, index) => {
|
||||
const adjustIndexForTheme = index % THEME_COLOR_LIMIT;
|
||||
|
||||
const pos = branchPos[branch.name].pos;
|
||||
const pos = branchPos.get(branch.name).pos;
|
||||
const line = g.append('line');
|
||||
line.attr('x1', 0);
|
||||
line.attr('y1', pos);
|
||||
@ -822,7 +832,7 @@ export const draw = function (txt, id, ver, diagObj) {
|
||||
label.node().appendChild(labelElement);
|
||||
let bbox = labelElement.getBBox();
|
||||
|
||||
branchPos[branch.name] = { pos, index };
|
||||
branchPos.set(branch.name, { pos, index });
|
||||
pos +=
|
||||
50 +
|
||||
(gitGraphConfig.rotateCommitLabel ? 40 : 0) +
|
||||
|
@ -16,7 +16,7 @@ describe('pie', () => {
|
||||
`);
|
||||
|
||||
const sections = db.getSections();
|
||||
expect(sections['ash']).toBe(100);
|
||||
expect(sections.get('ash')).toBe(100);
|
||||
});
|
||||
|
||||
it('should handle simple pie', async () => {
|
||||
@ -26,8 +26,8 @@ describe('pie', () => {
|
||||
`);
|
||||
|
||||
const sections = db.getSections();
|
||||
expect(sections['ash']).toBe(60);
|
||||
expect(sections['bat']).toBe(40);
|
||||
expect(sections.get('ash')).toBe(60);
|
||||
expect(sections.get('bat')).toBe(40);
|
||||
});
|
||||
|
||||
it('should handle simple pie with showData', async () => {
|
||||
@ -39,8 +39,8 @@ describe('pie', () => {
|
||||
expect(db.getShowData()).toBeTruthy();
|
||||
|
||||
const sections = db.getSections();
|
||||
expect(sections['ash']).toBe(60);
|
||||
expect(sections['bat']).toBe(40);
|
||||
expect(sections.get('ash')).toBe(60);
|
||||
expect(sections.get('bat')).toBe(40);
|
||||
});
|
||||
|
||||
it('should handle simple pie with comments', async () => {
|
||||
@ -51,8 +51,8 @@ describe('pie', () => {
|
||||
`);
|
||||
|
||||
const sections = db.getSections();
|
||||
expect(sections['ash']).toBe(60);
|
||||
expect(sections['bat']).toBe(40);
|
||||
expect(sections.get('ash')).toBe(60);
|
||||
expect(sections.get('bat')).toBe(40);
|
||||
});
|
||||
|
||||
it('should handle simple pie with a title', async () => {
|
||||
@ -64,8 +64,8 @@ describe('pie', () => {
|
||||
expect(db.getDiagramTitle()).toBe('a 60/40 pie');
|
||||
|
||||
const sections = db.getSections();
|
||||
expect(sections['ash']).toBe(60);
|
||||
expect(sections['bat']).toBe(40);
|
||||
expect(sections.get('ash')).toBe(60);
|
||||
expect(sections.get('bat')).toBe(40);
|
||||
});
|
||||
|
||||
it('should handle simple pie with an acc title (accTitle)', async () => {
|
||||
@ -80,8 +80,8 @@ describe('pie', () => {
|
||||
expect(db.getAccTitle()).toBe('a neat acc title');
|
||||
|
||||
const sections = db.getSections();
|
||||
expect(sections['ash']).toBe(60);
|
||||
expect(sections['bat']).toBe(40);
|
||||
expect(sections.get('ash')).toBe(60);
|
||||
expect(sections.get('bat')).toBe(40);
|
||||
});
|
||||
|
||||
it('should handle simple pie with an acc description (accDescr)', async () => {
|
||||
@ -96,8 +96,8 @@ describe('pie', () => {
|
||||
expect(db.getAccDescription()).toBe('a neat description');
|
||||
|
||||
const sections = db.getSections();
|
||||
expect(sections['ash']).toBe(60);
|
||||
expect(sections['bat']).toBe(40);
|
||||
expect(sections.get('ash')).toBe(60);
|
||||
expect(sections.get('bat')).toBe(40);
|
||||
});
|
||||
|
||||
it('should handle simple pie with a multiline acc description (accDescr)', async () => {
|
||||
@ -115,8 +115,8 @@ describe('pie', () => {
|
||||
expect(db.getAccDescription()).toBe('a neat description\non multiple lines');
|
||||
|
||||
const sections = db.getSections();
|
||||
expect(sections['ash']).toBe(60);
|
||||
expect(sections['bat']).toBe(40);
|
||||
expect(sections.get('ash')).toBe(60);
|
||||
expect(sections.get('bat')).toBe(40);
|
||||
});
|
||||
|
||||
it('should handle simple pie with positive decimal', async () => {
|
||||
@ -126,8 +126,8 @@ describe('pie', () => {
|
||||
`);
|
||||
|
||||
const sections = db.getSections();
|
||||
expect(sections['ash']).toBe(60.67);
|
||||
expect(sections['bat']).toBe(40);
|
||||
expect(sections.get('ash')).toBe(60.67);
|
||||
expect(sections.get('bat')).toBe(40);
|
||||
});
|
||||
|
||||
it('should handle simple pie with negative decimal', () => {
|
||||
@ -138,6 +138,16 @@ describe('pie', () => {
|
||||
`);
|
||||
}).rejects.toThrowError();
|
||||
});
|
||||
|
||||
it('should handle unsafe properties', async () => {
|
||||
await expect(
|
||||
parser.parse(`pie title Unsafe props test
|
||||
"__proto__" : 386
|
||||
"constructor" : 85
|
||||
"prototype" : 15`)
|
||||
).resolves.toBeUndefined();
|
||||
expect([...db.getSections().keys()]).toEqual(['__proto__', 'constructor', 'prototype']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('config', () => {
|
||||
|
@ -16,7 +16,7 @@ import DEFAULT_CONFIG from '../../defaultConfig.js';
|
||||
export const DEFAULT_PIE_CONFIG: Required<PieDiagramConfig> = DEFAULT_CONFIG.pie;
|
||||
|
||||
export const DEFAULT_PIE_DB: RequiredDeep<PieFields> = {
|
||||
sections: {},
|
||||
sections: new Map(),
|
||||
showData: false,
|
||||
config: DEFAULT_PIE_CONFIG,
|
||||
} as const;
|
||||
@ -28,14 +28,14 @@ const config: Required<PieDiagramConfig> = structuredClone(DEFAULT_PIE_CONFIG);
|
||||
const getConfig = (): Required<PieDiagramConfig> => structuredClone(config);
|
||||
|
||||
const clear = (): void => {
|
||||
sections = structuredClone(DEFAULT_PIE_DB.sections);
|
||||
sections = new Map();
|
||||
showData = DEFAULT_PIE_DB.showData;
|
||||
commonClear();
|
||||
};
|
||||
|
||||
const addSection = ({ label, value }: D3Section): void => {
|
||||
if (sections[label] === undefined) {
|
||||
sections[label] = value;
|
||||
if (!sections.has(label)) {
|
||||
sections.set(label, value);
|
||||
log.debug(`added new section: ${label}, with value: ${value}`);
|
||||
}
|
||||
};
|
||||
|
@ -11,7 +11,7 @@ import { selectSvgElement } from '../../rendering-util/selectSvgElement.js';
|
||||
|
||||
const createPieArcs = (sections: Sections): d3.PieArcDatum<D3Section>[] => {
|
||||
// Compute the position of each group on the pie:
|
||||
const pieData: D3Section[] = Object.entries(sections)
|
||||
const pieData: D3Section[] = [...sections.entries()]
|
||||
.map((element: [string, number]): D3Section => {
|
||||
return {
|
||||
label: element[0],
|
||||
@ -105,8 +105,8 @@ export const draw: DrawDefinition = (text, id, _version, diagObj) => {
|
||||
.attr('class', 'pieCircle');
|
||||
|
||||
let sum = 0;
|
||||
Object.keys(sections).forEach((key: string): void => {
|
||||
sum += sections[key];
|
||||
sections.forEach((section) => {
|
||||
sum += section;
|
||||
});
|
||||
// Now add the percentage.
|
||||
// Use the centroid method to get the best coordinates.
|
||||
|
@ -34,7 +34,7 @@ export interface PieStyleOptions {
|
||||
pieOpacity: string;
|
||||
}
|
||||
|
||||
export type Sections = Record<string, number>;
|
||||
export type Sections = Map<string, number>;
|
||||
|
||||
export interface D3Section {
|
||||
label: string;
|
||||
|
@ -20,6 +20,7 @@ const mockDB: Record<string, Mock<any, any>> = {
|
||||
setYAxisBottomText: vi.fn(),
|
||||
setDiagramTitle: vi.fn(),
|
||||
addPoint: vi.fn(),
|
||||
addClass: vi.fn(),
|
||||
};
|
||||
|
||||
function clearMocks() {
|
||||
@ -423,4 +424,13 @@ describe('Testing quadrantChart jison file', () => {
|
||||
['stroke-width: 10px']
|
||||
);
|
||||
});
|
||||
|
||||
it('should be able to handle constructor as a className', () => {
|
||||
const str = `quadrantChart
|
||||
classDef constructor fill:#ff0000
|
||||
Microsoft:::constructor: [0.75, 0.75]
|
||||
`;
|
||||
expect(parserFnConstructor(str)).not.toThrow();
|
||||
expect(mockDB.addClass).toHaveBeenCalledWith('constructor', ['fill:#ff0000']);
|
||||
});
|
||||
});
|
||||
|
@ -127,7 +127,7 @@ export class QuadrantBuilder {
|
||||
private config: QuadrantBuilderConfig;
|
||||
private themeConfig: QuadrantBuilderThemeConfig;
|
||||
private data: QuadrantBuilderData;
|
||||
private classes: Record<string, StylesObject> = {};
|
||||
private classes: Map<string, StylesObject> = new Map();
|
||||
|
||||
constructor() {
|
||||
this.config = this.getDefaultConfig();
|
||||
@ -202,7 +202,7 @@ export class QuadrantBuilder {
|
||||
this.config = this.getDefaultConfig();
|
||||
this.themeConfig = this.getDefaultThemeConfig();
|
||||
this.data = this.getDefaultData();
|
||||
this.classes = {};
|
||||
this.classes = new Map();
|
||||
log.info('clear called');
|
||||
}
|
||||
|
||||
@ -215,7 +215,7 @@ export class QuadrantBuilder {
|
||||
}
|
||||
|
||||
addClass(className: string, styles: StylesObject) {
|
||||
this.classes[className] = styles;
|
||||
this.classes.set(className, styles);
|
||||
}
|
||||
|
||||
setConfig(config: Partial<QuadrantBuilderConfig>) {
|
||||
@ -486,7 +486,7 @@ export class QuadrantBuilder {
|
||||
.range([quadrantHeight + quadrantTop, quadrantTop]);
|
||||
|
||||
const points: QuadrantPointType[] = this.data.points.map((point) => {
|
||||
const classStyles = this.classes[point.className as keyof typeof this.classes];
|
||||
const classStyles = this.classes.get(point.className!);
|
||||
if (classStyles) {
|
||||
point = { ...classStyles, ...point };
|
||||
}
|
||||
|
@ -33,14 +33,14 @@ describe('when parsing requirement diagram it...', function () {
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
expect(Object.keys(requirementDb.getRequirements()).length).toBe(1);
|
||||
expect(requirementDb.getRequirements().size).toBe(1);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.id).toBe(expectedId);
|
||||
expect(foundReq.text).toBe(expectedText);
|
||||
|
||||
expect(Object.keys(requirementDb.getElements()).length).toBe(0);
|
||||
expect(requirementDb.getElements().size).toBe(0);
|
||||
expect(Object.keys(requirementDb.getRelationships()).length).toBe(0);
|
||||
});
|
||||
|
||||
@ -61,10 +61,10 @@ describe('when parsing requirement diagram it...', function () {
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
expect(Object.keys(requirementDb.getRequirements()).length).toBe(0);
|
||||
expect(Object.keys(requirementDb.getElements()).length).toBe(1);
|
||||
expect(requirementDb.getRequirements().size).toBe(0);
|
||||
expect(requirementDb.getElements().size).toBe(1);
|
||||
|
||||
let foundElement = requirementDb.getElements()[expectedName];
|
||||
let foundElement = requirementDb.getElements().get(expectedName);
|
||||
expect(foundElement).toBeDefined();
|
||||
expect(foundElement.type).toBe(expectedType);
|
||||
expect(foundElement.docRef).toBe(expectedDocRef);
|
||||
@ -121,8 +121,8 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
expect(Object.keys(requirementDb.getRequirements()).length).toBe(0);
|
||||
expect(Object.keys(requirementDb.getElements()).length).toBe(0);
|
||||
expect(requirementDb.getRequirements().size).toBe(0);
|
||||
expect(requirementDb.getElements().size).toBe(0);
|
||||
expect(Object.keys(requirementDb.getRelationships()).length).toBe(1);
|
||||
|
||||
let foundRelationship = requirementDb.getRelationships()[0];
|
||||
@ -152,7 +152,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.type).toBe(expectedType);
|
||||
});
|
||||
@ -179,7 +179,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.type).toBe(expectedType);
|
||||
});
|
||||
@ -206,7 +206,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.type).toBe(expectedType);
|
||||
});
|
||||
@ -233,7 +233,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.type).toBe(expectedType);
|
||||
});
|
||||
@ -260,7 +260,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.type).toBe(expectedType);
|
||||
});
|
||||
@ -287,7 +287,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.type).toBe(expectedType);
|
||||
});
|
||||
@ -314,7 +314,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.risk).toBe(expectedRisk);
|
||||
});
|
||||
@ -341,7 +341,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.risk).toBe(expectedRisk);
|
||||
});
|
||||
@ -368,7 +368,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.risk).toBe(expectedRisk);
|
||||
});
|
||||
@ -395,7 +395,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.verifyMethod).toBe(expectedVerifyMethod);
|
||||
});
|
||||
@ -422,7 +422,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.verifyMethod).toBe(expectedVerifyMethod);
|
||||
});
|
||||
@ -449,7 +449,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.verifyMethod).toBe(expectedVerifyMethod);
|
||||
});
|
||||
@ -476,7 +476,7 @@ line 2`;
|
||||
|
||||
reqDiagram.parser.parse(doc);
|
||||
|
||||
let foundReq = requirementDb.getRequirements()[expectedName];
|
||||
let foundReq = requirementDb.getRequirements().get(expectedName);
|
||||
expect(foundReq).toBeDefined();
|
||||
expect(foundReq.verifyMethod).toBe(expectedVerifyMethod);
|
||||
});
|
||||
@ -578,4 +578,25 @@ line 2`;
|
||||
let foundRelationship = requirementDb.getRelationships()[0];
|
||||
expect(foundRelationship.type).toBe(expectedType);
|
||||
});
|
||||
|
||||
for (const property of ['__proto__', 'constructor']) {
|
||||
it(`will accept ${property} as requirement id`, function () {
|
||||
reqDiagram.parser.parse(`requirementDiagram
|
||||
requirement ${property} {
|
||||
id: 1
|
||||
text: the test text.
|
||||
risk: high
|
||||
verifymethod: test
|
||||
}`);
|
||||
expect(reqDiagram.parser.yy.getRequirements().size).toBe(1);
|
||||
});
|
||||
|
||||
it(`will accept ${property} as element id`, function () {
|
||||
reqDiagram.parser.parse(`requirementDiagram
|
||||
element ${property} {
|
||||
type: simulation
|
||||
}`);
|
||||
expect(reqDiagram.parser.yy.getElements().size).toBe(1);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
@ -11,9 +11,9 @@ import {
|
||||
|
||||
let relations = [];
|
||||
let latestRequirement = {};
|
||||
let requirements = {};
|
||||
let requirements = new Map();
|
||||
let latestElement = {};
|
||||
let elements = {};
|
||||
let elements = new Map();
|
||||
|
||||
const RequirementType = {
|
||||
REQUIREMENT: 'Requirement',
|
||||
@ -48,8 +48,8 @@ const Relationships = {
|
||||
};
|
||||
|
||||
const addRequirement = (name, type) => {
|
||||
if (requirements[name] === undefined) {
|
||||
requirements[name] = {
|
||||
if (!requirements.has(name)) {
|
||||
requirements.set(name, {
|
||||
name,
|
||||
type,
|
||||
|
||||
@ -57,11 +57,11 @@ const addRequirement = (name, type) => {
|
||||
text: latestRequirement.text,
|
||||
risk: latestRequirement.risk,
|
||||
verifyMethod: latestRequirement.verifyMethod,
|
||||
};
|
||||
});
|
||||
}
|
||||
latestRequirement = {};
|
||||
|
||||
return requirements[name];
|
||||
return requirements.get(name);
|
||||
};
|
||||
|
||||
const getRequirements = () => requirements;
|
||||
@ -91,18 +91,17 @@ const setNewReqVerifyMethod = (verifyMethod) => {
|
||||
};
|
||||
|
||||
const addElement = (name) => {
|
||||
if (elements[name] === undefined) {
|
||||
elements[name] = {
|
||||
if (!elements.has(name)) {
|
||||
elements.set(name, {
|
||||
name,
|
||||
|
||||
type: latestElement.type,
|
||||
docRef: latestElement.docRef,
|
||||
};
|
||||
});
|
||||
log.info('Added new requirement: ', name);
|
||||
}
|
||||
latestElement = {};
|
||||
|
||||
return elements[name];
|
||||
return elements.get(name);
|
||||
};
|
||||
|
||||
const getElements = () => elements;
|
||||
@ -132,9 +131,9 @@ const getRelationships = () => relations;
|
||||
const clear = () => {
|
||||
relations = [];
|
||||
latestRequirement = {};
|
||||
requirements = {};
|
||||
requirements = new Map();
|
||||
latestElement = {};
|
||||
elements = {};
|
||||
elements = new Map();
|
||||
commonClear();
|
||||
};
|
||||
|
||||
|
@ -191,9 +191,13 @@ const drawRelationshipFromLayout = function (svg, rel, g, insert, diagObj) {
|
||||
return;
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {Map<string, any>} reqs
|
||||
* @param graph
|
||||
* @param svgNode
|
||||
*/
|
||||
export const drawReqs = (reqs, graph, svgNode) => {
|
||||
Object.keys(reqs).forEach((reqName) => {
|
||||
let req = reqs[reqName];
|
||||
reqs.forEach((req, reqName) => {
|
||||
reqName = elementString(reqName);
|
||||
log.info('Added new requirement: ', reqName);
|
||||
|
||||
@ -236,9 +240,13 @@ export const drawReqs = (reqs, graph, svgNode) => {
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {Map<string, any>} els
|
||||
* @param graph
|
||||
* @param svgNode
|
||||
*/
|
||||
export const drawElements = (els, graph, svgNode) => {
|
||||
Object.keys(els).forEach((elName) => {
|
||||
let el = els[elName];
|
||||
els.forEach((el, elName) => {
|
||||
const id = elementString(elName);
|
||||
|
||||
const groupNode = svgNode.append('g').attr('id', id);
|
||||
|
@ -20,5 +20,14 @@ describe('Sankey diagram', function () {
|
||||
|
||||
sankey.parser.parse(graphDefinition);
|
||||
});
|
||||
|
||||
it('allows __proto__ as id', function () {
|
||||
sankey.parser.parse(
|
||||
prepareTextForParsing(`sankey-beta
|
||||
__proto__,A,0.597
|
||||
A,__proto__,0.403
|
||||
`)
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -15,12 +15,12 @@ let links: SankeyLink[] = [];
|
||||
// Array of nodes guarantees their order
|
||||
let nodes: SankeyNode[] = [];
|
||||
// We also have to track nodes uniqueness (by ID)
|
||||
let nodesMap: Record<string, SankeyNode> = {};
|
||||
let nodesMap: Map<string, SankeyNode> = new Map();
|
||||
|
||||
const clear = (): void => {
|
||||
links = [];
|
||||
nodes = [];
|
||||
nodesMap = {};
|
||||
nodesMap = new Map();
|
||||
commonClear();
|
||||
};
|
||||
|
||||
@ -48,11 +48,13 @@ class SankeyNode {
|
||||
const findOrCreateNode = (ID: string): SankeyNode => {
|
||||
ID = common.sanitizeText(ID, getConfig());
|
||||
|
||||
if (!nodesMap[ID]) {
|
||||
nodesMap[ID] = new SankeyNode(ID);
|
||||
nodes.push(nodesMap[ID]);
|
||||
let node = nodesMap.get(ID);
|
||||
if (node === undefined) {
|
||||
node = new SankeyNode(ID);
|
||||
nodesMap.set(ID, node);
|
||||
nodes.push(node);
|
||||
}
|
||||
return nodesMap[ID];
|
||||
return node;
|
||||
};
|
||||
|
||||
const getNodes = () => nodes;
|
||||
|
@ -15,9 +15,9 @@ import type { Actor, AddMessageParams, Box, Message, Note } from './types.js';
|
||||
|
||||
interface SequenceState {
|
||||
prevActor?: string;
|
||||
actors: Record<string, Actor>;
|
||||
createdActors: Record<string, number>;
|
||||
destroyedActors: Record<string, number>;
|
||||
actors: Map<string, Actor>;
|
||||
createdActors: Map<string, number>;
|
||||
destroyedActors: Map<string, number>;
|
||||
boxes: Box[];
|
||||
messages: Message[];
|
||||
notes: Note[];
|
||||
@ -30,9 +30,9 @@ interface SequenceState {
|
||||
|
||||
const state = new ImperativeState<SequenceState>(() => ({
|
||||
prevActor: undefined,
|
||||
actors: {},
|
||||
createdActors: {},
|
||||
destroyedActors: {},
|
||||
actors: new Map(),
|
||||
createdActors: new Map(),
|
||||
destroyedActors: new Map(),
|
||||
boxes: [],
|
||||
messages: [],
|
||||
notes: [],
|
||||
@ -60,7 +60,7 @@ export const addActor = function (
|
||||
type: string
|
||||
) {
|
||||
let assignedBox = state.records.currentBox;
|
||||
const old = state.records.actors[id];
|
||||
const old = state.records.actors.get(id);
|
||||
if (old) {
|
||||
// If already set and trying to set to a new one throw error
|
||||
if (state.records.currentBox && old.box && state.records.currentBox !== old.box) {
|
||||
@ -87,7 +87,7 @@ export const addActor = function (
|
||||
description = { text: name, wrap: null, type };
|
||||
}
|
||||
|
||||
state.records.actors[id] = {
|
||||
state.records.actors.set(id, {
|
||||
box: assignedBox,
|
||||
name: name,
|
||||
description: description.text,
|
||||
@ -98,9 +98,12 @@ export const addActor = function (
|
||||
actorCnt: null,
|
||||
rectData: null,
|
||||
type: type ?? 'participant',
|
||||
};
|
||||
if (state.records.prevActor && state.records.actors[state.records.prevActor]) {
|
||||
state.records.actors[state.records.prevActor].nextActor = id;
|
||||
});
|
||||
if (state.records.prevActor) {
|
||||
const prevActorInRecords = state.records.actors.get(state.records.prevActor);
|
||||
if (prevActorInRecords) {
|
||||
prevActorInRecords.nextActor = id;
|
||||
}
|
||||
}
|
||||
|
||||
if (state.records.currentBox) {
|
||||
@ -207,10 +210,11 @@ export const getDestroyedActors = function () {
|
||||
return state.records.destroyedActors;
|
||||
};
|
||||
export const getActor = function (id: string) {
|
||||
return state.records.actors[id];
|
||||
// TODO: do we ever use this function in a way that it might return undefined?
|
||||
return state.records.actors.get(id)!;
|
||||
};
|
||||
export const getActorKeys = function () {
|
||||
return Object.keys(state.records.actors);
|
||||
return [...state.records.actors.keys()];
|
||||
};
|
||||
export const enableSequenceNumbers = function () {
|
||||
state.records.sequenceNumbersEnabled = true;
|
||||
@ -499,18 +503,18 @@ export const apply = function (param: any | AddMessageParams | AddMessageParams[
|
||||
addActor(param.actor, param.actor, param.description, param.draw);
|
||||
break;
|
||||
case 'createParticipant':
|
||||
if (state.records.actors[param.actor]) {
|
||||
if (state.records.actors.has(param.actor)) {
|
||||
throw new Error(
|
||||
"It is not possible to have actors with the same id, even if one is destroyed before the next is created. Use 'AS' aliases to simulate the behavior"
|
||||
);
|
||||
}
|
||||
state.records.lastCreated = param.actor;
|
||||
addActor(param.actor, param.actor, param.description, param.draw);
|
||||
state.records.createdActors[param.actor] = state.records.messages.length;
|
||||
state.records.createdActors.set(param.actor, state.records.messages.length);
|
||||
break;
|
||||
case 'destroyParticipant':
|
||||
state.records.lastDestroyed = param.actor;
|
||||
state.records.destroyedActors[param.actor] = state.records.messages.length;
|
||||
state.records.destroyedActors.set(param.actor, state.records.messages.length);
|
||||
break;
|
||||
case 'activeStart':
|
||||
addSignal(param.actor, undefined, undefined, param.signalType);
|
||||
|
@ -192,8 +192,8 @@ Bob-->Alice: I am good thanks!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -235,8 +235,8 @@ Bob-->Alice: I am good thanks!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
expect(diagram.db.getAccDescription()).toBe('');
|
||||
const messages = diagram.db.getMessages();
|
||||
@ -258,8 +258,8 @@ Bob-->Alice: I am good thanks!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
expect(diagram.db.getAccDescription()).toBe('');
|
||||
const messages = diagram.db.getMessages();
|
||||
@ -311,8 +311,8 @@ Bob-->Alice: I am good thanks!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -328,8 +328,8 @@ Bob-->Alice-in-Wonderland:I am good thanks!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors['Alice-in-Wonderland'].description).toBe('Alice-in-Wonderland');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice-in-Wonderland').description).toBe('Alice-in-Wonderland');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -348,9 +348,9 @@ Bob-->Alice-in-Wonderland:I am good thanks!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(Object.keys(actors)).toEqual(['Alice-in-Wonderland', 'Bob']);
|
||||
expect(actors['Alice-in-Wonderland'].description).toBe('Alice-in-Wonderland');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect([...actors.keys()]).toEqual(['Alice-in-Wonderland', 'Bob']);
|
||||
expect(actors.get('Alice-in-Wonderland').description).toBe('Alice-in-Wonderland');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -371,9 +371,9 @@ B-->A: I am good thanks!`;
|
||||
|
||||
const actors = diagram.db.getActors();
|
||||
|
||||
expect(Object.keys(actors)).toEqual(['A', 'B']);
|
||||
expect(actors.A.description).toBe('Alice');
|
||||
expect(actors.B.description).toBe('Bob');
|
||||
expect([...actors.keys()]).toEqual(['A', 'B']);
|
||||
expect(actors.get('A').description).toBe('Alice');
|
||||
expect(actors.get('B').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
expect(messages.length).toBe(2);
|
||||
@ -396,12 +396,12 @@ sequenceDiagram
|
||||
await mermaidAPI.parse(str);
|
||||
|
||||
const actors = diagram.db.getActors();
|
||||
expect(Object.keys(actors)).toEqual(['Alice', 'Bob', 'John', 'Mandy', 'Joan']);
|
||||
expect(actors.Alice.description).toBe('Alice2');
|
||||
expect(actors.Alice.type).toBe('actor');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.John.type).toBe('participant');
|
||||
expect(actors.Joan.type).toBe('participant');
|
||||
expect([...actors.keys()]).toEqual(['Alice', 'Bob', 'John', 'Mandy', 'Joan']);
|
||||
expect(actors.get('Alice').description).toBe('Alice2');
|
||||
expect(actors.get('Alice').type).toBe('actor');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
expect(actors.get('John').type).toBe('participant');
|
||||
expect(actors.get('Joan').type).toBe('participant');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
expect(messages.length).toBe(5);
|
||||
@ -419,9 +419,9 @@ B-->A: I am good thanks!`;
|
||||
await mermaidAPI.parse(str);
|
||||
|
||||
const actors = diagram.db.getActors();
|
||||
expect(Object.keys(actors)).toEqual(['A', 'B']);
|
||||
expect(actors.A.description).toBe('Alice');
|
||||
expect(actors.B.description).toBe('Bob');
|
||||
expect([...actors.keys()]).toEqual(['A', 'B']);
|
||||
expect(actors.get('A').description).toBe('Alice');
|
||||
expect(actors.get('B').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
expect(messages.length).toBe(2);
|
||||
@ -435,8 +435,8 @@ Alice-xBob:Hello Bob, how are you?`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -450,8 +450,8 @@ Alice--xBob:Hello Bob, how are you?`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -465,8 +465,8 @@ Alice-)Bob:Hello Bob, how are you?`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -480,8 +480,8 @@ Alice--)Bob:Hello Bob, how are you?`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -495,8 +495,8 @@ Alice->>Bob:Hello Bob, how are you?`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -508,8 +508,8 @@ Alice->>Bob:Hello Bob, how are you?`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -526,8 +526,8 @@ deactivate Bob`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -547,8 +547,8 @@ deactivate Bob`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -571,8 +571,8 @@ deactivate Bob`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -624,8 +624,8 @@ deactivate Bob`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -645,8 +645,8 @@ deactivate Bob`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -660,8 +660,8 @@ sequenceDiagram;Alice->Bob: Hello Bob, how are you?;Note right of Bob: Bob think
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -680,8 +680,8 @@ Bob-->Alice: I am good thanks!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -700,8 +700,8 @@ Bob-->Alice: I am good thanks!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -725,8 +725,8 @@ Bob-->John: Jolly good!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -754,10 +754,10 @@ note right of 1: multiline<br \t/>text
|
||||
await mermaidAPI.parse(str);
|
||||
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors['1'].description).toBe('multiline<br>text');
|
||||
expect(actors['2'].description).toBe('multiline<br/>text');
|
||||
expect(actors['3'].description).toBe('multiline<br />text');
|
||||
expect(actors['4'].description).toBe('multiline<br \t/>text');
|
||||
expect(actors.get('1').description).toBe('multiline<br>text');
|
||||
expect(actors.get('2').description).toBe('multiline<br/>text');
|
||||
expect(actors.get('3').description).toBe('multiline<br />text');
|
||||
expect(actors.get('4').description).toBe('multiline<br \t/>text');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
expect(messages[0].message).toBe('multiline<br>text');
|
||||
@ -893,8 +893,8 @@ end`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -915,8 +915,8 @@ end`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
expect(messages[1].type).toEqual(diagram.db.LINETYPE.RECT_START);
|
||||
@ -940,8 +940,8 @@ end`;
|
||||
`;
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
expect(messages[1].type).toEqual(diagram.db.LINETYPE.RECT_START);
|
||||
@ -967,8 +967,8 @@ end`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -993,8 +993,8 @@ end`;
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -1039,8 +1039,8 @@ sequenceDiagram
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
|
||||
expect(actors.Service.description).toBe('Service');
|
||||
expect(actors.DB.description).toBe('DB');
|
||||
expect(actors.get('Service').description).toBe('Service');
|
||||
expect(actors.get('DB').description).toBe('DB');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -1063,8 +1063,8 @@ sequenceDiagram
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
|
||||
expect(actors.Service.description).toBe('Service');
|
||||
expect(actors.DB.description).toBe('DB');
|
||||
expect(actors.get('Service').description).toBe('Service');
|
||||
expect(actors.get('DB').description).toBe('DB');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -1090,8 +1090,8 @@ sequenceDiagram
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
|
||||
expect(actors.Consumer.description).toBe('Consumer');
|
||||
expect(actors.API.description).toBe('API');
|
||||
expect(actors.get('Consumer').description).toBe('Consumer');
|
||||
expect(actors.get('API').description).toBe('API');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -1120,8 +1120,8 @@ end`;
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -1142,8 +1142,8 @@ end`;
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@ -1309,15 +1309,15 @@ link a: Tests @ https://tests.contoso.com/?svc=alice@contoso.com
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.a.links['Repo']).toBe('https://repo.contoso.com/');
|
||||
expect(actors.b.links['Repo']).toBe(undefined);
|
||||
expect(actors.a.links['Dashboard']).toBe('https://dashboard.contoso.com/');
|
||||
expect(actors.b.links['Dashboard']).toBe('https://dashboard.contoso.com/');
|
||||
expect(actors.a.links['On-Call']).toBe('https://oncall.contoso.com/?svc=alice');
|
||||
expect(actors.c.links['Dashboard']).toBe(undefined);
|
||||
expect(actors.a.links['Endpoint']).toBe('https://alice.contoso.com');
|
||||
expect(actors.a.links['Swagger']).toBe('https://swagger.contoso.com');
|
||||
expect(actors.a.links['Tests']).toBe('https://tests.contoso.com/?svc=alice@contoso.com');
|
||||
expect(actors.get('a').links['Repo']).toBe('https://repo.contoso.com/');
|
||||
expect(actors.get('b').links['Repo']).toBe(undefined);
|
||||
expect(actors.get('a').links['Dashboard']).toBe('https://dashboard.contoso.com/');
|
||||
expect(actors.get('b').links['Dashboard']).toBe('https://dashboard.contoso.com/');
|
||||
expect(actors.get('a').links['On-Call']).toBe('https://oncall.contoso.com/?svc=alice');
|
||||
expect(actors.get('c').links['Dashboard']).toBe(undefined);
|
||||
expect(actors.get('a').links['Endpoint']).toBe('https://alice.contoso.com');
|
||||
expect(actors.get('a').links['Swagger']).toBe('https://swagger.contoso.com');
|
||||
expect(actors.get('a').links['Tests']).toBe('https://tests.contoso.com/?svc=alice@contoso.com');
|
||||
});
|
||||
|
||||
it('should handle properties EXPERIMENTAL: USE WITH CAUTION', async () => {
|
||||
@ -1333,11 +1333,11 @@ properties b: {"class": "external-service-actor", "icon": "@computer"}
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.a.properties['class']).toBe('internal-service-actor');
|
||||
expect(actors.b.properties['class']).toBe('external-service-actor');
|
||||
expect(actors.a.properties['icon']).toBe('@clock');
|
||||
expect(actors.b.properties['icon']).toBe('@computer');
|
||||
expect(actors.c.properties['class']).toBe(undefined);
|
||||
expect(actors.get('a').properties['class']).toBe('internal-service-actor');
|
||||
expect(actors.get('b').properties['class']).toBe('external-service-actor');
|
||||
expect(actors.get('a').properties['icon']).toBe('@clock');
|
||||
expect(actors.get('b').properties['icon']).toBe('@computer');
|
||||
expect(actors.get('c').properties['class']).toBe(undefined);
|
||||
});
|
||||
|
||||
it('should handle box', async () => {
|
||||
@ -1423,14 +1423,14 @@ link a: Tests @ https://tests.contoso.com/?svc=alice@contoso.com
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
const createdActors = diagram.db.getCreatedActors();
|
||||
expect(actors['c'].name).toEqual('c');
|
||||
expect(actors['c'].description).toEqual('c');
|
||||
expect(actors['c'].type).toEqual('participant');
|
||||
expect(createdActors['c']).toEqual(1);
|
||||
expect(actors['d'].name).toEqual('d');
|
||||
expect(actors['d'].description).toEqual('Donald');
|
||||
expect(actors['d'].type).toEqual('actor');
|
||||
expect(createdActors['d']).toEqual(3);
|
||||
expect(actors.get('c').name).toEqual('c');
|
||||
expect(actors.get('c').description).toEqual('c');
|
||||
expect(actors.get('c').type).toEqual('participant');
|
||||
expect(createdActors.get('c')).toEqual(1);
|
||||
expect(actors.get('d').name).toEqual('d');
|
||||
expect(actors.get('d').description).toEqual('Donald');
|
||||
expect(actors.get('d').type).toEqual('actor');
|
||||
expect(createdActors.get('d')).toEqual(3);
|
||||
});
|
||||
it('should handle simple actor destruction', async () => {
|
||||
const str = `
|
||||
@ -1445,8 +1445,8 @@ link a: Tests @ https://tests.contoso.com/?svc=alice@contoso.com
|
||||
`;
|
||||
await mermaidAPI.parse(str);
|
||||
const destroyedActors = diagram.db.getDestroyedActors();
|
||||
expect(destroyedActors['a']).toEqual(1);
|
||||
expect(destroyedActors['c']).toEqual(3);
|
||||
expect(destroyedActors.get('a')).toEqual(1);
|
||||
expect(destroyedActors.get('c')).toEqual(3);
|
||||
});
|
||||
it('should handle the creation and destruction of the same actor', async () => {
|
||||
const str = `
|
||||
@ -1461,8 +1461,8 @@ link a: Tests @ https://tests.contoso.com/?svc=alice@contoso.com
|
||||
await mermaidAPI.parse(str);
|
||||
const createdActors = diagram.db.getCreatedActors();
|
||||
const destroyedActors = diagram.db.getDestroyedActors();
|
||||
expect(createdActors['c']).toEqual(1);
|
||||
expect(destroyedActors['c']).toEqual(3);
|
||||
expect(createdActors.get('c')).toEqual(1);
|
||||
expect(destroyedActors.get('c')).toEqual(3);
|
||||
});
|
||||
});
|
||||
describe('when checking the bounds in a sequenceDiagram', function () {
|
||||
@ -1668,7 +1668,7 @@ participant Alice
|
||||
await mermaidAPI.parse(str);
|
||||
|
||||
const actors = diagram.db.getActors();
|
||||
expect(Object.keys(actors)).toEqual(['Alice']);
|
||||
expect([...actors.keys()]).toEqual(['Alice']);
|
||||
});
|
||||
it('should handle one actor and a centered note', async () => {
|
||||
const str = `
|
||||
@ -2033,4 +2033,12 @@ participant Alice`;
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it.each(['__proto__', 'constructor'])('should allow %s as an actor name', function (prop) {
|
||||
expect(
|
||||
mermaidAPI.parse(`
|
||||
sequenceDiagram
|
||||
${prop}-->>A: Hello, how are you?`)
|
||||
).resolves.toBeDefined();
|
||||
});
|
||||
});
|
||||
|
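The spec changes above go hand in hand with moving the sequence diagram's actor store from a plain object to a `Map`, which is why names such as `__proto__` and `constructor` can now be allowed as actor names. A minimal TypeScript sketch (not taken from the mermaid codebase) of the difference:

```ts
// Sketch: why a Map is safer than a plain object when the diagram text
// controls the keys, e.g. an actor literally named "__proto__".
const asObject: Record<string, { description: string }> = {};
const asMap = new Map<string, { description: string }>();

const name = '__proto__'; // actor name taken from the diagram text

// Plain object: assigning to '__proto__' goes through the inherited setter,
// so no own property is created and the "actor" is silently lost.
asObject[name] = { description: 'Alice' };
console.log(Object.keys(asObject)); // [] (nothing was stored as an own property)

// Map: keys are ordinary values, so any string round-trips cleanly.
asMap.set(name, { description: 'Alice' });
console.log(asMap.has(name)); // true
console.log(asMap.get(name)?.description); // 'Alice'
```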
@ -144,7 +144,7 @@ export const bounds = {
|
||||
this.updateBounds(_startx, _starty, _stopx, _stopy);
|
||||
},
|
||||
newActivation: function (message, diagram, actors) {
|
||||
const actorRect = actors[message.from];
|
||||
const actorRect = actors.get(message.from);
|
||||
const stackedSize = actorActivations(message.from).length || 0;
|
||||
const x = actorRect.x + actorRect.width / 2 + ((stackedSize - 1) * conf.activationWidth) / 2;
|
||||
this.activations.push({
|
||||
@ -488,7 +488,7 @@ const drawMessage = async function (diagram, msgModel, lineStartY: number, diagO
|
||||
const addActorRenderingData = async function (
|
||||
diagram,
|
||||
actors,
|
||||
createdActors,
|
||||
createdActors: Map<string, any>,
|
||||
actorKeys,
|
||||
verticalPos,
|
||||
messages,
|
||||
@ -500,7 +500,7 @@ const addActorRenderingData = async function (
|
||||
let maxHeight = 0;
|
||||
|
||||
for (const actorKey of actorKeys) {
|
||||
const actor = actors[actorKey];
|
||||
const actor = actors.get(actorKey);
|
||||
const box = actor.box;
|
||||
|
||||
// end of box
|
||||
@ -528,7 +528,7 @@ const addActorRenderingData = async function (
|
||||
maxHeight = common.getMax(maxHeight, actor.height);
|
||||
|
||||
// if the actor is created by a message, widen margin
|
||||
if (createdActors[actor.name]) {
|
||||
if (createdActors.get(actor.name)) {
|
||||
prevMargin += actor.width / 2;
|
||||
}
|
||||
|
||||
@ -558,7 +558,7 @@ const addActorRenderingData = async function (
|
||||
export const drawActors = async function (diagram, actors, actorKeys, isFooter) {
|
||||
if (!isFooter) {
|
||||
for (const actorKey of actorKeys) {
|
||||
const actor = actors[actorKey];
|
||||
const actor = actors.get(actorKey);
|
||||
// Draw the box with the attached line
|
||||
await svgDraw.drawActor(diagram, actor, conf, false);
|
||||
}
|
||||
@ -566,7 +566,7 @@ export const drawActors = async function (diagram, actors, actorKeys, isFooter)
|
||||
let maxHeight = 0;
|
||||
bounds.bumpVerticalPos(conf.boxMargin * 2);
|
||||
for (const actorKey of actorKeys) {
|
||||
const actor = actors[actorKey];
|
||||
const actor = actors.get(actorKey);
|
||||
if (!actor.stopy) {
|
||||
actor.stopy = bounds.getVerticalPos();
|
||||
}
|
||||
@ -581,7 +581,7 @@ export const drawActorsPopup = function (diagram, actors, actorKeys, doc) {
|
||||
let maxHeight = 0;
|
||||
let maxWidth = 0;
|
||||
for (const actorKey of actorKeys) {
|
||||
const actor = actors[actorKey];
|
||||
const actor = actors.get(actorKey);
|
||||
const minMenuWidth = getRequiredPopupWidth(actor);
|
||||
const menuDimensions = svgDraw.drawPopup(
|
||||
diagram,
|
||||
@ -624,7 +624,7 @@ const actorActivations = function (actor) {
|
||||
|
||||
const activationBounds = function (actor, actors) {
|
||||
// handle multiple stacked activations for same actor
|
||||
const actorObj = actors[actor];
|
||||
const actorObj = actors.get(actor);
|
||||
const activations = actorActivations(actor);
|
||||
|
||||
const left = activations.reduce(
|
||||
@ -682,7 +682,7 @@ function adjustCreatedDestroyedData(
|
||||
destroyedActors
|
||||
) {
|
||||
function receiverAdjustment(actor, adjustment) {
|
||||
if (actor.x < actors[msg.from].x) {
|
||||
if (actor.x < actors.get(msg.from).x) {
|
||||
bounds.insert(
|
||||
msgModel.stopx - adjustment,
|
||||
msgModel.starty,
|
||||
@ -702,7 +702,7 @@ function adjustCreatedDestroyedData(
|
||||
}
|
||||
|
||||
function senderAdjustment(actor, adjustment) {
|
||||
if (actor.x < actors[msg.to].x) {
|
||||
if (actor.x < actors.get(msg.to).x) {
|
||||
bounds.insert(
|
||||
msgModel.startx - adjustment,
|
||||
msgModel.starty,
|
||||
@ -722,16 +722,16 @@ function adjustCreatedDestroyedData(
|
||||
}
|
||||
|
||||
// if it is a create message
|
||||
if (createdActors[msg.to] == index) {
|
||||
const actor = actors[msg.to];
|
||||
if (createdActors.get(msg.to) == index) {
|
||||
const actor = actors.get(msg.to);
|
||||
const adjustment = actor.type == 'actor' ? ACTOR_TYPE_WIDTH / 2 + 3 : actor.width / 2 + 3;
|
||||
receiverAdjustment(actor, adjustment);
|
||||
actor.starty = lineStartY - actor.height / 2;
|
||||
bounds.bumpVerticalPos(actor.height / 2);
|
||||
}
|
||||
// if it is a destroy sender message
|
||||
else if (destroyedActors[msg.from] == index) {
|
||||
const actor = actors[msg.from];
|
||||
else if (destroyedActors.get(msg.from) == index) {
|
||||
const actor = actors.get(msg.from);
|
||||
if (conf.mirrorActors) {
|
||||
const adjustment = actor.type == 'actor' ? ACTOR_TYPE_WIDTH / 2 : actor.width / 2;
|
||||
senderAdjustment(actor, adjustment);
|
||||
@ -740,8 +740,8 @@ function adjustCreatedDestroyedData(
|
||||
bounds.bumpVerticalPos(actor.height / 2);
|
||||
}
|
||||
// if it is a destroy receiver message
|
||||
else if (destroyedActors[msg.to] == index) {
|
||||
const actor = actors[msg.to];
|
||||
else if (destroyedActors.get(msg.to) == index) {
|
||||
const actor = actors.get(msg.to);
|
||||
if (conf.mirrorActors) {
|
||||
const adjustment = actor.type == 'actor' ? ACTOR_TYPE_WIDTH / 2 + 3 : actor.width / 2 + 3;
|
||||
receiverAdjustment(actor, adjustment);
|
||||
@ -1132,15 +1132,15 @@ export const draw = async function (_text: string, id: string, _version: string,
|
||||
* @returns The max message width of each actor.
|
||||
*/
|
||||
async function getMaxMessageWidthPerActor(
|
||||
actors: { [id: string]: any },
|
||||
actors: Map<string, any>,
|
||||
messages: any[],
|
||||
diagObj: Diagram
|
||||
): Promise<{ [id: string]: number }> {
|
||||
const maxMessageWidthPerActor = {};
|
||||
|
||||
for (const msg of messages) {
|
||||
if (actors[msg.to] && actors[msg.from]) {
|
||||
const actor = actors[msg.to];
|
||||
if (actors.get(msg.to) && actors.get(msg.from)) {
|
||||
const actor = actors.get(msg.to);
|
||||
|
||||
// If this is the first actor, and the message is left of it, no need to calculate the margin
|
||||
if (msg.placement === diagObj.db.PLACEMENT.LEFTOF && !actor.prevActor) {
|
||||
@ -1258,13 +1258,13 @@ const getRequiredPopupWidth = function (actor) {
|
||||
* @param boxes - The boxes around the actors if any
|
||||
*/
|
||||
async function calculateActorMargins(
|
||||
actors: { [id: string]: any },
|
||||
actors: Map<string, any>,
|
||||
actorToMessageWidth: Awaited<ReturnType<typeof getMaxMessageWidthPerActor>>,
|
||||
boxes
|
||||
) {
|
||||
let maxHeight = 0;
|
||||
for (const prop of Object.keys(actors)) {
|
||||
const actor = actors[prop];
|
||||
for (const prop of actors.keys()) {
|
||||
const actor = actors.get(prop);
|
||||
if (actor.wrap) {
|
||||
actor.description = utils.wrapLabel(
|
||||
actor.description,
|
||||
@ -1285,13 +1285,13 @@ async function calculateActorMargins(
|
||||
}
|
||||
|
||||
for (const actorKey in actorToMessageWidth) {
|
||||
const actor = actors[actorKey];
|
||||
const actor = actors.get(actorKey);
|
||||
|
||||
if (!actor) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const nextActor = actors[actor.nextActor];
|
||||
const nextActor = actors.get(actor.nextActor);
|
||||
|
||||
// No need to space out an actor that doesn't have a next link
|
||||
if (!nextActor) {
|
||||
@ -1311,7 +1311,7 @@ async function calculateActorMargins(
|
||||
boxes.forEach((box) => {
|
||||
const textFont = messageFont(conf);
|
||||
let totalWidth = box.actorKeys.reduce((total, aKey) => {
|
||||
return (total += actors[aKey].width + (actors[aKey].margin || 0));
|
||||
return (total += actors.get(aKey).width + (actors.get(aKey).margin || 0));
|
||||
}, 0);
|
||||
|
||||
totalWidth -= 2 * conf.boxTextMargin;
|
||||
@ -1334,8 +1334,10 @@ async function calculateActorMargins(
|
||||
}
|
||||
|
||||
const buildNoteModel = async function (msg, actors, diagObj) {
|
||||
const startx = actors[msg.from].x;
|
||||
const stopx = actors[msg.to].x;
|
||||
const fromActor = actors.get(msg.from);
|
||||
const toActor = actors.get(msg.to);
|
||||
const startx = fromActor.x;
|
||||
const stopx = toActor.x;
|
||||
const shouldWrap = msg.wrap && msg.message;
|
||||
|
||||
let textDimensions: { width: number; height: number; lineHeight?: number } = hasKatex(msg.message)
|
||||
@ -1349,7 +1351,7 @@ const buildNoteModel = async function (msg, actors, diagObj) {
|
||||
? conf.width
|
||||
: common.getMax(conf.width, textDimensions.width + 2 * conf.noteMargin),
|
||||
height: 0,
|
||||
startx: actors[msg.from].x,
|
||||
startx: fromActor.x,
|
||||
stopx: 0,
|
||||
starty: 0,
|
||||
stopy: 0,
|
||||
@ -1359,45 +1361,36 @@ const buildNoteModel = async function (msg, actors, diagObj) {
|
||||
noteModel.width = shouldWrap
|
||||
? common.getMax(conf.width, textDimensions.width)
|
||||
: common.getMax(
|
||||
actors[msg.from].width / 2 + actors[msg.to].width / 2,
|
||||
fromActor.width / 2 + toActor.width / 2,
|
||||
textDimensions.width + 2 * conf.noteMargin
|
||||
);
|
||||
noteModel.startx = startx + (actors[msg.from].width + conf.actorMargin) / 2;
|
||||
noteModel.startx = startx + (fromActor.width + conf.actorMargin) / 2;
|
||||
} else if (msg.placement === diagObj.db.PLACEMENT.LEFTOF) {
|
||||
noteModel.width = shouldWrap
|
||||
? common.getMax(conf.width, textDimensions.width + 2 * conf.noteMargin)
|
||||
: common.getMax(
|
||||
actors[msg.from].width / 2 + actors[msg.to].width / 2,
|
||||
fromActor.width / 2 + toActor.width / 2,
|
||||
textDimensions.width + 2 * conf.noteMargin
|
||||
);
|
||||
noteModel.startx = startx - noteModel.width + (actors[msg.from].width - conf.actorMargin) / 2;
|
||||
noteModel.startx = startx - noteModel.width + (fromActor.width - conf.actorMargin) / 2;
|
||||
} else if (msg.to === msg.from) {
|
||||
textDimensions = utils.calculateTextDimensions(
|
||||
shouldWrap
|
||||
? utils.wrapLabel(
|
||||
msg.message,
|
||||
common.getMax(conf.width, actors[msg.from].width),
|
||||
noteFont(conf)
|
||||
)
|
||||
? utils.wrapLabel(msg.message, common.getMax(conf.width, fromActor.width), noteFont(conf))
|
||||
: msg.message,
|
||||
noteFont(conf)
|
||||
);
|
||||
noteModel.width = shouldWrap
|
||||
? common.getMax(conf.width, actors[msg.from].width)
|
||||
: common.getMax(
|
||||
actors[msg.from].width,
|
||||
conf.width,
|
||||
textDimensions.width + 2 * conf.noteMargin
|
||||
);
|
||||
noteModel.startx = startx + (actors[msg.from].width - noteModel.width) / 2;
|
||||
? common.getMax(conf.width, fromActor.width)
|
||||
: common.getMax(fromActor.width, conf.width, textDimensions.width + 2 * conf.noteMargin);
|
||||
noteModel.startx = startx + (fromActor.width - noteModel.width) / 2;
|
||||
} else {
|
||||
noteModel.width =
|
||||
Math.abs(startx + actors[msg.from].width / 2 - (stopx + actors[msg.to].width / 2)) +
|
||||
conf.actorMargin;
|
||||
Math.abs(startx + fromActor.width / 2 - (stopx + toActor.width / 2)) + conf.actorMargin;
|
||||
noteModel.startx =
|
||||
startx < stopx
|
||||
? startx + actors[msg.from].width / 2 - conf.actorMargin / 2
|
||||
: stopx + actors[msg.to].width / 2 - conf.actorMargin / 2;
|
||||
? startx + fromActor.width / 2 - conf.actorMargin / 2
|
||||
: stopx + toActor.width / 2 - conf.actorMargin / 2;
|
||||
}
|
||||
if (shouldWrap) {
|
||||
noteModel.message = utils.wrapLabel(
|
||||
@ -1545,7 +1538,7 @@ const calculateLoopBounds = async function (messages, actors, _maxWidthPerActor,
|
||||
break;
|
||||
case diagObj.db.LINETYPE.ACTIVE_START:
|
||||
{
|
||||
const actorRect = actors[msg.from ? msg.from : msg.to.actor];
|
||||
const actorRect = actors.get(msg.from ? msg.from : msg.to.actor);
|
||||
const stackedSize = actorActivations(msg.from ? msg.from : msg.to.actor).length;
|
||||
const x =
|
||||
actorRect.x + actorRect.width / 2 + ((stackedSize - 1) * conf.activationWidth) / 2;
|
||||
@ -1585,8 +1578,8 @@ const calculateLoopBounds = async function (messages, actors, _maxWidthPerActor,
|
||||
stack.forEach((stk) => {
|
||||
current = stk;
|
||||
if (msgModel.startx === msgModel.stopx) {
|
||||
const from = actors[msg.from];
|
||||
const to = actors[msg.to];
|
||||
const from = actors.get(msg.from);
|
||||
const to = actors.get(msg.to);
|
||||
current.from = common.getMin(
|
||||
from.x - msgModel.width / 2,
|
||||
from.x - from.width / 2,
|
||||
|
@ -307,7 +307,7 @@ export const fixLifeLineHeights = (diagram, actors, actorKeys, conf) => {
|
||||
return;
|
||||
}
|
||||
actorKeys.forEach((actorKey) => {
|
||||
const actor = actors[actorKey];
|
||||
const actor = actors.get(actorKey);
|
||||
const actorDOM = diagram.select('#actor' + actor.actorCnt);
|
||||
if (!conf.mirrorActors && actor.stopy) {
|
||||
actorDOM.attr('y2', actor.stopy + actor.height / 2);
|
||||
|
@ -21,8 +21,8 @@ describe('state parser can parse...', () => {
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
|
||||
const states = stateDiagram.parser.yy.getStates();
|
||||
expect(states['namedState1']).not.toBeUndefined();
|
||||
expect(states['namedState1'].descriptions.join(' ')).toEqual('Small State 1');
|
||||
expect(states.get('namedState1')).not.toBeUndefined();
|
||||
expect(states.get('namedState1').descriptions.join(' ')).toEqual('Small State 1');
|
||||
});
|
||||
});
|
||||
|
||||
@ -34,8 +34,8 @@ describe('state parser can parse...', () => {
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
|
||||
const states = stateDiagram.parser.yy.getStates();
|
||||
expect(states['namedState1']).not.toBeUndefined();
|
||||
expect(states['namedState1'].descriptions.join(' ')).toEqual('Small State 1');
|
||||
expect(states.get('namedState1')).not.toBeUndefined();
|
||||
expect(states.get('namedState1').descriptions.join(' ')).toEqual('Small State 1');
|
||||
});
|
||||
|
||||
it('no spaces before and after the colon', () => {
|
||||
@ -45,8 +45,8 @@ describe('state parser can parse...', () => {
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
|
||||
const states = stateDiagram.parser.yy.getStates();
|
||||
expect(states['namedState1']).not.toBeUndefined();
|
||||
expect(states['namedState1'].descriptions.join(' ')).toEqual('Small State 1');
|
||||
expect(states.get('namedState1')).not.toBeUndefined();
|
||||
expect(states.get('namedState1').descriptions.join(' ')).toEqual('Small State 1');
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -62,8 +62,8 @@ describe('state parser can parse...', () => {
|
||||
stateDiagram.parser.parse(diagramText);
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
const states = stateDiagram.parser.yy.getStates();
|
||||
expect(states['assemble']).not.toBeUndefined();
|
||||
expect(states['assemblies']).not.toBeUndefined();
|
||||
expect(states.get('assemble')).not.toBeUndefined();
|
||||
expect(states.get('assemblies')).not.toBeUndefined();
|
||||
});
|
||||
|
||||
it('state "as" as as', function () {
|
||||
@ -73,8 +73,8 @@ describe('state parser can parse...', () => {
|
||||
stateDiagram.parser.parse(diagramText);
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
const states = stateDiagram.parser.yy.getStates();
|
||||
expect(states['as']).not.toBeUndefined();
|
||||
expect(states['as'].descriptions.join(' ')).toEqual('as');
|
||||
expect(states.get('as')).not.toBeUndefined();
|
||||
expect(states.get('as').descriptions.join(' ')).toEqual('as');
|
||||
});
|
||||
});
|
||||
|
||||
@ -99,12 +99,12 @@ describe('state parser can parse...', () => {
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
|
||||
const states = stateDiagram.parser.yy.getStates();
|
||||
expect(states['namedState1']).not.toBeUndefined();
|
||||
expect(states['bigState1']).not.toBeUndefined();
|
||||
expect(states['bigState1'].doc[0].id).toEqual('bigState1InternalState');
|
||||
expect(states['namedState2']).not.toBeUndefined();
|
||||
expect(states['bigState2']).not.toBeUndefined();
|
||||
expect(states['bigState2'].doc[0].id).toEqual('bigState2InternalState');
|
||||
expect(states.get('namedState1')).not.toBeUndefined();
|
||||
expect(states.get('bigState1')).not.toBeUndefined();
|
||||
expect(states.get('bigState1').doc[0].id).toEqual('bigState1InternalState');
|
||||
expect(states.get('namedState2')).not.toBeUndefined();
|
||||
expect(states.get('bigState2')).not.toBeUndefined();
|
||||
expect(states.get('bigState2').doc[0].id).toEqual('bigState2InternalState');
|
||||
const relationships = stateDiagram.parser.yy.getRelations();
|
||||
expect(relationships[0].id1).toEqual('namedState1');
|
||||
expect(relationships[0].id2).toEqual('bigState1');
|
||||
@ -123,11 +123,23 @@ describe('state parser can parse...', () => {
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
|
||||
const states = stateDiagram.parser.yy.getStates();
|
||||
expect(states['bigState1']).not.toBeUndefined();
|
||||
expect(states['bigState1'].doc[0].id).toEqual('inner1');
|
||||
expect(states['bigState1'].doc[0].description).toEqual('inner state 1');
|
||||
expect(states['bigState1'].doc[1].id).toEqual('inner2');
|
||||
expect(states['bigState1'].doc[1].description).toEqual('inner state 2');
|
||||
expect(states.get('bigState1')).not.toBeUndefined();
|
||||
expect(states.get('bigState1').doc[0].id).toEqual('inner1');
|
||||
expect(states.get('bigState1').doc[0].description).toEqual('inner state 1');
|
||||
expect(states.get('bigState1').doc[1].id).toEqual('inner2');
|
||||
expect(states.get('bigState1').doc[1].description).toEqual('inner state 2');
|
||||
});
|
||||
});
|
||||
|
||||
describe('unsafe properties as state names', () => {
|
||||
it.each(['__proto__', 'constructor'])('should allow %s as a state name', function (prop) {
|
||||
stateDiagram.parser.parse(`
|
||||
stateDiagram-v2
|
||||
[*] --> ${prop}
|
||||
${prop} --> [*]`);
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
const states = stateDiagram.parser.yy.getStates();
|
||||
expect(states.get(prop)).not.toBeUndefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
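For downstream code, the practical effect of these spec changes is that `getStates()` and `getClasses()` now return `Map`s instead of plain objects. A hedged sketch of the migration pattern, using values mirrored from the expectations above:

```ts
// Sketch of the consumer-side change: bracket access and Object.keys()
// give way to get()/has() and the Map iteration protocol.
const states: Map<string, { descriptions: string[] }> = new Map([
  ['namedState1', { descriptions: ['Small State 1'] }],
]);

// Before (plain object): states['namedState1'].descriptions
// After (Map):
const state = states.get('namedState1');
if (state !== undefined) {
  console.log(state.descriptions.join(' ')); // 'Small State 1'
}

// Enumerating ids: Object.keys(states) becomes [...states.keys()]
console.log([...states.keys()]); // ['namedState1']
```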
@ -19,8 +19,8 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
|
||||
const styleClasses = stateDb.getClasses();
|
||||
expect(styleClasses['exampleClass'].styles.length).toEqual(1);
|
||||
expect(styleClasses['exampleClass'].styles[0]).toEqual('background:#bbb');
|
||||
expect(styleClasses.get('exampleClass').styles.length).toEqual(1);
|
||||
expect(styleClasses.get('exampleClass').styles[0]).toEqual('background:#bbb');
|
||||
});
|
||||
|
||||
it('can define multiple attributes separated by commas', function () {
|
||||
@ -30,10 +30,10 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
|
||||
const styleClasses = stateDb.getClasses();
|
||||
expect(styleClasses['exampleClass'].styles.length).toEqual(3);
|
||||
expect(styleClasses['exampleClass'].styles[0]).toEqual('background:#bbb');
|
||||
expect(styleClasses['exampleClass'].styles[1]).toEqual('font-weight:bold');
|
||||
expect(styleClasses['exampleClass'].styles[2]).toEqual('font-style:italic');
|
||||
expect(styleClasses.get('exampleClass').styles.length).toEqual(3);
|
||||
expect(styleClasses.get('exampleClass').styles[0]).toEqual('background:#bbb');
|
||||
expect(styleClasses.get('exampleClass').styles[1]).toEqual('font-weight:bold');
|
||||
expect(styleClasses.get('exampleClass').styles[2]).toEqual('font-style:italic');
|
||||
});
|
||||
|
||||
// need to look at what the lexer is doing
|
||||
@ -44,9 +44,9 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
|
||||
const classes = stateDiagram.parser.yy.getClasses();
|
||||
expect(classes['exampleStyleClass'].styles.length).toBe(2);
|
||||
expect(classes['exampleStyleClass'].styles[0]).toBe('background:#bbb');
|
||||
expect(classes['exampleStyleClass'].styles[1]).toBe('border:1.5px solid red');
|
||||
expect(classes.get('exampleStyleClass').styles.length).toBe(2);
|
||||
expect(classes.get('exampleStyleClass').styles[0]).toBe('background:#bbb');
|
||||
expect(classes.get('exampleStyleClass').styles[1]).toBe('border:1.5px solid red');
|
||||
});
|
||||
|
||||
it('an attribute can have a space in the style', function () {
|
||||
@ -56,9 +56,19 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
|
||||
const classes = stateDiagram.parser.yy.getClasses();
|
||||
expect(classes['exampleStyleClass'].styles.length).toBe(2);
|
||||
expect(classes['exampleStyleClass'].styles[0]).toBe('background: #bbb');
|
||||
expect(classes['exampleStyleClass'].styles[1]).toBe('border:1.5px solid red');
|
||||
expect(classes.get('exampleStyleClass').styles.length).toBe(2);
|
||||
expect(classes.get('exampleStyleClass').styles[0]).toBe('background: #bbb');
|
||||
expect(classes.get('exampleStyleClass').styles[1]).toBe('border:1.5px solid red');
|
||||
});
|
||||
|
||||
it('can have __proto__ or constructor as a class name', function () {
|
||||
stateDiagram.parser.parse(
|
||||
'stateDiagram-v2\n classDef __proto__ background:#bbb,border:1.5px solid red;\n classDef constructor background:#bbb,border:1.5px solid red;'
|
||||
);
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
const classes = stateDiagram.parser.yy.getClasses();
|
||||
expect(classes.get('__proto__').styles.length).toBe(2);
|
||||
expect(classes.get('constructor').styles.length).toBe(2);
|
||||
});
|
||||
});
|
||||
|
||||
@ -74,9 +84,9 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
|
||||
const classes = stateDb.getClasses();
|
||||
expect(classes['exampleStyleClass'].styles.length).toEqual(2);
|
||||
expect(classes['exampleStyleClass'].styles[0]).toEqual('background:#bbb');
|
||||
expect(classes['exampleStyleClass'].styles[1]).toEqual('border:1px solid red');
|
||||
expect(classes.get('exampleStyleClass').styles.length).toEqual(2);
|
||||
expect(classes.get('exampleStyleClass').styles[0]).toEqual('background:#bbb');
|
||||
expect(classes.get('exampleStyleClass').styles[1]).toEqual('border:1px solid red');
|
||||
|
||||
const state_a = stateDb.getState('a');
|
||||
expect(state_a.classes.length).toEqual(1);
|
||||
@ -95,9 +105,9 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
|
||||
const classes = stateDiagram.parser.yy.getClasses();
|
||||
expect(classes['exampleStyleClass'].styles.length).toBe(2);
|
||||
expect(classes['exampleStyleClass'].styles[0]).toBe('background:#bbb');
|
||||
expect(classes['exampleStyleClass'].styles[1]).toBe('border:1px solid red');
|
||||
expect(classes.get('exampleStyleClass').styles.length).toBe(2);
|
||||
expect(classes.get('exampleStyleClass').styles[0]).toBe('background:#bbb');
|
||||
expect(classes.get('exampleStyleClass').styles[1]).toBe('border:1px solid red');
|
||||
|
||||
const state_a_a = stateDiagram.parser.yy.getState('a_a');
|
||||
expect(state_a_a.classes.length).toEqual(1);
|
||||
@ -117,11 +127,11 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
|
||||
const states = stateDiagram.parser.yy.getStates();
|
||||
const classes = stateDiagram.parser.yy.getClasses();
|
||||
|
||||
expect(classes['exampleStyleClass'].styles.length).toEqual(2);
|
||||
expect(classes['exampleStyleClass'].styles[0]).toEqual('background:#bbb');
|
||||
expect(classes['exampleStyleClass'].styles[1]).toEqual('border:1px solid red');
|
||||
expect(classes.get('exampleStyleClass').styles.length).toEqual(2);
|
||||
expect(classes.get('exampleStyleClass').styles[0]).toEqual('background:#bbb');
|
||||
expect(classes.get('exampleStyleClass').styles[1]).toEqual('border:1px solid red');
|
||||
|
||||
expect(states['b'].classes[0]).toEqual('exampleStyleClass');
|
||||
expect(states.get('b').classes[0]).toEqual('exampleStyleClass');
|
||||
});
|
||||
|
||||
it('can be applied to a [*] state', () => {
|
||||
@ -136,11 +146,11 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
|
||||
const states = stateDiagram.parser.yy.getStates();
|
||||
const classes = stateDiagram.parser.yy.getClasses();
|
||||
|
||||
expect(classes['exampleStyleClass'].styles.length).toEqual(2);
|
||||
expect(classes['exampleStyleClass'].styles[0]).toEqual('background:#bbb');
|
||||
expect(classes['exampleStyleClass'].styles[1]).toEqual('border:1px solid red');
|
||||
expect(classes.get('exampleStyleClass').styles.length).toEqual(2);
|
||||
expect(classes.get('exampleStyleClass').styles[0]).toEqual('background:#bbb');
|
||||
expect(classes.get('exampleStyleClass').styles[1]).toEqual('border:1px solid red');
|
||||
|
||||
expect(states['root_start'].classes[0]).toEqual('exampleStyleClass');
|
||||
expect(states.get('root_start').classes[0]).toEqual('exampleStyleClass');
|
||||
});
|
||||
|
||||
it('can be applied to a comma separated list of states', function () {
|
||||
@ -155,11 +165,11 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
|
||||
let classes = stateDiagram.parser.yy.getClasses();
|
||||
let states = stateDiagram.parser.yy.getStates();
|
||||
|
||||
expect(classes['exampleStyleClass'].styles.length).toEqual(2);
|
||||
expect(classes['exampleStyleClass'].styles[0]).toEqual('background:#bbb');
|
||||
expect(classes['exampleStyleClass'].styles[1]).toEqual('border:1px solid red');
|
||||
expect(states['a'].classes[0]).toEqual('exampleStyleClass');
|
||||
expect(states['b'].classes[0]).toEqual('exampleStyleClass');
|
||||
expect(classes.get('exampleStyleClass').styles.length).toEqual(2);
|
||||
expect(classes.get('exampleStyleClass').styles[0]).toEqual('background:#bbb');
|
||||
expect(classes.get('exampleStyleClass').styles[1]).toEqual('border:1px solid red');
|
||||
expect(states.get('a').classes[0]).toEqual('exampleStyleClass');
|
||||
expect(states.get('b').classes[0]).toEqual('exampleStyleClass');
|
||||
});
|
||||
|
||||
it('a comma separated list of states may or may not have spaces after commas', function () {
|
||||
@ -174,13 +184,13 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
|
||||
const classes = stateDiagram.parser.yy.getClasses();
|
||||
const states = stateDiagram.parser.yy.getStates();
|
||||
|
||||
expect(classes['exampleStyleClass'].styles.length).toEqual(2);
|
||||
expect(classes['exampleStyleClass'].styles[0]).toEqual('background:#bbb');
|
||||
expect(classes['exampleStyleClass'].styles[1]).toEqual('border:1px solid red');
|
||||
expect(classes.get('exampleStyleClass').styles.length).toEqual(2);
|
||||
expect(classes.get('exampleStyleClass').styles[0]).toEqual('background:#bbb');
|
||||
expect(classes.get('exampleStyleClass').styles[1]).toEqual('border:1px solid red');
|
||||
|
||||
const statesList = ['a', 'b', 'c', 'd', 'e'];
|
||||
statesList.forEach((stateId) => {
|
||||
expect(states[stateId].classes[0]).toEqual('exampleStyleClass');
|
||||
expect(states.get(stateId).classes[0]).toEqual('exampleStyleClass');
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -202,7 +212,7 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
|
||||
|
||||
const states = stateDiagram.parser.yy.getStates();
|
||||
|
||||
expect(states['Moving'].doc.length).toEqual(1);
|
||||
expect(states.get('Moving').doc.length).toEqual(1);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -65,10 +65,10 @@ const STYLECLASS_SEP = ',';
|
||||
* In the future, this can be replaced with a class common to all diagrams.
|
||||
* ClassDef information = { id: id, styles: [], textStyles: [] }
|
||||
*
|
||||
* @returns {{}}
|
||||
* @returns {Map<string, any>}
|
||||
*/
|
||||
function newClassesList() {
|
||||
return {};
|
||||
return new Map();
|
||||
}
|
||||
|
||||
let direction = DEFAULT_DIAGRAM_DIRECTION;
|
||||
@ -79,8 +79,9 @@ let classes = newClassesList(); // style classes defined by a classDef
|
||||
|
||||
const newDoc = () => {
|
||||
return {
|
||||
/** @type {{ id1: string, id2: string, relationTitle: string }[]} */
|
||||
relations: [],
|
||||
states: {},
|
||||
states: new Map(),
|
||||
documents: {},
|
||||
};
|
||||
};
|
||||
@ -247,9 +248,9 @@ export const addState = function (
|
||||
) {
|
||||
const trimmedId = id?.trim();
|
||||
// add the state if needed
|
||||
if (currentDocument.states[trimmedId] === undefined) {
|
||||
if (!currentDocument.states.has(trimmedId)) {
|
||||
log.info('Adding state ', trimmedId, descr);
|
||||
currentDocument.states[trimmedId] = {
|
||||
currentDocument.states.set(trimmedId, {
|
||||
id: trimmedId,
|
||||
descriptions: [],
|
||||
type,
|
||||
@ -258,13 +259,13 @@ export const addState = function (
|
||||
classes: [],
|
||||
styles: [],
|
||||
textStyles: [],
|
||||
};
|
||||
});
|
||||
} else {
|
||||
if (!currentDocument.states[trimmedId].doc) {
|
||||
currentDocument.states[trimmedId].doc = doc;
|
||||
if (!currentDocument.states.get(trimmedId).doc) {
|
||||
currentDocument.states.get(trimmedId).doc = doc;
|
||||
}
|
||||
if (!currentDocument.states[trimmedId].type) {
|
||||
currentDocument.states[trimmedId].type = type;
|
||||
if (!currentDocument.states.get(trimmedId).type) {
|
||||
currentDocument.states.get(trimmedId).type = type;
|
||||
}
|
||||
}
|
||||
|
||||
@ -280,11 +281,9 @@ export const addState = function (
|
||||
}
|
||||
|
||||
if (note) {
|
||||
currentDocument.states[trimmedId].note = note;
|
||||
currentDocument.states[trimmedId].note.text = common.sanitizeText(
|
||||
currentDocument.states[trimmedId].note.text,
|
||||
getConfig()
|
||||
);
|
||||
const doc2 = currentDocument.states.get(trimmedId);
|
||||
doc2.note = note;
|
||||
doc2.note.text = common.sanitizeText(doc2.note.text, getConfig());
|
||||
}
|
||||
|
||||
if (classes) {
|
||||
@ -321,7 +320,7 @@ export const clear = function (saveCommon) {
|
||||
};
|
||||
|
||||
export const getState = function (id) {
|
||||
return currentDocument.states[id];
|
||||
return currentDocument.states.get(id);
|
||||
};
|
||||
|
||||
export const getStates = function () {
|
||||
@ -459,7 +458,7 @@ export const addRelation = function (item1, item2, title) {
|
||||
};
|
||||
|
||||
export const addDescription = function (id, descr) {
|
||||
const theState = currentDocument.states[id];
|
||||
const theState = currentDocument.states.get(id);
|
||||
const _descr = descr.startsWith(':') ? descr.replace(':', '').trim() : descr;
|
||||
theState.descriptions.push(common.sanitizeText(_descr, getConfig()));
|
||||
};
|
||||
@ -486,10 +485,10 @@ const getDividerId = () => {
|
||||
*/
|
||||
export const addStyleClass = function (id, styleAttributes = '') {
|
||||
// create a new style class object with this id
|
||||
if (classes[id] === undefined) {
|
||||
classes[id] = { id: id, styles: [], textStyles: [] }; // This is a classDef
|
||||
if (!classes.has(id)) {
|
||||
classes.set(id, { id: id, styles: [], textStyles: [] }); // This is a classDef
|
||||
}
|
||||
const foundClass = classes[id];
|
||||
const foundClass = classes.get(id);
|
||||
if (styleAttributes !== undefined && styleAttributes !== null) {
|
||||
styleAttributes.split(STYLECLASS_SEP).forEach((attrib) => {
|
||||
// remove any trailing ;
|
||||
|
@ -12,10 +12,10 @@ describe('State Diagram stateDb', () => {
|
||||
|
||||
stateDb.addStyleClass(newStyleClassId, newStyleClassAttribs);
|
||||
const styleClasses = stateDb.getClasses();
|
||||
expect(styleClasses[newStyleClassId].id).toEqual(newStyleClassId);
|
||||
expect(styleClasses[newStyleClassId].styles.length).toEqual(2);
|
||||
expect(styleClasses[newStyleClassId].styles[0]).toEqual('font-weight:bold');
|
||||
expect(styleClasses[newStyleClassId].styles[1]).toEqual('border:blue');
|
||||
expect(styleClasses.get(newStyleClassId).id).toEqual(newStyleClassId);
|
||||
expect(styleClasses.get(newStyleClassId).styles.length).toEqual(2);
|
||||
expect(styleClasses.get(newStyleClassId).styles[0]).toEqual('font-weight:bold');
|
||||
expect(styleClasses.get(newStyleClassId).styles[1]).toEqual('border:blue');
|
||||
});
|
||||
});
|
||||
|
||||
@ -34,15 +34,15 @@ describe('State Diagram stateDb', () => {
|
||||
|
||||
stateDb.addDescription(testStateId, restOfTheDescription);
|
||||
let states = stateDb.getStates();
|
||||
expect(states[testStateId].descriptions[0]).toEqual(restOfTheDescription);
|
||||
expect(states.get(testStateId).descriptions[0]).toEqual(restOfTheDescription);
|
||||
|
||||
stateDb.addDescription(testStateId, oneLeadingColon);
|
||||
states = stateDb.getStates();
|
||||
expect(states[testStateId].descriptions[1]).toEqual(restOfTheDescription);
|
||||
expect(states.get(testStateId).descriptions[1]).toEqual(restOfTheDescription);
|
||||
|
||||
stateDb.addDescription(testStateId, twoLeadingColons);
|
||||
states = stateDb.getStates();
|
||||
expect(states[testStateId].descriptions[2]).toEqual(`:${restOfTheDescription}`);
|
||||
expect(states.get(testStateId).descriptions[2]).toEqual(`:${restOfTheDescription}`);
|
||||
});
|
||||
|
||||
it('adds each description to the array of descriptions', () => {
|
||||
@ -51,10 +51,10 @@ describe('State Diagram stateDb', () => {
|
||||
stateDb.addDescription(testStateId, 'description 2');
|
||||
|
||||
let states = stateDb.getStates();
|
||||
expect(states[testStateId].descriptions.length).toEqual(3);
|
||||
expect(states[testStateId].descriptions[0]).toEqual('description 0');
|
||||
expect(states[testStateId].descriptions[1]).toEqual('description 1');
|
||||
expect(states[testStateId].descriptions[2]).toEqual('description 2');
|
||||
expect(states.get(testStateId).descriptions.length).toEqual(3);
|
||||
expect(states.get(testStateId).descriptions[0]).toEqual('description 0');
|
||||
expect(states.get(testStateId).descriptions[1]).toEqual('description 1');
|
||||
expect(states.get(testStateId).descriptions[2]).toEqual('description 2');
|
||||
});
|
||||
|
||||
it('sanitizes on the description', () => {
|
||||
@ -63,13 +63,13 @@ describe('State Diagram stateDb', () => {
|
||||
'desc outside the script <script>the description</script>'
|
||||
);
|
||||
let states = stateDb.getStates();
|
||||
expect(states[testStateId].descriptions[0]).toEqual('desc outside the script ');
|
||||
expect(states.get(testStateId).descriptions[0]).toEqual('desc outside the script ');
|
||||
});
|
||||
|
||||
it('adds the description to the state with the given id', () => {
|
||||
stateDb.addDescription(testStateId, 'the description');
|
||||
let states = stateDb.getStates();
|
||||
expect(states[testStateId].descriptions[0]).toEqual('the description');
|
||||
expect(states.get(testStateId).descriptions[0]).toEqual('the description');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -405,7 +405,7 @@ describe('state diagram V2, ', function () {
|
||||
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
|
||||
|
||||
const states = stateDb.getStates();
|
||||
expect(states['Active'].doc[0].id).toEqual('Idle');
|
||||
expect(states.get('Active').doc[0].id).toEqual('Idle');
|
||||
|
||||
const rels = stateDb.getRelations();
|
||||
const rel_Inactive_Idle = rels.find((rel) => rel.id1 === 'Inactive' && rel.id2 === 'Idle');
|
||||
|
@ -5,7 +5,7 @@ import { render } from '../../dagre-wrapper/index.js';
|
||||
import { log } from '../../logger.js';
|
||||
import { configureSvgSize } from '../../setupGraphViewbox.js';
|
||||
import common from '../common/common.js';
|
||||
import utils from '../../utils.js';
|
||||
import utils, { getEdgeId } from '../../utils.js';
|
||||
|
||||
import {
|
||||
DEFAULT_DIAGRAM_DIRECTION,
|
||||
@ -81,7 +81,7 @@ export const setConf = function (cnf) {
|
||||
*
|
||||
* @param {string} text - the diagram text to be parsed
|
||||
* @param diagramObj
|
||||
* @returns {Record<string, import('../../diagram-api/types.js').DiagramStyleClassDef>} ClassDef styles (a Map with keys = strings, values = )
|
||||
* @returns {Map<string, import('../../diagram-api/types.js').DiagramStyleClassDef>} ClassDef styles (a Map with keys = strings, values = DiagramStyleClassDef)
|
||||
*/
|
||||
export const getClasses = function (text, diagramObj) {
|
||||
diagramObj.db.extract(diagramObj.db.getRootDocV2());
|
||||
@ -129,13 +129,13 @@ export function stateDomId(itemId = '', counter = 0, type = '', typeSpacer = DOM
|
||||
* @param g - graph
|
||||
* @param {object} parent
|
||||
* @param {object} parsedItem - parsed statement item
|
||||
* @param {object[]} diagramStates - the list of all known states for the diagram
|
||||
* @param {Map<string, object>} diagramStates - the list of all known states for the diagram
|
||||
* @param {object} diagramDb
|
||||
* @param {boolean} altFlag - for clusters, add the "statediagram-cluster-alt" CSS class
|
||||
*/
|
||||
const setupNode = (g, parent, parsedItem, diagramStates, diagramDb, altFlag) => {
|
||||
const itemId = parsedItem.id;
|
||||
const classStr = getClassesFromDbInfo(diagramStates[itemId]);
|
||||
const classStr = getClassesFromDbInfo(diagramStates.get(itemId));
|
||||
|
||||
if (itemId !== 'root') {
|
||||
let shape = SHAPE_STATE;
|
||||
@ -252,7 +252,6 @@ const setupNode = (g, parent, parsedItem, diagramStates, diagramDb, altFlag) =>
|
||||
type: 'group',
|
||||
padding: 0, //getConfig().flowchart.padding
|
||||
};
|
||||
graphItemCount++;
|
||||
|
||||
const parentNodeId = itemId + PARENT_ID;
|
||||
g.setNode(parentNodeId, groupData);
|
||||
@ -270,17 +269,23 @@ const setupNode = (g, parent, parsedItem, diagramStates, diagramDb, altFlag) =>
|
||||
from = noteData.id;
|
||||
to = itemId;
|
||||
}
|
||||
|
||||
g.setEdge(from, to, {
|
||||
arrowhead: 'none',
|
||||
arrowType: '',
|
||||
style: G_EDGE_STYLE,
|
||||
labelStyle: '',
|
||||
id: getEdgeId(from, to, {
|
||||
counter: graphItemCount,
|
||||
}),
|
||||
classes: CSS_EDGE_NOTE_EDGE,
|
||||
arrowheadStyle: G_EDGE_ARROWHEADSTYLE,
|
||||
labelpos: G_EDGE_LABELPOS,
|
||||
labelType: G_EDGE_LABELTYPE,
|
||||
thickness: G_EDGE_THICKNESS,
|
||||
});
|
||||
|
||||
graphItemCount++;
|
||||
} else {
|
||||
g.setNode(itemId, nodeData);
|
||||
}
|
||||
@ -303,7 +308,7 @@ const setupNode = (g, parent, parsedItem, diagramStates, diagramDb, altFlag) =>
|
||||
* @param g
|
||||
* @param parentParsedItem - parsed Item that is the parent of this document (doc)
|
||||
* @param doc - the document to set up; it is a list of parsed statements
|
||||
* @param {object[]} diagramStates - the list of all known states for the diagram
|
||||
* @param {Map<string, object>} diagramStates - the list of all known states for the diagram
|
||||
* @param diagramDb
|
||||
* @param {boolean} altFlag
|
||||
* @todo This duplicates some of what is done in stateDb.js extract method
|
||||
@ -324,7 +329,9 @@ const setupDoc = (g, parentParsedItem, doc, diagramStates, diagramDb, altFlag) =
|
||||
setupNode(g, parentParsedItem, item.state1, diagramStates, diagramDb, altFlag);
|
||||
setupNode(g, parentParsedItem, item.state2, diagramStates, diagramDb, altFlag);
|
||||
const edgeData = {
|
||||
id: 'edge' + graphItemCount,
|
||||
id: getEdgeId(item.state1.id, item.state2.id, {
|
||||
counter: graphItemCount,
|
||||
}),
|
||||
arrowhead: 'normal',
|
||||
arrowTypeEnd: 'arrow_barb',
|
||||
style: G_EDGE_STYLE,
|
||||
|
BIN packages/mermaid/src/docs/config/img/mathMLDifferences.png (new binary file, 15 KiB; contents not shown)
@ -60,3 +60,13 @@ Example with legacy mode enabled (the latest version of KaTeX's stylesheet can b
|
||||
</body>
|
||||
</html>
|
||||
```
|
||||
|
||||
## Handling Rendering Differences
|
||||
|
||||
Due to differences between default fonts across operating systems and between browsers' MathML implementations, rendering can be inconsistent across platforms. If consistent results are important, or if the best rendered output is desired, `forceLegacyMathML` can be enabled in the config.
|
||||
|
||||
This option always uses KaTeX's stylesheet, rather than using it only when MathML is not supported (as `legacyMathML` does). Note that only `forceLegacyMathML` needs to be set.
|
||||
|
||||
If including KaTeX's stylesheet is not a concern, enabling this option is recommended to avoid scenarios where no MathML implementation within a browser provides the desired output (as seen below).
|
||||
|
||||
![Image showing differences between Browsers](img/mathMLDifferences.png)
|
||||
|
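For reference, a minimal initialization sketch assuming the standard `mermaid.initialize()` entry point; the option name comes from the text above, everything else is illustrative:

```ts
import mermaid from 'mermaid';

// Force the KaTeX fallback everywhere so math renders the same across
// browsers, regardless of their MathML support. Only forceLegacyMathML
// needs to be set; it implies the legacy behaviour.
mermaid.initialize({
  startOnLoad: true,
  forceLegacyMathML: true,
});
```

As with `legacyMathML`, the KaTeX stylesheet still has to be included on the page for the fallback to render correctly.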
@ -235,6 +235,8 @@ Communication tools and platforms
|
||||
|
||||
### Other
|
||||
|
||||
- [Astro](https://astro.build/)
|
||||
- [Adding diagrams to your Astro site with MermaidJS and Playwright](https://agramont.net/blog/diagraming-with-mermaidjs-astro/)
|
||||
- [Bisheng](https://www.npmjs.com/package/bisheng)
|
||||
- [bisheng-plugin-mermaid](https://github.com/yct21/bisheng-plugin-mermaid)
|
||||
- [Blazorade Mermaid: Render Mermaid diagrams in Blazor applications](https://github.com/Blazorade/Blazorade-Mermaid/wiki)
|
||||
@ -244,6 +246,7 @@ Communication tools and platforms
|
||||
- [Jekyll](https://jekyllrb.com/)
|
||||
- [jekyll-mermaid](https://rubygems.org/gems/jekyll-mermaid)
|
||||
- [jekyll-mermaid-diagrams](https://github.com/fuzhibo/jekyll-mermaid-diagrams)
|
||||
- [MarkChart: Preview Mermaid diagrams on macOS](https://markchart.app/)
|
||||
- [mermaid-isomorphic](https://github.com/remcohaszing/mermaid-isomorphic)
|
||||
- [mermaid-server: Generate diagrams using a HTTP request](https://github.com/TomWright/mermaid-server)
|
||||
- [NiceGUI: Let any browser be the frontend of your Python code](https://nicegui.io) ✅
|
||||
|
@ -1,5 +1,9 @@
|
||||
# Mermaid Chart
|
||||
|
||||
The Future of Diagramming & Visual Collaboration
|
||||
|
||||
Try the Ultimate AI, Mermaid, and Visual Diagramming Suite by creating an account at [Mermaid Chart](https://www.mermaidchart.com/app/sign-up).
|
||||
|
||||
<br />
|
||||
|
||||
<a href="https://www.producthunt.com/posts/mermaid-chart?utm_source=badge-featured&utm_medium=badge&utm_souce=badge-mermaid-chart" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?post_id=416671&theme=light" alt="Mermaid Chart - A smarter way to create diagrams | Product Hunt" style="width: 250px; height: 54px;" width="250" height="54" /></a>
|
||||
@ -12,22 +16,26 @@
|
||||
|
||||
- **Editor** - A web based editor for creating and editing Mermaid diagrams.
|
||||
|
||||
- **Presentation** - A presentation mode for viewing Mermaid diagrams in a slideshow format.
|
||||
- **Visual Editor** - The Visual Editor enables users of all skill levels to create diagrams easily and efficiently, with both GUI and code-based editing options.
|
||||
|
||||
- **Collaboration** - A web based collaboration feature for multi-user editing on Mermaid diagrams in real-time (Pro plan).
|
||||
- **AI Chat** - Use our embedded AI Chat to generate diagrams from natural language descriptions.
|
||||
|
||||
- **Plugins** - A plugin system for extending the functionality of Mermaid.
|
||||
|
||||
Plugins are available for:
|
||||
Official Mermaid Chart plugins:
|
||||
|
||||
- [ChatGPT](https://docs.mermaidchart.com/plugins/chatgpt)
|
||||
- [Mermaid Chart GPT](https://chat.openai.com/g/g-1IRFKwq4G-mermaid-chart)
|
||||
- [Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=MermaidChart.vscode-mermaid-chart)
|
||||
- [JetBrains IDE](https://plugins.jetbrains.com/plugin/23043-mermaid-chart)
|
||||
- [Microsoft PowerPoint and Word](https://appsource.microsoft.com/en-us/product/office/WA200006214?tab=Overview)
|
||||
- [Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=MermaidChart.vscode-mermaid-chart)
|
||||
|
||||
- **AI diagramming** - A feature for generating Mermaid diagrams from text using AI (Pro plan).
|
||||
Visit our [Plugins](https://www.mermaidchart.com/plugins) page for more information.
|
||||
|
||||
- **More** - To learn more, visit our [Product](https://www.mermaidchart.com/product) page.
|
||||
- **Collaboration** - A web-based collaboration feature for multi-user editing of Mermaid diagrams in real time (Pro and Enterprise plans).
|
||||
|
||||
- **Comments** - Enhance collaboration by adding comments to diagrams.
|
||||
|
||||
- **Presentations** - A presentation mode for viewing Mermaid diagrams in a slideshow format.
|
||||
|
||||
## Plans
|
||||
|
||||
@ -37,11 +45,9 @@
|
||||
|
||||
- **Enterprise** - A paid plan for enterprise use that includes all Pro features, and more.
|
||||
|
||||
## Access
|
||||
To learn more, visit our [Pricing](https://mermaidchart.com/pricing) page.
|
||||
|
||||
Sign up for a free account at [Mermaid Chart](https://www.mermaidchart.com/app/sign-up).
|
||||
|
||||
Mermaid Chart is currently offering a 14-day free trial of our newly-launched Pro tier. To learn more, visit our [Pricing](https://mermaidchart.com/pricing) page.
|
||||
Mermaid Chart is currently offering a 14-day free trial on our Pro and Enterprise tiers. Sign up for a free account at [Mermaid Chart](https://www.mermaidchart.com/app/sign-up).
|
||||
|
||||
## Mermaid JS contributions
|
||||
|
||||
|
@ -146,7 +146,7 @@ For a list of Mermaid Plugins and Integrations, visit the [Integrations page](..
|
||||
|
||||
Mermaid Chart plugins are available for:
|
||||
|
||||
- [ChatGPT](https://docs.mermaidchart.com/plugins/chatgpt)
|
||||
- [ChatGPT](https://docs.mermaidchart.com/plugins/mermaid-chart-gpt)
|
||||
- [JetBrains IDE](https://docs.mermaidchart.com/plugins/jetbrains-ide)
|
||||
- [Microsoft PowerPoint](https://docs.mermaidchart.com/plugins/microsoft-powerpoint)
|
||||
- [Microsoft Word](https://docs.mermaidchart.com/plugins/microsoft-word)
|
||||
|
@ -1,5 +1,17 @@
|
||||
# Blog
|
||||
|
||||
## [How to Choose the Right Documentation Software](https://www.mermaidchart.com/blog/posts/how-to-choose-the-right-documentation-software/)
|
||||
|
||||
7 May 2024 · 5 mins
|
||||
|
||||
How to Choose the Right Documentation Software. Reliable and efficient documentation software is crucial in the fast-paced world of software development.
|
||||
|
||||
## [AI in software diagramming: What trends will define the future?](https://www.mermaidchart.com/blog/posts/ai-in-software-diagramming/)
|
||||
|
||||
24 April 2024 · 5 mins
|
||||
|
||||
Artificial intelligence (AI) tools are changing the way developers work.
|
||||
|
||||
## [Mermaid Chart Unveils Visual Editor for Sequence Diagrams](https://www.mermaidchart.com/blog/posts/mermaid-chart-unveils-visual-editor-for-sequence-diagrams/)
|
||||
|
||||
8 April 2024 · 5 mins
|
||||
|
@ -34,7 +34,7 @@
|
||||
"unplugin-vue-components": "^0.26.0",
|
||||
"vite": "^5.0.0",
|
||||
"vite-plugin-pwa": "^0.19.7",
|
||||
"vitepress": "1.1.0",
|
||||
"vitepress": "1.1.4",
|
||||
"workbox-window": "^7.0.0"
|
||||
}
|
||||
}
|
||||
|
@ -567,7 +567,7 @@ Examples of tooltip usage below:
|
||||
|
||||
```html
|
||||
<script>
|
||||
const callback = function () {
|
||||
window.callback = function () {
|
||||
alert('A callback was triggered');
|
||||
};
|
||||
</script>
|
||||
@ -588,7 +588,7 @@ flowchart LR
|
||||
|
||||
> **Success** The tooltip functionality and the ability to link to URLs are available from version 0.5.2.
|
||||
|
||||
?> Due to limitations with how Docsify handles JavaScript callback functions, an alternate working demo for the above code can be viewed at [this jsfiddle](https://jsfiddle.net/Ogglas/2o73vdez/7).
|
||||
?> Due to limitations with how Docsify handles JavaScript callback functions, an alternate working demo for the above code can be viewed at [this jsfiddle](https://jsfiddle.net/yk4h7qou/2/).
|
||||
|
||||
Links are opened in the same browser tab/window by default. It is possible to change this by adding a link target to the click definition (`_self`, `_blank`, `_parent` and `_top` are supported):
|
||||
|
||||
@ -620,7 +620,7 @@ Beginner's tip—a full example using interactive links in a html context:
|
||||
</pre>
|
||||
|
||||
<script>
|
||||
const callback = function () {
|
||||
window.callback = function () {
|
||||
alert('A callback was triggered');
|
||||
};
|
||||
const config = {
|
||||
|
@ -295,7 +295,7 @@ describe('mermaidAPI', () => {
    expect(styles).toMatch(/^\ndefault(.*)/);
  });
  it('gets the fontFamily from the config', () => {
    const styles = createCssStyles(mocked_config_with_htmlLabels, {});
    const styles = createCssStyles(mocked_config_with_htmlLabels, new Map());
    expect(styles).toMatch(/(.*)\n:root { --mermaid-font-family: serif(.*)/);
  });
  it('gets the alt fontFamily from the config', () => {
@ -375,7 +375,7 @@ describe('mermaidAPI', () => {
    // @todo TODO Can't figure out how to spy on the cssImportantStyles method.
    // That would be a much better approach than manually checking the result

    const styles = createCssStyles(mocked_config, classDefs);
    const styles = createCssStyles(mocked_config, new Map(Object.entries(classDefs)));
    htmlElements.forEach((htmlElement) => {
      expect_styles_matchesHtmlElements(styles, htmlElement);
    });
@ -413,7 +413,10 @@ describe('mermaidAPI', () => {
  it('creates CSS styles for every style and textStyle in every classDef', () => {
    // TODO Can't figure out how to spy on the cssImportantStyles method. That would be a much better approach than manually checking the result.

    const styles = createCssStyles(mocked_config_no_htmlLabels, classDefs);
    const styles = createCssStyles(
      mocked_config_no_htmlLabels,
      new Map(Object.entries(classDefs))
    );
    htmlElements.forEach((htmlElement) => {
      expect_styles_matchesHtmlElements(styles, htmlElement);
    });
@ -437,7 +440,7 @@ describe('mermaidAPI', () => {
  it('gets the css styles created', () => {
    // @todo TODO if a single function in the module can be mocked, do it for createCssStyles and mock the results.

    createUserStyles(mockConfig, 'flowchart-v2', { classDef1 }, 'someId');
    createUserStyles(mockConfig, 'flowchart-v2', new Map([['classDef1', classDef1]]), 'someId');
    const expectedStyles =
      '\ndefault' +
      '\n.classDef1 > * { style1-1 !important; }' +
@ -448,12 +451,12 @@ describe('mermaidAPI', () => {
  });

  it('calls getStyles to get css for all graph, user css styles, and config theme variables', () => {
    createUserStyles(mockConfig, 'someDiagram', {}, 'someId');
    createUserStyles(mockConfig, 'someDiagram', new Map(), 'someId');
    expect(getStyles).toHaveBeenCalled();
  });

  it('returns the result of compiling, stringifying, and serializing the css code with stylis', () => {
    const result = createUserStyles(mockConfig, 'someDiagram', {}, 'someId');
    const result = createUserStyles(mockConfig, 'someDiagram', new Map(), 'someId');
    expect(compile).toHaveBeenCalled();
    expect(serialize).toHaveBeenCalled();
    expect(result).toEqual('stylis serialized css');
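The spec changes above all follow one pattern: call sites that previously passed a plain object of class definitions now hand `createCssStyles`/`createUserStyles` a `Map`. A small hedged sketch of that conversion follows; the `classDefs` literal and the import path are assumptions for illustration, not the fixtures from the test file.

```typescript
import type { DiagramStyleClassDef } from './diagram-api/types.js';

// Hypothetical plain-object fixture, similar in shape to the ones in the spec.
const classDefs: Record<string, DiagramStyleClassDef> = {
  classDef1: { id: 'classDef1', styles: ['fill:#f9f'], textStyles: [] },
};

// Object.entries() turns the old Record into the Map the updated API expects.
const classDefMap = new Map(Object.entries(classDefs));
console.log(classDefMap.size); // 1
```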
@ -152,7 +152,7 @@ export const cssImportantStyles = (
 */
export const createCssStyles = (
  config: MermaidConfig,
  classDefs: Record<string, DiagramStyleClassDef> | null | undefined = {}
  classDefs: Map<string, DiagramStyleClassDef> | null | undefined = new Map()
): string => {
  let cssStyles = '';

@ -171,7 +171,7 @@ export const createCssStyles = (
  }

  // classDefs defined in the diagram text
  if (!isEmpty(classDefs)) {
  if (classDefs instanceof Map) {
    const htmlLabels = config.htmlLabels || config.flowchart?.htmlLabels; // TODO why specifically check the Flowchart diagram config?

    const cssHtmlElements = ['> *', 'span']; // TODO make a constant
@ -180,8 +180,7 @@ export const createCssStyles = (
    const cssElements = htmlLabels ? cssHtmlElements : cssShapeElements;

    // create the CSS styles needed for each styleClass definition and css element
    for (const classId in classDefs) {
      const styleClassDef = classDefs[classId];
    classDefs.forEach((styleClassDef) => {
      // create the css styles for each cssElement and the styles (only if there are styles)
      if (!isEmpty(styleClassDef.styles)) {
        cssElements.forEach((cssElement) => {
@ -192,7 +191,7 @@ export const createCssStyles = (
      if (!isEmpty(styleClassDef.textStyles)) {
        cssStyles += cssImportantStyles(styleClassDef.id, 'tspan', styleClassDef.textStyles);
      }
    }
    });
  }
  return cssStyles;
};
@ -200,7 +199,7 @@ export const createCssStyles = (
export const createUserStyles = (
  config: MermaidConfig,
  graphType: string,
  classDefs: Record<string, DiagramStyleClassDef> | undefined,
  classDefs: Map<string, DiagramStyleClassDef> | undefined,
  svgId: string
): string => {
  const userCSSstyles = createCssStyles(config, classDefs);
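Taken together, the hunks above move `createCssStyles` and `createUserStyles` from `Record<string, DiagramStyleClassDef>` to `Map<string, DiagramStyleClassDef>`. A hedged caller-side sketch under the new signature follows; the import paths, config values, and the class definition are assumptions for illustration.

```typescript
import { createUserStyles } from './mermaidAPI.js';
import type { MermaidConfig } from './config.type.js';
import type { DiagramStyleClassDef } from './diagram-api/types.js';

const config: MermaidConfig = { htmlLabels: true };

// classDefs parsed from the diagram text, now keyed in a Map rather than a Record.
const classDefs = new Map<string, DiagramStyleClassDef>([
  ['highlight', { id: 'highlight', styles: ['fill:gold'], textStyles: ['font-weight:bold'] }],
]);

// Same call shape as before; the Map is passed straight through to createCssStyles.
const css = createUserStyles(config, 'flowchart-v2', classDefs, 'my-svg-id');
console.log(css.length > 0);
```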
@ -199,6 +199,13 @@ properties:
      fall back to legacy rendering for KaTeX.
    type: boolean
    default: false
  forceLegacyMathML:
    description: |
      This option forces Mermaid to rely on KaTeX's own stylesheet for rendering MathML. Due to differences between OS
      fonts and browser's MathML implementation, this option is recommended if consistent rendering is important.
      If set to true, ignores legacyMathML.
    type: boolean
    default: false
  deterministicIds:
    description: |
      This option controls if the generated ids of nodes in the SVG are
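A short hedged usage sketch for the `forceLegacyMathML` option added in the schema hunk above; the surrounding initialize options are illustrative.

```typescript
import mermaid from 'mermaid';

// Rely on KaTeX's own stylesheet instead of the browser's MathML implementation,
// trading native rendering for consistent output across OS fonts and browsers.
// When this is true, legacyMathML is ignored.
mermaid.initialize({
  startOnLoad: true,
  forceLegacyMathML: true,
});
```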
@ -929,3 +929,19 @@ export const decodeEntities = function (text: string): string {
export const isString = (value: unknown): value is string => {
  return typeof value === 'string';
};

export const getEdgeId = (
  from: string,
  to: string,
  {
    counter = 0,
    prefix,
    suffix,
  }: {
    counter?: number;
    prefix?: string;
    suffix?: string;
  }
) => {
  return `${prefix ? `${prefix}_` : ''}${from}_${to}_${counter}${suffix ? `_${suffix}` : ''}`;
};
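The new `getEdgeId` helper above only concatenates its parts with underscores; a quick hedged sketch of the ids it produces (the values and the import path are invented for illustration):

```typescript
import { getEdgeId } from './utils.js';

// No prefix or suffix, counter defaults to 0 -> "A_B_0"
const plain = getEdgeId('A', 'B', {});

// Prefix and suffix are joined with underscores -> "L_A_B_2_cyclic"
const decorated = getEdgeId('A', 'B', { counter: 2, prefix: 'L', suffix: 'cyclic' });

console.log(plain, decorated);
```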
20087 pnpm-lock.yaml generated
File diff suppressed because it is too large