diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..b6e4761
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,129 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
diff --git a/.idea/.gitignore b/.idea/.gitignore
new file mode 100644
index 0000000..26d3352
--- /dev/null
+++ b/.idea/.gitignore
@@ -0,0 +1,3 @@
+# Default ignored files
+/shelf/
+/workspace.xml
diff --git a/.idea/corinne-3-main.iml b/.idea/corinne-3-main.iml
new file mode 100644
index 0000000..d0876a7
--- /dev/null
+++ b/.idea/corinne-3-main.iml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml
new file mode 100644
index 0000000..105ce2d
--- /dev/null
+++ b/.idea/inspectionProfiles/profiles_settings.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/misc.xml b/.idea/misc.xml
new file mode 100644
index 0000000..dc9ea49
--- /dev/null
+++ b/.idea/misc.xml
@@ -0,0 +1,4 @@
+
+
+
+
\ No newline at end of file
diff --git a/.idea/modules.xml b/.idea/modules.xml
new file mode 100644
index 0000000..1065f48
--- /dev/null
+++ b/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.metadata/.lock b/.metadata/.lock
new file mode 100644
index 0000000..e69de29
diff --git a/.metadata/.mylyn/repositories.xml.zip b/.metadata/.mylyn/repositories.xml.zip
new file mode 100644
index 0000000..6cb3707
Binary files /dev/null and b/.metadata/.mylyn/repositories.xml.zip differ
diff --git a/.metadata/.plugins/org.eclipse.core.resources/.projects/.org.eclipse.egit.core.cmp/.location b/.metadata/.plugins/org.eclipse.core.resources/.projects/.org.eclipse.egit.core.cmp/.location
new file mode 100644
index 0000000..a9dfc7a
Binary files /dev/null and b/.metadata/.plugins/org.eclipse.core.resources/.projects/.org.eclipse.egit.core.cmp/.location differ
diff --git a/.metadata/.plugins/org.eclipse.core.resources/.projects/corinne-3-main/.location b/.metadata/.plugins/org.eclipse.core.resources/.projects/corinne-3-main/.location
new file mode 100644
index 0000000..7b9bb0d
Binary files /dev/null and b/.metadata/.plugins/org.eclipse.core.resources/.projects/corinne-3-main/.location differ
diff --git a/.metadata/.plugins/org.eclipse.core.resources/.projects/corinne-3-main/.markers b/.metadata/.plugins/org.eclipse.core.resources/.projects/corinne-3-main/.markers
new file mode 100644
index 0000000..c5dab0f
Binary files /dev/null and b/.metadata/.plugins/org.eclipse.core.resources/.projects/corinne-3-main/.markers differ
diff --git a/.metadata/.plugins/org.eclipse.core.resources/.projects/corinne-3-main/com.python.pydev.analysis/AdditionalProjectInterpreterInfo.pydevinfo b/.metadata/.plugins/org.eclipse.core.resources/.projects/corinne-3-main/com.python.pydev.analysis/AdditionalProjectInterpreterInfo.pydevinfo
new file mode 100644
index 0000000..6265878
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.core.resources/.projects/corinne-3-main/com.python.pydev.analysis/AdditionalProjectInterpreterInfo.pydevinfo
@@ -0,0 +1,13 @@
+-- VERSION_5
+-- START DISKCACHE_3
+/home/ubuntu/eclipse-workspace/GitHub/corinne-3/.metadata/.plugins/org.eclipse.core.resources/.projects/corinne-3-main/com.python.pydev.analysis/v2_indexcache
+-- END DISKCACHE
+-- START DICTIONARY
+0
+-- END DICTIONARY
+-- START TREE 1
+0
+-- END TREE
+-- START TREE 2
+0
+-- END TREE
diff --git a/.metadata/.plugins/org.eclipse.core.resources/.projects/corinne-3-main/org.python.pydev/v1_astmanager/modulesKeys b/.metadata/.plugins/org.eclipse.core.resources/.projects/corinne-3-main/org.python.pydev/v1_astmanager/modulesKeys
new file mode 100644
index 0000000..ea1c7d7
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.core.resources/.projects/corinne-3-main/org.python.pydev/v1_astmanager/modulesKeys
@@ -0,0 +1 @@
+MODULES_MANAGER_V3
diff --git a/.metadata/.plugins/org.eclipse.core.resources/.projects/corinne-3-main/org.python.pydev/v1_astmanager/pythonpath b/.metadata/.plugins/org.eclipse.core.resources/.projects/corinne-3-main/org.python.pydev/v1_astmanager/pythonpath
new file mode 100644
index 0000000..e69de29
diff --git a/.metadata/.plugins/org.eclipse.core.resources/.root/.indexes/history.version b/.metadata/.plugins/org.eclipse.core.resources/.root/.indexes/history.version
new file mode 100644
index 0000000..25cb955
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.core.resources/.root/.indexes/history.version
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/.metadata/.plugins/org.eclipse.core.resources/.root/.indexes/properties.index b/.metadata/.plugins/org.eclipse.core.resources/.root/.indexes/properties.index
new file mode 100644
index 0000000..8b08f80
Binary files /dev/null and b/.metadata/.plugins/org.eclipse.core.resources/.root/.indexes/properties.index differ
diff --git a/.metadata/.plugins/org.eclipse.core.resources/.root/.indexes/properties.version b/.metadata/.plugins/org.eclipse.core.resources/.root/.indexes/properties.version
new file mode 100644
index 0000000..6b2aaa7
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.core.resources/.root/.indexes/properties.version
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/.metadata/.plugins/org.eclipse.core.resources/.root/1.tree b/.metadata/.plugins/org.eclipse.core.resources/.root/1.tree
new file mode 100644
index 0000000..dda0641
Binary files /dev/null and b/.metadata/.plugins/org.eclipse.core.resources/.root/1.tree differ
diff --git a/.metadata/.plugins/org.eclipse.core.resources/.safetable/org.eclipse.core.resources b/.metadata/.plugins/org.eclipse.core.resources/.safetable/org.eclipse.core.resources
new file mode 100644
index 0000000..db8cd85
Binary files /dev/null and b/.metadata/.plugins/org.eclipse.core.resources/.safetable/org.eclipse.core.resources differ
diff --git a/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.core.resources.prefs b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.core.resources.prefs
new file mode 100644
index 0000000..30841eb
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.core.resources.prefs
@@ -0,0 +1,3 @@
+eclipse.preferences.version=1
+encoding=UTF-8
+version=1
diff --git a/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.debug.ui.prefs b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.debug.ui.prefs
new file mode 100644
index 0000000..d431715
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.debug.ui.prefs
@@ -0,0 +1,3 @@
+eclipse.preferences.version=1
+org.eclipse.debug.ui.PREF_LAUNCH_PERSPECTIVES=\n\n
+preferredTargets=org.eclipse.lsp4e.debug.toggleBreakpointTarget\:org.eclipse.lsp4e.debug.toggleBreakpointTarget|
diff --git a/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.jdt.ui.prefs b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.jdt.ui.prefs
new file mode 100644
index 0000000..869eef7
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.jdt.ui.prefs
@@ -0,0 +1,10 @@
+content_assist_proposals_background=255,255,255
+content_assist_proposals_foreground=0,0,0
+eclipse.preferences.version=1
+org.eclipse.jdt.internal.ui.navigator.layout=2
+org.eclipse.jdt.internal.ui.navigator.librariesnode=true
+org.eclipse.jdt.ui.formatterprofiles.version=23
+spelling_locale_initialized=true
+typefilter_migrated_2=true
+useAnnotationsPrefPage=true
+useQuickDiffPrefPage=true
diff --git a/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.jst.j2ee.webservice.ui.prefs b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.jst.j2ee.webservice.ui.prefs
new file mode 100644
index 0000000..553bb96
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.jst.j2ee.webservice.ui.prefs
@@ -0,0 +1,2 @@
+areThereWebServices=false
+eclipse.preferences.version=1
diff --git a/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.m2e.discovery.prefs b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.m2e.discovery.prefs
new file mode 100644
index 0000000..67b1d96
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.m2e.discovery.prefs
@@ -0,0 +1,2 @@
+eclipse.preferences.version=1
+org.eclipse.m2e.discovery.pref.projects=
diff --git a/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.mylyn.context.core.prefs b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.mylyn.context.core.prefs
new file mode 100644
index 0000000..43e97e4
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.mylyn.context.core.prefs
@@ -0,0 +1,2 @@
+eclipse.preferences.version=1
+mylyn.attention.migrated=true
diff --git a/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.mylyn.monitor.ui.prefs b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.mylyn.monitor.ui.prefs
new file mode 100644
index 0000000..8d462a6
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.mylyn.monitor.ui.prefs
@@ -0,0 +1,2 @@
+eclipse.preferences.version=1
+org.eclipse.mylyn.monitor.activity.tracking.enabled.checked=true
diff --git a/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.mylyn.tasks.ui.prefs b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.mylyn.tasks.ui.prefs
new file mode 100644
index 0000000..5330e43
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.mylyn.tasks.ui.prefs
@@ -0,0 +1,4 @@
+eclipse.preferences.version=1
+migrated.task.repositories.secure.store=true
+org.eclipse.mylyn.tasks.ui.filters.nonmatching=true
+org.eclipse.mylyn.tasks.ui.filters.nonmatching.encouraged=true
diff --git a/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.ui.ide.prefs b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.ui.ide.prefs
new file mode 100644
index 0000000..db244cd
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.ui.ide.prefs
@@ -0,0 +1,4 @@
+eclipse.preferences.version=1
+platformState=1694372752304
+quickStart=false
+tipsAndTricks=true
diff --git a/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.ui.navigator.prefs b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.ui.navigator.prefs
new file mode 100644
index 0000000..c51537f
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.ui.navigator.prefs
@@ -0,0 +1,2 @@
+eclipse.preferences.version=1
+org.eclipse.ui.navigator.ProjectExplorer.filterActivation=\:org.eclipse.jdt.java.ui.filters.HidePackageDeclaration\:org.eclipse.jdt.java.ui.filters.HideOutputFolder\:org.eclipse.buildship.ui.navigator.filter.gradle.subProject\:org.eclipse.ui.navigator.resources.nested.HideTopLevelProjectIfNested\:org.python.pydev.navigator.filters.customFilters\:org.python.pydev.navigator.filters.endsWithClass\:org.python.pydev.navigator.filters.hidePyoFiles\:org.python.pydev.navigator.filters.Imports\:org.eclipse.buildship.ui.navigator.filter.gradle.buildfolder\:org.eclipse.jdt.java.ui.filters.HideEmptyInnerPackages\:org.eclipse.jst.j2ee.navigator.ui.filters.jetemitters\:org.eclipse.jdt.java.ui.filters.HideInnerClassFiles\:org.eclipse.ui.navigator.resources.filters.startsWithDot\:org.python.pydev.navigator.filters.hidePycFiles\:org.eclipse.jdt.java.ui.filters.HideEmptyLibraryContainers\:org.eclipse.jdt.java.ui.filters.HideImportDeclaration\:org.eclipse.jdt.java.ui.filters.HideSyntheticMembers\:org.eclipse.mylyn.tasks.ui.navigator.filters.tasks\:org.eclipse.ui.navigator.resources.nested.HideFolderWhenProjectIsShownAsNested\:org.python.pydev.navigator.filters.hidePyTildaFiles\:org.python.pydev.navigator.filters.hidePycacheFolders\:
diff --git a/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.ui.prefs b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.ui.prefs
new file mode 100644
index 0000000..08076f2
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.ui.prefs
@@ -0,0 +1,2 @@
+eclipse.preferences.version=1
+showIntro=false
diff --git a/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.ui.workbench.prefs b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.ui.workbench.prefs
new file mode 100644
index 0000000..fa0dc3c
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.ui.workbench.prefs
@@ -0,0 +1,12 @@
+//org.eclipse.ui.commands/state/org.eclipse.ui.navigator.resources.nested.changeProjectPresentation/org.eclipse.ui.commands.radioState=false
+//org.eclipse.ui.commands/state/org.eclipse.wst.xml.views.XPathView.processor.xpathprocessor/org.eclipse.ui.commands.radioState=xpath10
+PLUGINS_NOT_ACTIVATED_ON_STARTUP=;org.eclipse.m2e.discovery;
+eclipse.preferences.version=1
+org.eclipse.ui.workbench.ACTIVE_NOFOCUS_TAB_BG_END=255,255,255
+org.eclipse.ui.workbench.ACTIVE_NOFOCUS_TAB_BG_START=255,255,255
+org.eclipse.ui.workbench.ACTIVE_NOFOCUS_TAB_TEXT_COLOR=16,16,16
+org.eclipse.ui.workbench.ACTIVE_TAB_BG_END=255,255,255
+org.eclipse.ui.workbench.ACTIVE_TAB_BG_START=255,255,255
+org.eclipse.ui.workbench.ACTIVE_TAB_TEXT_COLOR=61,61,61
+org.eclipse.ui.workbench.INACTIVE_TAB_BG_END=246,245,244
+org.eclipse.ui.workbench.INACTIVE_TAB_BG_START=246,245,244
diff --git a/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.urischeme.prefs b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.urischeme.prefs
new file mode 100644
index 0000000..b48c0c4
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.urischeme.prefs
@@ -0,0 +1,2 @@
+eclipse.preferences.version=1
+processedSchemes=,eclipse+mpc,eclipse+command
diff --git a/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.python.pydev.prefs b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.python.pydev.prefs
new file mode 100644
index 0000000..656dde0
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.python.pydev.prefs
@@ -0,0 +1,2 @@
+INTERPRETERS_CHECKED_ONCE=true
+eclipse.preferences.version=1
diff --git a/.metadata/.plugins/org.eclipse.e4.workbench/workbench.xmi b/.metadata/.plugins/org.eclipse.e4.workbench/workbench.xmi
new file mode 100644
index 0000000..72593b2
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.e4.workbench/workbench.xmi
@@ -0,0 +1,3240 @@
+
+
+
+ activeSchemeId:org.eclipse.ui.defaultAcceleratorConfiguration
+
+
+
+
+
+
+
+ topLevel
+ shellMaximized
+
+
+
+
+ persp.actionSet:org.eclipse.mylyn.doc.actionSet
+ persp.actionSet:org.eclipse.mylyn.tasks.ui.navigation
+ persp.actionSet:org.eclipse.ui.cheatsheets.actionSet
+ persp.actionSet:org.eclipse.search.searchActionSet
+ persp.actionSet:org.eclipse.text.quicksearch.actionSet
+ persp.actionSet:org.eclipse.ui.edit.text.actionSet.annotationNavigation
+ persp.actionSet:org.eclipse.ui.edit.text.actionSet.navigation
+ persp.actionSet:org.eclipse.ui.edit.text.actionSet.convertLineDelimitersTo
+ persp.actionSet:org.eclipse.ui.externaltools.ExternalToolsSet
+ persp.actionSet:org.eclipse.ui.actionSet.keyBindings
+ persp.actionSet:org.eclipse.ui.actionSet.openFiles
+ persp.actionSet:org.eclipse.jst.j2ee.J2eeMainActionSet
+ persp.actionSet:org.eclipse.jdt.ui.JavaActionSet
+ persp.actionSet:org.eclipse.debug.ui.launchActionSet
+ persp.actionSet:org.eclipse.debug.ui.debugActionSet
+ persp.actionSet:org.eclipse.ui.NavigateActionSet
+ persp.viewSC:org.eclipse.ui.navigator.ProjectExplorer
+ persp.viewSC:org.eclipse.wst.server.ui.ServersView
+ persp.viewSC:org.eclipse.datatools.connectivity.DataSourceExplorerNavigator
+ persp.viewSC:org.eclipse.ui.views.BookmarkView
+ persp.viewSC:org.eclipse.ui.views.ContentOutline
+ persp.viewSC:org.eclipse.ui.views.PropertySheet
+ persp.viewSC:org.eclipse.wst.common.snippets.internal.ui.SnippetsView
+ persp.viewSC:org.eclipse.ui.views.AllMarkersView
+ persp.viewSC:org.eclipse.ui.views.ProblemView
+ persp.viewSC:org.eclipse.mylyn.tasks.ui.views.tasks
+ persp.viewSC:org.eclipse.tm.terminal.view.ui.TerminalsView
+ persp.viewSC:org.eclipse.jdt.ui.PackagesView
+ persp.viewSC:org.eclipse.search.ui.views.SearchView
+ persp.viewSC:org.eclipse.ui.console.ConsoleView
+ persp.showIn:org.eclipse.ui.navigator.ProjectExplorer
+ persp.actionSet:org.eclipse.wst.ws.explorer.explorer
+ persp.newWizSC:org.eclipse.m2e.core.wizards.Maven2ProjectWizard
+ persp.newWizSC:org.eclipse.wst.css.ui.internal.wizard.NewCSSWizard
+ persp.newWizSC:org.eclipse.wst.jsdt.ui.NewJSWizard
+ persp.editorOnboardingText:Open a file or drop files here to open them.
+ persp.editorOnboardingCommand:Find Actions$$$Ctrl+3
+ persp.editorOnboardingCommand:Show Key Assist$$$Shift+Ctrl+L
+ persp.editorOnboardingCommand:New$$$Ctrl+N
+ persp.perspSC:org.eclipse.debug.ui.DebugPerspective
+ persp.perspSC:org.eclipse.jdt.ui.JavaPerspective
+ persp.perspSC:org.eclipse.ui.resourcePerspective
+ persp.perspSC:org.eclipse.wst.web.ui.webDevPerspective
+ persp.newWizSC:org.eclipse.jst.j2ee.ui.project.facet.EarProjectWizard
+ persp.newWizSC:org.eclipse.jst.servlet.ui.project.facet.WebProjectWizard
+ persp.newWizSC:org.eclipse.jst.ejb.ui.project.facet.EjbProjectWizard
+ persp.newWizSC:org.eclipse.jst.j2ee.jca.ui.internal.wizard.ConnectorProjectWizard
+ persp.newWizSC:org.eclipse.jst.j2ee.ui.project.facet.appclient.AppClientProjectWizard
+ persp.newWizSC:org.eclipse.wst.web.ui.internal.wizards.SimpleWebProjectWizard
+ persp.newWizSC:org.eclipse.jpt.ui.wizard.newJpaProject
+ persp.newWizSC:org.eclipse.jst.servlet.ui.internal.wizard.AddServletWizard
+ persp.newWizSC:org.eclipse.jst.ejb.ui.internal.wizard.AddSessionBeanWizard
+ persp.newWizSC:org.eclipse.jst.ejb.ui.internal.wizard.AddMessageDrivenBeanWizard
+ persp.newWizSC:org.eclipse.jpt.ui.wizard.newEntity
+ persp.newWizSC:org.eclipse.jst.ws.creation.ui.wizard.serverwizard
+ persp.newWizSC:org.eclipse.wst.html.ui.internal.wizard.NewHTMLWizard
+ persp.newWizSC:org.eclipse.wst.xml.ui.internal.wizards.NewXMLWizard
+ persp.newWizSC:org.eclipse.ui.wizards.new.folder
+ persp.newWizSC:org.eclipse.ui.wizards.new.file
+ persp.actionSet:org.eclipse.wst.server.ui.internal.webbrowser.actionSet
+ persp.actionSet:org.eclipse.debug.ui.breakpointActionSet
+ persp.actionSet:org.eclipse.eclemma.ui.CoverageActionSet
+ persp.showIn:org.eclipse.eclemma.ui.CoverageView
+ persp.showIn:org.eclipse.tm.terminal.view.ui.TerminalsView
+ persp.newWizSC:org.eclipse.jst.jsp.ui.internal.wizard.NewJSPWizard
+ persp.newWizSC:org.eclipse.jpt.jpa.ui.wizard.newJpaProject
+ persp.perspSC:org.eclipse.jpt.ui.jpaPerspective
+
+
+
+ View
+ categoryTag:General
+
+
+ View
+ categoryTag:General
+
+
+ View
+ categoryTag:Java
+
+
+ View
+ categoryTag:Java Browsing
+
+
+
+
+
+
+
+ View
+ categoryTag:General
+
+
+ View
+ categoryTag:General
+
+
+ View
+ categoryTag:General
+
+
+
+
+
+ View
+ categoryTag:General
+
+
+ View
+ categoryTag:General
+
+
+ View
+ categoryTag:Server
+
+
+ View
+ categoryTag:Data Management
+
+
+ View
+ categoryTag:General
+
+
+ View
+ categoryTag:Terminal
+
+
+ View
+ categoryTag:General
+
+
+ View
+ categoryTag:General
+
+
+ View
+ categoryTag:General
+
+
+ View
+ categoryTag:General
+
+
+ View
+ categoryTag:General
+
+
+ View
+ categoryTag:General
+
+
+ View
+ categoryTag:Git
+
+
+ View
+ categoryTag:Java
+
+
+ View
+ categoryTag:Java Browsing
+
+
+ View
+ categoryTag:Java
+
+
+
+
+
+
+
+ active
+
+ View
+ categoryTag:Help
+
+
+ View
+ categoryTag:General
+
+
+ View
+ categoryTag:Help
+
+
+
+
+
+
+ View
+ categoryTag:Help
+
+
+
+
+
+ View
+ categoryTag:General
+ active
+ activeOnClose
+
+ ViewMenu
+ menuContribution:menu
+
+
+
+
+
+
+ View
+ categoryTag:Help
+
+
+
+ org.eclipse.e4.primaryDataStack
+ EditorStack
+
+
+
+
+
+
+ View
+ categoryTag:General
+
+ ViewMenu
+ menuContribution:menu
+
+
+
+
+
+
+ View
+ categoryTag:Java
+
+
+
+
+ View
+ categoryTag:Java Browsing
+
+
+
+
+
+ View
+ categoryTag:General
+
+ ViewMenu
+ menuContribution:menu
+
+
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:Server
+
+
+
+
+ View
+ categoryTag:Data Management
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:Terminal
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:Git
+
+
+
+
+ View
+ categoryTag:Java
+
+
+
+
+ View
+ categoryTag:Java Browsing
+
+
+
+
+ View
+ categoryTag:Java
+
+
+
+
+
+ View
+ categoryTag:General
+
+ ViewMenu
+ menuContribution:menu
+
+
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+ toolbarSeparator
+
+
+
+ Draggable
+
+
+
+ toolbarSeparator
+
+
+
+ Draggable
+
+
+ toolbarSeparator
+
+
+
+ Draggable
+
+
+ Draggable
+
+
+ Draggable
+
+
+ Draggable
+
+
+ Draggable
+
+
+ toolbarSeparator
+
+
+
+ Draggable
+
+
+
+ toolbarSeparator
+
+
+
+ toolbarSeparator
+
+
+
+ Draggable
+
+
+ stretch
+ SHOW_RESTORE_MENU
+
+
+ Draggable
+ HIDEABLE
+ SHOW_RESTORE_MENU
+
+
+
+
+ stretch
+
+
+ Draggable
+
+
+ Draggable
+
+
+
+
+ TrimStack
+ Draggable
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ platform:gtk
+
+
+
+
+
+ platform:gtk
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ platform:gtk
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ platform:gtk
+
+
+ platform:gtk
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Editor
+ removeOnHide
+
+
+
+
+ View
+ categoryTag:Ant
+
+
+
+
+ View
+ categoryTag:Gradle
+
+
+
+
+ View
+ categoryTag:Gradle
+
+
+
+
+ View
+ categoryTag:Data Management
+
+
+
+
+ View
+ categoryTag:Data Management
+
+
+
+
+ View
+ categoryTag:Data Management
+
+
+
+
+ View
+ categoryTag:Debug
+
+
+
+
+ View
+ categoryTag:Debug
+
+
+
+
+ View
+ categoryTag:Debug
+
+
+
+
+ View
+ categoryTag:Debug
+
+
+
+
+ View
+ categoryTag:Debug
+
+
+
+
+ View
+ categoryTag:Debug
+
+
+
+
+ View
+ categoryTag:Debug
+
+
+ View
+ categoryTag:Debug
+
+
+
+
+ View
+ categoryTag:Java
+
+
+
+
+ View
+ categoryTag:Git
+
+
+
+
+ View
+ categoryTag:Git
+
+
+
+
+ View
+ categoryTag:Git
+
+
+
+
+ View
+ categoryTag:Git
+ NoRestore
+
+
+
+
+ View
+ categoryTag:Git
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:Help
+
+
+
+
+ View
+ categoryTag:Java
+
+
+
+
+ View
+ categoryTag:Java
+
+
+
+
+ View
+ categoryTag:Debug
+
+
+
+
+ View
+ categoryTag:Java
+
+
+
+
+ View
+ categoryTag:Java
+
+
+
+
+ View
+ categoryTag:Java
+
+
+
+
+ View
+ categoryTag:Java Browsing
+
+
+
+
+ View
+ categoryTag:Java Browsing
+
+
+
+
+ View
+ categoryTag:Java Browsing
+
+
+
+
+ View
+ categoryTag:Java Browsing
+
+
+
+
+ View
+ categoryTag:Java
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:Java
+
+
+
+
+ View
+ categoryTag:Java
+
+
+
+
+ View
+ categoryTag:JPA
+
+
+
+
+ View
+ categoryTag:JPA
+
+
+
+
+ View
+ categoryTag:JavaServer Faces
+
+
+
+
+ View
+ categoryTag:JavaServer Faces
+
+
+
+
+ View
+ categoryTag:Web Services
+
+
+
+
+ View
+ categoryTag:Other
+
+
+
+
+ View
+ categoryTag:Maven
+
+
+
+
+ View
+ categoryTag:Maven
+
+
+
+
+ View
+ categoryTag:Maven
+
+
+
+
+ View
+ categoryTag:Mylyn
+
+
+
+
+ View
+ categoryTag:Mylyn
+
+
+
+
+ View
+ categoryTag:Mylyn
+
+
+
+
+ View
+ categoryTag:Mylyn
+
+
+
+
+ View
+ categoryTag:Mylyn
+
+
+
+
+ View
+ categoryTag:Oomph
+
+
+
+
+ View
+ categoryTag:API Tools
+
+
+
+
+ View
+ categoryTag:Plug-in Development
+
+
+
+
+ View
+ categoryTag:Plug-in Development
+
+
+
+
+ View
+ categoryTag:Plug-in Development
+
+
+
+
+ View
+ categoryTag:Plug-in Development
+
+
+
+
+ View
+ categoryTag:Plug-in Development
+
+
+
+
+ View
+ categoryTag:Plug-in Development
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:Version Control (Team)
+
+
+
+
+ View
+ categoryTag:Version Control (Team)
+
+
+ View
+ categoryTag:Help
+
+
+
+
+ View
+ categoryTag:Terminal
+
+
+
+
+ View
+ categoryTag:Other
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:Help
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:General
+
+
+
+
+ View
+ categoryTag:Debug
+
+
+
+
+ View
+ categoryTag:Other
+
+
+
+
+ View
+ categoryTag:Other
+
+
+
+
+ View
+ categoryTag:Other
+
+
+
+
+ View
+ categoryTag:Server
+
+
+
+
+ View
+ categoryTag:XML
+
+
+
+
+ View
+ categoryTag:XML
+
+
+
+
+ View
+ categoryTag:XML
+
+
+
+
+ View
+ categoryTag:XML
+
+
+
+
+ View
+ categoryTag:XML
+
+
+
+
+ View
+ categoryTag:PyDev
+
+
+
+
+ View
+ categoryTag:PyDev
+
+
+
+
+ View
+ categoryTag:PyDev
+
+
+
+
+ View
+ categoryTag:PyDev
+
+
+
+
+ View
+ categoryTag:PyDev
+
+
+
+
+ View
+ categoryTag:PyDev
+
+
+
+
+ View
+ categoryTag:PyDev
+
+
+
+
+ View
+ categoryTag:PyDev
+
+
+
+ glue
+ move_after:PerspectiveSpacer
+ SHOW_RESTORE_MENU
+
+
+ move_after:Spacer Glue
+ HIDEABLE
+ SHOW_RESTORE_MENU
+
+
+ glue
+ move_after:SearchField
+ SHOW_RESTORE_MENU
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/.metadata/.plugins/org.eclipse.egit.core/.org.eclipse.egit.core.cmp/.project b/.metadata/.plugins/org.eclipse.egit.core/.org.eclipse.egit.core.cmp/.project
new file mode 100644
index 0000000..3c10856
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.egit.core/.org.eclipse.egit.core.cmp/.project
@@ -0,0 +1,11 @@
+
+
+ .org.eclipse.egit.core.cmp
+
+
+
+
+
+
+
+
diff --git a/.metadata/.plugins/org.eclipse.egit.core/.org.eclipse.egit.core.cmp/.settings/org.eclipse.core.resources.prefs b/.metadata/.plugins/org.eclipse.egit.core/.org.eclipse.egit.core.cmp/.settings/org.eclipse.core.resources.prefs
new file mode 100644
index 0000000..99f26c0
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.egit.core/.org.eclipse.egit.core.cmp/.settings/org.eclipse.core.resources.prefs
@@ -0,0 +1,2 @@
+eclipse.preferences.version=1
+encoding/=UTF-8
diff --git a/.metadata/.plugins/org.eclipse.jdt.core/assumedExternalFilesCache b/.metadata/.plugins/org.eclipse.jdt.core/assumedExternalFilesCache
new file mode 100644
index 0000000..593f470
Binary files /dev/null and b/.metadata/.plugins/org.eclipse.jdt.core/assumedExternalFilesCache differ
diff --git a/.metadata/.plugins/org.eclipse.jdt.core/externalFilesCache b/.metadata/.plugins/org.eclipse.jdt.core/externalFilesCache
new file mode 100644
index 0000000..593f470
Binary files /dev/null and b/.metadata/.plugins/org.eclipse.jdt.core/externalFilesCache differ
diff --git a/.metadata/.plugins/org.eclipse.jdt.core/javaLikeNames.txt b/.metadata/.plugins/org.eclipse.jdt.core/javaLikeNames.txt
new file mode 100644
index 0000000..8586397
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.jdt.core/javaLikeNames.txt
@@ -0,0 +1 @@
+java
\ No newline at end of file
diff --git a/.metadata/.plugins/org.eclipse.jdt.core/nonChainingJarsCache b/.metadata/.plugins/org.eclipse.jdt.core/nonChainingJarsCache
new file mode 100644
index 0000000..593f470
Binary files /dev/null and b/.metadata/.plugins/org.eclipse.jdt.core/nonChainingJarsCache differ
diff --git a/.metadata/.plugins/org.eclipse.jdt.core/variablesAndContainers.dat b/.metadata/.plugins/org.eclipse.jdt.core/variablesAndContainers.dat
new file mode 100644
index 0000000..3de398e
Binary files /dev/null and b/.metadata/.plugins/org.eclipse.jdt.core/variablesAndContainers.dat differ
diff --git a/.metadata/.plugins/org.eclipse.jdt.ui/OpenTypeHistory.xml b/.metadata/.plugins/org.eclipse.jdt.ui/OpenTypeHistory.xml
new file mode 100644
index 0000000..a4ee3cb
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.jdt.ui/OpenTypeHistory.xml
@@ -0,0 +1,2 @@
+
+
diff --git a/.metadata/.plugins/org.eclipse.jdt.ui/QualifiedTypeNameHistory.xml b/.metadata/.plugins/org.eclipse.jdt.ui/QualifiedTypeNameHistory.xml
new file mode 100644
index 0000000..9e390f5
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.jdt.ui/QualifiedTypeNameHistory.xml
@@ -0,0 +1,2 @@
+
+
diff --git a/.metadata/.plugins/org.eclipse.m2e.core/workspaceState.ser b/.metadata/.plugins/org.eclipse.m2e.core/workspaceState.ser
new file mode 100644
index 0000000..abbf8e5
Binary files /dev/null and b/.metadata/.plugins/org.eclipse.m2e.core/workspaceState.ser differ
diff --git a/.metadata/.plugins/org.eclipse.mylyn.github.ui/avatars.ser b/.metadata/.plugins/org.eclipse.mylyn.github.ui/avatars.ser
new file mode 100644
index 0000000..1e9a069
Binary files /dev/null and b/.metadata/.plugins/org.eclipse.mylyn.github.ui/avatars.ser differ
diff --git a/.metadata/.plugins/org.eclipse.oomph.setup/workspace.setup b/.metadata/.plugins/org.eclipse.oomph.setup/workspace.setup
new file mode 100644
index 0000000..1f73e14
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.oomph.setup/workspace.setup
@@ -0,0 +1,6 @@
+
+
diff --git a/.metadata/.plugins/org.eclipse.tips.ide/dialog_settings.xml b/.metadata/.plugins/org.eclipse.tips.ide/dialog_settings.xml
new file mode 100644
index 0000000..5ca0b77
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.tips.ide/dialog_settings.xml
@@ -0,0 +1,3 @@
+
+
diff --git a/.metadata/.plugins/org.eclipse.ui.ide/dialog_settings.xml b/.metadata/.plugins/org.eclipse.ui.ide/dialog_settings.xml
new file mode 100644
index 0000000..6e4c714
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.ui.ide/dialog_settings.xml
@@ -0,0 +1,12 @@
+
+
diff --git a/.metadata/.plugins/org.eclipse.ui.intro/introstate b/.metadata/.plugins/org.eclipse.ui.intro/introstate
new file mode 100644
index 0000000..02f134f
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.ui.intro/introstate
@@ -0,0 +1,2 @@
+
+
\ No newline at end of file
diff --git a/.metadata/.plugins/org.eclipse.ui.workbench/dialog_settings.xml b/.metadata/.plugins/org.eclipse.ui.workbench/dialog_settings.xml
new file mode 100644
index 0000000..5ca0b77
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.ui.workbench/dialog_settings.xml
@@ -0,0 +1,3 @@
+
+
diff --git a/.metadata/.plugins/org.eclipse.ui.workbench/workingsets.xml b/.metadata/.plugins/org.eclipse.ui.workbench/workingsets.xml
new file mode 100644
index 0000000..74ec227
--- /dev/null
+++ b/.metadata/.plugins/org.eclipse.ui.workbench/workingsets.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.metadata/.plugins/org.python.pydev/pyunit_tests/test_run_pin_info.txt b/.metadata/.plugins/org.python.pydev/pyunit_tests/test_run_pin_info.txt
new file mode 100644
index 0000000..27cc728
--- /dev/null
+++ b/.metadata/.plugins/org.python.pydev/pyunit_tests/test_run_pin_info.txt
@@ -0,0 +1 @@
+||
\ No newline at end of file
diff --git a/.metadata/version.ini b/.metadata/version.ini
new file mode 100644
index 0000000..45c5368
--- /dev/null
+++ b/.metadata/version.ini
@@ -0,0 +1,3 @@
+#Fri Dec 22 14:11:30 CET 2023
+org.eclipse.core.runtime=2
+org.eclipse.platform=4.28.0.v20230605-0440
diff --git a/.project b/.project
new file mode 100644
index 0000000..2cbc313
--- /dev/null
+++ b/.project
@@ -0,0 +1,17 @@
+
+
+ corinne-3
+
+
+
+
+
+ org.python.pydev.PyDevBuilder
+
+
+
+
+
+ org.python.pydev.pythonNature
+
+
diff --git a/.pydevproject b/.pydevproject
new file mode 100644
index 0000000..2b04565
--- /dev/null
+++ b/.pydevproject
@@ -0,0 +1,5 @@
+
+
+ Default
+ python interpreter
+
diff --git a/LICENSE.txt b/LICENSE.txt
new file mode 100644
index 0000000..46ef8a6
--- /dev/null
+++ b/LICENSE.txt
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2021 Simone Orlando, Vairo Di Pasquale, Ivan Lanese
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..d80d6ee
--- /dev/null
+++ b/README.md
@@ -0,0 +1,31 @@
+# corinne-3
+
+## Installation
+
+The following prerequisites should be fulfilled:
+
+- python3
+- python3-pip
+- antlr4
+
+Corinne relies on antlr4, the DOT format, and Python's Tkinter (for its GUI);
+therefore the following packages are required:
+
+- python3-tk
+- python3-pil.imagetk
+- graphviz
+
+In Debian-based distros, execute the following commands from a shell.
+
+- sudo apt install antlr4
+- sudo apt install graphviz
+- sudo apt install python3-graphviz
+- sudo apt install python3-tk
+- sudo apt install python3-pil.imagetk
+- sudo pip install antlr4-python3-runtime
+
+1. execute the 'makeParser.sh' script in the subdirectory 'global_graph_parser'
+2. execute the 'makeParser.sh' script in the subdirectory 'dot_parser'
+
+You can now execute the script 'corinne.sh' from the root directory.
+Have fun!
diff --git a/cfsm.py b/cfsm.py
new file mode 100644
index 0000000..d837fd8
--- /dev/null
+++ b/cfsm.py
@@ -0,0 +1,102 @@
+from fsa import FSA
+
+
class CFSM(FSA):
    """Communicating finite-state machine (CFSM).

    A CFSM is a finite-state automaton whose labels have the form
    'A B!m' (A sends message m to B) or 'A B?m' (A receives message m
    sent by B), with A != B.
    """
    def __init__(self, states: set, labels: set, edges: set, s0: str, participants: set):
        super().__init__(states, labels, edges, s0)
        # Names of the participants occurring in the labels.
        self.participants = participants

    def __get_participants_and_message_from_label__(self, label):
        """Split a CFSM label into stripped (sender, receiver, message)."""
        sender, receiver_message = label.split(' ')
        if '?' in receiver_message:  # case: A B?m (receive)
            receiver, message = receiver_message.split('?')
        else:  # case: A B!m (send)
            receiver, message = receiver_message.split('!')
        return sender.strip(), receiver.strip(), message.strip()

    def __transition_exist__(self, state, label):
        """If a transition δ(state, label, state2) exists,
        return state2, otherwise return False."""
        for edge in self.edges:
            if edge[0] == state and edge[1] == label:
                return edge[2]
        return False

    def minimization(self):
        """
        Minimize the communicating FSM in place.
        Based on the table-filling method (Myhill-Nerode theorem):
        mark distinguishable state pairs until a fixed point, then merge
        the remaining (indistinguishable) states.
        """
        # Create every unordered pair of distinct nodes: (i, j) is the
        # same pair as (j, i), and pairs (i, i) are excluded.
        self.states = set(map(str, self.states))  # TODO verify why self.states are integers at this point
        nodes_pairs = set()
        for i in self.states:
            for j in self.states:
                if i != j and not (i, j) in nodes_pairs and not (j, i) in nodes_pairs:
                    nodes_pairs.add((i, j))
        """
        NOTE: from the moment we're dealing with FSA with a partial transition
        function (not all transitions are defined for every node), the algorithm
        has been adapted. So, in the second step of the algorithm for every pair of
        nodes (i,j) we'll check if δ(i,label) and δ(j,label) exist, and we act
        accordingly to every possible case.
        """
        distinguishable_states = set()
        stop = True
        # Boolean used to stop the algorithm when a whole pass finds
        # no new distinguishable pair of nodes.
        while stop:
            stop = False
            # At every pass remove the pairs already proven
            # distinguishable from the candidate pairs.
            nodes_pairs = nodes_pairs.difference(distinguishable_states)
            for node in nodes_pairs:
                for label in self.labels:
                    i = self.__transition_exist__(node[0], label)
                    j = self.__transition_exist__(node[1], label)
                    if i and j:  # both δ(i,label) and δ(j,label) exist
                        if i != j:  # the transitions reach different nodes
                            # check if they reach a distinguishable pair of nodes
                            if (i, j) in distinguishable_states or (j, i) in distinguishable_states:
                                stop = True
                                distinguishable_states.add((node[0], node[1]))
                                break
                    elif i or j:  # exactly one of δ(i,label), δ(j,label) exists
                        stop = True
                        distinguishable_states.add((node[0], node[1]))
                        break
                # if neither transition exists, nothing to do
        undistinguished_states = set()
        for pair in nodes_pairs:
            undistinguished_states.add(pair[0])
            undistinguished_states.add(pair[1])

        self.states = self.states.difference(undistinguished_states)

        # NOTE(review): ALL mutually-indistinguishable states are merged into a
        # single node; if they form more than one equivalence class this
        # collapses distinct classes together — confirm intended.
        merge_node = ""
        for state in undistinguished_states:
            merge_node += state + ','
        merge_node = merge_node[:-1]
        if merge_node:
            self.states.add(merge_node)

        # Redirect edges touching merged states to the merged node.
        # NOTE(review): edge[3..5] assumes 6-tuple edges, but the CFSM built by
        # Controller.projection stores 3-tuple edges — confirm edge arity.
        new_edges = set()
        for edge in self.edges:
            node1 = edge[0] in undistinguished_states
            node2 = edge[2] in undistinguished_states
            if not (node1 and node2):  # node1 NAND node2
                if node1:
                    new_edges.add((merge_node, edge[1], edge[2], edge[3], edge[4], edge[5]))
                elif node2:
                    new_edges.add((edge[0], edge[1], merge_node, edge[3], edge[4], edge[5]))
                else:
                    new_edges.add(edge)
            else:
                new_edges.add((merge_node, edge[1], merge_node, edge[3], edge[4], edge[5]))
        self.edges = new_edges
diff --git a/chor_auto.py b/chor_auto.py
new file mode 100644
index 0000000..7012818
--- /dev/null
+++ b/chor_auto.py
@@ -0,0 +1,18 @@
+from fsa import FSA
+
+
class ChoreographyAutomata(FSA):
    """Choreography Automaton (CA).

    An FSA whose labels are interactions of the shape 'A->B:m'
    (sender A, receiver B, message m, with A != B) or epsilon
    (the empty string).
    """

    def __init__(self, states: set, labels: set, edges: set, s0: str, participants: set):
        super().__init__(states, labels, edges, s0)
        # Names of the participants occurring in the labels.
        self.participants = participants

    def __get_participants_and_message_from_label__(self, label):
        """Split a label 'A->B:m' into stripped (sender, receiver, message)."""
        sender, remainder = label.split('->')
        receiver, message = remainder.split(':')
        return sender.strip(), receiver.strip(), message.strip()
diff --git a/controller.py b/controller.py
new file mode 100644
index 0000000..64b67c9
--- /dev/null
+++ b/controller.py
@@ -0,0 +1,480 @@
+import os
+from graphviz import render, Digraph, view
+from cfsm import CFSM
+from global_graph_parser.main import main as global_graph_parser
+from dot_parser.main import main
+from dot_parser.domitilla_converter import domitilla_converter
+from dot_parser.MyErrorListener import parseError
+from dot_parser.MyVisitor import ForkStatementDetected
+
+import itertools
+from well_formedness import well_sequencedness_conditions, well_branchedness_first_condition, well_branchedness_second_condition, well_branchedness_third_condition, well_branchedness_late_join_condition
+from well_formedness import asynchronous_well_sequencedness_first_conditions, asynchronous_well_sequencedness_second_conditions
+
+class Controller:
+ ca_dict = {}
+
+ def get_participants(self, graph_name):
+ """ Return participants of a given graph """
+ return self.ca_dict.get(graph_name).participants
+
    def get_all_ca(self):
        """ Return all the opened graphs """
        # Mapping: graph name -> automaton object (class-level registry).
        return self.ca_dict
+
+ def get_start_node(self, graph_name):
+ """ Return the start node of a given graph """
+ return self.ca_dict.get(graph_name).s0
+
+ def get_states(self, graph_name):
+ """ Return the states of a given graph """
+ return self.ca_dict.get(graph_name).states
+
+ def get_edges(self, graph_name):
+ """ Return the edges of a given graph """
+ return self.ca_dict.get(graph_name).edges
+
+ def get_labels(self, graph_name):
+ """ Return the labels of a given graph """
+ return self.ca_dict.get(graph_name).labels
+
+ def check_for_epsilon_moves(self, graph_name):
+ for edge in self.ca_dict.get(graph_name).edges:
+ if edge[1] == "":
+ return "Yes"
+ return "No"
+
    def GGparser(self, path_file, path_store):
        """ Parse a Chorgram file and create a converted
        version (DOT) in a given path.
        """
        # read the file and convert in DOT
        message, path = global_graph_parser(path_file, path_store)
        # parse the new DOT file and store the graph
        result = self.DOTparser(path)
        # NOTE(review): on a parse error DOTparser returns a 2-tuple, so
        # result[2] below would raise IndexError — confirm error handling.
        # return a log message and the name of the graph
        return message, result[2]
+
    def DOTparser(self, path_file):
        """ Parse a DOT file, check if it was a Domitilla graph
        and fill a Choreography Automata (CA) struct. """
        try:
            ca, domi_bool, graph_name = main(path_file)
        except (parseError, ForkStatementDetected) as e:
            # NOTE(review): this error path returns a 2-tuple while the
            # success path returns a 3-tuple; callers that index [2]
            # (e.g. GGparser) would fail on errors — confirm intended.
            return False, e.message  # return eventually an error message
        else:
            # store the CA in a dictionary { 'graph_name' : CA }
            self.ca_dict.update({graph_name: ca})
            # return a boolean for Domitilla graphs, a log message
            # and the name of the graph
            return domi_bool, ["Read Successfully " + path_file], graph_name
+
    def DomitillaConverter(self, graph_name, path_file, path_store):
        """ Convert a Domitilla Graph, previously opened and stored.
        Check /dot_parser/domitilla_visitor.py """
        # Replace the stored automaton with its converted version and
        # return the converter's log message.
        ca, message = domitilla_converter(self.ca_dict.get(graph_name), path_file, path_store)
        self.ca_dict.update({graph_name: ca})
        return message
+
+ def remove_record(self, graph_name):
+ """ Remove a graph from the opened graphs dictionary (self.ca_dict)"""
+ self.ca_dict.pop(graph_name)
+
+ def render(self, path_file, extension,V_rend_on_default_image_viewer):
+ try:
+ main(path_file) # check for errors
+ except (parseError, ForkStatementDetected) as e:
+ print("non riesco a creare il render")
+ return e.message[0] + " " + e.message[1] # return eventually an error message
+ else:
+ #V voglio mettere che lo apre se glielo dico io,altirmenti mi rendera quello
+ #che voglio
+ save_path = render('dot', extension, path_file)
+
+ if V_rend_on_default_image_viewer == True:
+ view(save_path) # open the default image viewer
+ return save_path
+
    def make_product(self, graph_name1, graph_name2, path_to_store):
        """ Make the product of two choreography automata (ca1, ca2),
        building a c-automaton corresponding to the concurrent
        execution of the two original c-automata. """

        # get the two choreography automata from the graph names
        ca1 = self.ca_dict.get(graph_name1)
        ca2 = self.ca_dict.get(graph_name2)
        # the product is defined only for disjoint sets of participants
        if ca1.participants.intersection(ca2.participants):
            return ["[ERROR] participants are not disjoint"]
        # if the user didn't add a .dot extension to the name of
        # the product graph, we'll add it
        if not path_to_store.endswith('.dot'):
            path_to_store += '.dot'
        # get the name from the path
        path_splitted = os.path.split(path_to_store)
        graph_name = path_splitted[1].split('.')
        # initialize the output graph; the invisible 's0' node only
        # serves to draw the arrow entering the initial state
        g = Digraph(graph_name[0])
        g.node('s0', label="", shape='none', height='0', width='0')
        start_node = ca1.s0 + ',' + ca2.s0
        g.edge('s0', start_node)
        # Build the product graph: states are pairs 'i,j' and each
        # component can move independently (interleaving).
        # NOTE: for each edge
        # edge[0] == sender node
        # edge[1] == label
        # edge[2] == receiver node
        for i in ca1.states:
            for j in ca2.states:
                # current product node under consideration
                node = i + ',' + j
                # keep j fixed, follow the edges of ca1 leaving i
                for edge in ca1.edges:
                    if edge[0] == i:
                        g.edge(node, edge[2] + ',' + j, label=edge[1])
                # keep i fixed, follow the edges of ca2 leaving j
                for edge in ca2.edges:
                    if edge[0] == j:
                        g.edge(node, i + ',' + edge[2], label=edge[1])
        # draw and save the graph
        g.save(path_to_store)
        # parse the product graph and store the resulting CA
        # NOTE(review): `result` is unused; DOTparser is called only for
        # its side effect of registering the new graph — confirm intended.
        result = self.DOTparser(path_to_store)
        # return a log message
        return ["[CREATED] " + path_to_store]
+
    def synchronize(self, graph_name_to_sync, interface1, interface2, path_to_store):
        """
        Remove a pair of (compatible) roles by transforming them into forwarders.
        e.g.: a synchronization over H and K removes participants H and K and
        sends each message originally sent to H to whoever K used to send the
        same message, and vice versa.
        """
        # get the choreography automaton from the graph name
        ca = self.ca_dict.get(graph_name_to_sync)
        # the two interfaces must be different participants
        if interface1 == interface2:
            return ["[ERROR] you selected the same participant for both interfaces"]
        # if the user didn't add a .dot extension to the name of
        # the sync graph, we'll add it
        if not path_to_store.endswith('.dot'):
            path_to_store += '.dot'
        # get the name from the path
        path_splitted = os.path.split(path_to_store)
        graph_name = path_splitted[1].split('.')
        # initialize the output graph; the invisible 's0' node only
        # serves to draw the arrow entering the initial state
        g = Digraph(graph_name[0])
        g.node('s0', label="", shape='none', height='0', width='0')
        g.edge('s0', ca.s0)
        #
        # NOTE: each edge in ca.edges contains a 6-uple like this:
        # (source_node, label, dest_node, sender, receiver, message)
        #
        # ------------ STEP (1) -----------------
        # each transition (p, A -> H: m, q) is removed, and for each
        # transition (q, K -> B: m, r) a transition (p, A -> B: m, r)
        # is added, and similarly by swapping H and K
        new_edges = set()
        edges_to_remove = set()
        for i in ca.edges:
            if i[4] == interface1:
                # NOTE(review): edges into interface1 are removed even when no
                # matching forward edge exists, while the interface2 branch
                # below removes only matched edges — confirm the asymmetry.
                edges_to_remove.add(i)
                for j in ca.edges:
                    if j[0] == i[2] and j[3] == interface2 and j[5] == i[5] and i[3] != j[4]:
                        edges_to_remove.add(i)
                        label = i[3] + '->' + j[4] + ':' + i[5]
                        new_edges.add((i[0], label, j[2], i[3], j[4], i[5]))

            if i[4] == interface2:
                for j in ca.edges:
                    if j[0] == i[2] and j[3] == interface1 and j[5] == i[5] and i[3] != j[4]:
                        edges_to_remove.add(i)
                        label = i[3] + '->' + j[4] + ':' + i[5]
                        new_edges.add((i[0], label, j[2], i[3], j[4], i[5]))
        ca.edges = ca.edges.difference(edges_to_remove)
        ca.edges = ca.edges.union(new_edges)
        # ------------ STEP (2) -----------------
        # Transitions involving neither H nor K are preserved,
        # whereas all other transitions are removed
        edges_to_remove.clear()
        for i in ca.edges:
            if i[3] == interface1 or i[3] == interface2 or i[4] == interface1 or i[4] == interface2:
                edges_to_remove.add(i)
        ca.edges = ca.edges.difference(edges_to_remove)
        # ------------ STEP (3) -----------------
        # States and transitions unreachable from the initial
        # state are removed.
        ca.delete_unreachable_nodes()
        for edge in ca.edges:
            g.edge(edge[0], edge[2], label=str(edge[1]))
        # draw and save the graph
        g.save(path_to_store)
        # parse the synchronized graph and store the resulting CA
        result = self.DOTparser(path_to_store)
        return ["[CREATED] " + path_to_store]
+
    def projection(self, graph_name, participant, path_to_store):
        """
        The projection of a c-automaton on a participant A
        is a CFSM, obtained by minimising the c-automaton
        after updating the labels.
        """
        # get the choreography automaton from the graph name
        ca = self.ca_dict.get(graph_name)
        # if the user didn't add a .dot extension to the name of
        # the projected graph, we'll add it
        if not path_to_store.endswith('.dot'):
            path_to_store += '.dot'
        # get the name from the path
        path_splitted = os.path.split(path_to_store)
        graph_name = path_splitted[1].split('.')
        # initialize the output graph
        g = Digraph(graph_name[0])
        # NOTE: each edge in ca.edges contains a 6-uple like this:
        # (source_node, label, dest_node, sender, receiver, message)
        # The projected edges built below are 3-tuples
        # (source_node, label, dest_node).
        # NOTE(review): CFSM.minimization indexes edge[3..5] when merging —
        # confirm it copes with these 3-tuple edges.
        new_edges = set()
        new_labels = set()
        for edge in ca.edges:
            # if the participant is the sender: A->B:m becomes 'A B!m'
            if edge[3] == participant:
                label = participant + ' ' + edge[4] + '!' + edge[5]
                new_edges.add((edge[0], label, edge[2]))
                new_labels.add(label)
            # if the participant is the receiver: A->B:m becomes 'A B?m'
            elif edge[4] == participant:
                label = edge[3] + ' ' + participant + '?' + edge[5]
                new_edges.add((edge[0], label, edge[2]))
                new_labels.add(label)
            # in all the other cases the interaction is invisible to the
            # participant: project to an epsilon (empty-label) edge
            else:
                new_edges.add((edge[0], "", edge[2]))
        c = CFSM(ca.states, new_labels, new_edges, ca.s0, ca.participants)

        c.delete_epsilon_moves()
        c.minimization()

        for edge in c.edges:
            g.edge(str(edge[0]), str(edge[2]), label=edge[1])
        # draw and save the graph
        g.save(path_to_store)

        return ["[CREATED] " + path_to_store]
+
+
+
+
+
+ # Check each condition of Well-Branchedness one by one. It stops as soon as a
+ # condition is not met and returns the associated error
+
+ def make_well_branchedness(self, graph_name):
+
+ ca = self.ca_dict.get(graph_name)
+
+ # First error check
+ res1 = well_branchedness_first_condition(ca.edges,ca.states)
+
+ # Second error check
+ res2 = well_branchedness_second_condition(ca.edges, ca.states, ca.participants)
+
+ # Third error check
+ res3 = well_branchedness_third_condition(ca.states, ca.edges, ca.participants)
+
+ # Late Join Error Check
+ res4 = well_branchedness_late_join_condition(ca.states, ca.edges, ca.participants)
+
+
+ if res1 is not None:
+ if res2 is not None:
+ if res3 is not None:
+ if res4 is not None:
+ result = []
+ result.append(['Verified: NO. Well-branchedness in first condition: ' + res1])
+ result.append(['Verified: NO. Well-branchedness in second condition: ' + res2])
+ result.append(['Verified: NO. Well-branchedness in third condition: ' + res3])
+ result.append(['Verified: NO. Well-branchedness in Late Join: ' + res4])
+ return [result]
+ else:
+ result = []
+ result.append(['Verified: NO. Well-branchedness in first condition: ' + res1])
+ result.append(['Verified: NO. Well-branchedness in second condition: ' + res2])
+ result.append(['Verified: NO. Well-branchedness in third condition: ' + res3])
+ result.append(['Verified: Well-branched in Late Join'])
+ return [result]
+ else:
+ if res4 is not None:
+ result = []
+ result.append(['Verified: NO. Well-branchedness in first condition: ' + res1])
+ result.append(['Verified: NO. Well-branchedness in second condition: ' + res2])
+ result.append(['Verified: Well-branchedn in third condition'])
+ result.append(['Verified: NO. Well-branchedness in Late Join: ' + res4])
+ return [result]
+ else:
+ result = []
+ result.append(['Verified: NO. Well-branchedness in first condition: ' + res1])
+ result.append(['Verified: NO. Well-branchedness in second condition: ' + res2])
+ result.append(['Verified: Well-branched in third condition'])
+ result.append(['Verified: Well-branched in Late Join'])
+ return [result]
+ else:
+ if res3 is not None:
+ if res4 is not None:
+ result = []
+ result.append(['Verified: NO. Well-branchedness in first condition: ' + res1])
+ result.append(['Verified: Well-branched in second condition'])
+ result.append(['Verified: NO. Well-branchedness in third condition: ' + res3])
+ result.append(['Verified: NO. Well-branchedness in Late Join: ' + res4])
+ return [result]
+ else:
+ result = []
+ result.append(['Verified: NO. Well-branchedness in first condition: ' + res1])
+ result.append(['Verified: Well-branched in second condition'])
+ result.append(['Verified: NO. Well-branchedness in third condition: ' + res3])
+ result.append(['Verified: Well-branched in Late Join'])
+ return [result]
+ else:
+ if res4 is not None:
+ result = []
+ result.append(['Verified: NO. Well-branchedness in first condition: ' + res1])
+ result.append(['Verified: Well-branched in second condition'])
+ result.append(['Verified: Well-branched in third condition'])
+ result.append(['Verified: NO. Well-branchedness in Late Join: ' + res4])
+ return [result]
+ else:
+ result = []
+ result.append(['Verified: NO. Well-branchedness in first condition: ' + res1])
+ result.append(['Verified: Well-branched in second condition'])
+ result.append(['Verified: Well-branched in third condition'])
+ result.append(['Verified: Well-branched in Late Join'])
+ return [result]
+ else:
+ if res2 is not None:
+ if res3 is not None:
+ if res4 is not None:
+ result = []
+ result.append(['Verified: Well-branched in first condition'])
+ result.append(['Verified: NO. Well-branchedness in second condition: ' + res2])
+ result.append(['Verified: NO. Well-branchedness in third condition: ' + res3])
+ result.append(['Verified: NO. Well-branchedness in Late Join: ' + res4])
+ return [result]
+ else:
+ result = []
+ result.append(['Verified: Well-branched in first condition'])
+ result.append(['Verified: NO. Well-branchedness in second condition: ' + res2])
+ result.append(['Verified: NO. Well-branchedness in third condition: ' + res3])
+ result.append(['Verified: Well-branched in Late Join'])
+ return [result]
+ else:
+ if res4 is not None:
+ result = []
+ result.append(['Verified: Well-branched in first condition'])
+ result.append(['Verified: NO. Well-branchedness in second condition: ' + res2])
+ result.append(['Verified: Well-branched in third condition'])
+ result.append(['Verified: NO. Well-branchedness in Late Join: ' + res4])
+ return [result]
+ else:
+ result = []
+ result.append(['Verified: Well-branched in first condition'])
+ result.append(['Verified: NO. Well-branchedness in second condition: ' + res2])
+ result.append(['Verified: Well-branched in third condition'])
+ result.append(['Verified: Well-branched in Late Join'])
+ return [result]
+ else:
+ if res3 is not None:
+ if res4 is not None:
+ result = []
+ result.append(['Verified: Well-branched in first condition'])
+ result.append(['Verified: Well-branched in second condition'])
+ result.append(['Verified: NO. Well-branchedness in third condition: ' + res3])
+ result.append(['Verified: NO. Well-branchedness in Late Join: ' + res4])
+ return [result]
+ else:
+ result = []
+ result.append(['Verified: Well-branched in first condition'])
+ result.append(['Verified: Well-branched in second condition'])
+ result.append(['Verified: NO. Well-branchedness in third condition: ' + res3])
+ result.append(['Verified: Well-branched in Late Join'])
+ return [result]
+ else:
+ if res4 is not None:
+ result = []
+ result.append(['Verified: Well-branched in first condition'])
+ result.append(['Verified: Well-branched in second condition'])
+ result.append(['Verified: Well-branched in third condition'])
+ result.append(['Verified: NO. Well-branchedness in Late Join: ' + res4])
+ return [result]
+ else:
+ result = []
+ result.append(['Verified: Well-branched in first condition'])
+ result.append(['Verified: Well-branched in second condition'])
+ result.append(['Verified: Well-branched in third condition'])
+ result.append(['Verified: Well-branched in Late Join'])
+ return [result]
+
+
+
+
+ # Call the Well-Sequencedness condition check. If no error is returned
+ # then it returns that it has passed the check, otherwise it returns
+ # the error
+
+ def make_well_sequencedness(self, graph_name):
+ ca = self.ca_dict.get(graph_name)
+
+ ret = well_sequencedness_conditions(ca)
+
+ if ret == None:
+ result = ['Verified: Well-sequenced']
+ return [result]
+ else:
+ result = ['Verified: NO Well-sequenced, not verified in ' + ret]
+ return [result]
+
+ def make_asynchronous_well_sequencedness(self, graph_name):
+ ca = self.ca_dict.get(graph_name)
+
+ #ret = well_sequencedness_conditions(ca)
+ res01 = asynchronous_well_sequencedness_first_conditions(ca)
+ res02 = asynchronous_well_sequencedness_second_conditions(ca)
+
+ if res01 == None:
+ result = ['Verified: Well-sequenced']
+ return [result]
+ else:
+ result = ['Verified: NO Well-sequenced, not verified in ' + ret]
+ return [result]
+
+
+ # First it does the well-Sequencedness check and if it
+ # passes it then does the well-Branchedness check
+
+ def make_well_formedness(self, graph_name):
+ resultWS = self.make_well_sequencedness(graph_name)
+ resultWB = self.make_well_branchedness(graph_name)
+
+ result = []
+ count = 0
+ if resultWS[0][0] == 'Verified: Well-sequenced':
+ result.append(resultWS[0][0])
+ for val in resultWB[0]:
+ result.append(val)
+ if "Verified: Well" in val:
+ count = count + 1
+ if count == 3:
+ result.append('Verified: Well-formed')
+ else:
+ result.append('Verified: No Well-formed')
+ return [result]
+ else:
+ result.append(resultWS[0][0])
+ for val in resultWB[0]:
+ result.append(val)
+ if "Verified: Well" in val:
+ count = count + 1
+ if count == 3:
+ result.append('Verified: Well-formed')
+ else:
+ result.append('Verified: No Well-formed')
+ return [result]
diff --git a/converter.py b/converter.py
new file mode 100644
index 0000000..d3f9cf7
--- /dev/null
+++ b/converter.py
@@ -0,0 +1,21 @@
+
"""Normalize a file's line endings from CRLF (Windows) to LF (Unix)."""
import sys

# Byte-level replacement strings; the file is handled in binary mode so
# the line endings are seen verbatim and no text decoding is involved.
WINDOWS_LINE_ENDING = b'\r\n'
UNIX_LINE_ENDING = b'\n'

# File to convert: first command-line argument when given, otherwise the
# historical hard-coded path (kept for backward compatibility).
DEFAULT_FILE_PATH = r"C:\Users\Vairo\Downloads\Corinne-master\gatto.gv"
file_path = sys.argv[1] if len(sys.argv) > 1 else DEFAULT_FILE_PATH

with open(file_path, 'rb') as open_file:
    content = open_file.read()

content = content.replace(WINDOWS_LINE_ENDING, UNIX_LINE_ENDING)

with open(file_path, 'wb') as open_file:
    open_file.write(content)
\ No newline at end of file
diff --git a/corinne.sh b/corinne.sh
new file mode 100755
index 0000000..eb7516b
--- /dev/null
+++ b/corinne.sh
@@ -0,0 +1 @@
#!/bin/sh
# Launch the Corinne GUI; run from the repository root.
python3 main.py
diff --git a/dot_parser/DOT.g4 b/dot_parser/DOT.g4
new file mode 100644
index 0000000..15e454f
--- /dev/null
+++ b/dot_parser/DOT.g4
@@ -0,0 +1,85 @@
+/*
+ This file defines a grammar to recognize graphs
+ in DOT language. But it's highly customized for
+ our purpose: recognize a Choreography Automaton (CAs).
+*/
+grammar DOT;
+
+/* So a graph in our grammar starts with a 'digraph' string,
+ followed by a name, and a block with statement like this:
+
+ digraph MyGraphName {
+ statement1
+ statement2
+ ...
+ }
+ NOTE: '\n' (newlines) are required at start of block
+ after '{' and at the end of each statement.
+ */
+
+graph : 'digraph' string '{' '\n' stmt_list+ '}' ;
+
+stmt_list: stmt '\n' ;
+
+/* A statement could be:
+ - x1 [... some attributes ...] // a node
+ - x1 -> x2 // a simple edge (no label)
+ - x1 -> x2 [label="..."] // a label edge
+
+ where x1 and x2 are IDs of nodes (a number or a couple of numbers).
+*/
+stmt : node | edge | start_node | start_edge ;
+
+node : id_node '[' (attr_list ','?)* ']' ;
+
+edge : id_node '->' id_node ('[' 'label' '=' '"' label? '"' ']')? ;
+
+/* These are two special cases to specify a start node.
+ So, create a normal node with 's0' name as ID and with
+ an empty label. */
+start_node : 's0' '[' 'label' '=' '"' '"' (','? attr_list)* ']' ;
+/* Therefore, create an edge from this node to the initial
+ point you want to specify. */
+start_edge : 's0' '->' ('"' string '"' | Number) ;
+
+attr_list : 'label' '=' '"' label? '"'
+ | 'shape' '=' Shape
+ /* height and width are meaningful to hide
+ the node s0 and view just the arrow
+ enter in the start node. */
+ | 'height' '=' ('"' string '"' | Number)
+ | 'width' '=' ('"' string '"' | Number)
+ ;
+
+id_node : Number | '"' string '"' ;
+
+string : (Uppercase_letter | Lowercase_letter | Number | '-' | '_' | ',')+ ;
+
+/* In Choreography automaton we have a set of labels like:
+
+ " A -> B : some_msg "
+
+ So we check (at syntactic analysis time) if the input entered
+ respects this format, otherwise we reject it.
+
+*/
+
+
+label : Uppercase_letter '->' Uppercase_letter ':' string # interaction
+ | Uppercase_letter Uppercase_letter ('?'|'!') string # cfsm_interaction
+
+ /* these tokens are specified to recognize also
+ graphs from Domitilla's Graphs format
+ (https://github.com/dedo94/Domitilla.git) */
+ | '+' # choice
+ | '|' # fork /* NOTE: we reject Fork nodes */
+ ;
+
+Shape : 'circle' | 'square' | 'diamond' | 'rect' | 'doublecircle' | 'none' ;
+
+ /* Uppercase_letter : [A-Z]; */
+
+Uppercase_letter : [A-Z]+;
+Lowercase_letter : [a-z];
+Number : [0-9]+;
+WS: [ \t]+ -> skip; // skip spaces and tabs
\ No newline at end of file
diff --git a/dot_parser/DOT.interp b/dot_parser/DOT.interp
new file mode 100644
index 0000000..f8c1ae2
--- /dev/null
+++ b/dot_parser/DOT.interp
@@ -0,0 +1,76 @@
+token literal names:
+null
+'digraph'
+'{'
+'\n'
+'}'
+'['
+','
+']'
+'->'
+'label'
+'='
+'"'
+'s0'
+'shape'
+'height'
+'width'
+'-'
+'_'
+':'
+'?'
+'!'
+'+'
+'|'
+null
+null
+null
+null
+null
+
+token symbolic names:
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+Shape
+Uppercase_letter
+Lowercase_letter
+Number
+WS
+
+rule names:
+graph
+stmt_list
+stmt
+node
+edge
+start_node
+start_edge
+attr_list
+id_node
+string
+label
+
+
+atn:
+[3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 3, 29, 153, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9, 7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 6, 2, 30, 10, 2, 13, 2, 14, 2, 31, 3, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 4, 3, 4, 3, 4, 3, 4, 5, 4, 43, 10, 4, 3, 5, 3, 5, 3, 5, 3, 5, 5, 5, 49, 10, 5, 7, 5, 51, 10, 5, 12, 5, 14, 5, 54, 11, 5, 3, 5, 3, 5, 3, 6, 3, 6, 3, 6, 3, 6, 3, 6, 3, 6, 3, 6, 3, 6, 5, 6, 66, 10, 6, 3, 6, 3, 6, 5, 6, 70, 10, 6, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 5, 7, 79, 10, 7, 3, 7, 7, 7, 82, 10, 7, 12, 7, 14, 7, 85, 11, 7, 3, 7, 3, 7, 3, 8, 3, 8, 3, 8, 3, 8, 3, 8, 3, 8, 3, 8, 5, 8, 96, 10, 8, 3, 9, 3, 9, 3, 9, 3, 9, 5, 9, 102, 10, 9, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 5, 9, 115, 10, 9, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 5, 9, 124, 10, 9, 5, 9, 126, 10, 9, 3, 10, 3, 10, 3, 10, 3, 10, 3, 10, 5, 10, 133, 10, 10, 3, 11, 6, 11, 136, 10, 11, 13, 11, 14, 11, 137, 3, 12, 3, 12, 3, 12, 3, 12, 3, 12, 3, 12, 3, 12, 3, 12, 3, 12, 3, 12, 3, 12, 5, 12, 151, 10, 12, 3, 12, 2, 2, 13, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 2, 4, 5, 2, 8, 8, 18, 19, 26, 28, 3, 2, 21, 22, 2, 163, 2, 24, 3, 2, 2, 2, 4, 35, 3, 2, 2, 2, 6, 42, 3, 2, 2, 2, 8, 44, 3, 2, 2, 2, 10, 57, 3, 2, 2, 2, 12, 71, 3, 2, 2, 2, 14, 88, 3, 2, 2, 2, 16, 125, 3, 2, 2, 2, 18, 132, 3, 2, 2, 2, 20, 135, 3, 2, 2, 2, 22, 150, 3, 2, 2, 2, 24, 25, 7, 3, 2, 2, 25, 26, 5, 20, 11, 2, 26, 27, 7, 4, 2, 2, 27, 29, 7, 5, 2, 2, 28, 30, 5, 4, 3, 2, 29, 28, 3, 2, 2, 2, 30, 31, 3, 2, 2, 2, 31, 29, 3, 2, 2, 2, 31, 32, 3, 2, 2, 2, 32, 33, 3, 2, 2, 2, 33, 34, 7, 6, 2, 2, 34, 3, 3, 2, 2, 2, 35, 36, 5, 6, 4, 2, 36, 37, 7, 5, 2, 2, 37, 5, 3, 2, 2, 2, 38, 43, 5, 8, 5, 2, 39, 43, 5, 10, 6, 2, 40, 43, 5, 12, 7, 2, 41, 43, 5, 14, 8, 2, 42, 38, 3, 2, 2, 2, 42, 39, 3, 2, 2, 2, 42, 40, 3, 2, 2, 2, 42, 41, 3, 2, 2, 2, 43, 7, 3, 2, 2, 2, 44, 45, 5, 18, 10, 2, 45, 52, 7, 7, 2, 2, 46, 48, 5, 16, 9, 2, 47, 
49, 7, 8, 2, 2, 48, 47, 3, 2, 2, 2, 48, 49, 3, 2, 2, 2, 49, 51, 3, 2, 2, 2, 50, 46, 3, 2, 2, 2, 51, 54, 3, 2, 2, 2, 52, 50, 3, 2, 2, 2, 52, 53, 3, 2, 2, 2, 53, 55, 3, 2, 2, 2, 54, 52, 3, 2, 2, 2, 55, 56, 7, 9, 2, 2, 56, 9, 3, 2, 2, 2, 57, 58, 5, 18, 10, 2, 58, 59, 7, 10, 2, 2, 59, 69, 5, 18, 10, 2, 60, 61, 7, 7, 2, 2, 61, 62, 7, 11, 2, 2, 62, 63, 7, 12, 2, 2, 63, 65, 7, 13, 2, 2, 64, 66, 5, 22, 12, 2, 65, 64, 3, 2, 2, 2, 65, 66, 3, 2, 2, 2, 66, 67, 3, 2, 2, 2, 67, 68, 7, 13, 2, 2, 68, 70, 7, 9, 2, 2, 69, 60, 3, 2, 2, 2, 69, 70, 3, 2, 2, 2, 70, 11, 3, 2, 2, 2, 71, 72, 7, 14, 2, 2, 72, 73, 7, 7, 2, 2, 73, 74, 7, 11, 2, 2, 74, 75, 7, 12, 2, 2, 75, 76, 7, 13, 2, 2, 76, 83, 7, 13, 2, 2, 77, 79, 7, 8, 2, 2, 78, 77, 3, 2, 2, 2, 78, 79, 3, 2, 2, 2, 79, 80, 3, 2, 2, 2, 80, 82, 5, 16, 9, 2, 81, 78, 3, 2, 2, 2, 82, 85, 3, 2, 2, 2, 83, 81, 3, 2, 2, 2, 83, 84, 3, 2, 2, 2, 84, 86, 3, 2, 2, 2, 85, 83, 3, 2, 2, 2, 86, 87, 7, 9, 2, 2, 87, 13, 3, 2, 2, 2, 88, 89, 7, 14, 2, 2, 89, 95, 7, 10, 2, 2, 90, 91, 7, 13, 2, 2, 91, 92, 5, 20, 11, 2, 92, 93, 7, 13, 2, 2, 93, 96, 3, 2, 2, 2, 94, 96, 7, 28, 2, 2, 95, 90, 3, 2, 2, 2, 95, 94, 3, 2, 2, 2, 96, 15, 3, 2, 2, 2, 97, 98, 7, 11, 2, 2, 98, 99, 7, 12, 2, 2, 99, 101, 7, 13, 2, 2, 100, 102, 5, 22, 12, 2, 101, 100, 3, 2, 2, 2, 101, 102, 3, 2, 2, 2, 102, 103, 3, 2, 2, 2, 103, 126, 7, 13, 2, 2, 104, 105, 7, 15, 2, 2, 105, 106, 7, 12, 2, 2, 106, 126, 7, 25, 2, 2, 107, 108, 7, 16, 2, 2, 108, 114, 7, 12, 2, 2, 109, 110, 7, 13, 2, 2, 110, 111, 5, 20, 11, 2, 111, 112, 7, 13, 2, 2, 112, 115, 3, 2, 2, 2, 113, 115, 7, 28, 2, 2, 114, 109, 3, 2, 2, 2, 114, 113, 3, 2, 2, 2, 115, 126, 3, 2, 2, 2, 116, 117, 7, 17, 2, 2, 117, 123, 7, 12, 2, 2, 118, 119, 7, 13, 2, 2, 119, 120, 5, 20, 11, 2, 120, 121, 7, 13, 2, 2, 121, 124, 3, 2, 2, 2, 122, 124, 7, 28, 2, 2, 123, 118, 3, 2, 2, 2, 123, 122, 3, 2, 2, 2, 124, 126, 3, 2, 2, 2, 125, 97, 3, 2, 2, 2, 125, 104, 3, 2, 2, 2, 125, 107, 3, 2, 2, 2, 125, 116, 3, 2, 2, 2, 126, 17, 3, 2, 2, 2, 127, 133, 7, 28, 2, 2, 128, 129, 
7, 13, 2, 2, 129, 130, 5, 20, 11, 2, 130, 131, 7, 13, 2, 2, 131, 133, 3, 2, 2, 2, 132, 127, 3, 2, 2, 2, 132, 128, 3, 2, 2, 2, 133, 19, 3, 2, 2, 2, 134, 136, 9, 2, 2, 2, 135, 134, 3, 2, 2, 2, 136, 137, 3, 2, 2, 2, 137, 135, 3, 2, 2, 2, 137, 138, 3, 2, 2, 2, 138, 21, 3, 2, 2, 2, 139, 140, 7, 26, 2, 2, 140, 141, 7, 10, 2, 2, 141, 142, 7, 26, 2, 2, 142, 143, 7, 20, 2, 2, 143, 151, 5, 20, 11, 2, 144, 145, 7, 26, 2, 2, 145, 146, 7, 26, 2, 2, 146, 147, 9, 3, 2, 2, 147, 151, 5, 20, 11, 2, 148, 151, 7, 23, 2, 2, 149, 151, 7, 24, 2, 2, 150, 139, 3, 2, 2, 2, 150, 144, 3, 2, 2, 2, 150, 148, 3, 2, 2, 2, 150, 149, 3, 2, 2, 2, 151, 23, 3, 2, 2, 2, 18, 31, 42, 48, 52, 65, 69, 78, 83, 95, 101, 114, 123, 125, 132, 137, 150]
\ No newline at end of file
diff --git a/dot_parser/DOT.tokens b/dot_parser/DOT.tokens
new file mode 100644
index 0000000..cb7f18d
--- /dev/null
+++ b/dot_parser/DOT.tokens
@@ -0,0 +1,49 @@
+T__0=1
+T__1=2
+T__2=3
+T__3=4
+T__4=5
+T__5=6
+T__6=7
+T__7=8
+T__8=9
+T__9=10
+T__10=11
+T__11=12
+T__12=13
+T__13=14
+T__14=15
+T__15=16
+T__16=17
+T__17=18
+T__18=19
+T__19=20
+T__20=21
+T__21=22
+Shape=23
+Uppercase_letter=24
+Lowercase_letter=25
+Number=26
+WS=27
+'digraph'=1
+'{'=2
+'\n'=3
+'}'=4
+'['=5
+','=6
+']'=7
+'->'=8
+'label'=9
+'='=10
+'"'=11
+'s0'=12
+'shape'=13
+'height'=14
+'width'=15
+'-'=16
+'_'=17
+':'=18
+'?'=19
+'!'=20
+'+'=21
+'|'=22
diff --git a/dot_parser/DOTLexer.interp b/dot_parser/DOTLexer.interp
new file mode 100644
index 0000000..d24725a
--- /dev/null
+++ b/dot_parser/DOTLexer.interp
@@ -0,0 +1,98 @@
+token literal names:
+null
+'digraph'
+'{'
+'\n'
+'}'
+'['
+','
+']'
+'->'
+'label'
+'='
+'"'
+'s0'
+'shape'
+'height'
+'width'
+'-'
+'_'
+':'
+'?'
+'!'
+'+'
+'|'
+null
+null
+null
+null
+null
+
+token symbolic names:
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+Shape
+Uppercase_letter
+Lowercase_letter
+Number
+WS
+
+rule names:
+T__0
+T__1
+T__2
+T__3
+T__4
+T__5
+T__6
+T__7
+T__8
+T__9
+T__10
+T__11
+T__12
+T__13
+T__14
+T__15
+T__16
+T__17
+T__18
+T__19
+T__20
+T__21
+Shape
+Uppercase_letter
+Lowercase_letter
+Number
+WS
+
+channel names:
+DEFAULT_TOKEN_CHANNEL
+HIDDEN
+
+mode names:
+DEFAULT_MODE
+
+atn:
+[3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 2, 29, 186, 8, 1, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9, 7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12, 4, 13, 9, 13, 4, 14, 9, 14, 4, 15, 9, 15, 4, 16, 9, 16, 4, 17, 9, 17, 4, 18, 9, 18, 4, 19, 9, 19, 4, 20, 9, 20, 4, 21, 9, 21, 4, 22, 9, 22, 4, 23, 9, 23, 4, 24, 9, 24, 4, 25, 9, 25, 4, 26, 9, 26, 4, 27, 9, 27, 4, 28, 9, 28, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 3, 3, 3, 3, 4, 3, 4, 3, 5, 3, 5, 3, 6, 3, 6, 3, 7, 3, 7, 3, 8, 3, 8, 3, 9, 3, 9, 3, 9, 3, 10, 3, 10, 3, 10, 3, 10, 3, 10, 3, 10, 3, 11, 3, 11, 3, 12, 3, 12, 3, 13, 3, 13, 3, 13, 3, 14, 3, 14, 3, 14, 3, 14, 3, 14, 3, 14, 3, 15, 3, 15, 3, 15, 3, 15, 3, 15, 3, 15, 3, 15, 3, 16, 3, 16, 3, 16, 3, 16, 3, 16, 3, 16, 3, 17, 3, 17, 3, 18, 3, 18, 3, 19, 3, 19, 3, 20, 3, 20, 3, 21, 3, 21, 3, 22, 3, 22, 3, 23, 3, 23, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 5, 24, 166, 10, 24, 3, 25, 6, 25, 169, 10, 25, 13, 25, 14, 25, 170, 3, 26, 3, 26, 3, 27, 6, 27, 176, 10, 27, 13, 27, 14, 27, 177, 3, 28, 6, 28, 181, 10, 28, 13, 28, 14, 28, 182, 3, 28, 3, 28, 2, 2, 29, 3, 3, 5, 4, 7, 5, 9, 6, 11, 7, 13, 8, 15, 9, 17, 10, 19, 11, 21, 12, 23, 13, 25, 14, 27, 15, 29, 16, 31, 17, 33, 18, 35, 19, 37, 20, 39, 21, 41, 22, 43, 23, 45, 24, 47, 25, 49, 26, 51, 27, 53, 28, 55, 29, 3, 2, 6, 3, 2, 67, 92, 3, 2, 99, 124, 3, 2, 50, 59, 4, 2, 11, 11, 34, 34, 2, 193, 2, 3, 3, 2, 2, 2, 2, 5, 3, 2, 2, 2, 2, 7, 3, 2, 2, 2, 2, 9, 3, 2, 2, 2, 2, 11, 3, 2, 2, 2, 2, 13, 3, 2, 2, 2, 2, 15, 3, 2, 2, 2, 2, 17, 3, 2, 2, 2, 2, 19, 3, 2, 2, 2, 2, 21, 3, 2, 2, 2, 2, 23, 3, 2, 2, 2, 2, 25, 3, 2, 2, 2, 2, 27, 3, 2, 2, 2, 2, 29, 3, 2, 2, 2, 2, 31, 3, 2, 2, 2, 2, 33, 3, 2, 2, 2, 2, 35, 3, 2, 2, 2, 2, 37, 3, 2, 2, 
2, 2, 39, 3, 2, 2, 2, 2, 41, 3, 2, 2, 2, 2, 43, 3, 2, 2, 2, 2, 45, 3, 2, 2, 2, 2, 47, 3, 2, 2, 2, 2, 49, 3, 2, 2, 2, 2, 51, 3, 2, 2, 2, 2, 53, 3, 2, 2, 2, 2, 55, 3, 2, 2, 2, 3, 57, 3, 2, 2, 2, 5, 65, 3, 2, 2, 2, 7, 67, 3, 2, 2, 2, 9, 69, 3, 2, 2, 2, 11, 71, 3, 2, 2, 2, 13, 73, 3, 2, 2, 2, 15, 75, 3, 2, 2, 2, 17, 77, 3, 2, 2, 2, 19, 80, 3, 2, 2, 2, 21, 86, 3, 2, 2, 2, 23, 88, 3, 2, 2, 2, 25, 90, 3, 2, 2, 2, 27, 93, 3, 2, 2, 2, 29, 99, 3, 2, 2, 2, 31, 106, 3, 2, 2, 2, 33, 112, 3, 2, 2, 2, 35, 114, 3, 2, 2, 2, 37, 116, 3, 2, 2, 2, 39, 118, 3, 2, 2, 2, 41, 120, 3, 2, 2, 2, 43, 122, 3, 2, 2, 2, 45, 124, 3, 2, 2, 2, 47, 165, 3, 2, 2, 2, 49, 168, 3, 2, 2, 2, 51, 172, 3, 2, 2, 2, 53, 175, 3, 2, 2, 2, 55, 180, 3, 2, 2, 2, 57, 58, 7, 102, 2, 2, 58, 59, 7, 107, 2, 2, 59, 60, 7, 105, 2, 2, 60, 61, 7, 116, 2, 2, 61, 62, 7, 99, 2, 2, 62, 63, 7, 114, 2, 2, 63, 64, 7, 106, 2, 2, 64, 4, 3, 2, 2, 2, 65, 66, 7, 125, 2, 2, 66, 6, 3, 2, 2, 2, 67, 68, 7, 12, 2, 2, 68, 8, 3, 2, 2, 2, 69, 70, 7, 127, 2, 2, 70, 10, 3, 2, 2, 2, 71, 72, 7, 93, 2, 2, 72, 12, 3, 2, 2, 2, 73, 74, 7, 46, 2, 2, 74, 14, 3, 2, 2, 2, 75, 76, 7, 95, 2, 2, 76, 16, 3, 2, 2, 2, 77, 78, 7, 47, 2, 2, 78, 79, 7, 64, 2, 2, 79, 18, 3, 2, 2, 2, 80, 81, 7, 110, 2, 2, 81, 82, 7, 99, 2, 2, 82, 83, 7, 100, 2, 2, 83, 84, 7, 103, 2, 2, 84, 85, 7, 110, 2, 2, 85, 20, 3, 2, 2, 2, 86, 87, 7, 63, 2, 2, 87, 22, 3, 2, 2, 2, 88, 89, 7, 36, 2, 2, 89, 24, 3, 2, 2, 2, 90, 91, 7, 117, 2, 2, 91, 92, 7, 50, 2, 2, 92, 26, 3, 2, 2, 2, 93, 94, 7, 117, 2, 2, 94, 95, 7, 106, 2, 2, 95, 96, 7, 99, 2, 2, 96, 97, 7, 114, 2, 2, 97, 98, 7, 103, 2, 2, 98, 28, 3, 2, 2, 2, 99, 100, 7, 106, 2, 2, 100, 101, 7, 103, 2, 2, 101, 102, 7, 107, 2, 2, 102, 103, 7, 105, 2, 2, 103, 104, 7, 106, 2, 2, 104, 105, 7, 118, 2, 2, 105, 30, 3, 2, 2, 2, 106, 107, 7, 121, 2, 2, 107, 108, 7, 107, 2, 2, 108, 109, 7, 102, 2, 2, 109, 110, 7, 118, 2, 2, 110, 111, 7, 106, 2, 2, 111, 32, 3, 2, 2, 2, 112, 113, 7, 47, 2, 2, 113, 34, 3, 2, 2, 2, 114, 115, 7, 97, 2, 2, 115, 36, 3, 2, 2, 2, 
116, 117, 7, 60, 2, 2, 117, 38, 3, 2, 2, 2, 118, 119, 7, 65, 2, 2, 119, 40, 3, 2, 2, 2, 120, 121, 7, 35, 2, 2, 121, 42, 3, 2, 2, 2, 122, 123, 7, 45, 2, 2, 123, 44, 3, 2, 2, 2, 124, 125, 7, 126, 2, 2, 125, 46, 3, 2, 2, 2, 126, 127, 7, 101, 2, 2, 127, 128, 7, 107, 2, 2, 128, 129, 7, 116, 2, 2, 129, 130, 7, 101, 2, 2, 130, 131, 7, 110, 2, 2, 131, 166, 7, 103, 2, 2, 132, 133, 7, 117, 2, 2, 133, 134, 7, 115, 2, 2, 134, 135, 7, 119, 2, 2, 135, 136, 7, 99, 2, 2, 136, 137, 7, 116, 2, 2, 137, 166, 7, 103, 2, 2, 138, 139, 7, 102, 2, 2, 139, 140, 7, 107, 2, 2, 140, 141, 7, 99, 2, 2, 141, 142, 7, 111, 2, 2, 142, 143, 7, 113, 2, 2, 143, 144, 7, 112, 2, 2, 144, 166, 7, 102, 2, 2, 145, 146, 7, 116, 2, 2, 146, 147, 7, 103, 2, 2, 147, 148, 7, 101, 2, 2, 148, 166, 7, 118, 2, 2, 149, 150, 7, 102, 2, 2, 150, 151, 7, 113, 2, 2, 151, 152, 7, 119, 2, 2, 152, 153, 7, 100, 2, 2, 153, 154, 7, 110, 2, 2, 154, 155, 7, 103, 2, 2, 155, 156, 7, 101, 2, 2, 156, 157, 7, 107, 2, 2, 157, 158, 7, 116, 2, 2, 158, 159, 7, 101, 2, 2, 159, 160, 7, 110, 2, 2, 160, 166, 7, 103, 2, 2, 161, 162, 7, 112, 2, 2, 162, 163, 7, 113, 2, 2, 163, 164, 7, 112, 2, 2, 164, 166, 7, 103, 2, 2, 165, 126, 3, 2, 2, 2, 165, 132, 3, 2, 2, 2, 165, 138, 3, 2, 2, 2, 165, 145, 3, 2, 2, 2, 165, 149, 3, 2, 2, 2, 165, 161, 3, 2, 2, 2, 166, 48, 3, 2, 2, 2, 167, 169, 9, 2, 2, 2, 168, 167, 3, 2, 2, 2, 169, 170, 3, 2, 2, 2, 170, 168, 3, 2, 2, 2, 170, 171, 3, 2, 2, 2, 171, 50, 3, 2, 2, 2, 172, 173, 9, 3, 2, 2, 173, 52, 3, 2, 2, 2, 174, 176, 9, 4, 2, 2, 175, 174, 3, 2, 2, 2, 176, 177, 3, 2, 2, 2, 177, 175, 3, 2, 2, 2, 177, 178, 3, 2, 2, 2, 178, 54, 3, 2, 2, 2, 179, 181, 9, 5, 2, 2, 180, 179, 3, 2, 2, 2, 181, 182, 3, 2, 2, 2, 182, 180, 3, 2, 2, 2, 182, 183, 3, 2, 2, 2, 183, 184, 3, 2, 2, 2, 184, 185, 8, 28, 2, 2, 185, 56, 3, 2, 2, 2, 7, 2, 165, 170, 177, 182, 3, 8, 2, 2]
\ No newline at end of file
diff --git a/dot_parser/DOTLexer.py b/dot_parser/DOTLexer.py
new file mode 100644
index 0000000..a9dfcf0
--- /dev/null
+++ b/dot_parser/DOTLexer.py
@@ -0,0 +1,145 @@
+# Generated from DOT.g4 by ANTLR 4.7.2
+from antlr4 import *
+from io import StringIO
+from typing.io import TextIO
+import sys
+
+
+def serializedATN():
+ with StringIO() as buf:
+ buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\35")
+ buf.write("\u00ba\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7")
+ buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r")
+ buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23")
+ buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30")
+ buf.write("\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\3\2\3\2\3\2\3")
+ buf.write("\2\3\2\3\2\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\7")
+ buf.write("\3\7\3\b\3\b\3\t\3\t\3\t\3\n\3\n\3\n\3\n\3\n\3\n\3\13")
+ buf.write("\3\13\3\f\3\f\3\r\3\r\3\r\3\16\3\16\3\16\3\16\3\16\3\16")
+ buf.write("\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\20\3\20")
+ buf.write("\3\20\3\20\3\21\3\21\3\22\3\22\3\23\3\23\3\24\3\24\3\25")
+ buf.write("\3\25\3\26\3\26\3\27\3\27\3\30\3\30\3\30\3\30\3\30\3\30")
+ buf.write("\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30")
+ buf.write("\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30")
+ buf.write("\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30")
+ buf.write("\5\30\u00a6\n\30\3\31\6\31\u00a9\n\31\r\31\16\31\u00aa")
+ buf.write("\3\32\3\32\3\33\6\33\u00b0\n\33\r\33\16\33\u00b1\3\34")
+ buf.write("\6\34\u00b5\n\34\r\34\16\34\u00b6\3\34\3\34\2\2\35\3\3")
+ buf.write("\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16")
+ buf.write("\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\30/\31\61")
+ buf.write("\32\63\33\65\34\67\35\3\2\6\3\2C\\\3\2c|\3\2\62;\4\2\13")
+ buf.write("\13\"\"\2\u00c1\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2")
+ buf.write("\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21")
+ buf.write("\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3")
+ buf.write("\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2")
+ buf.write("\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2")
+ buf.write("\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2")
+ buf.write("\65\3\2\2\2\2\67\3\2\2\2\39\3\2\2\2\5A\3\2\2\2\7C\3\2")
+ buf.write("\2\2\tE\3\2\2\2\13G\3\2\2\2\rI\3\2\2\2\17K\3\2\2\2\21")
+ buf.write("M\3\2\2\2\23P\3\2\2\2\25V\3\2\2\2\27X\3\2\2\2\31Z\3\2")
+ buf.write("\2\2\33]\3\2\2\2\35c\3\2\2\2\37j\3\2\2\2!p\3\2\2\2#r\3")
+ buf.write("\2\2\2%t\3\2\2\2\'v\3\2\2\2)x\3\2\2\2+z\3\2\2\2-|\3\2")
+ buf.write("\2\2/\u00a5\3\2\2\2\61\u00a8\3\2\2\2\63\u00ac\3\2\2\2")
+ buf.write("\65\u00af\3\2\2\2\67\u00b4\3\2\2\29:\7f\2\2:;\7k\2\2;")
+ buf.write("<\7i\2\2<=\7t\2\2=>\7c\2\2>?\7r\2\2?@\7j\2\2@\4\3\2\2")
+ buf.write("\2AB\7}\2\2B\6\3\2\2\2CD\7\f\2\2D\b\3\2\2\2EF\7\177\2")
+ buf.write("\2F\n\3\2\2\2GH\7]\2\2H\f\3\2\2\2IJ\7.\2\2J\16\3\2\2\2")
+ buf.write("KL\7_\2\2L\20\3\2\2\2MN\7/\2\2NO\7@\2\2O\22\3\2\2\2PQ")
+ buf.write("\7n\2\2QR\7c\2\2RS\7d\2\2ST\7g\2\2TU\7n\2\2U\24\3\2\2")
+ buf.write("\2VW\7?\2\2W\26\3\2\2\2XY\7$\2\2Y\30\3\2\2\2Z[\7u\2\2")
+ buf.write("[\\\7\62\2\2\\\32\3\2\2\2]^\7u\2\2^_\7j\2\2_`\7c\2\2`")
+ buf.write("a\7r\2\2ab\7g\2\2b\34\3\2\2\2cd\7j\2\2de\7g\2\2ef\7k\2")
+ buf.write("\2fg\7i\2\2gh\7j\2\2hi\7v\2\2i\36\3\2\2\2jk\7y\2\2kl\7")
+ buf.write("k\2\2lm\7f\2\2mn\7v\2\2no\7j\2\2o \3\2\2\2pq\7/\2\2q\"")
+ buf.write("\3\2\2\2rs\7a\2\2s$\3\2\2\2tu\7<\2\2u&\3\2\2\2vw\7A\2")
+ buf.write("\2w(\3\2\2\2xy\7#\2\2y*\3\2\2\2z{\7-\2\2{,\3\2\2\2|}\7")
+ buf.write("~\2\2}.\3\2\2\2~\177\7e\2\2\177\u0080\7k\2\2\u0080\u0081")
+ buf.write("\7t\2\2\u0081\u0082\7e\2\2\u0082\u0083\7n\2\2\u0083\u00a6")
+ buf.write("\7g\2\2\u0084\u0085\7u\2\2\u0085\u0086\7s\2\2\u0086\u0087")
+ buf.write("\7w\2\2\u0087\u0088\7c\2\2\u0088\u0089\7t\2\2\u0089\u00a6")
+ buf.write("\7g\2\2\u008a\u008b\7f\2\2\u008b\u008c\7k\2\2\u008c\u008d")
+ buf.write("\7c\2\2\u008d\u008e\7o\2\2\u008e\u008f\7q\2\2\u008f\u0090")
+ buf.write("\7p\2\2\u0090\u00a6\7f\2\2\u0091\u0092\7t\2\2\u0092\u0093")
+ buf.write("\7g\2\2\u0093\u0094\7e\2\2\u0094\u00a6\7v\2\2\u0095\u0096")
+ buf.write("\7f\2\2\u0096\u0097\7q\2\2\u0097\u0098\7w\2\2\u0098\u0099")
+ buf.write("\7d\2\2\u0099\u009a\7n\2\2\u009a\u009b\7g\2\2\u009b\u009c")
+ buf.write("\7e\2\2\u009c\u009d\7k\2\2\u009d\u009e\7t\2\2\u009e\u009f")
+ buf.write("\7e\2\2\u009f\u00a0\7n\2\2\u00a0\u00a6\7g\2\2\u00a1\u00a2")
+ buf.write("\7p\2\2\u00a2\u00a3\7q\2\2\u00a3\u00a4\7p\2\2\u00a4\u00a6")
+ buf.write("\7g\2\2\u00a5~\3\2\2\2\u00a5\u0084\3\2\2\2\u00a5\u008a")
+ buf.write("\3\2\2\2\u00a5\u0091\3\2\2\2\u00a5\u0095\3\2\2\2\u00a5")
+ buf.write("\u00a1\3\2\2\2\u00a6\60\3\2\2\2\u00a7\u00a9\t\2\2\2\u00a8")
+ buf.write("\u00a7\3\2\2\2\u00a9\u00aa\3\2\2\2\u00aa\u00a8\3\2\2\2")
+ buf.write("\u00aa\u00ab\3\2\2\2\u00ab\62\3\2\2\2\u00ac\u00ad\t\3")
+ buf.write("\2\2\u00ad\64\3\2\2\2\u00ae\u00b0\t\4\2\2\u00af\u00ae")
+ buf.write("\3\2\2\2\u00b0\u00b1\3\2\2\2\u00b1\u00af\3\2\2\2\u00b1")
+ buf.write("\u00b2\3\2\2\2\u00b2\66\3\2\2\2\u00b3\u00b5\t\5\2\2\u00b4")
+ buf.write("\u00b3\3\2\2\2\u00b5\u00b6\3\2\2\2\u00b6\u00b4\3\2\2\2")
+ buf.write("\u00b6\u00b7\3\2\2\2\u00b7\u00b8\3\2\2\2\u00b8\u00b9\b")
+ buf.write("\34\2\2\u00b98\3\2\2\2\7\2\u00a5\u00aa\u00b1\u00b6\3\b")
+ buf.write("\2\2")
+ return buf.getvalue()
+
+
+class DOTLexer(Lexer):
+
+ atn = ATNDeserializer().deserialize(serializedATN())
+
+ decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
+
+ T__0 = 1
+ T__1 = 2
+ T__2 = 3
+ T__3 = 4
+ T__4 = 5
+ T__5 = 6
+ T__6 = 7
+ T__7 = 8
+ T__8 = 9
+ T__9 = 10
+ T__10 = 11
+ T__11 = 12
+ T__12 = 13
+ T__13 = 14
+ T__14 = 15
+ T__15 = 16
+ T__16 = 17
+ T__17 = 18
+ T__18 = 19
+ T__19 = 20
+ T__20 = 21
+ T__21 = 22
+ Shape = 23
+ Uppercase_letter = 24
+ Lowercase_letter = 25
+ Number = 26
+ WS = 27
+
+ channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]
+
+ modeNames = [ "DEFAULT_MODE" ]
+
+ literalNames = [ "",
+ "'digraph'", "'{'", "'\n'", "'}'", "'['", "','", "']'", "'->'",
+ "'label'", "'='", "'\"'", "'s0'", "'shape'", "'height'", "'width'",
+ "'-'", "'_'", "':'", "'?'", "'!'", "'+'", "'|'" ]
+
+ symbolicNames = [ "",
+ "Shape", "Uppercase_letter", "Lowercase_letter", "Number", "WS" ]
+
+ ruleNames = [ "T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "T__6",
+ "T__7", "T__8", "T__9", "T__10", "T__11", "T__12", "T__13",
+ "T__14", "T__15", "T__16", "T__17", "T__18", "T__19",
+ "T__20", "T__21", "Shape", "Uppercase_letter", "Lowercase_letter",
+ "Number", "WS" ]
+
+ grammarFileName = "DOT.g4"
+
+ def __init__(self, input=None, output:TextIO = sys.stdout):
+ super().__init__(input, output)
+ self.checkVersion("4.7.2")
+ self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
+ self._actions = None
+ self._predicates = None
+
+
diff --git a/dot_parser/DOTLexer.tokens b/dot_parser/DOTLexer.tokens
new file mode 100644
index 0000000..cb7f18d
--- /dev/null
+++ b/dot_parser/DOTLexer.tokens
@@ -0,0 +1,49 @@
+T__0=1
+T__1=2
+T__2=3
+T__3=4
+T__4=5
+T__5=6
+T__6=7
+T__7=8
+T__8=9
+T__9=10
+T__10=11
+T__11=12
+T__12=13
+T__13=14
+T__14=15
+T__15=16
+T__16=17
+T__17=18
+T__18=19
+T__19=20
+T__20=21
+T__21=22
+Shape=23
+Uppercase_letter=24
+Lowercase_letter=25
+Number=26
+WS=27
+'digraph'=1
+'{'=2
+'\n'=3
+'}'=4
+'['=5
+','=6
+']'=7
+'->'=8
+'label'=9
+'='=10
+'"'=11
+'s0'=12
+'shape'=13
+'height'=14
+'width'=15
+'-'=16
+'_'=17
+':'=18
+'?'=19
+'!'=20
+'+'=21
+'|'=22
diff --git a/dot_parser/DOTListener.py b/dot_parser/DOTListener.py
new file mode 100644
index 0000000..0019bba
--- /dev/null
+++ b/dot_parser/DOTListener.py
@@ -0,0 +1,136 @@
+# Generated from DOT.g4 by ANTLR 4.7.2
+from antlr4 import *
+if __name__ is not None and "." in __name__:
+ from .DOTParser import DOTParser
+else:
+ from DOTParser import DOTParser
+
+# This class defines a complete listener for a parse tree produced by DOTParser.
+class DOTListener(ParseTreeListener):
+
+ # Enter a parse tree produced by DOTParser#graph.
+ def enterGraph(self, ctx:DOTParser.GraphContext):
+ pass
+
+ # Exit a parse tree produced by DOTParser#graph.
+ def exitGraph(self, ctx:DOTParser.GraphContext):
+ pass
+
+
+ # Enter a parse tree produced by DOTParser#stmt_list.
+ def enterStmt_list(self, ctx:DOTParser.Stmt_listContext):
+ pass
+
+ # Exit a parse tree produced by DOTParser#stmt_list.
+ def exitStmt_list(self, ctx:DOTParser.Stmt_listContext):
+ pass
+
+
+ # Enter a parse tree produced by DOTParser#stmt.
+ def enterStmt(self, ctx:DOTParser.StmtContext):
+ pass
+
+ # Exit a parse tree produced by DOTParser#stmt.
+ def exitStmt(self, ctx:DOTParser.StmtContext):
+ pass
+
+
+ # Enter a parse tree produced by DOTParser#node.
+ def enterNode(self, ctx:DOTParser.NodeContext):
+ pass
+
+ # Exit a parse tree produced by DOTParser#node.
+ def exitNode(self, ctx:DOTParser.NodeContext):
+ pass
+
+
+ # Enter a parse tree produced by DOTParser#edge.
+ def enterEdge(self, ctx:DOTParser.EdgeContext):
+ pass
+
+ # Exit a parse tree produced by DOTParser#edge.
+ def exitEdge(self, ctx:DOTParser.EdgeContext):
+ pass
+
+
+ # Enter a parse tree produced by DOTParser#start_node.
+ def enterStart_node(self, ctx:DOTParser.Start_nodeContext):
+ pass
+
+ # Exit a parse tree produced by DOTParser#start_node.
+ def exitStart_node(self, ctx:DOTParser.Start_nodeContext):
+ pass
+
+
+ # Enter a parse tree produced by DOTParser#start_edge.
+ def enterStart_edge(self, ctx:DOTParser.Start_edgeContext):
+ pass
+
+ # Exit a parse tree produced by DOTParser#start_edge.
+ def exitStart_edge(self, ctx:DOTParser.Start_edgeContext):
+ pass
+
+
+ # Enter a parse tree produced by DOTParser#attr_list.
+ def enterAttr_list(self, ctx:DOTParser.Attr_listContext):
+ pass
+
+ # Exit a parse tree produced by DOTParser#attr_list.
+ def exitAttr_list(self, ctx:DOTParser.Attr_listContext):
+ pass
+
+
+ # Enter a parse tree produced by DOTParser#id_node.
+ def enterId_node(self, ctx:DOTParser.Id_nodeContext):
+ pass
+
+ # Exit a parse tree produced by DOTParser#id_node.
+ def exitId_node(self, ctx:DOTParser.Id_nodeContext):
+ pass
+
+
+ # Enter a parse tree produced by DOTParser#string.
+ def enterString(self, ctx:DOTParser.StringContext):
+ pass
+
+ # Exit a parse tree produced by DOTParser#string.
+ def exitString(self, ctx:DOTParser.StringContext):
+ pass
+
+
+ # Enter a parse tree produced by DOTParser#interaction.
+ def enterInteraction(self, ctx:DOTParser.InteractionContext):
+ pass
+
+ # Exit a parse tree produced by DOTParser#interaction.
+ def exitInteraction(self, ctx:DOTParser.InteractionContext):
+ pass
+
+
+ # Enter a parse tree produced by DOTParser#cfsm_interaction.
+ def enterCfsm_interaction(self, ctx:DOTParser.Cfsm_interactionContext):
+ pass
+
+ # Exit a parse tree produced by DOTParser#cfsm_interaction.
+ def exitCfsm_interaction(self, ctx:DOTParser.Cfsm_interactionContext):
+ pass
+
+
+ # Enter a parse tree produced by DOTParser#choice.
+ def enterChoice(self, ctx:DOTParser.ChoiceContext):
+ pass
+
+ # Exit a parse tree produced by DOTParser#choice.
+ def exitChoice(self, ctx:DOTParser.ChoiceContext):
+ pass
+
+
+ # Enter a parse tree produced by DOTParser#fork.
+ def enterFork(self, ctx:DOTParser.ForkContext):
+ pass
+
+ # Exit a parse tree produced by DOTParser#fork.
+ def exitFork(self, ctx:DOTParser.ForkContext):
+ pass
+
+
diff --git a/dot_parser/DOTParser.py b/dot_parser/DOTParser.py
new file mode 100644
index 0000000..6cf7d8c
--- /dev/null
+++ b/dot_parser/DOTParser.py
@@ -0,0 +1,1125 @@
+# Generated from DOT.g4 by ANTLR 4.7.2
+# encoding: utf-8
+from antlr4 import *
+from io import StringIO
+from typing import TextIO
+import sys
+
+def serializedATN():
+ with StringIO() as buf:
+ buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\35")
+ buf.write("\u0099\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
+ buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\3\2\3\2\3\2")
+ buf.write("\3\2\3\2\6\2\36\n\2\r\2\16\2\37\3\2\3\2\3\3\3\3\3\3\3")
+ buf.write("\4\3\4\3\4\3\4\5\4+\n\4\3\5\3\5\3\5\3\5\5\5\61\n\5\7\5")
+ buf.write("\63\n\5\f\5\16\5\66\13\5\3\5\3\5\3\6\3\6\3\6\3\6\3\6\3")
+ buf.write("\6\3\6\3\6\5\6B\n\6\3\6\3\6\5\6F\n\6\3\7\3\7\3\7\3\7\3")
+ buf.write("\7\3\7\3\7\5\7O\n\7\3\7\7\7R\n\7\f\7\16\7U\13\7\3\7\3")
+ buf.write("\7\3\b\3\b\3\b\3\b\3\b\3\b\3\b\5\b`\n\b\3\t\3\t\3\t\3")
+ buf.write("\t\5\tf\n\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3")
+ buf.write("\t\5\ts\n\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\5\t|\n\t\5\t~")
+ buf.write("\n\t\3\n\3\n\3\n\3\n\3\n\5\n\u0085\n\n\3\13\6\13\u0088")
+ buf.write("\n\13\r\13\16\13\u0089\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f")
+ buf.write("\3\f\3\f\3\f\5\f\u0097\n\f\3\f\2\2\r\2\4\6\b\n\f\16\20")
+ buf.write("\22\24\26\2\4\5\2\b\b\22\23\32\34\3\2\25\26\2\u00a3\2")
+ buf.write("\30\3\2\2\2\4#\3\2\2\2\6*\3\2\2\2\b,\3\2\2\2\n9\3\2\2")
+ buf.write("\2\fG\3\2\2\2\16X\3\2\2\2\20}\3\2\2\2\22\u0084\3\2\2\2")
+ buf.write("\24\u0087\3\2\2\2\26\u0096\3\2\2\2\30\31\7\3\2\2\31\32")
+ buf.write("\5\24\13\2\32\33\7\4\2\2\33\35\7\5\2\2\34\36\5\4\3\2\35")
+ buf.write("\34\3\2\2\2\36\37\3\2\2\2\37\35\3\2\2\2\37 \3\2\2\2 !")
+ buf.write("\3\2\2\2!\"\7\6\2\2\"\3\3\2\2\2#$\5\6\4\2$%\7\5\2\2%\5")
+ buf.write("\3\2\2\2&+\5\b\5\2\'+\5\n\6\2(+\5\f\7\2)+\5\16\b\2*&\3")
+ buf.write("\2\2\2*\'\3\2\2\2*(\3\2\2\2*)\3\2\2\2+\7\3\2\2\2,-\5\22")
+ buf.write("\n\2-\64\7\7\2\2.\60\5\20\t\2/\61\7\b\2\2\60/\3\2\2\2")
+ buf.write("\60\61\3\2\2\2\61\63\3\2\2\2\62.\3\2\2\2\63\66\3\2\2\2")
+ buf.write("\64\62\3\2\2\2\64\65\3\2\2\2\65\67\3\2\2\2\66\64\3\2\2")
+ buf.write("\2\678\7\t\2\28\t\3\2\2\29:\5\22\n\2:;\7\n\2\2;E\5\22")
+ buf.write("\n\2<=\7\7\2\2=>\7\13\2\2>?\7\f\2\2?A\7\r\2\2@B\5\26\f")
+ buf.write("\2A@\3\2\2\2AB\3\2\2\2BC\3\2\2\2CD\7\r\2\2DF\7\t\2\2E")
+ buf.write("<\3\2\2\2EF\3\2\2\2F\13\3\2\2\2GH\7\16\2\2HI\7\7\2\2I")
+ buf.write("J\7\13\2\2JK\7\f\2\2KL\7\r\2\2LS\7\r\2\2MO\7\b\2\2NM\3")
+ buf.write("\2\2\2NO\3\2\2\2OP\3\2\2\2PR\5\20\t\2QN\3\2\2\2RU\3\2")
+ buf.write("\2\2SQ\3\2\2\2ST\3\2\2\2TV\3\2\2\2US\3\2\2\2VW\7\t\2\2")
+ buf.write("W\r\3\2\2\2XY\7\16\2\2Y_\7\n\2\2Z[\7\r\2\2[\\\5\24\13")
+ buf.write("\2\\]\7\r\2\2]`\3\2\2\2^`\7\34\2\2_Z\3\2\2\2_^\3\2\2\2")
+ buf.write("`\17\3\2\2\2ab\7\13\2\2bc\7\f\2\2ce\7\r\2\2df\5\26\f\2")
+ buf.write("ed\3\2\2\2ef\3\2\2\2fg\3\2\2\2g~\7\r\2\2hi\7\17\2\2ij")
+ buf.write("\7\f\2\2j~\7\31\2\2kl\7\20\2\2lr\7\f\2\2mn\7\r\2\2no\5")
+ buf.write("\24\13\2op\7\r\2\2ps\3\2\2\2qs\7\34\2\2rm\3\2\2\2rq\3")
+ buf.write("\2\2\2s~\3\2\2\2tu\7\21\2\2u{\7\f\2\2vw\7\r\2\2wx\5\24")
+ buf.write("\13\2xy\7\r\2\2y|\3\2\2\2z|\7\34\2\2{v\3\2\2\2{z\3\2\2")
+ buf.write("\2|~\3\2\2\2}a\3\2\2\2}h\3\2\2\2}k\3\2\2\2}t\3\2\2\2~")
+ buf.write("\21\3\2\2\2\177\u0085\7\34\2\2\u0080\u0081\7\r\2\2\u0081")
+ buf.write("\u0082\5\24\13\2\u0082\u0083\7\r\2\2\u0083\u0085\3\2\2")
+ buf.write("\2\u0084\177\3\2\2\2\u0084\u0080\3\2\2\2\u0085\23\3\2")
+ buf.write("\2\2\u0086\u0088\t\2\2\2\u0087\u0086\3\2\2\2\u0088\u0089")
+ buf.write("\3\2\2\2\u0089\u0087\3\2\2\2\u0089\u008a\3\2\2\2\u008a")
+ buf.write("\25\3\2\2\2\u008b\u008c\7\32\2\2\u008c\u008d\7\n\2\2\u008d")
+ buf.write("\u008e\7\32\2\2\u008e\u008f\7\24\2\2\u008f\u0097\5\24")
+ buf.write("\13\2\u0090\u0091\7\32\2\2\u0091\u0092\7\32\2\2\u0092")
+ buf.write("\u0093\t\3\2\2\u0093\u0097\5\24\13\2\u0094\u0097\7\27")
+ buf.write("\2\2\u0095\u0097\7\30\2\2\u0096\u008b\3\2\2\2\u0096\u0090")
+ buf.write("\3\2\2\2\u0096\u0094\3\2\2\2\u0096\u0095\3\2\2\2\u0097")
+ buf.write("\27\3\2\2\2\22\37*\60\64AENS_er{}\u0084\u0089\u0096")
+ return buf.getvalue()
+
+
+class DOTParser ( Parser ):
+
+ grammarFileName = "DOT.g4"
+
+ atn = ATNDeserializer().deserialize(serializedATN())
+
+ decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
+
+ sharedContextCache = PredictionContextCache()
+
+ literalNames = [ "<INVALID>", "'digraph'", "'{'", "'\n'", "'}'", "'['",
+ "','", "']'", "'->'", "'label'", "'='", "'\"'", "'s0'",
+ "'shape'", "'height'", "'width'", "'-'", "'_'", "':'",
+ "'?'", "'!'", "'+'", "'|'" ]
+
+ symbolicNames = [ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "Shape", "Uppercase_letter",
+ "Lowercase_letter", "Number", "WS" ]
+
+ RULE_graph = 0
+ RULE_stmt_list = 1
+ RULE_stmt = 2
+ RULE_node = 3
+ RULE_edge = 4
+ RULE_start_node = 5
+ RULE_start_edge = 6
+ RULE_attr_list = 7
+ RULE_id_node = 8
+ RULE_string = 9
+ RULE_label = 10
+
+ ruleNames = [ "graph", "stmt_list", "stmt", "node", "edge", "start_node",
+ "start_edge", "attr_list", "id_node", "string", "label" ]
+
+ EOF = Token.EOF
+ T__0=1
+ T__1=2
+ T__2=3
+ T__3=4
+ T__4=5
+ T__5=6
+ T__6=7
+ T__7=8
+ T__8=9
+ T__9=10
+ T__10=11
+ T__11=12
+ T__12=13
+ T__13=14
+ T__14=15
+ T__15=16
+ T__16=17
+ T__17=18
+ T__18=19
+ T__19=20
+ T__20=21
+ T__21=22
+ Shape=23
+ Uppercase_letter=24
+ Lowercase_letter=25
+ Number=26
+ WS=27
+
+ def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
+ super().__init__(input, output)
+ self.checkVersion("4.7.2")
+ self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
+ self._predicates = None
+
+
+
+ class GraphContext(ParserRuleContext):
+
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def string(self):
+ return self.getTypedRuleContext(DOTParser.StringContext,0)
+
+
+ def stmt_list(self, i:int=None):
+ if i is None:
+ return self.getTypedRuleContexts(DOTParser.Stmt_listContext)
+ else:
+ return self.getTypedRuleContext(DOTParser.Stmt_listContext,i)
+
+
+ def getRuleIndex(self):
+ return DOTParser.RULE_graph
+
+ def enterRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "enterGraph" ):
+ listener.enterGraph(self)
+
+ def exitRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "exitGraph" ):
+ listener.exitGraph(self)
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitGraph" ):
+ return visitor.visitGraph(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+
+
+ def graph(self):
+
+ localctx = DOTParser.GraphContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 0, self.RULE_graph)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 22
+ self.match(DOTParser.T__0)
+ self.state = 23
+ self.string()
+ self.state = 24
+ self.match(DOTParser.T__1)
+ self.state = 25
+ self.match(DOTParser.T__2)
+ self.state = 27
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while True:
+ self.state = 26
+ self.stmt_list()
+ self.state = 29
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << DOTParser.T__10) | (1 << DOTParser.T__11) | (1 << DOTParser.Number))) != 0)):
+ break
+
+ self.state = 31
+ self.match(DOTParser.T__3)
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Stmt_listContext(ParserRuleContext):
+
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def stmt(self):
+ return self.getTypedRuleContext(DOTParser.StmtContext,0)
+
+
+ def getRuleIndex(self):
+ return DOTParser.RULE_stmt_list
+
+ def enterRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "enterStmt_list" ):
+ listener.enterStmt_list(self)
+
+ def exitRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "exitStmt_list" ):
+ listener.exitStmt_list(self)
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitStmt_list" ):
+ return visitor.visitStmt_list(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+
+
+ def stmt_list(self):
+
+ localctx = DOTParser.Stmt_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 2, self.RULE_stmt_list)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 33
+ self.stmt()
+ self.state = 34
+ self.match(DOTParser.T__2)
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class StmtContext(ParserRuleContext):
+
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def node(self):
+ return self.getTypedRuleContext(DOTParser.NodeContext,0)
+
+
+ def edge(self):
+ return self.getTypedRuleContext(DOTParser.EdgeContext,0)
+
+
+ def start_node(self):
+ return self.getTypedRuleContext(DOTParser.Start_nodeContext,0)
+
+
+ def start_edge(self):
+ return self.getTypedRuleContext(DOTParser.Start_edgeContext,0)
+
+
+ def getRuleIndex(self):
+ return DOTParser.RULE_stmt
+
+ def enterRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "enterStmt" ):
+ listener.enterStmt(self)
+
+ def exitRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "exitStmt" ):
+ listener.exitStmt(self)
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitStmt" ):
+ return visitor.visitStmt(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+
+
+ def stmt(self):
+
+ localctx = DOTParser.StmtContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 4, self.RULE_stmt)
+ try:
+ self.state = 40
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,1,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 36
+ self.node()
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 37
+ self.edge()
+ pass
+
+ elif la_ == 3:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 38
+ self.start_node()
+ pass
+
+ elif la_ == 4:
+ self.enterOuterAlt(localctx, 4)
+ self.state = 39
+ self.start_edge()
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class NodeContext(ParserRuleContext):
+
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def id_node(self):
+ return self.getTypedRuleContext(DOTParser.Id_nodeContext,0)
+
+
+ def attr_list(self, i:int=None):
+ if i is None:
+ return self.getTypedRuleContexts(DOTParser.Attr_listContext)
+ else:
+ return self.getTypedRuleContext(DOTParser.Attr_listContext,i)
+
+
+ def getRuleIndex(self):
+ return DOTParser.RULE_node
+
+ def enterRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "enterNode" ):
+ listener.enterNode(self)
+
+ def exitRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "exitNode" ):
+ listener.exitNode(self)
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitNode" ):
+ return visitor.visitNode(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+
+
+ def node(self):
+
+ localctx = DOTParser.NodeContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 6, self.RULE_node)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 42
+ self.id_node()
+ self.state = 43
+ self.match(DOTParser.T__4)
+ self.state = 50
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << DOTParser.T__8) | (1 << DOTParser.T__12) | (1 << DOTParser.T__13) | (1 << DOTParser.T__14))) != 0):
+ self.state = 44
+ self.attr_list()
+ self.state = 46
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==DOTParser.T__5:
+ self.state = 45
+ self.match(DOTParser.T__5)
+
+
+ self.state = 52
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ self.state = 53
+ self.match(DOTParser.T__6)
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class EdgeContext(ParserRuleContext):
+
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def id_node(self, i:int=None):
+ if i is None:
+ return self.getTypedRuleContexts(DOTParser.Id_nodeContext)
+ else:
+ return self.getTypedRuleContext(DOTParser.Id_nodeContext,i)
+
+
+ def label(self):
+ return self.getTypedRuleContext(DOTParser.LabelContext,0)
+
+
+ def getRuleIndex(self):
+ return DOTParser.RULE_edge
+
+ def enterRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "enterEdge" ):
+ listener.enterEdge(self)
+
+ def exitRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "exitEdge" ):
+ listener.exitEdge(self)
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitEdge" ):
+ return visitor.visitEdge(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+
+
+ def edge(self):
+
+ localctx = DOTParser.EdgeContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 8, self.RULE_edge)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 55
+ self.id_node()
+ self.state = 56
+ self.match(DOTParser.T__7)
+ self.state = 57
+ self.id_node()
+ self.state = 67
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==DOTParser.T__4:
+ self.state = 58
+ self.match(DOTParser.T__4)
+ self.state = 59
+ self.match(DOTParser.T__8)
+ self.state = 60
+ self.match(DOTParser.T__9)
+ self.state = 61
+ self.match(DOTParser.T__10)
+ self.state = 63
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << DOTParser.T__20) | (1 << DOTParser.T__21) | (1 << DOTParser.Uppercase_letter))) != 0):
+ self.state = 62
+ self.label()
+
+
+ self.state = 65
+ self.match(DOTParser.T__10)
+ self.state = 66
+ self.match(DOTParser.T__6)
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Start_nodeContext(ParserRuleContext):
+
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def attr_list(self, i:int=None):
+ if i is None:
+ return self.getTypedRuleContexts(DOTParser.Attr_listContext)
+ else:
+ return self.getTypedRuleContext(DOTParser.Attr_listContext,i)
+
+
+ def getRuleIndex(self):
+ return DOTParser.RULE_start_node
+
+ def enterRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "enterStart_node" ):
+ listener.enterStart_node(self)
+
+ def exitRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "exitStart_node" ):
+ listener.exitStart_node(self)
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitStart_node" ):
+ return visitor.visitStart_node(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+
+
+ def start_node(self):
+
+ localctx = DOTParser.Start_nodeContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 10, self.RULE_start_node)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 69
+ self.match(DOTParser.T__11)
+ self.state = 70
+ self.match(DOTParser.T__4)
+ self.state = 71
+ self.match(DOTParser.T__8)
+ self.state = 72
+ self.match(DOTParser.T__9)
+ self.state = 73
+ self.match(DOTParser.T__10)
+ self.state = 74
+ self.match(DOTParser.T__10)
+ self.state = 81
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << DOTParser.T__5) | (1 << DOTParser.T__8) | (1 << DOTParser.T__12) | (1 << DOTParser.T__13) | (1 << DOTParser.T__14))) != 0):
+ self.state = 76
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==DOTParser.T__5:
+ self.state = 75
+ self.match(DOTParser.T__5)
+
+
+ self.state = 78
+ self.attr_list()
+ self.state = 83
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ self.state = 84
+ self.match(DOTParser.T__6)
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Start_edgeContext(ParserRuleContext):
+
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def string(self):
+ return self.getTypedRuleContext(DOTParser.StringContext,0)
+
+
+ def Number(self):
+ return self.getToken(DOTParser.Number, 0)
+
+ def getRuleIndex(self):
+ return DOTParser.RULE_start_edge
+
+ def enterRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "enterStart_edge" ):
+ listener.enterStart_edge(self)
+
+ def exitRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "exitStart_edge" ):
+ listener.exitStart_edge(self)
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitStart_edge" ):
+ return visitor.visitStart_edge(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+
+
+ def start_edge(self):
+
+ localctx = DOTParser.Start_edgeContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 12, self.RULE_start_edge)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 86
+ self.match(DOTParser.T__11)
+ self.state = 87
+ self.match(DOTParser.T__7)
+ self.state = 93
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [DOTParser.T__10]:
+ self.state = 88
+ self.match(DOTParser.T__10)
+ self.state = 89
+ self.string()
+ self.state = 90
+ self.match(DOTParser.T__10)
+ pass
+ elif token in [DOTParser.Number]:
+ self.state = 92
+ self.match(DOTParser.Number)
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Attr_listContext(ParserRuleContext):
+
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def label(self):
+ return self.getTypedRuleContext(DOTParser.LabelContext,0)
+
+
+ def Shape(self):
+ return self.getToken(DOTParser.Shape, 0)
+
+ def string(self):
+ return self.getTypedRuleContext(DOTParser.StringContext,0)
+
+
+ def Number(self):
+ return self.getToken(DOTParser.Number, 0)
+
+ def getRuleIndex(self):
+ return DOTParser.RULE_attr_list
+
+ def enterRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "enterAttr_list" ):
+ listener.enterAttr_list(self)
+
+ def exitRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "exitAttr_list" ):
+ listener.exitAttr_list(self)
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitAttr_list" ):
+ return visitor.visitAttr_list(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+
+
+ def attr_list(self):
+
+ localctx = DOTParser.Attr_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 14, self.RULE_attr_list)
+ self._la = 0 # Token type
+ try:
+ self.state = 123
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [DOTParser.T__8]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 95
+ self.match(DOTParser.T__8)
+ self.state = 96
+ self.match(DOTParser.T__9)
+ self.state = 97
+ self.match(DOTParser.T__10)
+ self.state = 99
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << DOTParser.T__20) | (1 << DOTParser.T__21) | (1 << DOTParser.Uppercase_letter))) != 0):
+ self.state = 98
+ self.label()
+
+
+ self.state = 101
+ self.match(DOTParser.T__10)
+ pass
+ elif token in [DOTParser.T__12]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 102
+ self.match(DOTParser.T__12)
+ self.state = 103
+ self.match(DOTParser.T__9)
+ self.state = 104
+ self.match(DOTParser.Shape)
+ pass
+ elif token in [DOTParser.T__13]:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 105
+ self.match(DOTParser.T__13)
+ self.state = 106
+ self.match(DOTParser.T__9)
+ self.state = 112
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [DOTParser.T__10]:
+ self.state = 107
+ self.match(DOTParser.T__10)
+ self.state = 108
+ self.string()
+ self.state = 109
+ self.match(DOTParser.T__10)
+ pass
+ elif token in [DOTParser.Number]:
+ self.state = 111
+ self.match(DOTParser.Number)
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ pass
+ elif token in [DOTParser.T__14]:
+ self.enterOuterAlt(localctx, 4)
+ self.state = 114
+ self.match(DOTParser.T__14)
+ self.state = 115
+ self.match(DOTParser.T__9)
+ self.state = 121
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [DOTParser.T__10]:
+ self.state = 116
+ self.match(DOTParser.T__10)
+ self.state = 117
+ self.string()
+ self.state = 118
+ self.match(DOTParser.T__10)
+ pass
+ elif token in [DOTParser.Number]:
+ self.state = 120
+ self.match(DOTParser.Number)
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Id_nodeContext(ParserRuleContext):
+
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def Number(self):
+ return self.getToken(DOTParser.Number, 0)
+
+ def string(self):
+ return self.getTypedRuleContext(DOTParser.StringContext,0)
+
+
+ def getRuleIndex(self):
+ return DOTParser.RULE_id_node
+
+ def enterRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "enterId_node" ):
+ listener.enterId_node(self)
+
+ def exitRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "exitId_node" ):
+ listener.exitId_node(self)
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitId_node" ):
+ return visitor.visitId_node(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+
+
+ def id_node(self):
+
+ localctx = DOTParser.Id_nodeContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 16, self.RULE_id_node)
+ try:
+ self.state = 130
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [DOTParser.Number]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 125
+ self.match(DOTParser.Number)
+ pass
+ elif token in [DOTParser.T__10]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 126
+ self.match(DOTParser.T__10)
+ self.state = 127
+ self.string()
+ self.state = 128
+ self.match(DOTParser.T__10)
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class StringContext(ParserRuleContext):
+
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def Uppercase_letter(self, i:int=None):
+ if i is None:
+ return self.getTokens(DOTParser.Uppercase_letter)
+ else:
+ return self.getToken(DOTParser.Uppercase_letter, i)
+
+ def Lowercase_letter(self, i:int=None):
+ if i is None:
+ return self.getTokens(DOTParser.Lowercase_letter)
+ else:
+ return self.getToken(DOTParser.Lowercase_letter, i)
+
+ def Number(self, i:int=None):
+ if i is None:
+ return self.getTokens(DOTParser.Number)
+ else:
+ return self.getToken(DOTParser.Number, i)
+
+ def getRuleIndex(self):
+ return DOTParser.RULE_string
+
+ def enterRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "enterString" ):
+ listener.enterString(self)
+
+ def exitRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "exitString" ):
+ listener.exitString(self)
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitString" ):
+ return visitor.visitString(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+
+
+ def string(self):
+
+ localctx = DOTParser.StringContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 18, self.RULE_string)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 133
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while True:
+ self.state = 132
+ _la = self._input.LA(1)
+ if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << DOTParser.T__5) | (1 << DOTParser.T__15) | (1 << DOTParser.T__16) | (1 << DOTParser.Uppercase_letter) | (1 << DOTParser.Lowercase_letter) | (1 << DOTParser.Number))) != 0)):
+ self._errHandler.recoverInline(self)
+ else:
+ self._errHandler.reportMatch(self)
+ self.consume()
+ self.state = 135
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << DOTParser.T__5) | (1 << DOTParser.T__15) | (1 << DOTParser.T__16) | (1 << DOTParser.Uppercase_letter) | (1 << DOTParser.Lowercase_letter) | (1 << DOTParser.Number))) != 0)):
+ break
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class LabelContext(ParserRuleContext):
+
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+
+ def getRuleIndex(self):
+ return DOTParser.RULE_label
+
+
+ def copyFrom(self, ctx:ParserRuleContext):
+ super().copyFrom(ctx)
+
+
+
+ class ForkContext(LabelContext):
+
+ def __init__(self, parser, ctx:ParserRuleContext): # actually a DOTParser.LabelContext
+ super().__init__(parser)
+ self.copyFrom(ctx)
+
+
+ def enterRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "enterFork" ):
+ listener.enterFork(self)
+
+ def exitRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "exitFork" ):
+ listener.exitFork(self)
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitFork" ):
+ return visitor.visitFork(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+ class Cfsm_interactionContext(LabelContext):
+
+ def __init__(self, parser, ctx:ParserRuleContext): # actually a DOTParser.LabelContext
+ super().__init__(parser)
+ self.copyFrom(ctx)
+
+ def Uppercase_letter(self, i:int=None):
+ if i is None:
+ return self.getTokens(DOTParser.Uppercase_letter)
+ else:
+ return self.getToken(DOTParser.Uppercase_letter, i)
+ def string(self):
+ return self.getTypedRuleContext(DOTParser.StringContext,0)
+
+
+ def enterRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "enterCfsm_interaction" ):
+ listener.enterCfsm_interaction(self)
+
+ def exitRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "exitCfsm_interaction" ):
+ listener.exitCfsm_interaction(self)
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitCfsm_interaction" ):
+ return visitor.visitCfsm_interaction(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+ class InteractionContext(LabelContext):
+
+ def __init__(self, parser, ctx:ParserRuleContext): # actually a DOTParser.LabelContext
+ super().__init__(parser)
+ self.copyFrom(ctx)
+
+ def Uppercase_letter(self, i:int=None):
+ if i is None:
+ return self.getTokens(DOTParser.Uppercase_letter)
+ else:
+ return self.getToken(DOTParser.Uppercase_letter, i)
+ def string(self):
+ return self.getTypedRuleContext(DOTParser.StringContext,0)
+
+
+ def enterRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "enterInteraction" ):
+ listener.enterInteraction(self)
+
+ def exitRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "exitInteraction" ):
+ listener.exitInteraction(self)
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitInteraction" ):
+ return visitor.visitInteraction(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+ class ChoiceContext(LabelContext):
+
+ def __init__(self, parser, ctx:ParserRuleContext): # actually a DOTParser.LabelContext
+ super().__init__(parser)
+ self.copyFrom(ctx)
+
+
+ def enterRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "enterChoice" ):
+ listener.enterChoice(self)
+
+ def exitRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "exitChoice" ):
+ listener.exitChoice(self)
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitChoice" ):
+ return visitor.visitChoice(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+
+ def label(self):
+
+ localctx = DOTParser.LabelContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 20, self.RULE_label)
+ self._la = 0 # Token type
+ try:
+ self.state = 148
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,15,self._ctx)
+ if la_ == 1:
+ localctx = DOTParser.InteractionContext(self, localctx)
+ self.enterOuterAlt(localctx, 1)
+ self.state = 137
+ self.match(DOTParser.Uppercase_letter)
+ self.state = 138
+ self.match(DOTParser.T__7)
+ self.state = 139
+ self.match(DOTParser.Uppercase_letter)
+ self.state = 140
+ self.match(DOTParser.T__17)
+ self.state = 141
+ self.string()
+ pass
+
+ elif la_ == 2:
+ localctx = DOTParser.Cfsm_interactionContext(self, localctx)
+ self.enterOuterAlt(localctx, 2)
+ self.state = 142
+ self.match(DOTParser.Uppercase_letter)
+ self.state = 143
+ self.match(DOTParser.Uppercase_letter)
+ self.state = 144
+ _la = self._input.LA(1)
+ if not(_la==DOTParser.T__18 or _la==DOTParser.T__19):
+ self._errHandler.recoverInline(self)
+ else:
+ self._errHandler.reportMatch(self)
+ self.consume()
+ self.state = 145
+ self.string()
+ pass
+
+ elif la_ == 3:
+ localctx = DOTParser.ChoiceContext(self, localctx)
+ self.enterOuterAlt(localctx, 3)
+ self.state = 146
+ self.match(DOTParser.T__20)
+ pass
+
+ elif la_ == 4:
+ localctx = DOTParser.ForkContext(self, localctx)
+ self.enterOuterAlt(localctx, 4)
+ self.state = 147
+ self.match(DOTParser.T__21)
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+
+
+
+
diff --git a/dot_parser/DOTVisitor.py b/dot_parser/DOTVisitor.py
new file mode 100644
index 0000000..152d1d6
--- /dev/null
+++ b/dot_parser/DOTVisitor.py
@@ -0,0 +1,83 @@
+# Generated from DOT.g4 by ANTLR 4.7.2
+from antlr4 import *
+if __name__ is not None and "." in __name__:
+ from .DOTParser import DOTParser
+else:
+ from DOTParser import DOTParser
+
+# This class defines a complete generic visitor for a parse tree produced by DOTParser.
+
class DOTVisitor(ParseTreeVisitor):
    # Auto-generated by ANTLR (see makeParser.sh). Every visitXxx method is
    # a default hook that simply visits the node's children; subclasses
    # override the hooks they care about. Do not edit by hand — regenerate
    # from DOT.g4 instead.

    # Visit a parse tree produced by DOTParser#graph.
    def visitGraph(self, ctx:DOTParser.GraphContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by DOTParser#stmt_list.
    def visitStmt_list(self, ctx:DOTParser.Stmt_listContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by DOTParser#stmt.
    def visitStmt(self, ctx:DOTParser.StmtContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by DOTParser#node.
    def visitNode(self, ctx:DOTParser.NodeContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by DOTParser#edge.
    def visitEdge(self, ctx:DOTParser.EdgeContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by DOTParser#start_node.
    def visitStart_node(self, ctx:DOTParser.Start_nodeContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by DOTParser#start_edge.
    def visitStart_edge(self, ctx:DOTParser.Start_edgeContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by DOTParser#attr_list.
    def visitAttr_list(self, ctx:DOTParser.Attr_listContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by DOTParser#id_node.
    def visitId_node(self, ctx:DOTParser.Id_nodeContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by DOTParser#string.
    def visitString(self, ctx:DOTParser.StringContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by DOTParser#interaction.
    def visitInteraction(self, ctx:DOTParser.InteractionContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by DOTParser#cfsm_interaction.
    def visitCfsm_interaction(self, ctx:DOTParser.Cfsm_interactionContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by DOTParser#choice.
    def visitChoice(self, ctx:DOTParser.ChoiceContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by DOTParser#fork.
    def visitFork(self, ctx:DOTParser.ForkContext):
        return self.visitChildren(ctx)
+
+
+
+del DOTParser
\ No newline at end of file
diff --git a/dot_parser/MyErrorListener.py b/dot_parser/MyErrorListener.py
new file mode 100644
index 0000000..bd3b0ea
--- /dev/null
+++ b/dot_parser/MyErrorListener.py
@@ -0,0 +1,32 @@
+from antlr4.error.ErrorListener import ErrorListener
+
+
class parseError(Exception):
    """Raised by MyErrorListener when the input cannot be parsed.

    Attributes:
        message: the error description — a string, or a list holding the
            position string and the ANTLR message (see
            MyErrorListener.syntaxError).
    """

    def __init__(self, message):
        # Forward the message to Exception so str(e) and e.args carry it;
        # the original implementation left them empty.
        super().__init__(message)
        self.message = message
+
class MyErrorListener(ErrorListener):
    """Error listener that raises instead of printing.

    ANTLR's DefaultErrorListener writes diagnostics to stdout; this
    listener turns a syntax error into a parseError exception and treats
    every ambiguity/context report as fatal.
    """

    def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e):
        # Wrap the error position plus the ANTLR message into a parseError.
        position_of_the_error = f"InputError in [line: {line},column: {column}]"
        raise parseError([position_of_the_error, msg])

    def reportAmbiguity(self, recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs):
        raise Exception("reportAmbiguity")

    def reportAttemptingFullContext(self, recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs):
        raise Exception("reportAttemptingFullContext")

    def reportContextSensitivity(self, recognizer, dfa, startIndex, stopIndex, prediction, configs):
        raise Exception("reportContextSensitivity")
\ No newline at end of file
diff --git a/dot_parser/MyVisitor.py b/dot_parser/MyVisitor.py
new file mode 100644
index 0000000..482009a
--- /dev/null
+++ b/dot_parser/MyVisitor.py
@@ -0,0 +1,146 @@
+from chor_auto import ChoreographyAutomata
+from .utils import get_string_from_tokens, get_interaction_string
+from .DOTParser import DOTParser
+from .DOTVisitor import DOTVisitor
+
+
class ForkStatementDetected(Exception):
    """Raised when the input graph contains a fork statement,
    which this tool does not support.
    """

    def __init__(self):
        # Kept as a single-item list to mirror parseError's list-style
        # message. Fixes the original wording ("Fork founded").
        self.message = ["[WARNING] Fork found! I can't accept that"]
        super().__init__(self.message[0])
+
+
class MyVisitor(DOTVisitor):
    """
    Visitor (based on the generated ANTLR DOTVisitor) that walks the
    parse tree of a DOT graph and builds a choreography automaton.

    While visiting, it also checks whether the input is a Domitilla
    graph, by looking for:
      - labelled nodes;
      - choice nodes;
      - fork nodes (which raise ForkStatementDetected).

    visitGraph returns a 3-tuple: the ChoreographyAutomata instance,
    the Domitilla flag and the graph name.
    """
    def __init__(self):
        self.states = set()         # node identifiers (as strings)
        self.labels = set()         # interaction labels, e.g. "A->B:m"
        self.edges = set()          # 6-tuples (src, label, dst, sender, receiver, msg)
        self.s0 = None              # explicitly declared start node, if any
        self.participants = set()   # participant names (uppercase letters)
        self.domitilla = False      # True once a Domitilla feature is seen
                                    # (was 0; later assignments use True)

    # Visit a parse tree produced by DOTParser#graph.
    def visitGraph(self, ctx:DOTParser.GraphContext):
        self.visitChildren(ctx)
        graph_name = get_string_from_tokens(ctx.string())
        # Prefer the explicitly declared start node; otherwise fall back to
        # the smallest state id (NOTE(review): min() on string ids is
        # lexicographic — confirm that is the intended ordering).
        if self.s0 is not None:
            start_node = self.s0
        else:
            start_node = min(self.states)
        ca = ChoreographyAutomata(self.states, self.labels,
                                  self.edges, start_node,
                                  self.participants)

        return ca, self.domitilla, graph_name

    # Visit a parse tree produced by DOTParser#stmt_list.
    def visitStmt_list(self, ctx:DOTParser.Stmt_listContext):
        return self.visitChildren(ctx)

    # Visit a parse tree produced by DOTParser#stmt.
    def visitStmt(self, ctx:DOTParser.StmtContext):
        return self.visitChildren(ctx)

    # Visit a parse tree produced by DOTParser#node.
    def visitNode(self, ctx:DOTParser.NodeContext):
        return self.visitChildren(ctx)

    # Visit a parse tree produced by DOTParser#edge.
    def visitEdge(self, ctx:DOTParser.EdgeContext):
        # The two endpoints of the edge; an id_node is either a Number
        # token or a quoted string.
        nodes = ctx.id_node()
        source_node = nodes[0]
        dest_node = nodes[1]

        if source_node.Number() is not None:
            source_str = source_node.Number().getText()
        else:
            source_str = get_string_from_tokens(source_node.string())

        if dest_node.Number() is not None:
            dest_str = dest_node.Number().getText()
        else:
            dest_str = get_string_from_tokens(dest_node.string())

        # check for a label on the edge (hoisted: ctx.label() was called
        # three times in the original)
        label_ctx = ctx.label()
        if label_ctx is not None:
            if isinstance(label_ctx, DOTParser.InteractionContext):
                result = get_interaction_string(label_ctx)
                # add the edge with label
                # (source_node, label, dest_node, sender, receiver, message)
                self.edges.add((source_str, result[3], dest_str, result[0], result[1], result[2]))
        else:
            # add a no-label edge
            self.edges.add((source_str, "", dest_str, "", "", ""))

        return self.visitChildren(ctx)

    # Visit a parse tree produced by DOTParser#start_node.
    def visitStart_node(self, ctx: DOTParser.Start_nodeContext):
        return self.visitChildren(ctx)

    # Visit a parse tree produced by DOTParser#start_edge.
    def visitStart_edge(self, ctx: DOTParser.Start_edgeContext):
        # The edge leaving the invisible entry marker names the start state.
        if ctx.Number() is not None:
            start_node = ctx.Number().getText()
        else:
            start_node = get_string_from_tokens(ctx.string())
        self.s0 = start_node
        return self.visitChildren(ctx)

    # Visit a parse tree produced by DOTParser#attr_list.
    def visitAttr_list(self, ctx:DOTParser.Attr_listContext):
        label_ctx = ctx.label()
        if label_ctx is not None:
            if isinstance(label_ctx, DOTParser.ForkContext):
                # Fork statements are not supported.
                raise ForkStatementDetected
            else:
                # NOTE(review): any other label inside an attr_list marks the
                # graph as Domitilla — confirm attr_lists are reached only
                # from node statements here and not from edges.
                self.domitilla = True
        return self.visitChildren(ctx)

    # Visit a parse tree produced by DOTParser#id_node.
    def visitId_node(self, ctx: DOTParser.Id_nodeContext):
        # Every node identifier seen anywhere becomes a state.
        if ctx.Number() is not None:
            self.states.add(ctx.Number().getText())
        else:
            self.states.add(get_string_from_tokens(ctx.string()))
        return self.visitChildren(ctx)

    # Visit a parse tree produced by DOTParser#string.
    def visitString(self, ctx: DOTParser.StringContext):
        return self.visitChildren(ctx)

    # Visit a parse tree produced by DOTParser#interaction.
    def visitInteraction(self, ctx:DOTParser.InteractionContext):
        # Record both participants of the interaction.
        self.participants.add(str(ctx.Uppercase_letter(0)))
        self.participants.add(str(ctx.Uppercase_letter(1)))
        interaction_string = get_interaction_string(ctx)
        # Record the full "A->B:message" label.
        self.labels.add(interaction_string[3])
        return self.visitChildren(ctx)

    # Visit a parse tree produced by DOTParser#cfsm_interaction.
    def visitCfsm_interaction(self, ctx: DOTParser.Cfsm_interactionContext):
        return self.visitChildren(ctx)

    # Visit a parse tree produced by DOTParser#choice.
    def visitChoice(self, ctx:DOTParser.ChoiceContext):
        # A choice node is a Domitilla feature.
        self.domitilla = True
        return self.visitChildren(ctx)

    # Visit a parse tree produced by DOTParser#fork.
    def visitFork(self, ctx:DOTParser.ForkContext):
        raise ForkStatementDetected
diff --git a/dot_parser/domitilla_converter.py b/dot_parser/domitilla_converter.py
new file mode 100644
index 0000000..ab891bf
--- /dev/null
+++ b/dot_parser/domitilla_converter.py
@@ -0,0 +1,31 @@
+from antlr4 import *
+from .DOTLexer import DOTLexer
+from .DOTParser import DOTParser
+from .MyErrorListener import MyErrorListener
+from .domitilla_visitor import DomitillaVisitor
+
+
def domitilla_converter(ca, path_file, path_store):
    """Parse the Domitilla graph at ``path_file``, convert it into a plain
    choreography-automaton graph saved under ``path_store``, and return
    the updated automaton together with a "[CONVERTED] <path>" message.
    """
    input_stream = FileStream(path_file)
    # tokenize input into word (tokens)
    lexer = DOTLexer(input_stream)
    stream = CommonTokenStream(lexer)
    # parser these tokens to recognize the sentence structure
    # and build a parse tree
    parser = DOTParser(stream)
    # remove DefaultErrorListener
    parser.removeErrorListeners()
    lexer.removeErrorListeners()
    # add MyErrorListener (See MyErrorListener.py)
    parser.addErrorListener(MyErrorListener())
    lexer.addErrorListener(MyErrorListener())

    tree = parser.graph()
    result = DomitillaVisitor(ca, path_file, path_store).visit(tree)
    # result is a 2-tuple: the updated ca and the path of the
    # converted graph (see DomitillaVisitor.visitGraph)

    ca = result[0]
    message = "[CONVERTED] " + result[1]
    return ca, message
diff --git a/dot_parser/domitilla_visitor.py b/dot_parser/domitilla_visitor.py
new file mode 100644
index 0000000..f12ad46
--- /dev/null
+++ b/dot_parser/domitilla_visitor.py
@@ -0,0 +1,86 @@
+import os
+from graphviz import Digraph
+from chor_auto import ChoreographyAutomata
+from .utils import extract_name, get_string_from_tokens, get_interaction_string
+from .DOTParser import DOTParser
+from .DOTVisitor import DOTVisitor
+
+
class DomitillaVisitor(DOTVisitor):
    """Visitor that converts a Domitilla graph into a plain
    choreography-automaton graph.

    Domitilla stores interaction labels on nodes; this visitor projects
    each node label onto that node's incoming edges, renders the result
    with graphviz and saves it under ``path_store``.

    visitGraph returns a 2-tuple: the updated ChoreographyAutomata and
    the path of the converted .gv file.
    """

    def __init__(self, ca: ChoreographyAutomata, path_file, path_store):
        self.path_store = path_store              # directory for the converted graph
        self.ca = ca                              # choreography automata (updated in place)
        self.graph_name = extract_name(path_file)
        self.g = Digraph(self.graph_name)         # graphviz output graph

    def __project_labels__(self, node, interaction_struct):
        """
        Aim of this function is to project label
        (which in Domitilla are stored on nodes)
        on edges. For example, if there is a node
        like this:

            -----> [A -> B : m] ----->

        we'll project the label to the incoming edges
        of the node, so it will become:

            ---A->B:m---> [ ] ------->
        """
        # identify incoming edges of `node`
        incoming_edges = set()
        for edge in self.ca.edges:
            if edge[2] == node:
                incoming_edges.add(edge)

        # Edges are stored in a set, so we can't update them in place:
        # remove the old ones and add relabelled replacements.
        self.ca.edges -= incoming_edges
        for i in incoming_edges:
            # (source_node, label, dest_node, sender, receiver, message)
            self.ca.edges.add((i[0], interaction_struct[3], i[2], interaction_struct[0],
                               interaction_struct[1], interaction_struct[2]))

    def visitGraph(self, ctx: DOTParser.GraphContext):
        self.visitChildren(ctx)
        # Invisible entry marker pointing at the start state.
        self.g.node('s0', label="", shape='none', height='0', width='0')
        self.g.edge('s0', self.ca.s0)

        # Label projection leaves empty labels behind; drop them before rendering.
        self.ca.delete_epsilon_moves()
        for edge in self.ca.edges:
            self.g.edge(str(edge[0]), str(edge[2]), label=edge[1])

        new_path = os.path.join(self.path_store, "[Converted]" + str(self.graph_name) + ".gv")
        # save the graph
        self.g.save(new_path)

        # return CA and path of the converted graph
        return self.ca, new_path

    def visitNode(self, ctx: DOTParser.NodeContext):
        # attribute lists attached to this node statement
        attrs = ctx.attr_list()
        # resolve the node identifier (Number token or quoted string)
        node = ctx.id_node()
        if node.Number() is not None:
            node_id = node.Number().getText()
        else:
            node_id = get_string_from_tokens(node.string())

        for i in attrs:
            # check for a labelled node (the original comment said "edge",
            # but these are node attributes)
            label_context = i.label()
            if label_context is not None:
                # A Domitilla interaction label on the node: project it
                # onto the node's incoming edges.
                if isinstance(label_context, DOTParser.InteractionContext):
                    # Idiomatic bound call (was DomitillaVisitor.__project_labels__(self, ...)).
                    self.__project_labels__(node_id,
                                            get_interaction_string(label_context))
        return self.visitChildren(ctx)
diff --git a/dot_parser/main.py b/dot_parser/main.py
new file mode 100644
index 0000000..81bffd3
--- /dev/null
+++ b/dot_parser/main.py
@@ -0,0 +1,28 @@
+from antlr4 import *
+from .DOTLexer import DOTLexer
+from .DOTParser import DOTParser
+from .MyErrorListener import MyErrorListener
+from .MyVisitor import MyVisitor
+
+
def main(path_file):
    """Parse the DOT file at ``path_file`` and build its choreography automaton.

    Returns the 3-tuple produced by MyVisitor.visitGraph: the automaton,
    a flag telling whether the input was a Domitilla graph, and the
    graph name.
    """
    # Lex the raw input, then parse the token stream into a parse tree.
    lexer = DOTLexer(FileStream(path_file))
    parser = DOTParser(CommonTokenStream(lexer))

    # Swap the default listeners (which print to stdout) for
    # MyErrorListener, which raises on the first error instead.
    for recognizer in (lexer, parser):
        recognizer.removeErrorListeners()
        recognizer.addErrorListener(MyErrorListener())

    tree = parser.graph()
    return MyVisitor().visit(tree)
+
diff --git a/dot_parser/makeParser.sh b/dot_parser/makeParser.sh
new file mode 100755
index 0000000..5b1dbeb
--- /dev/null
+++ b/dot_parser/makeParser.sh
@@ -0,0 +1 @@
+antlr4 -Dlanguage=Python3 -visitor DOT.g4
diff --git a/dot_parser/utils.py b/dot_parser/utils.py
new file mode 100644
index 0000000..30674ca
--- /dev/null
+++ b/dot_parser/utils.py
@@ -0,0 +1,43 @@
+import os
+from .DOTParser import DOTParser
+"""
+This file contains several functions used by check_and_fill class
+and DomitillaConverter class
+"""
+
+
def extract_name(path_file):
    """Extract the graph name (file name without its extension) from a path.

    Uses os.path.splitext so that file names containing dots
    (e.g. ``ex3.2jlamp2021.dot``) keep everything except the final
    extension; the previous ``split('.')[0]`` truncated such names
    to the text before the first dot.
    """
    filename = os.path.split(path_file)[1]
    return os.path.splitext(filename)[0]
+
+
def get_string_from_tokens(ctx: DOTParser.StringContext):
    """Concatenate every child token of a DOTParser StringContext
    into a single string."""
    return "".join(str(child) for child in ctx.getChildren())
+
+
def get_interaction_string(ctx: DOTParser.InteractionContext):
    """Given an InteractionContext from DOTParser, return a 4-tuple:
        - the sender (A)
        - the receiver (B)
        - the message
        - a label string like "A->B:message"
    """
    sender = str(ctx.Uppercase_letter(0))
    receiver = str(ctx.Uppercase_letter(1))
    message = get_string_from_tokens(ctx.string())
    # Build the label from the parts (dead commented-out code removed).
    label = sender + '->' + receiver + ':' + message

    return sender, receiver, message, label
diff --git a/examples/Case-Study/Healthcare-Final.dot b/examples/Case-Study/Healthcare-Final.dot
new file mode 100644
index 0000000..830e3d5
--- /dev/null
+++ b/examples/Case-Study/Healthcare-Final.dot
@@ -0,0 +1,37 @@
+digraph PCref {
+ s0 [label="" height=0 width=0]
+ s0 -> 0
+ 0 -> 1 [label="GDH->RC:fndCor"]
+ 1 -> 2 [label="GDH->RCOO:assRes"]
+ 2 -> 3 [label="GDH->RCOO:perFunc"]
+ 3 -> 4 [label="RCOO->RC:mkPropRHS"]
+ 4 -> 5 [label="RC->RHS:gvCrite"]
+ 5 -> 6 [label="RCOO->AC:chkAut"]
+ 6 -> 5 [label="AC->RCOO:rptAut"]
+ 5 -> 5 [label="RCOO->GDH:proAcc"]
+ 5 -> 7 [label="RCOO->GDH:chkPol"]
+ 7 -> 5 [label="GDH->RCOO:confChkPol"]
+ 5 -> 12 [label="OTAM->RC:proCriteEV"]
+ 12 -> 5 [label="RC->OTAM:confCriteEV"]
+ 8 -> 9 [label="OTA->OTAM:chkAutAcc"]
+ 9 -> 10 [label="OTAM->EV:tecAss"]
+ 10 -> 11 [label="EV->OTAM:rptTec"]
+ 11 -> 13 [label="OTAM->ET:tcAss"]
+ 13 -> 14 [label="ET->OTAM:rtTec"]
+ 14 -> 15 [label="OTAM->RCOO:rprTec"]
+ 15 -> 16 [label="RCOO->GDH:rptMon"]
+ 16 -> 5 [label="GDH->LHA:takAct"]
+ 15 -> 5 [label="RCOO->GDH:sndPro"]
+ 5 -> 17 [label="LHA->GDH:seekAcc"]
+ 17 -> 18 [label="GDH->RCOO: forAcc"]
+ 18 -> 19 [label="RCOO->HAS: perInvs"]
+ 19 -> 20 [label="HAS->RCOO: rptInvs"]
+ 20 -> 5 [label="RCOO->LHA: decAcc"]
+ 20 -> 21 [label="RCOO->LHA:grntAcc"]
+ 21 -> 22 [label="LHA->GDH:reqAuth"]
+ 22 -> 23 [label="GDH->RCOO:perfoChk"]
+ 23 -> 24 [label="RCOO->SEC:perChk"]
+ 24 -> 25 [label="SEC->RCOO:report"]
+ 25 -> 8 [label="RCOO->LHA:feedback"]
+ 25 -> 8 [label="RCOO->OTA:manVer"]
+}
diff --git a/examples/Case-Study/Healthcare-Final.dot.png b/examples/Case-Study/Healthcare-Final.dot.png
new file mode 100644
index 0000000..116eefe
Binary files /dev/null and b/examples/Case-Study/Healthcare-Final.dot.png differ
diff --git a/examples/Case-Study/Healthcare.dot b/examples/Case-Study/Healthcare.dot
new file mode 100644
index 0000000..745c220
--- /dev/null
+++ b/examples/Case-Study/Healthcare.dot
@@ -0,0 +1,32 @@
+digraph PCref {
+ s0 [label="" height=0 width=0]
+ s0 -> 0
+ 0 -> 1 [label="GDH->RC:fndCor"]
+ 1 -> 2 [label="GDH->RCOO:assRes"]
+ 2 -> 3 [label="GDH->RCOO:perFunc"]
+ 3 -> 4 [label="RCOO->RC:mkPropRHS"]
+ 4 -> 5 [label="RC->RHS:gvCrite"]
+ 5 -> 6 [label="RCOO->AC:chkAut"]
+ 6 -> 5 [label="AC->RCOO:rptAut"]
+ 5 -> 5 [label="RCOO->GDH:proAcc"]
+ 5 -> 7 [label="RCOO->GDH:chkPol"]
+ 7 -> 5 [label="GDH->RCOO:confChkPol"]
+ 5 -> 12 [label="OTAM->RC:proCriteEV"]
+ 12 -> 5 [label="RC->OTAM:confCriteEV"]
+ 8 -> 9 [label="OTA->OTAM:chkAutAcc"]
+ 9 -> 10 [label="OTAM->EV:tecAss"]
+ 10 -> 11 [label="EV->OTAM:rptTec"]
+ 11 -> 13 [label="OTAM->ET:tcAss"]
+ 13 -> 14 [label="ET->OTAM:rtTec"]
+ 14 -> 15 [label="OTAM->RCOO:rprTec"]
+ 15 -> 16 [label="RCOO->GDH:rptMon"]
+ 16 -> 5 [label="GDH->LHA:takAct"]
+ 15 -> 5 [label="RCOO->GDH:sndPro"]
+ 5 -> 17 [label="LHA->GDH:seekAcc"]
+ 17 -> 18 [label="GDH->RCOO: forAcc"]
+ 18 -> 19 [label="RCOO->HAS: perInvs"]
+ 19 -> 20 [label="HAS->RCOO: rptInvs"]
+ 20 -> 5 [label="RCOO->LHA: decAcc"]
+ 20 -> 21 [label="RCOO->LHA:grntAcc"]
+ 21 -> 8 [label="RCOO->OTA:manVer"]
+}
diff --git a/examples/Case-Study/Healthcare.dot.png b/examples/Case-Study/Healthcare.dot.png
new file mode 100644
index 0000000..163cafe
Binary files /dev/null and b/examples/Case-Study/Healthcare.dot.png differ
diff --git a/examples/Case-Study/HealthcareExperiment.dot b/examples/Case-Study/HealthcareExperiment.dot
new file mode 100644
index 0000000..56448e7
--- /dev/null
+++ b/examples/Case-Study/HealthcareExperiment.dot
@@ -0,0 +1,36 @@
+digraph PCref {
+ s0 [label="" height=0 width=0]
+ s0 -> 0
+ 0 -> 1 [label="GDH->RC:fndCor"]
+ 1 -> 2 [label="GDH->RCOO:assRes"]
+ 2 -> 3 [label="GDH->RCOO:perFunc"]
+ 3 -> 4 [label="RCOO->RC:mkPropRHS"]
+ 4 -> 5 [label="RC->RHS:gvCrite"]
+ 5 -> 6 [label="RCOO->AC:chkAut"]
+ 6 -> 24 [label="AC->RCOO:rptAut"]
+ 24 -> 5 [label="RCOO->GDH:m1"]
+ 5 -> 23 [label="RCOO->GDH:proAcc"]
+ 23 -> 5 [label="RCOO->AC:m2"]
+ 5 -> 7 [label="RCOO->GDH:chkPol"]
+ 7 -> 5 [label="GDH->RCOO:confChkPol"]
+ 5 -> 12 [label="OTAM->RC:proCriteEV"]
+ 12 -> 5 [label="RC->OTAM:confCriteEV"]
+ 8 -> 9 [label="OTA->OTAM:chkAutAcc"]
+ 9 -> 10 [label="OTAM->EV:tecAss"]
+ 10 -> 11 [label="EV->OTAM:rptTec"]
+ 11 -> 13 [label="OTAM->ET:tcAss"]
+ 13 -> 14 [label="ET->OTAM:rtTec"]
+ 14 -> 15 [label="OTAM->RCOO:rprTec"]
+ 15 -> 16 [label="RCOO->GDH:rptMon"]
+ 16 -> 5 [label="GDH->LHA:takAct"]
+ 15 -> 22 [label="RCOO->GDH:sndPro"]
+ 22 -> 5 [label="GDH->LHA:takAct"]
+ 5 -> 17 [label="LHA->GDH:seekAcc"]
+ 17 -> 18 [label="GDH->RCOO: forAcc"]
+ 18 -> 19 [label="RCOO->HAS: perInvs"]
+ 19 -> 20 [label="HAS->RCOO: rptInvs"]
+ 20 -> 25 [label="RCOO->LHA: decAcc"]
+ 25 -> 5 [label="RCOO->OTA:m3"]
+ 20 -> 21 [label="RCOO->LHA:grntAcc"]
+ 21 -> 8 [label="RCOO->OTA:manVer"]
+}
diff --git a/examples/Case-Study/HealthcareExperiment.dot.png b/examples/Case-Study/HealthcareExperiment.dot.png
new file mode 100644
index 0000000..5681586
Binary files /dev/null and b/examples/Case-Study/HealthcareExperiment.dot.png differ
diff --git a/examples/Case-Study/Well-Branch 3rd Condition b/examples/Case-Study/Well-Branch 3rd Condition
new file mode 100644
index 0000000..d2d8c2b
--- /dev/null
+++ b/examples/Case-Study/Well-Branch 3rd Condition
@@ -0,0 +1,42 @@
+# SOLVED GDH
+Verified: NO. Well-branchedness in third condition:
+("['20', '5']", "['20', '21', '8', '9', '10', '11', '13', '14', '15', '16', '5']", 'due to participant GDH')
+("['20', '5']", "['20', '21', '8', '9', '10', '11', '13', '14', '15', '5']", 'due to participant GDH')
+
+("['15', '16', '5']", "['15', '5']", 'due to participant LHA') #SOLVED
+
+("['5', '6', '5']", "['5', '7', '5']", 'due to participant GDH')
+("['5', '6', '5']", "['5', '7', '5', '12']", 'due to participant GDH')
+("['5', '6', '5']", "['5', '7', '5', '17', '18', '19', '20', '21', '8', '9', '10', '11', '13', '14', '15', '16']", 'due to participant GDH')
+
+("['5', '6', '5']", "['5', '5']", 'due to participant GDH')
+
+("['5', '6', '5']", "['5', '5', '12']", 'due to participant GDH')
+("['5', '6', '5']", "['5', '5', '17', '18', '19', '20', '21', '8', '9', '10', '11', '13', '14', '15', '16']", 'due to participant GDH')
+("['5', '6', '5']", "['5', '5', '7']", 'due to participant GDH')
+("['5', '6', '5', '12']", "['5', '7', '5']", 'due to participant GDH')
+("['5', '6', '5', '12']", "['5', '5']", 'due to participant GDH')
+("['5', '6', '5', '17', '18', '19', '20', '21', '8', '9', '10', '11', '13', '14', '15', '16']", "['5', '7', '5']", 'due to participant OTAM')
+("['5', '6', '5', '17', '18', '19', '20', '21', '8', '9', '10', '11', '13', '14', '15', '16']", "['5', '5']", 'due to participant OTAM')
+("['5', '6', '5', '7']", "['5', '5']", 'due to participant AC')
+("['5', '7', '5']", "['5', '5', '12']", 'due to participant OTAM')
+("['5', '7', '5']", "['5', '5', '6']", 'due to participant AC')
+("['5', '7', '5']", "['5', '5', '17', '18', '19', '20', '21', '8', '9', '10', '11', '13', '14', '15', '16']", 'due to participant OTAM')
+("['5', '7', '5', '12']", "['5', '5']", 'due to participant OTAM')
+("['5', '7', '5', '6']", "['5', '5']", 'due to participant AC')
+("['5', '7', '5', '17', '18', '19', '20', '21', '8', '9', '10', '11', '13', '14', '15', '16']", "['5', '5']", 'due to participant OTAM')
+("['5', '5']", "['5', '5', '12']", 'due to participant OTAM')
+("['5', '5']", "['5', '5', '6']", 'due to participant AC')
+("['5', '5']", "['5', '5', '17', '18', '19', '20', '21', '8', '9', '10', '11', '13', '14', '15', '16']", 'due to participant GDH')
+("['5', '5']", "['5', '5', '7']", 'due to participant GDH')
+
+
+
+
+
+
+{Verified: NO. Well-branchedness in third condition:
+("['20', '21', '8', '9', '10', '11', '13', '14', '15', '16', '5']", "['20', '25', '5']", 'due to participant GDH')
+("['20', '21', '8', '9', '10', '11', '13', '14', '15', '22', '5']", "['20', '25', '5']", 'due to participant GDH')
+
+("['5', '6', '24', '5']", "['5', '7', '5']", 'due to participant AC')AND("['5', '6', '24', '5']", "['5', '7', '5', '17', '18', '19', '20', '21', '8', '9', '10', '11', '13', '14', '15', '16']", 'due to participant OTA')AND("['5', '6', '24', '5']", "['5', '7', '5', '17', '18', '19', '20', '21', '8', '9', '10', '11', '13', '14', '15', '22']", 'due to participant OTA')AND("['5', '6', '24', '5']", "['5', '7', '5', '17', '18', '19', '20', '25']", 'due to participant AC')AND("['5', '6', '24', '5']", "['5', '7', '5', '23']", 'due to participant AC')AND("['5', '6', '24', '5']", "['5', '7', '5', '12']", 'due to participant RC')AND("['5', '6', '24', '5']", "['5', '23', '5', '17', '18', '19', '20', '21', '8', '9', '10', '11', '13', '14', '15', '16']", 'due to participant OTA')AND("['5', '6', '24', '5']", "['5', '23', '5', '17', '18', '19', '20', '21', '8', '9', '10', '11', '13', '14', '15', '22']", 'due to participant OTA')AND("['5', '6', '24', '5']", "['5', '23', '5', '17', '18', '19', '20', '25']", 'due to participant LHA')AND("['5', '6', '24', '5']", "['5', '23', '5', '12']", 'due to participant RC')AND("['5', '6', '24', '5', '17', '18', '19', '20', '21', '8', '9', '10', '11', '13', '14', '15', '16']", "['5', '7', '5']", 'due to participant OTA')AND("['5', '6', '24', '5', '17', '18', '19', '20', '21', '8', '9', '10', '11', '13', '14', '15', '16']", "['5', '23', '5']", 'due to participant OTA')AND("['5', '6', '24', '5', '17', '18', '19', '20', '21', '8', '9', '10', '11', '13', '14', '15', '22']", "['5', '7', '5']", 'due to participant OTA')AND("['5', '6', '24', '5', '17', '18', '19', '20', '21', '8', '9', '10', '11', '13', '14', '15', '22']", "['5', '23', '5']", 'due to participant OTA')AND("['5', '6', '24', '5', '17', '18', '19', '20', '25']", "['5', '7', '5']", 'due to participant AC')AND("['5', '6', '24', '5', '17', '18', '19', '20', '25']", "['5', '23', '5']", 'due to participant LHA')AND("['5', '6', '24', '5', '23']", "['5', '7', '5']", 'due to participant 
AC')AND("['5', '6', '24', '5', '12']", "['5', '7', '5']", 'due to participant RC')AND("['5', '6', '24', '5', '12']", "['5', '23', '5']", 'due to participant RC')AND("['5', '7', '5']", "['5', '23', '5']", 'due to participant AC')AND("['5', '7', '5']", "['5', '23', '5', '6', '24']", 'due to participant AC')AND("['5', '7', '5']", "['5', '23', '5', '17', '18', '19', '20', '21', '8', '9', '10', '11', '13', '14', '15', '16']", 'due to participant OTA')AND("['5', '7', '5']", "['5', '23', '5', '17', '18', '19', '20', '21', '8', '9', '10', '11', '13', '14', '15', '22']", 'due to participant OTA')AND("['5', '7', '5']", "['5', '23', '5', '17', '18', '19', '20', '25']", 'due to participant AC')AND("['5', '7', '5']", "['5', '23', '5', '12']", 'due to participant RC')AND("['5', '7', '5', '17', '18', '19', '20', '21', '8', '9', '10', '11', '13', '14', '15', '16']", "['5', '23', '5']", 'due to participant OTA')AND("['5', '7', '5', '17', '18', '19', '20', '21', '8', '9', '10', '11', '13', '14', '15', '22']", "['5', '23', '5']", 'due to participant OTA')AND("['5', '7', '5', '17', '18', '19', '20', '25']", "['5', '23', '5']", 'due to participant AC')AND("['5', '7', '5', '12']", "['5', '23', '5']", 'due to participant RC')AND}
diff --git a/examples/chorAuto/cref-coord2020.dot b/examples/chorAuto/cref-coord2020.dot
new file mode 100644
index 0000000..a9d5896
--- /dev/null
+++ b/examples/chorAuto/cref-coord2020.dot
@@ -0,0 +1,13 @@
+digraph cref {
+ s0 [label="" height=0 width=0]
+ s0 -> 0
+ 0 -> 1 [label="C->S:req"]
+ 1 -> 2 [label="S->C:res"]
+ 2 -> 3 [label="S->L:cnt"]
+ 3 -> 4 [label="C->S:ref"]
+ 4 -> 3 [label="S->C:noRef"]
+ 4 -> 2 [label="S->C:res"]
+ 3 -> 0 [label="C->S:ok"]
+ 3 -> 5 [label="C->S:bye"]
+ 5 -> 6 [label="S->L:bye"]
+}
\ No newline at end of file
diff --git a/examples/chorAuto/cref-coord2020.dot.png b/examples/chorAuto/cref-coord2020.dot.png
new file mode 100644
index 0000000..9a5da28
Binary files /dev/null and b/examples/chorAuto/cref-coord2020.dot.png differ
diff --git a/examples/chorAuto/es1.dot b/examples/chorAuto/es1.dot
new file mode 100644
index 0000000..6faaf88
--- /dev/null
+++ b/examples/chorAuto/es1.dot
@@ -0,0 +1,7 @@
+digraph c1 {
+ s0 [label="" height=0 width=0]
+ s0 -> 0
+ 0 -> 1 [label="A->B:l"]
+ 1 -> 2 [label="B->E:l"]
+ 0 -> 3 [label="A->B:r"]
+}
\ No newline at end of file
diff --git a/examples/chorAuto/es1.dot.png b/examples/chorAuto/es1.dot.png
new file mode 100644
index 0000000..dc879f7
Binary files /dev/null and b/examples/chorAuto/es1.dot.png differ
diff --git a/examples/chorAuto/es2.dot b/examples/chorAuto/es2.dot
new file mode 100644
index 0000000..5797ed4
--- /dev/null
+++ b/examples/chorAuto/es2.dot
@@ -0,0 +1,7 @@
+digraph c2 {
+ s0 [label="" height=0 width=0]
+ s0 -> 0
+ 0 -> 1 [label="D->B:l"]
+ 1 -> 2 [label="B->E:l"]
+ 0 -> 3 [label="D->B:r"]
+}
\ No newline at end of file
diff --git a/examples/chorAuto/esNoWb1.dot b/examples/chorAuto/esNoWb1.dot
new file mode 100644
index 0000000..f9d3dc7
--- /dev/null
+++ b/examples/chorAuto/esNoWb1.dot
@@ -0,0 +1,10 @@
+digraph noWb1 {
+ s0 [label="" height=0 width=0]
+ s0 -> 0
+ 0 -> 1 [label="A->B:l"]
+ 1 -> 2 [label="B->C:n"]
+ 2 -> 3 [label="C->D:l"]
+ 0 -> 4 [label="A->B:r"]
+ 4 -> 5 [label="B->C:n"]
+ 5 -> 6 [label="C->D:r"]
+}
\ No newline at end of file
diff --git a/examples/chorAuto/esNoWb1.dot.png b/examples/chorAuto/esNoWb1.dot.png
new file mode 100644
index 0000000..7470666
Binary files /dev/null and b/examples/chorAuto/esNoWb1.dot.png differ
diff --git a/examples/chorAuto/esNoWb2.dot b/examples/chorAuto/esNoWb2.dot
new file mode 100644
index 0000000..82ec5a0
--- /dev/null
+++ b/examples/chorAuto/esNoWb2.dot
@@ -0,0 +1,7 @@
+digraph noWb2 {
+ s0 [label="" height=0 width=0]
+ s0 -> 0
+ 0 -> 1 [label="A->B:loop"]
+ 1 -> 0 [label="B->C:loop"]
+ 0 -> 2 [label="A->B:exit"]
+}
\ No newline at end of file
diff --git a/examples/chorAuto/esNoWs.dot b/examples/chorAuto/esNoWs.dot
new file mode 100644
index 0000000..c6c15cd
--- /dev/null
+++ b/examples/chorAuto/esNoWs.dot
@@ -0,0 +1,8 @@
+digraph noWs {
+ s0 [label="" height=0 width=0]
+ s0 -> 0
+ 0 -> 1 [label="A->B:l"]
+ 1 -> 2 [label="D->E:l"]
+ 0 -> 3 [label="D->E:l"]
+ 3 -> 4 [label="A->B:l"]
+}
\ No newline at end of file
diff --git a/examples/chorAuto/esWb.dot b/examples/chorAuto/esWb.dot
new file mode 100644
index 0000000..0ab7c92
--- /dev/null
+++ b/examples/chorAuto/esWb.dot
@@ -0,0 +1,8 @@
+digraph Wb {
+ s0 [label="" height=0 width=0]
+ s0 -> 0
+ 0 -> 1 [label="A->B:l"]
+ 1 -> 2 [label="B->C:l"]
+ 0 -> 3 [label="A->C:r"]
+ 3 -> 4 [label="C->B:r"]
+}
\ No newline at end of file
diff --git a/examples/chorAuto/esWs.dot b/examples/chorAuto/esWs.dot
new file mode 100644
index 0000000..d658b22
--- /dev/null
+++ b/examples/chorAuto/esWs.dot
@@ -0,0 +1,8 @@
+digraph ws {
+ s0 [label="" height=0 width=0]
+ s0 -> 0
+ 0 -> 1 [label="A->B:l"]
+ 1 -> 2 [label="D->E:l"]
+ 0 -> 3 [label="D->E:l"]
+ 3 -> 2 [label="A->B:l"]
+}
\ No newline at end of file
diff --git a/examples/chorAuto/ex3.2jlamp2021.dot b/examples/chorAuto/ex3.2jlamp2021.dot
new file mode 100644
index 0000000..9d53eaa
--- /dev/null
+++ b/examples/chorAuto/ex3.2jlamp2021.dot
@@ -0,0 +1,13 @@
+digraph langProp {
+ s0 [label="" height=0 width=0]
+ s0 -> 0
+ 0 -> 1 [label="P->Q:text"]
+ 1 -> 2 [label="Q->H:text"]
+ 2 -> 3 [label="H->Q:ack"]
+ 3 -> 0 [label="Q->P:ok"]
+ 2 -> 4 [label="H->Q:nack"]
+ 4 -> 5 [label="Q->P:notyet"]
+ 5 -> 2 [label="Q->H:transf"]
+ 2 -> 6 [label="H->Q:stop"]
+ 6 -> 7 [label="Q->P:stop"]
+}
\ No newline at end of file
diff --git a/examples/chorAuto/ex6.5jlamp2021.dot b/examples/chorAuto/ex6.5jlamp2021.dot
new file mode 100644
index 0000000..eaa37c2
--- /dev/null
+++ b/examples/chorAuto/ex6.5jlamp2021.dot
@@ -0,0 +1,16 @@
+digraph alternator {
+ s0 [label="" height=0 width=0]
+ s0 -> 0
+ 0 -> 1 [label="K->R:text"]
+ 1 -> 2 [label="R->K:ack"]
+ 2 -> 5 [label="R->S:go"]
+ 1 -> 3 [label="R->K:nack"]
+ 3 -> 4 [label="R->S:wait"]
+ 4 -> 1 [label="K->R:transf"]
+ 5 -> 6 [label="K->S:text"]
+ 6 -> 7 [label="S->K:ack"]
+ 7 -> 0 [label="S->R:go"]
+ 6 -> 8 [label="S->K:nack"]
+ 8 -> 9 [label="S->R:wait"]
+ 9 -> 6 [label="K->S:transf"]
+}
\ No newline at end of file
diff --git a/examples/chorAuto/facs2021-1.dot b/examples/chorAuto/facs2021-1.dot
new file mode 100644
index 0000000..5a0248c
--- /dev/null
+++ b/examples/chorAuto/facs2021-1.dot
@@ -0,0 +1,6 @@
+digraph facs1 {
+ s0 [label="" height=0 width=0]
+ s0 -> 0
+ 0 -> 1 [label="A->H:m"]
+ 1 -> 2 [label="H->B:n"]
+}
\ No newline at end of file
diff --git a/examples/chorAuto/facs2021-2.dot b/examples/chorAuto/facs2021-2.dot
new file mode 100644
index 0000000..00317cf
--- /dev/null
+++ b/examples/chorAuto/facs2021-2.dot
@@ -0,0 +1,7 @@
+digraph facs2 {
+ s0 [label="" height=0 width=0]
+ s0 -> 0
+ 0 -> 1 [label="K->D:m"]
+ 1 -> 2 [label="D->C:p"]
+ 2 -> 3 [label="C->K:n"]
+}
\ No newline at end of file
diff --git a/examples/chorAuto/jlampsynch.dot b/examples/chorAuto/jlampsynch.dot
new file mode 100644
index 0000000..4cd7991
--- /dev/null
+++ b/examples/chorAuto/jlampsynch.dot
@@ -0,0 +1,34 @@
+digraph jlampsynch {
+ s0 [label="" height=0 shape=none width=0]
+ s0 -> "0,0"
+ "1,5" -> "2,6" [label="Q->S:text"]
+ "5,4" -> "2,1" [label="Q->R:transf"]
+ "3,7" -> "0,7" [label="Q->P:ok"]
+ "0,7" -> "1,7" [label="P->Q:text"]
+ "4,3" -> "5,3" [label="Q->P:notyet"]
+ "2,6" -> "3,7" [label="S->Q:ack"]
+ "4,8" -> "5,8" [label="Q->P:notyet"]
+ "1,2" -> "1,5" [label="R->S:go"]
+ "3,7" -> "3,0" [label="S->R:go"]
+ "5,8" -> "5,9" [label="S->R:wait"]
+ "0,7" -> "0,0" [label="S->R:go"]
+ "3,0" -> "0,0" [label="Q->P:ok"]
+ "5,3" -> "5,4" [label="R->S:wait"]
+ "0,2" -> "0,5" [label="R->S:go"]
+ "3,2" -> "0,2" [label="Q->P:ok"]
+ "0,0" -> "1,0" [label="P->Q:text"]
+ "4,4" -> "5,4" [label="Q->P:notyet"]
+ "5,9" -> "2,6" [label="Q->S:transf"]
+ "1,7" -> "1,0" [label="S->R:go"]
+ "3,5" -> "0,5" [label="Q->P:ok"]
+ "4,9" -> "5,9" [label="Q->P:notyet"]
+ "0,5" -> "1,5" [label="P->Q:text"]
+ "2,1" -> "3,2" [label="R->Q:ack"]
+ "1,0" -> "2,1" [label="Q->R:text"]
+ "4,8" -> "4,9" [label="S->R:wait"]
+ "4,3" -> "4,4" [label="R->S:wait"]
+ "2,1" -> "4,3" [label="R->Q:nack"]
+ "0,2" -> "1,2" [label="P->Q:text"]
+ "3,2" -> "3,5" [label="R->S:go"]
+ "2,6" -> "4,8" [label="S->Q:nack"]
+}
\ No newline at end of file
diff --git a/examples/chorAuto/loop.dot b/examples/chorAuto/loop.dot
new file mode 100644
index 0000000..956791f
--- /dev/null
+++ b/examples/chorAuto/loop.dot
@@ -0,0 +1,10 @@
+digraph loop {
+ s0 [label="" height=0 width=0]
+ s0 -> 0
+ 0 -> 1 [label="C->D:n1"]
+ 1 -> 2 [label="C->D:n2"]
+ 2 -> 0 [label="A->B:n3"]
+ 0 -> 4 [label="B->C:m1"]
+ 1 -> 4 [label="B->C:m2"]
+ 2 -> 4 [label="B->C:m3"]
+}
\ No newline at end of file
diff --git a/examples/chorAuto/nonCUI.dot b/examples/chorAuto/nonCUI.dot
new file mode 100644
index 0000000..74bf293
--- /dev/null
+++ b/examples/chorAuto/nonCUI.dot
@@ -0,0 +1,13 @@
+digraph notCUI {
+ s0 [label="" height=0 width=0]
+ s0 -> 0
+ 0 -> 1 [label="A->B:m"]
+ 1 -> 2 [label="D->C:m1"]
+ 1 -> 3 [label="D->C:m2"]
+ 2 -> 0 [label="C->A:m1"]
+ 3 -> 0 [label="C->B:m2"]
+ 0 -> 4 [label="D->C:m1"]
+ 4 -> 2 [label="A->B:m"]
+ 0 -> 5 [label="D->C:m2"]
+ 5 -> 3 [label="A->B:m"]
+}
\ No newline at end of file
diff --git a/examples/chorgram/chorgram.txt b/examples/chorgram/chorgram.txt
new file mode 100644
index 0000000..4956560
--- /dev/null
+++ b/examples/chorgram/chorgram.txt
@@ -0,0 +1,3 @@
+A -> B : m ; C -> D:m
++
+B -> Z: n ; K -> Z:m
\ No newline at end of file
diff --git a/examples/chorgram/ex2.txt b/examples/chorgram/ex2.txt
new file mode 100644
index 0000000..0111341
--- /dev/null
+++ b/examples/chorgram/ex2.txt
@@ -0,0 +1,3 @@
+A -> B : m ; B -> C:m
++
+A -> B: m ; B -> C:n
diff --git a/examples/chorgram/exCycleWB.txt b/examples/chorgram/exCycleWB.txt
new file mode 100644
index 0000000..be897c1
--- /dev/null
+++ b/examples/chorgram/exCycleWB.txt
@@ -0,0 +1,2 @@
+{* {A -> D : m ; A -> B : m; B -> C:m} @ A} ; A -> D : m
+
diff --git a/examples/chorgram/exNoWB2.txt b/examples/chorgram/exNoWB2.txt
new file mode 100644
index 0000000..de94ebd
--- /dev/null
+++ b/examples/chorgram/exNoWB2.txt
@@ -0,0 +1,3 @@
+A -> D : m ; A -> B : m; B -> C:m
++
+A -> D: n ; A -> B : m; B -> C:n
diff --git a/examples/chorgram/exWB2.txt b/examples/chorgram/exWB2.txt
new file mode 100644
index 0000000..c55fefd
--- /dev/null
+++ b/examples/chorgram/exWB2.txt
@@ -0,0 +1,3 @@
+A -> D : m ; A -> B : m; B -> C:m
++
+A -> D: n ; A -> B : m; B -> C:m
diff --git a/examples/chorgram/pingpong.txt b/examples/chorgram/pingpong.txt
new file mode 100644
index 0000000..3b9c365
--- /dev/null
+++ b/examples/chorgram/pingpong.txt
@@ -0,0 +1,5 @@
+P -> Q : finished
++
+{ *{
+ P -> Q : ping ; P -> Q : pong
+} @ P } ; P-> Q : finished
diff --git a/examples/domitilla/Simple_choiceStruct.gv b/examples/domitilla/Simple_choiceStruct.gv
new file mode 100644
index 0000000..396d77a
--- /dev/null
+++ b/examples/domitilla/Simple_choiceStruct.gv
@@ -0,0 +1,19 @@
+digraph Simple_choiceStruct {
+ 0 [label="" shape=circle]
+ 0 -> 1
+ 1 [label="+" shape=diamond]
+ 1 -> 3
+ 1 -> 5
+ 2 [label="+" shape=diamond]
+ 2 -> 7
+ 3 [label="K -> B : m" shape=rect]
+ 3 -> 4
+ 4 [label="K -> C : n" shape=rect]
+ 4 -> 2
+ 5 [label="K -> C : n" shape=rect]
+ 5 -> 6
+ 6 [label="K -> B : m" shape=rect]
+ 6 -> 2
+ 7 [label="" shape=doublecircle]
+ 7 [label="" shape=doublecircle]
+}
diff --git a/fsa.py b/fsa.py
new file mode 100644
index 0000000..080537a
--- /dev/null
+++ b/fsa.py
@@ -0,0 +1,123 @@
+from abc import ABC, abstractmethod
+
+class FSA(ABC):
+ """
+    This class is an implementation of a Finite-state automaton (FSA),
+ with a finite set of states, set of labels, set of transitions (edges)
+ and one initial state (s0)
+ """
+
+ def __init__(self, states: set, labels: set, edges: set, s0: str):
+ self.states = states
+ self.labels = labels
+ self.edges = edges
+ self.s0 = s0
+
+ def __e_closure__(self, nodes):
+ """
+        Given a set of nodes, return the ε-closure of the set:
+        the set of nodes reachable from a node of the given
+        set using only empty-label moves.
+ """
+ result = set(nodes)
+ stack_nodes = [nodes]
+ while len(stack_nodes) > 0:
+ current_node = stack_nodes.pop()
+ for node in current_node:
+ for edge in self.edges:
+ if edge[0] == node and edge[1] == "":
+ if not edge[2] in result:
+ result.add(edge[2])
+ stack_nodes.append([edge[2]])
+ return result
+
+ def __label_closure__(self, node, label):
+ """
+ Given a node and a label return a set of
+ reachable nodes, starting from the given
+ node and with only the given label moves.
+ """
+ result = set()
+ for edge in self.edges:
+ if edge[0] == node and edge[1] == label:
+ result.add(edge[2])
+ return result
+
+ def delete_epsilon_moves(self):
+ """ delete epsilon (empty) moves from the FSA """
+        # Take the ε-closure of the start node as the beginning state
+ start_node = self.__e_closure__(self.s0)
+        # a stack to iterate through
+ stack_nodes = [start_node]
+ # a dict to store new nodes, in the form of:
+ # { 0 : set_of_nodes(0,1,..),
+ # 1 : set_of_nodes(2,5,..),
+ # ... : ,,, }
+ final_states = {}
+ # a count to enumerate new nodes
+ count = 0
+ final_states[count] = start_node
+ # a list to store new edges
+ final_edges = []
+
+ while len(stack_nodes) > 0:
+ current_node = stack_nodes.pop()
+ # find ID of the node
+ id_node = ""
+ for key, value in final_states.items():
+ if value == current_node:
+ id_node = str(key)
+
+ for label in self.labels:
+ new_node = set()
+ for node in current_node:
+ # find the set of nodes reachable from node with label
+ label_closure = self.__label_closure__(node, label)
+ print(label_closure)
+ # now from this set, find a set of nodes reachable with
+ # epsilon moves (e-closure)
+ if len(label_closure):
+ new_node = new_node.union(self.__e_closure__(label_closure))
+ if len(new_node):
+ sender, receiver, message = self.__get_participants_and_message_from_label__(label)
+ if not new_node in final_states.values():
+ count += 1
+ final_states[count] = new_node
+ stack_nodes.append(new_node)
+ final_edges.append((id_node, label, str(count), sender, receiver, message))
+ else:
+ for k, value in final_states.items():
+ if value == new_node:
+ final_edges.append((id_node, label, str(k), sender, receiver, message))
+ self.edges = set(final_edges)
+ self.states = set(final_states.keys())
+
+ def delete_unreachable_nodes(self):
+ """ delete unreachable nodes from the initial state s0 """
+ visited_edges = set()
+ visited_nodes = set()
+ stack = []
+
+ # add the start point
+ visited_nodes.add(self.s0)
+ stack.append(self.s0)
+
+ while len(stack) > 0:
+ current_node = stack.pop()
+ for edge in self.edges:
+ if edge[0] == current_node:
+ # add the edge to the result graph
+ visited_edges.add(edge)
+                    # skip nodes already visited or already queued
+ if not edge[2] in stack and not edge[2] in visited_nodes:
+ # add the node to the result graph
+ visited_nodes.add(edge[2])
+ # add the node in the stack
+ # to visit later
+ stack.append(edge[2])
+ self.edges = visited_edges
+ self.states = visited_nodes
+
+ @abstractmethod
+ def __get_participants_and_message_from_label__(self, label):
+ pass
diff --git a/global_graph_parser/GlobalGraph.g4 b/global_graph_parser/GlobalGraph.g4
new file mode 100644
index 0000000..4d9ac12
--- /dev/null
+++ b/global_graph_parser/GlobalGraph.g4
@@ -0,0 +1,29 @@
+/* This file defines a grammar based on the Chorgram grammar
+ (https://bitbucket.org/emlio_tuosto/chorgram/wiki/Home)
+*/
+grammar GlobalGraph;
+
+init: g EOF | '(o)' EOF; // (o) empty
+
+g: Partecipant '->' Partecipant ':' String # interaction
+ | g ';' g # sequential
+ | g '+' g # choice
+ | g '|' g # fork
+ | '*' g '@' Partecipant # loop
+ | '{' g '}' # parenthesis
+ ;
+/* NOTE: names which begin with # on the right of productions
+   are not comments, but IDs used by the antlr4 runtime to
+   build methods in the grammar listener.
+*/
+
+/* NOTE: ANTLR allows us to specify operator precedence.
+ So, in a case without brackets , e.g. 'G + G ; G'
+ ANTLR resolves the operator ambiguity in favor of the ';' operator.
+ In other words, the order of productions define the binding rules for each operator,
+ in this grammar ';' has priority to '+', and '+' has priority to '|'
+*/
+
+Partecipant : [A-Z]+ ; // match upper-case identifiers
+String : [a-z]+ ; // match lower-case identifiers
+WS : [ \t\r\n]+ -> skip ; // skip spaces, tabs, newlines, \r (Windows)
\ No newline at end of file
diff --git a/global_graph_parser/GlobalGraph.interp b/global_graph_parser/GlobalGraph.interp
new file mode 100644
index 0000000..b88e4f2
--- /dev/null
+++ b/global_graph_parser/GlobalGraph.interp
@@ -0,0 +1,39 @@
+token literal names:
+null
+'(o)'
+'->'
+':'
+';'
+'+'
+'|'
+'*'
+'@'
+'{'
+'}'
+null
+null
+null
+
+token symbolic names:
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+Partecipant
+String
+WS
+
+rule names:
+init
+g
+
+
+atn:
+[3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 3, 15, 45, 4, 2, 9, 2, 4, 3, 9, 3, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 5, 2, 12, 10, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 3, 29, 10, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 7, 3, 40, 10, 3, 12, 3, 14, 3, 43, 11, 3, 3, 3, 2, 3, 4, 4, 2, 4, 2, 2, 2, 48, 2, 11, 3, 2, 2, 2, 4, 28, 3, 2, 2, 2, 6, 7, 5, 4, 3, 2, 7, 8, 7, 2, 2, 3, 8, 12, 3, 2, 2, 2, 9, 10, 7, 3, 2, 2, 10, 12, 7, 2, 2, 3, 11, 6, 3, 2, 2, 2, 11, 9, 3, 2, 2, 2, 12, 3, 3, 2, 2, 2, 13, 14, 8, 3, 1, 2, 14, 15, 7, 13, 2, 2, 15, 16, 7, 4, 2, 2, 16, 17, 7, 13, 2, 2, 17, 18, 7, 5, 2, 2, 18, 29, 7, 14, 2, 2, 19, 20, 7, 9, 2, 2, 20, 21, 5, 4, 3, 2, 21, 22, 7, 10, 2, 2, 22, 23, 7, 13, 2, 2, 23, 29, 3, 2, 2, 2, 24, 25, 7, 11, 2, 2, 25, 26, 5, 4, 3, 2, 26, 27, 7, 12, 2, 2, 27, 29, 3, 2, 2, 2, 28, 13, 3, 2, 2, 2, 28, 19, 3, 2, 2, 2, 28, 24, 3, 2, 2, 2, 29, 41, 3, 2, 2, 2, 30, 31, 12, 7, 2, 2, 31, 32, 7, 6, 2, 2, 32, 40, 5, 4, 3, 8, 33, 34, 12, 6, 2, 2, 34, 35, 7, 7, 2, 2, 35, 40, 5, 4, 3, 7, 36, 37, 12, 5, 2, 2, 37, 38, 7, 8, 2, 2, 38, 40, 5, 4, 3, 6, 39, 30, 3, 2, 2, 2, 39, 33, 3, 2, 2, 2, 39, 36, 3, 2, 2, 2, 40, 43, 3, 2, 2, 2, 41, 39, 3, 2, 2, 2, 41, 42, 3, 2, 2, 2, 42, 5, 3, 2, 2, 2, 43, 41, 3, 2, 2, 2, 6, 11, 28, 39, 41]
\ No newline at end of file
diff --git a/global_graph_parser/GlobalGraph.tokens b/global_graph_parser/GlobalGraph.tokens
new file mode 100644
index 0000000..d330b68
--- /dev/null
+++ b/global_graph_parser/GlobalGraph.tokens
@@ -0,0 +1,23 @@
+T__0=1
+T__1=2
+T__2=3
+T__3=4
+T__4=5
+T__5=6
+T__6=7
+T__7=8
+T__8=9
+T__9=10
+Partecipant=11
+String=12
+WS=13
+'(o)'=1
+'->'=2
+':'=3
+';'=4
+'+'=5
+'|'=6
+'*'=7
+'@'=8
+'{'=9
+'}'=10
diff --git a/global_graph_parser/GlobalGraphLexer.interp b/global_graph_parser/GlobalGraphLexer.interp
new file mode 100644
index 0000000..65200f4
--- /dev/null
+++ b/global_graph_parser/GlobalGraphLexer.interp
@@ -0,0 +1,56 @@
+token literal names:
+null
+'(o)'
+'->'
+':'
+';'
+'+'
+'|'
+'*'
+'@'
+'{'
+'}'
+null
+null
+null
+
+token symbolic names:
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+Partecipant
+String
+WS
+
+rule names:
+T__0
+T__1
+T__2
+T__3
+T__4
+T__5
+T__6
+T__7
+T__8
+T__9
+Partecipant
+String
+WS
+
+channel names:
+DEFAULT_TOKEN_CHANNEL
+HIDDEN
+
+mode names:
+DEFAULT_MODE
+
+atn:
+[3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 2, 15, 69, 8, 1, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9, 7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12, 4, 13, 9, 13, 4, 14, 9, 14, 3, 2, 3, 2, 3, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 4, 3, 4, 3, 5, 3, 5, 3, 6, 3, 6, 3, 7, 3, 7, 3, 8, 3, 8, 3, 9, 3, 9, 3, 10, 3, 10, 3, 11, 3, 11, 3, 12, 6, 12, 54, 10, 12, 13, 12, 14, 12, 55, 3, 13, 6, 13, 59, 10, 13, 13, 13, 14, 13, 60, 3, 14, 6, 14, 64, 10, 14, 13, 14, 14, 14, 65, 3, 14, 3, 14, 2, 2, 15, 3, 3, 5, 4, 7, 5, 9, 6, 11, 7, 13, 8, 15, 9, 17, 10, 19, 11, 21, 12, 23, 13, 25, 14, 27, 15, 3, 2, 5, 3, 2, 67, 92, 3, 2, 99, 124, 5, 2, 11, 12, 15, 15, 34, 34, 2, 71, 2, 3, 3, 2, 2, 2, 2, 5, 3, 2, 2, 2, 2, 7, 3, 2, 2, 2, 2, 9, 3, 2, 2, 2, 2, 11, 3, 2, 2, 2, 2, 13, 3, 2, 2, 2, 2, 15, 3, 2, 2, 2, 2, 17, 3, 2, 2, 2, 2, 19, 3, 2, 2, 2, 2, 21, 3, 2, 2, 2, 2, 23, 3, 2, 2, 2, 2, 25, 3, 2, 2, 2, 2, 27, 3, 2, 2, 2, 3, 29, 3, 2, 2, 2, 5, 33, 3, 2, 2, 2, 7, 36, 3, 2, 2, 2, 9, 38, 3, 2, 2, 2, 11, 40, 3, 2, 2, 2, 13, 42, 3, 2, 2, 2, 15, 44, 3, 2, 2, 2, 17, 46, 3, 2, 2, 2, 19, 48, 3, 2, 2, 2, 21, 50, 3, 2, 2, 2, 23, 53, 3, 2, 2, 2, 25, 58, 3, 2, 2, 2, 27, 63, 3, 2, 2, 2, 29, 30, 7, 42, 2, 2, 30, 31, 7, 113, 2, 2, 31, 32, 7, 43, 2, 2, 32, 4, 3, 2, 2, 2, 33, 34, 7, 47, 2, 2, 34, 35, 7, 64, 2, 2, 35, 6, 3, 2, 2, 2, 36, 37, 7, 60, 2, 2, 37, 8, 3, 2, 2, 2, 38, 39, 7, 61, 2, 2, 39, 10, 3, 2, 2, 2, 40, 41, 7, 45, 2, 2, 41, 12, 3, 2, 2, 2, 42, 43, 7, 126, 2, 2, 43, 14, 3, 2, 2, 2, 44, 45, 7, 44, 2, 2, 45, 16, 3, 2, 2, 2, 46, 47, 7, 66, 2, 2, 47, 18, 3, 2, 2, 2, 48, 49, 7, 125, 2, 2, 49, 20, 3, 2, 2, 2, 50, 51, 7, 127, 2, 2, 51, 22, 3, 2, 2, 2, 52, 54, 9, 2, 2, 2, 53, 52, 3, 2, 2, 2, 54, 55, 3, 2, 2, 2, 55, 53, 3, 2, 2, 2, 55, 56, 3, 2, 2, 2, 56, 24, 3, 2, 2, 2, 57, 59, 9, 3, 2, 2, 58, 57, 3, 2, 2, 2, 59, 60, 3, 2, 2, 2, 60, 58, 3, 2, 2, 2, 60, 61, 3, 2, 2, 2, 61, 26, 3, 2, 2, 2, 62, 64, 9, 4, 2, 2, 63, 62, 3, 2, 2, 2, 64, 65, 3, 2, 2, 2, 65, 63, 3, 
2, 2, 2, 65, 66, 3, 2, 2, 2, 66, 67, 3, 2, 2, 2, 67, 68, 8, 14, 2, 2, 68, 28, 3, 2, 2, 2, 6, 2, 55, 60, 65, 3, 8, 2, 2]
\ No newline at end of file
diff --git a/global_graph_parser/GlobalGraphLexer.py b/global_graph_parser/GlobalGraphLexer.py
new file mode 100644
index 0000000..6e83b7e
--- /dev/null
+++ b/global_graph_parser/GlobalGraphLexer.py
@@ -0,0 +1,80 @@
+# Generated from GlobalGraph.g4 by ANTLR 4.7.2
+from antlr4 import *
+from io import StringIO
+from typing.io import TextIO
+import sys
+
+
+def serializedATN():
+ with StringIO() as buf:
+ buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\17")
+ buf.write("E\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
+ buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16")
+ buf.write("\t\16\3\2\3\2\3\2\3\2\3\3\3\3\3\3\3\4\3\4\3\5\3\5\3\6")
+ buf.write("\3\6\3\7\3\7\3\b\3\b\3\t\3\t\3\n\3\n\3\13\3\13\3\f\6\f")
+ buf.write("\66\n\f\r\f\16\f\67\3\r\6\r;\n\r\r\r\16\r<\3\16\6\16@")
+ buf.write("\n\16\r\16\16\16A\3\16\3\16\2\2\17\3\3\5\4\7\5\t\6\13")
+ buf.write("\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\3\2\5\3\2")
+ buf.write("C\\\3\2c|\5\2\13\f\17\17\"\"\2G\2\3\3\2\2\2\2\5\3\2\2")
+ buf.write("\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2")
+ buf.write("\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27")
+ buf.write("\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\3\35\3\2\2\2\5!\3\2")
+ buf.write("\2\2\7$\3\2\2\2\t&\3\2\2\2\13(\3\2\2\2\r*\3\2\2\2\17,")
+ buf.write("\3\2\2\2\21.\3\2\2\2\23\60\3\2\2\2\25\62\3\2\2\2\27\65")
+ buf.write("\3\2\2\2\31:\3\2\2\2\33?\3\2\2\2\35\36\7*\2\2\36\37\7")
+ buf.write("q\2\2\37 \7+\2\2 \4\3\2\2\2!\"\7/\2\2\"#\7@\2\2#\6\3\2")
+ buf.write("\2\2$%\7<\2\2%\b\3\2\2\2&\'\7=\2\2\'\n\3\2\2\2()\7-\2")
+ buf.write("\2)\f\3\2\2\2*+\7~\2\2+\16\3\2\2\2,-\7,\2\2-\20\3\2\2")
+ buf.write("\2./\7B\2\2/\22\3\2\2\2\60\61\7}\2\2\61\24\3\2\2\2\62")
+ buf.write("\63\7\177\2\2\63\26\3\2\2\2\64\66\t\2\2\2\65\64\3\2\2")
+ buf.write("\2\66\67\3\2\2\2\67\65\3\2\2\2\678\3\2\2\28\30\3\2\2\2")
+ buf.write("9;\t\3\2\2:9\3\2\2\2;<\3\2\2\2<:\3\2\2\2<=\3\2\2\2=\32")
+ buf.write("\3\2\2\2>@\t\4\2\2?>\3\2\2\2@A\3\2\2\2A?\3\2\2\2AB\3\2")
+ buf.write("\2\2BC\3\2\2\2CD\b\16\2\2D\34\3\2\2\2\6\2\67",
+ "'(o)'", "'->'", "':'", "';'", "'+'", "'|'", "'*'", "'@'", "'{'",
+ "'}'" ]
+
+ symbolicNames = [ "",
+ "Partecipant", "String", "WS" ]
+
+ ruleNames = [ "T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "T__6",
+ "T__7", "T__8", "T__9", "Partecipant", "String", "WS" ]
+
+ grammarFileName = "GlobalGraph.g4"
+
+ def __init__(self, input=None, output:TextIO = sys.stdout):
+ super().__init__(input, output)
+ self.checkVersion("4.7.2")
+ self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
+ self._actions = None
+ self._predicates = None
+
+
diff --git a/global_graph_parser/GlobalGraphLexer.tokens b/global_graph_parser/GlobalGraphLexer.tokens
new file mode 100644
index 0000000..d330b68
--- /dev/null
+++ b/global_graph_parser/GlobalGraphLexer.tokens
@@ -0,0 +1,23 @@
+T__0=1
+T__1=2
+T__2=3
+T__3=4
+T__4=5
+T__5=6
+T__6=7
+T__7=8
+T__8=9
+T__9=10
+Partecipant=11
+String=12
+WS=13
+'(o)'=1
+'->'=2
+':'=3
+';'=4
+'+'=5
+'|'=6
+'*'=7
+'@'=8
+'{'=9
+'}'=10
diff --git a/global_graph_parser/GlobalGraphListener.py b/global_graph_parser/GlobalGraphListener.py
new file mode 100644
index 0000000..c298fb0
--- /dev/null
+++ b/global_graph_parser/GlobalGraphListener.py
@@ -0,0 +1,73 @@
+# Generated from GlobalGraph.g4 by ANTLR 4.7.2
+from antlr4 import *
+if __name__ is not None and "." in __name__:
+ from .GlobalGraphParser import GlobalGraphParser
+else:
+ from GlobalGraphParser import GlobalGraphParser
+
+# This class defines a complete listener for a parse tree produced by GlobalGraphParser.
+class GlobalGraphListener(ParseTreeListener):
+
+ # Enter a parse tree produced by GlobalGraphParser#init.
+ def enterInit(self, ctx:GlobalGraphParser.InitContext):
+ pass
+
+ # Exit a parse tree produced by GlobalGraphParser#init.
+ def exitInit(self, ctx:GlobalGraphParser.InitContext):
+ pass
+
+
+ # Enter a parse tree produced by GlobalGraphParser#fork.
+ def enterFork(self, ctx:GlobalGraphParser.ForkContext):
+ pass
+
+ # Exit a parse tree produced by GlobalGraphParser#fork.
+ def exitFork(self, ctx:GlobalGraphParser.ForkContext):
+ pass
+
+
+ # Enter a parse tree produced by GlobalGraphParser#loop.
+ def enterLoop(self, ctx:GlobalGraphParser.LoopContext):
+ pass
+
+ # Exit a parse tree produced by GlobalGraphParser#loop.
+ def exitLoop(self, ctx:GlobalGraphParser.LoopContext):
+ pass
+
+
+ # Enter a parse tree produced by GlobalGraphParser#sequential.
+ def enterSequential(self, ctx:GlobalGraphParser.SequentialContext):
+ pass
+
+ # Exit a parse tree produced by GlobalGraphParser#sequential.
+ def exitSequential(self, ctx:GlobalGraphParser.SequentialContext):
+ pass
+
+
+ # Enter a parse tree produced by GlobalGraphParser#interaction.
+ def enterInteraction(self, ctx:GlobalGraphParser.InteractionContext):
+ pass
+
+ # Exit a parse tree produced by GlobalGraphParser#interaction.
+ def exitInteraction(self, ctx:GlobalGraphParser.InteractionContext):
+ pass
+
+
+ # Enter a parse tree produced by GlobalGraphParser#choice.
+ def enterChoice(self, ctx:GlobalGraphParser.ChoiceContext):
+ pass
+
+ # Exit a parse tree produced by GlobalGraphParser#choice.
+ def exitChoice(self, ctx:GlobalGraphParser.ChoiceContext):
+ pass
+
+
+ # Enter a parse tree produced by GlobalGraphParser#parenthesis.
+ def enterParenthesis(self, ctx:GlobalGraphParser.ParenthesisContext):
+ pass
+
+ # Exit a parse tree produced by GlobalGraphParser#parenthesis.
+ def exitParenthesis(self, ctx:GlobalGraphParser.ParenthesisContext):
+ pass
+
+
diff --git a/global_graph_parser/GlobalGraphParser.py b/global_graph_parser/GlobalGraphParser.py
new file mode 100644
index 0000000..5043f08
--- /dev/null
+++ b/global_graph_parser/GlobalGraphParser.py
@@ -0,0 +1,468 @@
+# Generated from GlobalGraph.g4 by ANTLR 4.7.2
+# encoding: utf-8
+from antlr4 import *
+from io import StringIO
+from typing.io import TextIO
+import sys
+
+def serializedATN():
+ with StringIO() as buf:
+ buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\17")
+ buf.write("-\4\2\t\2\4\3\t\3\3\2\3\2\3\2\3\2\3\2\5\2\f\n\2\3\3\3")
+ buf.write("\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3")
+ buf.write("\5\3\35\n\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\7\3(\n")
+ buf.write("\3\f\3\16\3+\13\3\3\3\2\3\4\4\2\4\2\2\2\60\2\13\3\2\2")
+ buf.write("\2\4\34\3\2\2\2\6\7\5\4\3\2\7\b\7\2\2\3\b\f\3\2\2\2\t")
+ buf.write("\n\7\3\2\2\n\f\7\2\2\3\13\6\3\2\2\2\13\t\3\2\2\2\f\3\3")
+ buf.write("\2\2\2\r\16\b\3\1\2\16\17\7\r\2\2\17\20\7\4\2\2\20\21")
+ buf.write("\7\r\2\2\21\22\7\5\2\2\22\35\7\16\2\2\23\24\7\t\2\2\24")
+ buf.write("\25\5\4\3\2\25\26\7\n\2\2\26\27\7\r\2\2\27\35\3\2\2\2")
+ buf.write("\30\31\7\13\2\2\31\32\5\4\3\2\32\33\7\f\2\2\33\35\3\2")
+ buf.write("\2\2\34\r\3\2\2\2\34\23\3\2\2\2\34\30\3\2\2\2\35)\3\2")
+ buf.write("\2\2\36\37\f\7\2\2\37 \7\6\2\2 (\5\4\3\b!\"\f\6\2\2\"")
+ buf.write("#\7\7\2\2#(\5\4\3\7$%\f\5\2\2%&\7\b\2\2&(\5\4\3\6\'\36")
+ buf.write("\3\2\2\2\'!\3\2\2\2\'$\3\2\2\2(+\3\2\2\2)\'\3\2\2\2)*")
+ buf.write("\3\2\2\2*\5\3\2\2\2+)\3\2\2\2\6\13\34\')")
+ return buf.getvalue()
+
+
+class GlobalGraphParser ( Parser ):
+
+ grammarFileName = "GlobalGraph.g4"
+
+ atn = ATNDeserializer().deserialize(serializedATN())
+
+ decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
+
+ sharedContextCache = PredictionContextCache()
+
+ literalNames = [ "", "'(o)'", "'->'", "':'", "';'", "'+'",
+ "'|'", "'*'", "'@'", "'{'", "'}'" ]
+
+ symbolicNames = [ "", "", "", "",
+ "", "", "", "",
+ "", "", "", "Partecipant",
+ "String", "WS" ]
+
+ RULE_init = 0
+ RULE_g = 1
+
+ ruleNames = [ "init", "g" ]
+
+ EOF = Token.EOF
+ T__0=1
+ T__1=2
+ T__2=3
+ T__3=4
+ T__4=5
+ T__5=6
+ T__6=7
+ T__7=8
+ T__8=9
+ T__9=10
+ Partecipant=11
+ String=12
+ WS=13
+
+ def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
+ super().__init__(input, output)
+ self.checkVersion("4.7.2")
+ self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
+ self._predicates = None
+
+
+
+ class InitContext(ParserRuleContext):
+
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def g(self):
+ return self.getTypedRuleContext(GlobalGraphParser.GContext,0)
+
+
+ def EOF(self):
+ return self.getToken(GlobalGraphParser.EOF, 0)
+
+ def getRuleIndex(self):
+ return GlobalGraphParser.RULE_init
+
+ def enterRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "enterInit" ):
+ listener.enterInit(self)
+
+ def exitRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "exitInit" ):
+ listener.exitInit(self)
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitInit" ):
+ return visitor.visitInit(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+
+
+ def init(self):
+
+ localctx = GlobalGraphParser.InitContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 0, self.RULE_init)
+ try:
+ self.state = 9
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [GlobalGraphParser.T__6, GlobalGraphParser.T__8, GlobalGraphParser.Partecipant]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 4
+ self.g(0)
+ self.state = 5
+ self.match(GlobalGraphParser.EOF)
+ pass
+ elif token in [GlobalGraphParser.T__0]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 7
+ self.match(GlobalGraphParser.T__0)
+ self.state = 8
+ self.match(GlobalGraphParser.EOF)
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class GContext(ParserRuleContext):
+
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+
+ def getRuleIndex(self):
+ return GlobalGraphParser.RULE_g
+
+
+ def copyFrom(self, ctx:ParserRuleContext):
+ super().copyFrom(ctx)
+
+
+ class ForkContext(GContext):
+
+ def __init__(self, parser, ctx:ParserRuleContext): # actually a GlobalGraphParser.GContext
+ super().__init__(parser)
+ self.copyFrom(ctx)
+
+ def g(self, i:int=None):
+ if i is None:
+ return self.getTypedRuleContexts(GlobalGraphParser.GContext)
+ else:
+ return self.getTypedRuleContext(GlobalGraphParser.GContext,i)
+
+
+ def enterRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "enterFork" ):
+ listener.enterFork(self)
+
+ def exitRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "exitFork" ):
+ listener.exitFork(self)
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitFork" ):
+ return visitor.visitFork(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+ class LoopContext(GContext):
+
+ def __init__(self, parser, ctx:ParserRuleContext): # actually a GlobalGraphParser.GContext
+ super().__init__(parser)
+ self.copyFrom(ctx)
+
+ def g(self):
+ return self.getTypedRuleContext(GlobalGraphParser.GContext,0)
+
+ def Partecipant(self):
+ return self.getToken(GlobalGraphParser.Partecipant, 0)
+
+ def enterRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "enterLoop" ):
+ listener.enterLoop(self)
+
+ def exitRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "exitLoop" ):
+ listener.exitLoop(self)
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitLoop" ):
+ return visitor.visitLoop(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+ class SequentialContext(GContext):
+
+ def __init__(self, parser, ctx:ParserRuleContext): # actually a GlobalGraphParser.GContext
+ super().__init__(parser)
+ self.copyFrom(ctx)
+
+ def g(self, i:int=None):
+ if i is None:
+ return self.getTypedRuleContexts(GlobalGraphParser.GContext)
+ else:
+ return self.getTypedRuleContext(GlobalGraphParser.GContext,i)
+
+
+ def enterRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "enterSequential" ):
+ listener.enterSequential(self)
+
+ def exitRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "exitSequential" ):
+ listener.exitSequential(self)
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitSequential" ):
+ return visitor.visitSequential(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+ class InteractionContext(GContext):
+
+ def __init__(self, parser, ctx:ParserRuleContext): # actually a GlobalGraphParser.GContext
+ super().__init__(parser)
+ self.copyFrom(ctx)
+
+ def Partecipant(self, i:int=None):
+ if i is None:
+ return self.getTokens(GlobalGraphParser.Partecipant)
+ else:
+ return self.getToken(GlobalGraphParser.Partecipant, i)
+ def String(self):
+ return self.getToken(GlobalGraphParser.String, 0)
+
+ def enterRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "enterInteraction" ):
+ listener.enterInteraction(self)
+
+ def exitRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "exitInteraction" ):
+ listener.exitInteraction(self)
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitInteraction" ):
+ return visitor.visitInteraction(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+ class ChoiceContext(GContext):
+
+ def __init__(self, parser, ctx:ParserRuleContext): # actually a GlobalGraphParser.GContext
+ super().__init__(parser)
+ self.copyFrom(ctx)
+
+ def g(self, i:int=None):
+ if i is None:
+ return self.getTypedRuleContexts(GlobalGraphParser.GContext)
+ else:
+ return self.getTypedRuleContext(GlobalGraphParser.GContext,i)
+
+
+ def enterRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "enterChoice" ):
+ listener.enterChoice(self)
+
+ def exitRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "exitChoice" ):
+ listener.exitChoice(self)
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitChoice" ):
+ return visitor.visitChoice(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+ class ParenthesisContext(GContext):
+
+ def __init__(self, parser, ctx:ParserRuleContext): # actually a GlobalGraphParser.GContext
+ super().__init__(parser)
+ self.copyFrom(ctx)
+
+ def g(self):
+ return self.getTypedRuleContext(GlobalGraphParser.GContext,0)
+
+
+ def enterRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "enterParenthesis" ):
+ listener.enterParenthesis(self)
+
+ def exitRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "exitParenthesis" ):
+ listener.exitParenthesis(self)
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitParenthesis" ):
+ return visitor.visitParenthesis(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+
+ def g(self, _p:int=0):
+ _parentctx = self._ctx
+ _parentState = self.state
+ localctx = GlobalGraphParser.GContext(self, self._ctx, _parentState)
+ _prevctx = localctx
+ _startState = 2
+ self.enterRecursionRule(localctx, 2, self.RULE_g, _p)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 26
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [GlobalGraphParser.Partecipant]:
+ localctx = GlobalGraphParser.InteractionContext(self, localctx)
+ self._ctx = localctx
+ _prevctx = localctx
+
+ self.state = 12
+ self.match(GlobalGraphParser.Partecipant)
+ self.state = 13
+ self.match(GlobalGraphParser.T__1)
+ self.state = 14
+ self.match(GlobalGraphParser.Partecipant)
+ self.state = 15
+ self.match(GlobalGraphParser.T__2)
+ self.state = 16
+ self.match(GlobalGraphParser.String)
+ pass
+ elif token in [GlobalGraphParser.T__6]:
+ localctx = GlobalGraphParser.LoopContext(self, localctx)
+ self._ctx = localctx
+ _prevctx = localctx
+ self.state = 17
+ self.match(GlobalGraphParser.T__6)
+ self.state = 18
+ self.g(0)
+ self.state = 19
+ self.match(GlobalGraphParser.T__7)
+ self.state = 20
+ self.match(GlobalGraphParser.Partecipant)
+ pass
+ elif token in [GlobalGraphParser.T__8]:
+ localctx = GlobalGraphParser.ParenthesisContext(self, localctx)
+ self._ctx = localctx
+ _prevctx = localctx
+ self.state = 22
+ self.match(GlobalGraphParser.T__8)
+ self.state = 23
+ self.g(0)
+ self.state = 24
+ self.match(GlobalGraphParser.T__9)
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ self._ctx.stop = self._input.LT(-1)
+ self.state = 39
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,3,self._ctx)
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt==1:
+ if self._parseListeners is not None:
+ self.triggerExitRuleEvent()
+ _prevctx = localctx
+ self.state = 37
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,2,self._ctx)
+ if la_ == 1:
+ localctx = GlobalGraphParser.SequentialContext(self, GlobalGraphParser.GContext(self, _parentctx, _parentState))
+ self.pushNewRecursionContext(localctx, _startState, self.RULE_g)
+ self.state = 28
+ if not self.precpred(self._ctx, 5):
+ from antlr4.error.Errors import FailedPredicateException
+ raise FailedPredicateException(self, "self.precpred(self._ctx, 5)")
+ self.state = 29
+ self.match(GlobalGraphParser.T__3)
+ self.state = 30
+ self.g(6)
+ pass
+
+ elif la_ == 2:
+ localctx = GlobalGraphParser.ChoiceContext(self, GlobalGraphParser.GContext(self, _parentctx, _parentState))
+ self.pushNewRecursionContext(localctx, _startState, self.RULE_g)
+ self.state = 31
+ if not self.precpred(self._ctx, 4):
+ from antlr4.error.Errors import FailedPredicateException
+ raise FailedPredicateException(self, "self.precpred(self._ctx, 4)")
+ self.state = 32
+ self.match(GlobalGraphParser.T__4)
+ self.state = 33
+ self.g(5)
+ pass
+
+ elif la_ == 3:
+ localctx = GlobalGraphParser.ForkContext(self, GlobalGraphParser.GContext(self, _parentctx, _parentState))
+ self.pushNewRecursionContext(localctx, _startState, self.RULE_g)
+ self.state = 34
+ if not self.precpred(self._ctx, 3):
+ from antlr4.error.Errors import FailedPredicateException
+ raise FailedPredicateException(self, "self.precpred(self._ctx, 3)")
+ self.state = 35
+ self.match(GlobalGraphParser.T__5)
+ self.state = 36
+ self.g(4)
+ pass
+
+
+ self.state = 41
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,3,self._ctx)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.unrollRecursionContexts(_parentctx)
+ return localctx
+
+
+
+ def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int):
+ if self._predicates == None:
+ self._predicates = dict()
+ self._predicates[1] = self.g_sempred
+ pred = self._predicates.get(ruleIndex, None)
+ if pred is None:
+ raise Exception("No predicate with index:" + str(ruleIndex))
+ else:
+ return pred(localctx, predIndex)
+
+ def g_sempred(self, localctx:GContext, predIndex:int):
+ if predIndex == 0:
+ return self.precpred(self._ctx, 5)
+
+
+ if predIndex == 1:
+ return self.precpred(self._ctx, 4)
+
+
+ if predIndex == 2:
+ return self.precpred(self._ctx, 3)
+
+
+
+
+
diff --git a/global_graph_parser/GlobalGraphVisitor.py b/global_graph_parser/GlobalGraphVisitor.py
new file mode 100644
index 0000000..1f65928
--- /dev/null
+++ b/global_graph_parser/GlobalGraphVisitor.py
@@ -0,0 +1,48 @@
+# Generated from GlobalGraph.g4 by ANTLR 4.7.2
+from antlr4 import *
+if __name__ is not None and "." in __name__:
+ from .GlobalGraphParser import GlobalGraphParser
+else:
+ from GlobalGraphParser import GlobalGraphParser
+
+# This class defines a complete generic visitor for a parse tree produced by GlobalGraphParser.
+
+class GlobalGraphVisitor(ParseTreeVisitor):
+
+ # Visit a parse tree produced by GlobalGraphParser#init.
+ def visitInit(self, ctx:GlobalGraphParser.InitContext):
+ return self.visitChildren(ctx)
+
+
+ # Visit a parse tree produced by GlobalGraphParser#fork.
+ def visitFork(self, ctx:GlobalGraphParser.ForkContext):
+ return self.visitChildren(ctx)
+
+
+ # Visit a parse tree produced by GlobalGraphParser#loop.
+ def visitLoop(self, ctx:GlobalGraphParser.LoopContext):
+ return self.visitChildren(ctx)
+
+
+ # Visit a parse tree produced by GlobalGraphParser#sequential.
+ def visitSequential(self, ctx:GlobalGraphParser.SequentialContext):
+ return self.visitChildren(ctx)
+
+
+ # Visit a parse tree produced by GlobalGraphParser#interaction.
+ def visitInteraction(self, ctx:GlobalGraphParser.InteractionContext):
+ return self.visitChildren(ctx)
+
+
+ # Visit a parse tree produced by GlobalGraphParser#choice.
+ def visitChoice(self, ctx:GlobalGraphParser.ChoiceContext):
+ return self.visitChildren(ctx)
+
+
+ # Visit a parse tree produced by GlobalGraphParser#parenthesis.
+ def visitParenthesis(self, ctx:GlobalGraphParser.ParenthesisContext):
+ return self.visitChildren(ctx)
+
+
+
+del GlobalGraphParser
\ No newline at end of file
diff --git a/global_graph_parser/MyErrorListener.py b/global_graph_parser/MyErrorListener.py
new file mode 100644
index 0000000..bd3b0ea
--- /dev/null
+++ b/global_graph_parser/MyErrorListener.py
@@ -0,0 +1,32 @@
+from antlr4.error.ErrorListener import ErrorListener
+
+
+class parseError(Exception):
+
+ def __init__(self, message):
+ self.message = message
+
+class MyErrorListener(ErrorListener):
+    '''
+    MyErrorListener redirects errors from the console output
+    (the default behaviour of the ConsoleErrorListener)
+    to an exception.
+    '''
+
+ def __init__(self):
+ super(MyErrorListener, self).__init__()
+
+ def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e):
+ position_of_the_error = "InputError in [line: " + str(line) + ",column: " + str(column) + "]"
+ error = [position_of_the_error, msg]
+
+ raise parseError(error)
+
+ def reportAmbiguity(self, recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs):
+ raise Exception("reportAmbiguity")
+
+ def reportAttemptingFullContext(self, recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs):
+ raise Exception("reportAttemptingFullContext")
+
+ def reportContextSensitivity(self, recognizer, dfa, startIndex, stopIndex, prediction, configs):
+ raise Exception("reportContextSensitivity")
\ No newline at end of file
diff --git a/global_graph_parser/MyGlobalGraphListener.py b/global_graph_parser/MyGlobalGraphListener.py
new file mode 100644
index 0000000..988029f
--- /dev/null
+++ b/global_graph_parser/MyGlobalGraphListener.py
@@ -0,0 +1,160 @@
+from chor_auto import ChoreographyAutomata
+from graphviz import Digraph
+from .GlobalGraphListener import GlobalGraphListener
+
+
+
+class ForkStatementDetected(Exception):
+    # Raised by the listener when a fork node appears: forks are unsupported.
+    def __init__(self):
+        self.message = ["[WARNING] Fork found!", "I can't accept that"]
+
+
+class MyGlobalGraphListener(GlobalGraphListener):
+ """
+ There are 2 methods (enter and exit) for each rule of the grammar.
+ As the walker encounters the node for rule Choice, for example,
+ it triggers enterChoice(). After the walker visits all children
+ of the Choice node, it triggers exitChoice().
+
+    NOTE: For our purpose, we can't do anything in the enter methods
+    (except for enterInit). We need to walk down the parse tree
+    and store information in a stack before we are able to build
+    the graph.
+ """
+
+ def __init__(self, graph_name, path_store):
+ self.path_store = path_store
+ self.stack = []
+ self.states = set()
+ self.labels = set()
+ self.edges = set()
+ self.participants = set()
+ self.g = Digraph(graph_name) # initializes graph
+ self.count = 0 # needed to number each node
+
+ def __get_participants_and_message_from_label__(self, label):
+ sender, receiver_message = label.split('->')
+ receiver, message = receiver_message.split(':')
+ return sender.strip(), receiver.strip(), message.strip()
+
+ # Enter a parse tree produced by GlobalGraph#Init.
+ def enterInit(self, ctx):
+ self.count += 1
+
+ # Exit a parse tree produced by GlobalGraph#Init.
+ def exitInit(self, ctx):
+ node = self.stack.pop()
+ self.g.node('s0', label="", shape='none', height='0', width='0')
+ self.g.edge('s0', "0")
+ self.edges.add(("0", "", str(node[1]), "", "", ""))
+ #self.g.edge("0", str(node[1]))
+ self.edges.add((str(node[2]), "", str(self.count), "", "", ""))
+ #self.g.edge(str(node[2]), str(self.count))
+ for edge in self.edges:
+ self.states.add(edge[0])
+ self.states.add(edge[2])
+ ca = ChoreographyAutomata(self.states, self.labels,
+ self.edges, '0',
+ self.participants)
+ ca.delete_epsilon_moves()
+ for edge in ca.edges:
+ self.g.edge(str(edge[0]), str(edge[2]), label=edge[1])
+
+ self.g.save(self.path_store) # draw the graph
+
+ # Enter a parse tree produced by GlobalGraph#interaction.
+ def enterInteraction(self, ctx):
+ pass
+
+ # Exit a parse tree produced by GlobalGraph#interaction.
+ def exitInteraction(self, ctx):
+ node = ['interaction', self.count, self.count + 1]
+ self.stack.append(node)
+ self.count += 2
+ #self.g.edge(str(node[1]), str(node[2]), label=ctx.getText())
+ label = ctx.getText()
+ sender, receiver, message = self.__get_participants_and_message_from_label__(label)
+ self.participants.add(sender)
+ self.participants.add(receiver)
+ self.labels.add(label)
+ self.edges.add((str(node[1]), label, str(node[2]), sender, receiver, message))
+
+ # Enter a parse tree produced by GlobalGraph#sequential.
+ def enterSequential(self, ctx):
+ pass
+
+ # Exit a parse tree produced by GlobalGraph#sequential.
+ def exitSequential(self, ctx):
+ right = self.stack.pop()
+ left = self.stack.pop()
+ node = ['sequential', left[1], right[2]]
+ self.stack.append(node)
+ #self.g.edge(str(left[2]), str(right[1]))
+ self.edges.add((str(left[2]), "", str(right[1]), "", "", ""))
+
+ # Enter a parse tree produced by GlobalGraph#choice.
+ def enterChoice(self, ctx):
+ pass
+
+ # Exit a parse tree produced by GlobalGraph#choice.
+ def exitChoice(self, ctx):
+ right = self.stack.pop()
+ left = self.stack.pop()
+ if left[0] == 'choice':
+ node = ['choice', left[1], left[2]]
+ self.stack.append(node)
+ #self.g.edge(str(left[1]), str(right[1]))
+ self.edges.add((str(left[1]), "", str(right[1]), "", "", ""))
+ #self.g.edge(str(right[2]), str(left[2]))
+ self.edges.add((str(right[2]), "", str(left[2]), "", "", ""))
+ else:
+ choice_node_start = str(self.count)
+ self.count += 1
+ choice_node_end = str(self.count)
+ self.count += 1
+ node = ['choice', choice_node_start, choice_node_end]
+ self.stack.append(node)
+ #self.g.edge(choice_node_start, str(left[1]))
+ self.edges.add((choice_node_start, "", str(left[1]), "", "", ""))
+ #self.g.edge(choice_node_start, str(right[1]))
+ self.edges.add((choice_node_start, "", str(right[1]), "", "", ""))
+ #self.g.edge(str(left[2]), choice_node_end)
+ self.edges.add((str(left[2]), "", choice_node_end, "", "", ""))
+ #self.g.edge(str(right[2]), choice_node_end)
+ self.edges.add((str(right[2]), "", choice_node_end, "", "", ""))
+
+ # Enter a parse tree produced by GlobalGraph#fork.
+ def enterFork(self, ctx):
+ raise ForkStatementDetected
+
+ # Exit a parse tree produced by GlobalGraph#fork.
+ def exitFork(self, ctx):
+ raise ForkStatementDetected
+
+ # Enter a parse tree produced by GlobalGraph#loop.
+ def enterLoop(self, ctx):
+ pass
+
+ # Exit a parse tree produced by GlobalGraph#loop.
+ def exitLoop(self, ctx):
+ node_to_loop = self.stack.pop()
+ loop_node_start = str(self.count)
+ self.count += 1
+ loop_node_end = str(self.count)
+ self.count += 1
+ node = ['loop', loop_node_start, loop_node_end]
+ self.stack.append(node)
+ #self.g.edge(loop_node_start, str(node_to_loop[1]))
+ self.edges.add((loop_node_start, "", str(node_to_loop[1]), "", "", ""))
+ #self.g.edge(str(node_to_loop[2]), loop_node_end)
+ self.edges.add((str(node_to_loop[2]), "", loop_node_end, "", "", ""))
+ #self.g.edge(loop_node_end, loop_node_start)
+ self.edges.add((loop_node_end, "", loop_node_start, "", "", ""))
+ # Enter a parse tree produced by GlobalGraph#parenthesis.
+ def enterParenthesis(self, ctx):
+ pass
+
+ # Exit a parse tree produced by GlobalGraph#parenthesis.
+ def exitParenthesis(self, ctx):
+ pass
diff --git a/global_graph_parser/main.py b/global_graph_parser/main.py
new file mode 100644
index 0000000..44207a9
--- /dev/null
+++ b/global_graph_parser/main.py
@@ -0,0 +1,50 @@
+import sys, os
+from antlr4 import *
+from .GlobalGraphLexer import GlobalGraphLexer
+from .GlobalGraphParser import GlobalGraphParser
+from .MyGlobalGraphListener import MyGlobalGraphListener, ForkStatementDetected
+from .MyErrorListener import MyErrorListener, parseError
+
+
+def __extract_name__(path_file):
+ path_splitted = os.path.split(path_file)
+ filename_splitted = path_splitted[1].split('.')
+ return filename_splitted[0]
+
+def main(path_file, path_store):
+ """
+ Aim of this function is to take a string of the GlobalGraph grammar
+ and translate it in a dot (graph description language) file.
+ """
+ graph_name = __extract_name__(path_file)
+
+ path_store = os.path.join(path_store, graph_name + '.dot')
+
+ input_stream = FileStream(path_file)
+
+    # tokenizes the input into words (tokens)
+ lexer = GlobalGraphLexer(input_stream)
+ stream = CommonTokenStream(lexer)
+    # parses these tokens to recognize the sentence structure
+    # and build a parse tree
+ try:
+ parser = GlobalGraphParser(stream)
+ # remove DefaultErrorListener
+ parser.removeErrorListeners()
+ lexer.removeErrorListeners()
+ # add MyErrorListener (See MyErrorListener.py)
+ parser.addErrorListener(MyErrorListener())
+ lexer.addErrorListener(MyErrorListener())
+ tree = parser.init()
+ # "listener" is the mechanism through we visit
+ # each node of parse tree
+ listener = MyGlobalGraphListener(graph_name, path_store)
+ walker = ParseTreeWalker()
+ walker.walk(listener, tree)
+ except (parseError, ForkStatementDetected) as e:
+ return e.message
+ else:
+ return ["Read Successfully " + str(graph_name),
+ "[CREATED] " + path_store] , path_store
+
+
diff --git a/global_graph_parser/makeParser.sh b/global_graph_parser/makeParser.sh
new file mode 100755
index 0000000..b376e44
--- /dev/null
+++ b/global_graph_parser/makeParser.sh
@@ -0,0 +1 @@
+antlr4 -Dlanguage=Python3 -visitor GlobalGraph.g4
diff --git a/gui.py b/gui.py
new file mode 100644
index 0000000..2ec10e9
--- /dev/null
+++ b/gui.py
@@ -0,0 +1,662 @@
+import sys
+import os
+import tkinter as tk
+from tkinter import ttk
+from tkinter import filedialog, Frame, Entry, Button, messagebox, Listbox, Radiobutton, PhotoImage, Menu
+
+from PIL import ImageTk,Image
+import pickle
+
+
+class MyGui(ttk.Frame):
+ controller = None
+ tabs = None
+ tabs_history = []
+ _log = None
+ _screen_width = None
+ _screen_height = None
+ COLOR_buttons = '#FFCB6B'
+ COLOR_frames = '#333333'
+ COLOR_tabs = '#666666'
+ COLOR_foreground = '#D9C7B3'
+ COLOR_log = '#1E1E1E'
+
+
+    def v_restoreSession(self):
+        # Guard: the "Open Saved Tabs" menu entry can be used when no session file was saved.
+        if not os.path.isfile("saved_sessions/saved_tabs.pkl"): return
+        with open("saved_sessions/saved_tabs.pkl", "rb") as f:
+            history = pickle.load(f)
+        for i in history: self.open_file(i)
+
+
+ def v_splashscreen(self,master,height,width):
+ image_splash = "icons/logo-gif.gif"
+ image = tk.PhotoImage(file=image_splash)
+ canvas = tk.Canvas(master, height=height*0.8,width=width)
+ canvas.create_image(width*0.8/2, height*0.8/2, image =image)
+ canvas.grid()
+
+ master.after(5000,master.destroy)
+
+
+ def __init__(self, master, controller):
+ super().__init__()
+ master.protocol("WM_DELETE_WINDOW", self.v_on_closing)
+
+ self.controller = controller
+ self.v_initUi(master)
+
+ if os.path.isfile("saved_sessions/saved_tabs.pkl"):
+ self.v_restoreSession()
+
+
+ def v_on_closing(self):
+
+ close = messagebox.askyesnocancel(title="Warning", message="Do you want to save the session?")
+
+ if close == True:
+ picklefile = open("saved_sessions/saved_tabs.pkl","wb")
+ pickle.dump(self.tabs_history,picklefile)
+ picklefile.close()
+
+ self.master.destroy()
+
+ elif close == False:
+ if os.path.isfile("saved_sessions/saved_tabs.pkl"):
+ os.remove("saved_sessions/saved_tabs.pkl")
+ self.master.destroy()
+
+ def dos2unix(self,file_path):
+ # replacement strings
+ WINDOWS_LINE_ENDING = b'\r\n'
+ UNIX_LINE_ENDING = b'\n'
+
+ with open(file_path, 'rb') as open_file:
+ content = open_file.read()
+
+ content = content.replace(WINDOWS_LINE_ENDING, UNIX_LINE_ENDING)
+
+ with open(file_path, 'wb') as open_file:
+ open_file.write(content)
+
+ def help_window(self):
+ self.top = tk.Toplevel()
+ self.top.title("Usage")
+ label= tk.Label(self.top,text="Open: takes in input DOT files,but can also it can get:\n"+
+ " - Chorgram file (.txt), a grammar used for Global Graph\n"+
+ " - DOT files (.dot) generated by Domitilla and converte them into DOT files with CA sintax\n\n\n"+
+ "Once taken one or more files as input, Corinne can apply some functions on it:\n\n"+
+ " - Product: a cartesian product of two CA\n"+
+ " - Synchronization: given a CA, it can synchronize two participants of its\n"+
+ " - Projection: given a CA, you can select one participant from it and get the relative CFSM",justify=tk.LEFT,padx=15,pady=15).pack()
+
+ def v_initUi(self, master):
+ self.master.title("Corinne 3.0")
+ self.master.grid_columnconfigure(0, weight=1)
+ self.master.grid_rowconfigure(1, weight=1)
+ self.master.option_add('*foreground', 'black')
+ self.master.option_add('*background', 'white')
+
+ # Style for ttk widgets
+ style = ttk.Style()
+ style.configure("TNotebook", background=self.COLOR_frames, borderwidth=1, highlightthickness=1)
+ style.configure("TNotebook.Tab", background=self.COLOR_tabs, foreground="black",
+ lightcolor=self.COLOR_frames, borderwidth=0)
+ style.map("TNotebook.Tab", background=[("selected", self.COLOR_buttons)],
+ foreground=[("selected", 'black')])
+ style.configure("TFrame", background=self.COLOR_frames, foreground="black")
+
+ # get screen resolution
+ self._screen_width, self._screen_height = master.winfo_screenwidth(), master.winfo_screenheight()
+ start_x = int((self._screen_width / 4))
+ start_y = int((self._screen_height / 4))
+ # fit the gui at screen resolution
+ master.geometry('%dx%d+%d+%d' % (self._screen_width / 2, self._screen_height / 2, start_x, start_y))
+
+ #self.v_splashscreen(master,start_x,start_y)
+ #create all the containers
+ menu_frame = Frame(master,padx=10, pady=10)
+ menu_frame = Frame(master)
+ menu_frame.grid(row=0,column=0,sticky=(tk.N, tk.S, tk.E, tk.W))
+ menu_frame.configure(bg=self.COLOR_frames)
+ menu_frame.grid_columnconfigure(0, weight=1)
+ menu_frame.grid_columnconfigure(1,weight=1)
+
+
+ tab_frame = Frame(master,pady=15)
+ tab_frame.grid(row=1, column=0, sticky=(tk.N, tk.S, tk.E, tk.W))
+ tab_frame.grid(row=1, column=0, sticky=(tk.N, tk.S, tk.E, tk.W))
+ tab_frame.configure(bg=self.COLOR_frames)
+ tab_frame.grid_columnconfigure(0, weight=1)
+ tab_frame.grid_rowconfigure(0, weight=1)
+
+ log_frame = Frame(master)
+ log_frame.grid(row=2, column=0, sticky=(tk.N, tk.S, tk.E, tk.W))
+
+ #log_frame.grid(row=1, column=1, sticky=(tk.N, tk.S, tk.E, tk.W))
+
+ log_frame.configure(bg=self.COLOR_frames)
+ log_frame.grid_columnconfigure(0, weight=1)
+
+
+ prova = Image.open("icons/open.png")
+ open_icon = ImageTk.PhotoImage(prova)
+ render_icon = PhotoImage(file="icons/render.png")
+ prod_icon = PhotoImage(file="icons/product.png")
+ sync_icon = PhotoImage(file="icons/sync.png")
+ proj_icon = PhotoImage(file="icons/projection.png")
+
+ menu_bar = Menu(master)
+ file_menu = Menu(menu_bar,tearoff=False)
+ file_menu.add_command(label="Open",compound=tk.LEFT,command= lambda: self.open_file(None))
+ file_menu.add_command(label="Open Saved Tabs",command=self.v_restoreSession)
+ menu_bar.add_cascade(label="File",menu=file_menu)
+
+ trasformation_menu = Menu(menu_bar,tearoff=False)
+ trasformation_menu.add_command(label="Product",compound=tk.LEFT,command=self.open_product_view)
+ trasformation_menu.add_command(label="Sync",compound=tk.LEFT,command=self.open_sync_view)
+ trasformation_menu.add_command(label="Projection",compound=tk.LEFT,command=self.open_proj_view)
+ menu_bar.add_cascade(label="Trasformations",menu=trasformation_menu)
+
+ demonstration_menu = Menu(menu_bar, tearoff=False)
+ demonstration_menu.add_command(label="Synchronous-well-formedness",compound=tk.LEFT,command=self.open_well_formedness)
+ demonstration_menu.add_separator()
+ demonstration_menu.add_command(label="Well-branchedness", compound=tk.LEFT, command=self.open_well_branchedness)
+ demonstration_menu.add_command(label="Well-sequencedness", compound=tk.LEFT,
+ command=self.open_well_sequencedness)
+ demonstration_menu.add_separator()
+ demonstration_menu.add_command(label="Asynchronous-well-formedness",compound=tk.LEFT,command=self.open_well_formedness)
+ demonstration_menu.add_separator()
+ demonstration_menu.add_command(label="Well-branchedness",compound=tk.LEFT,command=self.open_well_branchedness)
+ demonstration_menu.add_command(label="Well-sequencedness",compound=tk.LEFT,command=self.open_asynchronous_well_sequencedness)
+ menu_bar.add_cascade(label="Properties", menu=demonstration_menu)
+
+
+ help_menu = Menu(menu_bar,tearoff=False)
+ help_menu.add_command(label="ReadMe",compound=tk.LEFT,command=self.help_window)
+ menu_bar.add_cascade(label="Help", menu= help_menu)
+
+ self.master.config(menu=menu_bar)
+
+ # create the log box
+ #self._log = Listbox(log_frame, highlightthickness=0, height=1, background=self.COLOR_log,
+ # foreground=self.COLOR_foreground)
+ self._log = Listbox(log_frame, highlightthickness=0, height=4, background=self.COLOR_log,
+ foreground=self.COLOR_foreground)
+
+ self._log.pack(side="bottom", fill="x",padx=5,pady=5)
+
+ self.tabs = ttk.Notebook(tab_frame)
+
+
+    def open_file(self, path):
+        # If no path was given, ask the user to pick a file.
+        if path is None:
+            path = filedialog.askopenfilename(initialdir=".",
+                                              filetypes=(("DOT graph", "*.gv *.dot"),
+                                                         ("Chorgram grammar", "*.txt"), ("all files", "*.*")),
+                                              title="Choose a file."
+                                              )
+        msg_result = []
+        # Check in case the user enters an unknown file
+        # or closes the dialog without choosing a file.
+        try:
+            # normalize Windows line endings before parsing (inside try: path may be '')
+            self.dos2unix(path)
+            ext = os.path.splitext(path)
+            # Chorgram file
+            if ext[1] == '.txt':
+                msg_result = self.__open_chorgram_file(path)
+            # DOT files
+            elif ext[1] == '.dot' or ext[1] == '.gv':
+                msg_result = self.__open_dot_file__(path)
+            else:
+                # os.path.splitext returns a (root, extension) tuple: show the extension only
+                self.popupmsg("Unknown extension file: " + ext[1])
+            # update log box
+            self.log(msg_result)
+        except Exception:
+            print("Unexpected error:", sys.exc_info())
+
+ def __open_chorgram_file(self, path):
+ path_splitted = os.path.split(path)
+        # ask where to store the converted dot file
+ ask_for_path: bool = messagebox.askyesno("Chorgram", "A Chorgram file was inserted\n" +
+ "Do you wish to save the converted dot file in " +
+ path_splitted[0] + "?\n" +
+ "(Click NO to choose a new path)")
+ if ask_for_path: # Yes, use same path
+ # (input path, path to store)
+ msg_result, graph_name = self.controller.GGparser(path, path_splitted[0])
+ else:
+ new_folder = filedialog.askdirectory()
+ msg_result, graph_name = self.controller.GGparser(path, new_folder)
+ #changes extension from .txt to .dot
+ path=path[:-4]+".dot"
+ self.__add_new_tab__(graph_name,path)
+ return msg_result
+
+ def __open_dot_file__(self, path):
+ # result[0] domitilla boolean
+ # result[1] a message
+ # result[2] graph name
+ result = self.controller.DOTparser(path)
+ msg_result = result[1]
+        if result[0]:  # if a Domitilla graph was found
+            # ask where to store the converted dot file
+ path_splitted = os.path.split(path)
+ ask_for_path: bool = messagebox.askyesno("Domitilla", "A Domitilla file was inserted\n"
+ "Do you wish to store the converted file in " +
+ path_splitted[0] + "?\n"
+ "(Click NO to choose a new path)")
+ if ask_for_path: # Yes, use same path
+ msg_result.append(
+ self.controller.DomitillaConverter(result[2], path,
+ path_splitted[0])) # (graph name, input path, path to store)
+ else:
+ new_folder = filedialog.askdirectory()
+ msg_result.append(self.controller.DomitillaConverter(result[2], path, new_folder))
+ if len(result) > 2: # case NO-errors detected
+ # add a new tab for the new graph just opened
+ self.__add_new_tab__(result[2],path)
+ return msg_result
+
+ def open_render_view(self):
+ try:
+ path = filedialog.askopenfilename(initialdir=".", filetypes=(("DOT graph", "*.gv *.dot"),
+ ("all files", "*.*")), title="Choose a file.")
+ path_splitted = os.path.split(path)
+ ext = path_splitted[1].split('.')
+            # Check in case the user enters an unknown file
+            # or closes the dialog without choosing a file.
+ if ext[1] != 'dot' and ext[1] != 'gv':
+ self.popupmsg("Wrong extension file inserted!\n"
+ "Please insert a DOT file")
+ else:
+ # define the frame and its geometry
+ r_window = tk.Toplevel(padx=20, pady=20, bg=self.COLOR_frames)
+ r_window.wm_title("Render")
+ r_window.resizable(False, False)
+ self.__set_window_dimension__(r_window)
+ label_format = tk.Label(r_window, text="Choose a file format for render:", fg=self.COLOR_foreground,
+ bg=self.COLOR_frames, wraplength=500)
+ label_format.grid(row=0, column=0)
+
+ # Initialize file format variable for radiobutton
+ option = tk.StringVar()
+ # Radiobutton
+ rb1 = Radiobutton(r_window, text='png', value="png", var=option, bg=self.COLOR_frames)
+ rb2 = Radiobutton(r_window, text='pdf', value="pdf", var=option, bg=self.COLOR_frames)
+ rb1.grid(row=1, column=0)
+ rb2.grid(row=1, column=1)
+ # TODO try except for wrong dot files
+ b = Button(r_window, text='Render', bg=self.COLOR_buttons,
+ command=lambda: (self.log(["[RENDER] " + self.controller.render(path, option.get(),True)]),
+ r_window.destroy()))
+ b.grid(row=2, column=1)
+ except:
+ pass
+
+ def open_product_view(self):
+ # define the frame and its geometry
+ p_window = tk.Toplevel(padx=20, pady=20, bg=self.COLOR_frames)
+ p_window.wm_title("Product")
+ p_window.resizable(False, False)
+ # set window dimension
+ self.__set_window_dimension__(p_window)
+
+ # label and combo for 1st graph
+ lbl1 = tk.Label(p_window, text="Choose 1st Graph", bg=self.COLOR_frames, fg=self.COLOR_foreground)
+ lbl1.grid(row=0, column=0, pady=10)
+ combo1 = ttk.Combobox(p_window, values=list(self.controller.get_all_ca().keys()))
+ combo1.grid(row=0, column=1, pady=10)
+
+ # label and combo for 2st graph
+ lbl2 = tk.Label(p_window, text="Choose 2st Graph", bg=self.COLOR_frames, fg='white')
+ lbl2.grid(row=1, column=0, pady=10)
+ combo2 = ttk.Combobox(p_window, values=list(self.controller.get_all_ca().keys()))
+ combo2.grid(row=1, column=1, pady=10)
+
+ make_button = Button(p_window, text='Make product', bg=self.COLOR_buttons,
+ command=lambda:
+ (self.__exec_product_button__(combo1.get(), combo2.get()),
+ p_window.destroy()))
+ make_button.grid(row=2, column=0, pady=10)
+
+ def open_sync_view(self):
+ s_window = tk.Toplevel(padx=20, pady=20, bg=self.COLOR_frames)
+ s_window.wm_title("Synchronisation")
+ s_window.resizable(False, False)
+ # set window dimension
+ self.__set_window_dimension__(s_window)
+
+ # label and combo for the graph to synchronize
+ lbl1 = tk.Label(s_window, text="Choose Graph", fg='white', bg=self.COLOR_frames)
+ lbl1.grid(row=0, column=0, padx=10, pady=10)
+ option_v1 = tk.StringVar()
+ option_v2 = tk.StringVar()
+ combo = ttk.Combobox(s_window, values=list(self.controller.get_all_ca().keys()))
+ combo.bind("<>", lambda event: self.__make_sync_interface_menu__(s_window, list(
+ self.controller.get_participants(combo.get())), option_v1, option_v2))
+ combo.grid(row=1, column=0, padx=10, pady=10)
+
+ sync_button = Button(s_window, text='Synchronize', bg=self.COLOR_buttons,
+ command=lambda: (
+ self.__exec_sync_button__(combo.get(), option_v1.get(), option_v2.get()),
+ s_window.destroy()))
+
+ sync_button.grid(row=4, column=0)
+
+ def open_proj_view(self):
+ proj_window = tk.Toplevel(padx=20, pady=20, bg=self.COLOR_frames)
+ proj_window.wm_title("Projection")
+ proj_window.resizable(False, False)
+
+ # set window dimension
+ self.__set_window_dimension__(proj_window)
+
+ # label and combo for the graph to synchronize
+ lbl1 = tk.Label(proj_window, text="Choose Graph", bg=self.COLOR_frames, fg='white')
+ lbl1.grid(row=0, column=0, padx=10, pady=10)
+
+ option = tk.StringVar()
+ combo = ttk.Combobox(proj_window, values=list(self.controller.get_all_ca().keys()))
+ combo.bind("<>", lambda event: self.__make_proj_participant_menu__(proj_window, list(
+ self.controller.get_participants(combo.get())), option))
+ combo.grid(row=1, column=0, padx=10, pady=10)
+
+ proj_button = Button(proj_window, text='Project', bg=self.COLOR_buttons,
+ command=lambda: (
+ self.__exec_proj_button__(combo.get(), option.get()),
+ proj_window.destroy()))
+
+ proj_button.grid(row=4, column=0)
+
+
+ def open_well_formedness(self):
+ p_window = tk.Toplevel(padx=20, pady=20, bg=self.COLOR_frames)
+ p_window.wm_title("Well-formedness")
+ p_window.resizable(False, False)
+
+ # set window dimension
+ self.__set_window_dimension__(p_window)
+
+ # label and combo for 1st graph
+ lbl1 = tk.Label(p_window, text="Choose Graph", bg=self.COLOR_frames, fg=self.COLOR_foreground)
+ lbl1.grid(row=0, column=0, pady=10)
+ combo1 = ttk.Combobox(p_window, values=list(self.controller.get_all_ca().keys()))
+ combo1.grid(row=0, column=1, pady=10)
+
+ verify_button = Button(p_window, text='Verify', bg=self.COLOR_buttons,
+ command=lambda:
+ (self.__exec_well_formedness__(combo1.get()),
+ p_window.destroy()))
+ verify_button.grid(row=2, column=0, pady=10)
+
+
+
+
+
+ def open_well_branchedness(self):
+ p_window = tk.Toplevel(padx=20, pady=20, bg=self.COLOR_frames)
+ p_window.wm_title("Well-branchedness")
+ p_window.resizable(False, False)
+
+ # set window dimension
+ self.__set_window_dimension__(p_window)
+
+ # label and combo for 1st graph
+ lbl1 = tk.Label(p_window, text="Choose Graph", bg=self.COLOR_frames, fg=self.COLOR_foreground)
+ lbl1.grid(row=0, column=0, pady=10)
+ combo1 = ttk.Combobox(p_window, values=list(self.controller.get_all_ca().keys()))
+ combo1.grid(row=0, column=1, pady=10)
+
+ verify_button = Button(p_window, text='Verify', bg=self.COLOR_buttons,
+ command=lambda:
+ (self.__exec_well_branchedness__(combo1.get()),
+ p_window.destroy()))
+ verify_button.grid(row=2, column=0, pady=10)
+
+ def open_well_sequencedness(self):
+ p_window = tk.Toplevel(padx=20, pady=20, bg=self.COLOR_frames)
+ p_window.wm_title("Well-sequencedness")
+ p_window.resizable(False, False)
+
+ # set window dimension
+ self.__set_window_dimension__(p_window)
+
+ # label and combo for 1st graph
+ lbl1 = tk.Label(p_window, text="Choose Graph", bg=self.COLOR_frames, fg=self.COLOR_foreground)
+ lbl1.grid(row=0, column=0, pady=10)
+ combo1 = ttk.Combobox(p_window, values=list(self.controller.get_all_ca().keys()))
+ combo1.grid(row=0, column=1, pady=10)
+
+ verify_button = Button(p_window, text='Verify', bg=self.COLOR_buttons,
+ command=lambda:
+ (self.__exec_well_sequencedness__(combo1.get()),
+ p_window.destroy()))
+ verify_button.grid(row=2, column=0, pady=10)
+
+ def open_asynchronous_well_sequencedness(self):
+ p_window = tk.Toplevel(padx=20, pady=20, bg=self.COLOR_frames)
+ p_window.wm_title("Asynchronous-Well-sequencedness")
+ p_window.resizable(False, False)
+
+ # set window dimension
+ self.__set_window_dimension__(p_window)
+
+ # label and combo for 1st graph
+ lbl1 = tk.Label(p_window, text="Choose Graph", bg=self.COLOR_frames, fg=self.COLOR_foreground)
+ lbl1.grid(row=0, column=0, pady=10)
+ combo1 = ttk.Combobox(p_window, values=list(self.controller.get_all_ca().keys()))
+ combo1.grid(row=0, column=1, pady=10)
+
+ verify_button = Button(p_window, text='Verify', bg=self.COLOR_buttons,
+ command=lambda:
+ (self.__exec_asynchronous_well_sequencedness__(combo1.get()),
+ p_window.destroy()))
+ verify_button.grid(row=2, column=0, pady=10)
+
+
+
+
+ def __add_new_tab__(self, graph_name,v_path):
+
+ self.tabs.grid(row=0, column=0, sticky=(tk.N, tk.S, tk.E, tk.W), padx=10, pady=5)
+ frame = ttk.Frame(self.tabs)
+
+ frame.grid_columnconfigure(4, weight=1)
+
+ # Add the tab
+ self.tabs.add(frame, text=graph_name)
+
+ label_s = tk.Label(frame, text = "N° States:",wraplength=500,bg=self.COLOR_frames,fg=self.COLOR_foreground)
+ label_s.grid(row=0,column=0,pady=10,padx=20)
+
+ entry_s = tk.Label(frame, text=(str(len(self.controller.get_states(graph_name)))),wraplength=500,bg=self.COLOR_frames,fg=self.COLOR_foreground)
+ entry_s.grid(row=0,column=1,pady=10,padx=20)
+
+ label_e = tk.Label(frame, text="N° Edges:", wraplength=500, bg=self.COLOR_frames, fg=self.COLOR_foreground)
+ label_e.grid(row=1, column=0, pady=10, padx=20)
+
+ entry_e = tk.Label(frame, text=str(len(self.controller.get_edges(graph_name))), wraplength=500, bg=self.COLOR_frames, fg=self.COLOR_foreground)
+ entry_e.grid(row=1, column=1, pady=10, padx=20)
+
+ label_sn = tk.Label(frame, text="Start Node:", wraplength=500, bg=self.COLOR_frames, fg=self.COLOR_foreground)
+ label_sn.grid(row=2, column=0, pady=10, padx=20)
+
+ entry_sn = tk.Label(frame, text=str(self.controller.get_start_node(graph_name)), wraplength=500, bg=self.COLOR_frames, fg=self.COLOR_foreground)
+ entry_sn.grid(row=2, column=1, pady=10, padx=20)
+
+ label_eps = tk.Label(frame, text="Epsilon moves:", wraplength=500, bg=self.COLOR_frames, fg=self.COLOR_foreground)
+ label_eps.grid(row=5, column=0, pady=10, padx=20)
+
+ entry_eps = tk.Label(frame, text=self.controller.check_for_epsilon_moves(graph_name), wraplength=500, bg=self.COLOR_frames, fg=self.COLOR_foreground)
+ entry_eps.grid(row=5, column=1, pady=10, padx=20)
+
+ label_l = tk.Label(frame, text="N° Labels:", wraplength=500, bg=self.COLOR_frames, fg=self.COLOR_foreground)
+ label_l.grid(row=3, column=0, pady=10, padx=20)
+
+ elements_l = list(self.controller.get_labels(graph_name))
+ option_l = tk.StringVar()
+ if not elements_l:
+ print("lista vuota")
+ else:
+ option_l.set(elements_l[0])
+
+ label_menu = ttk.OptionMenu(frame, option_l, elements_l[0], *elements_l)
+ label_menu.grid(row=3, column=2, pady=10, padx=20)
+
+ entry_l = tk.Label(frame, text = len(elements_l), wraplength=500, bg=self.COLOR_frames, fg=self.COLOR_foreground)
+ entry_l.grid(row=3, column=1, pady=10, padx=20)
+
+ label_p = tk.Label(frame, text="N° Participants:", wraplength=500, bg=self.COLOR_frames, fg=self.COLOR_foreground)
+ label_p.grid(row=4, column=0, pady=10, padx=20)
+
+ elements_p = list(self.controller.get_participants(graph_name))
+ option_p = tk.StringVar()
+ if not elements_p:
+ print("seconda lista vuota")
+ else:
+ option_p.set(elements_p[0])
+
+ part_menu = ttk.OptionMenu(frame, option_p, elements_p[0], *elements_p)
+ part_menu.grid(row=4, column=2, pady=10, padx=20)
+
+ entry_p = tk.Label(frame, text=len(elements_p), wraplength=500, bg=self.COLOR_frames, fg=self.COLOR_foreground)
+ entry_p.grid(row=4, column=1, pady=10, padx=20)
+
+ self.controller.render(v_path,'png',False)
+ new_path = v_path + ".png"
+ image = Image.open(new_path)
+ scale = min(min((self._screen_width/2)/image.width,(self._screen_height-250)/image.height),1)
+ image = image.resize((int(image.width*scale),int(image.height*scale)), Image.ANTIALIAS)
+ img = ImageTk.PhotoImage(image)
+ labelprova = tk.Label(frame,image=img)
+ labelprova.photo=img
+ labelprova.grid(row=0,column=3,rowspan=5,padx=(150,0),pady=10)
+
+ # create close button
+ close_button = Button(frame, text='X', bg=self.COLOR_frames, highlightthickness=0, borderwidth=0, command=lambda: (
+ self.controller.remove_record(self.tabs.tab(self.tabs.select(), "text")),
+ # remove the record from opened graphs struct
+ self.tabs.forget(self.tabs.select()),
+ self.tabs_history.remove(v_path))) # delete the tab
+ close_button.grid(row=0, column=4, sticky=tk.E + tk.N)
+
+ #update tab in tab history
+ self.tabs_history.append(v_path)
+ # once created, select the tab
+ self.tabs.select(frame)
+
+ def __exec_sync_button__(self, combo_value, interface1, interface2):
+ path_to_store = filedialog.asksaveasfilename(initialdir=".", title="Save as",
+ filetypes=("DOT graph", "*.gv *.dot"))
+ result= self.controller.synchronize(combo_value, interface1, interface2, path_to_store)
+ # print the log message
+ self.log(result[0])
+ # create a new tab for the product graph
+ self.open_file(path_to_store+".dot")
+
+ def __exec_product_button__(self, combo_value1, combo_value2):
+ path_to_store = filedialog.asksaveasfilename(initialdir=".", title="Save as",
+ filetypes=("DOT graph", "*.gv *.dot"))
+ result = self.controller.make_product(combo_value1, combo_value2, path_to_store)
+ # print the log message
+ self.log(result[0])
+ # create a new tab for the product graph
+ self.open_file(path_to_store+".dot")
+
+ def __exec_proj_button__(self, combo_value, participant):
+ path_to_store = filedialog.asksaveasfilename(initialdir=".", title="Save as",
+ filetypes=("DOT graph", "*.gv *.dot"))
+ result = self.controller.projection(combo_value, participant, path_to_store)
+
+ self.open_file(path_to_store+".dot")
+
+ # print the log message
+ self.log(result[0])
+
+ def __make_sync_interface_menu__(self, frame, elements, option_v1, option_v2):
+ # label and optionMenu for the 1st interface
+ option_v1.set(elements[0])
+ lbl2 = tk.Label(frame, text="Select 1st participant", bg=self.COLOR_frames, fg=self.COLOR_foreground)
+ lbl2.grid(row=2, column=0, padx=10, pady=10)
+ op_menu_1 = ttk.OptionMenu(frame, option_v1, elements[0], *elements)
+ op_menu_1.grid(row=2, column=1, padx=10, pady=10)
+
+ # label and optionMenu for the 2st interface
+ option_v2.set(elements[0])
+ lbl3 = tk.Label(frame, text='Select 2st participant', bg=self.COLOR_frames, fg=self.COLOR_foreground)
+ lbl3.grid(row=3, column=0, pady=10)
+ op_menu_2 = ttk.OptionMenu(frame, option_v2, elements[0], *elements)
+ op_menu_2.grid(row=3, column=1, padx=10, pady=10)
+
+ # update window dimension
+ self.__set_window_dimension__(frame)
+
+ def __make_proj_participant_menu__(self, frame, elements, option):
+ option.set(elements[0])
+ lbl = tk.Label(frame, text='Select participant to project', bg=self.COLOR_frames, fg=self.COLOR_foreground)
+ lbl.grid(row=2, column=0, padx=10, pady=10)
+ op_menu = ttk.OptionMenu(frame, option, elements[0], *elements)
+ op_menu.grid(row=2, column=1, padx=10, pady=10)
+ self.__set_window_dimension__(frame)
+
+
+
+ def __exec_well_formedness__(self,combo_value):
+
+ result = self.controller.make_well_formedness(combo_value)
+ self.log(result[0])
+
+ return
+
+ def __exec_well_branchedness__(self,combo_value):
+
+ result = self.controller.make_well_branchedness(combo_value)
+ self.log(result[0])
+
+ return
+
+ def __exec_well_sequencedness__(self,combo_value):
+ result = self.controller.make_well_sequencedness(combo_value)
+ self.log(result[0])
+ return
+
+ def __exec_asynchronous_well_sequencedness__(self,combo_value):
+ result = self.controller.make_asynchronous_well_sequencedness(combo_value)
+ self.log(result[0])
+ return
+
+ def __set_window_dimension__(self, frame):
+ # set window dimension
+ width, height = frame.winfo_reqwidth(), frame.winfo_reqheight()
+ frame.geometry('+%d+%d' % (self._screen_width / 2 - width / 2, self._screen_height / 2 - height / 2))
+
    def log(self, msg):
        """Append *msg* to the GUI log box (and echo it to stdout)."""
        # Echo to stdout for debugging alongside the GUI log box.
        print(msg)
        # Write a message in the log box.
        # NOTE(review): iterating `msg` inserts one listbox entry per element;
        # if callers pass a plain string this inserts one entry per CHARACTER —
        # confirm callers always pass an iterable of lines.
        for line in msg:
            self._log.insert(tk.END, line)
        self._log.see(tk.END)
        # make the last item background red
        #self._log.itemconfig(tk.END, {'bg': 'red'})
+
+ def popupmsg(self, msg):
+ popup = tk.Toplevel(padx=20, pady=20)
+ popup.wm_title("!")
+ popup.resizable(False, False)
+
+ screen_width, screen_height = popup.winfo_screenwidth(), popup.winfo_screenheight()
+ width, height = popup.winfo_reqwidth(), popup.winfo_reqheight()
+
+ popup.geometry('+%d+%d' % (screen_width / 2 - width / 2, screen_height / 2 - height / 2))
+
+ max_size = popup.winfo_screenwidth() / 3
+ label = tk.Label(popup, text=msg, wraplength=max_size)
+ label.grid(row=0, column=0)
+
+ b = ttk.Button(popup, text="Okay", command=popup.destroy)
+ b.grid(row=1, column=0)
diff --git a/icons/logo-gif.gif b/icons/logo-gif.gif
new file mode 100644
index 0000000..e41881e
Binary files /dev/null and b/icons/logo-gif.gif differ
diff --git a/icons/logo-ico.ico b/icons/logo-ico.ico
new file mode 100644
index 0000000..d2c6620
Binary files /dev/null and b/icons/logo-ico.ico differ
diff --git a/icons/logo-png.png b/icons/logo-png.png
new file mode 100644
index 0000000..7981667
Binary files /dev/null and b/icons/logo-png.png differ
diff --git a/icons/open.png b/icons/open.png
new file mode 100644
index 0000000..8bc159a
Binary files /dev/null and b/icons/open.png differ
diff --git a/icons/product.png b/icons/product.png
new file mode 100644
index 0000000..0df439b
Binary files /dev/null and b/icons/product.png differ
diff --git a/icons/projection.png b/icons/projection.png
new file mode 100644
index 0000000..c10e733
Binary files /dev/null and b/icons/projection.png differ
diff --git a/icons/render.png b/icons/render.png
new file mode 100644
index 0000000..fba402c
Binary files /dev/null and b/icons/render.png differ
diff --git a/icons/sync.png b/icons/sync.png
new file mode 100644
index 0000000..db3d91d
Binary files /dev/null and b/icons/sync.png differ
diff --git a/main.py b/main.py
new file mode 100644
index 0000000..e30e849
--- /dev/null
+++ b/main.py
@@ -0,0 +1,19 @@
+from gui import MyGui
+from controller import Controller
+from tkinter import Tk, messagebox
+import tkinter as tkinter
+from PIL import ImageTk, Image
+
+import os.path
+
def main():
    """Create the Tk root window, install the application icon, wire the
    controller to the GUI and start the event loop."""
    root = Tk()
    # Load the .ico through PIL so it can be handed to Tk as a PhotoImage.
    icon = ImageTk.PhotoImage(Image.open("icons/logo-ico.ico"))
    # NOTE(review): root._w is a private Tk attribute; root.iconphoto(True, icon)
    # is the documented equivalent — confirm before changing.
    root.tk.call("wm","iconphoto",root._w,icon)
    c = Controller()
    MyGui(root, c)
    root.mainloop()


if __name__ == '__main__':
    main()
\ No newline at end of file
diff --git a/well_formedness.py b/well_formedness.py
new file mode 100644
index 0000000..a42715c
--- /dev/null
+++ b/well_formedness.py
@@ -0,0 +1,589 @@
+import os, itertools
+from enum import EnumMeta
+from _ast import Or
+from antlr4.atn.SemanticContext import OR
+import orca
+from secretstorage import item
+#from pygments.lexers._cocoa_builtins import res
+
+
# TODO: add a module-level introduction describing the well-formedness checks
+
+
+# Check whether a run has cycle
+# with at most one occurrence
+
def check_q_branch(array):
    """Return True when no state occurs three or more times in *array*.

    A run may traverse a cycle at most once, so a third occurrence of any
    state marks an invalid pre-candidate q-branch.  The original test used
    ``count == 3``, which silently accepted four or more occurrences;
    ``>= 3`` closes that gap (equivalent in the incremental DFS, where a
    run is rejected as soon as the third occurrence appears).
    """
    return all(array.count(state) < 3 for state in set(array))
+
+
+# DFS given a set of transitions
+# returns the result in runs
+
def search(i, arr, insieme1, runs):
    """Depth-first enumeration of runs over the transition set *insieme1*.

    i:        current state, or None to start a fresh run from every transition.
    arr:      the run (list of states) built so far (ignored when i is None).
    insieme1: iterable of transitions where t[0] is the source and t[2] the target.
    runs:     output accumulator — every discovered run is appended here.
    """
    if (i == None):
        # Seed phase: start a two-state run from every transition.
        for j in insieme1:
            arr = [j[0], j[2]]
            runs.append(arr)
            search(j[2], arr, insieme1, runs)
    else:
        # Extension phase: follow every transition leaving state i.
        for j in insieme1:
            if i == j[0]:
                # Copy the current run before extending so sibling
                # recursive calls do not share the same list object.
                io = []
                for ite in arr:
                    io.append(ite)
                io.append(j[2])
                # Only keep (and keep extending) runs whose states repeat
                # fewer than three times — i.e. at most one full cycle.
                if check_q_branch(io):
                    runs.append(io)
                    search(j[2], io, insieme1, runs)
+
+
+# Given a set of transitions, it
+# returns all possible
+# pre-candidate q-branches
+
def get_all_runs(transitions):
    """Return every pre-candidate q-branch over *transitions*.

    The parameter was renamed from ``set`` (it shadowed the builtin); all
    visible callers pass it positionally, so the rename is safe.

    After the DFS, only runs that close at most one cycle are kept: a run
    with two or more revisited states is discarded.  The first node of
    each kept run is the q-node.
    """
    runs = []
    search(None, None, transitions, runs)

    candidate_q_cycle_one = []
    for run in runs:
        seen = set()
        revisits = 0
        for node in run:
            if node in seen:
                revisits += 1
            else:
                seen.add(node)
        if revisits < 2:
            candidate_q_cycle_one.append(run)

    return candidate_q_cycle_one
+
+
# Given two states, returns the
# transition between those two
# states
+
def returnedge(s1, s2, edges):
    """Return the transition going from state *s1* to state *s2*, or None
    when *edges* contains no such transition."""
    return next((edge for edge in edges if edge[0] == s1 and edge[2] == s2), None)
+
+
+# Support function of
+# "get_candidate_runs"
+# Iters every passed
+# run and checks
+# sublist()
+
def iter(a, runs):
    """Return True when *a* is a proper prefix (see ``sublist``) of any run
    in *runs*.

    NOTE: this module-level name shadows the builtin ``iter``; the name is
    kept because it is part of this module's public interface.
    """
    return any(sublist(a, candidate) for candidate in runs)
+
+
# Given a set of runs, filters the
# maximal pre-candidate q-branches
+# (candidate q-branch)
+
def get_candidate_runs(runs, states):
    """Filter *runs* down to the maximal pre-candidate q-branches: a run is
    kept when it is not a proper prefix of any other run.

    *states* is accepted for interface compatibility but is not used.
    """
    return [run for run in runs if not iter(run, runs)]
+
+
# Returns True if the path is a
# pre-candidate q-branch
+
def sublist(a, b):
    """Return True when *a* is a proper prefix of *b* (identical runs are
    excluded)."""
    return a != b and a == b[:len(a)]
+
+
+# Check whether a pair of runs is cofinal, with no common node but the first and the last one
+
def first_qspan_condition(sigma1, sigma2):
    """Cofinal-pair check: the two runs form a q-span when they start at the
    same node, end at the same node, and share no other node (the endpoints
    must not reappear in either interior)."""
    if sigma1[0] != sigma2[0] or sigma1[-1] != sigma2[-1]:
        return False

    start, end = sigma1[0], sigma1[-1]
    inner1, inner2 = sigma1[1:-1], sigma2[1:-1]

    shares_interior = any(node in inner1 for node in inner2)
    endpoint_recurs = (start in inner1 or end in inner1
                       or start in inner2 or end in inner2)
    return not shares_interior and not endpoint_recurs
+
+
+# Check whether a pair of runs is two candidate
+# q-branches with no common node but the first
+
def second_qspan_condition(sigma1, sigma2, candidates):
    """Return True when both runs are candidate q-branches that share no
    node other than their common first node."""
    if sigma1 not in candidates or sigma2 not in candidates:
        return False

    tail1, tail2 = sigma1[1:], sigma2[1:]
    return not any(node in tail1 for node in tail2)
+
+
+# Check whether a pair is formed by a candidate
+# q-branch and a loop on q with no other common
+# nodes
+
def third_qspan_condition(sigma1, sigma2, candidates):
    """Return True when the pair is formed by a loop on q and a candidate
    q-branch that share no node other than q (checked in both orientations)."""
    a = sigma1.copy()
    b = sigma2.copy()

    # First orientation: sigma1 is a loop (starts and ends on the same node).
    if a[0] == a[-1]:

        if b in candidates:
            # Strip the loop's endpoints so only interior nodes are compared.
            a.pop(0)
            a.pop(-1)

            if not (any(x in a for x in b)):
                return True

    # Second orientation: sigma2 is the loop.
    # NOTE(review): if the first branch popped a's endpoints but did not
    # return, `a` is already stripped here, so the comparison below runs
    # against the mutated list — confirm this ordering is intentional.
    if b[0] == b[-1]:

        if a in candidates:
            b.pop(0)
            b.pop(-1)

            if not (any(y in b for y in a)):
                return True

    return False
+
+
+# Well-branchedness first condition:
+# Check if in all edges if a state is sender it cannot also be receiver in the same edge
+
def well_branchedness_first_condition(edges, states):
    """Well-branchedness condition 1: among the transitions leaving any
    single state, no participant may act both as a sender and as a receiver.

    edges:  transitions shaped (source, label, target, sender, receiver, ...).
    states: iterable of states to inspect.
    Returns a diagnostic string listing offending participant/state pairs,
    or None when the condition holds.

    NOTE(review): nesting reconstructed from a whitespace-mangled source —
    the membership checks are assumed to be scoped to transitions leaving
    state i; confirm against the original file.
    """
    temp_res = ""
    for i in states:
        # Participants seen so far as senders/receivers in transitions from i.
        senders = []
        receivers = []

        for j in edges:
            if j[0] == i:
                senders.append(j[3])
                receivers.append(j[4])

                # This receiver already acted as a sender from this state.
                if j[4] in senders:
                    temp_res = temp_res + str(j[4]) + " in " + str(j[0]) + "; "

                # This sender already acted as a receiver from this state.
                if j[3] in receivers:
                    temp_res = temp_res + str(j[3]) + " in " + str(j[0]) + "; "
    if temp_res:
        temp_res = "Participant work as sender and receiver in transition: " + temp_res
        return temp_res
    else:
        return None
+
+
+# Well-branchedness second condition:
+# Check for each edge where a state is not a sender in each transition if this two edges
+# are concurrent
+
def well_branchedness_second_condition(edges, states, participants):
    """Well-branchedness condition 2: whenever two transitions with
    different senders leave the same state, they must be concurrent, i.e.
    they must commute through a common successor state with swapped labels.

    Returns a diagnostic string listing the non-commuting pairs, or None.

    NOTE(review): nesting reconstructed from a whitespace-mangled source;
    the early `return None` aborts the whole scan at the first commuting
    diamond — confirm that is the intended semantics.  The `print(x)` is
    debug output kept from the original.
    """
    temp_res = ""
    for s in states:
        for a in participants:
            for i in edges:
                if i[3] == a:
                    for j in edges:
                        # Two transitions from the same state, different senders.
                        if i[0] == j[0] and i[3] != j[3]:

                            for x in edges:
                                print(x)
                                for y in edges:

                                    if x[0] == i[2] and y[0] == j[2]:
                                        # Found all edges

                                        # Commuting diamond: same final state
                                        # reached with the labels swapped.
                                        if x[2] == y[2] and x[1] == j[1] and y[1] == i[1]:
                                            return None
                                        val = "(" + str(i[0]) + " " + str(i[1]) + " " + str(i[2]) + ")" + " and " \
                                              + "(" + str(i[0]) + " " + str(j[1]) + " " + str(j[2]) + ")" + " | "
                                        # Avoid duplicate diagnostics.
                                        if val not in temp_res:
                                            temp_res = temp_res + "(" + str(i[0]) + " " + str(i[1]) + " " + str(
                                                i[2]) + ")" + " and " \
                                                       + "(" + str(i[0]) + " " + str(j[1]) + " " + str(j[2]) + ")" + " | "
    if temp_res:
        return temp_res
    else:
        return None
+
+
# Given a set of transitions and a participant, returns
# the first transition in which the participant appears
def get_first(participant, edges):
    """Return the first transition in *edges* in which *participant* takes
    part as sender (index 3) or receiver (index 4); None when the
    participant is absent or *edges* itself is None."""
    if edges is None:
        return None
    for transition in edges:
        if participant in (transition[3], transition[4]):
            return transition
    return None
+
+
# Given a participant and two sets of transitions,
# returns the first pair of labels involving the
# participant that are not equal to each other
def get_first_label(participant, edges1, edges2):
    """Return the first pair of transitions involving *participant* whose
    labels differ between the two projections.

    Repeatedly takes the first transition involving *participant* from each
    list; while both exist and carry the same label, the matching pair is
    removed (both input lists are mutated, as in the original recursive
    version) and the search continues.  Returns the first differing pair,
    or a pair containing None when one side is exhausted.
    """
    while True:
        first = get_first(participant, edges1)
        second = get_first(participant, edges2)
        if first is None or second is None or first[1] != second[1]:
            return first, second
        edges1.remove(first)
        edges2.remove(second)
+
+
+# Simply check the "form" of the couple in the last part of the
+# third Well-Branchedness condition
+
def check_form(edge1, edge2, B):
    """Return True when both transitions are received by *B* but differ in
    their sender (index 3) or in their message (index 5) — the admissible
    shape in the last part of the third well-branchedness condition."""
    both_received_by_B = edge1[4] == B and edge2[4] == B
    differ = edge1[3] != edge2[3] or edge1[5] != edge2[5]
    return both_received_by_B and differ
+
+
+# Checks if a q-span has the first pair of different labels
+# on the runs on the projection of sigma1 and sigma2 given
+# a partecipant checks the form in "check_form"
+
def check_validity(sigma1, sigma2, edges, participants):
    """Validate one q-span (sigma1, sigma2) for the third condition.

    Rebuilds the transition sequence of each run, removes the chooser A
    (the sender of the q-span's first transition), and for every remaining
    participant B compares the first differing pair of labels on the two
    projections.  Returns the first participant B for which the pair is
    not in the admissible form (see ``check_form``), or None when the
    q-span is valid.
    """
    edges1 = []
    edges2 = []

    # Transition sequence along sigma1 (consecutive state pairs).
    for i in range(len(sigma1)):

        if i + 1 >= len(sigma1):
            break

        edges1.append(returnedge(sigma1[i], sigma1[i + 1], edges))

    # Transition sequence along sigma2.
    for i in range(len(sigma2)):

        if i + 1 >= len(sigma2):
            break

        edges2.append(returnedge(sigma2[i], sigma2[i + 1], edges))

    B = participants.copy()

    # First transition of the second run; both runs start from the same
    # state and, by construction of the q-span, share the same sender A.
    chooser = returnedge(sigma1[0], sigma2[1], edges)

    A = chooser[3]
    B.remove(A)

    #listParticipant = []

    for bee in B:

        firstlabel, secondlabel = get_first_label(bee, edges1, edges2)
        # One projection exhausted but not the other: bee can tell the runs apart.
        if (firstlabel == None) or (secondlabel == None):
            if firstlabel != secondlabel:
                #listParticipant.append(bee)
                return bee

        # Both projections have a (differing) first label: check its form.
        elif firstlabel[1] != secondlabel[1]:

            if not (check_form(firstlabel, secondlabel, bee)):
                print("find")
                #listParticipant.append(bee)
                return bee

    #return listParticipant
    return None
+
+
+# Well-branchedness third condition:
+# First it finds every q-span where A chooses at, then it checks for every q-span if they pass the validity check in the definition
+# by calling "check_validity" for every q-span
+
def well_branchedness_third_condition(states, edges, participants):
    """Well-branchedness condition 3.

    Collects every q-span pair of runs sharing a chooser participant A,
    then validates each pair with ``check_validity``.  Returns a string
    describing the violating pairs, or None when the condition holds.

    Fix: the original guard ``if not (val == None)`` was always true
    (``val`` is a string, never None), so the trailing ``return None`` was
    unreachable and an empty string was returned when every q-span passed.
    """
    runs = get_all_runs(edges)
    candidate = get_candidate_runs(runs, states)

    passed = []

    for p in states:

        for A in participants:

            for sigma, sigma_primo in itertools.combinations(runs, 2):

                if sigma[0] == sigma_primo[0] == p:

                    # Check whether the pair forms a q-span.
                    if (first_qspan_condition(sigma, sigma_primo)
                            or second_qspan_condition(sigma, sigma_primo, candidate)
                            or third_qspan_condition(sigma, sigma_primo, candidate)):

                        # Keep the pair only when A chooses in both runs.
                        chooserA = returnedge(sigma[0], sigma[1], edges)
                        chooserB = returnedge(sigma_primo[0], sigma_primo[1], edges)

                        if chooserA[3] == chooserB[3] == A:
                            passed.append([sigma, sigma_primo])

    val = ""
    for pair in passed:
        culprit = check_validity(pair[0], pair[1], edges, participants)
        if culprit:
            val = val + str((str(pair[0]), str(pair[1]), "due to participant " + str(culprit))) + "AND"

    # Report violations only when some q-span actually failed validation.
    return val if val else None
+
+
+
def well_branchedness_late_join_condition(states, edges, participants):
    """Late-join variant of the well-branchedness check.

    Finds states with several outgoing transitions, collects the runs
    branching from them, then reports (a) pairs of supposedly independent
    transitions that do not commute ("null intersection" part) and (b)
    branch pairs whose participants are never made aware of the chosen
    branch ("not null intersection" part).

    edges: transitions shaped (source, label, target, sender, receiver, ...).
    Returns a combined diagnostic string, or None when nothing was found.

    NOTE(review): nesting reconstructed from a whitespace-mangled source;
    the print statements are debug output kept from the original.
    """
    runs = get_all_runs(edges)

    # Find states with more than one outgoing transition.
    out_q_state = []
    states_list = list(states)
    edges_list = list(edges)
    for i in range(len(states_list)):
        count_out = 0
        for j in range(len(edges_list)):
            if states_list[i] == edges_list[j][0]:
                count_out = count_out + 1
        if count_out > 1:
            out_q_state.append(states_list[i])

    print(out_q_state)

    # Runs starting from a state with more than one outgoing transition.
    run_branch = []
    runs_list = list(runs)
    for item in out_q_state:
        for j in range(len(runs_list)):
            if item == runs_list[j][0]:
                run_branch.append(runs_list[j])

    # Drop runs that are prefixes of the run that follows them in the list.
    # NOTE(review): the final append raises IndexError when run_branch is
    # empty — confirm callers guarantee at least one branching run.
    run_branch_unique = []
    for i in range(len(run_branch)-1):
        str_1 = ','.join(run_branch[i])
        str_2 = ','.join(run_branch[i+1])
        if str_1 not in str_2:
            run_branch_unique.append(run_branch[i])
    run_branch_unique.append(run_branch[len(run_branch)-1])

    #print(run_branch)
    print(run_branch_unique)

    print(edges)

    # Part 1: two transitions from the same state with fully disjoint
    # participants must commute (close a diamond with swapped labels).
    val_null = ""
    for i in range(len(edges_list)-1):
        temp = 0
        for j in range(i+1, len(edges_list)):
            if edges_list[i][0]==edges_list[j][0]:
                # Same source, no participant in common.
                if edges_list[i][3] != edges_list[j][3] and edges_list[i][3] != edges_list[j][4] and edges_list[i][4] != edges_list[j][3] and edges_list[i][4] != edges_list[j][4]:
                    for k in range(len(edges_list)):
                        if edges_list[i][2] == edges_list[k][0]:
                            temp = 1
                            temp1 = 0
                            for l in range(len(edges_list)):
                                if edges_list[j][2] == edges_list[l][0] and edges_list[k][2] == edges_list[l][2]:
                                    temp1 = 1
                                    # Diamond closes but with mismatched labels.
                                    if edges_list[i][1] != edges_list[l][1] or edges_list[j][1] != edges_list[k][1]:
                                        val_null = val_null + "[(" + str(edges_list[i]) + "," + str(edges_list[k]) + ") AND (" + str(edges_list[j]) + "," + str(edges_list[l]) + ")] not concurrent; "
                            # No closing transition for this half of the diamond.
                            if temp1 == 0:
                                val_null = val_null + "[(" + str(edges_list[i]) + "," + str(edges_list[k]) + ") AND (" + str(edges_list[j]) + ")] not concurrent; "
                    # i's target has no outgoing transition at all.
                    if temp == 0:
                        val_null = val_null + "(" + str(edges_list[i]) + "," + str(edges_list[j]) + ")" + "not concurrent; "

    # Part 2: within overlapping branch pairs, propagate "awareness" along
    # each run and report participants never informed of the choice.
    val_not_null = ""
    for i in range(len(run_branch_unique)-1):
        for j in range(i+1, len(run_branch_unique)):
            if run_branch_unique[i][0] == run_branch_unique[j][0]:
                # Participants of the first transition of each branch.
                list1 = set()
                list2 = set()
                for item in edges_list:
                    if item[0] == run_branch_unique[i][0] and item[2] == run_branch_unique[i][1]:
                        list1.add(item[3])
                        list1.add(item[4])
                    if item[0] == run_branch_unique[j][0] and item[2] == run_branch_unique[j][1]:
                        list2.add(item[3])
                        list2.add(item[4])
                aware_participant_pi_1 = set()
                aware_participant_pi_2 = set()
                not_aware_participant_pi_1 = set()
                not_aware_participant_pi_2 = set()

                # Participants appearing in both first transitions are aware.
                intersection = [item for item in list1 if item in list2]
                for item in intersection:
                    aware_participant_pi_1.add(item)
                    aware_participant_pi_2.add(item)

                for item in list1:
                    if item not in aware_participant_pi_1:
                        not_aware_participant_pi_1.add(item)

                for item in list2:
                    if item not in aware_participant_pi_2:
                        not_aware_participant_pi_2.add(item)

                if aware_participant_pi_1 and aware_participant_pi_2:
                    # Walk the rest of each run: interacting with an aware
                    # participant makes both ends of the transition aware.
                    for k in range(1, len(run_branch_unique[i])-1):
                        for item in edges_list:
                            if item[0] == run_branch_unique[i][k] and item[2] == run_branch_unique[i][k+1]:
                                if item[3] in aware_participant_pi_1 or item[4] in aware_participant_pi_1:
                                    aware_participant_pi_1.add(item[3])
                                    aware_participant_pi_1.add(item[4])
                                else:
                                    not_aware_participant_pi_1.add(item[3])
                                    not_aware_participant_pi_1.add(item[4])
                    for k in range(1, len(run_branch_unique[j])-1):
                        for item in edges_list:
                            if item[0] == run_branch_unique[j][k] and item[2] == run_branch_unique[j][k+1]:
                                if item[3] in aware_participant_pi_2 or item[4] in aware_participant_pi_2:
                                    aware_participant_pi_2.add(item[3])
                                    aware_participant_pi_2.add(item[4])
                                else:
                                    not_aware_participant_pi_2.add(item[3])
                                    not_aware_participant_pi_2.add(item[4])

                # NOTE(review): these assignments OVERWRITE val_not_null
                # rather than appending, so only the last offending pair
                # survives — confirm whether += was intended.
                if not_aware_participant_pi_1:
                    val_not_null = "("+ str(run_branch_unique[i]) + ", " + str(run_branch_unique[j]) +") " + str(not_aware_participant_pi_1) + "AND "
                if not_aware_participant_pi_2:
                    val_not_null = "( " + str(run_branch_unique[i]) + "," + str(run_branch_unique[j]) + ") " + str(not_aware_participant_pi_2) + " AND "

    res = "Result for null value [" + val_null + "] and Result for not null value [" + val_not_null + "]"

    if val_null or val_not_null:
        return res
    else:
        return None
+
+
+# Check edge by edge if the graph respects the first condition, if it doesn't respect it check if at that point
+# at least the second condition holds
+
def well_sequencedness_conditions(ca):
    """Check the first well-sequencedness condition for every pair of
    consecutive transitions of *ca*; when the pair involves four distinct
    participants, fall back to the second condition.

    Returns a "; "-joined diagnostic string, or None when the automaton
    is well-sequenced.
    """
    findings = []
    for first in ca.edges:
        for second in ca.edges:
            if first[2] != second[0]:
                continue
            # Consecutive transitions with no participant in common.
            fully_distinct = (second[3] != first[3] and second[4] != first[4]
                              and second[4] != first[3] and first[4] != second[3])
            if fully_distinct:
                diagnosis = well_sequencedness_second_condition(ca, first, second)
                if diagnosis is not None:
                    findings.append(diagnosis + " ; ")

    if findings:
        return "".join(findings)
    return None
+
+
def well_sequencedness_second_condition(ca, i, j):
    """Second well-sequencedness condition for the consecutive pair (i, j).

    Looks for a commuting "diamond": an alternative first transition k
    (same source as i, labelled like j, different target) followed by a
    transition h that reaches j's target carrying i's label.  Returns None
    when such a diamond exists, otherwise a description of the pair.
    """
    for k in ca.edges:
        if i[2] == k[2] or k[0] != i[0] or j[1] != k[1]:
            continue
        # First half of the diamond found; look for the closing transition.
        for h in ca.edges:
            if h[0] == k[2] and h[2] == j[2] and i[1] == h[1]:
                return None
    return str(i[0] + " |" + str(i[1]) + "| " + str(i[2]) + " |" + str(j[1]) + "| " + str(j[2]))
+
+
def asynchronous_well_sequencedness_first_conditions(ca):
    """Exploratory/debug implementation of the first asynchronous
    well-sequencedness condition: prints candidate pairs of adjacent
    transitions that share a sender, and always returns None (no violation
    is ever reported yet)."""
    print(ca.edges)
    edge = list(ca.edges)
    # Triples (source-of-first, shared state, target-of-second) of adjacent pairs.
    trans_ary = []

    for i in range(len(edge)-1):
        for j in range(i+1, len(edge)):
            # NOTE(review): the body compares edge[i] with edge[i + 1] and
            # never uses edge[j], so the inner loop repeats the same
            # comparison — confirm whether edge[j] was intended.
            if edge[i][0] == edge[i + 1][2]:
                temp = [edge[i + 1][0], edge[i][0], edge[i][0]]
                trans_ary.append(temp)
                if edge[i][3] == edge[i + 1][3] or edge[i][4] == edge[i + 1][3]:
                    print("------------------")
                    print(edge[i])
                    print(edge[i + 1])
                    print("------------------")
            if edge[i][2] == edge[i + 1][0]:
                temp = [edge[i][0], edge[i][2], edge[i + 1][2]]
                trans_ary.append(temp)
                if edge[i][3] == edge[i + 1][3] or edge[i][4] == edge[i + 1][3]:
                    print("------------------")
                    print(edge[i])
                    print(edge[i + 1])
                    print("------------------")

    print(trans_ary)

    return None
+
+
def asynchronous_well_sequencedness_second_conditions(ca):
    """Placeholder for the second asynchronous well-sequencedness condition;
    not implemented yet — always reports no violation (None)."""
    return None