diff --git a/.Rhistory b/.Rhistory
new file mode 100644
index 0000000..e69de29
diff --git a/README.md b/README.md
index c8ac2c7..e8971f3 100644
--- a/README.md
+++ b/README.md
@@ -1,20 +1,5 @@
-RapidMiner Extension Template
+RapidProM ~ RapidMiner Extension
 =============================
 
-A template project for creating a RapidMiner Studio extension.
-
-### Prerequisite
-* Requires Gradle 2.3+ (get it [here](http://gradle.org/installation) or use the Gradle wrapper shipped with this template)
-
-### Getting started
-1. Clone the extension template
-
-2. Change the extension settings in _build.gradle_ (e.g. replace 'Template' by the desired extension name)
-
-3. Initialize the extension project by executing the _initializeExtensionProject_ Gradle task (e.g. via 'gradlew initializeExtensionProject')
-
-4. Add an extension icon by placing an image named "icon.png" in _src/main/resources/META-INF/_.
-
-5. Build and install your extension by executing the _installExtension_ Gradle task
-
-6. Start RapidMiner Studio and check whether your extension has been loaded
+Source code for RapidProM, a RapidMiner Extension.
+Forked from the template project for creating a RapidMiner Studio extension.
diff --git a/build.gradle b/build.gradle
index 8a813b9..f450a41 100644
--- a/build.gradle
+++ b/build.gradle
@@ -1,47 +1,72 @@
 buildscript {
     repositories {
-        jcenter()
+        jcenter()
         maven { url 'https://maven.rapidminer.com/content/groups/public/' }
-    }
+    }
 }
+
 plugins { id 'com.rapidminer.extension' version '0.8.2' }
 
 // Define Maven artifact repositories
 repositories {
     jcenter()
     maven { url 'https://maven.rapidminer.com/content/groups/public/' }
+    ivy {
+        url "https://github.com/rapidprom/rapidprom-libraries/raw/${project.properties["version"]}/prom/"
+        layout "pattern", {
+            artifact "[module]-[revision]/[module]-[revision].[ext]"
+            artifact "[module]-[revision]/[artifact].[ext]"
+            ivy "[module]-[revision]/ivy-[module]-[revision].xml"
+        }
+    }
+    ivy {
+        url "https://github.com/rapidprom/rapidprom-libraries/raw/${project.properties["version"]}/thirdparty/lib/"
+        layout "pattern", {
+            artifact "[module]-[revision]/[module]-[revision].[ext]"
+            ivy "[module]-[revision]/ivy-[module]-[revision].xml"
+        }
+    }
+    ivy {
+        url "https://github.com/rapidprom/rapidprom-libraries/raw/${project.properties["version"]}/thirdparty/resource/"
+        layout "pattern", {
+            artifact "[module]-[revision]/[module]-[revision].[ext]"
+            ivy "[module]-[revision]/ivy-[module]-[revision].xml"
+        }
+    }
 }
 
 extensionConfig {
 
    // The extension name
-   name 'Template'
+   name 'RapidProM'
 
    /*
    * The artifact group which will be used when publishing the extensions Jar
    * and for package customization when initializing the project repository.
    *
-   * It is 'com.rapidminer.extension' by default.
    */
-   //groupId = 'com.rapidminer.extension'
+   groupId = 'org.rapidprom'
 
    /*
    * The extension vendor which will be displayed in the extensions about box
    * and for customizing the license headers when initializing the project repository.
    *
-   * It is 'RapidMiner GmbH' by default.
    */
-   //vendor = "RapidMiner GmbH"
+   vendor = "Eindhoven University of Technology"
 
    /*
    * The vendor homepage which will be displayed in the extensions about box
    * and for customizing the license headers when initializing the project repository.
    *
-   * It is 'www.rapidminer.com' by default.
*/ - //homepage = "www.rapidminer.com" + homepage = "www.rapidprom.org" + // enable shadowJar before rapidminer dependency (otherwise build fails) + shadowJar { + zip64 true + } + // define RapidMiner version and extension dependencies dependencies { rapidminer '7.2.0' @@ -51,5 +76,47 @@ extensionConfig { // Define third party library dependencies dependencies { - //compile 'com.google.guava:guava:18.0' + + compile group:"org.rapidprom", name:"ProM-Framework", version:"29527" + compile "org.rapidprom:AcceptingPetriNet:6.5.62" + compile "org.rapidprom:AlphaMiner:6.5.47" + compile "org.rapidprom:Animation:6.5.50" + compile "org.rapidprom:ApacheUtils:6.5.66" + compile "org.rapidprom:BasicUtils:6.5.79" + compile "org.rapidprom:BPMN:6.5.56" + compile "org.rapidprom:BPMNConversions:6.5.48" + compile "org.rapidprom:CPNet:6.5.84" + compile "org.rapidprom:DataAwareReplayer:6.7.511" + compile "org.rapidprom:DataPetriNets:6.5.291" + compile "org.rapidprom:DottedChart:6.5.17" + compile "org.rapidprom:EvolutionaryTreeMiner:6.5.53" + compile "org.rapidprom:EventStream:6.5.72" + compile "org.rapidprom:FeaturePrediction:6.5.61" + compile "org.rapidprom:Fuzzy:6.5.33" + compile "org.rapidprom:GraphViz:6.7.185" + compile "org.rapidprom:GuideTreeMiner:6.5.18" + compile "org.rapidprom:HeuristicsMiner:6.5.49" + compile "org.rapidprom:HybridILPMiner:6.5.104" + compile "org.rapidprom:InductiveMiner:6.5.153" + compile "org.rapidprom:InductiveVisualMiner:6.5.274" + compile "org.rapidprom:Log:6.5.230" + compile "org.rapidprom:LogDialog:6.5.42" + compile "org.rapidprom:LogProjection:6.5.38" + compile "org.rapidprom:ModelRepair:6.5.12" + compile "org.rapidprom:Murata:6.5.54" + compile "org.rapidprom:PetriNets:6.5.83" + compile "org.rapidprom:PNetAlignmentAnalysis:6.5.35" + compile "org.rapidprom:PNAnalysis:6.5.67" + compile "org.rapidprom:PNetReplayer:6.5.75" + compile "org.rapidprom:PTConversions:6.5.3" + compile "org.rapidprom:PomPomView:6.5.36" + compile "org.rapidprom:SocialNetwork:6.5.35" + compile "org.rapidprom:StreamAlphaMiner:6.5.15" + compile "org.rapidprom:StreamAnalysis:6.5.38" + compile "org.rapidprom:StreamInductiveMiner:6.5.42" + compile "org.rapidprom:TransitionSystems:6.5.54" + compile "org.rapidprom:TSPetriNet:6.5.35" + compile "org.rapidprom:Uma:6.5.46" + compile "org.rapidprom:Woflan:6.5.44" + compile "org.rapidprom:XESLite:6.5.168" } diff --git a/changes/CHANGES_4.0.000.txt b/changes/CHANGES_4.0.000.txt new file mode 100644 index 0000000..04096e7 --- /dev/null +++ b/changes/CHANGES_4.0.000.txt @@ -0,0 +1,4 @@ +Changes in 1.0.0 +---------------- +* Extension release + diff --git a/config/HEADER b/config/HEADER new file mode 100644 index 0000000..bc083ef --- /dev/null +++ b/config/HEADER @@ -0,0 +1,20 @@ +Eindhoven University of Technology + +Copyright (C) 2016-${year} by Eindhoven University of Technology and the contributors + +Complete list of developers available at our web site: + + www.rapidprom.org + +This program is free software: you can redistribute it and/or modify +it under the terms of the GNU Affero General Public License as published by +the Free Software Foundation, either version 3 of the License, or +(at your option) any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU Affero General Public License for more details. + +You should have received a copy of the GNU Affero General Public License +along with this program. 
If not, see http://www.gnu.org/licenses/. diff --git a/gradle.properties b/gradle.properties index beb72cc..7e62c67 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1 +1 @@ -version=1.0.0 \ No newline at end of file +version=4.0.0 \ No newline at end of file diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 12793ae..d32b8e6 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ -#Thu Apr 23 10:20:58 CEST 2015 +#Sun Aug 07 13:24:35 CEST 2016 distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-2.3-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-2.6-bin.zip diff --git a/gradlew.bat b/gradlew.bat index 8a0b282..aec9973 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -1,90 +1,90 @@ -@if "%DEBUG%" == "" @echo off -@rem ########################################################################## -@rem -@rem Gradle startup script for Windows -@rem -@rem ########################################################################## - -@rem Set local scope for the variables with windows NT shell -if "%OS%"=="Windows_NT" setlocal - -@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -set DEFAULT_JVM_OPTS= - -set DIRNAME=%~dp0 -if "%DIRNAME%" == "" set DIRNAME=. -set APP_BASE_NAME=%~n0 -set APP_HOME=%DIRNAME% - -@rem Find java.exe -if defined JAVA_HOME goto findJavaFromJavaHome - -set JAVA_EXE=java.exe -%JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto init - -echo. -echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. - -goto fail - -:findJavaFromJavaHome -set JAVA_HOME=%JAVA_HOME:"=% -set JAVA_EXE=%JAVA_HOME%/bin/java.exe - -if exist "%JAVA_EXE%" goto init - -echo. -echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. - -goto fail - -:init -@rem Get command-line arguments, handling Windowz variants - -if not "%OS%" == "Windows_NT" goto win9xME_args -if "%@eval[2+2]" == "4" goto 4NT_args - -:win9xME_args -@rem Slurp the command line arguments. -set CMD_LINE_ARGS= -set _SKIP=2 - -:win9xME_args_slurp -if "x%~1" == "x" goto execute - -set CMD_LINE_ARGS=%* -goto execute - -:4NT_args -@rem Get arguments from the 4NT Shell from JP Software -set CMD_LINE_ARGS=%$ - -:execute -@rem Setup the command line - -set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar - -@rem Execute Gradle -"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% - -:end -@rem End local scope for the variables with windows NT shell -if "%ERRORLEVEL%"=="0" goto mainEnd - -:fail -rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of -rem the _cmd.exe /c_ return code! 
-if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 -exit /b 1 - -:mainEnd -if "%OS%"=="Windows_NT" endlocal - -:omega +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS= + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto init + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto init + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:init +@rem Get command-line arguments, handling Windowz variants + +if not "%OS%" == "Windows_NT" goto win9xME_args +if "%@eval[2+2]" == "4" goto 4NT_args + +:win9xME_args +@rem Slurp the command line arguments. +set CMD_LINE_ARGS= +set _SKIP=2 + +:win9xME_args_slurp +if "x%~1" == "x" goto execute + +set CMD_LINE_ARGS=%* +goto execute + +:4NT_args +@rem Get arguments from the 4NT Shell from JP Software +set CMD_LINE_ARGS=%$ + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/licenses/README b/licenses/README new file mode 100644 index 0000000..410d86f --- /dev/null +++ b/licenses/README @@ -0,0 +1 @@ +Put 3rd party licenses into this folder. diff --git a/src/main/java/org/rapidprom/PluginInitRapidProM.java b/src/main/java/org/rapidprom/PluginInitRapidProM.java new file mode 100644 index 0000000..f3b8b3b --- /dev/null +++ b/src/main/java/org/rapidprom/PluginInitRapidProM.java @@ -0,0 +1,93 @@ +/* + * Eindhoven University of Technology + * + * Copyright (C) 2016-2016 by Eindhoven University of Technology and the contributors + * + * Complete list of developers available at our web site: + * + * www.rapidprom.org + * + * This program is free software: you can redistribute it and/or modify it under the terms of the + * GNU Affero General Public License as published by the Free Software Foundation, either version 3 + * of the License, or (at your option) any later version. 
+ * + * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without + * even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License along with this program. + * If not, see http://www.gnu.org/licenses/. + */ +package org.rapidprom; + +import java.net.MalformedURLException; + +import org.processmining.framework.packages.PackageDescriptor; +import org.processmining.framework.plugin.PluginManager; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.external.connectors.prom.RapidProMPackageDescriptor; +import org.rapidprom.external.connectors.prom.RapidProMPluginContext; +import org.rapidprom.external.connectors.prom.RapidProMPluginManager; +import org.rapidprom.util.RapidMinerUtils; + +import com.rapidminer.gui.MainFrame; +import com.rapidminer.tools.plugin.Plugin; + +/** + * This class provides hooks for initialization and its methods are called via + * reflection by RapidMiner Studio. Without this class and its predefined + * methods, an extension will not be loaded. + * + * @author REPLACEME + */ +public final class PluginInitRapidProM { + + private PluginInitRapidProM() { + // Utility class constructor + } + + /** + * This method will be called directly after the extension is initialized. + * This is the first hook during start up. No initialization of the + * operators or renderers has taken place when this is called. + */ + public static void initPlugin() { + PluginManager promPluginManager = new RapidProMPluginManager(RapidProMPluginContext.class); + PackageDescriptor packageDescriptor = new RapidProMPackageDescriptor(); + Plugin rapidMinerPluginEntry = RapidMinerUtils.getRapidProMPlugin(); + try { + promPluginManager.register(rapidMinerPluginEntry.getFile().toURI().toURL(), packageDescriptor, + rapidMinerPluginEntry.getClassLoader()); + } catch (MalformedURLException e) { + e.printStackTrace(); + } + RapidProMGlobalContext.initialize(promPluginManager); + } + + /** + * This method is called during start up as the second hook. It is called + * before the gui of the mainframe is created. The Mainframe is given to + * adapt the gui. The operators and renderers have been registered in the + * meanwhile. + * + * @param mainframe + * the RapidMiner Studio {@link MainFrame}. + */ + public static void initGui(MainFrame mainframe) { + } + + /** + * The last hook before the splash screen is closed. Third in the row. + */ + public static void initFinalChecks() { + } + + /** + * Will be called as fourth method, directly before the UpdateManager is + * used for checking updates. Location for exchanging the UpdateManager. The + * name of this method unfortunately is a result of a historical typo, so + * it's a little bit misleading. 
+ */ + public static void initPluginManager() { + } +} diff --git a/src/main/java/org/rapidprom/external/connectors/prom/RapidProMGlobalContext.java b/src/main/java/org/rapidprom/external/connectors/prom/RapidProMGlobalContext.java new file mode 100644 index 0000000..36b2a6f --- /dev/null +++ b/src/main/java/org/rapidprom/external/connectors/prom/RapidProMGlobalContext.java @@ -0,0 +1,117 @@ +package org.rapidprom.external.connectors.prom; + +import java.lang.annotation.Annotation; + +import org.processmining.framework.connections.ConnectionManager; +import org.processmining.framework.connections.impl.ConnectionManagerImpl; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.framework.plugin.PluginManager; +import org.processmining.framework.plugin.ProMFuture; +import org.processmining.framework.plugin.annotations.Plugin; +import org.processmining.framework.plugin.impl.AbstractGlobalContext; + +public final class RapidProMGlobalContext extends AbstractGlobalContext { + + private static boolean initialized = false; + private static RapidProMGlobalContext instance = null; + public static RapidProMGlobalContext initialize(PluginManager pluginManager) { + instance = new RapidProMGlobalContext(pluginManager); + initialized = true; + return instance; + } + + public static RapidProMGlobalContext instance() { + assert (initialized); + return instance; + } + + private final ConnectionManager connMgr; + + private final PluginContext context = new RapidProMPluginContext(this, "RapidProM root plugin context"); + + private final PluginManager pluginManager; + + private RapidProMGlobalContext(PluginManager pluginManager) { + this.pluginManager = pluginManager; + this.connMgr = new ConnectionManagerImpl(pluginManager); + } + + @Override + public ConnectionManager getConnectionManager() { + return connMgr; + } + + private ProMFuture[] createProMFutures(Plugin pluginAnn) { + ProMFuture[] futures = new ProMFuture[pluginAnn.returnTypes().length]; + for (int i = 0; i < pluginAnn.returnTypes().length; i++) { + futures[i] = new ProMFuture(pluginAnn.returnTypes()[i], pluginAnn.returnLabels()[i]) { + @Override + protected Object doInBackground() throws Exception { + // NOP + return null; + } + }; + } + return futures; + } + + @SuppressWarnings("unchecked") + private T findAnnotation(Annotation[] annotations, Class clazz) { + T result = null; + for (Annotation a : annotations) { + if (a.annotationType().equals(clazz)) { + result = (T) a; + break; + } + } + return result; + } + + /** + * This method prepares a PluginContext object, which is a child object of + * the PluginContext provided by the "PluginContextManager". Basically this + * method mimics some of the internal workings of the ProM framework, e.g. + * setting the future result objects. 
+ * + * @param classContainingProMPlugin + * the class that contains the ProM plugin code + * @return + */ + public PluginContext getFutureResultAwarePluginContext(Class classContainingProMPlugin) { + assert (initialized); + final PluginContext result = instance.getMainPluginContext() + .createChildContext("RapidProMPluginContext_" + System.currentTimeMillis()); + Plugin pluginAnn = findAnnotation(classContainingProMPlugin.getAnnotations(), Plugin.class); + RapidProMPluginExecutionResultImpl per = new RapidProMPluginExecutionResultImpl(pluginAnn.returnTypes(), + pluginAnn.returnLabels(), RapidProMGlobalContext.instance().getPluginManager() + .getPlugin(classContainingProMPlugin.getCanonicalName())); + ProMFuture[] futures = createProMFutures(pluginAnn); + per.setRapidProMFuture(futures); + result.setFuture(per); + return result; + } + + @Override + protected PluginContext getMainPluginContext() { + return context; + } + + public PluginContext getPluginContext() { + return getMainPluginContext(); + } + + @Override + public Class getPluginContextType() { + return RapidProMPluginContext.class; + } + + @Override + public PluginManager getPluginManager() { + return pluginManager; + } + + public RapidProMPluginContext getRapidProMPluginContext() { + return (RapidProMPluginContext) getMainPluginContext(); + } + +} diff --git a/src/main/java/org/rapidprom/external/connectors/prom/RapidProMPackageDescriptor.java b/src/main/java/org/rapidprom/external/connectors/prom/RapidProMPackageDescriptor.java new file mode 100644 index 0000000..5edcbb0 --- /dev/null +++ b/src/main/java/org/rapidprom/external/connectors/prom/RapidProMPackageDescriptor.java @@ -0,0 +1,42 @@ +package org.rapidprom.external.connectors.prom; + +import java.util.ArrayList; +import java.util.List; + +import org.processmining.framework.packages.PackageDescriptor; + +/** + * Placeholder class for RapidProM. This class defines RapidProM as a "prom" + * package. This is in fact not the case, though to properly load plugins from + * the source code, the corresponding PluginManager class needs a + * PackageDescriptor to register the plugin to. + * + * @author svzelst + * + */ +public class RapidProMPackageDescriptor extends PackageDescriptor { + + private static final String NAME = "rapidprom"; + //TODO: read this from the gradle.properties file + private static final String VERSION = "4.0.0"; + //TODO: use a util function that returns the appropriate value here + private static final OS OS_VAR = OS.ALL; + private static final String DESCRIPTION = "Placeholder package for RapidProM"; + private static final String ORGANISATION = "Eindhoven University of Technology"; + private static final String AUTHOR = "A. Bolt, S.J. van Zelst"; + private static final String MAINTAINER = "A. Bolt, S.J. 
van Zelst"; + private static final String LICENSE = "AGPLv3"; + private static final String URL = "http://rapidprom.org"; + private static final String LOGO_URL = "http://www.promtools.org/lib/exe/fetch.php?w=300&tok=d1d68b&media=rapidprom:logo.png"; + private static final String KEYWORDS = ""; + private static final boolean AUTO_INSTALLED = true; + private static final boolean HAS_PLUGINS = true; + private static final List DEPENDENCIES = new ArrayList<>(); + private static final List CONFLICTS = new ArrayList<>(); + + public RapidProMPackageDescriptor() { + super(NAME, VERSION, OS_VAR, DESCRIPTION, ORGANISATION, AUTHOR, MAINTAINER, LICENSE, URL, LOGO_URL, KEYWORDS, + AUTO_INSTALLED, HAS_PLUGINS, DEPENDENCIES, CONFLICTS); + } + +} diff --git a/src/main/java/org/rapidprom/external/connectors/prom/RapidProMPluginContext.java b/src/main/java/org/rapidprom/external/connectors/prom/RapidProMPluginContext.java new file mode 100644 index 0000000..cbcc3c3 --- /dev/null +++ b/src/main/java/org/rapidprom/external/connectors/prom/RapidProMPluginContext.java @@ -0,0 +1,46 @@ +package org.rapidprom.external.connectors.prom; + +import java.util.concurrent.Executor; +import java.util.concurrent.Executors; + +import org.processmining.framework.plugin.GlobalContext; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.framework.plugin.impl.AbstractPluginContext; + +public class RapidProMPluginContext extends AbstractPluginContext { + + private final Executor executor; + + public RapidProMPluginContext(GlobalContext context, String label) { + super(context, label); + executor = Executors.newCachedThreadPool(); + } + + public RapidProMPluginContext(RapidProMPluginContext context, String label) { + super(context, label); + if (context == null) { + executor = Executors.newCachedThreadPool(); + } else { + executor = context.getExecutor(); + } + } + + @Override + public Executor getExecutor() { + return executor; + } + + @Override + protected PluginContext createTypedChildContext(String label) { + return new RapidProMPluginContext(this, label); + } + + @Override + public void clear() { + for (PluginContext c : getChildContexts()) { + c.clear(); + } + super.clear(); + } + +} diff --git a/src/main/java/org/rapidprom/external/connectors/prom/RapidProMPluginDescriptor.java b/src/main/java/org/rapidprom/external/connectors/prom/RapidProMPluginDescriptor.java new file mode 100644 index 0000000..0bad3ae --- /dev/null +++ b/src/main/java/org/rapidprom/external/connectors/prom/RapidProMPluginDescriptor.java @@ -0,0 +1,501 @@ +package org.rapidprom.external.connectors.prom; + +import java.lang.annotation.Annotation; +import java.lang.reflect.AnnotatedElement; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.processmining.framework.boot.Boot; +import org.processmining.framework.packages.PackageDescriptor; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.framework.plugin.PluginDescriptor; +import org.processmining.framework.plugin.PluginDescriptorID; +import org.processmining.framework.plugin.annotations.Plugin; +import org.processmining.framework.plugin.annotations.PluginCategory; +import org.processmining.framework.plugin.annotations.PluginLevel; +import org.processmining.framework.plugin.annotations.PluginQuality; +import 
org.processmining.framework.plugin.annotations.PluginVariant; +import org.processmining.framework.plugin.impl.AbstractPluginDescriptor; +import org.processmining.framework.plugin.impl.PluginDescriptorIDImpl; + + +public class RapidProMPluginDescriptor extends AbstractPluginDescriptor { + + private final AnnotatedElement annotatedElement; + // This list contains either Class or + // Class + private final PluginDescriptorID id; + private final List>> parameterTypes; + private List parameterNames; + private final List> returnTypes; + private final List methods; + private final List returnNames; + private final String name; + // public Class contextType = null; + private final Class declaringClass; + private final PackageDescriptor pack; + + private String help; + private String[] keywords; + private PluginCategory[] categories; + private PluginQuality quality; + private PluginLevel level; + + public RapidProMPluginDescriptor(Method method, PackageDescriptor pack) throws Exception { + assert (method != null); + assert (method.isAnnotationPresent(Plugin.class)); + this.pack = pack; + id = new PluginDescriptorIDImpl(method); + parameterTypes = new ArrayList>>(1); + ArrayList> list = new ArrayList>(method.getParameterTypes().length - 1); + parameterTypes.add(list); + declaringClass = method.getDeclaringClass(); + + for (Class par : method.getParameterTypes()) { + if (!PluginContext.class.isAssignableFrom(par)) { + list.add(par); + } + } + methods = new ArrayList(1); + methods.add(method); + + annotatedElement = method; + name = method.getAnnotation(Plugin.class).name(); + help = method.getAnnotation(Plugin.class).help(); + keywords = method.getAnnotation(Plugin.class).keywords(); + categories = method.getAnnotation(Plugin.class).categories(); + quality = method.getAnnotation(Plugin.class).quality(); + level = method.getAnnotation(Plugin.class).level(); + // System.out.println("RapidProMPluginDescriptorImpl,\"" + name + "\",\"" + (pack == null ? 
"" : pack.getName()) + "\""); + + parameterNames = Arrays.asList(getAnnotation(Plugin.class).parameterLabels()); + if (parameterNames.size() == 0) { + parameterNames = new ArrayList(parameterTypes.size()); + for (Class par : list) { + parameterNames.add(par.getSimpleName()); + } + } + + returnTypes = Arrays.asList(getAnnotation(Plugin.class).returnTypes()); + assert (getAnnotation(Plugin.class).returnLabels().length == returnTypes.size()); + returnNames = Arrays.asList(getAnnotation(Plugin.class).returnLabels()); + } + + public RapidProMPluginDescriptor(Class type, Class acceptedContext, PackageDescriptor pack) + throws Exception { + assert (type != null); + assert (type.isAnnotationPresent(Plugin.class)); + + this.pack = pack; + id = new PluginDescriptorIDImpl(type); + annotatedElement = type; + declaringClass = type; + methods = new ArrayList(); + + String[] pls = getAnnotation(Plugin.class).parameterLabels(); + + int max = -1; + for (Method method : type.getMethods()) { + if (method.isAnnotationPresent(PluginVariant.class)) { + if (method.getParameterTypes()[0].isAssignableFrom(acceptedContext)) { + methods.add(method); + } + + int[] requiredPar = method.getAnnotation(PluginVariant.class).requiredParameterLabels(); + for (int i : requiredPar) { + if (i > max) { + max = i; + } + } + } + } + max++; + + parameterTypes = new ArrayList>>(methods.size()); + + for (Method method : methods) { + int[] requiredPar = method.getAnnotation(PluginVariant.class).requiredParameterLabels(); + ArrayList> list = new ArrayList>(method.getParameterTypes().length - 1); + for (int i = 0; i < requiredPar.length; i++) { + list.add(method.getParameterTypes()[i + 1]); + } + parameterTypes.add(list); + } + + name = type.getAnnotation(Plugin.class).name(); + help = type.getAnnotation(Plugin.class).help(); + keywords = type.getAnnotation(Plugin.class).keywords(); + categories = type.getAnnotation(Plugin.class).categories(); + quality = type.getAnnotation(Plugin.class).quality(); + level = type.getAnnotation(Plugin.class).level(); + // System.out.println("RapidProMPluginDescriptorImpl,\"" + name + "\",\"" + (pack == null ? "" : pack.getName()) + "\""); + + // There are either no parameters, or all parameters are required at least once + // in all variants, ignoring the specific context. + // + if ((pls.length > 0) && (pls.length < max)) { + String message = "Plugin " + name + + " could not be added as a plugin. 
There is at lease one declared parameter type," + + " which is not used by any of the plugin's variants."; + throw new AssertionError(message); + } + + parameterNames = Arrays.asList(pls); + + returnTypes = Arrays.asList(type.getAnnotation(Plugin.class).returnTypes()); + assert (getAnnotation(Plugin.class).returnLabels().length == returnTypes.size()); + returnNames = Arrays.asList(type.getAnnotation(Plugin.class).returnLabels()); + } + + public RapidProMPluginDescriptor(String className, String name, Class[] parTypes, PackageDescriptor pack) throws Exception { + this(Class.forName(className).getMethod(name, parTypes), pack); + } + + public PackageDescriptor getPackage() { + return pack; + } + + public int getMostSignificantResult() { + return getAnnotation(Plugin.class).mostSignificantResult(); + } + + AnnotatedElement getAnnotatedElement() { + return annotatedElement; + } + + /* + * (non-Javadoc) + * + * @see + * org.processmining.framework.plugin.PluginDescriptor#hasAnnotation(java + * .lang.Class) + */ + public boolean hasAnnotation(Class annotationClass) { + return getAnnotatedElement().isAnnotationPresent(annotationClass); + } + + /* + * (non-Javadoc) + * + * @see + * org.processmining.framework.plugin.PluginDescriptor#getAnnotation(java + * .lang.Class) + */ + public T getAnnotation(Class annotationClass) { + return getAnnotatedElement().getAnnotation(annotationClass); + } + + /* + * (non-Javadoc) + * + * @see org.processmining.framework.plugin.PluginDescriptor#getName() + */ + public String getName() { + return name; + } + + /* + * (non-Javadoc) + * + * @see org.processmining.framework.plugin.PluginDescriptor#hashCode() + */ + @Override + public int hashCode() { + return id.hashCode(); + } + + /* + * (non-Javadoc) + * + * @see + * org.processmining.framework.plugin.PluginDescriptor#equals(java.lang. + * Object) + */ + @Override + public boolean equals(Object other) { + if (!(other instanceof RapidProMPluginDescriptor)) { + return false; + } + RapidProMPluginDescriptor otherDesc = (RapidProMPluginDescriptor) other; + return id.equals(otherDesc.id); + + // note: this does not compare whether the plugins have been loaded by + // the same class loader + // return getMethod().getDeclaringClass().getName().equals( + // otherDesc.getMethod().getDeclaringClass().getName()) + // && getMethod().getName() + // .equals(otherDesc.getMethod().getName()); + } + + /* + * (non-Javadoc) + * + * @see org.processmining.framework.plugin.PluginDescriptor#toString() + */ + @Override + public String toString() { + return getName(); + } + + protected Object[] execute(PluginContext context, int methodIndex, Object... 
allArgs) throws Exception { + Method method = getMethod(methodIndex); + if (returnTypes.size() > 1) { // method.getReturnType().isArray()) + // { + Object[] result; + if ((method.getModifiers() & Modifier.STATIC) == Modifier.STATIC) { + result = (Object[]) method.invoke(null, allArgs); + } else { + result = (Object[]) method.invoke(declaringClass.newInstance(), allArgs); + } + + return result; + } else { + Object result; + if ((method.getModifiers() & Modifier.STATIC) == Modifier.STATIC) { + result = method.invoke(null, allArgs); + } else { + result = method.invoke(declaringClass.newInstance(), allArgs); + } + + return new Object[] { result }; + } + } + + protected Method findMethod(Object[] allArgs) { + for (Method m : ((Class) annotatedElement).getMethods()) { + if (m.isAnnotationPresent(PluginVariant.class)) { + boolean match = (m.getParameterTypes().length == allArgs.length); + for (int i = 0; (i < allArgs.length) && match; i++) { + Class type = m.getParameterTypes()[i]; + if (type.isArray()) { + if (allArgs[i].getClass().isArray()) { + for (Object o : (Object[]) allArgs[i]) { + match &= type.getComponentType().isAssignableFrom(o.getClass()); + } + } else { + match = false; + } + } else { + match &= type.isAssignableFrom(allArgs[i].getClass()); + } + } + if (match) { + return m; + } + } + } + return null; + } + + /** + * returns the labels of the objects returned if this plugin is invoked, in + * the order in which they are returned + * + * @return + */ + public List> getReturnTypes() { + return Collections.unmodifiableList(returnTypes); + } + + public List> getParameterTypes(int methodIndex) { + return Collections.unmodifiableList(getParameterTypes().get(methodIndex)); + } + + public Class getPluginParameterType(int methodIndex, int parameterIndex) { + if (methods.get(methodIndex).isAnnotationPresent(PluginVariant.class)) { + int[] req = methods.get(methodIndex).getAnnotation(PluginVariant.class).requiredParameterLabels(); + for (int j = 0; j < req.length; j++) { + if (req[j] == parameterIndex) { + return parameterTypes.get(methodIndex).get(j); + } + } + return null; + } else { + return parameterTypes.get(methodIndex).get(parameterIndex); + } + } + + public List>> getParameterTypes() { + return Collections.unmodifiableList(parameterTypes); + } + + public List getParameterNames() { + return Collections.unmodifiableList(parameterNames); + } + + public Set> getTypesAtParameterIndex(int index) { + HashSet> result = new HashSet>(); + for (int j = 0; j < methods.size(); j++) { + Class name = getPluginParameterType(j, index); + if (name != null) { + result.add(name); + } + } + + return result; + } + + /** + * Return the number of methods in this plugin. 
+ * + * @return + */ + public int getNumberOfMethods() { + return methods.size(); + } + + public List getParameterNames(int methodIndex) { + if (methods.get(methodIndex).isAnnotationPresent(PluginVariant.class)) { + ArrayList list = new ArrayList(); + int[] req = methods.get(methodIndex).getAnnotation(PluginVariant.class).requiredParameterLabels(); + for (int i : req) { + list.add(parameterNames.get(i)); + } + return Collections.unmodifiableList(list); + } else { + return Collections.unmodifiableList(parameterNames); + } + } + + public String getPluginParameterName(int methodIndex, int parameterIndex) { + if (methods.get(methodIndex).isAnnotationPresent(PluginVariant.class)) { + int[] req = methods.get(methodIndex).getAnnotation(PluginVariant.class).requiredParameterLabels(); + for (int j = 0; j < req.length; j++) { + if (req[j] == parameterIndex) { + return parameterNames.get(parameterIndex); + } + } + return null; + } else { + return parameterNames.get(parameterIndex); + } + } + + /** + * returns the types of the objects returned if this plugin is invoked, in + * the order in which they are returned + * + * @return + */ + public List getReturnNames() { + return Collections.unmodifiableList(returnNames); + } + + public PluginDescriptorID getID() { + return id; + } + + @SuppressWarnings("unchecked") + public Class getContextType(int methodIndex) { + return (Class) methods.get(methodIndex).getParameterTypes()[0]; + } + + public int compareTo(PluginDescriptor plugin) { + if (plugin.equals(this)) { + return 0; + } + + int c = getName().toLowerCase().compareTo(plugin.getName().toLowerCase()); + if (c == 0) { + c = id.compareTo(plugin.getID()); + } + return c; + } + + Method getMethod(int i) { + return methods.get(i); + } + + public int getIndexInParameterNames(int methodIndex, int methodParameterIndex) { + if (methods.get(methodIndex).isAnnotationPresent(PluginVariant.class)) { + int[] req = methods.get(methodIndex).getAnnotation(PluginVariant.class).requiredParameterLabels(); + return req[methodParameterIndex]; + } else { + return methodParameterIndex; + } + } + + public int getIndexInMethod(int methodIndex, int parameterIndex) { + if (methods.get(methodIndex).isAnnotationPresent(PluginVariant.class)) { + int[] req = methods.get(methodIndex).getAnnotation(PluginVariant.class).requiredParameterLabels(); + for (int i = 0; i < req.length; i++) { + if (req[i] == parameterIndex) { + return i; + } + } + return -1; + } else { + return parameterIndex; + } + } + + public String getMethodLabel(int methodIndex) { + if (methods.get(methodIndex).isAnnotationPresent(PluginVariant.class) + && !methods.get(methodIndex).getAnnotation(PluginVariant.class).variantLabel().equals("")) { + return methods.get(methodIndex).getAnnotation(PluginVariant.class).variantLabel(); + } else { + return name; + } + } + + public boolean isUserAccessible() { + return getAnnotation(Plugin.class).userAccessible(); + } + + public boolean handlesCancel() { + return getAnnotation(Plugin.class).handlesCancel(); + } + + public T getAnnotation(Class annotationClass, int methodIndex) { + return getMethod(methodIndex).getAnnotation(annotationClass); + } + + public boolean hasAnnotation(Class annotationClass, int methodIndex) { + return getMethod(methodIndex).getAnnotation(annotationClass) != null; + } + + /* + * (non-Javadoc) + * + * @see org.processmining.framework.plugin.PluginDescriptor#getName() + */ + public String getHelp() { + return help; + } + + public String getMethodHelp(int methodIndex) { + if 
(methods.get(methodIndex).isAnnotationPresent(PluginVariant.class) + && !methods.get(methodIndex).getAnnotation(PluginVariant.class).help().equals("")) { + return methods.get(methodIndex).getAnnotation(PluginVariant.class).help(); + } else { + return name; + } + } + + public String[] getKeywords() { + return this.keywords; + } + + public String[] getCategories() { + String[] categoryLabels = new String[this.categories.length]; + for(int i = 0; i < this.categories.length; i++){ + categoryLabels[i] = this.categories[i].getName();//.getName(); + } + return categoryLabels; + } + + public boolean meetsQualityThreshold() { + return Boot.PLUGIN_QUALITY_THRESHOLD.getValue() <= quality.getValue(); + } + + public boolean meetsLevelThreshold() { + return Boot.PLUGIN_LEVEL_THRESHOLD.getValue() <= level.getValue(); + } + +} diff --git a/src/main/java/org/rapidprom/external/connectors/prom/RapidProMPluginExecutionResultImpl.java b/src/main/java/org/rapidprom/external/connectors/prom/RapidProMPluginExecutionResultImpl.java new file mode 100644 index 0000000..af21fa3 --- /dev/null +++ b/src/main/java/org/rapidprom/external/connectors/prom/RapidProMPluginExecutionResultImpl.java @@ -0,0 +1,43 @@ +package org.rapidprom.external.connectors.prom; + +import org.processmining.framework.plugin.IncorrectReturnTypeException; +import org.processmining.framework.plugin.PluginDescriptor; +import org.processmining.framework.plugin.ProMFuture; +import org.processmining.framework.plugin.events.NameChangeListener; +import org.processmining.framework.plugin.impl.PluginExecutionResultImpl; +import org.processmining.framework.util.Cast; + +public class RapidProMPluginExecutionResultImpl extends PluginExecutionResultImpl { + + public RapidProMPluginExecutionResultImpl(Class[] returnTypes, String[] returnNames, PluginDescriptor plugin) { + super(returnTypes, returnNames, plugin); + } + + public void setRapidProMFuture(Object[] objects) throws IncorrectReturnTypeException { + Object[] objectArr = getResults(); + for (int i =0; i < objectArr.length; i++) { + objectArr[i] = objects[i]; + if (!getType(i).equals(void.class)) { + if (objectArr[i] == null) { + continue; + } + Class type = objectArr[i].getClass(); + if (objectArr[i] instanceof ProMFuture) { + type = Cast.>cast(objects[i]).getReturnType(); + } + if (!getType(i).isAssignableFrom(type)) { + throw new IncorrectReturnTypeException(getPlugin().getName(), i, getType(i), objectArr[i].getClass()); + } + if (objects[i] instanceof ProMFuture) { + final int index = i; + ((ProMFuture) objects[i]).getNameChangeListeners().add(new NameChangeListener() { + public void nameChanged(String newName) { + getResultNames()[index] = newName; + } + }); + } + } + } + } + +} diff --git a/src/main/java/org/rapidprom/external/connectors/prom/RapidProMPluginManager.java b/src/main/java/org/rapidprom/external/connectors/prom/RapidProMPluginManager.java new file mode 100644 index 0000000..25dfe44 --- /dev/null +++ b/src/main/java/org/rapidprom/external/connectors/prom/RapidProMPluginManager.java @@ -0,0 +1,817 @@ +package org.rapidprom.external.connectors.prom; + +import java.io.BufferedInputStream; +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileFilter; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.lang.annotation.Annotation; +import java.lang.reflect.AnnotatedElement; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; +import java.net.MalformedURLException; 
+import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.net.URLClassLoader; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; +import java.util.jar.JarEntry; +import java.util.jar.JarFile; +import java.util.jar.JarInputStream; + +import javax.swing.event.EventListenerList; +import javax.xml.parsers.ParserConfigurationException; + +import org.apache.commons.io.IOUtils; +import org.processmining.framework.boot.Boot; +import org.processmining.framework.boot.Boot.Level; +import org.processmining.framework.packages.PackageDescriptor; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.framework.plugin.PluginDescriptor; +import org.processmining.framework.plugin.PluginDescriptorID; +import org.processmining.framework.plugin.PluginManager; +import org.processmining.framework.plugin.PluginParameterBinding; +import org.processmining.framework.plugin.annotations.Bootable; +import org.processmining.framework.plugin.annotations.KeepInProMCache; +import org.processmining.framework.plugin.annotations.Plugin; +import org.processmining.framework.plugin.annotations.PluginVariant; +import org.processmining.framework.plugin.impl.DependsOnUnknownException; +import org.processmining.framework.plugin.impl.MacroPluginDescriptorImpl; +import org.processmining.framework.util.Pair; +import org.processmining.framework.util.PathHacker; +import org.processmining.framework.util.collection.ComparablePair; +import org.rapidprom.util.LpSolveUtils; +import org.w3c.dom.DOMException; +import org.xml.sax.SAXException; + +import com.rapidminer.tools.SystemInfoUtilities; + +public class RapidProMPluginManager implements PluginManager { + + private static final char PACKAGE_SEPARATOR = '.'; + private static final char URL_SEPARATOR = '/'; + private static final char INNER_CLASS_MARKER = '$'; + + private static final String JAVA_TMP_DIR = System.getProperty("java.io.tmpdir"); + + private static final String PROM_NS = "org/processmining"; + + private final Set> knownObjectTypes = new HashSet>(); + + private final Map plugins = new HashMap(); + private final Map, Set> annotation2plugins = new HashMap, Set>(); + private final EventListenerList pluginManagerListeners = new EventListenerList(); + private final Class pluginContextType; + + private final Map, Set>> annotatedClasses = new HashMap, Set>>(); + + public RapidProMPluginManager(Class pluginContextType) { + this.pluginContextType = pluginContextType; + } + + /* + * (non-Javadoc) + * + * @see + * org.processmining.framework.plugin.PluginManager#addErrorListener(org + * .processmining.framework.plugin.PluginManagerImpl.ErrorListener) + */ + public void addListener(PluginManagerListener listener) { + pluginManagerListeners.add(PluginManagerListener.class, listener); + } + + public Set> getKnownClassesAnnotatedWith(Class annotationType) { + Set> set = annotatedClasses.get(annotationType); + if (set == null) { + return Collections.emptySet(); + } else { + return Collections.unmodifiableSet(set); + } + } + + /* + * (non-Javadoc) + * + * @see + * org.processmining.framework.plugin.PluginManager#removeErrorListener( + * org.processmining.framework.plugin.PluginManagerImpl.ErrorListener) + */ + public void 
removeListener(PluginManagerListener listener) { + pluginManagerListeners.remove(PluginManagerListener.class, listener); + } + + protected void fireError(URL url, Throwable t, String className) { + for (PluginManagerListener listener : pluginManagerListeners.getListeners(PluginManagerListener.class)) { + listener.error(url, t, className); + } + + } + + protected void firePluginAdded(PluginDescriptor plugin, Collection> types) { + for (PluginManagerListener listener : pluginManagerListeners.getListeners(PluginManagerListener.class)) { + listener.newPlugin(plugin, types); + } + } + + /* + * (non-Javadoc) + * + * @see + * org.processmining.framework.plugin.PluginManager#register(java.net.URL) + */ + public void register(URL url, PackageDescriptor pack) { + URLClassLoader loader = new URLClassLoader(new URL[] { url }); + register(url, pack, loader); + } + + public void register(URL url, PackageDescriptor pack, ClassLoader loader) { + if (url.getProtocol().equals(FILE_PROTOCOL)) { + try { + File file = new File(url.toURI()); + + if (file.isDirectory()) { + scanDirectory(file, pack, loader); + return; + } + if (file.getAbsolutePath().endsWith(PluginManager.MCR_EXTENSION)) { + try { + loadClassFromMacro(url.toURI(), pack); + } catch (DependsOnUnknownException e) { + // Can't add this URL. + } + } + if (file.getAbsolutePath().endsWith(JAR_EXTENSION)) { + scanUrl(url, pack, loader); + } + } catch (URISyntaxException e) { + fireError(url, e, null); + } + } else { + scanUrl(url, pack, loader); + } + } + + private void scanDirectory(File file, PackageDescriptor pack, ClassLoader loader) { + try { + URL url = file.toURI().toURL(); + + Queue todo = new LinkedList(); + FileFilter filter = new FileFilter() { + public boolean accept(File pathname) { + return pathname.isDirectory() || pathname.getPath().endsWith(CLASS_EXTENSION) + || pathname.getPath().endsWith(MCR_EXTENSION) || pathname.getPath().endsWith(JAR_EXTENSION); + + } + }; + + todo.add(file); + while (!todo.isEmpty()) { + File dir = todo.remove(); + + for (File f : dir.listFiles(filter)) { + if (f.isDirectory()) { + todo.add(f); + } else { + if (f.getAbsolutePath().endsWith(CLASS_EXTENSION)) { + loadClassFromFile(loader, url, + makeRelativePath(file.getAbsolutePath(), f.getAbsolutePath()), pack); + } else if (f.getAbsolutePath().endsWith(MCR_EXTENSION)) { + try { + loadClassFromMacro(f.toURI(), pack); + } catch (DependsOnUnknownException e) { + todo.add(dir); + } + } else if (f.getAbsolutePath().endsWith(JAR_EXTENSION)) { + scanUrl(f.toURI().toURL(), pack, loader); + } + } + } + } + } catch (MalformedURLException e) { + fireError(null, e, null); + } + } + + private String makeRelativePath(String root, String absolutePath) { + String relative = absolutePath; + + if (relative.startsWith(root)) { + relative = relative.substring(root.length()); + if (relative.startsWith(File.separator)) { + relative = relative.substring(File.separator.length()); + } + } + return relative; + } + + // svzelst@20160808 disable cache checking in RapidProM + // svzelst@20160812 allow us to load lpsolve to library path + private void scanUrl(URL url, PackageDescriptor pack, ClassLoader loader) { + // PluginCacheEntry cached = new PluginCacheEntry(url, Boot.VERBOSE, + // pack); + + // if (cached.isInCache()) { + // for (String className : cached.getCachedClassNames()) { + // loadClass(loader, url, className, pack); + // } + // } else { + try { + JarFile jarFile = new JarFile(new File(url.toURI())); + InputStream is = url.openStream(); + JarInputStream jis = new 
JarInputStream(is); + JarEntry je; + List loadedClasses = new ArrayList(); + + File lpsolveDir = new File(JAVA_TMP_DIR + File.separator + "rprom_lpsolve_" + + SystemInfoUtilities.getOperatingSystem() + "_" + SystemInfoUtilities.getJVMArchitecture()); + boolean unjarLpSolve = !lpsolveDir.exists(); + String lpsolveLibraryEntry = LpSolveUtils.getOSBasedLpSolvePath(SystemInfoUtilities.getOperatingSystem(), + SystemInfoUtilities.getJVMArchitecture()); + Collection lpSolveFiles = new HashSet<>(); + + while ((je = jis.getNextJarEntry()) != null) { + if (!je.isDirectory()) { + if (je.getName().endsWith(CLASS_EXTENSION) && je.getName().contains(PROM_NS)) { + String loadedClass = loadClassFromFile(loader, url, je.getName(), pack); + if (loadedClass != null) { + loadedClasses.add(loadedClass); + } + } else if (je.getName().contains(lpsolveLibraryEntry) && unjarLpSolve) { + if (!lpsolveDir.exists()) { + lpsolveDir.mkdir(); + } + lpSolveFiles.add(loadLpSolveLibraryFile(je, jarFile, lpsolveDir)); + } + } + } + jis.close(); + is.close(); + + // load lpsolve libraries to path. + PathHacker.addLibraryPathFromDirectory(lpsolveDir); + + // cached.update(loadedClasses); + } catch (IOException | URISyntaxException e) { + fireError(url, e, null); + } + // } + } + + private File loadLpSolveLibraryFile(JarEntry je, JarFile jarFile, File targetDir) { + File destination = new File(targetDir, je.getName()); + new File(destination.getParent()).mkdirs(); + assert (destination.getParentFile().isDirectory()); + try { + destination.createNewFile(); + InputStream in = new BufferedInputStream(jarFile.getInputStream(je)); + OutputStream out = new BufferedOutputStream(new FileOutputStream(destination)); + IOUtils.copy(in, out); + out.flush(); + out.close(); + in.close(); + } catch (IOException e) { + e.printStackTrace(); + } + return destination; + + } + + private String loadClassFromFile(ClassLoader loader, URL url, String classFilename, PackageDescriptor pack) { + if (classFilename.indexOf(INNER_CLASS_MARKER) >= 0) { + // we're not going to load inner classes + return null; + } + return loadClass(loader, url, + classFilename.substring(0, classFilename.length() - CLASS_EXTENSION.length()) + .replace(URL_SEPARATOR, PACKAGE_SEPARATOR).replace(File.separatorChar, PACKAGE_SEPARATOR), + pack); + } + + private String loadClassFromMacro(URI macroFile, PackageDescriptor pack) throws DependsOnUnknownException { + MacroPluginDescriptorImpl plugin = null; + try { + plugin = new MacroPluginDescriptorImpl(new File(macroFile), this, pack); + addPlugin(plugin); + } catch (DOMException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } catch (SAXException e) { + e.printStackTrace(); + } catch (ParserConfigurationException e) { + e.printStackTrace(); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } catch (DependsOnUnknownException e) { + throw e; + } + return plugin == null ? null : plugin.getFileName(); + } + + /** + * Returns the name of the class, if it is annotated, or if any of its + * methods carries a plugin annotation! 
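+ * <p>
+ * As a side effect, non-abstract classes annotated with {@code KeepInProMCache}
+ * are recorded in the {@code annotatedClasses} index, so that they can later be
+ * retrieved via {@link #getKnownClassesAnnotatedWith(Class)}.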
+ * + * @param loader + * @param url + * @param className + * @return + */ + private String loadClass(ClassLoader loader, URL url, String className, PackageDescriptor pack) { + boolean isAnnotated = false; + + if ((className == null) || className.trim().equals("") || className.startsWith("bin-test-instrument")) { + return null; + } + + className = className.trim(); + try { + Class pluginClass = Class.forName(className, false, loader); + // isAnnotated = (pluginClass.getAnnotations().length > 0); + + // register all annotated classes + if (pluginClass.isAnnotationPresent(KeepInProMCache.class) + && !Modifier.isAbstract(pluginClass.getModifiers())) { + Annotation[] annotations = pluginClass.getAnnotations(); + isAnnotated = true; + for (int i = 0; i < annotations.length; i++) { + Set> set = annotatedClasses.get(annotations[i].annotationType()); + if (set == null) { + set = new HashSet>(); + annotatedClasses.put(annotations[i].annotationType(), set); + } + set.add(pluginClass); + } + } + + Method[] methods = pluginClass.getMethods(); + // Check if plugin annotation is present + if (pluginClass.isAnnotationPresent(Plugin.class) && isGoodPlugin(pluginClass, methods)) { + RapidProMPluginDescriptor pl = new RapidProMPluginDescriptor(pluginClass, pluginContextType, pack); + addPlugin(pl); + isAnnotated = true; + } + + for (Method method : methods) { + if (method.isAnnotationPresent(Plugin.class) && isGoodPlugin(method)) { + try { + RapidProMPluginDescriptor pl = new RapidProMPluginDescriptor(method, pack); + addPlugin(pl); + isAnnotated = true; + } catch (Exception e) { + fireError(url, e, className); + if (Boot.VERBOSE != Level.NONE) { + System.err.println("ERROR while adding plugin: " + url + ":" + e.getMessage()); + } + } + } + } + } catch (Throwable t) { + fireError(url, t, className); + if (Boot.VERBOSE != Level.NONE) { + System.err.println("ERROR while scanning for plugins at: " + url + ":"); + System.err.println(" in file :" + className); + System.err.println(" " + t.getMessage()); + // t.printStackTrace(); + } + } + return isAnnotated ? 
className : null; + } + + private void addPlugin(RapidProMPluginDescriptor pl) { + RapidProMPluginDescriptor old = (RapidProMPluginDescriptor) plugins.put(pl.getID(), pl); + + if (old != null) { + if (Boot.VERBOSE == Level.ALL) { + System.out.println("Found new version of plugin: " + pl.getName() + " ....overwriting."); + } + for (Annotation annotation : old.getAnnotatedElement().getAnnotations()) { + annotation2plugins.get(annotation.annotationType()).remove(old); + } + } + + for (Annotation annotation : pl.getAnnotatedElement().getAnnotations()) { + Set pls = annotation2plugins.get(annotation.annotationType()); + if (pls == null) { + pls = new TreeSet(); + annotation2plugins.put(annotation.annotationType(), pls); + } + pls.add(pl); + + } + checkTypesAfterAdd(pl); + + } + + private void addPlugin(MacroPluginDescriptorImpl pl) { + PluginDescriptor old = plugins.put(pl.getID(), pl); + + if (old != null) { + if (Boot.VERBOSE == Level.ALL) { + System.out.println("Found new version of plugin: " + pl.getName() + " ....overwriting."); + } + annotation2plugins.get(Plugin.class).remove(old); + } + + Set pls = annotation2plugins.get(Plugin.class); + if (pls == null) { + pls = new TreeSet(); + annotation2plugins.put(Plugin.class, pls); + } + pls.add(pl); + + checkTypesAfterAdd(pl); + } + + private void checkTypesAfterAdd(PluginDescriptor pl) { + HashSet> newTypes = new HashSet>(); + for (List> parTypes : pl.getParameterTypes()) { + newTypes.addAll(addKnownObjectTypes(parTypes)); + } + newTypes.addAll(addKnownObjectTypes(pl.getReturnTypes())); + + firePluginAdded(pl, newTypes); + } + + private Collection> addKnownObjectTypes(Collection> types) { + List> newTypes = new ArrayList>(); + for (Class type : types) { + if (type.isArray()) { + if (knownObjectTypes.add(type.getComponentType())) { + newTypes.add(type.getComponentType()); + } + } + if (knownObjectTypes.add(type)) { + newTypes.add(type); + } + } + return newTypes; + } + + private boolean isGoodPlugin(Class type, Method[] methods) { + try { + if (!isRightlyAnnotated(type)) { + return false; + } + String[] names = type.getAnnotation(Plugin.class).parameterLabels(); + Class[] returnTypes = type.getAnnotation(Plugin.class).returnTypes(); + + // Check if there is at least one method annotated with + // PluginVariant + List pluginMethods = new ArrayList(methods.length); + for (Method m : methods) { + if (m.isAnnotationPresent(PluginVariant.class)) { + pluginMethods.add(m); + } + } + + // Check if for all methods, the requiredTypes are set Correctly + Iterator it = pluginMethods.iterator(); + loop: while (it.hasNext()) { + Method m = it.next(); + int[] required = m.getAnnotation(PluginVariant.class).requiredParameterLabels(); + Set set = new HashSet(); + for (int i : required) { + set.add(i); + if ((i < 0) || (i >= names.length)) { + if (Boot.VERBOSE != Level.NONE) { + System.err.println("Method " + m.toString() + + " could not be added as a plugin. At least one required parameter is not a valid index." + + "There is no parameterlabel at index " + i); + } + it.remove(); + continue loop; + } + } + if (set.size() != required.length) { + if (Boot.VERBOSE != Level.NONE) { + System.err.println("Method " + m.toString() + + " could not be added as a plugin. 
Some required indices are duplicated."); + } + it.remove(); + } + } + + // Check for a corresponding context at the first index + it = pluginMethods.iterator(); + loop: while (it.hasNext()) { + Method m = it.next(); + if (!isCorrectPluginContextType(m)) { + if (Boot.VERBOSE != Level.NONE) { + System.err.println("Method " + m.toString() + + " could not be added as a plugin. The context should be asked as first parameter and should be the same as, or a superclass of, " + + pluginContextType.getName() + "."); + } + it.remove(); + continue loop; + } + } + + it = pluginMethods.iterator(); + loop: while (it.hasNext()) { + Method m = it.next(); + if ((returnTypes.length > 1) && !Object[].class.isAssignableFrom(m.getReturnType()) + && !Object.class.equals(m.getReturnType())) { + if (Boot.VERBOSE != Level.NONE) { + System.err.println("Method " + m.toString() + + " could not be added as a plugin. The plugin should return an array of objects as specified in the context."); + } + it.remove(); + continue loop; + } + } + + if (pluginMethods.isEmpty()) { + if (Boot.VERBOSE != Level.NONE) { + System.err.println("Plugin " + type.toString() + + " could not be added as a plugin. At least one variant has to be specified."); + } + return false; + } + + return true; + + } catch (NoClassDefFoundError e) { + // required class not on classpath, cannot load as plugin + return false; + } + } + + private boolean isCorrectPluginContextType(Method method) { + if (method.getParameterTypes().length == 0) { + return false; + } + if (!PluginContext.class.isAssignableFrom(method.getParameterTypes()[0])) { + return false; + } + if (!method.getParameterTypes()[0].isAssignableFrom(pluginContextType)) { + return false; + } + return true; + } + + private boolean isRightlyAnnotated(AnnotatedElement element) { + if (element.getAnnotation(Plugin.class).returnLabels().length != element.getAnnotation(Plugin.class) + .returnTypes().length) { + if (Boot.VERBOSE != Level.NONE) { + System.err.println("Plugin " + element.toString() + " could not be added as a plugin, the number of " + + "return types and return labels do not match."); + } + return false; + } + return true; + } + + private boolean isGoodPlugin(Method method) throws NoClassDefFoundError { + try { + if (!isRightlyAnnotated(method)) { + return false; + } + + if ((method.getAnnotation(Plugin.class).parameterLabels().length != 0) + && (method.getAnnotation(Plugin.class).parameterLabels().length != method.getParameterTypes().length + - 1)) { + if (Boot.VERBOSE != Level.NONE) { + System.err.println("Plugin " + method.toString() + " could not be added as a plugin, the number of" + + " parameter labels does not match the number of parameters."); + } + return false; + } + + // if (void.class.equals(method.getReturnType())) { + // System.err.println("Method " + method.toGenericString() + // + " could not be added as a plugin, as the resultType is void."); + // return false; + // } + + Class<?>[] returnTypes = method.getAnnotation(Plugin.class).returnTypes(); + if ((returnTypes.length > 1) && !Object[].class.isAssignableFrom(method.getReturnType()) + && !Object.class.equals(method.getReturnType())) { + if (Boot.VERBOSE != Level.NONE) { + System.err.println("Method " + method.toString() + + " could not be added as a plugin. 
The plugin should return an " + + "array of objects as specified in the context."); + } + return false; + } + + Class<?>[] pars = method.getParameterTypes(); + if (!isCorrectPluginContextType(method)) { + if (!method.isAnnotationPresent(Bootable.class)) { + if (Boot.VERBOSE != Level.NONE) { + System.err.println("Method " + method.toGenericString() + + " could not be added as a plugin, the first parameter has to be a " + + "PluginContext and assignable from " + pluginContextType.getName() + "."); + } + } + return false; + } + for (int i = 1; i < pars.length; i++) { + Class<?> type = pars[i]; + if (PluginContext.class.isAssignableFrom(type)) { + if (Boot.VERBOSE != Level.NONE) { + System.err.println("Method " + method.toGenericString() + + " could not be added as a plugin, only one context can be requested."); + } + return false; + } + } + for (int i = 0; i < pars.length; i++) { + if (pars[i].getTypeParameters().length > 0) { + if (Boot.VERBOSE != Level.NONE) { + System.err.println("Method " + method.toGenericString() + + " could not be added as a plugin, as one of the parameters " + + "is derived from a Type using Generics"); + } + return false; + } + } + return true; + } catch (NoClassDefFoundError e) { + // required class not on classpath, cannot load as plugin + return false; + } + } + + public Set<Pair<Integer, PluginParameterBinding>> find(Class<? extends Annotation> annotation, Class<?> resultType, + Class<? extends PluginContext> contextType, boolean totalMatch, boolean orderedParameters, + boolean mustBeUserVisible, Class<?>... parameters) { + + Set<Pair<Integer, PluginParameterBinding>> result = new TreeSet<Pair<Integer, PluginParameterBinding>>(); + Set<PluginDescriptor> pls = annotation2plugins.get(annotation); + if (pls == null) { + return result; + } + for (PluginDescriptor plugin : pls) { + if (mustBeUserVisible && (!plugin.meetsQualityThreshold() || !plugin.meetsLevelThreshold())) { + /* + * Plug-in must be user visible, but does not meet some required threshold. + * Ignore it. + */ + continue; + } + if (!mustBeUserVisible || plugin.isUserAccessible()) { + int i = (resultType == null ? 0 : plugin.getReturnTypes().indexOf(resultType)); + if (i < 0) { + // Check for returned subtypes of the requested type + i = checkIfRequestedReturnTypeIsPresent(plugin, resultType); + } + if (i >= 0) { + for (int j = 0; j < plugin.getParameterTypes().size(); j++) { + if (!plugin.getContextType(j).isAssignableFrom(contextType)) { + // Check context types + continue; + } + + List<PluginParameterBinding> list = PluginParameterBinding.Factory.tryToBind(this, plugin, j, + totalMatch, orderedParameters, parameters); + for (PluginParameterBinding binding : list) { + + result.add(new ComparablePair<Integer, PluginParameterBinding>(i, binding)); + // // Quit the loop since only one binding is to be + // // found. 
+ // j = plugin.getParameterTypes().size(); + } + } + } + } + } + return result; + } + + private int checkIfRequestedReturnTypeIsPresent(PluginDescriptor plugin, Class<?> resultType) { + for (int i = 0; i < plugin.getReturnTypes().size(); i++) { + if (isParameterAssignable(plugin.getReturnTypes().get(i), resultType)) { + return i; + } + } + + return -1; + } + + public Collection<PluginDescriptor> find(String pluginName) { + List<PluginDescriptor> result = new ArrayList<PluginDescriptor>(); + for (PluginDescriptor plugin : plugins.values()) { + if (plugin.getName().equals(pluginName)) { + result.add(plugin); + } + } + return result; + } + + public PluginDescriptor getPlugin(PluginDescriptorID id) { + return plugins.get(id); + } + + public PluginDescriptor getPlugin(String id) { + for (Map.Entry<PluginDescriptorID, PluginDescriptor> entry : plugins.entrySet()) { + if (entry.getKey().toString().equals(id)) { + return entry.getValue(); + } + } + return null; + } + + public Set<PluginParameterBinding> getPluginsAcceptingAtLeast(Class<? extends PluginContext> contextType, + boolean mustBeUserVisible, Class<?>... parameters) { + Set<PluginParameterBinding> result = new TreeSet<PluginParameterBinding>(); + for (Pair<Integer, PluginParameterBinding> pair : find(Plugin.class, null, contextType, false, false, + mustBeUserVisible, parameters)) { + result.add(pair.getSecond()); + } + return result; + } + + public Set<PluginParameterBinding> getPluginsAcceptingInAnyOrder(Class<? extends PluginContext> contextType, + boolean mustBeUserVisible, Class<?>... parameters) { + Set<PluginParameterBinding> result = new TreeSet<PluginParameterBinding>(); + for (Pair<Integer, PluginParameterBinding> pair : find(Plugin.class, null, contextType, true, false, + mustBeUserVisible, parameters)) { + result.add(pair.getSecond()); + } + return result; + } + + public Set<PluginParameterBinding> getPluginsAcceptingOrdered(Class<? extends PluginContext> contextType, + boolean mustBeUserVisible, Class<?>... parameters) { + Set<PluginParameterBinding> result = new TreeSet<PluginParameterBinding>(); + for (Pair<Integer, PluginParameterBinding> pair : find(Plugin.class, null, contextType, true, true, + mustBeUserVisible, parameters)) { + result.add(pair.getSecond()); + } + return result; + } + + public Set<Pair<Integer, PluginDescriptor>> getPluginsResultingIn(Class<?> resultType, + Class<? extends PluginContext> contextType, boolean mustBeUserVisible) { + Set<Pair<Integer, PluginDescriptor>> result = new TreeSet<Pair<Integer, PluginDescriptor>>(); + for (Pair<Integer, PluginParameterBinding> pair : find(Plugin.class, resultType, contextType, false, false, + mustBeUserVisible)) { + result.add(new ComparablePair<Integer, PluginDescriptor>(pair.getFirst(), pair.getSecond().getPlugin())); + } + for (Pair<Integer, PluginParameterBinding> pair : find(Plugin.class, resultType, contextType, true, false, + mustBeUserVisible)) { + result.add(new ComparablePair<Integer, PluginDescriptor>(pair.getFirst(), pair.getSecond().getPlugin())); + } + return result; + } + + private SortedSet<PluginDescriptor> getAllPluginsSorted(boolean canBeUserVisible, boolean mustBeUserVisible) { + SortedSet<PluginDescriptor> result = new TreeSet<PluginDescriptor>(); + for (PluginDescriptor plugin : plugins.values()) { + boolean visible = plugin.isUserAccessible(); + if (mustBeUserVisible && (!plugin.meetsQualityThreshold() || !plugin.meetsLevelThreshold())) { + /* + * Plug-in can be user visible (that is, should end up in the + * GUI), but does not meet some required threshold. Ignore it. 
+ */ + continue; + } + // Do not include, if: + // mustBeUserVisible AND NOT visible, OR + // visible AND NOT canBeUserVisible + if (!((mustBeUserVisible && !visible) || (!canBeUserVisible && visible))) { + result.add(plugin); + } + } + return Collections.unmodifiableSortedSet(result); + } + + public SortedSet<PluginDescriptor> getAllPlugins() { + return getAllPluginsSorted(true, false); + } + + public SortedSet<PluginDescriptor> getAllPlugins(boolean mustBeVisible) { + return getAllPluginsSorted(mustBeVisible, mustBeVisible); + } + + public boolean isParameterAssignable(Class<?> instanceType, Class<?> requestedType) { + if (requestedType.isAssignableFrom(instanceType)) { + return true; + } + if (requestedType.isArray() && requestedType.getComponentType().isAssignableFrom(instanceType)) { + return true; + } + return false; + } + + public Set<Class<?>> getKnownObjectTypes() { + return Collections.unmodifiableSet(knownObjectTypes); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/AcceptingPetriNetIOObjectRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/AcceptingPetriNetIOObjectRenderer.java new file mode 100644 index 0000000..bd98101 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/AcceptingPetriNetIOObjectRenderer.java @@ -0,0 +1,23 @@ +package org.rapidprom.ioobjectrenderers; + +import javax.swing.JComponent; + +import org.processmining.acceptingpetrinet.plugins.VisualizeAcceptingPetriNetPlugin; +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.AcceptingPetriNetIOObject; + +public class AcceptingPetriNetIOObjectRenderer + extends AbstractRapidProMIOObjectRenderer<AcceptingPetriNetIOObject> { + + @Override + public String getName() { + return "Accepting Petri Net Object Renderer"; + } + + @Override + protected JComponent runVisualization(AcceptingPetriNetIOObject ioObject) { + return VisualizeAcceptingPetriNetPlugin + .visualize(ioObject.getPluginContext(), ioObject.getArtifact()); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/BPMNIOObjectRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/BPMNIOObjectRenderer.java new file mode 100644 index 0000000..fd14251 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/BPMNIOObjectRenderer.java @@ -0,0 +1,23 @@ +package org.rapidprom.ioobjectrenderers; + +import javax.swing.JComponent; + +import org.processmining.models.jgraph.ProMJGraphVisualizer; +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.BPMNIOObject; + +public class BPMNIOObjectRenderer extends + AbstractRapidProMIOObjectRenderer<BPMNIOObject> { + + @Override + protected JComponent runVisualization(BPMNIOObject artifact) { + return ProMJGraphVisualizer.instance().visualizeGraph( + artifact.getPluginContext(), artifact.getArtifact()); + } + + @Override + public String getName() { + return "BPMN renderer"; + } + +} \ No newline at end of file diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/CPNModelIOObjectRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/CPNModelIOObjectRenderer.java new file mode 100644 index 0000000..5697672 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/CPNModelIOObjectRenderer.java @@ -0,0 +1,44 @@ +package org.rapidprom.ioobjectrenderers; + +import java.io.IOException; + +import javax.swing.JComponent; +import javax.xml.parsers.ParserConfigurationException; + +import org.cpntools.accesscpn.model.importer.NetCheckException; +import org.processmining.plugins.cpnet.DrawCPNGraph; +import 
org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.CPNModelIOObject; +import org.xml.sax.SAXException; + +public class CPNModelIOObjectRenderer extends AbstractRapidProMIOObjectRenderer { + + @Override + public String getName() { + return "CPNModel renderer"; + } + + @Override + protected JComponent runVisualization(CPNModelIOObject artifact) { + DrawCPNGraph visualizer = new DrawCPNGraph(); + JComponent result = null; + try { + result = visualizer.visualize(artifact.getPluginContext(), artifact.getArtifact()); + } catch (NetCheckException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (SAXException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (ParserConfigurationException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + return result; + } + + +} diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/FuzzyAnimationIOObjectRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/FuzzyAnimationIOObjectRenderer.java new file mode 100644 index 0000000..e7a67ca --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/FuzzyAnimationIOObjectRenderer.java @@ -0,0 +1,24 @@ +package org.rapidprom.ioobjectrenderers; + +import javax.swing.JComponent; + +import org.processmining.models.animation.visualization.AnimationVisualizer; +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.FuzzyAnimationIOObject; + +public class FuzzyAnimationIOObjectRenderer extends + AbstractRapidProMIOObjectRenderer { + + @Override + public String getName() { + return "FuzzyAnimation renderer"; + } + + @Override + protected JComponent runVisualization(FuzzyAnimationIOObject artifact) { + AnimationVisualizer visualizer = new AnimationVisualizer(); + return visualizer.visualize(artifact.getPluginContext(), + artifact.getArtifact()); + } + +} \ No newline at end of file diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/HeuristicsNetIOObjectRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/HeuristicsNetIOObjectRenderer.java new file mode 100644 index 0000000..fcaed95 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/HeuristicsNetIOObjectRenderer.java @@ -0,0 +1,22 @@ +package org.rapidprom.ioobjectrenderers; + +import javax.swing.JComponent; + +import org.processmining.plugins.heuristicsnet.visualizer.HeuristicsNetAnnotatedVisualization; +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.HeuristicsNetIOObject; + +public class HeuristicsNetIOObjectRenderer extends + AbstractRapidProMIOObjectRenderer { + + public String getName() { + return "Heuristics Net renderer"; + } + + @Override + protected JComponent runVisualization(HeuristicsNetIOObject artifact) { + return HeuristicsNetAnnotatedVisualization.visualize( + artifact.getPluginContext(), artifact.getArtifact()); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/InteractiveMinerLauncherIOObjectRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/InteractiveMinerLauncherIOObjectRenderer.java new file mode 100644 index 0000000..e24544b --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/InteractiveMinerLauncherIOObjectRenderer.java @@ -0,0 +1,41 @@ +package org.rapidprom.ioobjectrenderers; + +import javax.swing.JComponent; + +import 
org.processmining.framework.plugin.ProMCanceller; +import org.processmining.plugins.inductiveVisualMiner.InductiveVisualMiner; +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.InteractiveMinerLauncherIOObject; + +public class InteractiveMinerLauncherIOObjectRenderer extends + AbstractRapidProMIOObjectRenderer { + + @Override + public String getName() { + return "Inductive Visual Miner renderer"; + } + + @Override + protected JComponent runVisualization( + InteractiveMinerLauncherIOObject artifact) { + InductiveVisualMiner visualizer = new InductiveVisualMiner(); + return visualizer.visualise(artifact.getPluginContext(), + artifact.getArtifact(), new ProMCancellerImpl()); + } + + private static final class ProMCancellerImpl implements ProMCanceller { + + private boolean isCancelled = false; + + public boolean isCancelled() { + return isCancelled; + } + + @SuppressWarnings("unused") + public void cancel() { + isCancelled = true; + } + + } + +} \ No newline at end of file diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/ManifestIOObjectRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/ManifestIOObjectRenderer.java new file mode 100644 index 0000000..9c5cf39 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/ManifestIOObjectRenderer.java @@ -0,0 +1,24 @@ +package org.rapidprom.ioobjectrenderers; + +import javax.swing.JComponent; + +import org.processmining.plugins.manifestanalysis.visualization.performance.ManifestPerfVisualization; +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.ManifestIOObject; + +public class ManifestIOObjectRenderer extends + AbstractRapidProMIOObjectRenderer { + + @Override + public String getName() { + return "Manifest renderer"; + } + + @Override + protected JComponent runVisualization(ManifestIOObject artifact) { + ManifestPerfVisualization visualizer = new ManifestPerfVisualization(); + return visualizer.visualize(artifact.getPluginContext(), + artifact.getArtifact()); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/MetricsRepositoryIOObjectRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/MetricsRepositoryIOObjectRenderer.java new file mode 100644 index 0000000..2cd130c --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/MetricsRepositoryIOObjectRenderer.java @@ -0,0 +1,24 @@ +package org.rapidprom.ioobjectrenderers; + +import javax.swing.JComponent; + +import org.processmining.plugins.fuzzymodel.FastTransformerVisualization; +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.MetricsRepositoryIOObject; + +public class MetricsRepositoryIOObjectRenderer extends + AbstractRapidProMIOObjectRenderer { + + @Override + public String getName() { + return "Transition System (metrics repository) renderer"; + } + + @Override + protected JComponent runVisualization(MetricsRepositoryIOObject artifact) { + FastTransformerVisualization visualizer = new FastTransformerVisualization(); + return visualizer.visualize(artifact.getPluginContext(), + artifact.getArtifact()); + } + +} \ No newline at end of file diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/PNRepResultIOObjectLogRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/PNRepResultIOObjectLogRenderer.java new file mode 100644 index 0000000..fb15650 --- /dev/null +++ 
b/src/main/java/org/rapidprom/ioobjectrenderers/PNRepResultIOObjectLogRenderer.java @@ -0,0 +1,22 @@ +package org.rapidprom.ioobjectrenderers; + +import javax.swing.JComponent; + +import org.processmining.plugins.petrinet.replayresult.visualization.PNLogReplayResultVisPanel; +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.PNRepResultIOObject; + +public class PNRepResultIOObjectLogRenderer extends AbstractRapidProMIOObjectRenderer { + + @Override + public String getName() { + return "PNRepResult (Project on Log) renderer"; + } + + @Override + protected JComponent runVisualization(PNRepResultIOObject ioObject) { + return new PNLogReplayResultVisPanel(ioObject.getPn().getArtifact(), ioObject.getXLog(), ioObject.getArtifact(), + ioObject.getPluginContext().getProgress()); + } + +} \ No newline at end of file diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/PNRepResultIOObjectModelRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/PNRepResultIOObjectModelRenderer.java new file mode 100644 index 0000000..e3dda42 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/PNRepResultIOObjectModelRenderer.java @@ -0,0 +1,30 @@ +package org.rapidprom.ioobjectrenderers; + +import javax.swing.JComponent; + +import org.processmining.plugins.pnalignanalysis.visualization.projection.PNLogReplayProjectedVisPanel; +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.PNRepResultIOObject; + +import javassist.tools.rmi.ObjectNotFoundException; + +public class PNRepResultIOObjectModelRenderer extends AbstractRapidProMIOObjectRenderer { + + @Override + public String getName() { + return "PNRepResult (Project on Model) renderer"; + } + + @Override + protected JComponent runVisualization(PNRepResultIOObject ioObject) { + try { + return new PNLogReplayProjectedVisPanel(ioObject.getPluginContext(), ioObject.getPn().getArtifact(), + ioObject.getPn().getInitialMarking(), ioObject.getXLog(), ioObject.getMapping(), + ioObject.getArtifact()); + } catch (ObjectNotFoundException e) { + e.printStackTrace(); + } + return null; + } + +} \ No newline at end of file diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/PetriNetIOObjectRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/PetriNetIOObjectRenderer.java new file mode 100644 index 0000000..3a1426a --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/PetriNetIOObjectRenderer.java @@ -0,0 +1,40 @@ +package org.rapidprom.ioobjectrenderers; + +import javax.swing.JComponent; + +import org.processmining.models.graphbased.directed.petrinet.InhibitorNet; +import org.processmining.models.graphbased.directed.petrinet.Petrinet; +import org.processmining.models.graphbased.directed.petrinet.ResetInhibitorNet; +import org.processmining.models.graphbased.directed.petrinet.ResetNet; +import org.processmining.plugins.petrinet.PetriNetVisualization; +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.PetriNetIOObject; + +public class PetriNetIOObjectRenderer + extends AbstractRapidProMIOObjectRenderer { + + @Override + public String getName() { + return "Petri Net renderer"; + } + + @Override + protected JComponent runVisualization(PetriNetIOObject artifact) { + PetriNetVisualization visualizer = new PetriNetVisualization(); + if (artifact.getArtifact() instanceof Petrinet) + return visualizer.visualize(artifact.getPluginContext(), + (Petrinet) 
artifact.getArtifact()); + if (artifact.getArtifact() instanceof ResetInhibitorNet) + return visualizer.visualize(artifact.getPluginContext(), + (ResetInhibitorNet) artifact.getArtifact()); + if (artifact.getArtifact() instanceof ResetNet) + return visualizer.visualize(artifact.getPluginContext(), + (ResetNet) artifact.getArtifact()); + if (artifact.getArtifact() instanceof InhibitorNet) + return visualizer.visualize(artifact.getPluginContext(), + (InhibitorNet) artifact.getArtifact()); + else + return null; + } + +} diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/PomPomViewIOObjectRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/PomPomViewIOObjectRenderer.java new file mode 100644 index 0000000..968b262 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/PomPomViewIOObjectRenderer.java @@ -0,0 +1,24 @@ +package org.rapidprom.ioobjectrenderers; + +import javax.swing.JComponent; + +import org.processmining.plugins.pompom.PomPomVisualization; +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.PomPomViewIOObject; + +public class PomPomViewIOObjectRenderer extends + AbstractRapidProMIOObjectRenderer { + + @Override + public String getName() { + return "PomPomView renderer"; + } + + @Override + protected JComponent runVisualization(PomPomViewIOObject artifact) { + PomPomVisualization visualizer = new PomPomVisualization(); + return visualizer.visualize(artifact.getPluginContext(), + artifact.getArtifact()); + } + +} \ No newline at end of file diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/PredictorIOObjectRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/PredictorIOObjectRenderer.java new file mode 100644 index 0000000..424c547 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/PredictorIOObjectRenderer.java @@ -0,0 +1,30 @@ +package org.rapidprom.ioobjectrenderers; + +import javax.swing.JComponent; + +import org.processmining.prediction.PredictorVisualizer; +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.PredictorIOObject; + +public class PredictorIOObjectRenderer extends + AbstractRapidProMIOObjectRenderer { + + @Override + public String getName() { + return "Predictor renderer"; + } + + @Override + protected JComponent runVisualization(PredictorIOObject artifact) { + PredictorVisualizer visualizer = new PredictorVisualizer(); + JComponent result = null; + try { + result = visualizer.visualizePrediction(artifact.getPluginContext(), + artifact.getArtifact()); + } catch (Exception e) { + e.printStackTrace(); + } + return result; + } + +} \ No newline at end of file diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/ProcessTreeIOObjectDefaultRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/ProcessTreeIOObjectDefaultRenderer.java new file mode 100644 index 0000000..77dd130 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/ProcessTreeIOObjectDefaultRenderer.java @@ -0,0 +1,21 @@ +package org.rapidprom.ioobjectrenderers; + +import javax.swing.JComponent; + +import org.processmining.processtree.visualization.tree.TreeVisualization; +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.ProcessTreeIOObject; + +public class ProcessTreeIOObjectDefaultRenderer extends AbstractRapidProMIOObjectRenderer { + + @Override + public String getName() { + return "Process Tree (Default) renderer"; + } + + @Override + 
protected JComponent runVisualization(ProcessTreeIOObject ioObject) { + TreeVisualization visualizer = new TreeVisualization(); + return visualizer.visualize(null, ioObject.getArtifact()); + } +} \ No newline at end of file diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/ProcessTreeIOObjectDotRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/ProcessTreeIOObjectDotRenderer.java new file mode 100644 index 0000000..67a78dc --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/ProcessTreeIOObjectDotRenderer.java @@ -0,0 +1,21 @@ +package org.rapidprom.ioobjectrenderers; + +import javax.swing.JComponent; + +import org.processmining.plugins.inductiveVisualMiner.plugins.ProcessTreeVisualisationPlugin; +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.ProcessTreeIOObject; + +public class ProcessTreeIOObjectDotRenderer extends AbstractRapidProMIOObjectRenderer { + + @Override + public String getName() { + return "Process Tree (Dot) renderer"; + } + + @Override + protected JComponent runVisualization(ProcessTreeIOObject ioObject) { + ProcessTreeVisualisationPlugin visualizer = new ProcessTreeVisualisationPlugin(); + return visualizer.fancy(null, ioObject.getArtifact()); + } +} \ No newline at end of file diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/ReachabilityGraphIOObjectRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/ReachabilityGraphIOObjectRenderer.java new file mode 100644 index 0000000..327fead --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/ReachabilityGraphIOObjectRenderer.java @@ -0,0 +1,22 @@ +package org.rapidprom.ioobjectrenderers; + +import javax.swing.JComponent; + +import org.processmining.models.jgraph.ProMJGraphVisualizer; +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.ReachabilityGraphIOObject; + +public class ReachabilityGraphIOObjectRenderer extends + AbstractRapidProMIOObjectRenderer { + + @Override + public String getName() { + return "Reachability Graph renderer"; + } + + @Override + protected JComponent runVisualization(ReachabilityGraphIOObject artifact) { + return ProMJGraphVisualizer.instance().visualizeGraph( + artifact.getPluginContext(), artifact.getArtifact()); + } +} \ No newline at end of file diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/SocialNetworkIOObjectRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/SocialNetworkIOObjectRenderer.java new file mode 100644 index 0000000..b3ccff1 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/SocialNetworkIOObjectRenderer.java @@ -0,0 +1,23 @@ +package org.rapidprom.ioobjectrenderers; + +import javax.swing.JComponent; + +import org.processmining.plugins.socialnetwork.analysis.SocialNetworkAnalysisPlugin; +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.SocialNetworkIOObject; + +public class SocialNetworkIOObjectRenderer extends + AbstractRapidProMIOObjectRenderer { + + @Override + public String getName() { + return "SocialNetwork renderer"; + } + + @Override + protected JComponent runVisualization(SocialNetworkIOObject artifact) { + return SocialNetworkAnalysisPlugin.invokeSNA( + artifact.getPluginContext(), artifact.getArtifact()); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/TransitionSystemIOObjectRenderer.java 
b/src/main/java/org/rapidprom/ioobjectrenderers/TransitionSystemIOObjectRenderer.java new file mode 100644 index 0000000..c738f1b --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/TransitionSystemIOObjectRenderer.java @@ -0,0 +1,25 @@ +package org.rapidprom.ioobjectrenderers; + +import javax.swing.JComponent; + +import org.processmining.plugins.transitionsystem.Visualization; +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.TransitionSystemIOObject; + +public class TransitionSystemIOObjectRenderer extends + AbstractRapidProMIOObjectRenderer { + + @Override + public String getName() { + return "TransitionSystemRenderer"; + } + + @Override + protected JComponent runVisualization(TransitionSystemIOObject artifact) { + + Visualization visualizator = new Visualization(); + return visualizator.visualize(artifact.getPluginContext(), + artifact.getArtifact()); + } + +} \ No newline at end of file diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/WoflanDiagnosisIOObjectRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/WoflanDiagnosisIOObjectRenderer.java new file mode 100644 index 0000000..47165ab --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/WoflanDiagnosisIOObjectRenderer.java @@ -0,0 +1,23 @@ +package org.rapidprom.ioobjectrenderers; + +import javax.swing.JComponent; +import javax.swing.JTextArea; + +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.WoflanDiagnosisIOObject; + +public class WoflanDiagnosisIOObjectRenderer extends + AbstractRapidProMIOObjectRenderer { + + public String getName() { + return "WoflanDiagnosisrenderer"; + } + + @Override + protected JComponent runVisualization(WoflanDiagnosisIOObject artifact) { + JComponent panel = new JTextArea(artifact.getArtifact().toString()); + panel.setEnabled(false); + return panel; + } + +} \ No newline at end of file diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/XLogIOObjectDefaultRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/XLogIOObjectDefaultRenderer.java new file mode 100644 index 0000000..17cb5d0 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/XLogIOObjectDefaultRenderer.java @@ -0,0 +1,25 @@ +package org.rapidprom.ioobjectrenderers; + +import javax.swing.JComponent; + +import org.processmining.plugins.log.ui.logdialog.LogDialogInitializer; +import org.processmining.plugins.log.ui.logdialog.SlickerOpenLogSettings; +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.XLogIOObject; + +public class XLogIOObjectDefaultRenderer extends AbstractRapidProMIOObjectRenderer { + + @Override + public String getName() { + return "XLog default renderer"; + } + + @Override + protected JComponent runVisualization(XLogIOObject ioObject) { + + SlickerOpenLogSettings o = new SlickerOpenLogSettings(); + + return o.showLogVis(ioObject.getPluginContext(), ioObject.getArtifact()); + } + +} \ No newline at end of file diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/XLogIOObjectExampleSetRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/XLogIOObjectExampleSetRenderer.java new file mode 100644 index 0000000..7b927d0 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/XLogIOObjectExampleSetRenderer.java @@ -0,0 +1,150 @@ +package org.rapidprom.ioobjectrenderers; + +import java.util.ArrayList; +import java.util.LinkedList; +import 
java.util.List; + +import javax.swing.JComponent; + +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.XLogIOObject; +import org.rapidprom.util.XLogUtils; +import org.rapidprom.util.XLogUtils.AttributeTypes; +import org.rapidprom.util.XLogUtils.TableModelXLog; + +import com.rapidminer.example.Attribute; +import com.rapidminer.example.ExampleSet; +import com.rapidminer.example.table.AttributeFactory; +import com.rapidminer.example.table.DataRow; +import com.rapidminer.example.table.DataRowFactory; +import com.rapidminer.example.table.MemoryExampleTable; +import com.rapidminer.gui.renderer.data.ExampleSetDataRenderer; +import com.rapidminer.operator.io.AbstractDataReader.AttributeColumn; +import com.rapidminer.operator.ports.metadata.AttributeMetaData; +import com.rapidminer.operator.ports.metadata.ExampleSetMetaData; +import com.rapidminer.operator.ports.metadata.MDInteger; +import com.rapidminer.operator.ports.metadata.SetRelation; +import com.rapidminer.tools.Ontology; +import com.rapidminer.tools.math.container.Range; + +public class XLogIOObjectExampleSetRenderer extends AbstractRapidProMIOObjectRenderer { + + private Attribute[] attributes; + + @Override + public String getName() { + return "XLog (ExampleSet) renderer"; + } + + @Override + protected JComponent runVisualization(XLogIOObject ioObject) { + + XLogIOObject object = (XLogIOObject) ioObject; + + MemoryExampleTable table = null; + ExampleSet es = null; + + try { + TableModelXLog convertLogToStringTable = XLogUtils.convertLogToStringTable(object.getArtifact(), true); + + table = createStructureTable(convertLogToStringTable); + es = fillTable(table, convertLogToStringTable); + + } catch (Exception error) { + error.printStackTrace(); + } + ExampleSetDataRenderer renderer = new ExampleSetDataRenderer(); + return (JComponent) renderer.getVisualizationComponent(es, null); + } + + private MemoryExampleTable createStructureTable(TableModelXLog convertedLog) { + ExampleSetMetaData metaData = new ExampleSetMetaData(); + List attributes = new LinkedList(); + for (int i = 0; i < convertedLog.getColumnCount(); i++) { + String columnName = convertedLog.getColumnName(i); + AttributeTypes columnType = convertedLog.getColumnType(i); + AttributeMetaData amd = null; + if (columnType.equals(AttributeTypes.CONTINUOUS)) { + attributes.add(AttributeFactory.createAttribute(columnName, Ontology.NUMERICAL)); + amd = new AttributeMetaData(columnName, Ontology.NUMERICAL); + amd.setRole(AttributeColumn.REGULAR); + amd.setNumberOfMissingValues(new MDInteger(0)); + List minAndMaxValueColumn = getMinAndMaxValueColumn(convertedLog, columnName); + amd.setValueRange(new Range(minAndMaxValueColumn.get(0), minAndMaxValueColumn.get(1)), + SetRelation.EQUAL); + } else if (columnType.equals(AttributeTypes.DISCRETE)) { + attributes.add(AttributeFactory.createAttribute(columnName, Ontology.NOMINAL)); + amd = new AttributeMetaData(columnName, Ontology.NOMINAL); + amd.setRole(AttributeColumn.REGULAR); + amd.setNumberOfMissingValues(new MDInteger(0)); + } else if (columnType.equals(AttributeTypes.DATE)) { + attributes.add(AttributeFactory.createAttribute(columnName, Ontology.DATE_TIME)); + amd = new AttributeMetaData(columnName, Ontology.DATE_TIME); + amd.setRole(AttributeColumn.REGULAR); + amd.setNumberOfMissingValues(new MDInteger(0)); + List minAndMaxValueColumn = getMinAndMaxValueColumn(convertedLog, columnName); + amd.setValueRange(new Range(minAndMaxValueColumn.get(0), minAndMaxValueColumn.get(1)), + 
SetRelation.EQUAL); + } else if (columnType.equals(AttributeTypes.STRING)) { + attributes.add(AttributeFactory.createAttribute(columnName, Ontology.NOMINAL)); + amd = new AttributeMetaData(columnName, Ontology.NOMINAL); + amd.setRole(AttributeColumn.REGULAR); + amd.setNumberOfMissingValues(new MDInteger(0)); + } else if (columnType.equals(AttributeTypes.BOOLEAN)) { + attributes.add(AttributeFactory.createAttribute(columnName, Ontology.BINOMINAL)); + amd = new AttributeMetaData(columnName, Ontology.NOMINAL); + amd.setRole(AttributeColumn.REGULAR); + amd.setNumberOfMissingValues(new MDInteger(0)); + } + metaData.addAttribute(amd); + } + // convert the list to array + Attribute[] attribArray = new Attribute[attributes.size()]; + for (int i = 0; i < attributes.size(); i++) { + attribArray[i] = attributes.get(i); + } + metaData.setNumberOfExamples(convertedLog.getRowCount()); + + this.attributes = attribArray; + MemoryExampleTable memoryExampleTable = new MemoryExampleTable(attributes); + return memoryExampleTable; + } + + private ExampleSet fillTable(MemoryExampleTable table, TableModelXLog convertedLog) { + DataRowFactory factory = new DataRowFactory(DataRowFactory.TYPE_DOUBLE_ARRAY, '.'); + // now add per row + for (int i = 0; i < convertedLog.getRowCount(); i++) { + // fill strings + String[] strings = new String[convertedLog.getColumnCount()]; + for (int j = 0; j < convertedLog.getColumnCount(); j++) { + strings[j] = convertedLog.getValueAt(i, j).toString(); + } + DataRow dataRow = factory.create(strings, attributes); + table.addDataRow(dataRow); + } + ExampleSet createExampleSet = table.createExampleSet(); + return createExampleSet; + } + + private List getMinAndMaxValueColumn(TableModelXLog convertedLog, String nameCol) { + double min = Double.MAX_VALUE; + double max = Double.MIN_VALUE; + int intCol = convertedLog.getNameForColumn(nameCol); + for (int i = 0; i < convertedLog.getRowCount(); i++) { + Object valueAt = convertedLog.getValueAt(i, intCol); + if (valueAt instanceof String) { + try { + double parseDouble = Double.parseDouble((String) valueAt); + min = parseDouble < min ? parseDouble : min; + max = parseDouble > max ? parseDouble : max; + } catch (Exception e) { + // do nothing with it. 
+ } + } + } + List doubleList = new ArrayList(); + doubleList.add(min); + doubleList.add(max); + return doubleList; + } +} diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/XLogIOObjectNewDottedChartRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/XLogIOObjectNewDottedChartRenderer.java new file mode 100644 index 0000000..f6e2740 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/XLogIOObjectNewDottedChartRenderer.java @@ -0,0 +1,33 @@ +package org.rapidprom.ioobjectrenderers; + +import javax.swing.JComponent; + +import org.processmining.logprojection.LogProjectionPlugin; +import org.processmining.logprojection.LogView; +import org.processmining.logprojection.plugins.dottedchart.DottedChart.DottedChartException; +import org.processmining.logprojection.plugins.dottedchart.ui.DottedChartInspector; +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.XLogIOObject; + +public class XLogIOObjectNewDottedChartRenderer extends AbstractRapidProMIOObjectRenderer { + + @Override + public String getName() { + return "XLog (Dotted Chart) renderer"; + } + + @Override + protected JComponent runVisualization(XLogIOObject ioObject) { + + LogView result = new LogView(ioObject.getArtifact()); + DottedChartInspector panel = null; + try { + panel = LogProjectionPlugin.visualize(ioObject.getPluginContext(), result); + } catch (DottedChartException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + return (JComponent) panel; + } + +} diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/XLogIOObjectOldDottedChartRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/XLogIOObjectOldDottedChartRenderer.java new file mode 100644 index 0000000..490c4ec --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/XLogIOObjectOldDottedChartRenderer.java @@ -0,0 +1,24 @@ +package org.rapidprom.ioobjectrenderers; + +import javax.swing.JComponent; + +import org.processmining.plugins.dottedchartanalysis.DottedChartAnalysis; +import org.processmining.plugins.dottedchartanalysis.model.DottedChartModel; +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.XLogIOObject; + +public class XLogIOObjectOldDottedChartRenderer extends AbstractRapidProMIOObjectRenderer { + + @Override + public String getName() { + return "XLog (Dotted Chart - Legacy) renderer"; + } + + @Override + protected JComponent runVisualization(XLogIOObject ioObject) { + + DottedChartModel result = new DottedChartModel(ioObject.getPluginContext(), ioObject.getArtifact()); + return new DottedChartAnalysis(ioObject.getPluginContext(), result); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/abstr/AbstractRapidProMIOObjectRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/abstr/AbstractRapidProMIOObjectRenderer.java new file mode 100644 index 0000000..149c371 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/abstr/AbstractRapidProMIOObjectRenderer.java @@ -0,0 +1,37 @@ +package org.rapidprom.ioobjectrenderers.abstr; + +import java.awt.Component; + +import javax.swing.JComponent; + +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; + +import com.rapidminer.gui.renderer.AbstractRenderer; +import com.rapidminer.gui.renderer.DefaultComponentRenderable; +import com.rapidminer.operator.IOContainer; +import com.rapidminer.report.Reportable; + +public abstract class AbstractRapidProMIOObjectRenderer> + extends 
AbstractRenderer { + + @SuppressWarnings("unchecked") + @Override + public Component getVisualizationComponent(Object renderable, + IOContainer ioContainer) { + if (renderable instanceof AbstractRapidProMIOObject) { + T object = (T) renderable; + JComponent panel = runVisualization(object); + return panel; + } + return null; + } + + @Override + public Reportable createReportable(Object renderable, + IOContainer ioContainer, int desiredWidth, int desiredHeight) { + return new DefaultComponentRenderable( + getVisualizationComponent(renderable, ioContainer)); + } + + protected abstract JComponent runVisualization(T ioObject); +} diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/streams/XSAuthorIOObjectRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/streams/XSAuthorIOObjectRenderer.java new file mode 100644 index 0000000..36e454e --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/streams/XSAuthorIOObjectRenderer.java @@ -0,0 +1,22 @@ +package org.rapidprom.ioobjectrenderers.streams; + +import javax.swing.JComponent; + +import org.processmining.stream.core.visualizers.XSAuthorVisualizer; +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.streams.XSAuthorIOObject; + +public class XSAuthorIOObjectRenderer + extends AbstractRapidProMIOObjectRenderer> { + + @Override + public String getName() { + return "XSAuthor Object Renderer"; + } + + @Override + protected JComponent runVisualization(XSAuthorIOObject artifact) { + return XSAuthorVisualizer.visualize(artifact.getArtifact()); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/streams/XSReaderIOObjectRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/streams/XSReaderIOObjectRenderer.java new file mode 100644 index 0000000..21cec87 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/streams/XSReaderIOObjectRenderer.java @@ -0,0 +1,24 @@ +package org.rapidprom.ioobjectrenderers.streams; + +import javax.swing.JComponent; + +import org.processmining.stream.core.interfaces.XSDataPacket; +import org.processmining.stream.core.visualizers.XSReaderVisualizer; +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.streams.XSReaderIOObject; + +public class XSReaderIOObjectRenderer extends + AbstractRapidProMIOObjectRenderer, ?>> { + + @Override + public String getName() { + return "XSReader Object Renderer"; + } + + @Override + protected JComponent runVisualization( + XSReaderIOObject, ?> ioObject) { + return XSReaderVisualizer.visualize(ioObject.getArtifact()); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/streams/XSStreamAnalyzerIOObjectRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/streams/XSStreamAnalyzerIOObjectRenderer.java new file mode 100644 index 0000000..3b7c657 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/streams/XSStreamAnalyzerIOObjectRenderer.java @@ -0,0 +1,23 @@ +package org.rapidprom.ioobjectrenderers.streams; + +import javax.swing.JComponent; + +import org.processmining.streamanalysis.core.visualizer.XSStreamAnalyzerVisualizer; +import org.rapidprom.ioobjectrenderers.abstr.AbstractRapidProMIOObjectRenderer; +import org.rapidprom.ioobjects.streams.XSStreamAnalyzerIOObject; + +public class XSStreamAnalyzerIOObjectRenderer extends + AbstractRapidProMIOObjectRenderer> { + + @Override + public String getName() { + return "XSStreamAnalyzer Object Renderer"; + } + + @Override + protected 
JComponent runVisualization( + XSStreamAnalyzerIOObject ioObject) { + return XSStreamAnalyzerVisualizer.visualize(ioObject.getArtifact()); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjectrenderers/streams/event/XSEventStreamIOObjectRenderer.java b/src/main/java/org/rapidprom/ioobjectrenderers/streams/event/XSEventStreamIOObjectRenderer.java new file mode 100644 index 0000000..4322cf6 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjectrenderers/streams/event/XSEventStreamIOObjectRenderer.java @@ -0,0 +1,54 @@ +package org.rapidprom.ioobjectrenderers.streams.event; + +import java.awt.Component; + +import javax.swing.JComponent; +import javax.swing.JLabel; + +import org.processmining.eventstream.core.interfaces.XSEvent; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.stream.core.interfaces.XSStream; +import org.processmining.stream.core.visualizers.XSStreamVisualizer; +import org.rapidprom.ioobjects.streams.event.XSEventStreamIOObject; + +import com.rapidminer.gui.renderer.AbstractRenderer; +import com.rapidminer.gui.renderer.DefaultComponentRenderable; +import com.rapidminer.gui.renderer.DefaultReadable; +import com.rapidminer.operator.IOContainer; +import com.rapidminer.report.Reportable; + +public class XSEventStreamIOObjectRenderer extends AbstractRenderer { + + private static String NAME = "XSEventStreamIOObjectRenderer renderer"; + + @Override + public String getName() { + return NAME; + } + + @Override + public Component getVisualizationComponent(Object renderable, + IOContainer ioContainer) { + + return new JLabel(getName()); + } + + @Override + public Reportable createReportable(Object renderable, + IOContainer ioContainer, int desiredWidth, int desiredHeight) { + if (renderable instanceof XSEventStreamIOObject) { + XSEventStreamIOObject object = (XSEventStreamIOObject) renderable; + + JComponent panel = runVisualization(object.getArtifact(), + object.getPluginContext()); + // put the thing in its own panel + return new DefaultComponentRenderable(panel); + } + return new DefaultReadable("No XSEventStream visualization available."); + } + + private JComponent runVisualization(XSStream artifact, + PluginContext pluginContext) { + return XSStreamVisualizer.visualize(pluginContext, artifact); + } +} diff --git a/src/main/java/org/rapidprom/ioobjects/AcceptingPetriNetArrayIOObject.java b/src/main/java/org/rapidprom/ioobjects/AcceptingPetriNetArrayIOObject.java new file mode 100644 index 0000000..8767d53 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/AcceptingPetriNetArrayIOObject.java @@ -0,0 +1,17 @@ +package org.rapidprom.ioobjects; + +import org.processmining.acceptingpetrinet.models.AcceptingPetriNetArray; +import org.processmining.framework.plugin.PluginContext; +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; + +public class AcceptingPetriNetArrayIOObject + extends AbstractRapidProMIOObject { + + private static final long serialVersionUID = 3428618552462283666L; + + public AcceptingPetriNetArrayIOObject(AcceptingPetriNetArray t, + PluginContext context) { + super(t, context); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjects/AcceptingPetriNetIOObject.java b/src/main/java/org/rapidprom/ioobjects/AcceptingPetriNetIOObject.java new file mode 100644 index 0000000..0ad5da1 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/AcceptingPetriNetIOObject.java @@ -0,0 +1,17 @@ +package org.rapidprom.ioobjects; + +import org.processmining.acceptingpetrinet.models.AcceptingPetriNet; +import 
org.processmining.framework.plugin.PluginContext; +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; + +public class AcceptingPetriNetIOObject + extends AbstractRapidProMIOObject { + + private static final long serialVersionUID = 7775793727750096919L; + + public AcceptingPetriNetIOObject(AcceptingPetriNet t, + PluginContext context) { + super(t, context); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjects/BPMNIOObject.java b/src/main/java/org/rapidprom/ioobjects/BPMNIOObject.java new file mode 100644 index 0000000..5e7999d --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/BPMNIOObject.java @@ -0,0 +1,14 @@ +package org.rapidprom.ioobjects; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.models.graphbased.directed.bpmn.BPMNDiagram; +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; + +public class BPMNIOObject extends AbstractRapidProMIOObject { + + private static final long serialVersionUID = -6452670552752770876L; + + public BPMNIOObject(BPMNDiagram t, PluginContext context) { + super(t, context); + } +} diff --git a/src/main/java/org/rapidprom/ioobjects/CPNModelIOObject.java b/src/main/java/org/rapidprom/ioobjects/CPNModelIOObject.java new file mode 100644 index 0000000..373f851 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/CPNModelIOObject.java @@ -0,0 +1,16 @@ +package org.rapidprom.ioobjects; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.plugins.cpnet.ColouredPetriNet; +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; + +public class CPNModelIOObject extends + AbstractRapidProMIOObject { + + private static final long serialVersionUID = 4158861487079429809L; + + public CPNModelIOObject(ColouredPetriNet t, PluginContext context) { + super(t, context); + } + +} \ No newline at end of file diff --git a/src/main/java/org/rapidprom/ioobjects/FuzzyAnimationIOObject.java b/src/main/java/org/rapidprom/ioobjects/FuzzyAnimationIOObject.java new file mode 100644 index 0000000..bb4bd69 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/FuzzyAnimationIOObject.java @@ -0,0 +1,16 @@ +package org.rapidprom.ioobjects; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.plugins.fuzzymodel.anim.FuzzyAnimation; +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; + +public class FuzzyAnimationIOObject extends + AbstractRapidProMIOObject { + + private static final long serialVersionUID = -582044080446972654L; + + public FuzzyAnimationIOObject(FuzzyAnimation t, PluginContext context) { + super(t, context); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjects/HeuristicsNetIOObject.java b/src/main/java/org/rapidprom/ioobjects/HeuristicsNetIOObject.java new file mode 100644 index 0000000..58aaed9 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/HeuristicsNetIOObject.java @@ -0,0 +1,16 @@ +package org.rapidprom.ioobjects; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.models.heuristics.HeuristicsNet; +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; + +public class HeuristicsNetIOObject extends AbstractRapidProMIOObject{ + + private static final long serialVersionUID = -6715830623930619256L; + + public HeuristicsNetIOObject(HeuristicsNet t, PluginContext context) { + super(t, context); + } + + +} diff --git a/src/main/java/org/rapidprom/ioobjects/InteractiveMinerLauncherIOObject.java 
b/src/main/java/org/rapidprom/ioobjects/InteractiveMinerLauncherIOObject.java new file mode 100644 index 0000000..307137f --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/InteractiveMinerLauncherIOObject.java @@ -0,0 +1,17 @@ +package org.rapidprom.ioobjects; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.plugins.inductiveVisualMiner.InductiveVisualMiner.InteractiveMinerLauncher; +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; + +public class InteractiveMinerLauncherIOObject extends + AbstractRapidProMIOObject { + + private static final long serialVersionUID = 869496066868352283L; + + public InteractiveMinerLauncherIOObject(InteractiveMinerLauncher t, + PluginContext context) { + super(t, context); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjects/ManifestIOObject.java b/src/main/java/org/rapidprom/ioobjects/ManifestIOObject.java new file mode 100644 index 0000000..8720760 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/ManifestIOObject.java @@ -0,0 +1,15 @@ +package org.rapidprom.ioobjects; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.plugins.petrinet.manifestreplayresult.Manifest; +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; + +public class ManifestIOObject extends AbstractRapidProMIOObject { + + private static final long serialVersionUID = -4626719613934848329L; + + public ManifestIOObject(Manifest t, PluginContext context) { + super(t, context); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjects/MetricsRepositoryIOObject.java b/src/main/java/org/rapidprom/ioobjects/MetricsRepositoryIOObject.java new file mode 100644 index 0000000..b4572d5 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/MetricsRepositoryIOObject.java @@ -0,0 +1,15 @@ +package org.rapidprom.ioobjects; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.models.graphbased.directed.fuzzymodel.metrics.MetricsRepository; +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; + +public class MetricsRepositoryIOObject extends AbstractRapidProMIOObject{ + + private static final long serialVersionUID = 6352054321839117409L; + + public MetricsRepositoryIOObject(MetricsRepository t, PluginContext context) { + super(t, context); + + } +} diff --git a/src/main/java/org/rapidprom/ioobjects/PNRepResultIOObject.java b/src/main/java/org/rapidprom/ioobjects/PNRepResultIOObject.java new file mode 100644 index 0000000..e0c31a8 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/PNRepResultIOObject.java @@ -0,0 +1,37 @@ +package org.rapidprom.ioobjects; + +import org.deckfour.xes.model.XLog; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.plugins.connectionfactories.logpetrinet.TransEvClassMapping; +import org.processmining.plugins.petrinet.replayresult.PNRepResult; +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; + +public class PNRepResultIOObject extends AbstractRapidProMIOObject { + + private static final long serialVersionUID = -543887352437614848L; + + private PetriNetIOObject pn; + private XLog log; + private TransEvClassMapping mapping; + + public PetriNetIOObject getPn() { + return pn; + } + + public XLog getXLog() { + return log; + } + + public TransEvClassMapping getMapping() { + return mapping; + } + + public PNRepResultIOObject(PNRepResult t, PluginContext context, PetriNetIOObject pn, XLog log, + TransEvClassMapping mapping) { + super(t, context); + this.pn = pn; + 
this.log = log; + this.mapping = mapping; + } + +} diff --git a/src/main/java/org/rapidprom/ioobjects/PetriNetIOObject.java b/src/main/java/org/rapidprom/ioobjects/PetriNetIOObject.java new file mode 100644 index 0000000..500dbd7 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/PetriNetIOObject.java @@ -0,0 +1,94 @@ +package org.rapidprom.ioobjects; + +import java.util.ArrayList; +import java.util.List; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.models.connections.petrinets.behavioral.FinalMarkingConnection; +import org.processmining.models.connections.petrinets.behavioral.InitialMarkingConnection; +import org.processmining.models.graphbased.directed.petrinet.Petrinet; +import org.processmining.models.graphbased.directed.petrinet.elements.Place; +import org.processmining.models.semantics.petrinet.Marking; +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; + +import javassist.tools.rmi.ObjectNotFoundException; + +/** + * @author abolt + * + * Initial markings are referenced directly in the petrinet ioobject. + */ +public class PetriNetIOObject extends AbstractRapidProMIOObject { + + private static final long serialVersionUID = -4574922526705299348L; + private Marking initialMarking = null; + private Marking finalMarking = null; + + public PetriNetIOObject(Petrinet t, Marking i, Marking f, + PluginContext context) { + super(t, context); + setInitialMarking(i); + setFinalMarking(f); + } + + public Marking getInitialMarking() throws ObjectNotFoundException { + if (initialMarking != null) + return initialMarking; + else + throw new ObjectNotFoundException( + "There is no initial marking associated with this petri net"); + } + + public boolean hasInitialMarking() { + if (initialMarking != null) + return true; + else + return false; + } + + public void setInitialMarking(Marking marking) { + initialMarking = marking; + if (initialMarking != null) + this.context.addConnection(new InitialMarkingConnection( + this.getArtifact(), initialMarking)); + } + + public Marking getFinalMarking() throws ObjectNotFoundException { + if (finalMarking != null) + return finalMarking; + else + throw new ObjectNotFoundException( + "There is no final marking associated with this petri net"); + } + + public boolean hasFinalMarking() { + if (finalMarking != null) + return true; + else + return false; + } + + public Marking[] getFinalMarkingAsArray() throws ObjectNotFoundException { + + List fM = new ArrayList(); + if (finalMarking.size() > 0) { + for (Place place : finalMarking) { + Marking m = new Marking(); + m.add(place); + fM.add(m); + } + return fM.toArray(new Marking[finalMarking.size()]); + } + else + return new Marking[]{new Marking()}; + + } + + public void setFinalMarking(Marking marking) { + finalMarking = marking; + if (finalMarking != null) + this.context.addConnection(new FinalMarkingConnection( + this.getArtifact(), finalMarking)); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjects/PetriNetListIOObject.java b/src/main/java/org/rapidprom/ioobjects/PetriNetListIOObject.java new file mode 100644 index 0000000..13725bb --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/PetriNetListIOObject.java @@ -0,0 +1,15 @@ +package org.rapidprom.ioobjects; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.petrinets.list.PetriNetList; +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; + +public class PetriNetListIOObject extends AbstractRapidProMIOObject { + + private static final long 
serialVersionUID = -9118010607628257933L; + + public PetriNetListIOObject(PetriNetList t, PluginContext context) { + super(t, context); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjects/PomPomViewIOObject.java b/src/main/java/org/rapidprom/ioobjects/PomPomViewIOObject.java new file mode 100644 index 0000000..393f36a --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/PomPomViewIOObject.java @@ -0,0 +1,15 @@ +package org.rapidprom.ioobjects; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.plugins.pompom.PomPomView; +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; + +public class PomPomViewIOObject extends AbstractRapidProMIOObject { + + private static final long serialVersionUID = 195421157097249716L; + + public PomPomViewIOObject(PomPomView t, PluginContext context) { + super(t, context); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjects/PredictorIOObject.java b/src/main/java/org/rapidprom/ioobjects/PredictorIOObject.java new file mode 100644 index 0000000..09071f1 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/PredictorIOObject.java @@ -0,0 +1,14 @@ +package org.rapidprom.ioobjects; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.prediction.Predictor; +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; + +public class PredictorIOObject extends AbstractRapidProMIOObject { + + private static final long serialVersionUID = -1708411560288054840L; + + public PredictorIOObject(Predictor t, PluginContext context) { + super(t, context); + } +} diff --git a/src/main/java/org/rapidprom/ioobjects/ProcessTreeIOObject.java b/src/main/java/org/rapidprom/ioobjects/ProcessTreeIOObject.java new file mode 100644 index 0000000..cbf91df --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/ProcessTreeIOObject.java @@ -0,0 +1,15 @@ +package org.rapidprom.ioobjects; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.processtree.ProcessTree; +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; + +public class ProcessTreeIOObject extends AbstractRapidProMIOObject { + + private static final long serialVersionUID = 780816193914598555L; + + public ProcessTreeIOObject(ProcessTree t, PluginContext context) { + super(t, context); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjects/RapidProMIOObject.java b/src/main/java/org/rapidprom/ioobjects/RapidProMIOObject.java new file mode 100644 index 0000000..a2afac7 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/RapidProMIOObject.java @@ -0,0 +1,7 @@ +package org.rapidprom.ioobjects; + +public interface RapidProMIOObject { + + public T getArtifact(); + +} diff --git a/src/main/java/org/rapidprom/ioobjects/ReachabilityGraphIOObject.java b/src/main/java/org/rapidprom/ioobjects/ReachabilityGraphIOObject.java new file mode 100644 index 0000000..42f7d79 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/ReachabilityGraphIOObject.java @@ -0,0 +1,17 @@ +package org.rapidprom.ioobjects; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.models.graphbased.directed.transitionsystem.ReachabilityGraph; +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; + +public class ReachabilityGraphIOObject extends AbstractRapidProMIOObject { + + private static final long serialVersionUID = 1756769630354735278L; + + public ReachabilityGraphIOObject(ReachabilityGraph t, PluginContext context) { + super(t, context); + } + + + +} diff --git 
a/src/main/java/org/rapidprom/ioobjects/SocialNetworkIOObject.java b/src/main/java/org/rapidprom/ioobjects/SocialNetworkIOObject.java new file mode 100644 index 0000000..4dfcd61 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/SocialNetworkIOObject.java @@ -0,0 +1,16 @@ +package org.rapidprom.ioobjects; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.models.graphbased.directed.socialnetwork.SocialNetwork; +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; + +public class SocialNetworkIOObject extends + AbstractRapidProMIOObject { + + private static final long serialVersionUID = 4434539088563859762L; + + public SocialNetworkIOObject(SocialNetwork t, PluginContext context) { + super(t, context); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjects/TransitionSystemIOObject.java b/src/main/java/org/rapidprom/ioobjects/TransitionSystemIOObject.java new file mode 100644 index 0000000..3b91630 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/TransitionSystemIOObject.java @@ -0,0 +1,18 @@ +package org.rapidprom.ioobjects; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.plugins.transitionsystem.miner.TSMinerTransitionSystem; +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; + +public class TransitionSystemIOObject extends AbstractRapidProMIOObject { + + + private static final long serialVersionUID = 7513635369374245933L; + + public TransitionSystemIOObject(TSMinerTransitionSystem t, + PluginContext context) { + super(t, context); + } + + +} diff --git a/src/main/java/org/rapidprom/ioobjects/WoflanDiagnosisIOObject.java b/src/main/java/org/rapidprom/ioobjects/WoflanDiagnosisIOObject.java new file mode 100644 index 0000000..39aa95c --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/WoflanDiagnosisIOObject.java @@ -0,0 +1,16 @@ +package org.rapidprom.ioobjects; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.plugins.petrinet.behavioralanalysis.woflan.WoflanDiagnosis; +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; + +public class WoflanDiagnosisIOObject extends + AbstractRapidProMIOObject { + + private static final long serialVersionUID = -3834518107921166815L; + + public WoflanDiagnosisIOObject(WoflanDiagnosis t, PluginContext context) { + super(t, context); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjects/XLogIOObject.java b/src/main/java/org/rapidprom/ioobjects/XLogIOObject.java new file mode 100644 index 0000000..1d3670a --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/XLogIOObject.java @@ -0,0 +1,15 @@ +package org.rapidprom.ioobjects; + +import org.deckfour.xes.model.XLog; +import org.processmining.framework.plugin.PluginContext; +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; + +public class XLogIOObject extends AbstractRapidProMIOObject { + + private static final long serialVersionUID = -1323690731245887615L; + + public XLogIOObject(XLog t, PluginContext context) { + super(t, context); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjects/abstr/AbstractRapidProMIOObject.java b/src/main/java/org/rapidprom/ioobjects/abstr/AbstractRapidProMIOObject.java new file mode 100644 index 0000000..64960bd --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/abstr/AbstractRapidProMIOObject.java @@ -0,0 +1,29 @@ +package org.rapidprom.ioobjects.abstr; + +import org.processmining.framework.plugin.PluginContext; +import org.rapidprom.ioobjects.RapidProMIOObject; + +import 
com.rapidminer.operator.ResultObjectAdapter; + +public abstract class AbstractRapidProMIOObject extends ResultObjectAdapter + implements RapidProMIOObject { + + private static final long serialVersionUID = -7924883865640486269L; + + protected final T artifact; + + protected final PluginContext context; + + public PluginContext getPluginContext() { + return context; + } + + public AbstractRapidProMIOObject(final T t, final PluginContext context) { + this.artifact = t; + this.context = context; + } + + public T getArtifact() { + return artifact; + } +} diff --git a/src/main/java/org/rapidprom/ioobjects/streams/XSAuthorIOObject.java b/src/main/java/org/rapidprom/ioobjects/streams/XSAuthorIOObject.java new file mode 100644 index 0000000..93dc555 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/streams/XSAuthorIOObject.java @@ -0,0 +1,16 @@ +package org.rapidprom.ioobjects.streams; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.stream.core.interfaces.XSAuthor; +import org.processmining.stream.core.interfaces.XSDataPacket; + +public class XSAuthorIOObject> + extends XSWriterXSRunnableIOObject> { + + private static final long serialVersionUID = 36779985659303201L; + + public XSAuthorIOObject(XSAuthor t, PluginContext context) { + super(t, context); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjects/streams/XSReaderIOObject.java b/src/main/java/org/rapidprom/ioobjects/streams/XSReaderIOObject.java new file mode 100644 index 0000000..a97393d --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/streams/XSReaderIOObject.java @@ -0,0 +1,16 @@ +package org.rapidprom.ioobjects.streams; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.stream.core.interfaces.XSDataPacket; +import org.processmining.stream.core.interfaces.XSReader; + +public class XSReaderIOObject, R> + extends XSWritableXSRunnableIOObject> { + + private static final long serialVersionUID = -7862503192309811538L; + + public XSReaderIOObject(XSReader reader, PluginContext context) { + super(reader, context); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjects/streams/XSRunnableIOObject.java b/src/main/java/org/rapidprom/ioobjects/streams/XSRunnableIOObject.java new file mode 100644 index 0000000..ccf9a0b --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/streams/XSRunnableIOObject.java @@ -0,0 +1,16 @@ +package org.rapidprom.ioobjects.streams; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.stream.core.interfaces.XSRunnable; +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; + +public class XSRunnableIOObject + extends AbstractRapidProMIOObject { + + private static final long serialVersionUID = -173963109922143034L; + + public XSRunnableIOObject(T t, PluginContext context) { + super(t, context); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjects/streams/XSStreamAnalyzerIOObject.java b/src/main/java/org/rapidprom/ioobjects/streams/XSStreamAnalyzerIOObject.java new file mode 100644 index 0000000..ef4b9e3 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/streams/XSStreamAnalyzerIOObject.java @@ -0,0 +1,17 @@ +package org.rapidprom.ioobjects.streams; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.stream.core.interfaces.XSDataPacket; +import org.processmining.streamanalysis.core.interfaces.XSStreamAnalyzer; + +public class XSStreamAnalyzerIOObject, R, REF> + extends XSWritableXSRunnableIOObject> { + + private static final long 
serialVersionUID = 7205510572450073788L; + + public XSStreamAnalyzerIOObject(XSStreamAnalyzer t, + PluginContext context) { + super(t, context); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjects/streams/XSWritableXSRunnableIOObject.java b/src/main/java/org/rapidprom/ioobjects/streams/XSWritableXSRunnableIOObject.java new file mode 100644 index 0000000..a22f834 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/streams/XSWritableXSRunnableIOObject.java @@ -0,0 +1,17 @@ +package org.rapidprom.ioobjects.streams; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.stream.core.interfaces.XSDataPacket; +import org.processmining.stream.core.interfaces.XSRunnable; +import org.processmining.stream.core.interfaces.XSWritable; + +public class XSWritableXSRunnableIOObject> & XSRunnable> + extends XSRunnableIOObject { + + private static final long serialVersionUID = -8550382465247394570L; + + public XSWritableXSRunnableIOObject(T t, PluginContext context) { + super(t, context); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjects/streams/XSWriterXSRunnableIOObject.java b/src/main/java/org/rapidprom/ioobjects/streams/XSWriterXSRunnableIOObject.java new file mode 100644 index 0000000..4b415e3 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/streams/XSWriterXSRunnableIOObject.java @@ -0,0 +1,17 @@ +package org.rapidprom.ioobjects.streams; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.stream.core.interfaces.XSDataPacket; +import org.processmining.stream.core.interfaces.XSRunnable; +import org.processmining.stream.core.interfaces.XSWriter; + +public class XSWriterXSRunnableIOObject> & XSRunnable> + extends XSRunnableIOObject { + + private static final long serialVersionUID = 2264908736826786732L; + + public XSWriterXSRunnableIOObject(T t, PluginContext context) { + super(t, context); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjects/streams/event/XSEventStreamIOObject.java b/src/main/java/org/rapidprom/ioobjects/streams/event/XSEventStreamIOObject.java new file mode 100644 index 0000000..b929b1b --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/streams/event/XSEventStreamIOObject.java @@ -0,0 +1,16 @@ +package org.rapidprom.ioobjects.streams.event; + +import org.processmining.eventstream.core.interfaces.XSEventStream; +import org.processmining.framework.plugin.PluginContext; +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; + +public class XSEventStreamIOObject + extends AbstractRapidProMIOObject { + + public XSEventStreamIOObject(XSEventStream t, PluginContext context) { + super(t, context); + } + + private static final long serialVersionUID = -6518688117417296076L; + +} diff --git a/src/main/java/org/rapidprom/ioobjects/streams/event/XSEventStreamToAcceptingPetriNetReaderIOObject.java b/src/main/java/org/rapidprom/ioobjects/streams/event/XSEventStreamToAcceptingPetriNetReaderIOObject.java new file mode 100644 index 0000000..18b9645 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/streams/event/XSEventStreamToAcceptingPetriNetReaderIOObject.java @@ -0,0 +1,20 @@ +package org.rapidprom.ioobjects.streams.event; + +import org.processmining.acceptingpetrinet.models.AcceptingPetriNet; +import org.processmining.eventstream.core.interfaces.XSEvent; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.stream.core.interfaces.XSReader; +import org.rapidprom.ioobjects.streams.XSReaderIOObject; + +public class 
XSEventStreamToAcceptingPetriNetReaderIOObject + extends XSReaderIOObject { + + private static final long serialVersionUID = -3049278931265566812L; + + public XSEventStreamToAcceptingPetriNetReaderIOObject( + XSReader reader, + PluginContext context) { + super(reader, context); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjects/streams/event/XSEventStreamToProcessTreePetriNetReaderIOObject.java b/src/main/java/org/rapidprom/ioobjects/streams/event/XSEventStreamToProcessTreePetriNetReaderIOObject.java new file mode 100644 index 0000000..8e98a1e --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/streams/event/XSEventStreamToProcessTreePetriNetReaderIOObject.java @@ -0,0 +1,19 @@ +package org.rapidprom.ioobjects.streams.event; + +import org.processmining.eventstream.core.interfaces.XSEvent; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.processtree.ProcessTree; +import org.processmining.stream.core.interfaces.XSReader; +import org.rapidprom.ioobjects.streams.XSReaderIOObject; + +public class XSEventStreamToProcessTreePetriNetReaderIOObject + extends XSReaderIOObject { + + private static final long serialVersionUID = -7913307437313684794L; + + public XSEventStreamToProcessTreePetriNetReaderIOObject( + XSReader reader, PluginContext context) { + super(reader, context); + } + +} diff --git a/src/main/java/org/rapidprom/ioobjects/streams/event/XSStaticXSEventStreamIOObject.java b/src/main/java/org/rapidprom/ioobjects/streams/event/XSStaticXSEventStreamIOObject.java new file mode 100644 index 0000000..44e5a18 --- /dev/null +++ b/src/main/java/org/rapidprom/ioobjects/streams/event/XSStaticXSEventStreamIOObject.java @@ -0,0 +1,17 @@ +package org.rapidprom.ioobjects.streams.event; + +import org.processmining.eventstream.core.interfaces.XSStaticXSEventStream; +import org.processmining.framework.plugin.PluginContext; +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; + +public class XSStaticXSEventStreamIOObject + extends AbstractRapidProMIOObject { + + private static final long serialVersionUID = 3472668931720519972L; + + public XSStaticXSEventStreamIOObject(XSStaticXSEventStream t, + PluginContext context) { + super(t, context); + } + +} diff --git a/src/main/java/org/rapidprom/operators/abstr/AbstractInductiveMinerOperator.java b/src/main/java/org/rapidprom/operators/abstr/AbstractInductiveMinerOperator.java new file mode 100644 index 0000000..52b6db0 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/abstr/AbstractInductiveMinerOperator.java @@ -0,0 +1,80 @@ +package org.rapidprom.operators.abstr; + +import java.util.List; +import org.processmining.plugins.InductiveMiner.mining.MiningParameters; +import org.processmining.plugins.InductiveMiner.mining.MiningParametersEKS; +import org.processmining.plugins.InductiveMiner.mining.MiningParametersIM; +import org.processmining.plugins.InductiveMiner.mining.MiningParametersIMi; +import org.processmining.plugins.InductiveMiner.mining.MiningParametersIMin; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeCategory; +import com.rapidminer.parameter.ParameterTypeDouble; +import com.rapidminer.parameter.UndefinedParameterError; + +public abstract class AbstractInductiveMinerOperator + extends AbstractRapidProMDiscoveryOperator { + + public AbstractInductiveMinerOperator(OperatorDescription description) { + super(description); + // TODO Auto-generated constructor stub + } + + private static 
final String PARAMETER_1_KEY = "Variation", + PARAMETER_1_DESCR = "The \"Inductive Miner\" variation is described in: " + + "http://dx.doi.org/10.1007/978-3-642-38697-8_17. \nThe \"Inductive Miner" + + " - Infrequent\" variation is described in: " + + "http://dx.doi.org/10.1007/978-3-319-06257-0_6. \nThe \"Inductive Miner" + + " - Incompleteness\" variation is described in:" + + "http://dx.doi.org/10.1007/978-3-319-07734-5_6. \nThe \"Inductive Miner" + + " - exhaustive K-successor\" variation applies a brute-force approach: " + + "in each recursion, it tries all 4*2^n cuts and measures which one " + + "fits the event log best. It measures this using the k-successor, " + + "which is a relation between pairs of activities, denoting how many " + + "events are in between them in any trace at minimum.", + PARAMETER_2_KEY = "Noise Threshold", + PARAMETER_2_DESCR = "This threshold represents the percentage of infrequent (noisy) " + + "traces that are filtered out. The remaining traces are used to discover a model. "; + + private static final String IM = "Inductive Miner", + IMi = "Inductive Miner - Infrequent", + IMin = "Inductive Miner - Incompleteness", + IMeks = "Inductive Miner - exhaustive K-successor"; + + public List getParameterTypes() { + + List parameterTypes = super.getParameterTypes(); + + ParameterTypeCategory parameter1 = new ParameterTypeCategory( + PARAMETER_1_KEY, PARAMETER_1_DESCR, + new String[] { IM, IMi, IMin, IMeks }, 1); + parameterTypes.add(parameter1); + + ParameterTypeDouble parameter2 = new ParameterTypeDouble( + PARAMETER_2_KEY, PARAMETER_2_DESCR, 0, 1, 0.2); + parameterTypes.add(parameter2); + + return parameterTypes; + } + + protected MiningParameters getConfiguration() { + MiningParameters miningParameters = null; + try { + if (getParameterAsString(PARAMETER_1_KEY).matches(IM)) + miningParameters = new MiningParametersIM(); + else if (getParameterAsString(PARAMETER_1_KEY).matches(IMi)) + miningParameters = new MiningParametersIMi(); + else if (getParameterAsString(PARAMETER_1_KEY).matches(IMin)) + miningParameters = new MiningParametersIMin(); + else if (getParameterAsString(PARAMETER_1_KEY).matches(IMeks)) + miningParameters = new MiningParametersEKS(); + + miningParameters.setNoiseThreshold( + (float) getParameterAsDouble(PARAMETER_2_KEY)); + miningParameters.setClassifier(getXEventClassifier()); + } catch (UndefinedParameterError e) { + e.printStackTrace(); + } + return miningParameters; + } +} diff --git a/src/main/java/org/rapidprom/operators/abstr/AbstractRapidProMDiscoveryOperator.java b/src/main/java/org/rapidprom/operators/abstr/AbstractRapidProMDiscoveryOperator.java new file mode 100644 index 0000000..783d7c6 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/abstr/AbstractRapidProMDiscoveryOperator.java @@ -0,0 +1,67 @@ +package org.rapidprom.operators.abstr; + +import java.util.List; + +import org.deckfour.xes.classification.XEventAndClassifier; +import org.deckfour.xes.classification.XEventClassifier; +import org.deckfour.xes.classification.XEventLifeTransClassifier; +import org.deckfour.xes.classification.XEventNameClassifier; +import org.deckfour.xes.model.XLog; +import org.rapidprom.ioobjects.XLogIOObject; +import org.rapidprom.parameter.ParameterTypeXEventClassifierCategory; + +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.UserError; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.parameter.ParameterType; +import 
com.rapidminer.parameter.UndefinedParameterError; + +public class AbstractRapidProMDiscoveryOperator extends Operator { + + private InputPort inputXLog = getInputPorts() + .createPort("event log (ProM Event Log)", XLogIOObject.class); + + private static final String PARAMETER_KEY_EVENT_CLASSIFIER = "event_classifier"; + private static final String PARAMETER_DESC_EVENT_CLASSIFIER = "Specifies how to identify events within the event log, as defined in http://www.xes-standard.org/"; + private static XEventClassifier[] PARAMETER_DEFAULT_CLASSIFIERS = new XEventClassifier[] { + new XEventAndClassifier(new XEventNameClassifier(), new XEventLifeTransClassifier())}; + + public AbstractRapidProMDiscoveryOperator(OperatorDescription description) { + super(description); + // TODO: make the precondition give a more meaningful warning if the + // metadata is null + // inputXLog.addPrecondition(new + // XLogContainsXEventClassifiersPreCondition(inputXLog)); + } + + @Override + public List getParameterTypes() { + List params = super.getParameterTypes(); + params.add(new ParameterTypeXEventClassifierCategory( + PARAMETER_KEY_EVENT_CLASSIFIER, PARAMETER_DESC_EVENT_CLASSIFIER, + new String[] { PARAMETER_DEFAULT_CLASSIFIERS[0].toString() }, + PARAMETER_DEFAULT_CLASSIFIERS, 0, false, inputXLog)); + return params; + } + + protected XEventClassifier getXEventClassifier() + throws UndefinedParameterError { + ParameterTypeXEventClassifierCategory eClassParam = (ParameterTypeXEventClassifierCategory) getParameterType( + PARAMETER_KEY_EVENT_CLASSIFIER); + try { + return eClassParam + .valueOf(getParameterAsInt(PARAMETER_KEY_EVENT_CLASSIFIER)); + } catch (IndexOutOfBoundsException e) { + throw new UndefinedParameterError( + "The index chosen is no longer available"); + } + + } + + protected XLog getXLog() throws UserError { + return ((XLogIOObject) inputXLog.getData(XLogIOObject.class)) + .getArtifact(); + } + +} diff --git a/src/main/java/org/rapidprom/operators/abstr/AbstractRapidProMExporterOperator.java b/src/main/java/org/rapidprom/operators/abstr/AbstractRapidProMExporterOperator.java new file mode 100644 index 0000000..2a11842 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/abstr/AbstractRapidProMExporterOperator.java @@ -0,0 +1,102 @@ +package org.rapidprom.operators.abstr; + +import java.io.File; +import java.io.IOException; +import java.util.Arrays; +import java.util.List; + +import org.rapidprom.ioobjects.abstr.AbstractRapidProMIOObject; +import org.rapidprom.util.IOUtils; + +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.io.AbstractWriter; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeCategory; +import com.rapidminer.parameter.ParameterTypeDirectory; +import com.rapidminer.parameter.ParameterTypeString; + +/** + * + * @param + * IOObject + * @param + * Internal object of IOObject (i.e. XLogIOObject => T2 == XLog) + * @param + * FileFormat, assumes: toString(); gives the file format. 
+ */ +public abstract class AbstractRapidProMExporterOperator, T2, F> + extends AbstractWriter { + + protected final static String PARAMETER_KEY_FOLDER = "folder"; + protected final static String PARAMETER_DESC_FOLDER = "The folder where the file should be stored."; + + protected final static String PARAMETER_KEY_FILE_NAME = "file_name"; + protected final static String PARAMETER_DESC_FILE_NAME = "The file name of the exported object."; + + protected final static String PARAMETER_KEY_FILE_FORMAT = "file_format"; + protected final static String PARAMETER_DESC_FILE_FORMAT = "The file format of the exported object."; + + protected final F[] PARAMETER_VALUES_FILE_FORMAT; + protected final F defaultFileFormat; + + public AbstractRapidProMExporterOperator(OperatorDescription description, + Class savedClass, F[] fileFormats, F defaultFileFormat) { + super(description, savedClass); + assert (Arrays.asList(fileFormats).contains(defaultFileFormat)); + PARAMETER_VALUES_FILE_FORMAT = fileFormats; + this.defaultFileFormat = defaultFileFormat; + } + + @Override + public T write(T ioobject) throws OperatorException { + try { + F format = PARAMETER_VALUES_FILE_FORMAT[getParameterAsInt( + PARAMETER_KEY_FILE_FORMAT)]; + File target = IOUtils.prepareTargetFile( + getParameterAsFile(PARAMETER_KEY_FOLDER).getCanonicalPath(), + getParameterAsString(PARAMETER_KEY_FILE_NAME), format); + if (target.exists()) { + target.delete(); + } + target.createNewFile(); + writeToFile(target, ioobject.getArtifact(), format); + } catch (IOException e) { + e.printStackTrace(); + } + return ioobject; + } + + protected abstract void writeToFile(File file, T2 object, F format) + throws IOException; + + @Override + public List getParameterTypes() { + List types = super.getParameterTypes(); + ParameterType dir = new ParameterTypeDirectory(PARAMETER_KEY_FOLDER, + PARAMETER_DESC_FOLDER, ""); + dir.setOptional(false); + types.add(dir); + + ParameterTypeString fileNameParam = new ParameterTypeString( + PARAMETER_KEY_FILE_NAME, PARAMETER_DESC_FILE_NAME); + fileNameParam.setExpert(false); + fileNameParam.setOptional(false); + types.add(fileNameParam); + + String[] fileFormatStr = new String[PARAMETER_VALUES_FILE_FORMAT.length]; + for (int i = 0; i < fileFormatStr.length; i++) { + fileFormatStr[i] = PARAMETER_VALUES_FILE_FORMAT[i].toString(); + } + + ParameterTypeCategory fileFormat = new ParameterTypeCategory( + PARAMETER_KEY_FILE_FORMAT, PARAMETER_DESC_FILE_FORMAT, + fileFormatStr, Arrays.asList(PARAMETER_VALUES_FILE_FORMAT) + .indexOf(defaultFileFormat)); + fileFormat.setExpert(false); + fileFormat.setOptional(false); + + types.add(fileFormat); + return types; + } +} diff --git a/src/main/java/org/rapidprom/operators/abstr/AbstractRapidProMExtractorOperator.java b/src/main/java/org/rapidprom/operators/abstr/AbstractRapidProMExtractorOperator.java new file mode 100644 index 0000000..6bc98e9 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/abstr/AbstractRapidProMExtractorOperator.java @@ -0,0 +1,160 @@ +package org.rapidprom.operators.abstr; + +import java.util.HashMap; +import java.util.Map; + +import com.rapidminer.operator.Annotations; +import com.rapidminer.operator.IOObject; +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ProcessSetupError.Severity; +import com.rapidminer.operator.io.AbstractReader; +import com.rapidminer.operator.nio.file.FileObject; +import com.rapidminer.operator.ports.InputPort; 
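/*
 * Illustrative sketch (not part of this commit): a minimal concrete exporter built on
 * AbstractRapidProMExporterOperator. The generic bounds of that class are mangled in this
 * diff view, so the sketch assumes the signature
 * AbstractRapidProMExporterOperator<T extends AbstractRapidProMIOObject<T2>, T2, F>,
 * where F.toString() yields the file extension (as the class javadoc states). The operator
 * name, the TextFormat enum, and the toString()-based serialization are hypothetical;
 * the real exporters delegate to the corresponding ProM export plugins instead.
 */
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;

import org.processmining.processtree.ProcessTree;
import org.rapidprom.ioobjects.ProcessTreeIOObject;
import org.rapidprom.operators.abstr.AbstractRapidProMExporterOperator;

import com.rapidminer.operator.OperatorDescription;

public class ExampleProcessTreeExporterOperator extends
        AbstractRapidProMExporterOperator<ProcessTreeIOObject, ProcessTree, ExampleProcessTreeExporterOperator.TextFormat> {

    // toString() of an enum constant is its name, so "PTML" doubles as the file extension.
    public enum TextFormat {
        PTML;
    }

    public ExampleProcessTreeExporterOperator(OperatorDescription description) {
        super(description, ProcessTreeIOObject.class, TextFormat.values(), TextFormat.PTML);
    }

    @Override
    protected void writeToFile(File file, ProcessTree tree, TextFormat format) throws IOException {
        // Placeholder serialization; a real operator would call the matching ProM exporter here.
        Files.write(file.toPath(), tree.toString().getBytes(StandardCharsets.UTF_8));
    }
}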
+import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.MDTransformationRule; +import com.rapidminer.operator.ports.metadata.MetaData; +import com.rapidminer.operator.ports.metadata.MetaDataError; +import com.rapidminer.operator.ports.metadata.SimpleMetaDataError; +import com.rapidminer.parameter.UndefinedParameterError; +import com.rapidminer.tools.Observable; +import com.rapidminer.tools.Observer; + +/** + * The Abstract Extractor uses a lot of code from the {@link AbstractReader} + * class in order to be able to produce meta data. Also it predefines an input + * and output port + * + * @author svzelst + * + * @param + */ +public abstract class AbstractRapidProMExtractorOperator + extends Operator { + + protected final InputPort inputfile = getInputPorts().createPort("file", + FileObject.class); + protected final OutputPort outputPort = getOutputPorts() + .createPort("output"); + protected final Class generatedClass; + + protected boolean cacheDirty = true; + protected MetaData cachedMetaData; + protected MetaDataError cachedError; + + public AbstractRapidProMExtractorOperator(OperatorDescription description, + Class generatedClass) { + super(description); + this.generatedClass = generatedClass; + getTransformer().addRule(new MDTransformationRule() { + @Override + public void transformMD() { + if (cacheDirty || !isMetaDataCacheable()) { + try { + cachedMetaData = AbstractRapidProMExtractorOperator.this + .getGeneratedMetaData(); + cachedError = null; + } catch (OperatorException e) { + cachedMetaData = new MetaData( + AbstractRapidProMExtractorOperator.this.generatedClass); + String msg = e.getMessage(); + if ((msg == null) || (msg.length() == 0)) { + msg = e.toString(); + } + // will be added below + cachedError = new SimpleMetaDataError(Severity.WARNING, + outputPort, "cannot_create_exampleset_metadata", + new Object[] { msg }); + } + if (cachedMetaData != null) { + cachedMetaData.addToHistory(outputPort); + } + cacheDirty = false; + } + outputPort.deliverMD(cachedMetaData); + if (cachedError != null) { + outputPort.addError(cachedError); + } + } + }); + observeParameters(); + } + + protected void observeParameters() { + // we add this as the first observer. otherwise, this change is not seen + // by the resulting meta data transformation + getParameters().addObserverAsFirst(new Observer() { + @Override + public void update(Observable observable, String arg) { + cacheDirty = true; + } + }, false); + } + + public MetaData getGeneratedMetaData() throws OperatorException { + return new MetaData(generatedClass); + } + + protected boolean isMetaDataCacheable() { + return false; + } + + /** + * Creates (or reads) the ExampleSet that will be returned by + * {@link #apply()}. + */ + public abstract T read() throws OperatorException; + + @Override + public void doWork() throws OperatorException { + final T result = read(); + addAnnotations(result); + outputPort.deliver(result); + } + + /** Describes an operator that can read certain file types. */ + public static class ExtractorDescription { + private final String fileExtension; + private final Class> extractorClass; + /** This parameter must be set to the file name. 
*/ + private final String fileParameterKey; + + public ExtractorDescription(String fileExtension, + Class> extractorClass, + String fileParameterKey) { + super(); + this.fileExtension = fileExtension; + this.extractorClass = extractorClass; + this.fileParameterKey = fileParameterKey; + } + } + + private static final Map EXTRACTOR_DESCRIPTIONS = new HashMap(); + + protected void addAnnotations(T result) { + for (ExtractorDescription ed : EXTRACTOR_DESCRIPTIONS.values()) { + if (ed.extractorClass.equals(this.getClass())) { + if (result.getAnnotations() + .getAnnotation(Annotations.KEY_SOURCE) == null) { + try { + String source = getParameter(ed.fileParameterKey); + if (source != null) { + result.getAnnotations().setAnnotation( + Annotations.KEY_SOURCE, source); + } + } catch (UndefinedParameterError e) { + } + } + return; + } + } + } + + /** Registers an operator that can read files with a given extension. */ + protected static void registerExtractorDescription( + ExtractorDescription rd) { + EXTRACTOR_DESCRIPTIONS.put(rd.fileExtension.toLowerCase(), rd); + } + +} diff --git a/src/main/java/org/rapidprom/operators/abstr/AbstractRapidProMImportOperator.java b/src/main/java/org/rapidprom/operators/abstr/AbstractRapidProMImportOperator.java new file mode 100644 index 0000000..99cb3f1 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/abstr/AbstractRapidProMImportOperator.java @@ -0,0 +1,84 @@ +package org.rapidprom.operators.abstr; + +import java.io.File; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.rapidprom.operators.io.ImportXLogOperator; + +import com.rapidminer.operator.IOObject; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.UserError; +import com.rapidminer.operator.io.AbstractReader; +import com.rapidminer.tools.LogService; + +/** + * + * @author svzelst + * + * @param + */ +public abstract class AbstractRapidProMImportOperator + extends AbstractReader { + + protected final static String PARAMETER_KEY_FILE = "file"; + protected final static String PARAMETER_DESC_FILE = "Select the file you would like to use to import."; + protected Class generatedClass; + + public AbstractRapidProMImportOperator(OperatorDescription description, + Class clazz, String[] supportedExtentions) { + super(description, clazz); + generatedClass = clazz; + registerExtentions(supportedExtentions); + } + + protected boolean checkFileParameterMetaData(String key) throws UserError { + boolean result; + File file = getParameterAsFile(key); + if (!file.exists()) { + throw new UserError(this, "301", file); + } else if (!file.canRead()) { + throw new UserError(this, "302", file, ""); + } else { + result = true; + } + return result; + } + + @Override + public T read() throws OperatorException { + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: importing " + generatedClass.getName()); + long time = System.currentTimeMillis(); + if (checkFileParameterMetaData(PARAMETER_KEY_FILE)) { + try { + T result = read(getFile()); + logger.log(Level.INFO, + "End: importing " + generatedClass.getName() + "(" + + (System.currentTimeMillis() - time) / 1000 + + " sec)"); + return result; + } catch (Exception e) { + e.printStackTrace(); + throw new OperatorException("Import Failed! 
"); + + } + } else { + throw new OperatorException("Import Failed"); + } + } + + protected abstract T read(File file) throws Exception; + + protected File getFile() throws UserError { + return getParameterAsFile(PARAMETER_KEY_FILE); + } + + public void registerExtentions(String[] exts) { + for (String ext : exts) { + AbstractReader.registerReaderDescription(new ReaderDescription(ext, + ImportXLogOperator.class, PARAMETER_KEY_FILE)); + } + } +} diff --git a/src/main/java/org/rapidprom/operators/analysis/AnimateLogInFuzzyModelAnalysisOperator.java b/src/main/java/org/rapidprom/operators/analysis/AnimateLogInFuzzyModelAnalysisOperator.java new file mode 100644 index 0000000..ffd3312 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/analysis/AnimateLogInFuzzyModelAnalysisOperator.java @@ -0,0 +1,79 @@ +package org.rapidprom.operators.analysis; + +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.models.graphbased.directed.fuzzymodel.MutableFuzzyGraph; +import org.processmining.plugins.fuzzymodel.adapter.FuzzyAdapterPlugin; +import org.processmining.plugins.fuzzymodel.anim.FuzzyAnimation; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.FuzzyAnimationIOObject; +import org.rapidprom.ioobjects.MetricsRepositoryIOObject; +import org.rapidprom.ioobjects.XLogIOObject; + +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeInt; +import com.rapidminer.tools.LogService; + +public class AnimateLogInFuzzyModelAnalysisOperator extends Operator { + + private static final String PARAMETER_1_KEY = "Lookahead", + PARAMETER_1_DESCR = "Indicates the size of the window of event that can be potentially connected to an event.", + PARAMETER_2_KEY = "Extra lookahead", + PARAMETER_2_DESCR = "Indicates the number of events that can be potentially connected to an event."; + + private InputPort inputMetricsRepository = getInputPorts().createPort("model (MetricsRepository)", + MetricsRepositoryIOObject.class); + private InputPort inputXLog = getInputPorts().createPort("event log (ProM Event Log)", XLogIOObject.class); + private OutputPort outputFuzzyAnimation = getOutputPorts().createPort("model (FuzzyAnimation)"); + + public AnimateLogInFuzzyModelAnalysisOperator(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(outputFuzzyAnimation, FuzzyAnimationIOObject.class)); + } + + @SuppressWarnings("deprecation") + public void doWork() throws OperatorException { + + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: animate event log in fuzzy model"); + long time = System.currentTimeMillis(); + + PluginContext pluginContext = RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(FuzzyAdapterPlugin.class); + MetricsRepositoryIOObject metricsRepository = inputMetricsRepository.getData(MetricsRepositoryIOObject.class); + XLogIOObject xLog = inputXLog.getData(XLogIOObject.class); + + FuzzyAdapterPlugin adapter = new FuzzyAdapterPlugin(); + MutableFuzzyGraph fuzzyInstance = adapter.mineGeneric(pluginContext, 
metricsRepository.getArtifact()); + + FuzzyAnimation animation = new FuzzyAnimation(pluginContext, fuzzyInstance, xLog.getArtifact(), + getParameterAsInt(PARAMETER_1_KEY), getParameterAsInt(PARAMETER_2_KEY)); + animation.initialize(pluginContext, fuzzyInstance, xLog.getArtifact()); + + outputFuzzyAnimation.deliver(new FuzzyAnimationIOObject(animation, pluginContext)); + logger.log(Level.INFO, + "End: animate event log in fuzzy model (" + (System.currentTimeMillis() - time) / 1000 + " sec)"); + } + + public List getParameterTypes() { + List parameterTypes = super.getParameterTypes(); + + ParameterTypeInt parameterType2 = new ParameterTypeInt(PARAMETER_1_KEY, PARAMETER_1_DESCR, 1, 25, 5, false); + parameterTypes.add(parameterType2); + + ParameterTypeInt parameterType3 = new ParameterTypeInt(PARAMETER_2_KEY, PARAMETER_2_DESCR, 0, 15, 3, false); + parameterTypes.add(parameterType3); + + return parameterTypes; + } + +} diff --git a/src/main/java/org/rapidprom/operators/analysis/CaseDataExtractorOperator.java b/src/main/java/org/rapidprom/operators/analysis/CaseDataExtractorOperator.java new file mode 100644 index 0000000..a9c1de8 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/analysis/CaseDataExtractorOperator.java @@ -0,0 +1,719 @@ +package org.rapidprom.operators.analysis; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.TreeSet; +import java.util.logging.Level; +import java.util.logging.Logger; + +import javax.swing.table.AbstractTableModel; + +import org.deckfour.xes.classification.XEventClass; +import org.deckfour.xes.extension.std.XConceptExtension; +import org.deckfour.xes.extension.std.XLifecycleExtension; +import org.deckfour.xes.extension.std.XOrganizationalExtension; +import org.deckfour.xes.extension.std.XTimeExtension; +import org.deckfour.xes.info.XLogInfo; +import org.deckfour.xes.info.XLogInfoFactory; +import org.deckfour.xes.model.XAttribute; +import org.deckfour.xes.model.XAttributeBoolean; +import org.deckfour.xes.model.XAttributeContinuous; +import org.deckfour.xes.model.XAttributeDiscrete; +import org.deckfour.xes.model.XAttributeLiteral; +import org.deckfour.xes.model.XAttributeTimestamp; +import org.deckfour.xes.model.XEvent; +import org.deckfour.xes.model.XLog; +import org.deckfour.xes.model.XTrace; +import org.rapidprom.ioobjects.XLogIOObject; + +import com.rapidminer.example.Attribute; +import com.rapidminer.example.ExampleSet; +import com.rapidminer.example.table.AttributeFactory; +import com.rapidminer.example.table.DataRow; +import com.rapidminer.example.table.DataRowFactory; +import com.rapidminer.example.table.MemoryExampleTable; +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.io.AbstractDataReader.AttributeColumn; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.AttributeMetaData; +import com.rapidminer.operator.ports.metadata.ExampleSetMetaData; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.operator.ports.metadata.MDInteger; +import com.rapidminer.operator.ports.metadata.SetRelation; +import com.rapidminer.tools.LogService; +import com.rapidminer.tools.Ontology; +import com.rapidminer.tools.math.container.Range; + +public class 
CaseDataExtractorOperator extends Operator { + + /** defining the ports */ + private InputPort inputLog = getInputPorts().createPort("event log (ProM Event Log)", XLogIOObject.class); + private OutputPort output = getOutputPorts().createPort("example set (Data Table)"); + + + private ExampleSetMetaData metaData = null; + private Attribute[] attributes; + + private TableModel tm = null; + private Map> mappingAttributesEventClass = new HashMap>(); + + /** + * The default constructor needed in exactly this signature + */ + public CaseDataExtractorOperator(OperatorDescription description) { + super(description); + /** Adding a rule for the output */ + getTransformer().addRule( new GenerateNewMDRule(output, ExampleSet.class)); + } + + @Override + public void doWork() throws OperatorException { + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, + "Start: log to data table conversion"); + long time = System.currentTimeMillis(); + + XLogIOObject log = inputLog.getData(XLogIOObject.class); + XLog promLog = log.getArtifact(); + MemoryExampleTable table = null; + ExampleSet es = null; + try { + // create the exampleset` + XLogInfo summary = XLogInfoFactory.createLogInfo(promLog); + createMappingEventClassesAndAttributes(summary,promLog); + table = createStructureTable(promLog, summary); + es = fillTable(table, promLog); + } catch (Exception e) { + e.printStackTrace(); + System.out.println("error when creating exampleset, creating empty exampleset"); + List attributes = new LinkedList(); + table = new MemoryExampleTable(attributes); + es = table.createExampleSet(); + } + /** Adding a rule for the output */ + getTransformer().addRule( new GenerateNewMDRule(output, this.metaData)); + output.deliver(es); + logger.log(Level.INFO, + "End: log to data table conversion (" + + (System.currentTimeMillis() - time) / 1000 + " sec)"); + + } + + private void createMappingEventClassesAndAttributes(XLogInfo summary, + XLog log) { + for (int i=0; i()); + } + for (XTrace t : log) { + for (XEvent e : t) { + XEventClass c = summary.getNameClasses().getClassOf(e); + Set set = mappingAttributesEventClass.get(c); + Iterator iterator = e.getAttributes().keySet().iterator(); + while (iterator.hasNext()) { + String next = iterator.next(); + if (next.equals("concept:name") || next.equals("lifecycle:transition") || next.equals("time:timestamp") || + next.equals("org:resource") || next.equals("org:group") || next.equals("org:role")) { + // ignore + } + else { + set.add(next); + } + } + } + } + } + + private ExampleSet fillTable(MemoryExampleTable table, XLog log) { + DataRowFactory factory = new DataRowFactory(DataRowFactory.TYPE_DOUBLE_ARRAY, '.'); + // for each trace add the information + for (XTrace t : log) { + System.out.println("name:" + XConceptExtension.instance().extractName(t)); + String[] strings = new String[tm.getColumnCount()]; + // add the name + String name = XConceptExtension.instance().extractName(t); + strings[0] = name; + // add the number of events + strings[1] = t.size() + ""; + // sojourn time + long startTime = XTimeExtension.instance().extractTimestamp(t.get(0)).getTime(); + long endTime = XTimeExtension.instance().extractTimestamp(t.get(t.size()-1)).getTime(); + strings[2] = endTime-startTime + ""; + // now for the data + Iterator iterator = t.getAttributes().keySet().iterator(); + while (iterator.hasNext()) { + String next = iterator.next(); + // now search for the right column + int numberColumn = tm.getNumberColumn("T:data." 
+ next); + if (numberColumn > -1 && numberColumn < strings.length) { + XAttribute xAttribute = t.getAttributes().get(next); + if (xAttribute instanceof XAttributeLiteral) { + XAttributeLiteral attribLit = (XAttributeLiteral) xAttribute; + String value = attribLit.getValue(); + strings[numberColumn] = value; + } + if (xAttribute instanceof XAttributeBoolean) { + XAttributeBoolean attribBool = (XAttributeBoolean) xAttribute; + boolean value = attribBool.getValue(); + strings[numberColumn] = value + ""; + } + if (xAttribute instanceof XAttributeContinuous) { + XAttributeContinuous attribCont = (XAttributeContinuous) xAttribute; + double value = attribCont.getValue(); + strings[numberColumn] = value + ""; + } + if (xAttribute instanceof XAttributeDiscrete) { + XAttributeDiscrete attribDisc = (XAttributeDiscrete) xAttribute; + long value = attribDisc.getValue(); + strings[numberColumn] = value + ""; + } + if (xAttribute instanceof XAttributeTimestamp) { + XAttributeTimestamp attribTs = (XAttributeTimestamp) xAttribute; + long value = attribTs.getValue().getTime(); + strings[numberColumn] = value + ""; + } + } + } + // now for the events + for (int i=0; i attributes = new LinkedList(); + AttributeMetaData amd = null; + this.tm = new TableModel(numberOfTraces); + // first for the log + // identifier of the trace + attributes.add(AttributeFactory.createAttribute("T:concept:name", Ontology.NOMINAL)); + amd = new AttributeMetaData("T:concept:name", Ontology.NOMINAL); + amd.setRole(AttributeColumn.REGULAR); + amd.setNumberOfMissingValues(new MDInteger(0)); + tm.addDescriptionColumn(new CaseRow("T:concept:name",true,false,false,false,"")); + // number of events + attributes.add(AttributeFactory.createAttribute("T:number_of_events", Ontology.NUMERICAL)); + amd = new AttributeMetaData("T:number_of_events", Ontology.NUMERICAL); + amd.setRole(AttributeColumn.REGULAR); + amd.setNumberOfMissingValues(new MDInteger(0)); + amd.setValueRange(new Range(0, Long.MAX_VALUE), SetRelation.EQUAL); + metaData.addAttribute(amd); + tm.addDescriptionColumn(new CaseRow("T:number_of_events",false,true,false,false,"")); + // sojourn time + attributes.add(AttributeFactory.createAttribute("T:sojourn_time.seconds", Ontology.NUMERICAL)); + amd = new AttributeMetaData("T:sojourn_time.seconds", Ontology.NUMERICAL); + amd.setRole(AttributeColumn.REGULAR); + amd.setNumberOfMissingValues(new MDInteger(0)); + amd.setValueRange(new Range(0, Long.MAX_VALUE), SetRelation.EQUAL); + metaData.addAttribute(amd); + tm.addDescriptionColumn(new CaseRow("T:sojourn_time.seconds",false,false,true,false,"")); + // data of the trace + Iterator iterator2 = summary.getTraceAttributeInfo().getAttributeKeys().iterator(); + while (iterator2.hasNext()) { + String next = iterator2.next(); + if (!next.equals("concept:name")) { + attributes.add(AttributeFactory.createAttribute("T:data." + next, Ontology.NOMINAL)); + amd = new AttributeMetaData("T:data." + next, Ontology.NOMINAL); + amd.setRole(AttributeColumn.REGULAR); + amd.setNumberOfMissingValues(new MDInteger(0)); + tm.addDescriptionColumn(new CaseRow("T:data." + next,true,false,false,true,next)); + } + } + // now for the events/tasks + for (int i=0; i iterator = summary.getEventAttributeInfo().getAttributeKeys().iterator(); + while (iterator.hasNext()) { + String next = iterator.next(); + // check whether the attribute exists for the event + Set set = mappingAttributesEventClass.get(ec); + if (set != null && set.contains(next)) { + attributes.add(AttributeFactory.createAttribute("E:data." 
+ ec.getId() + "." + next, Ontology.NOMINAL)); + amd = new AttributeMetaData("E:data." + ec.getId() + "." + next, Ontology.NOMINAL); + amd.setRole(AttributeColumn.REGULAR); + amd.setNumberOfMissingValues(new MDInteger(0)); + metaData.addAttribute(amd); + tm.addDescriptionColumn(new EventRow("E:data." + ec.getId() + "." + next,false,false,false,false,false,true,false,ec.getId(),"","",next)); + } + } + } + // convert the list to array + Attribute[] attribArray = new Attribute[attributes.size()]; + for (int i=0; i descriptionColumns = new ArrayList(); + private List values = new ArrayList(); + + public TableModel(int nrRows) { + values = new ArrayList(nrRows); + } + + public void addDescriptionColumn (Object obj) { + this.descriptionColumns.add(obj); + } + + public Object getDescriptionColumn (int nrColumn) { + return this.descriptionColumns.get(nrColumn); + } + + public String getNameColumn (int nrColumn) { + Object object = this.descriptionColumns.get(nrColumn); + if (object instanceof CaseRow) { + CaseRow cr = (CaseRow) object; + return cr.getNameColum(); + } + if (object instanceof EventRow) { + EventRow er = (EventRow) object; + return er.getNameColumn(); + } + return ""; + } + + public int getNumberColumn (String name) { + for (int i=0; i getParameterTypes() { + List parameterTypes = super.getParameterTypes(); + + String[] options = new String[] { SOUNDNESS, BEHAVIOR, RETAIN }; + + ParameterTypeCategory variation = new ParameterTypeCategory(VARIATION, VARIATION_DESCR, options, 0); + parameterTypes.add(variation); + + return parameterTypes; + } + +} diff --git a/src/main/java/org/rapidprom/operators/analysis/RepairModelOperator.java b/src/main/java/org/rapidprom/operators/analysis/RepairModelOperator.java new file mode 100644 index 0000000..eba3610 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/analysis/RepairModelOperator.java @@ -0,0 +1,211 @@ +package org.rapidprom.operators.analysis; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.modelrepair.parameters.RepairConfiguration; +import org.processmining.modelrepair.plugins.Uma_RepairModel_Plugin; +import org.processmining.models.graphbased.directed.petrinet.Petrinet; +import org.processmining.models.graphbased.directed.petrinet.elements.Place; +import org.processmining.models.semantics.petrinet.Marking; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.PetriNetIOObject; +import org.rapidprom.ioobjects.XLogIOObject; +import org.rapidprom.operators.abstr.AbstractRapidProMDiscoveryOperator; + +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeBoolean; +import com.rapidminer.parameter.ParameterTypeInt; +import com.rapidminer.parameter.UndefinedParameterError; +import com.rapidminer.tools.LogService; + +import javassist.tools.rmi.ObjectNotFoundException; + +public class RepairModelOperator extends AbstractRapidProMDiscoveryOperator { + + private static final String PARAMETER_1_KEY = "Detect loops", + PARAMETER_1_DESCR = "If set to 'true', the plugin will apply 
a few heuristics " + "to detect whether the event log contains cyclic behavior (repetitions) " + "of a certain number of steps where the process model contains no cycle " + "with all these steps. If such a cycle is found, the plugin tests whether " + "introducing a single \"loop back\" transition increases fitness of the log " + "to the model. If yes, the transition is added; if not, the model remains " + "unchanged. This parameter is optional and reduces the number of " + "sub-processes added due to the parameter \"Detect sub-processes\"", + PARAMETER_2_KEY = "Detect sub-processes", + PARAMETER_2_DESCR = "If set to 'true', the plugin will extend the process model in " + "two ways. (1) If the log requires certain process steps to be skipped " + "(by a model move in the alignment), the plugin adds a 'skip' transition " + "for this step that allows one to proceed in the process without taking the " + "process step. (2) If the log requires additional process steps that are " + "currently not in the model (due to log moves in the alignment), the " + "plugin identifies the exact locations where consecutive sequences of " + "additional steps should be added and inserts subprocesses that fit the " + "missing behavior. This parameter is mandatory to obtain a model that " + "perfectly fits the given log ", + PARAMETER_3_KEY = "Remove infrequent nodes", + PARAMETER_3_DESCR = "If set to 'true', the plugin identifies process steps which " + "are never or rarely executed according to the log and removes any " + "step that is infrequent without breaking the flow in the model. " + "Use \"Cost of loop model move\" to set the threshold for when a node is " + "considered infrequent. This parameter is optional and should be used to" + " obtain a simpler model", + PARAMETER_4_KEY = "Global cost alignment", + PARAMETER_4_DESCR = "If set to 'true', the plugin analyzes the deviations between " + "model and log on a global level to identify the smallest set of process " + "steps that are missing or should be skipped. This parameter is optional. " + "It causes higher runtime cost in the deviation analysis as several " + "alignments are computed, but it results in simpler models with a higher " + "similarity to the original model. In both cases, the resulting model " + "will perfectly fit the log (if \"Detect sub-processes\" is set to 'true')", + PARAMETER_5_KEY = "Align alignments", + PARAMETER_5_DESCR = "Use in conjunction with subprocess detection parameter " + "(\"Detect sub-processes\"). " + "If set to 'true', the identified sequences of steps that have to be " + "added to the model as sub-processes are analyzed for similarities. " + "Subsequences of similar events are grouped together which leads to " + "smaller subprocesses that are inserted at more specific locations in " + "the process. This parameter is optional and may lead to simpler models " + "with a higher similarity to the original model. In both cases, the " + "resulting model will perfectly fit the log (if \"Detect sub-processes\" " + "is set to 'true').", + PARAMETER_6_KEY = "Cost of loop model move", + PARAMETER_6_DESCR = "A technical parameter used during loop detection (\"Detect loops\"). " + "When set to '0' (default value), loop detection will ignore that some " + "iterations of a loop may require to skip certain process steps within the " + + "loop. 
If set to a value >= 1, loop detection will balance between the " + + "'skip transitions' that have to be added if the loop is added and the " + + "sub-process that has to be added if the loop is not added. Generally, " + + "the parameter should be set to '0' to ease loop detection and preserve " + + "similarity to the original model. However, if the possible loop has a " + + "complex inner structure, the analysis for loops may incur very high running" + + " times. In this case, set a value >= 1 to ensure faster completion. ", + PARAMETER_7_KEY = "Remove / Keep if more than", + PARAMETER_7_DESCR = "The threshold value for when a node is considered 'infrequent' " + + "in the removal of infrequent nodes (\"Remove infrequent nodes\"). The " + + "threshold is specified " + + "as the absolute number of occurrences of a process step in the log. Set to '0' " + + "(default) to remove only process steps which never occur in the log (this " + + "ensures a fitting model); set to > 0 to also remove parts of the model used " + + "only infrequently (gives a simpler model that does not show all behaviors " + "of the log).", + PARAMETER_8_KEY = "Global cost max iterations", + PARAMETER_8_DESCR = "Parameter used by computation of a global cost alignment " + + "(\"Global cost alignment\"). It specifies the number of analysis iterations " + + "done to identify the smallest number of process steps in the model that " + + "require a repair. Usually, the smallest number is found after one global " + + "analysis (default value '1'). "; + + private InputPort inputPetrinet = getInputPorts().createPort("model (ProM Petri Net)", PetriNetIOObject.class); + private OutputPort outputPetrinet = getOutputPorts().createPort("model (ProM Petri Net)"); + + public RepairModelOperator(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(outputPetrinet, PetriNetIOObject.class)); + } + + public void doWork() throws OperatorException { + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: repair model using event log"); + long time = System.currentTimeMillis(); + + Uma_RepairModel_Plugin repairer = new Uma_RepairModel_Plugin(); + + PluginContext pluginContext = RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(Uma_RepairModel_Plugin.class); + + XLogIOObject xLog = new XLogIOObject(getXLog(), pluginContext); + + PetriNetIOObject petriNet = inputPetrinet.getData(PetriNetIOObject.class); + + Object[] result = null; + try { + if (!petriNet.hasFinalMarking()) + petriNet.setFinalMarking(getFinalMarking(petriNet.getArtifact())); + result = repairer.repairModel_buildT2Econnection(pluginContext, xLog.getArtifact(), petriNet.getArtifact(), + petriNet.getInitialMarking(), petriNet.getFinalMarking(), getConfiguration(), + getXEventClassifier()); + } catch (ObjectNotFoundException e) { + e.printStackTrace(); + } + + PetriNetIOObject output = new PetriNetIOObject((Petrinet) result[0], (Marking) result[1], null, pluginContext); + + outputPetrinet.deliver(output); + + logger.log(Level.INFO, + "End: repair model using event log (" + (System.currentTimeMillis() - time) / 1000 + " sec)"); + } + + public List getParameterTypes() { + List parameterTypes = super.getParameterTypes(); + + ParameterTypeBoolean parameter1 = new ParameterTypeBoolean(PARAMETER_1_KEY, PARAMETER_1_DESCR, true); + parameterTypes.add(parameter1); + + ParameterTypeBoolean parameter2 = new ParameterTypeBoolean(PARAMETER_2_KEY, PARAMETER_2_DESCR, true); + parameterTypes.add(parameter2); + 
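+ // Note: parameter types are added in the order in which they are shown in the operator's
+ // parameter panel, which here differs from the numeric order of the PARAMETER_*_KEY constants.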
+ ParameterTypeBoolean parameter5 = new ParameterTypeBoolean(PARAMETER_5_KEY, PARAMETER_5_DESCR, true); + parameterTypes.add(parameter5); + + ParameterTypeBoolean parameter3 = new ParameterTypeBoolean(PARAMETER_3_KEY, PARAMETER_3_DESCR, true); + parameterTypes.add(parameter3); + + ParameterTypeInt parameter7 = new ParameterTypeInt(PARAMETER_7_KEY, PARAMETER_7_DESCR, 0, Integer.MAX_VALUE, 0); + parameterTypes.add(parameter7); + + ParameterTypeBoolean parameter4 = new ParameterTypeBoolean(PARAMETER_4_KEY, PARAMETER_4_DESCR, true); + parameterTypes.add(parameter4); + + ParameterTypeInt parameter6 = new ParameterTypeInt(PARAMETER_6_KEY, PARAMETER_6_DESCR, 0, Integer.MAX_VALUE, 0); + parameterTypes.add(parameter6); + + ParameterTypeInt parameter8 = new ParameterTypeInt(PARAMETER_8_KEY, PARAMETER_8_DESCR, 0, Integer.MAX_VALUE, 1); + parameterTypes.add(parameter8); + + return parameterTypes; + } + + private RepairConfiguration getConfiguration() { + RepairConfiguration repairConfiguration = new RepairConfiguration(); + try { + repairConfiguration.detectLoops = getParameterAsBoolean(PARAMETER_1_KEY); + repairConfiguration.loopModelMoveCosts = getParameterAsInt(PARAMETER_6_KEY); + repairConfiguration.detectSubProcesses = getParameterAsBoolean(PARAMETER_2_KEY); + repairConfiguration.removeInfrequentNodes = getParameterAsBoolean(PARAMETER_3_KEY); + repairConfiguration.remove_keepIfMoreThan = getParameterAsInt(PARAMETER_7_KEY); + repairConfiguration.globalCostAlignment = getParameterAsBoolean(PARAMETER_4_KEY); + repairConfiguration.globalCost_maxIterations = getParameterAsInt(PARAMETER_8_KEY); + repairConfiguration.alignAlignments = getParameterAsBoolean(PARAMETER_5_KEY); + + } catch (UndefinedParameterError e) { + e.printStackTrace(); + } + return repairConfiguration; + } + + @SuppressWarnings("rawtypes") + public static Marking getFinalMarking(Petrinet pn) { + List places = new ArrayList(); + Iterator placesIt = pn.getPlaces().iterator(); + while (placesIt.hasNext()) { + Place nextPlace = placesIt.next(); + Collection inEdges = pn.getOutEdges(nextPlace); + if (inEdges.isEmpty()) { + places.add(nextPlace); + } + } + Marking m = new Marking(); + m.addAll(places); + return m; + } +} diff --git a/src/main/java/org/rapidprom/operators/analysis/ShowPomPomViewOperator.java b/src/main/java/org/rapidprom/operators/analysis/ShowPomPomViewOperator.java new file mode 100644 index 0000000..d45b688 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/analysis/ShowPomPomViewOperator.java @@ -0,0 +1,56 @@ +package org.rapidprom.operators.analysis; + +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.plugins.pompom.PomPomView; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.PetriNetIOObject; +import org.rapidprom.ioobjects.PomPomViewIOObject; +import org.rapidprom.operators.abstr.AbstractRapidProMDiscoveryOperator; + +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.tools.LogService; + +import javassist.tools.rmi.ObjectNotFoundException; + +public class ShowPomPomViewOperator extends AbstractRapidProMDiscoveryOperator { + + private InputPort inputPetrinet = getInputPorts().createPort("model (ProM Petri Net)", 
PetriNetIOObject.class); + private OutputPort outputPomPomView = getOutputPorts().createPort("model (ProM PomPomView)"); + + public ShowPomPomViewOperator(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(outputPomPomView, PomPomViewIOObject.class)); + } + + public void doWork() throws OperatorException { + + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: create pompom view"); + long time = System.currentTimeMillis(); + + PluginContext pluginContext = RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(PomPomView.class); + PomPomViewIOObject result = null; + try { + result = new PomPomViewIOObject( + new PomPomView(pluginContext, inputPetrinet.getData(PetriNetIOObject.class).getArtifact(), + inputPetrinet.getData(PetriNetIOObject.class).getInitialMarking(), getXLog(), + getXEventClassifier()), + pluginContext); + } catch (ObjectNotFoundException e) { + e.printStackTrace(); + } + + outputPomPomView.deliver(result); + + logger.log(Level.INFO, "End: create pompom view (" + (System.currentTimeMillis() - time) / 1000 + " sec)"); + } + +} diff --git a/src/main/java/org/rapidprom/operators/analysis/WoflanAnalysisOperator.java b/src/main/java/org/rapidprom/operators/analysis/WoflanAnalysisOperator.java new file mode 100644 index 0000000..459bef1 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/analysis/WoflanAnalysisOperator.java @@ -0,0 +1,121 @@ +package org.rapidprom.operators.analysis; + +import java.util.List; +import java.util.concurrent.Callable; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.plugins.petrinet.behavioralanalysis.woflan.Woflan; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.PetriNetIOObject; +import org.rapidprom.ioobjects.WoflanDiagnosisIOObject; + +import com.google.common.util.concurrent.SimpleTimeLimiter; +import com.google.common.util.concurrent.UncheckedTimeoutException; +import com.rapidminer.example.ExampleSet; +import com.rapidminer.example.ExampleSetFactory; +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeBoolean; +import com.rapidminer.parameter.ParameterTypeInt; +import com.rapidminer.tools.LogService; + +public class WoflanAnalysisOperator extends Operator { + + private static final String PARAMETER_0_KEY = "Enable Time limit", + PARAMETER_0_DESCR = "Tries to evaluate soundness within a given time period.", + PARAMETER_1_KEY = "Time limit (sec)", PARAMETER_1_DESCR = "Time limit before the analysis is cancelled. 
" + + "Helpful when analyzing large Petri nets."; + + private InputPort input = getInputPorts().createPort("model (ProM Petri Net)", PetriNetIOObject.class); + private OutputPort outputWoflan = getOutputPorts().createPort("woflan diagnosis (ProM WoflanDiagnosis)"); + private OutputPort outputWoflanString = getOutputPorts().createPort("woflan diagnosis (String)"); + + public WoflanAnalysisOperator(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(outputWoflan, WoflanDiagnosisIOObject.class)); + getTransformer().addRule(new GenerateNewMDRule(outputWoflanString, ExampleSet.class)); + } + + public void doWork() throws OperatorException { + + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: woflan analysis"); + long time = System.currentTimeMillis(); + + WoflanDiagnosisIOObject woflanDiagnosisIOObject = null; + PluginContext pluginContext = RapidProMGlobalContext.instance().getFutureResultAwarePluginContext(Woflan.class); + SimpleTimeLimiter limiter = new SimpleTimeLimiter(Executors.newSingleThreadExecutor()); + Object[][] outputString = new Object[1][1]; + + try { + if (getParameterAsBoolean(PARAMETER_0_KEY)) + woflanDiagnosisIOObject = limiter.callWithTimeout(new WOFLANER(pluginContext), + getParameterAsInt(PARAMETER_1_KEY), TimeUnit.SECONDS, true); + else + woflanDiagnosisIOObject = limiter.callWithTimeout(new WOFLANER(pluginContext), Long.MAX_VALUE, + TimeUnit.SECONDS, true); + + outputString[0][0] = woflanDiagnosisIOObject.getArtifact().toString(); + outputWoflan.deliver(woflanDiagnosisIOObject); + + } catch (UncheckedTimeoutException e) { + + outputString[0][0] = " Woflan could not evaluate soundness in the given time."; + logger.log(Level.INFO, "Woflan timed out."); + + pluginContext.getProgress().cancel(); + + } catch (Exception e1) { + + e1.printStackTrace(); + outputString[0][0] = " Error checking soundness."; + pluginContext.getProgress().cancel(); + } + + ExampleSet es = ExampleSetFactory.createExampleSet(outputString); + + outputWoflanString.deliver(es); + + logger.log(Level.INFO, "End: woflan analysis (" + (System.currentTimeMillis() - time) / 1000 + " sec)"); + } + + public List getParameterTypes() { + + List parameterTypes = super.getParameterTypes(); + + ParameterTypeBoolean parameter0 = new ParameterTypeBoolean(PARAMETER_0_KEY, PARAMETER_0_DESCR, true); + parameterTypes.add(parameter0); + + ParameterTypeInt parameter1 = new ParameterTypeInt(PARAMETER_1_KEY, PARAMETER_1_DESCR, 0, 10000, 60); + parameterTypes.add(parameter1); + + return parameterTypes; + } + + class WOFLANER implements Callable { + + private PluginContext pluginContext; + + public WOFLANER(PluginContext input) { + pluginContext = input; + } + + @Override + public WoflanDiagnosisIOObject call() throws Exception { + PetriNetIOObject petriNet = input.getData(PetriNetIOObject.class); + Woflan woflan = new Woflan(); + return new WoflanDiagnosisIOObject(woflan.diagnose(pluginContext, petriNet.getArtifact()), pluginContext); + } + + } + +} diff --git a/src/main/java/org/rapidprom/operators/conformance/ConformanceAnalysisOperator.java b/src/main/java/org/rapidprom/operators/conformance/ConformanceAnalysisOperator.java new file mode 100644 index 0000000..84c01d1 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/conformance/ConformanceAnalysisOperator.java @@ -0,0 +1,610 @@ +package org.rapidprom.operators.conformance; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; 
+import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.deckfour.xes.classification.XEventClass; +import org.deckfour.xes.classification.XEventClassifier; +import org.deckfour.xes.extension.std.XConceptExtension; +import org.deckfour.xes.info.XLogInfo; +import org.deckfour.xes.info.XLogInfoFactory; +import org.deckfour.xes.model.XLog; +import org.deckfour.xes.model.XTrace; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.models.graphbased.directed.petrinet.Petrinet; +import org.processmining.models.graphbased.directed.petrinet.PetrinetGraph; +import org.processmining.models.graphbased.directed.petrinet.elements.Place; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; +import org.processmining.models.semantics.petrinet.Marking; +import org.processmining.plugins.astar.petrinet.AbstractPetrinetReplayer; +import org.processmining.plugins.astar.petrinet.PetrinetReplayerWithILP; +import org.processmining.plugins.astar.petrinet.PetrinetReplayerWithoutILP; +import org.processmining.plugins.connectionfactories.logpetrinet.TransEvClassMapping; +import org.processmining.plugins.petrinet.replayer.PNLogReplayer; +import org.processmining.plugins.petrinet.replayer.algorithms.IPNReplayParameter; +import org.processmining.plugins.petrinet.replayer.algorithms.costbasedcomplete.CostBasedCompleteParam; +import org.processmining.plugins.petrinet.replayresult.PNRepResult; +import org.processmining.plugins.replayer.replayresult.SyncReplayResult; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.PNRepResultIOObject; +import org.rapidprom.ioobjects.PetriNetIOObject; +import org.rapidprom.ioobjects.XLogIOObject; +import org.rapidprom.operators.abstr.AbstractRapidProMDiscoveryOperator; + +import com.google.common.util.concurrent.SimpleTimeLimiter; +import com.google.common.util.concurrent.UncheckedTimeoutException; +import com.rapidminer.example.Attribute; +import com.rapidminer.example.ExampleSet; +import com.rapidminer.example.table.AttributeFactory; +import com.rapidminer.example.table.DataRow; +import com.rapidminer.example.table.DataRowFactory; +import com.rapidminer.example.table.MemoryExampleTable; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.io.AbstractDataReader.AttributeColumn; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.AttributeMetaData; +import com.rapidminer.operator.ports.metadata.ExampleSetMetaData; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.operator.ports.metadata.MDInteger; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeCategory; +import com.rapidminer.parameter.ParameterTypeInt; +import com.rapidminer.parameter.UndefinedParameterError; +import com.rapidminer.tools.LogService; +import com.rapidminer.tools.Ontology; + +import javassist.tools.rmi.ObjectNotFoundException; +import nl.tue.astar.AStarException; + +public class ConformanceAnalysisOperator extends AbstractRapidProMDiscoveryOperator { + + private static final String PARAMETER_0_KEY = "Replay Algorithm", + PARAMETER_0_DESCR = 
"The Petri net replayer algorithm that will be used to calculate alignments.", + PARAMETER_1_KEY = "Max Explored States (in Thousands)", + PARAMETER_1_DESCR = "The maximum number of states that are searched for a trace alignment.", + PARAMETER_2_KEY = "Timeout (sec)", + PARAMETER_2_DESCR = "The number of seconds that this operator will run before " + + "returning whatever it could manage to calculate (or null otherwise).", + PARAMETER_3_KEY = "Number of Threads", + PARAMETER_3_DESCR = "Specify the number of threads used to calculate alignments in parallel." + + " With each extra thread, more memory is used but less cpu time is required."; + + private static final String WITH_ILP = "ILP Replayer", WITHOUT_ILP = "non-ILP Replayer"; + + private PNRepResultIOObject alignments; + + private final String NAMECOL = "Name"; + private final String VALUECOL = "Value"; + + // alignment + private final String TRACEIDENTIFIER = "Trace Identifier"; + private final String TRACEINDEX = "Trace Index"; + private final String RELIABLE = "Unreliable Alignments Exist"; + + private InputPort inputPN = getInputPorts().createPort("model (ProM Petri Net)", PetriNetIOObject.class); + private OutputPort output = getOutputPorts().createPort("alignments (ProM PNRepResult)"); + + private OutputPort outputData = getOutputPorts().createPort("example set with metrics (Data Table)"); + private OutputPort outputAlignment = getOutputPorts().createPort("example set with alignment values (Data Table)"); + private OutputPort outputAlignmentTrace = getOutputPorts() + .createPort("example set with alignment values per trace (Data Table)"); + private OutputPort outputReliable = getOutputPorts() + .createPort("example set with indicator for reliable traces (Data Table)"); + + private ExampleSetMetaData metaData = null; + private ExampleSetMetaData metaData2 = null; + private ExampleSetMetaData metaData3 = null; + private ExampleSetMetaData metaData4 = null; + + public ConformanceAnalysisOperator(OperatorDescription description) { + super(description); + + getTransformer().addRule(new GenerateNewMDRule(output, PNRepResultIOObject.class)); + + this.metaData = new ExampleSetMetaData(); + AttributeMetaData amd1 = new AttributeMetaData(NAMECOL, Ontology.STRING); + amd1.setRole(AttributeColumn.REGULAR); + amd1.setNumberOfMissingValues(new MDInteger(0)); + metaData.addAttribute(amd1); + AttributeMetaData amd2 = new AttributeMetaData(VALUECOL, Ontology.NUMERICAL); + amd2.setRole(AttributeColumn.REGULAR); + amd2.setNumberOfMissingValues(new MDInteger(0)); + metaData.addAttribute(amd2); + metaData.setNumberOfExamples(1); + getTransformer().addRule(new GenerateNewMDRule(outputData, this.metaData)); + // for the alignment + this.metaData2 = new ExampleSetMetaData(); + AttributeMetaData alignAmd1 = new AttributeMetaData(this.TRACEINDEX, Ontology.STRING); + alignAmd1.setRole(AttributeColumn.REGULAR); + alignAmd1.setNumberOfMissingValues(new MDInteger(0)); + metaData2.addAttribute(alignAmd1); + AttributeMetaData alignAmd2 = new AttributeMetaData(PNRepResult.TRACEFITNESS, Ontology.NUMERICAL); + alignAmd2.setRole(AttributeColumn.REGULAR); + alignAmd2.setNumberOfMissingValues(new MDInteger(0)); + metaData2.addAttribute(alignAmd2); + AttributeMetaData alignAmd3 = new AttributeMetaData(PNRepResult.MOVELOGFITNESS, Ontology.NUMERICAL); + alignAmd3.setRole(AttributeColumn.REGULAR); + alignAmd3.setNumberOfMissingValues(new MDInteger(0)); + metaData2.addAttribute(alignAmd3); + AttributeMetaData alignAmd4 = new AttributeMetaData(PNRepResult.MOVEMODELFITNESS, 
Ontology.NUMERICAL); + alignAmd4.setRole(AttributeColumn.REGULAR); + alignAmd4.setNumberOfMissingValues(new MDInteger(0)); + metaData2.addAttribute(alignAmd4); + AttributeMetaData alignAmd5 = new AttributeMetaData(PNRepResult.RAWFITNESSCOST, Ontology.NUMERICAL); + alignAmd5.setRole(AttributeColumn.REGULAR); + alignAmd5.setNumberOfMissingValues(new MDInteger(0)); + metaData2.addAttribute(alignAmd5); + AttributeMetaData alignAmd6 = new AttributeMetaData(PNRepResult.NUMSTATEGENERATED, Ontology.NUMERICAL); + alignAmd6.setRole(AttributeColumn.REGULAR); + alignAmd6.setNumberOfMissingValues(new MDInteger(0)); + metaData2.addAttribute(alignAmd6); + metaData2.setNumberOfExamples(1); + getTransformer().addRule(new GenerateNewMDRule(outputAlignment, this.metaData2)); + // for the alignment per trace + this.metaData3 = new ExampleSetMetaData(); + AttributeMetaData alignAmd11 = new AttributeMetaData(this.TRACEINDEX, Ontology.STRING); + alignAmd11.setRole(AttributeColumn.REGULAR); + alignAmd11.setNumberOfMissingValues(new MDInteger(0)); + metaData3.addAttribute(alignAmd11); + AttributeMetaData alignAmd111 = new AttributeMetaData(this.TRACEIDENTIFIER, Ontology.STRING); + alignAmd111.setRole(AttributeColumn.REGULAR); + alignAmd111.setNumberOfMissingValues(new MDInteger(0)); + metaData3.addAttribute(alignAmd111); + AttributeMetaData alignAmd12 = new AttributeMetaData(PNRepResult.TRACEFITNESS, Ontology.NUMERICAL); + alignAmd12.setRole(AttributeColumn.REGULAR); + alignAmd12.setNumberOfMissingValues(new MDInteger(0)); + metaData3.addAttribute(alignAmd12); + AttributeMetaData alignAmd13 = new AttributeMetaData(PNRepResult.MOVELOGFITNESS, Ontology.NUMERICAL); + alignAmd13.setRole(AttributeColumn.REGULAR); + alignAmd13.setNumberOfMissingValues(new MDInteger(0)); + metaData3.addAttribute(alignAmd3); + AttributeMetaData alignAmd14 = new AttributeMetaData(PNRepResult.MOVEMODELFITNESS, Ontology.NUMERICAL); + alignAmd14.setRole(AttributeColumn.REGULAR); + alignAmd14.setNumberOfMissingValues(new MDInteger(0)); + metaData3.addAttribute(alignAmd14); + AttributeMetaData alignAmd15 = new AttributeMetaData(PNRepResult.RAWFITNESSCOST, Ontology.NUMERICAL); + alignAmd15.setRole(AttributeColumn.REGULAR); + alignAmd15.setNumberOfMissingValues(new MDInteger(0)); + metaData3.addAttribute(alignAmd15); + AttributeMetaData alignAmd16 = new AttributeMetaData(PNRepResult.NUMSTATEGENERATED, Ontology.NUMERICAL); + alignAmd16.setRole(AttributeColumn.REGULAR); + alignAmd16.setNumberOfMissingValues(new MDInteger(0)); + metaData3.addAttribute(alignAmd16); + metaData3.setNumberOfExamples(1); + getTransformer().addRule(new GenerateNewMDRule(outputAlignmentTrace, this.metaData3)); + // md4 + this.metaData4 = new ExampleSetMetaData(); + getTransformer().addRule(new GenerateNewMDRule(outputReliable, this.metaData4)); + + alignments = null; + } + + @Override + public void doWork() throws OperatorException { + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: replay log on petri net for conformance checking"); + long time = System.currentTimeMillis(); + + SimpleTimeLimiter limiter = new SimpleTimeLimiter(Executors.newSingleThreadExecutor()); + PluginContext pluginContext = RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(PNLogReplayer.class); + + PNRepResult repResult = null; + + try { + alignments = limiter.callWithTimeout(new ALIGNMENT_CALCULATOR(pluginContext), + getParameterAsInt(PARAMETER_2_KEY), TimeUnit.SECONDS, true); + repResult = alignments.getArtifact(); + + output.deliver(alignments); + + } 
catch (UncheckedTimeoutException e1) { + pluginContext.getProgress().cancel(); + logger.log(Level.INFO, "Conformance Checker timed out."); + output.deliver(new PNRepResultIOObject(null, pluginContext, null, null, null)); + + } catch (Exception e) { + e.printStackTrace(); + } + + fillTables(repResult); + + logger.log(Level.INFO, "End: replay log on petri net for conformance checking (" + + (System.currentTimeMillis() - time) / 1000 + " sec)"); + } + + class ALIGNMENT_CALCULATOR implements Callable { + + PluginContext pluginContext; + + public ALIGNMENT_CALCULATOR(PluginContext input) { + pluginContext = input; + } + + @Override + public PNRepResultIOObject call() throws Exception { + + XLogIOObject xLog = new XLogIOObject(getXLog(), pluginContext); + PetriNetIOObject pNet = inputPN.getData(PetriNetIOObject.class); + + PNRepResult repResult = null; + try { + if (!pNet.hasFinalMarking()) + pNet.setFinalMarking(getFinalMarking(pNet.getArtifact())); + repResult = getAlignment(pluginContext, pNet.getArtifact(), xLog.getArtifact(), + pNet.getInitialMarking(), pNet.getFinalMarking()); + } catch (ObjectNotFoundException e1) { + e1.printStackTrace(); + } + + PNRepResultIOObject result = new PNRepResultIOObject(repResult, pluginContext, pNet, xLog.getArtifact(), + constructMapping(pNet.getArtifact(), xLog.getArtifact(), getXEventClassifier())); + + return result; + } + + } + + private List convertIntListToArray(String s) { + List result = new ArrayList(); + s = s.replace("[", ""); + s = s.replace("]", ""); + String[] split = s.split(","); + for (int i = 0; i < split.length; i++) { + String string = split[i]; + String trim = string.trim(); + Integer in = Integer.parseInt(trim); + result.add(in); + } + return result; + } + + @SuppressWarnings("rawtypes") + public static Marking getFinalMarking(Petrinet pn) { + List places = new ArrayList(); + Iterator placesIt = pn.getPlaces().iterator(); + while (placesIt.hasNext()) { + Place nextPlace = placesIt.next(); + Collection inEdges = pn.getOutEdges(nextPlace); + if (inEdges.isEmpty()) { + places.add(nextPlace); + } + } + Marking finalMarking = new Marking(); + for (Place place : places) { + finalMarking.add(place); + } + return finalMarking; + } + + private void fillTableWithRow(MemoryExampleTable table, String name, Object value, List attributes) { + // fill table + DataRowFactory factory = new DataRowFactory(DataRowFactory.TYPE_DOUBLE_ARRAY, '.'); + Object[] vals = new Object[2]; + vals[0] = name; + vals[1] = value; + // convert the list to array + Attribute[] attribArray = new Attribute[attributes.size()]; + for (int i = 0; i < attributes.size(); i++) { + attribArray[i] = attributes.get(i); + } + DataRow dataRow = factory.create(vals, attribArray); + table.addDataRow(dataRow); + } + + // Boudewijn's methods for creating alignments + + public PNRepResult getAlignment(PluginContext pluginContext, PetrinetGraph net, XLog log, Marking initialMarking, + Marking finalMarking) throws UndefinedParameterError { + + Map costMOS = constructMOSCostFunction(net); + XEventClassifier eventClassifier = getXEventClassifier(); + Map costMOT = constructMOTCostFunction(net, log, eventClassifier); + TransEvClassMapping mapping = constructMapping(net, log, eventClassifier); + + AbstractPetrinetReplayer replayEngine = null; + if (getParameterAsString(PARAMETER_0_KEY).equals(WITH_ILP)) + replayEngine = new PetrinetReplayerWithILP(); + else + replayEngine = new PetrinetReplayerWithoutILP(); + + IPNReplayParameter parameters = new CostBasedCompleteParam(costMOT, costMOS); + 
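+		// Configure the replay: initial and final markings, the number of worker
+		// threads, and the state-space limit (the parameter is given in thousands of states).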
parameters.setInitialMarking(initialMarking); + parameters.setFinalMarkings(finalMarking); + parameters.setGUIMode(false); + parameters.setCreateConn(false); + parameters.setNumThreads(getParameterAsInt(PARAMETER_3_KEY)); + ((CostBasedCompleteParam) parameters).setMaxNumOfStates(getParameterAsInt(PARAMETER_1_KEY) * 1000); + + PNRepResult result = null; + try { + result = replayEngine.replayLog(pluginContext, net, log, mapping, parameters); + + } catch (AStarException e) { + e.printStackTrace(); + } + + return result; + } + + private static Map constructMOSCostFunction(PetrinetGraph net) { + Map costMOS = new HashMap(); + + for (Transition t : net.getTransitions()) + if (t.isInvisible()) + costMOS.put(t, 0); + else + costMOS.put(t, 1); + + return costMOS; + } + + private static Map constructMOTCostFunction(PetrinetGraph net, XLog log, + XEventClassifier eventClassifier) { + Map costMOT = new HashMap(); + XLogInfo summary = XLogInfoFactory.createLogInfo(log, eventClassifier); + + for (XEventClass evClass : summary.getEventClasses().getClasses()) { + costMOT.put(evClass, 1); + } + + return costMOT; + } + + private static TransEvClassMapping constructMapping(PetrinetGraph net, XLog log, XEventClassifier eventClassifier) { + TransEvClassMapping mapping = new TransEvClassMapping(eventClassifier, new XEventClass("DUMMY", 99999)); + + XLogInfo summary = XLogInfoFactory.createLogInfo(log, eventClassifier); + + for (Transition t : net.getTransitions()) { + for (XEventClass evClass : summary.getEventClasses().getClasses()) { + String id = evClass.getId(); + + if (t.getLabel().equals(id)) { + mapping.put(t, evClass); + break; + } + } + + } + + return mapping; + } + + public List getParameterTypes() { + List parameterTypes = super.getParameterTypes(); + + ParameterTypeCategory parameterType0 = new ParameterTypeCategory(PARAMETER_0_KEY, PARAMETER_0_DESCR, + new String[] { WITH_ILP, WITHOUT_ILP }, 0); + parameterTypes.add(parameterType0); + + ParameterTypeInt parameterType1 = new ParameterTypeInt(PARAMETER_1_KEY, PARAMETER_1_DESCR, 0, Integer.MAX_VALUE, + 200); + parameterTypes.add(parameterType1); + + ParameterTypeInt parameterType2 = new ParameterTypeInt(PARAMETER_2_KEY, PARAMETER_2_DESCR, 0, Integer.MAX_VALUE, + 60); + parameterTypes.add(parameterType2); + + ParameterTypeInt parameterType3 = new ParameterTypeInt(PARAMETER_3_KEY, PARAMETER_3_DESCR, 1, Integer.MAX_VALUE, + Runtime.getRuntime().availableProcessors()); + parameterTypes.add(parameterType3); + + return parameterTypes; + } + + public void fillTables(PNRepResult repResult) throws OperatorException { + + if (repResult != null && !repResult.isEmpty()) { + + Iterator iterator3 = repResult.iterator(); + boolean unreliable = false; + while (iterator3.hasNext()) { + SyncReplayResult next = iterator3.next(); + boolean reliable = next.isReliable(); + if (!reliable) { + unreliable = true; + break; + } + } + + fillFitnessTable(repResult, unreliable); + fillTraceGroupAlignmentTable(repResult); + fillTraceSingleAlignmentTable(repResult); + fillUnreliableAlignmentsTable(unreliable); + + } else { + + fillFitnessTable(null, true); + fillTraceGroupAlignmentTable(null); + fillTraceSingleAlignmentTable(null); + fillUnreliableAlignmentsTable(true); + + } + + } + + public void fillFitnessTable(PNRepResult repResult, boolean unreliable) { + + ExampleSet es = null; + MemoryExampleTable table = null; + List attributes = new LinkedList(); + attributes.add(AttributeFactory.createAttribute(this.NAMECOL, Ontology.STRING)); + 
attributes.add(AttributeFactory.createAttribute(this.VALUECOL, Ontology.NUMERICAL)); + table = new MemoryExampleTable(attributes); + if (unreliable || repResult == null) { + fillTableWithRow(table, PNRepResult.TRACEFITNESS, Double.NaN, attributes); + fillTableWithRow(table, PNRepResult.MOVELOGFITNESS, Double.NaN, attributes); + fillTableWithRow(table, PNRepResult.MOVEMODELFITNESS, Double.NaN, attributes); + fillTableWithRow(table, PNRepResult.RAWFITNESSCOST, Double.NaN, attributes); + fillTableWithRow(table, PNRepResult.NUMSTATEGENERATED, Double.NaN, attributes); + + } else { + + Map info = repResult.getInfo(); + double trace_fitness = 0; + try { + trace_fitness = Double.parseDouble((String) info.get(PNRepResult.TRACEFITNESS)); + } catch (Exception e) { + trace_fitness = (Double) info.get(PNRepResult.TRACEFITNESS); + } + double move_log_fitness = (Double) info.get(PNRepResult.MOVELOGFITNESS); + double move_model_fitness = (Double) info.get(PNRepResult.MOVEMODELFITNESS); + double raw_fitness_costs = (Double) info.get(PNRepResult.RAWFITNESSCOST); + double num_state_gen = (Double) info.get(PNRepResult.NUMSTATEGENERATED); + + fillTableWithRow(table, PNRepResult.TRACEFITNESS, trace_fitness, attributes); + fillTableWithRow(table, PNRepResult.MOVELOGFITNESS, move_log_fitness, attributes); + fillTableWithRow(table, PNRepResult.MOVEMODELFITNESS, move_model_fitness, attributes); + fillTableWithRow(table, PNRepResult.RAWFITNESSCOST, raw_fitness_costs, attributes); + fillTableWithRow(table, PNRepResult.NUMSTATEGENERATED, num_state_gen, attributes); + } + es = table.createExampleSet(); + outputData.deliver(es); + } + + public void fillTraceGroupAlignmentTable(PNRepResult repResult) { + // output the trace alignment + ExampleSet es2 = null; + MemoryExampleTable table2 = null; + List attributes2 = new LinkedList(); + attributes2.add(AttributeFactory.createAttribute(this.TRACEINDEX, Ontology.STRING)); + attributes2.add(AttributeFactory.createAttribute(PNRepResult.TRACEFITNESS, Ontology.NUMERICAL)); + attributes2.add(AttributeFactory.createAttribute(PNRepResult.MOVELOGFITNESS, Ontology.NUMERICAL)); + attributes2.add(AttributeFactory.createAttribute(PNRepResult.MOVEMODELFITNESS, Ontology.NUMERICAL)); + attributes2.add(AttributeFactory.createAttribute(PNRepResult.RAWFITNESSCOST, Ontology.NUMERICAL)); + attributes2.add(AttributeFactory.createAttribute(PNRepResult.NUMSTATEGENERATED, Ontology.NUMERICAL)); + table2 = new MemoryExampleTable(attributes2); + + if (repResult != null) { + Iterator iterator = repResult.iterator(); + while (iterator.hasNext()) { + SyncReplayResult next = iterator.next(); + DataRowFactory factory = new DataRowFactory(DataRowFactory.TYPE_DOUBLE_ARRAY, '.'); + Object[] vals = new Object[6]; + vals[0] = next.getTraceIndex().toString(); + vals[1] = next.getInfo().get(PNRepResult.TRACEFITNESS); + vals[2] = next.getInfo().get(PNRepResult.MOVELOGFITNESS); + vals[3] = next.getInfo().get(PNRepResult.MOVEMODELFITNESS); + vals[4] = next.getInfo().get(PNRepResult.RAWFITNESSCOST); + vals[5] = next.getInfo().get(PNRepResult.NUMSTATEGENERATED); + + Attribute[] attribArray = new Attribute[attributes2.size()]; + for (int i = 0; i < attributes2.size(); i++) { + attribArray[i] = attributes2.get(i); + } + DataRow dataRow = factory.create(vals, attribArray); + table2.addDataRow(dataRow); + } + } else { + DataRowFactory factory = new DataRowFactory(DataRowFactory.TYPE_DOUBLE_ARRAY, '.'); + Object[] vals = new Object[6]; + vals[0] = "?"; + vals[1] = Double.NaN; + vals[2] = Double.NaN; + vals[3] = Double.NaN; 
+ vals[4] = Double.NaN; + vals[5] = Double.NaN; + + Attribute[] attribArray = new Attribute[attributes2.size()]; + for (int i = 0; i < attributes2.size(); i++) { + attribArray[i] = attributes2.get(i); + } + DataRow dataRow = factory.create(vals, attribArray); + table2.addDataRow(dataRow); + } + + es2 = table2.createExampleSet(); + outputAlignment.deliver(es2); + } + + public void fillTraceSingleAlignmentTable(PNRepResult repResult) throws OperatorException { + + // create the third exampleset + ExampleSet es3 = null; + MemoryExampleTable table3 = null; + List attributes3 = new LinkedList(); + attributes3.add(AttributeFactory.createAttribute(this.TRACEINDEX, Ontology.STRING)); + attributes3.add(AttributeFactory.createAttribute(this.TRACEIDENTIFIER, Ontology.STRING)); + attributes3.add(AttributeFactory.createAttribute(PNRepResult.TRACEFITNESS, Ontology.NUMERICAL)); + attributes3.add(AttributeFactory.createAttribute(PNRepResult.MOVELOGFITNESS, Ontology.NUMERICAL)); + attributes3.add(AttributeFactory.createAttribute(PNRepResult.MOVEMODELFITNESS, Ontology.NUMERICAL)); + attributes3.add(AttributeFactory.createAttribute(PNRepResult.RAWFITNESSCOST, Ontology.NUMERICAL)); + attributes3.add(AttributeFactory.createAttribute(PNRepResult.NUMSTATEGENERATED, Ontology.NUMERICAL)); + table3 = new MemoryExampleTable(attributes3); + + if (repResult != null) { + Iterator iterator2 = repResult.iterator(); + while (iterator2.hasNext()) { + SyncReplayResult next = iterator2.next(); + DataRowFactory factory = new DataRowFactory(DataRowFactory.TYPE_DOUBLE_ARRAY, '.'); + Object[] vals = new Object[7]; + vals[2] = next.getInfo().get(PNRepResult.TRACEFITNESS); + vals[3] = next.getInfo().get(PNRepResult.MOVELOGFITNESS); + vals[4] = next.getInfo().get(PNRepResult.MOVEMODELFITNESS); + vals[5] = next.getInfo().get(PNRepResult.RAWFITNESSCOST); + vals[6] = next.getInfo().get(PNRepResult.NUMSTATEGENERATED); + // convert the list to array + Attribute[] attribArray = new Attribute[attributes3.size()]; + for (int i = 0; i < attributes3.size(); i++) { + attribArray[i] = attributes3.get(i); + } + List listArray = convertIntListToArray(next.getTraceIndex().toString()); + for (Integer s : listArray) { + // get the right trace + XTrace xTrace = getXLog().get(s); + String name = XConceptExtension.instance().extractName(xTrace); + vals[0] = s.toString(); + vals[1] = name; + DataRow dataRow = factory.create(vals, attribArray); + table3.addDataRow(dataRow); + } + } + } else { + DataRowFactory factory = new DataRowFactory(DataRowFactory.TYPE_DOUBLE_ARRAY, '.'); + Object[] vals = new Object[7]; + vals[0] = "?"; + vals[1] = "?"; + vals[2] = Double.NaN; + vals[3] = Double.NaN; + vals[4] = Double.NaN; + vals[5] = Double.NaN; + vals[6] = Double.NaN; + + Attribute[] attribArray = new Attribute[attributes3.size()]; + for (int i = 0; i < attributes3.size(); i++) { + attribArray[i] = attributes3.get(i); + } + DataRow dataRow = factory.create(vals, attribArray); + table3.addDataRow(dataRow); + } + es3 = table3.createExampleSet(); + outputAlignmentTrace.deliver(es3); + } + + public void fillUnreliableAlignmentsTable(boolean unreliable) { + // CREATE THE fourth es + ExampleSet es4 = null; + MemoryExampleTable table4 = null; + List attributes4 = new LinkedList(); + attributes4.add(AttributeFactory.createAttribute(this.NAMECOL, Ontology.STRING)); + attributes4.add(AttributeFactory.createAttribute(this.VALUECOL, Ontology.STRING)); + table4 = new MemoryExampleTable(attributes4); + fillTableWithRow(table4, RELIABLE, Boolean.toString(unreliable), 
attributes4); + es4 = table4.createExampleSet(); + outputReliable.deliver(es4); + } + +} diff --git a/src/main/java/org/rapidprom/operators/conformance/ETCPrecisionOperator.java b/src/main/java/org/rapidprom/operators/conformance/ETCPrecisionOperator.java new file mode 100644 index 0000000..2ab5a5f --- /dev/null +++ b/src/main/java/org/rapidprom/operators/conformance/ETCPrecisionOperator.java @@ -0,0 +1,173 @@ +package org.rapidprom.operators.conformance; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.SortedSet; +import java.util.logging.Level; +import java.util.logging.Logger; + +import javassist.tools.rmi.ObjectNotFoundException; + +import org.processmining.framework.connections.ConnectionCannotBeObtained; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.models.semantics.IllegalTransitionException; +import org.processmining.plugins.alignetc.AlignETCPlugin; +import org.processmining.plugins.alignetc.AlignETCSettings; +import org.processmining.plugins.alignetc.core.ReplayAutomaton; +import org.processmining.plugins.alignetc.result.AlignETCResult; +import org.processmining.plugins.petrinet.replayresult.PNMatchInstancesRepResult; +import org.processmining.plugins.petrinet.replayresult.StepTypes; +import org.processmining.plugins.replayer.replayresult.AllSyncReplayResult; +import org.processmining.plugins.replayer.replayresult.SyncReplayResult; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.PNRepResultIOObject; +import org.rapidprom.ioobjects.PetriNetIOObject; + +import com.rapidminer.example.Attribute; +import com.rapidminer.example.ExampleSet; +import com.rapidminer.example.table.AttributeFactory; +import com.rapidminer.example.table.DataRow; +import com.rapidminer.example.table.DataRowFactory; +import com.rapidminer.example.table.MemoryExampleTable; +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeDouble; +import com.rapidminer.tools.LogService; +import com.rapidminer.tools.Ontology; + +public class ETCPrecisionOperator extends Operator { + + private static final String PARAMETER_1 = "Pruning (cut) threshold for the Automaton"; + + private InputPort input1 = getInputPorts().createPort("model (PetriNet)", PetriNetIOObject.class); + private InputPort input2 = getInputPorts().createPort("alignments (ProM PNRepResult)", PNRepResultIOObject.class); + + + private OutputPort outputMetrics = getOutputPorts().createPort("example set (Data Table)"); + + //private ExampleSetMetaData metaData = null; + + private final String NAMECOL = "Name"; + private final String VALUECOL = "Value"; + + public ETCPrecisionOperator(OperatorDescription description) { + super(description); + +// this.metaData = new ExampleSetMetaData(); +// AttributeMetaData amd1 = new AttributeMetaData(NAMECOL, +// Ontology.STRING); +// amd1.setRole(AttributeColumn.REGULAR); +// amd1.setNumberOfMissingValues(new MDInteger(0)); +// metaData.addAttribute(amd1); +// AttributeMetaData amd2 = new AttributeMetaData(VALUECOL, +// Ontology.NUMERICAL); +// amd2.setRole(AttributeColumn.REGULAR); +// amd2.setNumberOfMissingValues(new MDInteger(0)); +// metaData.addAttribute(amd2); +// 
metaData.setNumberOfExamples(2);
+//		getTransformer()
+//				.addRule(new GenerateNewMDRule(outputMetrics, this.metaData));
+	}
+
+	@Override
+	public void doWork() throws OperatorException {
+		Logger logger = LogService.getRoot();
+		logger.log(Level.INFO,
+				"Start: precision");
+		long time = System.currentTimeMillis();
+
+		PluginContext pluginContext = RapidProMGlobalContext.instance().getPluginContext();
+
+		PetriNetIOObject net = input1.getData(PetriNetIOObject.class);
+		PNRepResultIOObject alignment = input2.getData(PNRepResultIOObject.class);
+
+		// Convert to an n-alignments object
+		Collection<AllSyncReplayResult> col = new ArrayList<AllSyncReplayResult>();
+		for (SyncReplayResult rep : alignment.getArtifact()) {
+
+			// Get all the attributes of the 1-alignment result
+			List<List<Object>> nodes = new ArrayList<List<Object>>();
+			nodes.add(rep.getNodeInstance());
+
+			List<List<StepTypes>> types = new ArrayList<List<StepTypes>>();
+			types.add(rep.getStepTypes());
+
+			SortedSet<Integer> traces = rep.getTraceIndex();
+			boolean rel = rep.isReliable();
+
+			// Create an n-alignment result with these attributes
+			AllSyncReplayResult allRep = new AllSyncReplayResult(nodes, types, -1, rel);
+			allRep.setTraceIndex(traces); // the constructor does not allow setting the trace set directly
+			col.add(allRep);
+		}
+		PNMatchInstancesRepResult alignments = new PNMatchInstancesRepResult(col);
+
+		AlignETCPlugin etc = new AlignETCPlugin();
+		AlignETCResult res = new AlignETCResult();
+		AlignETCSettings sett = new AlignETCSettings(res);
+
+		ReplayAutomaton ra = null;
+		try {
+			ra = new ReplayAutomaton(pluginContext, alignments, net.getArtifact());
+		} catch (ConnectionCannotBeObtained e1) {
+			e1.printStackTrace();
+		}
+		ra.cut(getParameterAsDouble(PARAMETER_1));
+
+		try {
+			ra.extend(net.getArtifact(), net.getInitialMarking());
+		} catch (IllegalTransitionException | ObjectNotFoundException e1) {
+			e1.printStackTrace();
+		}
+		ra.conformance(res);
+
+		ExampleSet es = null;
+		MemoryExampleTable table = null;
+		List<Attribute> attributes = new LinkedList<Attribute>();
+		attributes.add(AttributeFactory.createAttribute(this.NAMECOL,
+				Ontology.STRING));
+		attributes.add(AttributeFactory.createAttribute(this.VALUECOL,
+				Ontology.NUMERICAL));
+		table = new MemoryExampleTable(attributes);
+		fillTableWithRow(table, "Precision", res.ap, attributes);
+
+		es = table.createExampleSet();
+		outputMetrics.deliver(es);
+
+		logger.log(Level.INFO, "End: precision (" + (System.currentTimeMillis() - time) / 1000 + " sec)");
+
+	}
+
+	public List<ParameterType> getParameterTypes() {
+		List<ParameterType> parameterTypes = super.getParameterTypes();
+
+		ParameterTypeDouble parameterType1 = new ParameterTypeDouble(PARAMETER_1, PARAMETER_1, 0d, 1d, 0d);
+		parameterTypes.add(parameterType1);
+
+		return parameterTypes;
+	}
+
+	private void fillTableWithRow(MemoryExampleTable table, String name,
+			Object value, List<Attribute> attributes) {
+		// fill table
+		DataRowFactory factory = new DataRowFactory(
+				DataRowFactory.TYPE_DOUBLE_ARRAY, '.');
+		Object[] vals = new Object[2];
+		vals[0] = name;
+		vals[1] = value;
+		// convert the list to an array
+		Attribute[] attribArray = new Attribute[attributes.size()];
+		for (int i = 0; i < attributes.size(); i++) {
+			attribArray[i] = attributes.get(i);
+		}
+		DataRow dataRow = factory.create(vals, attribArray);
+		table.addDataRow(dataRow);
+	}
+}
diff --git a/src/main/java/org/rapidprom/operators/conformance/MeasurePrecisionAnalysisOperator.java b/src/main/java/org/rapidprom/operators/conformance/MeasurePrecisionAnalysisOperator.java
new file mode 100644
index 0000000..80ea377
--- /dev/null
+++ 
b/src/main/java/org/rapidprom/operators/conformance/MeasurePrecisionAnalysisOperator.java @@ -0,0 +1,129 @@ +package org.rapidprom.operators.conformance; + +import java.util.LinkedList; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.plugins.pnalignanalysis.conformance.AlignmentPrecGen; +import org.processmining.plugins.pnalignanalysis.conformance.AlignmentPrecGenRes; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.PNRepResultIOObject; + +import com.rapidminer.example.Attribute; +import com.rapidminer.example.ExampleSet; +import com.rapidminer.example.table.AttributeFactory; +import com.rapidminer.example.table.DataRow; +import com.rapidminer.example.table.DataRowFactory; +import com.rapidminer.example.table.MemoryExampleTable; +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeBoolean; +import com.rapidminer.tools.LogService; +import com.rapidminer.tools.Ontology; + +import javassist.tools.rmi.ObjectNotFoundException; + +public class MeasurePrecisionAnalysisOperator extends Operator { + + private static final String PARAMETER_1 = "Consider traces with the same activity sequence as the same trace"; + private InputPort input = getInputPorts().createPort("alignments (ProM PNRepResult)", PNRepResultIOObject.class); + + private OutputPort outputMetrics = getOutputPorts().createPort("example set (Data Table)"); + + // private ExampleSetMetaData metaData = null; + + private final String NAMECOL = "Name"; + private final String VALUECOL = "Value"; + + public MeasurePrecisionAnalysisOperator(OperatorDescription description) { + super(description); + + // this.metaData = new ExampleSetMetaData(); + // AttributeMetaData amd1 = new AttributeMetaData(NAMECOL, + // Ontology.STRING); + // amd1.setRole(AttributeColumn.REGULAR); + // amd1.setNumberOfMissingValues(new MDInteger(0)); + // metaData.addAttribute(amd1); + // AttributeMetaData amd2 = new AttributeMetaData(VALUECOL, + // Ontology.NUMERICAL); + // amd2.setRole(AttributeColumn.REGULAR); + // amd2.setNumberOfMissingValues(new MDInteger(0)); + // metaData.addAttribute(amd2); + // metaData.setNumberOfExamples(2); + // getTransformer() + // .addRule(new GenerateNewMDRule(outputMetrics, this.metaData)); + } + + @Override + public void doWork() throws OperatorException { + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: measure precision/generalization based on alignments"); + long time = System.currentTimeMillis(); + + PluginContext pluginContext = RapidProMGlobalContext.instance().getPluginContext(); + + PNRepResultIOObject alignment = input.getData(PNRepResultIOObject.class); + + ExampleSet es = null; + MemoryExampleTable table = null; + List attributes = new LinkedList(); + attributes.add(AttributeFactory.createAttribute(this.NAMECOL, Ontology.STRING)); + attributes.add(AttributeFactory.createAttribute(this.VALUECOL, Ontology.NUMERICAL)); + table = new MemoryExampleTable(attributes); + + if (alignment.getArtifact() != null) { + AlignmentPrecGen aligner = new AlignmentPrecGen(); + AlignmentPrecGenRes result = null; + try { + result = 
aligner.measureConformanceAssumingCorrectAlignment(pluginContext, alignment.getMapping(), + alignment.getArtifact(), alignment.getPn().getArtifact(), alignment.getPn().getInitialMarking(), + getParameterAsBoolean(PARAMETER_1)); + + fillTableWithRow(table, "Precision", result.getPrecision(), attributes); + fillTableWithRow(table, "Generalization", result.getGeneralization(), attributes); + + } catch (ObjectNotFoundException e) { + e.printStackTrace(); + } + } else { + fillTableWithRow(table, "Precision", Double.NaN, attributes); + fillTableWithRow(table, "Generalization", Double.NaN, attributes); + } + es = table.createExampleSet(); + outputMetrics.deliver(es); + + logger.log(Level.INFO, "End: measure precision/generalization based on alignments (" + + (System.currentTimeMillis() - time) / 1000 + " sec)"); + + } + + public List getParameterTypes() { + List parameterTypes = super.getParameterTypes(); + + ParameterTypeBoolean parameterType1 = new ParameterTypeBoolean(PARAMETER_1, PARAMETER_1, true); + parameterTypes.add(parameterType1); + + return parameterTypes; + } + + private void fillTableWithRow(MemoryExampleTable table, String name, Object value, List attributes) { + // fill table + DataRowFactory factory = new DataRowFactory(DataRowFactory.TYPE_DOUBLE_ARRAY, '.'); + Object[] vals = new Object[2]; + vals[0] = name; + vals[1] = value; + // convert the list to array + Attribute[] attribArray = new Attribute[attributes.size()]; + for (int i = 0; i < attributes.size(); i++) { + attribArray[i] = attributes.get(i); + } + DataRow dataRow = factory.create(vals, attribArray); + table.addDataRow(dataRow); + } +} diff --git a/src/main/java/org/rapidprom/operators/conformance/PerformanceConformanceAnalysisOperator.java b/src/main/java/org/rapidprom/operators/conformance/PerformanceConformanceAnalysisOperator.java new file mode 100644 index 0000000..4ad7fea --- /dev/null +++ b/src/main/java/org/rapidprom/operators/conformance/PerformanceConformanceAnalysisOperator.java @@ -0,0 +1,242 @@ +package org.rapidprom.operators.conformance; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.Callable; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.deckfour.xes.classification.XEventClass; +import org.deckfour.xes.classification.XEventClassifier; +import org.deckfour.xes.info.XLogInfoFactory; +import org.deckfour.xes.model.XLog; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.models.graphbased.directed.petrinet.Petrinet; +import org.processmining.models.graphbased.directed.petrinet.elements.Place; +import org.processmining.models.semantics.petrinet.Marking; +import org.processmining.plugins.astar.petrinet.PetrinetReplayerNoILPRestrictedMoveModel; +import org.processmining.plugins.astar.petrinet.manifestreplay.CostBasedCompleteManifestParam; +import org.processmining.plugins.astar.petrinet.manifestreplay.ManifestFactory; +import org.processmining.plugins.astar.petrinet.manifestreplay.PNManifestFlattener; +import org.processmining.plugins.petrinet.manifestreplayer.EvClassPattern; +import org.processmining.plugins.petrinet.manifestreplayer.PNManifestReplayer; +import org.processmining.plugins.petrinet.manifestreplayer.PNManifestReplayerParameter; +import 
org.processmining.plugins.petrinet.manifestreplayer.TransClass2PatternMap; +import org.processmining.plugins.petrinet.manifestreplayer.transclassifier.TransClass; +import org.processmining.plugins.petrinet.manifestreplayer.transclassifier.TransClasses; +import org.processmining.plugins.petrinet.manifestreplayresult.Manifest; +import org.processmining.plugins.petrinet.replayer.PNLogReplayer; +import org.processmining.plugins.petrinet.replayresult.PNRepResult; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.ManifestIOObject; +import org.rapidprom.ioobjects.PetriNetIOObject; +import org.rapidprom.operators.abstr.AbstractRapidProMDiscoveryOperator; + +import com.google.common.util.concurrent.SimpleTimeLimiter; +import com.google.common.util.concurrent.UncheckedTimeoutException; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeInt; +import com.rapidminer.parameter.UndefinedParameterError; +import com.rapidminer.tools.LogService; + +import javassist.tools.rmi.ObjectNotFoundException; + +public class PerformanceConformanceAnalysisOperator extends AbstractRapidProMDiscoveryOperator { + + private static final String PARAMETER_1_KEY = "Max Explored States (in Thousands)", + PARAMETER_1_DESCR = "The maximum number of states that are searched for a trace alignment.", + PARAMETER_2_KEY = "Timeout (sec)", + PARAMETER_2_DESCR = "The number of seconds that this operator will run before " + + "returning whatever it could manage to calculate (or null otherwise).", + PARAMETER_3_KEY = "Number of Threads", + PARAMETER_3_DESCR = "Specify the number of threads used to calculate alignments in parallel." 
+ + " With each extra thread, more memory is used but less cpu time is required."; + + private InputPort inputPN = getInputPorts().createPort("model (ProM Petri Net)", PetriNetIOObject.class); + private OutputPort outputManifest = getOutputPorts().createPort("model (ProM Manifest)"); + + public PerformanceConformanceAnalysisOperator(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(outputManifest, ManifestIOObject.class)); + // getTransformer().addRule( new GenerateNewMDRule(outputFitness, + // FitnessIOObject.class)); + } + + public void doWork() throws OperatorException { + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: replay log on petri net for performance/conformance checking"); + long time = System.currentTimeMillis(); + + ManifestIOObject manifestIOObject; + SimpleTimeLimiter limiter = new SimpleTimeLimiter(Executors.newSingleThreadExecutor()); + + PluginContext pluginContext = RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(PNManifestReplayer.class); + try { + manifestIOObject = limiter.callWithTimeout(new PERFORMANCE_CALCULATOR(pluginContext), + getParameterAsInt(PARAMETER_2_KEY), TimeUnit.SECONDS, true); + outputManifest.deliver(manifestIOObject); + } catch (UncheckedTimeoutException e1) { + pluginContext.getProgress().cancel(); + logger.log(Level.INFO, "Performance Ckecker timed out."); + } catch (Exception e) { + e.printStackTrace(); + return; + } + + logger.log(Level.INFO, "End: replay log on petri net for performance/conformance checking (" + + (System.currentTimeMillis() - time) / 1000 + " sec)"); + } + + class PERFORMANCE_CALCULATOR implements Callable { + + PluginContext pluginContext; + + public PERFORMANCE_CALCULATOR(PluginContext input) { + pluginContext = input; + } + + @Override + public ManifestIOObject call() throws Exception { + + PetriNetIOObject pNet = inputPN.getData(PetriNetIOObject.class); + XLog xLog = getXLog(); + + PNManifestReplayerParameter manifestParameters = getParameterObject(pNet, xLog); + + PNManifestFlattener flattener = new PNManifestFlattener(pNet.getArtifact(), manifestParameters); + + CostBasedCompleteManifestParam parameter = new CostBasedCompleteManifestParam( + flattener.getMapEvClass2Cost(), flattener.getMapTrans2Cost(), flattener.getMapSync2Cost(), + flattener.getInitMarking(), flattener.getFinalMarkings(), manifestParameters.getMaxNumOfStates(), + flattener.getFragmentTrans()); + parameter.setGUIMode(false); + parameter.setCreateConn(false); + parameter.setNumThreads(getParameterAsInt(PARAMETER_3_KEY)); + + PNLogReplayer replayer = new PNLogReplayer(); + PetrinetReplayerNoILPRestrictedMoveModel replayAlgorithm = new PetrinetReplayerNoILPRestrictedMoveModel(); + + Manifest result = null; + try { + PNRepResult alignment = replayer.replayLog(pluginContext, flattener.getNet(), xLog, flattener.getMap(), + replayAlgorithm, parameter); + result = ManifestFactory.construct(pNet.getArtifact(), manifestParameters.getInitMarking(), + manifestParameters.getFinalMarkings(), xLog, flattener, alignment, + manifestParameters.getMapping()); + + return new ManifestIOObject(result, pluginContext); + + } catch (Exception e) { + e.printStackTrace(); + return null; + } + + } + + } + + private PNManifestReplayerParameter getParameterObject(PetriNetIOObject pNet, XLog log) + throws UndefinedParameterError { + PNManifestReplayerParameter parameter = new PNManifestReplayerParameter(); + try { + parameter.setGUIMode(false); + 
+			parameter.setInitMarking(pNet.getInitialMarking());
+			if (!pNet.hasFinalMarking())
+				pNet.setFinalMarking(getFinalMarking(pNet.getArtifact()));
+			parameter.setFinalMarkings(pNet.getFinalMarkingAsArray());
+
+			parameter.setMaxNumOfStates(getParameterAsInt(PARAMETER_1_KEY) * 1000);
+			TransClasses tc = new TransClasses(pNet.getArtifact());
+			Map<TransClass, Set<EvClassPattern>> pattern = new HashMap<TransClass, Set<EvClassPattern>>();
+
+			XEventClassifier classifier = getXEventClassifier();
+			Collection<XEventClass> eventClasses = XLogInfoFactory.createLogInfo(log, classifier).getEventClasses()
+					.getClasses();
+
+			for (TransClass t : tc.getTransClasses()) {
+				Set<EvClassPattern> p = new HashSet<EvClassPattern>();
+				line: for (XEventClass clazz : eventClasses)
+					// look for exact matches on the id
+					if (clazz.getId().equals(t.getId())) {
+						EvClassPattern pat = new EvClassPattern();
+						pat.add(clazz);
+						p.add(pat);
+						pattern.put(t, p);
+						break line;
+					}
+
+			}
+			TransClass2PatternMap mapping = new TransClass2PatternMap(log, pNet.getArtifact(), classifier, tc, pattern);
+			parameter.setMapping(mapping);
+
+			Map<XEventClass, Integer> mapEvClass2Cost = new HashMap<XEventClass, Integer>();
+			for (XEventClass c : eventClasses) {
+				mapEvClass2Cost.put(c, 1);
+			}
+
+			parameter.setMapEvClass2Cost(mapEvClass2Cost);
+
+			Map<TransClass, Integer> costs = new HashMap<TransClass, Integer>();
+			Map<TransClass, Integer> costsSync = new HashMap<TransClass, Integer>();
+			for (TransClass t : tc.getTransClasses()) {
+				costs.put(t, 1);
+				costsSync.put(t, 0);
+			}
+			parameter.setTrans2Cost(costs);
+			parameter.setTransSync2Cost(costsSync);
+
+		} catch (ObjectNotFoundException e1) {
+			e1.printStackTrace();
+		}
+
+		return parameter;
+	}
+
+	public List<ParameterType> getParameterTypes() {
+		List<ParameterType> parameterTypes = super.getParameterTypes();
+
+		ParameterTypeInt parameterType1 = new ParameterTypeInt(PARAMETER_1_KEY, PARAMETER_1_DESCR, 0, Integer.MAX_VALUE,
+				200);
+		parameterTypes.add(parameterType1);
+
+		ParameterTypeInt parameterType2 = new ParameterTypeInt(PARAMETER_2_KEY, PARAMETER_2_DESCR, 0, Integer.MAX_VALUE,
+				60);
+		parameterTypes.add(parameterType2);
+
+		ParameterTypeInt parameterType3 = new ParameterTypeInt(PARAMETER_3_KEY, PARAMETER_3_DESCR, 1, Integer.MAX_VALUE,
+				Runtime.getRuntime().availableProcessors());
+		parameterTypes.add(parameterType3);
+
+		return parameterTypes;
+	}
+
+	@SuppressWarnings("rawtypes")
+	public static Marking getFinalMarking(Petrinet pn) {
+		List<Place> places = new ArrayList<Place>();
+		Iterator<Place> placesIt = pn.getPlaces().iterator();
+		while (placesIt.hasNext()) {
+			Place nextPlace = placesIt.next();
+			Collection inEdges = pn.getOutEdges(nextPlace);
+			if (inEdges.isEmpty()) {
+				places.add(nextPlace);
+			}
+		}
+		Marking m = new Marking();
+		m.addAll(places);
+		return m;
+	}
+}
diff --git a/src/main/java/org/rapidprom/operators/conversion/ExampleSetToXLogConversionOperator.java b/src/main/java/org/rapidprom/operators/conversion/ExampleSetToXLogConversionOperator.java
new file mode 100644
index 0000000..31fe9b7
--- /dev/null
+++ b/src/main/java/org/rapidprom/operators/conversion/ExampleSetToXLogConversionOperator.java
@@ -0,0 +1,521 @@
+package org.rapidprom.operators.conversion;
+
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Collection;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.deckfour.xes.classification.XEventAndClassifier;
+import org.deckfour.xes.classification.XEventLifeTransClassifier;
+import org.deckfour.xes.classification.XEventNameClassifier;
+import org.deckfour.xes.extension.std.XConceptExtension;
+import 
org.deckfour.xes.extension.std.XLifecycleExtension; +import org.deckfour.xes.extension.std.XOrganizationalExtension; +import org.deckfour.xes.extension.std.XTimeExtension; +import org.deckfour.xes.factory.XFactory; +import org.deckfour.xes.model.XAttributeLiteral; +import org.deckfour.xes.model.XAttributeMap; +import org.deckfour.xes.model.XEvent; +import org.deckfour.xes.model.XLog; +import org.deckfour.xes.model.XTrace; +import org.deckfour.xes.model.impl.XAttributeLiteralImpl; +import org.deckfour.xes.model.impl.XAttributeMapImpl; +import org.deckfour.xes.model.impl.XAttributeTimestampImpl; +import org.processmining.xeslite.external.XFactoryExternalStore; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.XLogIOObject; +import org.rapidprom.operators.ports.metadata.ExampleSetNumberOfAttributesPrecondition; +import org.rapidprom.parameter.ParameterTypeExampleSetAttributesDynamicCategory; + +import com.google.gwt.dev.util.collect.HashSet; +import com.rapidminer.example.Attribute; +import com.rapidminer.example.Example; +import com.rapidminer.example.ExampleSet; +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.ExampleSetMetaData; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeBoolean; +import com.rapidminer.parameter.UndefinedParameterError; +import com.rapidminer.parameter.conditions.BooleanParameterCondition; +import com.rapidminer.tools.LogService; + +public class ExampleSetToXLogConversionOperator extends Operator { + + private static final String DEFAULT_VALUE_OPTIONAL = ""; + private static final String GLOBAL_INVALID = "__INVALID__"; + private static final String PARAMETER_DEFAULT_EVENT_IDENTIFIER = "E:concept:name"; + private static final String PARAMETER_DEFAULT_EVENT_LIFECYCLE_TRANSITION = "E:lifecycle:transition"; + private static final String PARAMETER_DEFAULT_EVENT_RESOURCE = "E:org:resource"; + private static final String PARAMETER_DEFAULT_EVENT_RESOURCE_GROUP = DEFAULT_VALUE_OPTIONAL; + private static final String PARAMETER_DEFAULT_EVENT_RESOURCE_ROLE = DEFAULT_VALUE_OPTIONAL; + private static final String PARAMETER_DEFAULT_EVENT_TIMESTAMP = "E:time:timestamp"; + private static final String PARAMETER_DEFAULT_TRACE_IDENTIFIER = "T:concept:name"; + private static final String PARAMETER_DESC_EVENT_IDENTIFIER = "Please select an attribute of the example set to act as an event identifier"; + private static final String PARAMETER_DESC_EVENT_LIFECYCLE_TRANSITION = "Please select an (optional) attribute of the example set to act as lifecycle transition information"; + private static final String PARAMETER_DESC_EVENT_RESOURCE = "Please select an (optional) attribute of the example set that signifies the resource that executed the event"; + private static final String PARAMETER_DESC_EVENT_RESOURCE_GROUP = "Please select an (optional) attribute of the example set that signifies the resource group of the resource that executed the event"; + private static final String PARAMETER_DESC_EVENT_RESOURCE_ROLE = "Please select an (optional) attribute of the example set that signifies the role of the resource that executed the event"; + private static final String PARAMETER_DESC_EVENT_TIMESTAMP 
= "Please select an (optional) attribute of the example set to act as an event timestamp"; + private static final String PARAMETER_DESC_INCLUDE_ALL_DATA = "Please set this option to true if all (non-used) data columns should be added as event attributes. The attributes will have a key value event_attr:[name_of_column]."; + private static final String PARAMETER_DESC_INCLUDE_EVENT_TIME_STAMP = "Please set this option to true if the data contains timestamp information, and, you want to incorporate this in the resulting event log"; + private static final String PARAMETER_DESC_INCLUDE_ORGANIZATIONAL = "Please set this option to true if the data contains information about the organizational perspective of the process, and, you want to incorporate this in the resulting event log. The organizational perspective contains three attributes: resource, role and group. If you only want to use one of these, don't add the organziational perspective just select that all other data should be added as and attribute."; + private static final String PARAMETER_DESC_INLCUDE_EVENT_LIFECYCLE_TRANSITION = "Please set this option to true if the data contains lifecycle transition information, and, you want to incorporate this in the resulting event log."; + // private static final String PARAMETER_DESC_REORDER_BY_TIMESTAMP = "If the + // example set contains timestamps, this option will reorder the events + // within traces based on their time-stamps"; + private static final String PARAMETER_DESC_TRACE_IDENTIFIER = "Please select an attribute of the example set to act as a trace identifier"; + // private static final boolean PARAMETER_KEY_DEFAULT_REORDER_BY_TIMESTAMP = + // false; + private static final String PARAMETER_KEY_EVENT_IDENTIFIER = "event_identifier"; + private static final String PARAMETER_KEY_EVENT_LIFECYCLE_TRANSITION = "event_lifecycle_transition"; + private static final String PARAMETER_KEY_EVENT_RESOURCE = "event_resource"; + private static final String PARAMETER_KEY_EVENT_RESOURCE_GROUP = "event_resource_group"; + private static final String PARAMETER_KEY_EVENT_RESOURCE_ROLE = "event_resource_role"; + private static final String PARAMETER_KEY_EVENT_TIMESTAMP = "event_time_stamp"; + private static final String PARAMETER_KEY_INCLUDE_ALL_DATA = "include_all_columns_as_event_attributes"; + private static final String PARAMETER_KEY_INCLUDE_EVENT_LIFECYCLE_TRANSITION = "include_lifecycle_transition_information"; + private static final String PARAMETER_KEY_INCLUDE_EVENT_TIME_STAMP = "include_time_stamps"; + private static final String PARAMETER_KEY_INCLUDE_ORGANIZATIONAL = "include_organizational_perspective"; + // private static final String PARAMETER_KEY_REORDER_BY_TIMESTAMP = + // "reorder_by_time_stamp"; + private static final String PARAMETER_KEY_TRACE_IDENTIFIER = "trace_identifier"; + + /** defining the ports */ + private InputPort inputExampleSet = getInputPorts().createPort("example set (Data Table)", + new ExampleSetMetaData()); + + private OutputPort outputLog = getOutputPorts().createPort("event log (ProM Event Log)"); + private Collection reservedColumns = new HashSet<>(); + + public ExampleSetToXLogConversionOperator(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(outputLog, XLogIOObject.class)); + inputExampleSet.addPrecondition(new ExampleSetNumberOfAttributesPrecondition(inputExampleSet, 2)); + } + + private List addAllDataParameterType(List params) { + ParameterType allData = new ParameterTypeBoolean(PARAMETER_KEY_INCLUDE_ALL_DATA, 
+ PARAMETER_DESC_INCLUDE_ALL_DATA, true, false); + params.add(allData); + return params; + } + + private XLog addClassifiers(XLog log, boolean lifecycle) { + log.getClassifiers().add(new XEventNameClassifier()); + if (lifecycle) { + log.getClassifiers() + .add(new XEventAndClassifier(new XEventNameClassifier(), new XEventLifeTransClassifier())); + } + return log; + } + + private List addEventIdentificationParameterType(List params) { + ParameterType eventIdentification = setupDynamicExampleSetBasedParameterType(PARAMETER_KEY_EVENT_IDENTIFIER, + PARAMETER_DESC_EVENT_IDENTIFIER, new String[] { PARAMETER_DEFAULT_EVENT_IDENTIFIER }, 0, false, + inputExampleSet); + eventIdentification.setOptional(false); + params.add(eventIdentification); + return params; + } + + private XLog addExtensions(XLog log, boolean lifecycle, boolean time, boolean oragnizational) { + log.getExtensions().add(XConceptExtension.instance()); + if (lifecycle) { + log.getExtensions().add(XLifecycleExtension.instance()); + } + if (time) { + log.getExtensions().add(XTimeExtension.instance()); + } + if (oragnizational) { + log.getExtensions().add(XOrganizationalExtension.instance()); + } + return log; + } + + private XLog addGlobals(XLog log, boolean useLifeCycle, boolean useOrganizational, boolean useTimeStamp) { + log.getGlobalTraceAttributes().add( + new XAttributeLiteralImpl(XConceptExtension.KEY_NAME, GLOBAL_INVALID, XConceptExtension.instance())); + log.getGlobalEventAttributes().add( + new XAttributeLiteralImpl(XConceptExtension.KEY_NAME, GLOBAL_INVALID, XConceptExtension.instance())); + if (useLifeCycle) { + log.getGlobalEventAttributes().add(new XAttributeLiteralImpl(XLifecycleExtension.KEY_TRANSITION, + GLOBAL_INVALID, XLifecycleExtension.instance())); + } + if (useTimeStamp) { + log.getGlobalEventAttributes() + .add(new XAttributeTimestampImpl(XTimeExtension.KEY_TIMESTAMP, 0, XTimeExtension.instance())); + } + if (useOrganizational) { + if (!getDynamicParameterTypeValue(PARAMETER_KEY_EVENT_RESOURCE).equals(DEFAULT_VALUE_OPTIONAL)) { + log.getGlobalEventAttributes().add(new XAttributeLiteralImpl(XOrganizationalExtension.KEY_RESOURCE, + GLOBAL_INVALID, XOrganizationalExtension.instance())); + } + if (!getDynamicParameterTypeValue(PARAMETER_KEY_EVENT_RESOURCE_ROLE).equals(DEFAULT_VALUE_OPTIONAL)) { + log.getGlobalEventAttributes().add(new XAttributeLiteralImpl(XOrganizationalExtension.KEY_ROLE, + GLOBAL_INVALID, XOrganizationalExtension.instance())); + } + if (!getDynamicParameterTypeValue(PARAMETER_KEY_EVENT_RESOURCE_GROUP).equals(DEFAULT_VALUE_OPTIONAL)) { + log.getGlobalEventAttributes().add(new XAttributeLiteralImpl(XOrganizationalExtension.KEY_GROUP, + GLOBAL_INVALID, XOrganizationalExtension.instance())); + } + } + return log; + } + + private List addLifecycleTransitionParameterTypes(List params) { + ParameterType includeLifecycleTransition = new ParameterTypeBoolean( + PARAMETER_KEY_INCLUDE_EVENT_LIFECYCLE_TRANSITION, PARAMETER_DESC_INLCUDE_EVENT_LIFECYCLE_TRANSITION, + false, false); + params.add(includeLifecycleTransition); + + ParameterType lifecycleTransition = setupDynamicExampleSetBasedParameterType( + PARAMETER_KEY_EVENT_LIFECYCLE_TRANSITION, PARAMETER_DESC_EVENT_LIFECYCLE_TRANSITION, + new String[] { PARAMETER_DEFAULT_EVENT_LIFECYCLE_TRANSITION }, 0, true, inputExampleSet); + lifecycleTransition.setOptional(true); + lifecycleTransition.registerDependencyCondition( + new BooleanParameterCondition(this, PARAMETER_KEY_INCLUDE_EVENT_LIFECYCLE_TRANSITION, true, true)); + params.add(lifecycleTransition); + 
return params; + } + + private List addOrganizationalPerspectiveSelector(List params) { + ParameterTypeBoolean orgPerspective = new ParameterTypeBoolean(PARAMETER_KEY_INCLUDE_ORGANIZATIONAL, + PARAMETER_DESC_INCLUDE_ORGANIZATIONAL, false, false); + params.add(orgPerspective); + return params; + } + + private List addResourceGroupParameterType(List params, String isOrgKey) { + ParameterType group = setupDynamicExampleSetBasedParameterType(PARAMETER_KEY_EVENT_RESOURCE_GROUP, + PARAMETER_DESC_EVENT_RESOURCE_GROUP, new String[] { PARAMETER_DEFAULT_EVENT_RESOURCE_GROUP }, 0, true, + inputExampleSet); + group.setOptional(true); + group.registerDependencyCondition(new BooleanParameterCondition(this, isOrgKey, true, true)); + params.add(group); + return params; + } + + private List addResourceParameterType(List params, String isOrgKey) { + + ParameterType resource = setupDynamicExampleSetBasedParameterType(PARAMETER_KEY_EVENT_RESOURCE, + PARAMETER_DESC_EVENT_RESOURCE, new String[] { PARAMETER_DEFAULT_EVENT_RESOURCE }, 0, true, + inputExampleSet); + resource.setOptional(true); + + resource.registerDependencyCondition(new BooleanParameterCondition(this, isOrgKey, true, true)); + + params.add(resource); + return params; + } + + private List addResourceRoleParameterType(List params, String isOrgKey) { + + ParameterType role = setupDynamicExampleSetBasedParameterType(PARAMETER_KEY_EVENT_RESOURCE_ROLE, + PARAMETER_DESC_EVENT_RESOURCE_ROLE, new String[] { PARAMETER_DEFAULT_EVENT_RESOURCE_ROLE }, 0, true, + inputExampleSet); + role.setOptional(true); + role.registerDependencyCondition(new BooleanParameterCondition(this, isOrgKey, true, true)); + params.add(role); + return params; + } + + private List addTimeStampParameterTypes(List params) { + + ParameterType includeTimeStamps = new ParameterTypeBoolean(PARAMETER_KEY_INCLUDE_EVENT_TIME_STAMP, + PARAMETER_DESC_INCLUDE_EVENT_TIME_STAMP, false, false); + params.add(includeTimeStamps); + + ParameterType eventTimeStamp = setupDynamicExampleSetBasedParameterType(PARAMETER_KEY_EVENT_TIMESTAMP, + PARAMETER_DESC_EVENT_TIMESTAMP, new String[] { PARAMETER_DEFAULT_EVENT_TIMESTAMP }, 0, false, + inputExampleSet); + eventTimeStamp.setOptional(true); + eventTimeStamp.registerDependencyCondition( + new BooleanParameterCondition(this, PARAMETER_KEY_INCLUDE_EVENT_TIME_STAMP, true, true)); + params.add(eventTimeStamp); + + // FIXME we do not allow for reordering as the ProM reordering operator + // does not copy all extensions / attributes etc. 
+ // ParameterType eventTimeStampReorder = new ParameterTypeBoolean( + // PARAMETER_KEY_REORDER_BY_TIMESTAMP, + // PARAMETER_DESC_REORDER_BY_TIMESTAMP, + // PARAMETER_KEY_DEFAULT_REORDER_BY_TIMESTAMP, false); + // eventTimeStampReorder.setOptional(true); + // eventTimeStampReorder.registerDependencyCondition( + // new NonEqualStringCondition(this, PARAMETER_KEY_EVENT_TIMESTAMP, + // true, new String[] { DEFAULT_VALUE_OPTIONAL })); + // params.add(eventTimeStampReorder); + return params; + } + + private List addTraceIdentificationParameterType(List params) { + ParameterType traceIdentification = setupDynamicExampleSetBasedParameterType(PARAMETER_KEY_TRACE_IDENTIFIER, + PARAMETER_DESC_TRACE_IDENTIFIER, new String[] { PARAMETER_DEFAULT_TRACE_IDENTIFIER }, 0, false, + inputExampleSet); + traceIdentification.setOptional(false); + params.add(traceIdentification); + return params; + } + + private XEvent constructEvent(XFactory factory, ExampleSet data, Example example, String eventIdentifier) { + XAttributeMap attributes = new XAttributeMapImpl(); + String eventName = example.getValueAsString(data.getAttributes().get(eventIdentifier)); + attributes.put(XConceptExtension.KEY_NAME, + new XAttributeLiteralImpl(XConceptExtension.KEY_NAME, eventName, XConceptExtension.instance())); + XEvent event = factory.createEvent(attributes); + return decorateEvent(event, data, example); + } + + private XLog constructLogByExampleSet(ExampleSet data) { + // XFactory factory = XFactoryRegistry.instance().currentDefault(); + XFactory factory = new XFactoryExternalStore.MapDBDiskImpl(); + + XLog log = createLog(factory, getParameterAsBoolean(PARAMETER_KEY_INCLUDE_EVENT_LIFECYCLE_TRANSITION)); + + log = addExtensions(log, isUseLifeCycle(), isUseTime(), isUseOrganizational()); + + log = addGlobals(log, isUseLifeCycle(), isUseOrganizational(), isUseTime()); + + log = addClassifiers(log, isUseLifeCycle()); + + // iterate over traces and events + Iterator iterator = data.iterator(); + Map mapping = new HashMap(); + String traceIdentifier = getDynamicParameterTypeValue(PARAMETER_KEY_TRACE_IDENTIFIER); + String eventIdentifier = getDynamicParameterTypeValue(PARAMETER_KEY_EVENT_IDENTIFIER); + while (iterator.hasNext()) { + log = processExampleAsEvent(factory, log, data, iterator.next(), mapping, traceIdentifier, eventIdentifier); + + } + return log; + } + + /** + * given some trace identifier, this function returns a corresponding XTrace + * object. if it already exists in the map, the corresponding object will be + * returned. If it is a new instance, the trace will be added to the given + * event log and, the map will be updated. 
+ * + * @param factory + * @param log + * @param traceIdentifier + * @param mapping + * @return + */ + private XTrace constructTrace(XFactory factory, XLog log, String traceIdentifier, Map mapping) { + if (mapping.containsKey(traceIdentifier)) + return mapping.get(traceIdentifier); + XAttributeLiteral attribNameTrace = factory.createAttributeLiteral("concept:name", traceIdentifier, + XConceptExtension.instance()); + XAttributeMap attribMapTrace = new XAttributeMapImpl(); + attribMapTrace.put(XConceptExtension.KEY_NAME, attribNameTrace); + XTrace trace = factory.createTrace(attribMapTrace); + log.add(trace); + mapping.put(traceIdentifier, trace); + return trace; + } + + private XLog createLog(XFactory factory, boolean useLifeCycleModel) { + XAttributeLiteral attribNameLog = factory.createAttributeLiteral(XConceptExtension.KEY_NAME, + "Event Log (created by RapidMiner @ " + + (new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")).format(Calendar.getInstance().getTime()) + ")", + XConceptExtension.instance()); + XAttributeMap attribMapLog = new XAttributeMapImpl(); + attribMapLog.put(XConceptExtension.KEY_NAME, attribNameLog); + + if (useLifeCycleModel) { + XAttributeLiteral attribLifecycleLog = factory.createAttributeLiteral(XLifecycleExtension.KEY_MODEL, + XLifecycleExtension.VALUE_MODEL_STANDARD, XLifecycleExtension.instance()); + attribMapLog.put(XLifecycleExtension.KEY_MODEL, attribLifecycleLog); + } + XLog log = factory.createLog(attribMapLog); + + return log; + } + + private XEvent decorateEvent(XEvent event, ExampleSet data, Example example) { + if (isUseTime()) { + event = decorateEventWithTime(event, data, example); + } + if (isUseLifeCycle()) { + event = decorateEventWithLifeCycle(event, data, example); + } + if (isUseOrganizational()) { + event = decorateEventWithResource(event, data, example); + event = decorateEventWithRole(event, data, example); + event = decorateEventWithGroup(event, data, example); + } + if (isIncludeAllData()) { + event = decorateEventWithAdditionalData(event, data, example); + } + + return event; + } + + private XEvent decorateEventWithAdditionalData(XEvent event, ExampleSet data, Example example) { + for (Attribute a : data.getAttributes()) { + if (!(reservedColumns.contains(a.getName()))) { + String attrVal = example.getValueAsString(a); + String attrKey = "event_attr:" + a.getName(); + event.getAttributes().put(attrKey, new XAttributeLiteralImpl(attrKey, attrVal)); + } + } + return event; + } + + private XEvent decorateEventWithGroup(XEvent event, ExampleSet data, Example example) { + String resourceGroupAttr = getDynamicParameterTypeValue(PARAMETER_KEY_EVENT_RESOURCE_GROUP); + if (!resourceGroupAttr.equals(DEFAULT_VALUE_OPTIONAL)) { + String group = example.getValueAsString(data.getAttributes().get(resourceGroupAttr)); + event.getAttributes().put(XOrganizationalExtension.KEY_GROUP, new XAttributeLiteralImpl( + XOrganizationalExtension.KEY_GROUP, group, XOrganizationalExtension.instance())); + } + return event; + } + + private XEvent decorateEventWithLifeCycle(XEvent event, ExampleSet data, Example example) { + String ltAttr = getDynamicParameterTypeValue(PARAMETER_KEY_EVENT_LIFECYCLE_TRANSITION); + if (!ltAttr.equals(DEFAULT_VALUE_OPTIONAL)) { + String lifecycle = example.getValueAsString(data.getAttributes().get(ltAttr)); + event.getAttributes().put(XLifecycleExtension.KEY_TRANSITION, new XAttributeLiteralImpl( + XLifecycleExtension.KEY_TRANSITION, lifecycle, XLifecycleExtension.instance())); + } + return event; + } + + private XEvent 
decorateEventWithResource(XEvent event, ExampleSet data, Example example) { + String resourceAttr = getDynamicParameterTypeValue(PARAMETER_KEY_EVENT_RESOURCE); + if (!resourceAttr.equals(DEFAULT_VALUE_OPTIONAL)) { + String resource = example.getValueAsString(data.getAttributes().get(resourceAttr)); + event.getAttributes().put(XOrganizationalExtension.KEY_RESOURCE, new XAttributeLiteralImpl( + XOrganizationalExtension.KEY_RESOURCE, resource, XOrganizationalExtension.instance())); + } + return event; + } + + private XEvent decorateEventWithRole(XEvent event, ExampleSet data, Example example) { + String resourceRoleAttr = getDynamicParameterTypeValue(PARAMETER_KEY_EVENT_RESOURCE_ROLE); + if (!resourceRoleAttr.equals(DEFAULT_VALUE_OPTIONAL)) { + String role = example.getValueAsString(data.getAttributes().get(resourceRoleAttr)); + event.getAttributes().put(XOrganizationalExtension.KEY_ROLE, new XAttributeLiteralImpl( + XOrganizationalExtension.KEY_ROLE, role, XOrganizationalExtension.instance())); + + } + return event; + } + + private XEvent decorateEventWithTime(XEvent event, ExampleSet data, Example example) { + String timeAttr = getDynamicParameterTypeValue(PARAMETER_KEY_EVENT_TIMESTAMP); + if (!timeAttr.equals(DEFAULT_VALUE_OPTIONAL)) { + Date time = example.getDateValue(data.getAttributes().get(timeAttr)); + event.getAttributes().put(XTimeExtension.KEY_TIMESTAMP, + new XAttributeTimestampImpl(XTimeExtension.KEY_TIMESTAMP, time, XTimeExtension.instance())); + } + return event; + } + + private Collection determineReservedColumns() { + Collection reserved = new HashSet<>(); + reserved.add(getDynamicParameterTypeValue(PARAMETER_KEY_TRACE_IDENTIFIER)); + reserved.add(getDynamicParameterTypeValue(PARAMETER_KEY_EVENT_IDENTIFIER)); + if (isUseTime()) { + reserved.add(getDynamicParameterTypeValue(PARAMETER_KEY_EVENT_TIMESTAMP)); + } + if (isUseLifeCycle()) { + reserved.add(getDynamicParameterTypeValue(PARAMETER_KEY_EVENT_LIFECYCLE_TRANSITION)); + } + if (isUseOrganizational()) { + reserved.add(getDynamicParameterTypeValue(PARAMETER_KEY_EVENT_RESOURCE)); + reserved.add(getDynamicParameterTypeValue(PARAMETER_KEY_EVENT_RESOURCE_ROLE)); + reserved.add(getDynamicParameterTypeValue(PARAMETER_KEY_EVENT_RESOURCE_GROUP)); + } + return reserved; + } + + @Override + public void doWork() throws OperatorException { + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: ExampleSet to XLog conversion"); + long time = System.currentTimeMillis(); + reservedColumns = determineReservedColumns(); + XLog log = constructLogByExampleSet(inputExampleSet.getData(ExampleSet.class)); + // FIXME time based reordering in ProM does not properly copy all log + // extensions, classifiers etc. 
+ // if (!getDynamicParameterTypeValue(PARAMETER_KEY_EVENT_TIMESTAMP) + // .equals(DEFAULT_VALUE_OPTIONAL) + // && getParameterAsBoolean(PARAMETER_KEY_REORDER_BY_TIMESTAMP)) { + // log = ReSortLog.removeEdgePoints( + // ProMPluginContextManager.instance().getContext(), log); + // } + outputLog.deliver(new XLogIOObject(log, RapidProMGlobalContext.instance().getPluginContext())); + logger.log(Level.INFO, + "End: Table to Event Log conversion (" + (System.currentTimeMillis() - time) / 1000 + " sec)"); + } + + private String getDynamicParameterTypeValue(String key) { + try { + return ((ParameterTypeExampleSetAttributesDynamicCategory) getParameterType(key)) + .getValues()[getParameterAsInt(key)]; + } catch (UndefinedParameterError e) { + e.printStackTrace(); + } + return DEFAULT_VALUE_OPTIONAL; + } + + @Override + public List getParameterTypes() { + List params = super.getParameterTypes(); + params = addTraceIdentificationParameterType(params); + params = addEventIdentificationParameterType(params); + params = addTimeStampParameterTypes(params); + params = addLifecycleTransitionParameterTypes(params); + params = addOrganizationalPerspectiveSelector(params); + params = addResourceParameterType(params, PARAMETER_KEY_INCLUDE_ORGANIZATIONAL); + params = addResourceRoleParameterType(params, PARAMETER_KEY_INCLUDE_ORGANIZATIONAL); + params = addResourceGroupParameterType(params, PARAMETER_KEY_INCLUDE_ORGANIZATIONAL); + params = addAllDataParameterType(params); + return params; + } + + private boolean isIncludeAllData() { + return getParameterAsBoolean(PARAMETER_KEY_INCLUDE_ALL_DATA); + } + + private boolean isUseLifeCycle() { + return getParameterAsBoolean(PARAMETER_KEY_INCLUDE_EVENT_LIFECYCLE_TRANSITION); + } + + private boolean isUseOrganizational() { + return getParameterAsBoolean(PARAMETER_KEY_INCLUDE_ORGANIZATIONAL); + } + + private boolean isUseTime() { + return getParameterAsBoolean(PARAMETER_KEY_INCLUDE_EVENT_TIME_STAMP); + } + + private XLog processExampleAsEvent(XFactory factory, XLog log, ExampleSet data, Example example, + Map mapping, String traceIdentifier, String eventIdentifier) { + XTrace trace = constructTrace(factory, log, example.getValueAsString(data.getAttributes().get(traceIdentifier)), + mapping); + trace.add(constructEvent(factory, data, example, eventIdentifier)); + return log; + + } + + private ParameterType setupDynamicExampleSetBasedParameterType(String key, String desc, String[] values, + int defaultValue, boolean expert, InputPort inputPort) { + return new ParameterTypeExampleSetAttributesDynamicCategory(key, desc, values, values, defaultValue, expert, + inputPort); + } + +} diff --git a/src/main/java/org/rapidprom/operators/conversion/HeuristicNetToPetriNetConversionOperator.java b/src/main/java/org/rapidprom/operators/conversion/HeuristicNetToPetriNetConversionOperator.java new file mode 100644 index 0000000..0f849a5 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/conversion/HeuristicNetToPetriNetConversionOperator.java @@ -0,0 +1,50 @@ +package org.rapidprom.operators.conversion; + +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.models.graphbased.directed.petrinet.Petrinet; +import org.processmining.models.semantics.petrinet.Marking; +import org.processmining.plugins.heuristicsnet.miner.heuristics.converter.HeuristicsNetToPetriNetConverter; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import 
org.rapidprom.ioobjects.HeuristicsNetIOObject; +import org.rapidprom.ioobjects.PetriNetIOObject; + +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.tools.LogService; + +public class HeuristicNetToPetriNetConversionOperator extends Operator { + + private InputPort input = getInputPorts().createPort("model (ProM Heuristics Net)", HeuristicsNetIOObject.class); + private OutputPort output = getOutputPorts().createPort("model (ProM Petri Net)"); + + public HeuristicNetToPetriNetConversionOperator(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(output, PetriNetIOObject.class)); + } + + public void doWork() throws OperatorException { + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: heuristics net to petri net conversion"); + long time = System.currentTimeMillis(); + + PluginContext pluginContext = RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(HeuristicsNetToPetriNetConverter.class); + + Object[] result = HeuristicsNetToPetriNetConverter.converter(pluginContext, + input.getData(HeuristicsNetIOObject.class).getArtifact()); + + PetriNetIOObject finalPetriNet = new PetriNetIOObject((Petrinet) result[0], (Marking) result[1], null, + pluginContext); + output.deliver(finalPetriNet); + + logger.log(Level.INFO, + "End: heuristics net to petri net conversion (" + (System.currentTimeMillis() - time) / 1000 + " sec)"); + } +} diff --git a/src/main/java/org/rapidprom/operators/conversion/PetriNetToBpmnConversionOperator.java b/src/main/java/org/rapidprom/operators/conversion/PetriNetToBpmnConversionOperator.java new file mode 100644 index 0000000..bb023e6 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/conversion/PetriNetToBpmnConversionOperator.java @@ -0,0 +1,56 @@ +package org.rapidprom.operators.conversion; + +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.models.graphbased.directed.bpmn.BPMNDiagram; +import org.processmining.plugins.converters.PetriNetToBPMNConverterPlugin; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.BPMNIOObject; +import org.rapidprom.ioobjects.PetriNetIOObject; + +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.tools.LogService; + +public class PetriNetToBpmnConversionOperator extends Operator { + + private InputPort input = getInputPorts().createPort("model (ProM Petri Net)", PetriNetIOObject.class); + + private OutputPort output = getOutputPorts().createPort("model (ProM BPMN)"); + + public PetriNetToBpmnConversionOperator(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(output, BPMNIOObject.class)); + } + + @Override + public void doWork() throws OperatorException { // TO_DO : deliver the + // output converted BPMN + + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: 
Petri Net to BPMN conversion"); + long time = System.currentTimeMillis(); + + PluginContext pluginContext = RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(PetriNetToBPMNConverterPlugin.class); + + PetriNetToBPMNConverterPlugin converter = new PetriNetToBPMNConverterPlugin(); + Object[] result = converter.convert(pluginContext, input.getData(PetriNetIOObject.class).getArtifact()); + // BPMN2PetriNetConverter_Plugin converter = new + // BPMN2PetriNetConverter_Plugin(); + // Object[] result = converter.convert(pluginContext, + // input.getData(PetriNetIOObject.class).getData())); + + output.deliver(new BPMNIOObject((BPMNDiagram) result[0], pluginContext)); + + logger.log(Level.INFO, + "End: Petri Net to BPMN conversion (" + (System.currentTimeMillis() - time) / 1000 + " sec)"); + } + +} diff --git a/src/main/java/org/rapidprom/operators/conversion/PetriNetToReachabilityGraphConversionOperator.java b/src/main/java/org/rapidprom/operators/conversion/PetriNetToReachabilityGraphConversionOperator.java new file mode 100644 index 0000000..8351585 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/conversion/PetriNetToReachabilityGraphConversionOperator.java @@ -0,0 +1,58 @@ +package org.rapidprom.operators.conversion; + +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.models.graphbased.directed.transitionsystem.ReachabilityGraph; +import org.processmining.plugins.petrinet.behavioralanalysis.TSGenerator; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.PetriNetIOObject; +import org.rapidprom.ioobjects.ReachabilityGraphIOObject; + +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.tools.LogService; + +public class PetriNetToReachabilityGraphConversionOperator extends Operator { + + private InputPort input = getInputPorts().createPort("model (ProM Petri Net)", PetriNetIOObject.class); + + private OutputPort output = getOutputPorts().createPort("model (ProM Reachability Graph)"); + + public PetriNetToReachabilityGraphConversionOperator(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(output, ReachabilityGraphIOObject.class)); + } + + public void doWork() throws OperatorException { + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: Petri Net to Reachability Graph conversion"); + long time = System.currentTimeMillis(); + + PluginContext pluginContext = RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(TSGenerator.class); + + PetriNetIOObject petriNet = input.getData(PetriNetIOObject.class); + + TSGenerator converter = new TSGenerator(); + Object[] result = null; + try { + result = converter.calculateTS(pluginContext, petriNet.getArtifact(), petriNet.getInitialMarking()); + } catch (Exception e) { + e.printStackTrace(); + throw new OperatorException("The marking could not be found"); + } + + ReachabilityGraphIOObject reachabilityGraphIOObject = new ReachabilityGraphIOObject( + (ReachabilityGraph) result[0], pluginContext); + output.deliver(reachabilityGraphIOObject); + + logger.log(Level.INFO, "End: Petri Net to Reachability Graph conversion (" 
+ + (System.currentTimeMillis() - time) / 1000 + " sec)"); + } +} diff --git a/src/main/java/org/rapidprom/operators/conversion/ProcessTreeToPetriNetConversionOperator.java b/src/main/java/org/rapidprom/operators/conversion/ProcessTreeToPetriNetConversionOperator.java new file mode 100644 index 0000000..e53a049 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/conversion/ProcessTreeToPetriNetConversionOperator.java @@ -0,0 +1,54 @@ +package org.rapidprom.operators.conversion; + +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.ptconversions.pn.ProcessTree2Petrinet; +import org.processmining.ptconversions.pn.ProcessTree2Petrinet.PetrinetWithMarkings; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.PetriNetIOObject; +import org.rapidprom.ioobjects.ProcessTreeIOObject; + +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.tools.LogService; + +public class ProcessTreeToPetriNetConversionOperator extends Operator { + + private InputPort input = getInputPorts().createPort("model (ProM ProcessTree)", ProcessTreeIOObject.class); + private OutputPort output = getOutputPorts().createPort("model (ProM Petri Net)"); + + public ProcessTreeToPetriNetConversionOperator(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(output, PetriNetIOObject.class)); + } + + public void doWork() throws OperatorException { + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: Process Tree to Petri Net conversion"); + long time = System.currentTimeMillis(); + + PluginContext pluginContext = RapidProMGlobalContext.instance().getPluginContext(); + + PetrinetWithMarkings result = null; + try { + result = ProcessTree2Petrinet.convert(input.getData(ProcessTreeIOObject.class).getArtifact()); + } catch (Exception e) { + e.printStackTrace(); + throw new OperatorException("The process tree could not be converted to a petri net"); + } + + PetriNetIOObject petriNet = new PetriNetIOObject(result.petrinet, result.initialMarking, result.finalMarking, + pluginContext); + + output.deliver(petriNet); + + logger.log(Level.INFO, + "End: Process Tree to Petri Net conversion (" + (System.currentTimeMillis() - time) / 1000 + " sec)"); + } +} diff --git a/src/main/java/org/rapidprom/operators/conversion/TransitionSystemtoPetriNetConversionOperator.java b/src/main/java/org/rapidprom/operators/conversion/TransitionSystemtoPetriNetConversionOperator.java new file mode 100644 index 0000000..fe885b0 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/conversion/TransitionSystemtoPetriNetConversionOperator.java @@ -0,0 +1,61 @@ +package org.rapidprom.operators.conversion; + +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.models.graphbased.directed.petrinet.Petrinet; +import org.processmining.models.semantics.petrinet.Marking; +import org.processmining.plugins.transitionsystem.regions.TransitionSystem2Petrinet; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.PetriNetIOObject; 
+import org.rapidprom.ioobjects.TransitionSystemIOObject; + +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.tools.LogService; + +public class TransitionSystemtoPetriNetConversionOperator extends Operator { + + private InputPort input = getInputPorts().createPort("model (ProM Transition System)", + TransitionSystemIOObject.class); + private OutputPort output = getOutputPorts().createPort("model (ProM Petri Net)"); + + public TransitionSystemtoPetriNetConversionOperator(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(output, PetriNetIOObject.class)); + } + + public void doWork() throws OperatorException { + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: transition system to petri net conversion"); + long time = System.currentTimeMillis(); + + TransitionSystem2Petrinet converter = new TransitionSystem2Petrinet(); + + PluginContext pluginContext = RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(TransitionSystem2Petrinet.class); + + Object[] result; + try { + result = converter.convertToPetrinet(pluginContext, + input.getData(TransitionSystemIOObject.class).getArtifact()); + } catch (Exception e) { + e.printStackTrace(); + throw new OperatorException("There was an error obtaining connected elements for this transition system"); + } + + PetriNetIOObject petriNet = new PetriNetIOObject((Petrinet) result[0], (Marking) result[1], null, + input.getData(TransitionSystemIOObject.class).getPluginContext()); + + output.deliver(petriNet); + + logger.log(Level.INFO, "End: transition system to petri net conversion (" + + (System.currentTimeMillis() - time) / 1000 + " sec)"); + } + +} diff --git a/src/main/java/org/rapidprom/operators/conversion/XLogToExampleSetConversionOperator.java b/src/main/java/org/rapidprom/operators/conversion/XLogToExampleSetConversionOperator.java new file mode 100644 index 0000000..a5e601e --- /dev/null +++ b/src/main/java/org/rapidprom/operators/conversion/XLogToExampleSetConversionOperator.java @@ -0,0 +1,185 @@ +package org.rapidprom.operators.conversion; + +import java.util.ArrayList; +import java.util.LinkedList; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.deckfour.xes.model.XLog; +import org.rapidprom.ioobjects.XLogIOObject; +import org.rapidprom.util.XLogUtils; +import org.rapidprom.util.XLogUtils.AttributeTypes; +import org.rapidprom.util.XLogUtils.TableModelXLog; + +import com.rapidminer.example.Attribute; +import com.rapidminer.example.ExampleSet; +import com.rapidminer.example.table.AttributeFactory; +import com.rapidminer.example.table.DataRow; +import com.rapidminer.example.table.DataRowFactory; +import com.rapidminer.example.table.MemoryExampleTable; +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.io.AbstractDataReader.AttributeColumn; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.AttributeMetaData; +import com.rapidminer.operator.ports.metadata.ExampleSetMetaData; +import 
com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.operator.ports.metadata.MDInteger; +import com.rapidminer.operator.ports.metadata.SetRelation; +import com.rapidminer.tools.LogService; +import com.rapidminer.tools.Ontology; +import com.rapidminer.tools.math.container.Range; + +public class XLogToExampleSetConversionOperator extends Operator { + + /** defining the ports */ + private InputPort inputLog = getInputPorts().createPort("event log (ProM Event Log)", XLogIOObject.class); + private OutputPort output = getOutputPorts().createPort("example set (Data Table)"); + + private Attribute[] attributes = null; + private ExampleSetMetaData metaData = null; + + /** + * The default constructor needed in exactly this signature + */ + public XLogToExampleSetConversionOperator(OperatorDescription description) { + super(description); + } + + @Override + public void doWork() throws OperatorException { + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: Event Log to Table conversion"); + long time = System.currentTimeMillis(); + + XLogIOObject log = inputLog.getData(XLogIOObject.class); + XLog promLog = log.getArtifact(); + TableModelXLog convertedLog = null; + MemoryExampleTable table = null; + ExampleSet es = null; + try { + convertedLog = XLogUtils.convertLogToStringTable(promLog, true); + // create the exampleset + table = createStructureTable(convertedLog); + es = fillTable(table, convertedLog); + } catch (Exception e) { + e.printStackTrace(); + System.out.println("error when creating exampleset, creating empty exampleset"); + List attributes = new LinkedList(); + table = new MemoryExampleTable(attributes); + es = table.createExampleSet(); + } + /** Adding a rule for the output */ + getTransformer().addRule( new GenerateNewMDRule(output, this.metaData)); + output.deliverMD(metaData); + output.deliver(es); + + logger.log(Level.INFO, "End: Event Log to Table conversion (" + + (System.currentTimeMillis() - time) / 1000 + " sec)"); + + } + + private MemoryExampleTable createStructureTable (TableModelXLog convertedLog) { + ExampleSetMetaData metaData = new ExampleSetMetaData(); + List attributes = new LinkedList(); + for (int i=0; i < convertedLog.getColumnCount(); i++) { + String columnName = convertedLog.getColumnName(i); + AttributeTypes columnType = convertedLog.getColumnType(i); + AttributeMetaData amd = null; + if (columnType.equals(AttributeTypes.CONTINUOUS)) { + attributes.add(AttributeFactory.createAttribute(columnName, Ontology.NUMERICAL)); + amd = new AttributeMetaData(columnName, Ontology.NUMERICAL); + amd.setRole(AttributeColumn.REGULAR); + amd.setNumberOfMissingValues(new MDInteger(0)); + List minAndMaxValueColumn = getMinAndMaxValueColumn(convertedLog, columnName); + amd.setValueRange(new Range(minAndMaxValueColumn.get(0), minAndMaxValueColumn.get(1)), SetRelation.EQUAL); + } + else if (columnType.equals(AttributeTypes.DISCRETE)) { + attributes.add(AttributeFactory.createAttribute(columnName, Ontology.NOMINAL)); + amd = new AttributeMetaData(columnName, Ontology.NOMINAL); + amd.setRole(AttributeColumn.REGULAR); + amd.setNumberOfMissingValues(new MDInteger(0)); + } +// else if (columnType.equals(AttributeTypes.DATE)) { //treat dates as string for now +// attributes.add(AttributeFactory.createAttribute(columnName, Ontology.NOMINAL)); +// amd = new AttributeMetaData(columnName, Ontology.NOMINAL); +// amd.setRole(AttributeColumn.REGULAR); +// amd.setNumberOfMissingValues(new MDInteger(0)); +// } + else if 
(columnType.equals(AttributeTypes.DATE)) { + attributes.add(AttributeFactory.createAttribute(columnName, Ontology.DATE_TIME)); + amd = new AttributeMetaData(columnName, Ontology.DATE_TIME); + amd.setRole(AttributeColumn.REGULAR); + amd.setNumberOfMissingValues(new MDInteger(0)); + List minAndMaxValueColumn = getMinAndMaxValueColumn(convertedLog, columnName); + amd.setValueRange(new Range(minAndMaxValueColumn.get(0), minAndMaxValueColumn.get(1)), SetRelation.EQUAL); + } + else if (columnType.equals(AttributeTypes.STRING)) { + attributes.add(AttributeFactory.createAttribute(columnName, Ontology.NOMINAL)); + amd = new AttributeMetaData(columnName, Ontology.NOMINAL); + amd.setRole(AttributeColumn.REGULAR); + amd.setNumberOfMissingValues(new MDInteger(0)); + } + else if (columnType.equals(AttributeTypes.BOOLEAN)) { + attributes.add(AttributeFactory.createAttribute(columnName, Ontology.BINOMINAL)); + amd = new AttributeMetaData(columnName, Ontology.NOMINAL); + amd.setRole(AttributeColumn.REGULAR); + amd.setNumberOfMissingValues(new MDInteger(0)); + } + metaData.addAttribute(amd); + } + // convert the list to array + Attribute[] attribArray = new Attribute[attributes.size()]; + for (int i=0; i getMinAndMaxValueColumn(TableModelXLog convertedLog, String nameCol) { + double min = Double.MAX_VALUE; + double max = Double.MIN_VALUE; + int intCol = convertedLog.getNameForColumn(nameCol); + for (int i=0; i max ? parseDouble : max; + } + catch (Exception e) { + // do nothing with it. + } + } + } + List doubleList = new ArrayList(); + doubleList.add(min); + doubleList.add(max); + return doubleList; + } + +} diff --git a/src/main/java/org/rapidprom/operators/discovery/AlphaMinerOperator.java b/src/main/java/org/rapidprom/operators/discovery/AlphaMinerOperator.java new file mode 100644 index 0000000..fc05499 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/discovery/AlphaMinerOperator.java @@ -0,0 +1,103 @@ +package org.rapidprom.operators.discovery; + +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.processmining.alphaminer.plugins.AlphaMinerPlugin; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.models.graphbased.directed.petrinet.Petrinet; +import org.processmining.models.semantics.petrinet.Marking; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.PetriNetIOObject; +import org.rapidprom.operators.abstr.AbstractRapidProMDiscoveryOperator; + +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeCategory; +import com.rapidminer.tools.LogService; + +/** + * This operator implements different variants of the alpha miner algorithm The + * "classic" variant is defined in http://dx.doi.org/10.1109/TKDE.2004.47 The + * "+" variant is defined in http://dx.doi.org/10.1007/978-3-540-30188-2_12 The + * "++" variant is defined in http://dx.doi.org/10.1007/s10618-007-0065-y The + * "#" variant is defined in http://dx.doi.org/10.1016/j.datak.2010.06.001 + * + * @author abolt + * + */ +public class AlphaMinerOperator extends AbstractRapidProMDiscoveryOperator { + + private static final String PARAMETER_1_KEY = "Variant", + PARAMETER_1_DESCR = "Defines which version of the AlphaMiner will be used: " + + "The 
\"classic\" variant is defined in http://dx.doi.org/10.1109/TKDE.2004.47 . " + + "The \"+\" variant is defined in http://dx.doi.org/10.1007/978-3-540-30188-2_12 . " + + "The \"++\" variant is defined in http://dx.doi.org/10.1007/s10618-007-0065-y . " + + "The \"#\" variant is defined in http://dx.doi.org/10.1016/j.datak.2010.06.001 ."; + + private static final String CLASSIC = "AlphaMiner classic", PLUS = "AlphaMiner +", PLUSPLUS = "AlphaMiner ++", + SHARP = "AlphaMiner #"; + + private OutputPort output = getOutputPorts().createPort("model (ProM Petri Net)"); + + /** + * The default constructor needed in exactly this signature + */ + public AlphaMinerOperator(OperatorDescription description) { + super(description); + + /** Adding a rule for the output */ + getTransformer().addRule(new GenerateNewMDRule(output, PetriNetIOObject.class)); + } + + @Override + public void doWork() throws OperatorException { + + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: alpha miner"); + long time = System.currentTimeMillis(); + + PluginContext pluginContext = RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(AlphaMinerPlugin.class); + + Object[] result = null; + switch (getParameterAsString(PARAMETER_1_KEY)) { + case CLASSIC: + result = AlphaMinerPlugin.applyAlphaClassic(pluginContext, getXLog(), getXEventClassifier()); + break; + case PLUS: + result = AlphaMinerPlugin.applyAlphaPlus(pluginContext, getXLog(), getXEventClassifier()); + break; + case PLUSPLUS: + result = AlphaMinerPlugin.applyAlphaPlusPlus(pluginContext, getXLog(), getXEventClassifier()); + break; + case SHARP: + result = AlphaMinerPlugin.applyAlphaSharp(pluginContext, getXLog(), getXEventClassifier()); + break; + } + + PetriNetIOObject petriNetIOObject = new PetriNetIOObject((Petrinet) result[0], (Marking) result[1], null, + pluginContext); + + output.deliver(petriNetIOObject); + + logger.log(Level.INFO, "End: alpha miner (" + (System.currentTimeMillis() - time) / 1000 + " sec)"); + + } + + public List getParameterTypes() { + + List parameterTypes = super.getParameterTypes(); + + ParameterTypeCategory parameter1 = new ParameterTypeCategory(PARAMETER_1_KEY, PARAMETER_1_DESCR, + new String[] { CLASSIC, PLUS, PLUSPLUS, SHARP }, 1); + parameterTypes.add(parameter1); + + return parameterTypes; + } + +} diff --git a/src/main/java/org/rapidprom/operators/discovery/ETMdMinerOperator.java b/src/main/java/org/rapidprom/operators/discovery/ETMdMinerOperator.java new file mode 100644 index 0000000..e7bc28d --- /dev/null +++ b/src/main/java/org/rapidprom/operators/discovery/ETMdMinerOperator.java @@ -0,0 +1,190 @@ +package org.rapidprom.operators.discovery; + +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.deckfour.xes.model.XLog; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.plugins.etm.parameters.ETMParam; +import org.processmining.plugins.etm.parameters.ETMParamFactory; +import org.processmining.plugins.etm.ui.plugins.ETMwithoutGUI; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.ProcessTreeIOObject; +import org.rapidprom.operators.abstr.AbstractRapidProMDiscoveryOperator; + +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.parameter.ParameterType; +import 
com.rapidminer.parameter.ParameterTypeDouble; +import com.rapidminer.parameter.ParameterTypeInt; +import com.rapidminer.parameter.UndefinedParameterError; +import com.rapidminer.tools.LogService; + +public class ETMdMinerOperator extends AbstractRapidProMDiscoveryOperator { + + private static final String PARAMETER_1_KEY = "Population Size", + PARAMETER_1_DESCR = "the number of candidate process models to change and evaluate " + + "in each generation (/round). Recommendation: 20", + PARAMETER_2_KEY = "Elite Count", + PARAMETER_2_DESCR = "The number of candidate process models to keep unchanged, " + + "e.g. the top X of process models. Recommendation: 20% to 25% of the " + + "population size, minimally 1 otherwise quality can be reduced.", + PARAMETER_3_KEY = "Number of Random Trees", + PARAMETER_3_DESCR = "The number of completely random process models/trees to be " + + "added in each round. A high number of random trees helps in finding " + + "process models/trees that are different that the current ones, but " + + "at the same time slows the ETM down. Recommendation: ~10% of the " + + "population size, minimum 1, maximum 50% of the population size.", + PARAMETER_4_KEY = "Crossover Probability", + PARAMETER_4_DESCR = "The probability for 2 process models/trees to \"mate\": " + + "e.g. to have parts swapped between them to create offspring. " + + "Experiments show that crossover should be kept low, possibly " + "even at 0.0, maximum 0.25.", + PARAMETER_5_KEY = "Mutation Probability", + PARAMETER_5_DESCR = "The probability for a process model/tree to have a (random) " + + "mutation applied. We recommend this to be set high, e.g. close to 1.0.", + PARAMETER_6_KEY = "Maximum Generations", + PARAMETER_6_DESCR = "The number of generations/rounds the ETM goes through. " + + "The more rounds the higher the quality of the process model/tree " + + "but the longer it takes for the ETM to finish. Recommendation: " + + "set to 100+. When the population size is around 20, then 500+ is recommended.", + PARAMETER_7_KEY = "Target Fitness", + PARAMETER_7_DESCR = "The fitness, or process model/tree quality, at which the " + + "ETM is allowed to stop. When set to 1.0 then the number of generations " + + "will effectively determine when to stop.", + PARAMETER_8_KEY = "Fitness Limit", + PARAMETER_8_DESCR = "Stop calculations for a particular process tree as soon " + + "as the replay fitness is lower than the provided value (double between " + + "0 and 1), or -1 to disable. This is used to save time and not waste " + + "it on bad process trees.", + PARAMETER_9_KEY = "Single Trace Alignment Timeout", + PARAMETER_9_DESCR = "Maximum time (in milliseconds (so 1000 = 1 second)) " + + "after which the calculation time for a single trace is cancelled. " + + "Recommendation: keep at default, or -1 to disable.", + PARAMETER_10_KEY = "Weight: Replay Fitness", + PARAMETER_10_DESCR = "The weight used for the replay fitness quality dimension " + + "in the overall quality/fitness of a process model/tree. Recommendation " + + "is to have this as the highest of all four quality dimensions. Replay " + + "fitness is the same as recall in data mining: the fraction of the observed " + + "data that can be replayed correctly on the process model. Recommended is a " + + "weight of 10 to 15.", + PARAMETER_11_KEY = "Weight: Precision", + PARAMETER_11_DESCR = "The weight used for the precision quality dimension in the " + + "overall quality/fitness of the process model/tree. 
Precision punishes " + + "the process model/tree if it allows for more behaviour than seen in " + + "the data. A delicate balance between replay fitness and precision " + + "results in a \"good\" process model. Recommended is a weight of 5 to 10.", + PARAMETER_12_KEY = "Weight: Generalization", + PARAMETER_12_DESCR = "The weight used for the generalization quality dimension in " + + "the overall quality/fitness of the process model/tree. This dimension " + + "is required next to replay fitness and precision, but plays a less important " + + "role. Recommended setting is a weight of 1.", + PARAMETER_13_KEY = "Weight: Simplicity", + PARAMETER_13_DESCR = "The weight used for the simplicity quality dimension in the " + + "overall quality/fitness of the process model/tree. This dimension is " + + "required next to replay fitness and precision, but plays a less important " + + "role. Recommended setting is a weight of 1."; + + private OutputPort outputProcessTree = getOutputPorts().createPort("model (ProM ProcessTree)"); + + public ETMdMinerOperator(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(outputProcessTree, ProcessTreeIOObject.class)); + } + + public void doWork() throws OperatorException { + + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: evolutionary tree miner"); + long time = System.currentTimeMillis(); + + PluginContext pluginContext = RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(ETMwithoutGUI.class); + XLog xLog = getXLog(); + + ETMParam eTMParam = getConfiguration(xLog, pluginContext); + + ProcessTreeIOObject processTreeIOObject = new ProcessTreeIOObject( + ETMwithoutGUI.minePTWithParameters(pluginContext, xLog, getXEventClassifier(), eTMParam), + pluginContext); + outputProcessTree.deliver(processTreeIOObject); + + logger.log(Level.INFO, + "End: evolutionary tree miner " + "(" + (System.currentTimeMillis() - time) / 1000 + " sec)"); + + } + + public List getParameterTypes() { + + List parameterTypes = super.getParameterTypes(); + + ParameterTypeInt parameter1 = new ParameterTypeInt(PARAMETER_1_KEY, PARAMETER_1_DESCR, 0, Integer.MAX_VALUE, + 20); + parameterTypes.add(parameter1); + + ParameterTypeInt parameter2 = new ParameterTypeInt(PARAMETER_2_KEY, PARAMETER_2_DESCR, 0, Integer.MAX_VALUE, 5); + parameterTypes.add(parameter2); + + ParameterTypeInt parameter3 = new ParameterTypeInt(PARAMETER_3_KEY, PARAMETER_3_DESCR, 0, Integer.MAX_VALUE, 2); + parameterTypes.add(parameter3); + + ParameterTypeDouble parameter4 = new ParameterTypeDouble(PARAMETER_4_KEY, PARAMETER_4_DESCR, 0, 1, 0.2); + parameterTypes.add(parameter4); + + ParameterTypeDouble parameter5 = new ParameterTypeDouble(PARAMETER_5_KEY, PARAMETER_5_DESCR, 0, 1, 0.8); + parameterTypes.add(parameter5); + + ParameterTypeInt parameter6 = new ParameterTypeInt(PARAMETER_6_KEY, PARAMETER_6_DESCR, 0, Integer.MAX_VALUE, + 500); + parameterTypes.add(parameter6); + + ParameterTypeDouble parameter7 = new ParameterTypeDouble(PARAMETER_7_KEY, PARAMETER_7_DESCR, 0, 1, 1); + parameterTypes.add(parameter7); + + ParameterTypeDouble parameter8 = new ParameterTypeDouble(PARAMETER_8_KEY, PARAMETER_8_DESCR, -1, 1, 1); + parameterTypes.add(parameter8); + + ParameterTypeInt parameter9 = new ParameterTypeInt(PARAMETER_9_KEY, PARAMETER_9_DESCR, -1, Integer.MAX_VALUE, + 100); + parameterTypes.add(parameter9); + + ParameterTypeInt parameter10 = new ParameterTypeInt(PARAMETER_10_KEY, PARAMETER_10_DESCR, 0, Integer.MAX_VALUE, + 10); + 
parameterTypes.add(parameter10); + + ParameterTypeInt parameter11 = new ParameterTypeInt(PARAMETER_11_KEY, PARAMETER_11_DESCR, 0, Integer.MAX_VALUE, + 5); + parameterTypes.add(parameter11); + + ParameterTypeInt parameter12 = new ParameterTypeInt(PARAMETER_12_KEY, PARAMETER_12_DESCR, 0, Integer.MAX_VALUE, + 1); + parameterTypes.add(parameter12); + + ParameterTypeInt parameter13 = new ParameterTypeInt(PARAMETER_13_KEY, PARAMETER_13_DESCR, 0, Integer.MAX_VALUE, + 1); + parameterTypes.add(parameter13); + + return parameterTypes; + } + + private ETMParam getConfiguration(XLog log, PluginContext context) { + ETMParam param; + try { + param = ETMParamFactory.buildParam(log, context, getParameterAsInt(PARAMETER_1_KEY), + getParameterAsInt(PARAMETER_2_KEY), getParameterAsInt(PARAMETER_3_KEY), + getParameterAsDouble(PARAMETER_4_KEY), getParameterAsDouble(PARAMETER_5_KEY), true, + getParameterAsInt(PARAMETER_6_KEY), getParameterAsDouble(PARAMETER_7_KEY), + getParameterAsDouble(PARAMETER_10_KEY), getParameterAsDouble(PARAMETER_8_KEY), + getParameterAsDouble(PARAMETER_9_KEY), getParameterAsDouble(PARAMETER_11_KEY), + getParameterAsDouble(PARAMETER_12_KEY), getParameterAsDouble(PARAMETER_13_KEY), null, 0.0); + + } catch (UndefinedParameterError e) { + e.printStackTrace(); + param = null; + } + return param; + } + +} diff --git a/src/main/java/org/rapidprom/operators/discovery/FuzzyMinerOperator.java b/src/main/java/org/rapidprom/operators/discovery/FuzzyMinerOperator.java new file mode 100644 index 0000000..4fcc1b7 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/discovery/FuzzyMinerOperator.java @@ -0,0 +1,220 @@ +package org.rapidprom.operators.discovery; + +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.deckfour.xes.info.XLogInfo; +import org.deckfour.xes.info.XLogInfoFactory; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.models.graphbased.directed.fuzzymodel.attenuation.Attenuation; +import org.processmining.models.graphbased.directed.fuzzymodel.attenuation.NRootAttenuation; +import org.processmining.models.graphbased.directed.fuzzymodel.metrics.MetricsRepository; +import org.processmining.plugins.fuzzymodel.miner.FuzzyMinerPlugin; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.MetricsRepositoryIOObject; +import org.rapidprom.operators.abstr.AbstractRapidProMDiscoveryOperator; + +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.UserError; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeDouble; +import com.rapidminer.parameter.ParameterTypeInt; +import com.rapidminer.tools.LogService; + +public class FuzzyMinerOperator extends AbstractRapidProMDiscoveryOperator { + + private static final String PARAMETER_1_KEY = "Frequency significance metric (Unitary)", + PARAMETER_1_DESCR = "Unary significance describes the relative importance of an event class, " + + "which will be represented as a node in the process model. " + + "As our approach is based on removing less significant behavior, " + + "and as removing a node implies removing all of its connected arcs, " + + "unary significance is the primary driver of simplification. 
" + + "For this metric: the more often acertain event class was observed in the log, " + + "the more significant it is", + + PARAMETER_2_KEY = "Routing significance metric (Unitary)", + PARAMETER_2_DESCR = "The idea behind routing significance is that points, at which the " + + "process either forks (i.e., split nodes) or synchronizes (i.e., join nodes), " + + "are interesting in that they substantially define the structure of a process. " + + "These points in the process are routing nodes. They are characterized by the fact " + + "that they have much fewer ingoing arcs than outgoing arcs, or the other way around. " + + "The more unbalanced the number of ingoing and outgoing arcs of a node, the greater " + + "its significance for routing. Therefore, the higher the number and significance of " + + "predecessors for a node (i.e., its incoming arcs) differs from the number and " + + "significance of its successors (i.e., outgoing arcs), the more important that " + + "node is for routing in the process. Routing significance is important as amplifier " + + "metric, i.e. it helps separating important routing nodes (whose significance it " + + "increases) from those less important", + + PARAMETER_3_KEY = "Frequency significance (Binary)", + PARAMETER_3_DESCR = "Binary significance describes the relative importance of a precedence " + + "relation between two event classes, i.e. an edge in the process model. Its purpose " + + "is to amplify and to isolate the observed behavior that is supposed to be of the " + + "greatest interest. In our simplification approach, it primarily influences the " + + "selection of edges that will be included in the simplified process model. Like " + + "for unary significance, the log-based frequency significance metric is also the " + + "most important implementation for binary significance. The more often two event " + + "classes are observed after one another, the more significant their precedence " + "relation", + + PARAMETER_4_KEY = "Distance significance (Binary)", + PARAMETER_4_DESCR = "The distance significance metric is a derivative implementation of binary " + + "significance. The more the significance of a relation differs from its source and " + + "target nodes' significances, the less its distance significance value. The " + + "rationale behind this metric is that globally important relations are also " + + "always the most important relations for their endpoints. Distance significance " + + "locally amplifies crucial key relations between event classes, and weakens " + + "already insignificant relations. Thereby, it can clarify ambiguous situations " + + "in edge abstraction, where many relations �compete� over being included in the " + + "simplified process model. Especially in very unstructured execution logs, this" + + " metric is an indispensible tool for isolating behavior of interest.", + + PARAMETER_5_KEY = "Proximity correlation (Binary)", + PARAMETER_5_DESCR = "Binary correlation measures the distance of events in a precedence " + + "relation, i.e. how closely related two events following one another are. " + + "Distance, in the process domain, can be equated to the magnitude of context " + + "change between two activity executions. Subsequently occurring activities " + + "that have a more similar context (e.g., that are executed by the same person " + + "or in a short timeframe) are thus evaluated to be higher correlated. 
Binary " + + "correlation is the main driver of the decision between aggregation or abstraction " + + "of less-significant behavior. Proximity correlation evaluates event classes that " + + "occur shortly after one another, i.e. within a small timeframe, as highly " + + "correlated. This is important for identifying clusters of events that correspond " + + "to one logical activity, as these are commonly executed within a short timeframe.", + + PARAMETER_6_KEY = "Endpoint correlation (Binary)", + PARAMETER_6_DESCR = "Endpoint correlation is quite similar, however, instead of resources it " + + "compares the activity names of subsequent events. More similar names will be " + + "interpreted as higher correlation. This is important for low-level logs including " + + "a large amount of less significant events that are closely related. Most of " + + "the time, events that reflect similar tasks also are given similar names " + + "(e.g., \"open valve13\" and \"close valve13\"), and this metric can unveil these " + + "implicit dependencies.", + + PARAMETER_7_KEY = "Originator correlation (Binary)", + PARAMETER_7_DESCR = "One feature of clusters of events occurring within the realm of one " + + "higher-level activity is that they are executed by the same person. Originator " + + "correlation between event classes is determined from the names of the persons " + + "that have triggered two subsequent events. The more similar these names, the " + + "higher correlated the respective event classes. In real applications, user names " + + "often include job titles or function identifiers (e.g.\"sales John\" and \"sales " + + "Paul\"). Therefore, this metric implementation is a valuable tool also for " + + "unveiling implicit correlation between events. ", + + PARAMETER_8_KEY = "Data type correlation (Binary)", + PARAMETER_8_DESCR = "In most logs, events also include additional attributes, containing " + + "snapshots from the data perspective of the process (e.g., the value of an " + + "insurance claim). In such cases, the selection of attributes logged for each " + + "event can be interpreted as its context. Thus, the data type correlation metric " + + "evaluates event classes, where subsequent events share a large amount of data " + + "types (i.e., attribute keys), as highly correlated. ", + + PARAMETER_9_KEY = "Data value correlation (Binary)", + PARAMETER_9_DESCR = "Data value correlation is more specific, in that it also takes the " + + "values of these common attributes into account. In that, it uses relative " + + "similarity, i.e. 
small changes of an attribute value will compromise correlation " + + "less than a completely different value.", + + PARAMETER_10_KEY = "Maximum Distance", + PARAMETER_10_DESCR = "Defines the maximum length of long-term relations"; + + private OutputPort outputMetricsRepository = getOutputPorts().createPort("model (ProM MetricsRepository)"); + + public FuzzyMinerOperator(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(outputMetricsRepository, MetricsRepositoryIOObject.class)); + } + + public void doWork() throws OperatorException { + + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: fuzzy miner"); + long time = System.currentTimeMillis(); + + PluginContext pluginContext = RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(FuzzyMinerPlugin.class); + + MetricsRepository metricsRepository = getMetricsConfiguration(); + Attenuation attenuation = new NRootAttenuation(2.7, 5); + int maxDistance = getParameterAsInt(PARAMETER_10_KEY); + + FuzzyMinerPlugin executer = new FuzzyMinerPlugin(); + MetricsRepositoryIOObject metricsRepositoryIOObject = new MetricsRepositoryIOObject( + executer.mineGeneric(pluginContext, getXLog(), metricsRepository, attenuation, maxDistance), + pluginContext); + + outputMetricsRepository.deliver(metricsRepositoryIOObject); + logger.log(Level.INFO, "End: fuzzy miner (" + (System.currentTimeMillis() - time) / 1000 + " sec)"); + } + + public List getParameterTypes() { + List parameterTypes = super.getParameterTypes(); + + ParameterTypeDouble parameter1 = new ParameterTypeDouble(PARAMETER_1_KEY, PARAMETER_1_DESCR, 0, 1, 1); + parameterTypes.add(parameter1); + + ParameterTypeDouble parameter2 = new ParameterTypeDouble(PARAMETER_2_KEY, PARAMETER_2_DESCR, 0, 1, 1); + parameterTypes.add(parameter2); + + ParameterTypeDouble parameter3 = new ParameterTypeDouble(PARAMETER_3_KEY, PARAMETER_3_DESCR, 0, 1, 1); + parameterTypes.add(parameter3); + + ParameterTypeDouble parameter4 = new ParameterTypeDouble(PARAMETER_4_KEY, PARAMETER_4_DESCR, 0, 1, 1); + parameterTypes.add(parameter4); + + ParameterTypeDouble parameter5 = new ParameterTypeDouble(PARAMETER_5_KEY, PARAMETER_5_DESCR, 0, 1, 1); + parameterTypes.add(parameter5); + + ParameterTypeDouble parameter7 = new ParameterTypeDouble(PARAMETER_7_KEY, PARAMETER_7_DESCR, 0, 1, 1); + parameterTypes.add(parameter7); + + ParameterTypeDouble parameter6 = new ParameterTypeDouble(PARAMETER_6_KEY, PARAMETER_6_DESCR, 0, 1, 1); + parameterTypes.add(parameter6); + + ParameterTypeDouble parameter8 = new ParameterTypeDouble(PARAMETER_8_KEY, PARAMETER_8_DESCR, 0, 1, 1); + parameterTypes.add(parameter8); + + ParameterTypeDouble parameter9 = new ParameterTypeDouble(PARAMETER_9_KEY, PARAMETER_9_DESCR, 0, 1, 1); + parameterTypes.add(parameter9); + + ParameterTypeInt parameter10 = new ParameterTypeInt(PARAMETER_10_KEY, PARAMETER_10_DESCR, 0, 100, 1); + parameterTypes.add(parameter10); + + return parameterTypes; + } + + private MetricsRepository getMetricsConfiguration() { + + XLogInfo logInfo = null; + try { + logInfo = XLogInfoFactory.createLogInfo(getXLog(), getXEventClassifier()); + } catch (UserError e) { + e.printStackTrace(); + } + MetricsRepository metrics = MetricsRepository.createRepository(logInfo); + try { + metrics.getUnaryLogMetrics().get(0).setNormalizationMaximum(getParameterAsDouble(PARAMETER_1_KEY)); + metrics.getUnaryDerivateMetrics().get(0).setNormalizationMaximum(getParameterAsDouble(PARAMETER_2_KEY)); + 
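// The metrics are configured by index; this assumes the default ordering produced by + // MetricsRepository.createRepository (correlation metrics: proximity, endpoint, originator, + // data type, data value). +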
metrics.getSignificanceBinaryLogMetrics().get(0) + .setNormalizationMaximum(getParameterAsDouble(PARAMETER_3_KEY)); + metrics.getCorrelationBinaryLogMetrics().get(0) + .setNormalizationMaximum(getParameterAsDouble(PARAMETER_5_KEY)); + metrics.getCorrelationBinaryLogMetrics().get(1) + .setNormalizationMaximum(getParameterAsDouble(PARAMETER_6_KEY)); + metrics.getCorrelationBinaryLogMetrics().get(2) + .setNormalizationMaximum(getParameterAsDouble(PARAMETER_7_KEY)); + metrics.getCorrelationBinaryLogMetrics().get(3) + .setNormalizationMaximum(getParameterAsDouble(PARAMETER_8_KEY)); + metrics.getCorrelationBinaryLogMetrics().get(4) + .setNormalizationMaximum(getParameterAsDouble(PARAMETER_9_KEY)); + metrics.getSignificanceBinaryMetrics().get(1) + .setNormalizationMaximum(getParameterAsDouble(PARAMETER_4_KEY)); + } catch (Exception e) { + e.printStackTrace(); + } + return metrics; + } +} diff --git a/src/main/java/org/rapidprom/operators/discovery/HeuristicsMinerOperator.java b/src/main/java/org/rapidprom/operators/discovery/HeuristicsMinerOperator.java new file mode 100644 index 0000000..c1695d6 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/discovery/HeuristicsMinerOperator.java @@ -0,0 +1,124 @@ +package org.rapidprom.operators.discovery; + +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.deckfour.xes.model.XLog; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.plugins.heuristicsnet.miner.heuristics.miner.FlexibleHeuristicsMinerPlugin; +import org.processmining.plugins.heuristicsnet.miner.heuristics.miner.settings.HeuristicsMinerSettings; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.HeuristicsNetIOObject; +import org.rapidprom.operators.abstr.AbstractRapidProMDiscoveryOperator; + +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeBoolean; +import com.rapidminer.parameter.ParameterTypeDouble; +import com.rapidminer.parameter.UndefinedParameterError; +import com.rapidminer.tools.LogService; + +/** + * This class executes the heuristics miner algorithm defined in + * (http://dx.doi.org/10.1109/CIDM.2011.5949453) + * + * @author abolt + * + */ +public class HeuristicsMinerOperator extends AbstractRapidProMDiscoveryOperator { + + // Parameter keys (also used as description) + public static final String PARAMETER_1_KEY = "Threshold: Relative-to-best", + PARAMETER_1_DESCR = "Admissable distance between directly follows relations for an " + + "activity and the activity's best one. 
At 0 only the best directly follows " + + "relation will be shown for every activity, at 100 all will be shown.", + PARAMETER_2_KEY = "Threshold: Dependency", + PARAMETER_2_DESCR = "Strength of the directly follows relations determines when to " + + "Show arcs (based on how frequently one activity is followed by another).", + PARAMETER_3_KEY = "Threshold: Length-one-loops", + PARAMETER_3_DESCR = "Show arcs based on frequency of L1L observations", + PARAMETER_4_KEY = "Threshold: Length-two-loops", + PARAMETER_4_DESCR = "Show arcs based on frequency of L2L observations", + PARAMETER_5_KEY = "Threshold: Long distance", + PARAMETER_5_DESCR = "Show arcs based on how frequently one activity is " + "eventually followed by another", + PARAMETER_6_KEY = "All tasks connected", + PARAMETER_6_DESCR = "Every task needs to have at least one input and output arc, " + + "except one initial and one final activity.", + PARAMETER_7_KEY = "Long distance dependency", + PARAMETER_7_DESCR = "Show long distance relations in the model"; + + private OutputPort outputHeuristicsNet = getOutputPorts().createPort("model (ProM Heuristics Net)"); + + public HeuristicsMinerOperator(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(outputHeuristicsNet, HeuristicsNetIOObject.class)); + } + + public void doWork() throws OperatorException { + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: heuristics miner"); + long time = System.currentTimeMillis(); + + PluginContext pluginContext = RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(FlexibleHeuristicsMinerPlugin.class); + + HeuristicsMinerSettings heuristicsMinerSettings = getConfiguration(getXLog()); + + HeuristicsNetIOObject heuristicsNetIOObject = new HeuristicsNetIOObject( + FlexibleHeuristicsMinerPlugin.run(pluginContext, getXLog(), heuristicsMinerSettings), pluginContext); + + outputHeuristicsNet.deliver(heuristicsNetIOObject); + + logger.log(Level.INFO, "End: heuristics miner (" + (System.currentTimeMillis() - time) / 1000 + " sec)"); + } + + public List getParameterTypes() { + List parameterTypes = super.getParameterTypes(); + + ParameterTypeDouble parameter1 = new ParameterTypeDouble(PARAMETER_1_KEY, PARAMETER_1_DESCR, 0, 100, 5); + parameterTypes.add(parameter1); + + ParameterTypeDouble parameter2 = new ParameterTypeDouble(PARAMETER_2_KEY, PARAMETER_2_DESCR, 0, 100, 90); + parameterTypes.add(parameter2); + + ParameterTypeDouble parameter3 = new ParameterTypeDouble(PARAMETER_3_KEY, PARAMETER_3_DESCR, 0, 100, 90); + parameterTypes.add(parameter3); + + ParameterTypeDouble parameter4 = new ParameterTypeDouble(PARAMETER_4_KEY, PARAMETER_4_DESCR, 0, 100, 90); + parameterTypes.add(parameter4); + + ParameterTypeDouble parameter5 = new ParameterTypeDouble(PARAMETER_5_KEY, PARAMETER_5_DESCR, 0, 100, 90); + parameterTypes.add(parameter5); + + ParameterTypeBoolean parameter6 = new ParameterTypeBoolean(PARAMETER_6_KEY, PARAMETER_6_DESCR, true); + parameterTypes.add(parameter6); + + ParameterTypeBoolean parameter7 = new ParameterTypeBoolean(PARAMETER_7_KEY, PARAMETER_6_DESCR, false); + parameterTypes.add(parameter7); + + return parameterTypes; + } + + private HeuristicsMinerSettings getConfiguration(XLog log) { + HeuristicsMinerSettings heuristicsMinerSettings = new HeuristicsMinerSettings(); + try { + heuristicsMinerSettings.setRelativeToBestThreshold(getParameterAsDouble(PARAMETER_1_KEY) / 100d); + 
heuristicsMinerSettings.setDependencyThreshold(getParameterAsDouble(PARAMETER_2_KEY) / 100d); + heuristicsMinerSettings.setL1lThreshold(getParameterAsDouble(PARAMETER_3_KEY) / 100d); + heuristicsMinerSettings.setL2lThreshold(getParameterAsDouble(PARAMETER_4_KEY) / 100d); + heuristicsMinerSettings.setLongDistanceThreshold(getParameterAsDouble(PARAMETER_5_KEY) / 100d); + heuristicsMinerSettings.setUseAllConnectedHeuristics(getParameterAsBoolean(PARAMETER_6_KEY)); + heuristicsMinerSettings.setUseLongDistanceDependency(getParameterAsBoolean(PARAMETER_7_KEY)); + heuristicsMinerSettings.setCheckBestAgainstL2L(false); + heuristicsMinerSettings.setAndThreshold(Double.NaN); + heuristicsMinerSettings.setClassifier(getXEventClassifier()); + } catch (UndefinedParameterError e) { + e.printStackTrace(); + } + return heuristicsMinerSettings; + } +} diff --git a/src/main/java/org/rapidprom/operators/discovery/ILPMinerOperator.java b/src/main/java/org/rapidprom/operators/discovery/ILPMinerOperator.java new file mode 100644 index 0000000..e11fb01 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/discovery/ILPMinerOperator.java @@ -0,0 +1,142 @@ +package org.rapidprom.operators.discovery; + +import java.util.EnumSet; +import java.util.List; +import java.util.Set; + +import org.deckfour.xes.classification.XEventClassifier; +import org.deckfour.xes.model.XLog; +import org.processmining.causalactivitygraph.models.CausalActivityGraph; +import org.processmining.causalactivitygraphcreator.algorithms.DiscoverCausalActivityGraphAlgorithm; +import org.processmining.causalactivitygraphcreator.parameters.DiscoverCausalActivityGraphParameters; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.hybridilpminer.parameters.DiscoveryStrategy; +import org.processmining.hybridilpminer.parameters.DiscoveryStrategyType; +import org.processmining.hybridilpminer.parameters.LPConstraintType; +import org.processmining.hybridilpminer.parameters.LPFilter; +import org.processmining.hybridilpminer.parameters.LPFilterType; +import org.processmining.hybridilpminer.parameters.XLogHybridILPMinerParametersImpl; +import org.processmining.hybridilpminer.plugins.HybridILPMinerPlugin; +import org.processmining.models.graphbased.directed.petrinet.Petrinet; +import org.processmining.models.semantics.petrinet.Marking; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.PetriNetIOObject; +import org.rapidprom.operators.abstr.AbstractRapidProMDiscoveryOperator; + +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeBoolean; +import com.rapidminer.parameter.ParameterTypeCategory; +import com.rapidminer.parameter.ParameterTypeDouble; +import com.rapidminer.parameter.UndefinedParameterError; +import com.rapidminer.parameter.conditions.EqualStringCondition; + +public class ILPMinerOperator extends AbstractRapidProMDiscoveryOperator { + + private OutputPort outputPetrinet = getOutputPorts().createPort("model (ProM Petri Net)"); + + private static final String PARAMETER_KEY_EAC = "enforce_emptiness_after_completion"; + private static final String PARAMETER_DESC_EAC = "Indicates whether the net is empty after replaying the event log"; + + private static final String PARAMETER_KEY_FILTER = "filter"; + private 
static final String PARAMETER_DESC_FILTER = "We can either apply no filtering, which guarantees perfect replay-fitness, or filter using Sequence Encoding Filtering (SEF)"; + private static final String[] PARAMETER_OPTIONS_FITLER = new String[] { LPFilterType.NONE.toString(), + LPFilterType.SEQUENCE_ENCODING.toString() }; + private static final LPFilterType[] PARAMETER_REFERENCE_FILTER = new LPFilterType[] { LPFilterType.NONE, + LPFilterType.SEQUENCE_ENCODING }; + + private static final String PARAMETER_KEY_FILTER_THRESHOLD = "filter_threshold"; + private static final String PARAMETER_DESC_FILTER_THRESHOLD = "Set the sequence encoding threshold t, for which 0 <= t <= 1."; + + public ILPMinerOperator(OperatorDescription description) { + super(description); + + getTransformer().addRule(new GenerateNewMDRule(outputPetrinet, PetriNetIOObject.class)); + } + + public void doWork() throws OperatorException { + PluginContext context = RapidProMGlobalContext.instance().getPluginContext(); + XLog log = getXLog(); + XEventClassifier classifier = getXEventClassifier(); + XLogHybridILPMinerParametersImpl params = new XLogHybridILPMinerParametersImpl(context, log, classifier); + params = setCausalActivityGraph(context, log, classifier, params); + params.setFilter(getFilter()); + params.setLPConstraintTypes(getConstraintTypes()); + Object[] pnAndMarking = HybridILPMinerPlugin.mine(context, log, params); + Petrinet pn = (Petrinet) pnAndMarking[0]; + Marking finalMarking = null; + /** + * If empiness after completion is enforced, make an empty final marking + */ + if (getConstraintTypes().contains(LPConstraintType.EMPTY_AFTER_COMPLETION)) + finalMarking = new Marking(); + + PetriNetIOObject petrinetIOObject = new PetriNetIOObject(pn, (Marking) pnAndMarking[1], finalMarking, context); + outputPetrinet.deliver(petrinetIOObject); + } + + private Set getConstraintTypes() { + Set constraints = EnumSet.of(LPConstraintType.THEORY_OF_REGIONS, + LPConstraintType.NO_TRIVIAL_REGION); + if (getParameterAsBoolean(PARAMETER_KEY_EAC)) { + constraints.add(LPConstraintType.EMPTY_AFTER_COMPLETION); + } + return constraints; + } + + private XLogHybridILPMinerParametersImpl setCausalActivityGraph(PluginContext context, XLog log, + XEventClassifier classifier, XLogHybridILPMinerParametersImpl params) { + DiscoverCausalActivityGraphParameters cagParameters = new DiscoverCausalActivityGraphParameters(log); + cagParameters.setClassifier(classifier); + DiscoverCausalActivityGraphAlgorithm discoCagAlgo = new DiscoverCausalActivityGraphAlgorithm(); + CausalActivityGraph graph = discoCagAlgo.apply(context, log, cagParameters); + params.setDiscoveryStrategy(new DiscoveryStrategy(DiscoveryStrategyType.CAUSAL)); + params.getDiscoveryStrategy().setCausalActivityGraphParameters(cagParameters); + params.getDiscoveryStrategy().setCausalActivityGraph(graph); + return params; + } + + @Override + public List getParameterTypes() { + List params = super.getParameterTypes(); + addEmptinessAfterCompletionParameter(params); + addFilterParameter(params); + return params; + } + + private List addEmptinessAfterCompletionParameter(List params) { + params.add(new ParameterTypeBoolean(PARAMETER_KEY_EAC, PARAMETER_DESC_EAC, false)); + return params; + } + + private List addFilterParameter(List params) { + params.add(new ParameterTypeCategory(PARAMETER_KEY_FILTER, PARAMETER_DESC_FILTER, PARAMETER_OPTIONS_FITLER, 0, + false)); + + ParameterType filterThreshold = new ParameterTypeDouble(PARAMETER_KEY_FILTER_THRESHOLD, + PARAMETER_DESC_FILTER_THRESHOLD, 0, 1, 
0.25, false); + filterThreshold.setOptional(true); + filterThreshold.registerDependencyCondition( + new EqualStringCondition(this, PARAMETER_KEY_FILTER, true, LPFilterType.SEQUENCE_ENCODING.toString())); + + params.add(filterThreshold); + return params; + } + + private LPFilter getFilter() throws UndefinedParameterError { + LPFilter filter = new LPFilter(); + LPFilterType type = PARAMETER_REFERENCE_FILTER[getParameterAsInt(PARAMETER_KEY_FILTER)]; + filter.setFilterType(type); + switch (type) { + case SEQUENCE_ENCODING: + filter.setThreshold(getParameterAsDouble(PARAMETER_KEY_FILTER_THRESHOLD)); + break; + default: + break; + } + return filter; + } +} diff --git a/src/main/java/org/rapidprom/operators/discovery/InductiveMinerPNOperator.java b/src/main/java/org/rapidprom/operators/discovery/InductiveMinerPNOperator.java new file mode 100644 index 0000000..e5cb9ff --- /dev/null +++ b/src/main/java/org/rapidprom/operators/discovery/InductiveMinerPNOperator.java @@ -0,0 +1,47 @@ +package org.rapidprom.operators.discovery; + +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.models.graphbased.directed.petrinet.Petrinet; +import org.processmining.models.semantics.petrinet.Marking; +import org.processmining.plugins.InductiveMiner.mining.MiningParameters; +import org.processmining.plugins.InductiveMiner.plugins.IMPetriNet; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.PetriNetIOObject; +import org.rapidprom.operators.abstr.AbstractInductiveMinerOperator; + +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.tools.LogService; + +public class InductiveMinerPNOperator extends AbstractInductiveMinerOperator { + + OutputPort output = getOutputPorts().createPort("model (ProM Petri Net)"); + + public InductiveMinerPNOperator(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(output, PetriNetIOObject.class)); + } + + public void doWork() throws OperatorException { + + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: inductive miner - pn"); + long time = System.currentTimeMillis(); + + PluginContext pluginContext = RapidProMGlobalContext.instance().getPluginContext(); + MiningParameters param = getConfiguration(); + + Object[] result = IMPetriNet.minePetriNet(pluginContext, getXLog(), param); + + PetriNetIOObject petrinet = new PetriNetIOObject((Petrinet) result[0], (Marking) result[1], (Marking) result[2], + pluginContext); + + output.deliver(petrinet); + logger.log(Level.INFO, "End: inductive miner - pn (" + (System.currentTimeMillis() - time) / 1000 + " sec)"); + } +} diff --git a/src/main/java/org/rapidprom/operators/discovery/InductiveMinerPTOperator.java b/src/main/java/org/rapidprom/operators/discovery/InductiveMinerPTOperator.java new file mode 100644 index 0000000..5188d43 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/discovery/InductiveMinerPTOperator.java @@ -0,0 +1,42 @@ +package org.rapidprom.operators.discovery; + +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.plugins.InductiveMiner.mining.MiningParameters; +import 
org.processmining.plugins.InductiveMiner.plugins.IMProcessTree; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.ProcessTreeIOObject; +import org.rapidprom.operators.abstr.AbstractInductiveMinerOperator; + +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.tools.LogService; + +public class InductiveMinerPTOperator extends AbstractInductiveMinerOperator { + + OutputPort output = getOutputPorts().createPort("model (ProM ProcessTree)"); + + public InductiveMinerPTOperator(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(output, ProcessTreeIOObject.class)); + } + + public void doWork() throws OperatorException { + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: inductive miner - pt"); + long time = System.currentTimeMillis(); + + PluginContext pluginContext = RapidProMGlobalContext.instance().getPluginContext(); + MiningParameters param = getConfiguration(); + + ProcessTreeIOObject result = new ProcessTreeIOObject(IMProcessTree.mineProcessTree(getXLog(), param), + pluginContext); + + output.deliver(result); + logger.log(Level.INFO, "End: inductive miner - pt (" + (System.currentTimeMillis() - time) / 1000 + " sec)"); + } +} diff --git a/src/main/java/org/rapidprom/operators/discovery/SocialNetworkMinerOperator.java b/src/main/java/org/rapidprom/operators/discovery/SocialNetworkMinerOperator.java new file mode 100644 index 0000000..53c7b05 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/discovery/SocialNetworkMinerOperator.java @@ -0,0 +1,125 @@ +package org.rapidprom.operators.discovery; + +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.models.graphbased.directed.socialnetwork.SocialNetwork; +import org.processmining.plugins.socialnetwork.miner.SNHoWMiner; +import org.processmining.plugins.socialnetwork.miner.SNRAMiner; +import org.processmining.plugins.socialnetwork.miner.SNSCMiner; +import org.processmining.plugins.socialnetwork.miner.SNWTMiner; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.SocialNetworkIOObject; +import org.rapidprom.operators.abstr.AbstractRapidProMDiscoveryOperator; + +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeCategory; +import com.rapidminer.tools.LogService; + +public class SocialNetworkMinerOperator extends AbstractRapidProMDiscoveryOperator { + + private static final String HANDOVER_OF_WORK = "Handover of work", + HANDOVER_OF_WORK_DESCR = "Handover of work metric: Within a case (i.e., process " + + "instance) there is a handover of work from individual i to individual " + + "j if there are two subsequent activities where the first is completed " + + "by i and the second by j. This notion can be refined in various ways. " + + "For example, knowledge of the process structure can be used to detect " + + "whether there is really a causal dependency between both activities. 
" + + "It is also possible to not only consider direct succession but also " + + "indirect succession using a \"causality fall factor\" beta, i.e., if there " + + "are 3 activities in-between an activity completed by i and an activity " + + "completed by j, the causality fall factor is beta^3.", + + REASSIGNMENT = "Reassignment", + REASSIGNMENT_DESCR = "Reassignment metric: It considers the type of event. Thus " + + "far we assumed that events correspond to the execution of activities. " + + "However, there are also events like reassigning an activity from one " + + "individual to another. For example, if i frequently delegates work to " + + "j but not vice versa it is likely that i is in a hierarchical relation " + + "with j. From a SNA point of view these observations are particularly " + + "interesting since they represent explicit power relations.", + + SUBCONTRACTING = "Subcontracting", + SUBCONTRACTING_DESCR = "Subcontracting metric: The main idea is to count the number " + + "of times individual j executed an activity in-between two activities " + + "executed by individual i. This may indicate that work was subcontracted " + + "from i to j. All kinds of refinements mentioned in Handover of work metric " + + "are also possible.", + + WORKING_TOGETHER = "Working together", + WORKING_TOGETHER_DESCR = "Working together metric: This ignores causal dependencies " + + "but simply counts how frequently two individuals are performing activities " + + "for the same case. If individuals work together on cases, they will have a " + + "stronger relation than individuals rarely working together. There are three " + + "kinds of methods to calcuate working together metric. The first one is dividing " + + "the number of joint cases by the number of cases in which individual i appeared. " + + "It is important to use a relative notation. For example, suppose that individual " + + "i participates in three cases, individual j participates in six cases, and they " + + "work together three times. In this situation, i always work together with j, but " + + "j does not. Thus, the value for i to j has to be larger than the value for j to i. 
" + + "Alternative metrics can be composed by taking the distance between activities " + + "into account."; + + private static final String VARIATION = "Analysis variation", VARIATION_DESCR = HANDOVER_OF_WORK_DESCR + "\n" + + REASSIGNMENT_DESCR + "\n" + SUBCONTRACTING_DESCR + "\n" + WORKING_TOGETHER_DESCR; + + private OutputPort outputSocialNetwork = getOutputPorts().createPort("model (ProM Social Network)"); + + public SocialNetworkMinerOperator(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(outputSocialNetwork, SocialNetworkIOObject.class)); + } + + public void doWork() throws OperatorException { + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: social network miner"); + long time = System.currentTimeMillis(); + + PluginContext pluginContext = null; + + SocialNetwork result = null; + switch (getParameterAsInt(VARIATION)) { + case 0: + pluginContext = RapidProMGlobalContext.instance().getFutureResultAwarePluginContext(SNHoWMiner.class); + SNHoWMiner miner0 = new SNHoWMiner(); + result = miner0.socialnetwork(pluginContext, getXLog()); + break; + case 1: + pluginContext = RapidProMGlobalContext.instance().getFutureResultAwarePluginContext(SNRAMiner.class); + SNRAMiner miner1 = new SNRAMiner(); + result = miner1.socialnetwork(pluginContext, getXLog()); + break; + case 2: + pluginContext = RapidProMGlobalContext.instance().getFutureResultAwarePluginContext(SNSCMiner.class); + SNSCMiner miner3 = new SNSCMiner(); + result = miner3.socialnetwork(pluginContext, getXLog()); + break; + case 3: + pluginContext = RapidProMGlobalContext.instance().getFutureResultAwarePluginContext(SNWTMiner.class); + SNWTMiner miner4 = new SNWTMiner(); + result = miner4.socialnetwork(pluginContext, getXLog()); + break; + } + + outputSocialNetwork.deliver(new SocialNetworkIOObject(result, pluginContext)); + + logger.log(Level.INFO, "End: social network miner (" + (System.currentTimeMillis() - time) / 1000 + " sec)"); + } + + public List getParameterTypes() { + List parameterTypes = super.getParameterTypes(); + + String[] options = new String[] { HANDOVER_OF_WORK, REASSIGNMENT, SUBCONTRACTING, WORKING_TOGETHER }; + + ParameterTypeCategory variation = new ParameterTypeCategory(VARIATION, VARIATION_DESCR, options, 0); + parameterTypes.add(variation); + + return parameterTypes; + } +} diff --git a/src/main/java/org/rapidprom/operators/discovery/TransitionSystemMinerOperator.java b/src/main/java/org/rapidprom/operators/discovery/TransitionSystemMinerOperator.java new file mode 100644 index 0000000..b16eb60 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/discovery/TransitionSystemMinerOperator.java @@ -0,0 +1,114 @@ +package org.rapidprom.operators.discovery; + +import java.util.Arrays; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.deckfour.xes.classification.XEventClassifier; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.plugins.transitionsystem.miner.TSMinerInput; +import org.processmining.plugins.transitionsystem.miner.TSMinerPlugin; +import org.processmining.plugins.transitionsystem.miner.TSMinerTransitionSystem; +import org.processmining.plugins.transitionsystem.miner.modir.TSMinerModirInput; +import org.processmining.plugins.transitionsystem.miner.util.TSAbstractions; +import org.processmining.plugins.transitionsystem.miner.util.TSDirections; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import 
org.rapidprom.ioobjects.TransitionSystemIOObject; +import org.rapidprom.operators.abstr.AbstractRapidProMDiscoveryOperator; + +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.UserError; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeCategory; +import com.rapidminer.parameter.ParameterTypeInt; +import com.rapidminer.tools.LogService; + +public class TransitionSystemMinerOperator extends AbstractRapidProMDiscoveryOperator { + + public static final String PARAMETER_1_KEY = "Abstraction", + PARAMETER_1_DESCR = "Defines the abstraction used to define a state: " + + "sequence (order and cardinality of events matter), " + + "bag / multiset (cardinality of events matter, but not order), " + + "set (cardinality and order of event does not matter, only distinct " + + "event classes are considered) or fixed length set (set that considers events " + + "until having X different event classes, where X = horizon).", + + PARAMETER_2_KEY = "Horizon", + PARAMETER_2_DESCR = "This number defines the length of the event window considered to " + + "defines states: only use the last 'X' events of a (partial) trace will be used."; + private OutputPort output = getOutputPorts().createPort("model (ProM TransitionSystem)"); + + public TransitionSystemMinerOperator(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(output, TransitionSystemIOObject.class)); + } + + public void doWork() throws OperatorException { + + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: transition system miner"); + long time = System.currentTimeMillis(); + + PluginContext pluginContext = RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(TSMinerPlugin.class); + + XEventClassifier[] classifiers = new XEventClassifier[1]; + classifiers[0] = getXEventClassifier(); + + XEventClassifier transitionClassifier = getXEventClassifier(); + + Object[] result = TSMinerPlugin.main(pluginContext, getXLog(), classifiers, transitionClassifier, + getConfiguration(pluginContext, classifiers, transitionClassifier)); + + // TO-DO: for now we use default parameters, we should use the same + // parameters used in prom. 
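+ // Only the backward-direction abstraction settings are derived from the operator parameters + // (see getConfiguration below); the remaining TSMiner options keep their defaults.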
+ TransitionSystemIOObject ts = new TransitionSystemIOObject((TSMinerTransitionSystem) result[0], pluginContext); + output.deliver(ts); + + logger.log(Level.INFO, "End: transition system miner (" + (System.currentTimeMillis() - time) / 1000 + " sec)"); + } + + private TSMinerInput getConfiguration(PluginContext pluginContext, XEventClassifier[] classifiers, + XEventClassifier transitionClassifier) throws UserError { + + TSMinerInput input = new TSMinerInput(pluginContext, getXLog(), Arrays.asList(classifiers), + transitionClassifier); + + TSMinerModirInput setting = input.getModirSettings(TSDirections.BACKWARD, getXEventClassifier()); + + if (getParameterAsString(PARAMETER_1_KEY).equals(TSAbstractions.SET.getLabel())) + setting.setAbstraction(TSAbstractions.SET); + else if (getParameterAsString(PARAMETER_1_KEY).equals(TSAbstractions.BAG.getLabel())) + setting.setAbstraction(TSAbstractions.BAG); + else if (getParameterAsString(PARAMETER_1_KEY).equals(TSAbstractions.FIXED_LENGTH_SET.getLabel())) + setting.setAbstraction(TSAbstractions.FIXED_LENGTH_SET); + else + setting.setAbstraction(TSAbstractions.SEQUENCE); + + setting.setUse(true); + setting.setFilteredHorizon(getParameterAsInt(PARAMETER_2_KEY)); + + input.setModirSettings(TSDirections.BACKWARD, getXEventClassifier(), setting); + return input; + } + + public List getParameterTypes() { + List parameterTypes = super.getParameterTypes(); + + ParameterTypeCategory parameter1 = new ParameterTypeCategory(PARAMETER_1_KEY, PARAMETER_1_DESCR, + new String[] { TSAbstractions.SET.getLabel(), TSAbstractions.BAG.getLabel(), + TSAbstractions.SEQUENCE.getLabel(), TSAbstractions.FIXED_LENGTH_SET.getLabel() }, + 1); + parameterTypes.add(parameter1); + + ParameterTypeInt parameter10 = new ParameterTypeInt(PARAMETER_2_KEY, PARAMETER_2_DESCR, 0, 100, 1); + parameterTypes.add(parameter10); + + return parameterTypes; + } + +} diff --git a/src/main/java/org/rapidprom/operators/experimental/SequencePainterTask.java b/src/main/java/org/rapidprom/operators/experimental/SequencePainterTask.java new file mode 100644 index 0000000..991c1cb --- /dev/null +++ b/src/main/java/org/rapidprom/operators/experimental/SequencePainterTask.java @@ -0,0 +1,135 @@ +package org.rapidprom.operators.experimental; +//package com.rapidminer.operator.miningplugins; +// +//import java.util.ArrayList; +//import java.util.List; +// +//import org.deckfour.xes.model.XLog; +//import org.processmining.framework.plugin.PluginContext; +//import org.processmining.models.graphbased.directed.petrinet.Petrinet; +//import org.processmining.models.semantics.petrinet.Marking; +//import org.processmining.plugins.graphviz.visualisation.DotPanel; +//import org.processmining.processcomparator.parameters.SequencePainterParameters; +//import org.rapidprom.prom.CallProm; +// +//import com.rapidminer.ioobjects.DotPanelIOObject; +//import com.rapidminer.ioobjects.ProMContextIOObject; +//import com.rapidminer.ioobjects.XLogIOObject; +//import com.rapidminer.operator.Operator; +//import com.rapidminer.operator.OperatorDescription; +//import com.rapidminer.operator.OperatorException; +//import com.rapidminer.operator.ports.InputPort; +//import com.rapidminer.operator.ports.OutputPort; +//import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +//import com.rapidminer.parameter.ParameterType; +//import com.rapidminer.parameter.ParameterTypeCategory; +//import com.rapidminer.parameter.ParameterTypeDouble; +//import com.rapidminer.parameter.ParameterTypeInt; +//import com.rapidminer.parameters.Parameter; 
+//import com.rapidminer.parameters.ParameterCategory; +//import com.rapidminer.parameters.ParameterDouble; +//import com.rapidminer.parameters.ParameterInteger; +//import com.rapidminer.tools.LogService; +//import com.rapidminer.util.ProMIOObjectList; +//import com.rapidminer.util.Utilities; +// +//public class SequencePainterTask extends Operator{ +// +// private InputPort inputContext = getInputPorts().createPort("context (ProM Context)", ProMContextIOObject.class); +// private InputPort inputLog = getInputPorts().createPort("event log (ProM Event Log)", XLogIOObject.class); +// private OutputPort output = getOutputPorts().createPort("model (DotPanel)"); +// +// private List parameters; +// public SequencePainterTask(OperatorDescription description) { +// super(description); +// +// getTransformer().addRule( new GenerateNewMDRule(output, DotPanelIOObject.class)); +// } +// +// +// @Override +// public void doWork() throws OperatorException { +// // get ProMContext +// LogService logService = LogService.getGlobal(); +// logService.log("start do work Sequential Process Painter", LogService.NOTE); +// ProMContextIOObject context = inputContext.getData(ProMContextIOObject.class); +// PluginContext pluginContext = context.getPluginContext(); +// // get the log +// XLogIOObject log = inputLog.getData(XLogIOObject.class); +// XLog promLog = log.getData(); +// CallProm tp = new CallProm(); +// +// +// List pars = new ArrayList(); +// pars.add(promLog); +// +// SequencePainterParameters seqpar = (SequencePainterParameters) getConfiguration(parameters); +// pars.add(seqpar); +// +// Object[] runPlugin = tp.runPlugin(pluginContext, "XX", "Sequential Process Painter", pars); +// +// DotPanelIOObject result = new DotPanelIOObject((DotPanel) runPlugin[0]); +// result.setPluginContext(pluginContext); +// // add to list so that afterwards it can be cleared if needed +// +// ProMIOObjectList instance = ProMIOObjectList.getInstance(); +// instance.addToList(result); +// +// +// output.deliver(result); +// +// logService.log("end do work Sequential Process Painter", LogService.NOTE); +// +// } +// +// public List getParameterTypes() { +// Utilities.loadRequiredClasses(); +// +// this.parameters = new ArrayList(); +// List parameterTypes = super.getParameterTypes(); +// +// ParameterInteger parameter_charsRemoved = new ParameterInteger(8, 0, Integer.MAX_VALUE, 1, null, "Chars removed from activity name", "number of characters that are removed from the beggining of each activity name"); +// ParameterTypeInt parameterType_0 = new ParameterTypeInt(parameter_charsRemoved.getNameParameter(), parameter_charsRemoved.getDescriptionParameter(), parameter_charsRemoved.getMin(), parameter_charsRemoved.getMax(), parameter_charsRemoved.getDefaultValueParameter()); +// parameterTypes.add(parameterType_0); +// parameters.add(parameter_charsRemoved); +// +// ParameterInteger parameter_activititesPerColumn = new ParameterInteger(10, 0, Integer.MAX_VALUE, 1, null, "Number of activities per column", "defines how many activities are fitted in a column before adding a new column"); +// ParameterTypeInt parameterType_1 = new ParameterTypeInt(parameter_activititesPerColumn.getNameParameter(), parameter_activititesPerColumn.getDescriptionParameter(), parameter_activititesPerColumn.getMin(), parameter_activititesPerColumn.getMax(), parameter_activititesPerColumn.getDefaultValueParameter()); +// parameterTypes.add(parameterType_1); +// parameters.add(parameter_activititesPerColumn); +// +// ParameterInteger 
parameter_numberOfDeviations = new ParameterInteger(10, 0, Integer.MAX_VALUE, 1, null, "Number of deviations", "defines the number of deviations that are shown in the model using different colors (max 13)"); +// ParameterTypeInt parameterType_2 = new ParameterTypeInt(parameter_numberOfDeviations.getNameParameter(), parameter_numberOfDeviations.getDescriptionParameter(), parameter_numberOfDeviations.getMin(), parameter_numberOfDeviations.getMax(), parameter_numberOfDeviations.getDefaultValueParameter()); +// parameterTypes.add(parameterType_2); +// parameters.add(parameter_numberOfDeviations); +// +// ParameterDouble parameter_Threshold = new ParameterDouble(0.05, 0, 1, 0.01, null, "Min Threshold for deviations", "Min Threshold for deviations"); +// ParameterTypeDouble parameterType_3 = new ParameterTypeDouble(parameter_Threshold.getNameParameter(), parameter_Threshold.getDescriptionParameter(), parameter_Threshold.getMin(), parameter_Threshold.getMax()); +// parameterTypes.add(parameterType_3); +// parameters.add(parameter_Threshold); +// +// return parameterTypes; +// } +// +// private Object getConfiguration (List pars) +// { +// try +// { +// +// int charsRemoved = getParameterAsInt(pars.get(0).getNameParameter()); +// int activitiesPerColumn = getParameterAsInt(pars.get(1).getNameParameter()); +// int numberOfDeviations = getParameterAsInt(pars.get(2).getNameParameter()); +// +// double threshold = getParameterAsDouble(pars.get(3).getNameParameter()); +// +// return new SequencePainterParameters(charsRemoved, activitiesPerColumn, numberOfDeviations, threshold); +// +// } +// catch(Exception e) +// { +// e.printStackTrace(); +// } +// return null; +// +// } +//} diff --git a/src/main/java/org/rapidprom/operators/extract/ExtractXLogOperator.java b/src/main/java/org/rapidprom/operators/extract/ExtractXLogOperator.java new file mode 100644 index 0000000..dd2f64c --- /dev/null +++ b/src/main/java/org/rapidprom/operators/extract/ExtractXLogOperator.java @@ -0,0 +1,175 @@ +package org.rapidprom.operators.extract; + +import java.io.File; +import java.util.EnumSet; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.deckfour.xes.classification.XEventClassifier; +import org.deckfour.xes.model.XLog; +import org.processmining.log.plugins.ImportXEventClassifierListPlugin; +import org.processmining.plugins.log.OpenNaiveLogFilePlugin; +import org.processmining.xeslite.plugin.OpenLogFileDiskImplPlugin; +import org.processmining.xeslite.plugin.OpenLogFileLiteImplPlugin; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.XLogIOObject; +import org.rapidprom.operators.abstr.AbstractRapidProMExtractorOperator; +import org.rapidprom.operators.ports.metadata.XLogIOObjectMetaData; + +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.UserError; +import com.rapidminer.operator.nio.file.FileObject; +import com.rapidminer.operator.ports.metadata.MetaData; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeCategory; +import com.rapidminer.tools.LogService; + +/** + * Extracts a log from a file operator. Note that this class also contains some + * public static utility methods that can be used by other XLog extractors / + * importers. 
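+ * The implementing importer (naive, lightweight with sequential IDs, or buffered by MapDB) is + * selected via the importer parameter.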
+ * + */ +public class ExtractXLogOperator extends AbstractRapidProMExtractorOperator { + + public static enum ImplementingPlugin { + NAIVE("Naive"), LIGHT_WEIGHT_SEQ_ID("Lightweight & Sequential IDs"), MAP_DB("Buffered by MAPDB"); + + private final String name; + + private ImplementingPlugin(final String name) { + this.name = name; + } + + @Override + public String toString() { + return name; + } + } + + private final static String PARAMETER_KEY_IMPORTER = "importer"; + private final static String PARAMETER_DESC_IMPORTER = "Select the implementing importer, importers differ in terms of performance: " + + "The \"Naive\" importer loads the Log completely in memory (faster, but more memory usage). " + + "The \"Buffered by MAPDB\" importer loads only log, trace and event ids, " + + "and the rest of the data (mainly attribute values) are stored in disk by MapDB " + + "(slower, but less memory usage). " + + "The \"Lightweight & Sequential IDs\" importer is a balance between the \"Naive\" and the \"Buffered by MapDB\" importers"; + + private final static ImplementingPlugin[] PARAMETER_OPTIONS_IMPORTER = EnumSet.allOf(ImplementingPlugin.class) + .toArray(new ImplementingPlugin[EnumSet.allOf(ImplementingPlugin.class).size()]); + + private File currentFile = null; + + public ExtractXLogOperator(OperatorDescription description) { + super(description, XLogIOObject.class); + } + + @SuppressWarnings("unchecked") + @Override + public MetaData getGeneratedMetaData() throws OperatorException { + getLogger().fine("Generating meta data for " + this.getName()); + ImportXEventClassifierListPlugin plugin = new ImportXEventClassifierListPlugin(); + List classifiers; + try { + classifiers = (List) plugin.importFile(RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(ImportXEventClassifierListPlugin.class), getFile()); + } catch (Exception e) { + return new XLogIOObjectMetaData(); + } + if (classifiers != null) + return new XLogIOObjectMetaData(classifiers); + else + return new XLogIOObjectMetaData(); + } + + @Override + public List getParameterTypes() { + List types = super.getParameterTypes(); + types.add(createImporterParameterTypeCategory(PARAMETER_KEY_IMPORTER, PARAMETER_DESC_IMPORTER, + PARAMETER_OPTIONS_IMPORTER)); + return types; + } + + private ParameterType createImporterParameterTypeCategory(String key, String desc, ImplementingPlugin[] importers) { + String[] importersStr = new String[importers.length]; + for (int i = 0; i < importersStr.length; i++) { + importersStr[i] = importers[i].toString(); + } + return new ParameterTypeCategory(key, desc, importersStr, 0, true); + } + + public static XLog importLog(ImplementingPlugin p, File file) throws Exception { + XLog result = null; + switch (p) { + case LIGHT_WEIGHT_SEQ_ID: + result = importLeightWeight(file); + break; + case MAP_DB: + result = importMapDb(file); + break; + case NAIVE: + default: + result = importLogNaive(file); + break; + } + return result; + } + + private static XLog importLeightWeight(File file) throws Exception { + XLog result = null; + OpenLogFileLiteImplPlugin plugin = new OpenLogFileLiteImplPlugin(); + result = (XLog) plugin.importFile( + RapidProMGlobalContext.instance().getFutureResultAwarePluginContext(OpenLogFileLiteImplPlugin.class), + file); + return result; + } + + private static XLog importMapDb(File file) throws Exception { + XLog result = null; + OpenLogFileDiskImplPlugin plugin = new OpenLogFileDiskImplPlugin(); + result = (XLog) plugin.importFile( + 
RapidProMGlobalContext.instance().getFutureResultAwarePluginContext(OpenLogFileDiskImplPlugin.class), + file); + return result; + } + + private static XLog importLogNaive(File file) throws Exception { + XLog result = null; + OpenNaiveLogFilePlugin plugin = new OpenNaiveLogFilePlugin(); + result = (XLog) plugin.importFile( + RapidProMGlobalContext.instance().getFutureResultAwarePluginContext(OpenNaiveLogFilePlugin.class), + file); + return result; + } + + protected File getFile() throws UserError { + try { + File file = inputfile.getData(FileObject.class).getFile(); + this.currentFile = file; + } catch (OperatorException e) { + // Do nothing + } + return currentFile; + } + + @Override + public XLogIOObject read() throws OperatorException { + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: importing event log"); + long time = System.currentTimeMillis(); + + ImplementingPlugin importPlugin = PARAMETER_OPTIONS_IMPORTER[getParameterAsInt(PARAMETER_KEY_IMPORTER)]; + XLog log; + try { + log = importLog(importPlugin, getFile()); + } catch (Exception e) { + throw new OperatorException("Loading the event log failed!"); + } + XLogIOObject xLogIOObject = new XLogIOObject(log, RapidProMGlobalContext.instance().getPluginContext()); + logger.log(Level.INFO, "End: importing event log (" + (System.currentTimeMillis() - time) / 1000 + " sec)"); + return xLogIOObject; + } + +} diff --git a/src/main/java/org/rapidprom/operators/io/ExportPetriNetOperator.java b/src/main/java/org/rapidprom/operators/io/ExportPetriNetOperator.java new file mode 100644 index 0000000..10d29dd --- /dev/null +++ b/src/main/java/org/rapidprom/operators/io/ExportPetriNetOperator.java @@ -0,0 +1,42 @@ +package org.rapidprom.operators.io; + +import java.io.File; +import java.io.IOException; +import java.util.EnumSet; + +import org.processmining.models.graphbased.directed.petrinet.Petrinet; +import org.processmining.petrinets.PetriNetFileFormat; +import org.processmining.plugins.pnml.exporting.PnmlExportNetToEPNML; +import org.processmining.plugins.pnml.exporting.PnmlExportNetToPNML; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.PetriNetIOObject; +import org.rapidprom.operators.abstr.AbstractRapidProMExporterOperator; + +import com.rapidminer.operator.OperatorDescription; + +public class ExportPetriNetOperator + extends AbstractRapidProMExporterOperator { + + public ExportPetriNetOperator(OperatorDescription description) { + super(description, PetriNetIOObject.class, + EnumSet.allOf(PetriNetFileFormat.class) + .toArray(new PetriNetFileFormat[EnumSet.allOf(PetriNetFileFormat.class).size()]), + PetriNetFileFormat.PNML); + } + + @Override + protected void writeToFile(File file, Petrinet object, PetriNetFileFormat format) throws IOException { + switch (format) { + case EPNML: + PnmlExportNetToEPNML exporterEPNML = new PnmlExportNetToEPNML(); + exporterEPNML.exportPetriNetToEPNMLFile(RapidProMGlobalContext.instance().getPluginContext(), object, file); + break; + case PNML: + default: + PnmlExportNetToPNML exporterPNML = new PnmlExportNetToPNML(); + exporterPNML.exportPetriNetToPNMLFile(RapidProMGlobalContext.instance().getPluginContext(), object, file); + break; + } + } + +} diff --git a/src/main/java/org/rapidprom/operators/io/ExportXLogOperator.java b/src/main/java/org/rapidprom/operators/io/ExportXLogOperator.java new file mode 100644 index 0000000..266d642 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/io/ExportXLogOperator.java @@ -0,0 +1,47 @@ 
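+/** Exports an XLog to XES, XES.GZ, MXML or MXML.GZ using the corresponding ProM export plugins. */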
+package org.rapidprom.operators.io; + +import java.io.File; +import java.io.IOException; +import java.util.EnumSet; + +import org.deckfour.xes.model.XLog; +import org.processmining.log.LogFileFormat; +import org.processmining.plugins.log.exporting.ExportLogMxml; +import org.processmining.plugins.log.exporting.ExportLogMxmlGz; +import org.processmining.plugins.log.exporting.ExportLogXes; +import org.processmining.plugins.log.exporting.ExportLogXesGz; +import org.rapidprom.ioobjects.XLogIOObject; +import org.rapidprom.operators.abstr.AbstractRapidProMExporterOperator; + +import com.rapidminer.operator.OperatorDescription; + +public class ExportXLogOperator extends + AbstractRapidProMExporterOperator { + + public ExportXLogOperator(OperatorDescription description) { + super(description, XLogIOObject.class, + EnumSet.allOf(LogFileFormat.class) + .toArray(new LogFileFormat[EnumSet + .allOf(LogFileFormat.class).size()]), + LogFileFormat.XES); + } + + protected void writeToFile(File file, XLog log, LogFileFormat format) + throws IOException { + switch (format) { + case MXML: + ExportLogMxml.export(log, file); + break; + case MXML_GZ: + ExportLogMxmlGz.export(log, file); + break; + case XES_GZ: + ExportLogXesGz.export(log, file); + break; + case XES: + default: + ExportLogXes.export(log, file); + break; + } + } +} diff --git a/src/main/java/org/rapidprom/operators/io/ExportXSStaticXSEventStream.java b/src/main/java/org/rapidprom/operators/io/ExportXSStaticXSEventStream.java new file mode 100644 index 0000000..37170a4 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/io/ExportXSStaticXSEventStream.java @@ -0,0 +1,36 @@ +package org.rapidprom.operators.io; + +import java.io.File; +import java.io.IOException; +import java.util.EnumSet; + +import org.processmining.eventstream.authors.staticeventstream.StaticEventStreamFileFormat; +import org.processmining.eventstream.authors.staticeventstream.plugins.XSStaticXSEventStreamExportPlugin; +import org.processmining.eventstream.core.interfaces.XSStaticXSEventStream; +import org.rapidprom.ioobjects.streams.event.XSStaticXSEventStreamIOObject; +import org.rapidprom.operators.abstr.AbstractRapidProMExporterOperator; + +import com.rapidminer.operator.OperatorDescription; + +public class ExportXSStaticXSEventStream extends + AbstractRapidProMExporterOperator { + + public ExportXSStaticXSEventStream(OperatorDescription description) { + super(description, XSStaticXSEventStreamIOObject.class, EnumSet + .allOf(StaticEventStreamFileFormat.class) + .toArray(new StaticEventStreamFileFormat[EnumSet + .allOf(StaticEventStreamFileFormat.class).size()]), + StaticEventStreamFileFormat.EVST); + } + + @Override + protected void writeToFile(File file, XSStaticXSEventStream object, + StaticEventStreamFileFormat format) throws IOException { + switch (format) { + case EVST: + XSStaticXSEventStreamExportPlugin.export(object, file); + break; + } + } + +} diff --git a/src/main/java/org/rapidprom/operators/io/ImportAcceptingPetriNetOperator.java b/src/main/java/org/rapidprom/operators/io/ImportAcceptingPetriNetOperator.java new file mode 100644 index 0000000..ef1c333 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/io/ImportAcceptingPetriNetOperator.java @@ -0,0 +1,39 @@ +package org.rapidprom.operators.io; + +import java.io.File; +import java.io.FileInputStream; +import java.util.List; + +import org.processmining.acceptingpetrinet.models.impl.AcceptingPetriNetFactory; +import org.processmining.framework.plugin.PluginContext; +import 
org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.AcceptingPetriNetIOObject; +import org.rapidprom.operators.abstr.AbstractRapidProMImportOperator; + +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeFile; + +public class ImportAcceptingPetriNetOperator extends AbstractRapidProMImportOperator { + + private final static String[] SUPPORTED_FILE_FORMATS = new String[] { "pnml" }; + + public ImportAcceptingPetriNetOperator(OperatorDescription description) { + super(description, AcceptingPetriNetIOObject.class, SUPPORTED_FILE_FORMATS); + } + + @Override + public List getParameterTypes() { + List types = super.getParameterTypes(); + types.add(new ParameterTypeFile(PARAMETER_KEY_FILE, PARAMETER_DESC_FILE, false, SUPPORTED_FILE_FORMATS)); + return types; + } + + @Override + protected AcceptingPetriNetIOObject read(File file) throws Exception { + PluginContext context = RapidProMGlobalContext.instance().getPluginContext(); + return new AcceptingPetriNetIOObject( + AcceptingPetriNetFactory.importFromStream(context, new FileInputStream(file)), context); + } + +} diff --git a/src/main/java/org/rapidprom/operators/io/ImportCPNModelOperator.java b/src/main/java/org/rapidprom/operators/io/ImportCPNModelOperator.java new file mode 100644 index 0000000..5a7eb6b --- /dev/null +++ b/src/main/java/org/rapidprom/operators/io/ImportCPNModelOperator.java @@ -0,0 +1,42 @@ +package org.rapidprom.operators.io; + +import java.io.File; +import java.io.FileInputStream; +import java.util.List; + +import org.processmining.framework.plugin.PluginContext; +import org.processmining.plugins.cpnet.ColouredPetriNet; +import org.processmining.plugins.cpnet.LoadCPNModelFromFile; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.CPNModelIOObject; +import org.rapidprom.operators.abstr.AbstractRapidProMImportOperator; + +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeFile; + +public class ImportCPNModelOperator extends AbstractRapidProMImportOperator { + + private final static String[] SUPPORTED_FILE_FORMATS = new String[] { "cpn" }; + + public ImportCPNModelOperator(OperatorDescription description) { + super(description, CPNModelIOObject.class, SUPPORTED_FILE_FORMATS); + } + + @Override + public List getParameterTypes() { + List types = super.getParameterTypes(); + types.add(new ParameterTypeFile(PARAMETER_KEY_FILE, PARAMETER_DESC_FILE, false, SUPPORTED_FILE_FORMATS)); + return types; + } + + @Override + protected CPNModelIOObject read(File file) throws Exception { + ColouredPetriNet net = null; + PluginContext context = RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(LoadCPNModelFromFile.class); + net = LoadCPNModelFromFile.importColouredPetriNetFromStream(context, new FileInputStream(file), file.getName(), + file.length()); + return new CPNModelIOObject(net, context); + } +} \ No newline at end of file diff --git a/src/main/java/org/rapidprom/operators/io/ImportPetriNetOperator.java b/src/main/java/org/rapidprom/operators/io/ImportPetriNetOperator.java new file mode 100644 index 0000000..1bb8947 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/io/ImportPetriNetOperator.java @@ -0,0 +1,47 @@ +package org.rapidprom.operators.io; + +import java.io.File; +import java.util.List; + +import 
org.processmining.framework.plugin.PluginContext; +import org.processmining.models.graphbased.directed.petrinet.Petrinet; +import org.processmining.models.semantics.petrinet.Marking; +import org.processmining.plugins.pnml.importing.PnmlImportNet; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.PetriNetIOObject; +import org.rapidprom.operators.abstr.AbstractRapidProMImportOperator; + +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeFile; + +public class ImportPetriNetOperator extends AbstractRapidProMImportOperator { + + private final static String[] SUPPORTED_FILE_FORMATS = new String[] { "pnml" }; + + public ImportPetriNetOperator(OperatorDescription description) { + super(description, PetriNetIOObject.class, SUPPORTED_FILE_FORMATS); + } + + @Override + public List getParameterTypes() { + List types = super.getParameterTypes(); + types.add(new ParameterTypeFile(PARAMETER_KEY_FILE, PARAMETER_DESC_FILE, false, SUPPORTED_FILE_FORMATS)); + return types; + } + + @Override + protected PetriNetIOObject read(File file) throws Exception { + PluginContext context = RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(PnmlImportNet.class); + PnmlImportNet importer = new PnmlImportNet(); + Object[] result = null; + try { + result = (Object[]) importer.importFile(context, getParameterAsFile(PARAMETER_KEY_FILE)); + } catch (Exception e) { + // propagate the failure instead of swallowing it (result would otherwise be null below) + throw e; + } + PetriNetIOObject pnResult = new PetriNetIOObject((Petrinet) result[0], (Marking) result[1], null, context); + return pnResult; + } +} \ No newline at end of file diff --git a/src/main/java/org/rapidprom/operators/io/ImportXLogOperator.java b/src/main/java/org/rapidprom/operators/io/ImportXLogOperator.java new file mode 100644 index 0000000..1899922 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/io/ImportXLogOperator.java @@ -0,0 +1,91 @@ +package org.rapidprom.operators.io; + +import java.io.File; +import java.util.EnumSet; +import java.util.List; + +import org.deckfour.xes.classification.XEventClassifier; +import org.processmining.log.plugins.ImportXEventClassifierListPlugin; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.XLogIOObject; +import org.rapidprom.operators.abstr.AbstractRapidProMImportOperator; +import org.rapidprom.operators.extract.ExtractXLogOperator; +import org.rapidprom.operators.extract.ExtractXLogOperator.ImplementingPlugin; +import org.rapidprom.operators.ports.metadata.XLogIOObjectMetaData; + +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.metadata.MetaData; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeCategory; +import com.rapidminer.parameter.ParameterTypeFile; + +/** + * The ImportXLogOperator uses public static methods from the + * {@link ExtractXLogOperator} for actually importing the event log. This is + * mainly because Java does not support multiple inheritance. 
+ * + */ +public class ImportXLogOperator extends AbstractRapidProMImportOperator { + + private final static String PARAMETER_KEY_IMPORTER = "importer"; + private final static String PARAMETER_DESC_IMPORTER = "Select the implementing importer; importers differ in terms of performance: " + + "The \"Naive\" importer loads the log completely into memory (faster, but more memory usage). " + + "The \"Buffered by MapDB\" importer loads only log, trace, and event IDs, " + + "and the rest of the data (mainly attribute values) is stored on disk by MapDB " + + "(slower, but less memory usage). " + + "The \"Lightweight & Sequential IDs\" importer is a balance between the \"Naive\" and the \"Buffered by MapDB\" importers."; + + private final static ImplementingPlugin[] PARAMETER_OPTIONS_IMPORTER = EnumSet.allOf(ImplementingPlugin.class) + .toArray(new ImplementingPlugin[EnumSet.allOf(ImplementingPlugin.class).size()]); + + private final static String[] SUPPORTED_FILE_FORMATS = new String[] { "xes" }; + + public ImportXLogOperator(OperatorDescription description) { + super(description, XLogIOObject.class, SUPPORTED_FILE_FORMATS); + } + + @SuppressWarnings("unchecked") + @Override + public MetaData getGeneratedMetaData() throws OperatorException { + getLogger().fine("Generating meta data for " + this.getName()); + ImportXEventClassifierListPlugin plugin = new ImportXEventClassifierListPlugin(); + List classifiers; + try { + classifiers = (List) plugin.importFile(RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(ImportXEventClassifierListPlugin.class), + getParameterAsFile(PARAMETER_KEY_FILE)); + } catch (Exception e) { + return new XLogIOObjectMetaData(); + } + if (classifiers != null) + return new XLogIOObjectMetaData(classifiers); + else + return new XLogIOObjectMetaData(); + } + + protected XLogIOObject read(File file) throws Exception { + XLogIOObject obj = new XLogIOObject( + ExtractXLogOperator.importLog(PARAMETER_OPTIONS_IMPORTER[getParameterAsInt(PARAMETER_KEY_IMPORTER)], + getParameterAsFile(PARAMETER_KEY_FILE)), + RapidProMGlobalContext.instance().getPluginContext()); + return obj; + } + + @Override + public List getParameterTypes() { + List types = super.getParameterTypes(); + types.add(new ParameterTypeFile(PARAMETER_KEY_FILE, PARAMETER_DESC_FILE, false, SUPPORTED_FILE_FORMATS)); + types.add(createImporterParameterTypeCategory(PARAMETER_KEY_IMPORTER, PARAMETER_DESC_IMPORTER, + PARAMETER_OPTIONS_IMPORTER)); + return types; + } + + private ParameterType createImporterParameterTypeCategory(String key, String desc, ImplementingPlugin[] importers) { + String[] importersStr = new String[importers.length]; + for (int i = 0; i < importersStr.length; i++) { + importersStr[i] = importers[i].toString(); + } + return new ParameterTypeCategory(key, desc, importersStr, 0, true); + } +} diff --git a/src/main/java/org/rapidprom/operators/io/ImportXSStaticXSEventStreamOperator.java b/src/main/java/org/rapidprom/operators/io/ImportXSStaticXSEventStreamOperator.java new file mode 100644 index 0000000..d006e32 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/io/ImportXSStaticXSEventStreamOperator.java @@ -0,0 +1,42 @@ +package org.rapidprom.operators.io; + +import java.io.File; +import java.util.List; + +import org.processmining.eventstream.authors.staticeventstream.plugins.XSStaticXSEventStreamImportPlugin; +import org.processmining.eventstream.core.interfaces.XSStaticXSEventStream; +import org.processmining.framework.plugin.PluginContext; +import 
org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.streams.event.XSStaticXSEventStreamIOObject; +import org.rapidprom.operators.abstr.AbstractRapidProMImportOperator; + +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeFile; + +public class ImportXSStaticXSEventStreamOperator + extends AbstractRapidProMImportOperator { + + private final static String[] SUPPORTED_FILE_FORMATS = new String[] { "evst" }; + + public ImportXSStaticXSEventStreamOperator(OperatorDescription description) { + super(description, XSStaticXSEventStreamIOObject.class, SUPPORTED_FILE_FORMATS); + } + + @Override + public List getParameterTypes() { + List types = super.getParameterTypes(); + types.add(new ParameterTypeFile(PARAMETER_KEY_FILE, PARAMETER_DESC_FILE, false, SUPPORTED_FILE_FORMATS)); + return types; + } + + @Override + protected XSStaticXSEventStreamIOObject read(File file) throws Exception { + XSStaticXSEventStreamImportPlugin importer = new XSStaticXSEventStreamImportPlugin(); + PluginContext context = RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(XSStaticXSEventStreamImportPlugin.class); + XSStaticXSEventStream staticStream = (XSStaticXSEventStream) importer.importFile(context, file); + return new XSStaticXSEventStreamIOObject(staticStream, context); + } + +} diff --git a/src/main/java/org/rapidprom/operators/logmanipulation/AddArtificialStartEndEventOperator.java b/src/main/java/org/rapidprom/operators/logmanipulation/AddArtificialStartEndEventOperator.java new file mode 100644 index 0000000..37d4723 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/logmanipulation/AddArtificialStartEndEventOperator.java @@ -0,0 +1,185 @@ +package org.rapidprom.operators.logmanipulation; + +import java.util.Date; +import java.util.Iterator; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.deckfour.xes.extension.std.XConceptExtension; +import org.deckfour.xes.extension.std.XTimeExtension; +import org.deckfour.xes.model.XAttribute; +import org.deckfour.xes.model.XAttributeMap; +import org.deckfour.xes.model.XEvent; +import org.deckfour.xes.model.XLog; +import org.deckfour.xes.model.XTrace; +import org.deckfour.xes.model.impl.XAttributeLiteralImpl; +import org.deckfour.xes.model.impl.XAttributeMapImpl; +import org.deckfour.xes.model.impl.XAttributeTimestampImpl; +import org.deckfour.xes.model.impl.XEventImpl; +import org.deckfour.xes.model.impl.XLogImpl; +import org.deckfour.xes.model.impl.XTraceImpl; +import org.rapidprom.ioobjects.XLogIOObject; + +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.operator.ports.metadata.MetaData; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeBoolean; +import com.rapidminer.tools.LogService; + +public class AddArtificialStartEndEventOperator extends Operator { + + private static final String PARAMETER_1_KEY = "Add Start Event", + PARAMETER_1_DESCR = "Adds a \"start\" event before the first event of the trace.", + PARAMETER_2_KEY = "Add End Event", + PARAMETER_2_DESCR = "Adds an \"end\" event after the last event of the trace."; + + private 
InputPort inputXLog = getInputPorts() + .createPort("event log (ProM Event Log)", XLogIOObject.class); + private OutputPort outputEventLog = getOutputPorts() + .createPort("event log (ProM Event Log)"); + + public AddArtificialStartEndEventOperator(OperatorDescription description) { + super(description); + getTransformer().addRule( + new GenerateNewMDRule(outputEventLog, XLogIOObject.class)); + } + + public void doWork() throws OperatorException { + + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, + "Start: add artificial start and end event to all traces"); + long time = System.currentTimeMillis(); + + MetaData md = inputXLog.getMetaData(); + + XLogIOObject xLogIOObject = inputXLog.getData(XLogIOObject.class); + XLog logOriginal = xLogIOObject.getArtifact(); + XLog logModified = filterLog(logOriginal); + XLogIOObject result = new XLogIOObject(logModified, + xLogIOObject.getPluginContext()); + + outputEventLog.deliverMD(md); + outputEventLog.deliver(result); + logger.log(Level.INFO, + "End: add artificial start and end event to all traces (" + + (System.currentTimeMillis() - time) / 1000 + " sec)"); + } + + public List getParameterTypes() { + List parameterTypes = super.getParameterTypes(); + + ParameterTypeBoolean parameterType1 = new ParameterTypeBoolean( + PARAMETER_1_KEY, PARAMETER_1_DESCR, true); + parameterTypes.add(parameterType1); + + ParameterTypeBoolean parameterType3 = new ParameterTypeBoolean( + PARAMETER_2_KEY, PARAMETER_2_DESCR, true); + parameterTypes.add(parameterType3); + + return parameterTypes; + } + + private XLog filterLog(XLog log) { + XAttributeMap logattlist = copyAttMap(log.getAttributes()); + XLog newLog = new XLogImpl(logattlist); + for (int i = 0; i < log.size(); i++) { + XTrace oldTrace = log.get(i); + XTrace newTrace = new XTraceImpl( + copyAttMap(oldTrace.getAttributes())); + String name = XConceptExtension.instance().extractName(oldTrace); + System.out.println("ADD ARTIFICIAL EVENT: TRACE" + name + ", size: " + + oldTrace.size()); + // add start event + + Date time = new Date(); + boolean changed = false; + if (getParameterAsBoolean(PARAMETER_1_KEY)) { + try { + time = getTime(oldTrace.get(0)); + if (time != null) { + time.setTime(time.getTime() - 1); + } + } catch (Exception ex) { + ex.printStackTrace(); + } + newTrace.add(makeEvent("START", time)); + for (int j = 0; j < oldTrace.size(); j++) { + XEvent oldEvent = oldTrace.get(j); + XEvent newEvent = new XEventImpl( + copyAttMap(oldEvent.getAttributes())); + newTrace.add(newEvent); + } + changed = true; + } + + // add end event + if (getParameterAsBoolean(PARAMETER_2_KEY)) { + time = new Date(); + try { + time = getTime(oldTrace.get(oldTrace.size() - 1)); + if (time != null) { + time.setTime(time.getTime() + 1); + } + } catch (Exception ex) { + ex.printStackTrace(); + } + newTrace.add(makeEvent("END", time)); + changed = true; + } + if (changed) + newLog.add(newTrace); + } + return newLog; + } + + private XEvent makeEvent(String name, Date time) { + XAttributeMap attMap = new XAttributeMapImpl(); + putLiteral(attMap, "concept:name", name); + putLiteral(attMap, "lifecycle:transition", "complete"); + putLiteral(attMap, "org:resource", "artificial"); + if (time != null) { + putTimestamp(attMap, "time:timestamp", time); + } + XEvent newEvent = new XEventImpl(attMap); + return newEvent; + } + + public static XAttributeMap copyAttMap(XAttributeMap srcAttMap) { + XAttributeMap destAttMap = new XAttributeMapImpl(); + Iterator attit = srcAttMap.values().iterator(); + while (attit.hasNext()) { + 
XAttribute att = attit.next(); + String key = att.getKey(); + att = (XAttribute) att.clone(); + destAttMap.put(key, att); + } + return destAttMap; + } + + public static Date getTime(XEvent event) { + Date res = new Date(); + try { + res = XTimeExtension.instance().extractTimestamp(event); + } catch (Exception ex) { + } + return res; + } + + public static void putLiteral(XAttributeMap attMap, String key, + String value) { + attMap.put(key, new XAttributeLiteralImpl(key, value)); + } + + public static void putTimestamp(XAttributeMap attMap, String key, + Date value) { + attMap.put(key, new XAttributeTimestampImpl(key, value)); + } + +} diff --git a/src/main/java/org/rapidprom/operators/logmanipulation/AddClassifierOperator.java b/src/main/java/org/rapidprom/operators/logmanipulation/AddClassifierOperator.java new file mode 100644 index 0000000..c3095ee --- /dev/null +++ b/src/main/java/org/rapidprom/operators/logmanipulation/AddClassifierOperator.java @@ -0,0 +1,96 @@ +package org.rapidprom.operators.logmanipulation; + +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.deckfour.xes.classification.XEventAndClassifier; +import org.deckfour.xes.classification.XEventLifeTransClassifier; +import org.deckfour.xes.classification.XEventNameClassifier; +import org.deckfour.xes.classification.XEventResourceClassifier; +import org.deckfour.xes.model.XLog; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.XLogIOObject; +import org.rapidprom.operators.ports.metadata.XLogIOObjectMetaData; + +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.MetaData; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeCategory; +import com.rapidminer.tools.LogService; + +public class AddClassifierOperator extends Operator { + + public static final String PARAMETER_1_KEY = "Classifier", + PARAMETER_1_DESCR = "Classifier to be added to the event log"; + + public static final String NONE = "None (do not add classifier)", EN = "Event name", + EN_LT = "Event name + Lifecycle transition", EN_LT_RE = "Event name + Lifecycle transition + Resource"; + + private InputPort inputXLog = getInputPorts().createPort("event log (ProM Event Log)", XLogIOObject.class); + private OutputPort outputEventLog = getOutputPorts().createPort("event log (ProM Event Log)"); + + public AddClassifierOperator(OperatorDescription description) { + super(description); + } + + public void doWork() throws OperatorException { + + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: add classifier"); + long time = System.currentTimeMillis(); + + XLogIOObject logObject = inputXLog.getData(XLogIOObject.class); + + XLog newLog = (XLog) logObject.getArtifact().clone(); + XLogIOObjectMetaData mdC = null; + MetaData md = inputXLog.getMetaData(); + + if (md != null && md instanceof XLogIOObjectMetaData) + mdC = (XLogIOObjectMetaData) md; + + switch (getParameterAsString(PARAMETER_1_KEY)) { + case NONE: + break; + case EN: + newLog.getClassifiers().add(new XEventNameClassifier()); + + break; + case EN_LT: + newLog.getClassifiers() + .add(new XEventAndClassifier(new XEventNameClassifier(), new XEventLifeTransClassifier())); + break; + case EN_LT_RE: + 
newLog.getClassifiers().add(new XEventAndClassifier(new XEventNameClassifier(), + new XEventLifeTransClassifier(), new XEventResourceClassifier())); + break; + } + + XLogIOObject result = new XLogIOObject(newLog, RapidProMGlobalContext.instance().getPluginContext()); + + if (mdC != null) { + mdC.getXEventClassifiers().clear(); + mdC.getXEventClassifiers().addAll(newLog.getClassifiers()); + outputEventLog.deliverMD(md); + } + + outputEventLog.deliver(result); + logger.log(Level.INFO, "End: add classifier (" + (System.currentTimeMillis() - time) / 1000 + " sec)"); + } + + public List getParameterTypes() { + List parameterTypes = super.getParameterTypes(); + + String[] par2categories = new String[] { NONE, EN, EN_LT, EN_LT_RE }; + ParameterTypeCategory parameterType2 = new ParameterTypeCategory(PARAMETER_1_KEY, PARAMETER_1_DESCR, + par2categories, 0); + parameterTypes.add(parameterType2); + + return parameterTypes; + } + +} diff --git a/src/main/java/org/rapidprom/operators/logmanipulation/AddEventAttributesToLogOperator.java b/src/main/java/org/rapidprom/operators/logmanipulation/AddEventAttributesToLogOperator.java new file mode 100644 index 0000000..157836f --- /dev/null +++ b/src/main/java/org/rapidprom/operators/logmanipulation/AddEventAttributesToLogOperator.java @@ -0,0 +1,178 @@ +package org.rapidprom.operators.logmanipulation; + +import java.util.Date; +import java.util.Iterator; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +import javax.swing.JOptionPane; + +import org.deckfour.xes.extension.std.XConceptExtension; +import org.deckfour.xes.model.XAttribute; +import org.deckfour.xes.model.XEvent; +import org.deckfour.xes.model.XLog; +import org.deckfour.xes.model.XTrace; +import org.deckfour.xes.model.impl.XAttributeLiteralImpl; +import org.rapidprom.ioobjects.XLogIOObject; + +import com.rapidminer.example.Attribute; +import com.rapidminer.example.Attributes; +import com.rapidminer.example.Example; +import com.rapidminer.example.ExampleSet; +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.ExampleSetMetaData; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.operator.ports.metadata.MetaData; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeString; +import com.rapidminer.tools.LogService; +import com.rapidminer.tools.Ontology; + +public class AddEventAttributesToLogOperator extends Operator { + + private static final String PARAMETER_1 = "Case id column", PARAMETER_2 = "Event id column"; + + private Attribute traceIdColumnAttrib = null; + private Attribute eventIdColumnAttrib = null; + + private InputPort inputExampleSet = getInputPorts().createPort("example set (Data Table)", + new ExampleSetMetaData()); + private InputPort inputLog = getInputPorts().createPort("event log (ProM Event Log)", XLogIOObject.class); + private OutputPort outputLog = getOutputPorts().createPort("event log (ProM Event Log)"); + + public AddEventAttributesToLogOperator(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(outputLog, XLogIOObject.class)); + } + + @Override + public void doWork() throws OperatorException { + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: 
add event attributes"); + long time = System.currentTimeMillis(); + + MetaData md = inputLog.getMetaData(); + + ExampleSet es = inputExampleSet.getData(ExampleSet.class); + + XLogIOObject logIO = inputLog.getData(XLogIOObject.class); + XLog xLog = logIO.getArtifact(); + + Iterator iterator = es.getAttributes().iterator(); + while (iterator.hasNext()) { + Attribute next = iterator.next(); + if (next.getName().equals(getParameterAsString(PARAMETER_1))) { + traceIdColumnAttrib = next; + } + if (next.getName().equals(getParameterAsString(PARAMETER_2))) { + eventIdColumnAttrib = next; + } + if (traceIdColumnAttrib != null && eventIdColumnAttrib != null) { + break; + } + } + + if (traceIdColumnAttrib != null && eventIdColumnAttrib != null) { + XLog adaptedLog = mergeExampleSetIntoLog(xLog, es, traceIdColumnAttrib, eventIdColumnAttrib); + XLogIOObject xLogIOObject = new XLogIOObject(adaptedLog, logIO.getPluginContext()); + outputLog.deliverMD(md); + outputLog.deliver(xLogIOObject); + + } else { + // show warning + JOptionPane.showMessageDialog(null, "Case ID column or event ID column was not found", + "Case ID / Event ID column not found", JOptionPane.ERROR_MESSAGE); + outputLog.deliverMD(md); + outputLog.deliver(null); + } + logger.log(Level.INFO, "End: add event attributes (" + (System.currentTimeMillis() - time) / 1000 + " sec)"); + } + + public List getParameterTypes() { + List parameterTypes = super.getParameterTypes(); + + ParameterTypeString parameterType1 = new ParameterTypeString(PARAMETER_1, PARAMETER_1, "T:concept:name"); + parameterTypes.add(parameterType1); + + ParameterTypeString parameterType2 = new ParameterTypeString(PARAMETER_2, PARAMETER_2, "E:concept:name"); + parameterTypes.add(parameterType2); + + return parameterTypes; + } + + private XLog mergeExampleSetIntoLog(XLog xLog, ExampleSet es, Attribute traceIdColumnAttrib, + Attribute eventIdColumnAttrib) { + Iterator iterator = es.iterator(); + while (iterator.hasNext()) { + Example example = iterator.next(); + // get the case id and see if a corresponding trace can be found + String caseid = example.getValueAsString(traceIdColumnAttrib); + XTrace t = findTrace(caseid, xLog); + if (t != null) { + XEvent e = findEvent(eventIdColumnAttrib, t); + if (e != null) { + Attributes attributes = example.getAttributes(); + Iterator iterator2 = attributes.iterator(); + while (iterator2.hasNext()) { + Attribute attrib = iterator2.next(); + XAttribute newAttrib = null; + if (!attrib.getName().equals(traceIdColumnAttrib.getName()) + && !attrib.getName().equals(eventIdColumnAttrib.getName())) { + if (attrib.getValueType() == Ontology.NUMERICAL || attrib.getValueType() == Ontology.INTEGER + || attrib.getValueType() == Ontology.REAL) { + double numericalValue = example.getNumericalValue(attrib); + XAttributeLiteralImpl attribLit = new XAttributeLiteralImpl(attrib.getName(), + Double.toString(numericalValue)); + newAttrib = attribLit; + } else if (attrib.getValueType() == Ontology.NOMINAL + || attrib.getValueType() == Ontology.BINOMINAL) { + String nominalValue = example.getNominalValue(attrib); + XAttributeLiteralImpl attribLit = new XAttributeLiteralImpl(attrib.getName(), + nominalValue); + newAttrib = attribLit; + } else if (attrib.getValueType() == Ontology.DATE_TIME) { + Date dateValue = example.getDateValue(attrib); + XAttributeLiteralImpl attribLit = new XAttributeLiteralImpl(attrib.getName(), + dateValue.toString()); + newAttrib = attribLit; + } + } + // add attribute to the log + if (newAttrib != null) { + 
e.getAttributes().put(attrib.getName(), newAttrib); + } + } + } + } + } + return xLog; + } + + private XEvent findEvent(Attribute eventAttrib, XTrace t) { + for (XEvent e : t) { + String name = eventAttrib.getName(); + String nameEvent = XConceptExtension.instance().extractName(e); + if (name.equals(nameEvent)) { + // found the event + return e; + } + } + return null; + } + + private XTrace findTrace(String caseid, XLog xLog) { + for (XTrace t : xLog) { + String name = XConceptExtension.instance().extractName(t); + if (name.equals(caseid)) { + return t; + } + } + return null; + } + +} diff --git a/src/main/java/org/rapidprom/operators/logmanipulation/AddEventsToLogOperator.java b/src/main/java/org/rapidprom/operators/logmanipulation/AddEventsToLogOperator.java new file mode 100644 index 0000000..a6522c0 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/logmanipulation/AddEventsToLogOperator.java @@ -0,0 +1,197 @@ +package org.rapidprom.operators.logmanipulation; + +import java.util.Date; +import java.util.Iterator; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.deckfour.xes.extension.std.XConceptExtension; +import org.deckfour.xes.extension.std.XLifecycleExtension; +import org.deckfour.xes.extension.std.XOrganizationalExtension; +import org.deckfour.xes.extension.std.XTimeExtension; +import org.deckfour.xes.factory.XFactory; +import org.deckfour.xes.factory.XFactoryRegistry; +import org.deckfour.xes.model.XAttributeLiteral; +import org.deckfour.xes.model.XAttributeMap; +import org.deckfour.xes.model.XAttributeTimestamp; +import org.deckfour.xes.model.XEvent; +import org.deckfour.xes.model.XLog; +import org.deckfour.xes.model.XTrace; +import org.deckfour.xes.model.impl.XAttributeLiteralImpl; +import org.deckfour.xes.model.impl.XAttributeMapImpl; +import org.rapidprom.ioobjects.XLogIOObject; + +import com.rapidminer.example.Attribute; +import com.rapidminer.example.Example; +import com.rapidminer.example.ExampleSet; +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.ExampleSetMetaData; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.operator.ports.metadata.MetaData; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeString; +import com.rapidminer.parameter.UndefinedParameterError; +import com.rapidminer.tools.LogService; +import com.rapidminer.tools.Ontology; + +public class AddEventsToLogOperator extends Operator { + + private static final String PARAMETER_1 = "Case id column", PARAMETER_2 = "Event id column", + PARAMETER_3 = "Lifecycle column", PARAMETER_4 = "Timestamp column", PARAMETER_5 = "Resource column"; + private Attribute traceIdColumnAttrib = null; + + private InputPort inputExampleSet = getInputPorts().createPort("example set (Data Table)", + new ExampleSetMetaData()); + private InputPort inputLog = getInputPorts().createPort("event log (ProM Event Log)", XLogIOObject.class); + private OutputPort outputLog = getOutputPorts().createPort("event log (ProM Event Log)"); + + public AddEventsToLogOperator(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(outputLog, XLogIOObject.class)); + } + + @Override + public void doWork() throws 
OperatorException { + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: add event"); + long time = System.currentTimeMillis(); + MetaData md = inputLog.getMetaData(); + + ExampleSet es = inputExampleSet.getData(ExampleSet.class); + + XLogIOObject logIO = inputLog.getData(XLogIOObject.class); + XLog xLog = logIO.getArtifact(); + + Iterator iterator = es.getAttributes().iterator(); + while (iterator.hasNext()) { + Attribute next = iterator.next(); + if (next.getName().equals(getParameterAsString(PARAMETER_1))) { + traceIdColumnAttrib = next; + break; + } + } + if (traceIdColumnAttrib != null) { + System.out.println("DUMPFIRST"); + dumpSizeTraces(xLog); + XLog adaptedLog = mergeExampleSetIntoLog(xLog, es, traceIdColumnAttrib); + XLogIOObject xLogIOObject = new XLogIOObject(adaptedLog, logIO.getPluginContext()); + outputLog.deliverMD(md); + outputLog.deliver(xLogIOObject); + System.out.println("DUMPSECOND"); + dumpSizeTraces(adaptedLog); + + } + logger.log(Level.INFO, "End: add event (" + (System.currentTimeMillis() - time) / 1000 + " sec)"); + } + + private void dumpSizeTraces(XLog xLog) { + for (XTrace t : xLog) { + System.out.println(XConceptExtension.instance().extractName(t) + ":" + t.size()); + } + + } + + private XLog mergeExampleSetIntoLog(XLog xLog, ExampleSet es, Attribute traceIdAttrib) + throws UndefinedParameterError { + XFactory factory = XFactoryRegistry.instance().currentDefault(); + Iterator iterator = es.iterator(); + while (iterator.hasNext()) { + Example row = iterator.next(); + String caseID = row.getValueAsString(traceIdAttrib); + XTrace t = findTrace(caseID, xLog); + if (t != null) { + XAttributeMap attribMapEvent = new XAttributeMapImpl(); + Iterator iterator2 = row.getAttributes().iterator(); + while (iterator2.hasNext()) { + Attribute next = iterator2.next(); + String nameAttrib = next.getName(); + if (nameAttrib.equals(traceIdAttrib.getName())) { + // do nothing + } else if (nameAttrib.equals(getParameterAsString(PARAMETER_2)) + && !getParameterAsString(PARAMETER_2).equals("")) { + // concept:name + String value = row.getValueAsString(next); + XAttributeLiteral attribNameEvent = factory.createAttributeLiteral("concept:name", value, + XConceptExtension.instance()); + attribMapEvent.put("concept:name", attribNameEvent); + } else if (nameAttrib.equals(getParameterAsString(PARAMETER_3)) + && !getParameterAsString(PARAMETER_3).equals("")) { + // lifecycle:transition + String value = row.getValueAsString(next); + XAttributeLiteral attribLC = factory.createAttributeLiteral("lifecycle:transition", value, + XLifecycleExtension.instance()); + attribMapEvent.put("lifecycle:transition", attribLC); + } else if (nameAttrib.equals(getParameterAsString(PARAMETER_4)) + && !getParameterAsString(PARAMETER_4).equals("")) { + // timestamp + Date dateValue = row.getDateValue(next); + XAttributeTimestamp attribTimestampEvent = factory.createAttributeTimestamp("time:timestamp", + dateValue, XTimeExtension.instance()); + attribMapEvent.put("time:timestamp", attribTimestampEvent); + } else if (nameAttrib.equals(getParameterAsString(PARAMETER_5)) + && !getParameterAsString(PARAMETER_5).equals("")) { + // resource + String value = row.getValueAsString(next); + XAttributeLiteral attribResource = factory.createAttributeLiteral("org:resource", value, + XOrganizationalExtension.instance()); + attribMapEvent.put("org:resource", attribResource); + } else { + if (next.getValueType() == Ontology.DATE || next.getValueType() == Ontology.DATE_TIME) { + Date dateValue = 
row.getDateValue(next); + XAttributeTimestamp attribTimestampEvent = factory.createAttributeTimestamp(nameAttrib, + dateValue, XTimeExtension.instance()); + attribMapEvent.put(nameAttrib, attribTimestampEvent); + } else { + String value = row.getValueAsString(next); + XAttributeLiteralImpl attribLit = new XAttributeLiteralImpl(nameAttrib, value); + attribMapEvent.put(nameAttrib, attribLit); + } + } + + } + XEvent event = factory.createEvent(attribMapEvent); + t.add(event); + } + } + return xLog; + } + + public List getParameterTypes() { + + List parameterTypes = super.getParameterTypes(); + + ParameterTypeString parameterType1 = new ParameterTypeString(PARAMETER_1, PARAMETER_1, "T:concept:name"); + parameterTypes.add(parameterType1); + + ParameterTypeString parameterType2 = new ParameterTypeString(PARAMETER_2, PARAMETER_2, "E:concept:name"); + parameterTypes.add(parameterType2); + + ParameterTypeString parameterType3 = new ParameterTypeString(PARAMETER_3, PARAMETER_3, + "E:lifecycle:transition"); + parameterTypes.add(parameterType3); + + ParameterTypeString parameterType4 = new ParameterTypeString(PARAMETER_4, PARAMETER_4, "E:time:timestamp"); + parameterTypes.add(parameterType4); + + ParameterTypeString parameterType5 = new ParameterTypeString(PARAMETER_5, PARAMETER_5, "E:org:resource"); + parameterTypes.add(parameterType5); + + return parameterTypes; + } + + private XTrace findTrace(String caseid, XLog xLog) { + for (XTrace t : xLog) { + String name = XConceptExtension.instance().extractName(t); + if (name.equals(caseid)) { + return t; + } + } + return null; + } + +} diff --git a/src/main/java/org/rapidprom/operators/logmanipulation/AddNoiseOperator.java b/src/main/java/org/rapidprom/operators/logmanipulation/AddNoiseOperator.java new file mode 100644 index 0000000..148ea28 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/logmanipulation/AddNoiseOperator.java @@ -0,0 +1,277 @@ +package org.rapidprom.operators.logmanipulation; + +import java.util.Date; +import java.util.List; +import java.util.Random; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.deckfour.xes.extension.std.XTimeExtension; +import org.deckfour.xes.factory.XFactory; +import org.deckfour.xes.factory.XFactoryNaiveImpl; +import org.deckfour.xes.model.XAttributeMap; +import org.deckfour.xes.model.XEvent; +import org.deckfour.xes.model.XLog; +import org.deckfour.xes.model.XTrace; +import org.rapidprom.ioobjects.XLogIOObject; + +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.operator.ports.metadata.MetaData; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeCategory; +import com.rapidminer.parameter.ParameterTypeDouble; +import com.rapidminer.parameter.ParameterTypeInt; +import com.rapidminer.parameter.UndefinedParameterError; +import com.rapidminer.tools.LogService; + +public class AddNoiseOperator extends Operator { + + private static final String PARAMETER_1_KEY = "Noise Percentage", + PARAMETER_1_DESCR = "The probabilitiy that, for any given trace, noise will be added to it.", + PARAMETER_2_KEY = "Noise Type", + PARAMETER_2_DESCR = "There are 5 possible noise types: remove head, remove body, swap tasks, " + + "remove task, and add task. 
The noise types \"remove head\" and \"remove body\" " + + "respectively remove at most the first or second 1/3 of a trace. " + + "The noise type swap randomly swaps two event in a trace. " + + "The noise type remove randomly removes event from a trace. " + + "The add type randomly adds an event to a trace.", + + PARAMETER_3_KEY = "Seed", + PARAMETER_3_DESCR = "This parameter defines the seed used to evaluate noise " + + "probability and apply the noise type."; + private static final String HEAD = "Remove Head", BODY = "Remove Body", + EXTRA = "Add Event", SWAP = "Swap Tasks", REMOVE = "Remove Task"; + + private InputPort inputXLog = getInputPorts() + .createPort("event log (ProM Event Log)", XLogIOObject.class); + private OutputPort outputEventLog = getOutputPorts() + .createPort("event log (ProM Event Log)"); + + public AddNoiseOperator(OperatorDescription description) { + super(description); + getTransformer().addRule( + new GenerateNewMDRule(outputEventLog, XLogIOObject.class)); + } + + public void doWork() throws OperatorException { + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, + "Start: add noise"); + long time = System.currentTimeMillis(); + + MetaData md = inputXLog.getMetaData(); + + XLogIOObject xLogIOObject = inputXLog.getData(XLogIOObject.class); + XLog logOriginal = xLogIOObject.getArtifact(); + XLog logModified = filterLog(logOriginal); + XLogIOObject result = new XLogIOObject(logModified, + xLogIOObject.getPluginContext()); + + outputEventLog.deliverMD(md); + outputEventLog.deliver(result); + logger.log(Level.INFO, + "End: add noise (" + + (System.currentTimeMillis() - time) / 1000 + " sec)"); + } + + private XLog filterLog(XLog log) throws UndefinedParameterError { + + XFactory factory = new XFactoryNaiveImpl(); + XLog result = factory.createLog(log.getAttributes()); + result.getClassifiers().addAll(log.getClassifiers()); + + int traceCounter = 0; + Random rOverall = new Random(getParameterAsInt(PARAMETER_3_KEY)); + for (XTrace t : log) { + XTrace copy = factory.createTrace(t.getAttributes()); + Random r = new Random(getParameterAsInt(PARAMETER_3_KEY) + + new Integer(traceCounter).hashCode()); + double nextDouble = rOverall.nextDouble(); + // System.out.println("nextDouble:" + nextDouble); + if (nextDouble < getParameterAsDouble(PARAMETER_1_KEY)) { + double oneThird = t.size() / 3.0; + if (getParameterAsString(PARAMETER_2_KEY).equals(HEAD)) { + int start = safeNextInt(r, (int) oneThird); + for (int i = start; i < t.size(); i++) { + XEvent e = t.get(i); + XEvent copyEvent = factory + .createEvent(e.getAttributes()); + copy.add(copyEvent); + } + } else if (getParameterAsString(PARAMETER_2_KEY).equals(BODY)) { + int stopFirst = safeNextInt(r, (int) oneThird); + for (int i = 0; i < stopFirst; i++) { + XEvent e = t.get(i); + XEvent copyEvent = factory + .createEvent(e.getAttributes()); + copy.add(copyEvent); + } + + int startLast = t.size() - safeNextInt(r, (int) oneThird); + for (int i = startLast; i < t.size(); i++) { + XEvent e = t.get(i); + XEvent copyEvent = factory + .createEvent(e.getAttributes()); + copy.add(copyEvent); + } + } else if (getParameterAsString(PARAMETER_2_KEY) + .equals(EXTRA)) { + for (XEvent e : t) { + XEvent copyEvent = factory + .createEvent(e.getAttributes()); + copy.add(copyEvent); + } + // add event + int pos = safeNextInt(r, t.size()); + + System.out.println("Pos: " + pos); + + // get the previous event to check for timestamp + Date lowb = (pos != 0) ? 
XTimeExtension.instance() + .extractTimestamp(copy.get(pos - 1)) : null; + Date upb = (pos != t.size()) ? XTimeExtension.instance() + .extractTimestamp(copy.get(pos)) : null; + + // if(lowb!= null) + // System.out.println("Low: " + lowb.toString()); + // + // if(upb!=null) + // System.out.println("Up: " + upb.toString()); + + if ((lowb != null) && (upb != null)) { + // the new event has timestamp in between + copy.add(pos, + createEvent(log, log.size(), r, new Date( + (upb.getTime() + lowb.getTime()) / 2), + XTimeExtension.instance())); + } else if (lowb != null) { + // there is a lower bound + copy.add(pos, + createEvent(log, log.size(), r, + new Date(lowb.getTime() + 1), + XTimeExtension.instance())); + } else if (upb != null) { + // there is an upper bound + copy.add(pos, + createEvent(log, log.size(), r, + new Date(upb.getTime() - 1), + XTimeExtension.instance())); + } else { + // there is neither a lower or an upper bound + copy.add(pos, createEvent(log, log.size(), r, null, + XTimeExtension.instance())); + } + } else if (getParameterAsString(PARAMETER_2_KEY).equals(SWAP)) { + int indexFirstTaskToSwap = safeNextInt(r, t.size()); + int indexSecondTaskToSwap = safeNextInt(r, t.size()); + XEvent firstTaskToSwap = null; + XEvent secondTaskToSwap = null; + XEvent event = null; + if (indexFirstTaskToSwap != indexSecondTaskToSwap) { + // it makes sense to swap + firstTaskToSwap = t.get(indexSecondTaskToSwap); + secondTaskToSwap = t.get(indexFirstTaskToSwap); + // swap also the timestamps + Date firstTimestamp = XTimeExtension.instance() + .extractTimestamp(firstTaskToSwap); + Date secondTimestamp = XTimeExtension.instance() + .extractTimestamp(secondTaskToSwap); + + for (int i = 0; i < t.size(); i++) { + if (i == indexFirstTaskToSwap) { + event = (XEvent) firstTaskToSwap.clone(); + XTimeExtension.instance().assignTimestamp(event, + secondTimestamp); + } else if (i == indexSecondTaskToSwap) { + event = (XEvent) secondTaskToSwap.clone(); + XTimeExtension.instance().assignTimestamp(event, + firstTimestamp); + } else { + event = t.get(i); + } + XEvent copyEvent = factory + .createEvent((XAttributeMap) event + .getAttributes().clone()); + copy.add(copyEvent); + + } + } else { + // we still need to copy + for (XEvent e : t) { + XEvent copyEvent = factory.createEvent( + (XAttributeMap) e.getAttributes().clone()); + copy.add(copyEvent); + } + } + } else { + // remove an event + int pos = Math.abs(r.nextInt()) % (t.size() + 1); + for (int i = 0; i < t.size(); i++) { + if (i != pos) { + XEvent event = t.get(i); + XEvent copyEvent = factory + .createEvent(event.getAttributes()); + copy.add(copyEvent); + } + } + } + } else { + for (XEvent e : t) { + XEvent copyEvent = factory.createEvent(e.getAttributes()); + copy.add(copyEvent); + } + } + traceCounter++; + result.add(copy); + } + return result; + } + + private int safeNextInt(Random r, int maxInt) { + return r.nextInt(maxInt > 0 ? 
maxInt : 1); + } + + protected XEvent createEvent(XLog log, int logSize, Random rand, Date date, + XTimeExtension xTime) { + // both date are null + XTrace tr = log.get(Math.abs(rand.nextInt()) % logSize); + int pos = safeNextInt(rand, tr.size()); + + if (pos == 0 && pos < tr.size() - 1) // so it does not create "start" + // events + pos++; + + XFactory factory = new XFactoryNaiveImpl(); + XEvent newEvt = factory.createEvent( + (XAttributeMap) tr.get(pos).getAttributes().clone()); + if (date != null) { + xTime.assignTimestamp(newEvt, date); + } + return newEvt; + } + + public List getParameterTypes() { + List parameterTypes = super.getParameterTypes(); + + ParameterTypeDouble parameterType1 = new ParameterTypeDouble( + PARAMETER_1_KEY, PARAMETER_1_DESCR, 0, 1, 0.05); + parameterTypes.add(parameterType1); + + String[] par2categories = new String[] { REMOVE, HEAD, BODY, EXTRA, + SWAP }; + ParameterTypeCategory parameterType2 = new ParameterTypeCategory( + PARAMETER_2_KEY, PARAMETER_2_DESCR, par2categories, 0); + parameterTypes.add(parameterType2); + + ParameterTypeInt parameterType3 = new ParameterTypeInt(PARAMETER_3_KEY, + PARAMETER_3_DESCR, 0, Integer.MAX_VALUE, 1); + parameterTypes.add(parameterType3); + return parameterTypes; + } + +} diff --git a/src/main/java/org/rapidprom/operators/logmanipulation/AddTraceAttributesToLogOperator.java b/src/main/java/org/rapidprom/operators/logmanipulation/AddTraceAttributesToLogOperator.java new file mode 100644 index 0000000..8be841c --- /dev/null +++ b/src/main/java/org/rapidprom/operators/logmanipulation/AddTraceAttributesToLogOperator.java @@ -0,0 +1,158 @@ +package org.rapidprom.operators.logmanipulation; + +import java.util.Date; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +import javax.swing.JOptionPane; + +import org.deckfour.xes.extension.std.XConceptExtension; +import org.deckfour.xes.model.XAttribute; +import org.deckfour.xes.model.XLog; +import org.deckfour.xes.model.XTrace; +import org.deckfour.xes.model.impl.XAttributeLiteralImpl; +import org.rapidprom.ioobjects.XLogIOObject; + +import com.rapidminer.example.Attribute; +import com.rapidminer.example.Attributes; +import com.rapidminer.example.Example; +import com.rapidminer.example.ExampleSet; +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.ExampleSetMetaData; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.operator.ports.metadata.MetaData; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeString; +import com.rapidminer.parameter.UndefinedParameterError; +import com.rapidminer.tools.LogService; +import com.rapidminer.tools.Ontology; + +public class AddTraceAttributesToLogOperator extends Operator { + + private static final String PARAMETER_1 = "Case id column"; + + private InputPort inputExampleSet = getInputPorts().createPort("example set (Data Table)", + new ExampleSetMetaData()); + private InputPort inputLog = getInputPorts().createPort("event log (ProM Event Log)", XLogIOObject.class); + private OutputPort outputLog = getOutputPorts().createPort("event log (ProM Event Log)"); + + public AddTraceAttributesToLogOperator(OperatorDescription description) { + 
super(description); + getTransformer().addRule(new GenerateNewMDRule(outputLog, XLogIOObject.class)); + } + + @Override + public void doWork() throws OperatorException { + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: add trace attributes"); + long time = System.currentTimeMillis(); + MetaData md = inputLog.getMetaData(); + + ExampleSet es = inputExampleSet.getData(ExampleSet.class); + + XLogIOObject logIO = inputLog.getData(XLogIOObject.class); + XLog xLog = logIO.getArtifact(); + + Attribute idColumnAttrib = null; + boolean found = false; + Iterator iterator = es.getAttributes().iterator(); + while (iterator.hasNext()) { + Attribute next = iterator.next(); + if (next.getName().equals(getParameterAsString(PARAMETER_1))) { + idColumnAttrib = next; + found = true; + break; + } + } + + if (found) { + XLog adaptedLog = mergeExampleSetIntoLog(xLog, es, getParameterAsString(PARAMETER_1), idColumnAttrib); + XLogIOObject xLogIOObject = new XLogIOObject(adaptedLog, logIO.getPluginContext()); + outputLog.deliverMD(md); + outputLog.deliver(xLogIOObject); + + } else { + // show warning + JOptionPane.showMessageDialog(null, "Case ID was not found", "Case ID column not found", + JOptionPane.ERROR_MESSAGE); + outputLog.deliverMD(md); + outputLog.deliver(null); + } + logger.log(Level.INFO, "End: add trace attributes (" + (System.currentTimeMillis() - time) / 1000 + " sec)"); + } + + public List getParameterTypes() { + List parameterTypes = super.getParameterTypes(); + + ParameterTypeString parameterType1 = new ParameterTypeString(PARAMETER_1, PARAMETER_1, "T:concept:name"); + parameterTypes.add(parameterType1); + + return parameterTypes; + } + + private HashMap buildTraceMap(XLog xlog) { + HashMap map = new HashMap<>(); + for (XTrace t : xlog) { + String name = XConceptExtension.instance().extractName(t); + if (name != null) { + map.put(name, t); + } + } + return map; + } + + private XLog mergeExampleSetIntoLog(XLog xLog, ExampleSet es, String nameIDcolumn, Attribute idColumnAttrib) + throws UndefinedParameterError { + + HashMap traceMap = buildTraceMap(xLog); + Iterator iterator = es.iterator(); + while (iterator.hasNext()) { + Example example = iterator.next(); + // get the case id and see if a corresponding trace can be found + String caseid = example.getValueAsString(idColumnAttrib); + XTrace t = traceMap.get(caseid); + if (t != null) { + Attributes attributes = example.getAttributes(); + Iterator iterator2 = attributes.iterator(); + while (iterator2.hasNext()) { + Attribute attrib = iterator2.next(); + XAttribute newAttrib = null; + if (!attrib.getName().equals(getParameterAsString(PARAMETER_1))) { + if (attrib.getValueType() == Ontology.NUMERICAL || attrib.getValueType() == Ontology.INTEGER + || attrib.getValueType() == Ontology.REAL) { + double numericalValue = example.getNumericalValue(attrib); + XAttributeLiteralImpl attribLit = new XAttributeLiteralImpl(attrib.getName(), + Double.toString(numericalValue)); + newAttrib = attribLit; + } else if (attrib.getValueType() == Ontology.NOMINAL + || attrib.getValueType() == Ontology.BINOMINAL + || attrib.getValueType() == Ontology.STRING + || attrib.getValueType() == Ontology.POLYNOMINAL) { + String nominalValue = example.getNominalValue(attrib); + XAttributeLiteralImpl attribLit = new XAttributeLiteralImpl(attrib.getName(), nominalValue); + newAttrib = attribLit; + } else if (attrib.getValueType() == Ontology.DATE_TIME) { + Date dateValue = example.getDateValue(attrib); + XAttributeLiteralImpl attribLit = new 
XAttributeLiteralImpl(attrib.getName(), + dateValue.toString()); + newAttrib = attribLit; + } + } + // add attribute to the log + if (newAttrib != null) { + t.getAttributes().put(attrib.getName(), newAttrib); + } + } + } + } + return xLog; + } + +} diff --git a/src/main/java/org/rapidprom/operators/logmanipulation/MergeTwoEventLogsOperator.java b/src/main/java/org/rapidprom/operators/logmanipulation/MergeTwoEventLogsOperator.java new file mode 100644 index 0000000..943c7cc --- /dev/null +++ b/src/main/java/org/rapidprom/operators/logmanipulation/MergeTwoEventLogsOperator.java @@ -0,0 +1,136 @@ +package org.rapidprom.operators.logmanipulation; + +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.deckfour.xes.classification.XEventClassifier; +import org.deckfour.xes.extension.std.XConceptExtension; +import org.deckfour.xes.factory.XFactoryRegistry; +import org.deckfour.xes.model.XEvent; +import org.deckfour.xes.model.XLog; +import org.deckfour.xes.model.XTrace; +import org.rapidprom.ioobjects.XLogIOObject; +import org.rapidprom.operators.ports.metadata.XLogIOObjectMetaData; + +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.operator.ports.metadata.MetaData; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeBoolean; +import com.rapidminer.tools.LogService; + +public class MergeTwoEventLogsOperator extends Operator { + + private static final String PARAMETER_1_KEY = "Merge traces with same identifier", + PARAMETER_1_DESCR = "If two traces have the same identifier, the traces are merged (true) and their event will be put under the same collection, or they are kept separately as independent traces (false)."; + + private InputPort inputLog1 = getInputPorts().createPort("event log 1 (ProM Event Log)", XLogIOObject.class); + private InputPort inputLog2 = getInputPorts().createPort("event log 2 (ProM Event Log)", XLogIOObject.class); + private OutputPort outputLog = getOutputPorts().createPort("event log (ProM Event Log)"); + + public MergeTwoEventLogsOperator(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(outputLog, XLogIOObject.class)); + } + + public void doWork() throws OperatorException { + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "Start: merge event logs"); + long time = System.currentTimeMillis(); + + MetaData md1 = inputLog1.getMetaData(); + + XLogIOObject logIO1 = inputLog1.getData(XLogIOObject.class); + XLog xLog1 = logIO1.getArtifact(); + XLogIOObject logIO2 = inputLog2.getData(XLogIOObject.class); + XLog xLog2 = logIO2.getArtifact(); + + // configuration + boolean dontMergeDouble = !getParameterAsBoolean(PARAMETER_1_KEY); + // first copy entire log1 + XLog result = XFactoryRegistry.instance().currentDefault().createLog(xLog1.getAttributes()); + + Set classifiers = new HashSet(); + classifiers.addAll(xLog1.getClassifiers()); + classifiers.addAll(xLog2.getClassifiers()); + + result.getClassifiers().addAll(classifiers); + + for (XTrace t : xLog1) { + XTrace copy = XFactoryRegistry.instance().currentDefault().createTrace(t.getAttributes()); + result.add(copy); + for (XEvent e : t) { + 
XEvent copyEvent = XFactoryRegistry.instance().currentDefault().createEvent(e.getAttributes()); + copy.add(copyEvent); + } + } + + for (XTrace t : xLog2) { + copyIntoFirstLog(t, result, dontMergeDouble); + } + // report the result + XLogIOObject xLogIOObject = new XLogIOObject(result, logIO1.getPluginContext()); + + XLogIOObjectMetaData mdC = null; + if (md1 != null && md1 instanceof XLogIOObjectMetaData) + mdC = (XLogIOObjectMetaData) md1; + + if (mdC != null) { + mdC.getXEventClassifiers().clear(); + mdC.getXEventClassifiers().addAll(classifiers); + outputLog.deliverMD(md1); + } + + outputLog.deliver(xLogIOObject); + + logger.log(Level.INFO, "End: merge event logs (" + (System.currentTimeMillis() - time) / 1000 + " sec)"); + + } + + public List getParameterTypes() { + List parameterTypes = super.getParameterTypes(); + + ParameterTypeBoolean parameterType1 = new ParameterTypeBoolean(PARAMETER_1_KEY, PARAMETER_1_DESCR, false); + parameterTypes.add(parameterType1); + + return parameterTypes; + } + + private void copyIntoFirstLog(XTrace t, XLog result, boolean dontMergeDouble) { + // check if in result log + String nameTrace = XConceptExtension.instance().extractName(t); + XTrace simTrace = null; + for (XTrace trace : result) { + String name = XConceptExtension.instance().extractName(trace); + if (name.equals(nameTrace)) { + // found trace with same name + simTrace = trace; + break; + } + } + if (simTrace != null && !dontMergeDouble) { + // I found a trace with similar name + // add the events + for (XEvent e : t) { + XEvent copyEvent = XFactoryRegistry.instance().currentDefault().createEvent(e.getAttributes()); + simTrace.add(copyEvent); + } + } else { + // trace is new + XTrace copy = XFactoryRegistry.instance().currentDefault().createTrace(t.getAttributes()); + for (XEvent e : t) { + XEvent copyEvent = XFactoryRegistry.instance().currentDefault().createEvent(e.getAttributes()); + copy.add(copyEvent); + } + result.add(copy); + } + } + +} diff --git a/src/main/java/org/rapidprom/operators/logmanipulation/TimestampSortOperator.java b/src/main/java/org/rapidprom/operators/logmanipulation/TimestampSortOperator.java new file mode 100644 index 0000000..b37d4db --- /dev/null +++ b/src/main/java/org/rapidprom/operators/logmanipulation/TimestampSortOperator.java @@ -0,0 +1,54 @@ +package org.rapidprom.operators.logmanipulation; + +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.deckfour.xes.model.XLog; +import org.processmining.plugins.log.ReSortLog; +import org.rapidprom.ioobjects.XLogIOObject; + +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.operator.ports.metadata.MetaData; +import com.rapidminer.tools.LogService; + +public class TimestampSortOperator extends Operator { + + private InputPort inputLog = getInputPorts().createPort( + "event log (ProM Event Log)", XLogIOObject.class); + private OutputPort outputLog = getOutputPorts().createPort( + "event log (ProM Event Log)"); + + public TimestampSortOperator(OperatorDescription description) { + super(description); + getTransformer().addRule( + new GenerateNewMDRule(outputLog, XLogIOObject.class)); + } + + @Override + public void doWork() throws OperatorException { + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, 
"Start: sort by timestamp"); + long time = System.currentTimeMillis(); + + MetaData md = inputLog.getMetaData(); + + XLogIOObject log = inputLog.getData(XLogIOObject.class); + XLog resultLog = ReSortLog.removeEdgePoints(log.getPluginContext(), + log.getArtifact()); + XLogIOObject result = new XLogIOObject(resultLog, + log.getPluginContext()); + + outputLog.deliverMD(md); + outputLog.deliver(result); + + logger.log(Level.INFO, + "End: sort by timestamp (" + + (System.currentTimeMillis() - time) / 1000 + " sec)"); + + } +} diff --git a/src/main/java/org/rapidprom/operators/ports/metadata/ExampleSetNumberOfAttributesPrecondition.java b/src/main/java/org/rapidprom/operators/ports/metadata/ExampleSetNumberOfAttributesPrecondition.java new file mode 100644 index 0000000..2374d8f --- /dev/null +++ b/src/main/java/org/rapidprom/operators/ports/metadata/ExampleSetNumberOfAttributesPrecondition.java @@ -0,0 +1,62 @@ +package org.rapidprom.operators.ports.metadata; + +import com.rapidminer.example.ExampleSet; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.metadata.AbstractPrecondition; +import com.rapidminer.operator.ports.metadata.CompatibilityLevel; +import com.rapidminer.operator.ports.metadata.ExampleSetMetaData; +import com.rapidminer.operator.ports.metadata.InputMissingMetaDataError; +import com.rapidminer.operator.ports.metadata.MetaData; +import com.rapidminer.operator.ports.metadata.MetaDataUnderspecifiedError; + +public class ExampleSetNumberOfAttributesPrecondition + extends AbstractPrecondition { + + private final int numberOfColumns; + + public ExampleSetNumberOfAttributesPrecondition(InputPort inputPort, + final int numColumns) { + super(inputPort); + this.numberOfColumns = numColumns; + } + + @Override + public void check(MetaData metaData) { + final InputPort inputPort = getInputPort(); + if (metaData == null) { + inputPort.addError(new InputMissingMetaDataError(inputPort, + ExampleSet.class, null)); + } else { + if (metaData instanceof ExampleSetMetaData) { + ExampleSetMetaData emd = (ExampleSetMetaData) metaData; + if (emd.getAllAttributes().size() < numberOfColumns) { + // TODO: customize the error message + inputPort.addError( + new MetaDataUnderspecifiedError(inputPort)); + } + } + } + + } + + @Override + public String getDescription() { + return "expects: ExampleSet"; + } + + @Override + public boolean isCompatible(MetaData input, CompatibilityLevel level) { + return ExampleSet.class.isAssignableFrom(input.getObjectClass()); + } + + @Override + public void assumeSatisfied() { + getInputPort().receiveMD(new ExampleSetMetaData()); + } + + @Override + public MetaData getExpectedMetaData() { + return new ExampleSetMetaData(); + } + +} diff --git a/src/main/java/org/rapidprom/operators/ports/metadata/XLogContainsXEventClassifiersPreCondition.java b/src/main/java/org/rapidprom/operators/ports/metadata/XLogContainsXEventClassifiersPreCondition.java new file mode 100644 index 0000000..4fa12f0 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/ports/metadata/XLogContainsXEventClassifiersPreCondition.java @@ -0,0 +1,33 @@ +package org.rapidprom.operators.ports.metadata; + +import org.rapidprom.ioobjects.XLogIOObject; + +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.metadata.InputMissingMetaDataError; +import com.rapidminer.operator.ports.metadata.MetaData; +import com.rapidminer.operator.ports.metadata.MetaDataUnderspecifiedError; +import com.rapidminer.operator.ports.metadata.SimplePrecondition; + 
+public class XLogContainsXEventClassifiersPreCondition extends + SimplePrecondition { + + protected InputPort inputPort; + + public XLogContainsXEventClassifiersPreCondition(InputPort inputPort) { + super(inputPort, null, false); + this.inputPort = inputPort; + } + + public void makeAdditionalChecks(MetaData received) { + if (received == null || !(received instanceof XLogIOObjectMetaData)) { + inputPort.addError(new MetaDataUnderspecifiedError(inputPort)); + } else { + XLogIOObjectMetaData recCast = (XLogIOObjectMetaData) received; + if (recCast.getXEventClassifiers().isEmpty()) { + inputPort.addError(new InputMissingMetaDataError(inputPort, + XLogIOObject.class)); + } + } + } + +} diff --git a/src/main/java/org/rapidprom/operators/ports/metadata/XLogIOObjectMetaData.java b/src/main/java/org/rapidprom/operators/ports/metadata/XLogIOObjectMetaData.java new file mode 100644 index 0000000..04b29a6 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/ports/metadata/XLogIOObjectMetaData.java @@ -0,0 +1,48 @@ +package org.rapidprom.operators.ports.metadata; + +import java.util.ArrayList; +import java.util.List; + +import org.deckfour.xes.classification.XEventClassifier; +import org.deckfour.xes.model.XLog; +import org.rapidprom.ioobjects.XLogIOObject; + +import com.rapidminer.operator.ports.metadata.MetaData; + +public class XLogIOObjectMetaData extends MetaData { + + private static final long serialVersionUID = 3447751295083897459L; + + private List classifiers; + + public XLogIOObjectMetaData() { + super(XLogIOObject.class); + classifiers = new ArrayList(); + } + + @Deprecated + public XLogIOObjectMetaData(XLog log) { + super(XLogIOObject.class); + classifiers = log.getClassifiers(); + } + + public XLogIOObjectMetaData(List classifiers) { + super(XLogIOObject.class); + this.classifiers = classifiers; + } + + public List getXEventClassifiers() { + return classifiers; + } + + public void setXEventClassifiers(List classifiers) { + this.classifiers = classifiers; + } + + @Override + public MetaData clone() { + XLogIOObjectMetaData clone = (XLogIOObjectMetaData) super.clone(); + clone.setXEventClassifiers(new ArrayList<>(getXEventClassifiers())); + return clone; + } +} diff --git a/src/main/java/org/rapidprom/operators/streams/analysis/AbstractEventStreamBasedDiscoveryAlgorithmAnalyzer.java b/src/main/java/org/rapidprom/operators/streams/analysis/AbstractEventStreamBasedDiscoveryAlgorithmAnalyzer.java new file mode 100644 index 0000000..581858f --- /dev/null +++ b/src/main/java/org/rapidprom/operators/streams/analysis/AbstractEventStreamBasedDiscoveryAlgorithmAnalyzer.java @@ -0,0 +1,275 @@ +package org.rapidprom.operators.streams.analysis; + +import java.io.File; +import java.io.IOException; +import java.util.EnumSet; +import java.util.List; + +import org.processmining.streamanalysis.parameters.XSEventStreamAnalyzerParameters; +import org.processmining.streamanalysis.parameters.XSEventStreamAnalyzerParameters.AnalysisScheme; +import org.processmining.streamanalysis.parameters.XSEventStreamAnalyzerParameters.FragmentationScheme; +import org.rapidprom.ioobjects.streams.XSStreamAnalyzerIOObject; +import org.rapidprom.ioobjects.streams.event.XSEventStreamIOObject; +import org.rapidprom.util.IOUtils; +import org.rapidprom.util.ObjectUtils; + +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.UserError; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.InputPortExtender; +import 
com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeBoolean; +import com.rapidminer.parameter.ParameterTypeCategory; +import com.rapidminer.parameter.ParameterTypeDirectory; +import com.rapidminer.parameter.ParameterTypeInt; +import com.rapidminer.parameter.ParameterTypeString; +import com.rapidminer.parameter.conditions.BooleanParameterCondition; +import com.rapidminer.parameter.conditions.EqualStringCondition; + +public abstract class AbstractEventStreamBasedDiscoveryAlgorithmAnalyzer
<P extends XSEventStreamAnalyzerParameters>
+ extends Operator { + + private final static String PARAMETER_KEY_ANALYSIS_SCHEME = "analysis_scheme"; + private final static String PARAMETER_DESC_ANALYSIS_SCHEME = "Determines the analysis scheme, i.e. Continuous (analyze every packet), or Fragmented (do not analyze every packet)."; + private final static AnalysisScheme[] PARAMETER_OPTIONS_ANALYSIS_SCHEME = EnumSet + .allOf(AnalysisScheme.class).toArray(new AnalysisScheme[EnumSet + .allOf(AnalysisScheme.class).size()]); + + private final static String PARAMETER_KEY_FRAGMENTATION_SCHEME = "fragmentation_scheme"; + private final static String PARAMETER_DESC_FRAGMENTATION_SCHEME = "Determines what fragmentation scheme to use if the analysis scheme selected is Fragmented. Currently only a linear scheme is implemented. The linear scheme allows for selecting a step size (s) and a window size (w). The window size should be even. Each time t with k * s - 0.5 * w <= t <= k * s + 0.5 * w for arbitrary k >= 0 will be analyzed resulting in window sizes w + 1 "; + private final static FragmentationScheme[] PARAMETER_OPTIONS_FRAGMENTATION_SCHEME = EnumSet + .allOf(FragmentationScheme.class) + .toArray(new FragmentationScheme[EnumSet + .allOf(FragmentationScheme.class).size()]); + + private final static String PARAMETER_KEY_STEP_SIZE = "step_size"; + private final static String PARAMETER_DESC_STEP_SIZE = "Determines the step size for the linear fragmentation scheme."; + private final static int PARAMETER_DEFAULT_VALUE_STEP_SIZE = XSEventStreamAnalyzerParameters.DEFAULT_STEP_SIZE; + + private final static String PARAMETER_KEY_FRAGMENTATION_WINDOW_SIZE = "fragmentation_window_size"; + private final static String PARAMETER_DESC_FRAGMENTATION_WINDOW_SIZE = "Determines what window size should be used for the (linear) fragmentation scheme."; + private final static int PARAMETER_DEFAULT_VALUE_FRAGMENTATION_WINDOW_SIZE = XSEventStreamAnalyzerParameters.DEFAULT_FRAGMENTATION_WINDOW_SIZE; + + private final static String EXPORT_FILE_FORMAT = "csv"; + private static final String PARAMETER_DESC_END_POINT = "Determines at what point the analysis should stop."; + + private final static String PARAMETER_DESC_FILE_NAME = "The file name of the exported event log."; + private final static String PARAMETER_DESC_FOLDER = "The folder where the exported event log should be stored."; + + // currently disabled storing model sequence. 
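+	// the corresponding parameter keys and descriptions are kept here but marked unused;
+	// their registration in getParameterTypes() and parseParameters() is commented out below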
+ @SuppressWarnings("unused") + private final static String PARAMETER_DESC_STORE_MODEL_DIR = "Directory where to store the model sequence"; + @SuppressWarnings("unused") + private final static String PARAMETER_DESC_STORE_MODEL_SEQUENCE = "Store a sequence of models (showing model evolution), potentially memory expensive"; + + private static final String PARAMETER_DESC_WRITE_TO_FILE = "Indicates whether the analyzer should write the results to a file"; + private static final String PARAMETER_KEY_END_POINT = "end_point"; + + private final static String PARAMETER_KEY_FILE_NAME = "file_name"; + + private final static String PARAMETER_KEY_FOLDER = "folder"; + @SuppressWarnings("unused") + private final static String PARAMETER_KEY_STORE_MODEL_DIR = "store_model_dir"; + + @SuppressWarnings("unused") + private final static String PARAMETER_KEY_STORE_MODEL_SEQUENCE = "store_model_sequence"; + private static final String PARAMETER_KEY_WRITE_TO_FILE = "write_to_file"; + + private final InputPortExtender algorithmsPort = new InputPortExtender( + "algorithms", getInputPorts(), null, 1); + + private P analyzerParameters; + + public void setAnalyzerParameters(P analyzerParameters) { + this.analyzerParameters = analyzerParameters; + } + + private final OutputPort analyzerPort = getOutputPorts() + .createPort("analyzer"); + + private final InputPort streamPort = getInputPorts() + .createPort("event stream", XSEventStreamIOObject.class); + + public AbstractEventStreamBasedDiscoveryAlgorithmAnalyzer( + OperatorDescription description, P parameters) { + super(description); + getAlgorithmsPort().start(); + getTransformer().addRule(new GenerateNewMDRule(getAnalyzerPort(), + XSStreamAnalyzerIOObject.class)); + this.analyzerParameters = parameters; + } + + private ParameterType createDirectoryChooserParameterType() { + return new ParameterTypeDirectory(PARAMETER_KEY_FOLDER, + PARAMETER_DESC_FOLDER, ""); + } + + private ParameterType createEndPointParameterType() { + return new ParameterTypeInt(PARAMETER_KEY_END_POINT, + PARAMETER_DESC_END_POINT, 0, Integer.MAX_VALUE, 1000); + } + + private ParameterType createFileNameParameterType() { + return new ParameterTypeString(PARAMETER_KEY_FILE_NAME, + PARAMETER_DESC_FILE_NAME); + } + + private ParameterType createWriteToFileParameterType() { + return new ParameterTypeBoolean(PARAMETER_KEY_WRITE_TO_FILE, + PARAMETER_DESC_WRITE_TO_FILE, true); + } + + /** + * @return the algorithmsPort + */ + public InputPortExtender getAlgorithmsPort() { + return algorithmsPort; + } + + public P getAnalyzerParameters() { + return analyzerParameters; + } + + /** + * @return the analyzerPort + */ + public OutputPort getAnalyzerPort() { + return analyzerPort; + } + + @Override + public List getParameterTypes() { + List params = super.getParameterTypes(); + ParameterTypeCategory analysisScheme = createAnalysisSchemeParameterType(); + params.add(analysisScheme); + ParameterTypeCategory fragmentationScheme = createFragmentationSchemeParameterType(); + fragmentationScheme.registerDependencyCondition( + new EqualStringCondition(this, PARAMETER_KEY_ANALYSIS_SCHEME, + true, AnalysisScheme.FRAGMENTED.toString())); + params.add(fragmentationScheme); + + ParameterTypeInt stepSizeParam = createStepSizeParameterType(); + stepSizeParam.registerDependencyCondition( + new EqualStringCondition(this, PARAMETER_KEY_ANALYSIS_SCHEME, + true, AnalysisScheme.FRAGMENTED.toString())); + params.add(stepSizeParam); + + ParameterTypeInt windowSizeParam = createWindowSizeParameterType(); + 
windowSizeParam.registerDependencyCondition( + new EqualStringCondition(this, PARAMETER_KEY_ANALYSIS_SCHEME, + true, AnalysisScheme.FRAGMENTED.toString())); + params.add(windowSizeParam); + + params.add(createEndPointParameterType()); + + params.add(createWriteToFileParameterType()); + + ParameterType dir = createDirectoryChooserParameterType(); + dir.setOptional(true); + dir.registerDependencyCondition(new BooleanParameterCondition(this, + PARAMETER_KEY_WRITE_TO_FILE, true, true)); + params.add(dir); + + ParameterType fileName = createFileNameParameterType(); + fileName.setOptional(true); + fileName.registerDependencyCondition(new BooleanParameterCondition(this, + PARAMETER_KEY_WRITE_TO_FILE, true, true)); + params.add(fileName); + + // params.add(new + // ParameterTypeBoolean(PARAMETER_KEY_STORE_MODEL_SEQUENCE, + // PARAMETER_DESC_STORE_MODEL_SEQUENCE, false)); + // + // ParameterType modelSequenceDir = new ParameterTypeDirectory( + // PARAMETER_KEY_STORE_MODEL_DIR, PARAMETER_DESC_STORE_MODEL_DIR, + // true); + // modelSequenceDir + // .registerDependencyCondition(new BooleanParameterCondition(this, + // PARAMETER_KEY_STORE_MODEL_SEQUENCE, true, true)); + // params.add(modelSequenceDir); + return params; + } + + private ParameterTypeInt createWindowSizeParameterType() { + return new ParameterTypeInt(PARAMETER_KEY_FRAGMENTATION_WINDOW_SIZE, + PARAMETER_DESC_FRAGMENTATION_WINDOW_SIZE, 0, Integer.MAX_VALUE, + PARAMETER_DEFAULT_VALUE_FRAGMENTATION_WINDOW_SIZE, false); + } + + private ParameterTypeInt createStepSizeParameterType() { + return new ParameterTypeInt(PARAMETER_KEY_STEP_SIZE, + PARAMETER_DESC_STEP_SIZE, 1, Integer.MAX_VALUE, + PARAMETER_DEFAULT_VALUE_STEP_SIZE, false); + } + + private ParameterTypeCategory createAnalysisSchemeParameterType() { + return new ParameterTypeCategory(PARAMETER_KEY_ANALYSIS_SCHEME, + PARAMETER_DESC_ANALYSIS_SCHEME, + ObjectUtils.toString(PARAMETER_OPTIONS_ANALYSIS_SCHEME), 0, + false); + } + + private ParameterTypeCategory createFragmentationSchemeParameterType() { + return new ParameterTypeCategory(PARAMETER_KEY_FRAGMENTATION_SCHEME, + PARAMETER_DESC_FRAGMENTATION_SCHEME, + ObjectUtils.toString(PARAMETER_OPTIONS_FRAGMENTATION_SCHEME), 0, + false); + } + + /** + * @return the streamPort + */ + public InputPort getStreamPort() { + return streamPort; + } + + protected P parseParameters() throws UserError, IOException { + setAnalyzerParameters(renewParameters()); + getAnalyzerParameters() + .setEndPoint(getParameterAsInt(PARAMETER_KEY_END_POINT)); + getAnalyzerParameters().setAnalysisScheme( + PARAMETER_OPTIONS_ANALYSIS_SCHEME[getParameterAsInt( + PARAMETER_KEY_ANALYSIS_SCHEME)]); + if (getAnalyzerParameters().getAnalysisScheme() + .equals(AnalysisScheme.FRAGMENTED)) { + getAnalyzerParameters().setFragmentationScheme( + PARAMETER_OPTIONS_FRAGMENTATION_SCHEME[getParameterAsInt( + PARAMETER_KEY_FRAGMENTATION_SCHEME)]); + getAnalyzerParameters() + .setStepSize(getParameterAsInt(PARAMETER_KEY_STEP_SIZE)); + getAnalyzerParameters().setFragmentationWindowSize( + getParameterAsInt(PARAMETER_KEY_FRAGMENTATION_WINDOW_SIZE)); + } + if (getParameterAsBoolean(PARAMETER_KEY_WRITE_TO_FILE)) { + File target = IOUtils.prepareTargetFile( + getParameterAsFile(PARAMETER_KEY_FOLDER).getCanonicalPath(), + getParameterAsString(PARAMETER_KEY_FILE_NAME), + EXPORT_FILE_FORMAT); + if (target.exists()) { + target.delete(); + } + target.createNewFile(); + getAnalyzerParameters().setMetricsFile(target); + } + // if (getParameterAsBoolean(PARAMETER_KEY_STORE_MODEL_SEQUENCE)) { + // File 
dir = getParameterAsFile(PARAMETER_KEY_STORE_MODEL_DIR); + // assert (dir.exists() && dir.isDirectory()); + // getAnalyzerParameters().setStoreModelSequence(true); + // getAnalyzerParameters().setModelSequenceDirectory(dir); + // } + getAnalyzerParameters().setVerbose(true); + return getAnalyzerParameters(); + } + + /** + * The parameters have to be renewed in a repeated experimental setting such + * that writing to file succeeds. + * + * @return fresh instance of P + */ + protected abstract P renewParameters(); + +} diff --git a/src/main/java/org/rapidprom/operators/streams/analysis/AlignmentAPNAnalyzerOperator.java b/src/main/java/org/rapidprom/operators/streams/analysis/AlignmentAPNAnalyzerOperator.java new file mode 100644 index 0000000..be14bbd --- /dev/null +++ b/src/main/java/org/rapidprom/operators/streams/analysis/AlignmentAPNAnalyzerOperator.java @@ -0,0 +1,128 @@ +package org.rapidprom.operators.streams.analysis; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.deckfour.xes.classification.XEventClassifier; +import org.processmining.acceptingpetrinet.models.AcceptingPetriNet; +import org.processmining.eventstream.core.interfaces.XSEvent; +import org.processmining.eventstream.core.interfaces.XSEventStream; +import org.processmining.eventstream.readers.acceptingpetrinet.XSEventStreamToAcceptingPetriNetReader; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.stream.core.interfaces.XSReader; +import org.processmining.streamanalysis.core.interfaces.XSStreamAnalyzer; +import org.processmining.streamanalysis.parameters.AlignmentAnalyzerParametersImpl; +import org.processmining.streamanalysis.plugins.AlignmentAPNAnalyzerPlugin; +import org.processmining.streamanalysis.utils.XLogArray; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.XLogIOObject; +import org.rapidprom.ioobjects.streams.XSStreamAnalyzerIOObject; +import org.rapidprom.ioobjects.streams.event.XSEventStreamIOObject; +import org.rapidprom.ioobjects.streams.event.XSEventStreamToAcceptingPetriNetReaderIOObject; + +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.UserError; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.InputPortExtender; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeInt; + +public class AlignmentAPNAnalyzerOperator + extends AbstractEventStreamBasedDiscoveryAlgorithmAnalyzer { + + private final static String PARAMETER_KEY_MAX_STATE_SPACE = "max_state_space"; + private final static String PARAMETER_DESC_MAX_STATE_SPACE = "Determine the maximal size of the state space of the underlying automaton."; + private final static int PARAMETER_DEFAULT_MAX_STATE_SPACE = 25000; + + private final static String PARAMETER_KEY_TIMEOUT = "timeout"; + private final static String PARAMETER_DESC_TIMEOUT = "Determine the timeout in ms for calcuating the alignments"; + private final static int PARAMETER_DEFAULT_TIMEOUT = 5000; + + private final InputPortExtender referenceLogsPort = new InputPortExtender("logs", getInputPorts(), null, 1); + + public AlignmentAPNAnalyzerOperator(OperatorDescription description) { + super(description, new AlignmentAnalyzerParametersImpl()); + referenceLogsPort.start(); + } + + @Override + public void doWork() throws OperatorException { + AlignmentAnalyzerParametersImpl 
params; + try { + params = parseParameters(); + } catch (IOException e) { + throw new OperatorException(e.getMessage()); + } + PluginContext context = RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(AlignmentAPNAnalyzerPlugin.class); + XSEventStream stream = getStreamPort().getData(XSEventStreamIOObject.class).getArtifact(); + XLogArray arr = new XLogArray(); + for (InputPort i : referenceLogsPort.getManagedPorts()) { + try { + arr.add(i.getData(XLogIOObject.class).getArtifact()); + } catch (UserError e) { + } + } + // TODO: use meta-data to process classifiers! + List classifiers = fetchClassifiers(arr); + params.setClassifier(classifiers.get(0)); + List algos = new ArrayList(); + for (InputPort i : getAlgorithmsPort().getManagedPorts()) { + try { + algos.add((XSEventStreamToAcceptingPetriNetReader) i + .getData(XSEventStreamToAcceptingPetriNetReaderIOObject.class).getArtifact()); + } catch (UserError e) { + } + } + XSStreamAnalyzer, Map>>>, AcceptingPetriNet> analyzer = AlignmentAPNAnalyzerPlugin + .run(context, stream, arr, params, + algos.toArray(new XSEventStreamToAcceptingPetriNetReader[algos.size()])); + getAnalyzerPort().deliver( + new XSStreamAnalyzerIOObject, Map>>>, AcceptingPetriNet>( + analyzer, context)); + } + + private List fetchClassifiers(XLogArray array) { + List classifiers = new ArrayList<>(); + classifiers.addAll(array.get(0).getClassifiers()); + for (int i = 1; i < array.size(); i++) { + classifiers.retainAll(array.get(i).getClassifiers()); + } + return classifiers; + } + + @Override + protected AlignmentAnalyzerParametersImpl parseParameters() throws UserError, IOException { + AlignmentAnalyzerParametersImpl params = super.parseParameters(); + params.setMaxNumberOfStates(getParameterAsInt(PARAMETER_KEY_MAX_STATE_SPACE)); + params.setTimeoutMili(getParameterAsInt(PARAMETER_KEY_TIMEOUT)); + return params; + } + + @Override + public List getParameterTypes() { + List params = super.getParameterTypes(); + params.add(createMaxStateSpaceParameterType()); + params.add(createTimeoutParameterType()); + return params; + } + + private ParameterTypeInt createMaxStateSpaceParameterType() { + return new ParameterTypeInt(PARAMETER_KEY_MAX_STATE_SPACE, PARAMETER_DESC_MAX_STATE_SPACE, 1, Integer.MAX_VALUE, + PARAMETER_DEFAULT_MAX_STATE_SPACE, true); + } + + private ParameterTypeInt createTimeoutParameterType() { + return new ParameterTypeInt(PARAMETER_KEY_TIMEOUT, PARAMETER_DESC_TIMEOUT, 0, Integer.MAX_VALUE, + PARAMETER_DEFAULT_TIMEOUT, true); + } + + @Override + protected AlignmentAnalyzerParametersImpl renewParameters() { + return new AlignmentAnalyzerParametersImpl(); + } + +} diff --git a/src/main/java/org/rapidprom/operators/streams/analysis/ProjRecPrecAPNStreamAnalyzerOperator.java b/src/main/java/org/rapidprom/operators/streams/analysis/ProjRecPrecAPNStreamAnalyzerOperator.java new file mode 100644 index 0000000..649af47 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/streams/analysis/ProjRecPrecAPNStreamAnalyzerOperator.java @@ -0,0 +1,119 @@ +package org.rapidprom.operators.streams.analysis; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.processmining.acceptingpetrinet.models.AcceptingPetriNet; +import org.processmining.acceptingpetrinet.models.AcceptingPetriNetArray; +import org.processmining.acceptingpetrinet.models.impl.AcceptingPetriNetArrayFactory; +import org.processmining.eventstream.core.interfaces.XSEvent; +import 
org.processmining.eventstream.core.interfaces.XSEventStream; +import org.processmining.eventstream.readers.acceptingpetrinet.XSEventStreamToAcceptingPetriNetReader; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.projectedrecallandprecision.framework.CompareParameters; +import org.processmining.stream.core.interfaces.XSReader; +import org.processmining.streamanalysis.core.interfaces.XSStreamAnalyzer; +import org.processmining.streamanalysis.parameters.ProjRecPrecAnalyzerParametersImpl; +import org.processmining.streamanalysis.plugins.ProjRecPrecAutomataXSEventStreamAPN2APNAnalyzerPlugin; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.AcceptingPetriNetIOObject; +import org.rapidprom.ioobjects.streams.XSStreamAnalyzerIOObject; +import org.rapidprom.ioobjects.streams.event.XSEventStreamIOObject; +import org.rapidprom.ioobjects.streams.event.XSEventStreamToAcceptingPetriNetReaderIOObject; + +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.UserError; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.InputPortExtender; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeInt; + +public class ProjRecPrecAPNStreamAnalyzerOperator + extends AbstractEventStreamBasedDiscoveryAlgorithmAnalyzer { + + private final InputPortExtender referenceModelsPort = new InputPortExtender("accepting petri nets", getInputPorts(), + null, 1); + + private final static String PARAMETER_KEY_MAX_STATE_SPACE = "max_state_space"; + private final static String PARAMETER_DESC_MAX_STATE_SPACE = "Determine the maximal size of the state space of the underlying automaton."; + private final static int PARAMETER_DEFAULT_MAX_STATE_SPACE = 2000; + + private final static String PARAMETER_KEY_PROJECTION_SIZE = "projection_size"; + private final static String PARAMETER_DESC_PROJECTION_SIZE = "Determine the number of activities taken into account per projection"; + private final static int PARAMETER_DEFAULT_PROJECTION_SIZE = 2; + + public ProjRecPrecAPNStreamAnalyzerOperator(OperatorDescription description) { + super(description, new ProjRecPrecAnalyzerParametersImpl()); + referenceModelsPort.start(); + } + + @Override + public void doWork() throws OperatorException { + ProjRecPrecAnalyzerParametersImpl params; + try { + params = parseParameters(); + } catch (IOException e) { + throw new OperatorException(e.getMessage()); + } + PluginContext context = RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(ProjRecPrecAutomataXSEventStreamAPN2APNAnalyzerPlugin.class); + XSEventStream stream = getStreamPort().getData(XSEventStreamIOObject.class).getArtifact(); + AcceptingPetriNetArray arr = AcceptingPetriNetArrayFactory.createAcceptingPetriNetArray(); + for (InputPort i : referenceModelsPort.getManagedPorts()) { + try { + arr.addNet(i.getData(AcceptingPetriNetIOObject.class).getArtifact()); + } catch (UserError e) { + } + } + List algos = new ArrayList(); + for (InputPort i : getAlgorithmsPort().getManagedPorts()) { + try { + algos.add((XSEventStreamToAcceptingPetriNetReader) i + .getData(XSEventStreamToAcceptingPetriNetReaderIOObject.class).getArtifact()); + } catch (UserError e) { + } + } + XSStreamAnalyzer, Map>>>, AcceptingPetriNet> analyzer = ProjRecPrecAutomataXSEventStreamAPN2APNAnalyzerPlugin + .run(context, stream, arr, params, + algos.toArray(new 
XSEventStreamToAcceptingPetriNetReader[algos.size()])); + getAnalyzerPort().deliver( + new XSStreamAnalyzerIOObject, Map>>>, AcceptingPetriNet>( + analyzer, context)); + } + + @Override + protected ProjRecPrecAnalyzerParametersImpl parseParameters() throws UserError, IOException { + ProjRecPrecAnalyzerParametersImpl params = super.parseParameters(); + CompareParameters compareParameters = new CompareParameters(getParameterAsInt(PARAMETER_KEY_PROJECTION_SIZE)); + compareParameters.setMaxStatesReachabilityGraph(getParameterAsInt(PARAMETER_KEY_MAX_STATE_SPACE)); + params.setProjRecParams(compareParameters); + return params; + } + + @Override + public List getParameterTypes() { + List params = super.getParameterTypes(); + params.add(createMaxStateSpaceParameterType()); + params.add(createProjectionSizeParameterType()); + return params; + } + + private ParameterTypeInt createMaxStateSpaceParameterType() { + return new ParameterTypeInt(PARAMETER_KEY_MAX_STATE_SPACE, PARAMETER_DESC_MAX_STATE_SPACE, 1, Integer.MAX_VALUE, + PARAMETER_DEFAULT_MAX_STATE_SPACE, true); + } + + private ParameterTypeInt createProjectionSizeParameterType() { + return new ParameterTypeInt(PARAMETER_KEY_PROJECTION_SIZE, PARAMETER_DESC_PROJECTION_SIZE, 2, 3, + PARAMETER_DEFAULT_PROJECTION_SIZE, true); + } + + @Override + protected ProjRecPrecAnalyzerParametersImpl renewParameters() { + return new ProjRecPrecAnalyzerParametersImpl(); + } + +} diff --git a/src/main/java/org/rapidprom/operators/streams/discovery/StreamAlphaMinerAcceptingPNOperator.java b/src/main/java/org/rapidprom/operators/streams/discovery/StreamAlphaMinerAcceptingPNOperator.java new file mode 100644 index 0000000..edeb899 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/streams/discovery/StreamAlphaMinerAcceptingPNOperator.java @@ -0,0 +1,46 @@ +package org.rapidprom.operators.streams.discovery; + +import org.processmining.acceptingpetrinet.models.AcceptingPetriNet; +import org.processmining.eventstream.core.interfaces.XSEvent; +import org.processmining.eventstream.core.interfaces.XSEventStream; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.stream.core.interfaces.XSReader; +import org.processmining.streamalphaminer.parameters.StreamAlphaMinerParameters; +import org.processmining.streamalphaminer.plugins.StreamAlphaMinerAcepptingPetriNetPlugin; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.streams.XSReaderIOObject; +import org.rapidprom.ioobjects.streams.event.XSEventStreamToAcceptingPetriNetReaderIOObject; +import org.rapidprom.operators.streams.discovery.abstr.AbstractDFABasedMinerOperator; + +import com.rapidminer.operator.OperatorDescription; + +public class StreamAlphaMinerAcceptingPNOperator + extends AbstractDFABasedMinerOperator { + + public StreamAlphaMinerAcceptingPNOperator(OperatorDescription description) { + super(description); + } + + @Override + protected PluginContext getPluginContextForAlgorithm() { + return RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(StreamAlphaMinerAcepptingPetriNetPlugin.class); + } + + @Override + protected StreamAlphaMinerParameters getAlgorithmParameterObject() { + return new StreamAlphaMinerParameters(); + } + + @Override + protected XSReaderIOObject getIOObject(XSReader algorithm, + PluginContext context) { + return new XSEventStreamToAcceptingPetriNetReaderIOObject(algorithm, context); + } + + @Override + protected XSReader getAlgorithm(PluginContext context, XSEventStream stream, + 
StreamAlphaMinerParameters parameters) { + return StreamAlphaMinerAcepptingPetriNetPlugin.apply(context, stream, parameters); + } +} diff --git a/src/main/java/org/rapidprom/operators/streams/discovery/StreamInductiveMinerAcceptingPNOperator.java b/src/main/java/org/rapidprom/operators/streams/discovery/StreamInductiveMinerAcceptingPNOperator.java new file mode 100644 index 0000000..7582773 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/streams/discovery/StreamInductiveMinerAcceptingPNOperator.java @@ -0,0 +1,49 @@ +package org.rapidprom.operators.streams.discovery; + +import org.processmining.acceptingpetrinet.models.AcceptingPetriNet; +import org.processmining.eventstream.core.interfaces.XSEvent; +import org.processmining.eventstream.core.interfaces.XSEventStream; +import org.processmining.eventstream.readers.acceptingpetrinet.XSEventStreamToAcceptingPetriNetReader; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.stream.core.interfaces.XSReader; +import org.processmining.streaminductiveminer.parameters.StreamInductiveMinerParameters; +import org.processmining.streaminductiveminer.plugins.StreamInductiveMinerAcceptingPetriNetPlugin; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.streams.XSReaderIOObject; +import org.rapidprom.ioobjects.streams.event.XSEventStreamToAcceptingPetriNetReaderIOObject; +import org.rapidprom.operators.streams.discovery.abstr.AbstractDFABasedMinerOperator; + +import com.rapidminer.operator.OperatorDescription; + +public class StreamInductiveMinerAcceptingPNOperator + extends AbstractDFABasedMinerOperator { + + public StreamInductiveMinerAcceptingPNOperator(OperatorDescription description) { + super(description); + } + + @Override + protected PluginContext getPluginContextForAlgorithm() { + return RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(StreamInductiveMinerAcceptingPetriNetPlugin.class); + } + + @Override + protected XSEventStreamToAcceptingPetriNetReader getAlgorithm(PluginContext context, XSEventStream stream, + StreamInductiveMinerParameters parameters) { + StreamInductiveMinerAcceptingPetriNetPlugin plugin = new StreamInductiveMinerAcceptingPetriNetPlugin(); + return plugin.apply(context, stream, parameters); + } + + @Override + protected StreamInductiveMinerParameters getAlgorithmParameterObject() { + return new StreamInductiveMinerParameters(); + } + + @Override + protected XSReaderIOObject getIOObject(XSReader algorithm, + PluginContext context) { + return new XSEventStreamToAcceptingPetriNetReaderIOObject(algorithm, context); + } + +} diff --git a/src/main/java/org/rapidprom/operators/streams/discovery/StreamInductiveMinerProcessTreeOperator.java b/src/main/java/org/rapidprom/operators/streams/discovery/StreamInductiveMinerProcessTreeOperator.java new file mode 100644 index 0000000..c4eb0f4 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/streams/discovery/StreamInductiveMinerProcessTreeOperator.java @@ -0,0 +1,49 @@ +package org.rapidprom.operators.streams.discovery; + +import org.processmining.eventstream.core.interfaces.XSEvent; +import org.processmining.eventstream.core.interfaces.XSEventStream; +import org.processmining.eventstream.readers.processtree.XSEventStreamToProcessTreeReader; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.processtree.ProcessTree; +import org.processmining.stream.core.interfaces.XSReader; +import 
org.processmining.streaminductiveminer.parameters.StreamInductiveMinerParameters; +import org.processmining.streaminductiveminer.plugins.StreamInductiveMinerProcessTreePlugin; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.streams.XSReaderIOObject; +import org.rapidprom.ioobjects.streams.event.XSEventStreamToProcessTreePetriNetReaderIOObject; +import org.rapidprom.operators.streams.discovery.abstr.AbstractDFABasedMinerOperator; + +import com.rapidminer.operator.OperatorDescription; + +public class StreamInductiveMinerProcessTreeOperator + extends AbstractDFABasedMinerOperator { + + public StreamInductiveMinerProcessTreeOperator(OperatorDescription description) { + super(description); + } + + @Override + protected XSEventStreamToProcessTreeReader getAlgorithm(PluginContext context, XSEventStream stream, + StreamInductiveMinerParameters parameters) { + StreamInductiveMinerProcessTreePlugin plugin = new StreamInductiveMinerProcessTreePlugin(); + return plugin.apply(context, stream, parameters); + } + + @Override + protected PluginContext getPluginContextForAlgorithm() { + return RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(StreamInductiveMinerProcessTreePlugin.class); + } + + @Override + protected StreamInductiveMinerParameters getAlgorithmParameterObject() { + return new StreamInductiveMinerParameters(); + } + + @Override + protected XSReaderIOObject getIOObject(XSReader algorithm, + PluginContext context) { + return new XSEventStreamToProcessTreePetriNetReaderIOObject(algorithm, context); + } + +} diff --git a/src/main/java/org/rapidprom/operators/streams/discovery/abstr/AbstractDFABasedMinerOperator.java b/src/main/java/org/rapidprom/operators/streams/discovery/abstr/AbstractDFABasedMinerOperator.java new file mode 100644 index 0000000..fe9b0d6 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/streams/discovery/abstr/AbstractDFABasedMinerOperator.java @@ -0,0 +1,208 @@ +package org.rapidprom.operators.streams.discovery.abstr; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.deckfour.xes.extension.std.XConceptExtension; +import org.processmining.eventstream.core.interfaces.XSEventSignature; +import org.processmining.eventstream.core.interfaces.XSEventStream; +import org.processmining.eventstream.readers.abstractions.CAxAADataStoreBasedDFAReaderImpl; +import org.processmining.eventstream.readers.abstractions.XSEventStreamToDFAReaderParameters; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.stream.core.interfaces.XSDataPacket; +import org.processmining.stream.core.interfaces.XSReader; +import org.processmining.stream.model.datastructure.DSParameter; +import org.processmining.stream.model.datastructure.DSParameterDefinition; +import org.processmining.stream.model.datastructure.DSParameterFactory; +import org.processmining.stream.model.datastructure.DataStructure.Type; +import org.rapidprom.ioobjects.streams.XSReaderIOObject; +import org.rapidprom.ioobjects.streams.event.XSEventStreamIOObject; +import org.rapidprom.util.ObjectUtils; + +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.UserError; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.parameter.ParameterType; +import 
com.rapidminer.parameter.ParameterTypeCategory; +import com.rapidminer.parameter.ParameterTypeInt; +import com.rapidminer.parameter.ParameterTypeString; +import com.rapidminer.parameter.UndefinedParameterError; +import com.rapidminer.parameter.conditions.EqualStringCondition; + +public abstract class AbstractDFABasedMinerOperator, R, P extends XSEventStreamToDFAReaderParameters> + extends Operator { + + private InputPort streamInputPort = getInputPorts() + .createPort("event stream", XSEventStreamIOObject.class); + + private OutputPort readerOutputPort = getOutputPorts().createPort("reader"); + + protected static final String PARAMETER_KEY_CASE_IDENTIFIER = "case_identifier"; + protected static final String PARAMETER_DESC_CASE_IDENTIFIER = "Defines what key to use within the data packet to identify a case."; + protected static final String PARAMETER_DEFAULT_CASE_IDENTIFIER = XSEventSignature.TRACE + .toString(); + + protected static final String PARAMETER_KEY_EVENT_IDENTIFIER = "activity_identifier"; + protected static final String PARAMETER_DESC_EVENT_IDENTIFIER = "Defines what key to use within the data packet to identify the activity of the event."; + protected static final String PARAMETER_DEFAULT_EVENT_IDENTIFIER = XConceptExtension.KEY_NAME; + + protected static final String PARAMETER_KEY_REFRESH_RATE = "refresh_rate"; + protected static final String PARAMETER_DESC_REFRESH_RATE = "Defines at what intervals (in terms of messages received) a new model should be queried."; + protected static final int PARAMETER_DEFAULT_REFRESH_RATE = -1; + + protected static final String PARAMETER_KEY_CASE_ACTIVITY_STORE = "case_activity_store"; + protected static final String PARAMETER_DESC_CASE_ACTIVITY_STORE = "Defines what stream-based data store to use for capturing CASE X ACTIVITY information."; + protected static final Type[] PARAMETER_OPTIONS_CASE_ACTIVITY_STORE = CAxAADataStoreBasedDFAReaderImpl.DEFAULT_ALLOWED_CASE_ACTIVITY_DATA_STRUCTURES + .toArray( + new Type[CAxAADataStoreBasedDFAReaderImpl.DEFAULT_ALLOWED_CASE_ACTIVITY_DATA_STRUCTURES + .size()]); + + protected static final String PARAMETER_KEY_ACTIVITY_ACTIVITY_STORE = "activity_activity_store"; + protected static final String PARAMETER_DESC_ACTIVITY_ACTIVITY_STORE = "Defines what stream-based data store to use for capturing ACITIVTY X ACTIVITY information."; + protected static final Type[] PARAMETER_OPTIONS_ACTIVITY_ACTIVITY_STORE = CAxAADataStoreBasedDFAReaderImpl.DEFAULT_ALLOWED_ACTIVITY_ACTIVITY_DATA_STRUCTURES + .toArray( + new Type[CAxAADataStoreBasedDFAReaderImpl.DEFAULT_ALLOWED_ACTIVITY_ACTIVITY_DATA_STRUCTURES + .size()]); + + public AbstractDFABasedMinerOperator(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(readerOutputPort, + XSReaderIOObject.class)); + } + + protected abstract PluginContext getPluginContextForAlgorithm(); + + protected abstract XSReader getAlgorithm(PluginContext context, + XSEventStream stream, P parameters); + + protected abstract XSReaderIOObject getIOObject( + XSReader algorithm, PluginContext context); + + @Override + public void doWork() throws UserError { + XSEventStream eventStream = streamInputPort + .getData(XSEventStreamIOObject.class).getArtifact(); + PluginContext context = getPluginContextForAlgorithm(); + P params = getAlgorithmParameterObject(); + params.setCaseIdentifier( + getParameterAsString(PARAMETER_KEY_CASE_IDENTIFIER)); + params.setActivityIdentifier( + getParameterAsString(PARAMETER_KEY_EVENT_IDENTIFIER)); + 
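+		// remaining user settings: refresh rate plus the CASE x ACTIVITY and ACTIVITY x ACTIVITY data store configuration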
params.setRefreshRate(getParameterAsInt(PARAMETER_KEY_REFRESH_RATE)); + params = setCaseActivityDataStructure(params); + params = setActivityActivityDataStructure(params); + XSReader reader = getAlgorithm(context, eventStream, params); + reader.startXSRunnable(); + readerOutputPort.deliver(getIOObject(reader, context)); + } + + protected abstract P getAlgorithmParameterObject(); + + protected P setActivityActivityDataStructure(P params) + throws UndefinedParameterError { + Type activityActivityStoreType = PARAMETER_OPTIONS_ACTIVITY_ACTIVITY_STORE[getParameterAsInt( + PARAMETER_KEY_ACTIVITY_ACTIVITY_STORE)]; + params.setActivityActivityDataStructureType(activityActivityStoreType); + params.setActivityActivityDataStructureParameters( + getDataStructureParameters( + PARAMETER_KEY_ACTIVITY_ACTIVITY_STORE, + activityActivityStoreType)); + return params; + } + + protected P setCaseActivityDataStructure(P params) + throws UndefinedParameterError { + Type caseActivityStoreType = PARAMETER_OPTIONS_CASE_ACTIVITY_STORE[getParameterAsInt( + PARAMETER_KEY_CASE_ACTIVITY_STORE)]; + params.setCaseActivityDataStructureType(caseActivityStoreType); + params.setCaseActivityDataStructureParameters( + getDataStructureParameters(PARAMETER_KEY_CASE_ACTIVITY_STORE, + caseActivityStoreType)); + return params; + } + + @Override + public List getParameterTypes() { + List params = super.getParameterTypes(); + + params.add(new ParameterTypeString(PARAMETER_KEY_CASE_IDENTIFIER, + PARAMETER_DESC_CASE_IDENTIFIER, + PARAMETER_DEFAULT_CASE_IDENTIFIER, false)); + + params.add(new ParameterTypeString(PARAMETER_KEY_EVENT_IDENTIFIER, + PARAMETER_DESC_EVENT_IDENTIFIER, + PARAMETER_DEFAULT_EVENT_IDENTIFIER, false)); + + params.add(new ParameterTypeInt(PARAMETER_KEY_REFRESH_RATE, + PARAMETER_DESC_REFRESH_RATE, -1, Integer.MAX_VALUE, + PARAMETER_DEFAULT_REFRESH_RATE, true)); + + params.add(new ParameterTypeCategory(PARAMETER_KEY_CASE_ACTIVITY_STORE, + PARAMETER_DESC_CASE_ACTIVITY_STORE, + ObjectUtils.toString(PARAMETER_OPTIONS_CASE_ACTIVITY_STORE), 0, + false)); + + params = addDataStructureDependencyConditions( + PARAMETER_KEY_CASE_ACTIVITY_STORE, + PARAMETER_OPTIONS_CASE_ACTIVITY_STORE, params); + + params.add( + new ParameterTypeCategory(PARAMETER_KEY_ACTIVITY_ACTIVITY_STORE, + PARAMETER_DESC_ACTIVITY_ACTIVITY_STORE, + ObjectUtils.toString( + PARAMETER_OPTIONS_ACTIVITY_ACTIVITY_STORE), + 0, false)); + + params = addDataStructureDependencyConditions( + PARAMETER_KEY_ACTIVITY_ACTIVITY_STORE, + PARAMETER_OPTIONS_ACTIVITY_ACTIVITY_STORE, params); + + return params; + } + + protected List addDataStructureDependencyConditions( + String parameterTypeKey, Type[] options, + List params) { + for (Type dataStructureType : options) { + for (DSParameterDefinition paramDef : dataStructureType + .getParameterDefinition()) { + String key = getDataStructureParameterSubKey(parameterTypeKey, + dataStructureType, paramDef.getName()); + ParameterType param = new ParameterTypeString(key, "", + paramDef.getDefaultValue().toString(), false); + param.setOptional(true); + param.registerDependencyCondition(new EqualStringCondition(this, + parameterTypeKey, true, dataStructureType.toString())); + params.add(param); + } + } + return params; + } + + protected String getDataStructureParameterSubKey( + String parentParameterTypeKey, Type sbdst, String param) { + return sbdst.toString().toLowerCase() + "_" + parentParameterTypeKey + + "_" + param; + } + + protected Map> getDataStructureParameters( + String parameterTypeKey, Type dataStructureType) + throws 
UndefinedParameterError { + Map> result = new HashMap>(); + for (DSParameterDefinition paramDef : dataStructureType + .getParameterDefinition()) { + String subKey = getDataStructureParameterSubKey(parameterTypeKey, + dataStructureType, paramDef.getName()); + String userValue = getParameterAsString(subKey); + DSParameter paramInstance = DSParameterFactory + .createParameter(paramDef.getParameterType() + .cast(paramDef.parse(paramDef, userValue))); + result.put(paramDef, paramInstance); + } + return result; + } +} diff --git a/src/main/java/org/rapidprom/operators/streams/extract/EventStreamToStaticEventStream.java b/src/main/java/org/rapidprom/operators/streams/extract/EventStreamToStaticEventStream.java new file mode 100644 index 0000000..272b4dd --- /dev/null +++ b/src/main/java/org/rapidprom/operators/streams/extract/EventStreamToStaticEventStream.java @@ -0,0 +1,90 @@ +package org.rapidprom.operators.streams.extract; + +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.processmining.eventstream.core.interfaces.XSEvent; +import org.processmining.eventstream.core.interfaces.XSStaticXSEventStream; +import org.processmining.eventstream.readers.staticeventstream.parameters.XSEventStreamToXSStaticEventStreamParameters; +import org.processmining.eventstream.readers.staticeventstream.plugins.XSEventStreamToXSStaticEventStreamPlugin; +import org.processmining.stream.core.interfaces.XSAuthor; +import org.rapidprom.ioobjects.streams.XSAuthorIOObject; +import org.rapidprom.ioobjects.streams.event.XSEventStreamIOObject; +import org.rapidprom.ioobjects.streams.event.XSStaticXSEventStreamIOObject; + +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeInt; +import com.rapidminer.tools.LogService; + +public class EventStreamToStaticEventStream extends Operator { + + private static final String PARAMETER_KEY_NUM_PACKETS = "num_packets"; + private static final String PARAMETER_DESC_NUM_PACKETS = "Number of packets to capture into the static event stream."; + private static final int PARAMETER_MIN_NUM_PACKETS = 0; + private static final int PARAMETER_MAX_NUM_PACKETS = Integer.MAX_VALUE; + private static final int PARAMETER_DEFAULT_NUM_PACKETS = 10000; + + private InputPort inputEventStream = getInputPorts().createPort("stream", + XSEventStreamIOObject.class); + + //optional port -> no .class argument + private InputPort inputAuthor = getInputPorts().createPort("author"); + + private OutputPort outputStaticEventStream = getOutputPorts() + .createPort("static stream"); + + public EventStreamToStaticEventStream(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(outputStaticEventStream, + XSStaticXSEventStreamIOObject.class)); + } + + @SuppressWarnings("unchecked") + @Override + public void doWork() throws OperatorException { + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "start do work Static Stream extractor"); + XSEventStreamToXSStaticEventStreamParameters parameters = new XSEventStreamToXSStaticEventStreamParameters(); + parameters.setTotalNumberOfEvents( + getParameterAsInt(PARAMETER_KEY_NUM_PACKETS)); + XSStaticXSEventStream result; 
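+		// the author port is optional: if an author is connected it is handed to the plugin,
+		// otherwise the context-free variant that only consumes the stream is used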
+ if (inputAuthor.getDataOrNull(XSAuthorIOObject.class) != null) { + result = XSEventStreamToXSStaticEventStreamPlugin.runContextFree( + (XSAuthor) inputAuthor + .getData(XSAuthorIOObject.class).getArtifact(), + inputEventStream.getData(XSEventStreamIOObject.class) + .getArtifact(), + parameters); + } else { + result = XSEventStreamToXSStaticEventStreamPlugin.runContextFree( + inputEventStream.getData(XSEventStreamIOObject.class) + .getArtifact(), + parameters); + } + + outputStaticEventStream + .deliver(new XSStaticXSEventStreamIOObject(result, null)); + logger.log(Level.INFO, "end do work Static Stream extractor"); + } + + @Override + public List getParameterTypes() { + List params = super.getParameterTypes(); + ParameterType numPackets = new ParameterTypeInt( + PARAMETER_KEY_NUM_PACKETS, PARAMETER_DESC_NUM_PACKETS, + PARAMETER_MIN_NUM_PACKETS, PARAMETER_MAX_NUM_PACKETS, + PARAMETER_DEFAULT_NUM_PACKETS, false); + numPackets.setOptional(false); + params.add(numPackets); + return params; + + } + +} diff --git a/src/main/java/org/rapidprom/operators/streams/generators/CPNToEventStreamOperator.java b/src/main/java/org/rapidprom/operators/streams/generators/CPNToEventStreamOperator.java new file mode 100644 index 0000000..f29e563 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/streams/generators/CPNToEventStreamOperator.java @@ -0,0 +1,235 @@ +package org.rapidprom.operators.streams.generators; + +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.processmining.eventstream.authors.cpn.parameters.CPN2XSEventStreamCaseIdentification; +import org.processmining.eventstream.authors.cpn.parameters.CPN2XSEventStreamParameters; +import org.processmining.eventstream.authors.cpn.plugins.CPNModelToXSEventStreamAuthorPlugin; +import org.processmining.eventstream.core.interfaces.XSEvent; +import org.processmining.eventstream.core.interfaces.XSEventStream; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.stream.core.enums.CommunicationType; +import org.processmining.stream.core.interfaces.XSAuthor; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.CPNModelIOObject; +import org.rapidprom.ioobjects.streams.XSAuthorIOObject; +import org.rapidprom.ioobjects.streams.event.XSEventStreamIOObject; + +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.parameter.ParameterType; +import com.rapidminer.parameter.ParameterTypeBoolean; +import com.rapidminer.parameter.ParameterTypeCategory; +import com.rapidminer.parameter.ParameterTypeInt; +import com.rapidminer.parameter.ParameterTypeString; +import com.rapidminer.parameter.UndefinedParameterError; +import com.rapidminer.parameter.conditions.EqualStringCondition; +import com.rapidminer.tools.LogService; + +public class CPNToEventStreamOperator extends Operator { + + private static final String PARAMETER_DEFAULT_VALUE_CASE_IDENTIFICATION_VARIABLE = CPN2XSEventStreamCaseIdentification.CPN_VARIABLE + .getDefaultValue(); + private static final String PARAMETER_KEY_CASE_IDENTIFICATION = "case_identification"; + + private static final String PARAMETER_KEY_CASE_IDENTIFICATION_VARIABLE = "cpn_variable"; + private static final String 
PARAMETER_KEY_COMMUNICATION_TYPE = "communication_type"; + + private static final String PARAMETER_KEY_INCLUDE_ADDITIONAL_DATA = "include_other_variables"; + + private static final String PARAMETER_KEY_MAX_STEPS = "max_steps"; + + private static final String PARAMETER_KEY_REPETITIONS = "repetitions"; + private static final String PARAMETER_KEY_STEP_DELAY = "step_delay"; + private static final String PARAMETER_LABEL_CASE_IDENTIFICATION = "Case identification, specifying what how to identify cases within the stream."; + private static final String PARAMETER_LABEL_CASE_IDENTIFICATION_VARIABLE = "Case identification by CPN variable, denotes what CPN variable to track."; + private static final String PARAMETER_LABEL_COMMUNIATION_TYPE = "The communicationtype of the selected stream, in synchronous mode, events will only be emitted if the receiving end is ready to accept new packages."; + + private static final String PARAMETER_LABEL_INCLUDE_ADDITIONAL_DATA = "If this parameter is set to true, the events will include other variable values as well"; + private static final String PARAMETER_LABEL_MAX_STEPS = "Max steps (-1 for no limit) within one simulation of the CPN model."; + private static final String PARAMETER_LABEL_REPETITIONS = "Number of simulations."; + + private static final String PARAMETER_LABEL_STEP_DELAY = "Step delay (ms) inbetween two consecutive emissions."; + private static final String PARAMETER_OPTION_CASE_IDENTIFICATION_REPETITION = CPN2XSEventStreamCaseIdentification.REPITITION + .toString(); + + private static final String PARAMETER_OPTION_CASE_IDENTIFICATION_VARIABELE = CPN2XSEventStreamCaseIdentification.CPN_VARIABLE + .toString(); + private static final String PARAMETER_OPTION_COMMUNICATION_TYPE_ASYNC = CommunicationType.ASYNC.toString(); + private static final String PARAMETER_OPTION_COMMUNICATION_TYPE_SYNC = CommunicationType.SYNC.toString(); + private static final String[] PARAMETER_OPTIONS_CASE_IDENTIFICATION = new String[] { + PARAMETER_OPTION_CASE_IDENTIFICATION_VARIABELE, PARAMETER_OPTION_CASE_IDENTIFICATION_REPETITION }; + private static final String[] PARAMETER_OPTIONS_COMMUNICATION_TYPE = new String[] { + PARAMETER_OPTION_COMMUNICATION_TYPE_SYNC, PARAMETER_OPTION_COMMUNICATION_TYPE_ASYNC }; + + private static final String PARAMETER_KEY_IGNORE_PAGE = "ignore_page"; + private static final String PARAMETER_LABEL_IGNORE_PAGE = "Ignore CPN model's page information in emitted events."; + + private static final String PARAMETER_KEY_IGNORE_PATTERSNS = "ignore_patterns"; + private static final String PARAMETER_LABEL_IGNORE_PATTERNS = "Provide a comma separated list of patterns to ignore for event emission"; + + private InputPort inputCPNModel = getInputPorts().createPort("cpn model", CPNModelIOObject.class); + private OutputPort outputAuthor = getOutputPorts().createPort("generator"); + private OutputPort outputStream = getOutputPorts().createPort("stream"); + + public CPNToEventStreamOperator(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(outputAuthor, XSAuthorIOObject.class)); + getTransformer().addRule(new GenerateNewMDRule(outputStream, XSEventStreamIOObject.class)); + } + + private CPN2XSEventStreamParameters determineCaseIdentification(CPN2XSEventStreamParameters params) + throws UndefinedParameterError { + String caseIdentificationType = PARAMETER_OPTIONS_CASE_IDENTIFICATION[getParameterAsInt( + PARAMETER_KEY_CASE_IDENTIFICATION)]; + if 
(caseIdentificationType.equals(PARAMETER_OPTION_CASE_IDENTIFICATION_REPETITION)) { + params.setCaseIdentificationType(CPN2XSEventStreamCaseIdentification.REPITITION); + } else if (caseIdentificationType.equals(PARAMETER_OPTION_CASE_IDENTIFICATION_VARIABELE)) { + params.setCaseIdentificationType(CPN2XSEventStreamCaseIdentification.CPN_VARIABLE); + params.setCaseIdentifier(getParameterAsString(PARAMETER_KEY_CASE_IDENTIFICATION_VARIABLE)); + } + return params; + } + + private CPN2XSEventStreamParameters determineCommunicationType(CPN2XSEventStreamParameters params) + throws UndefinedParameterError { + String communicationType = PARAMETER_OPTIONS_COMMUNICATION_TYPE[getParameterAsInt( + PARAMETER_KEY_COMMUNICATION_TYPE)]; + if (communicationType.equals(PARAMETER_OPTION_COMMUNICATION_TYPE_ASYNC)) { + params.setCommunicationType(CommunicationType.ASYNC); + } else if (communicationType.equals(PARAMETER_OPTION_COMMUNICATION_TYPE_SYNC)) { + params.setCommunicationType(CommunicationType.SYNC); + } + return params; + } + + @SuppressWarnings("unchecked") + public void doWork() throws OperatorException { + + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "start do work Stream Generator"); + + PluginContext context = RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(CPNModelToXSEventStreamAuthorPlugin.class); + + CPN2XSEventStreamParameters parameters = getStreamParameters(); + + Object[] result = CPNModelToXSEventStreamAuthorPlugin.apply(context, + inputCPNModel.getData(CPNModelIOObject.class).getArtifact(), parameters); + + outputAuthor.deliver(new XSAuthorIOObject((XSAuthor) result[0], context)); + outputStream.deliver(new XSEventStreamIOObject((XSEventStream) result[1], context)); + + logger.log(Level.INFO, "end do work Stream Generator"); + } + + @Override + public List getParameterTypes() { + List parameterTypes = super.getParameterTypes(); + parameterTypes = setupMaxStepsParameter(parameterTypes); + parameterTypes = setupRepetitionsParameter(parameterTypes); + parameterTypes = setupStepDelayParameter(parameterTypes); + parameterTypes = setupCaseIdentificationParameter(parameterTypes); + parameterTypes = setupAdditionalVariables(parameterTypes); + parameterTypes = setupCommunicationType(parameterTypes); + parameterTypes = setupIgnorePageParameter(parameterTypes); + parameterTypes = setupIgnorePatternsParameter(parameterTypes); + return parameterTypes; + } + + private List setupIgnorePatternsParameter(List parameterTypes) { + ParameterType ignorePatterns = new ParameterTypeString(PARAMETER_KEY_IGNORE_PATTERSNS, + PARAMETER_LABEL_IGNORE_PATTERNS, true, false); + parameterTypes.add(ignorePatterns); + return parameterTypes; + } + + private List setupIgnorePageParameter(List parameterTypes) { + ParameterType ignorePageBool = new ParameterTypeBoolean(PARAMETER_KEY_IGNORE_PAGE, PARAMETER_LABEL_IGNORE_PAGE, + true, false); + ignorePageBool.setOptional(false); + parameterTypes.add(ignorePageBool); + return parameterTypes; + } + + private CPN2XSEventStreamParameters getStreamParameters() throws UndefinedParameterError { + CPN2XSEventStreamParameters streamParams = new CPN2XSEventStreamParameters(); + streamParams.setMaximumNumberOfStepsPerRepetition(getParameterAsInt(PARAMETER_KEY_MAX_STEPS)); + streamParams.setTotalNumberOfRepetitions(getParameterAsInt(PARAMETER_KEY_REPETITIONS)); + streamParams.setTransitionDelayMs(getParameterAsInt(PARAMETER_KEY_STEP_DELAY)); + streamParams = determineCaseIdentification(streamParams); + 
streamParams.setIncludeVariables(getParameterAsBoolean(PARAMETER_KEY_INCLUDE_ADDITIONAL_DATA)); + streamParams = determineCommunicationType(streamParams); + streamParams.setIgnorePage(getParameterAsBoolean(PARAMETER_KEY_IGNORE_PAGE)); + String[] ignorePatterns = getParameterAsString(PARAMETER_KEY_IGNORE_PATTERSNS) == null ? new String[0] + : getParameterAsString(PARAMETER_KEY_IGNORE_PATTERSNS).split(","); + streamParams.setIgnorePatterns(ignorePatterns); + return streamParams; + } + + private List setupAdditionalVariables(List parameterTypes) { + ParameterTypeBoolean includeVariablesParam = new ParameterTypeBoolean(PARAMETER_KEY_INCLUDE_ADDITIONAL_DATA, + PARAMETER_LABEL_INCLUDE_ADDITIONAL_DATA, false); + includeVariablesParam.setOptional(false); + includeVariablesParam.setExpert(false); + parameterTypes.add(includeVariablesParam); + return parameterTypes; + } + + private List setupCaseIdentificationParameter(List parameterTypes) { + ParameterTypeCategory caseIdentificationCat = new ParameterTypeCategory(PARAMETER_KEY_CASE_IDENTIFICATION, + PARAMETER_LABEL_CASE_IDENTIFICATION, PARAMETER_OPTIONS_CASE_IDENTIFICATION, 0, false); + + parameterTypes.add(caseIdentificationCat); + + ParameterTypeString caseIdentificationVariable = new ParameterTypeString( + PARAMETER_KEY_CASE_IDENTIFICATION_VARIABLE, PARAMETER_LABEL_CASE_IDENTIFICATION_VARIABLE, + new String(PARAMETER_DEFAULT_VALUE_CASE_IDENTIFICATION_VARIABLE), false); + caseIdentificationVariable.setOptional(true); + caseIdentificationVariable + .registerDependencyCondition(new EqualStringCondition(this, PARAMETER_KEY_CASE_IDENTIFICATION, true, + new String[] { PARAMETER_OPTION_CASE_IDENTIFICATION_VARIABELE })); + parameterTypes.add(caseIdentificationVariable); + return parameterTypes; + } + + private List setupCommunicationType(List parameterTypes) { + ParameterTypeCategory communicationTypeParam = new ParameterTypeCategory(PARAMETER_KEY_COMMUNICATION_TYPE, + PARAMETER_LABEL_COMMUNIATION_TYPE, PARAMETER_OPTIONS_COMMUNICATION_TYPE, 0); + communicationTypeParam.setOptional(false); + communicationTypeParam.setExpert(false); + parameterTypes.add(communicationTypeParam); + return parameterTypes; + } + + private List setupMaxStepsParameter(List parameterTypes) { + ParameterTypeInt maxSteps = new ParameterTypeInt(PARAMETER_KEY_MAX_STEPS, PARAMETER_LABEL_MAX_STEPS, -1, + Integer.MAX_VALUE, -1, false); + maxSteps.setOptional(false); + parameterTypes.add(maxSteps); + return parameterTypes; + } + + private List setupRepetitionsParameter(List parameterTypes) { + ParameterTypeInt repetitions = new ParameterTypeInt(PARAMETER_KEY_REPETITIONS, PARAMETER_LABEL_REPETITIONS, 1, + Integer.MAX_VALUE, 1, false); + repetitions.setOptional(false); + parameterTypes.add(repetitions); + return parameterTypes; + } + + private List setupStepDelayParameter(List parameterTypes) { + ParameterTypeInt stepDelay = new ParameterTypeInt(PARAMETER_KEY_STEP_DELAY, PARAMETER_LABEL_STEP_DELAY, 0, + Integer.MAX_VALUE, 0, false); + stepDelay.setOptional(false); + parameterTypes.add(stepDelay); + return parameterTypes; + } + +} diff --git a/src/main/java/org/rapidprom/operators/streams/generators/StaticEventStreamToEventStreamOperator.java b/src/main/java/org/rapidprom/operators/streams/generators/StaticEventStreamToEventStreamOperator.java new file mode 100644 index 0000000..7ae5ac9 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/streams/generators/StaticEventStreamToEventStreamOperator.java @@ -0,0 +1,52 @@ +package org.rapidprom.operators.streams.generators; + +import 
java.util.logging.Level; +import java.util.logging.Logger; + +import org.processmining.eventstream.authors.staticeventstream.plugins.XSStaticXSEventStreamToXSEventStreamPlugin; +import org.processmining.eventstream.core.interfaces.XSEvent; +import org.processmining.eventstream.core.interfaces.XSEventStream; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.stream.core.interfaces.XSAuthor; +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; +import org.rapidprom.ioobjects.streams.XSAuthorIOObject; +import org.rapidprom.ioobjects.streams.event.XSEventStreamIOObject; +import org.rapidprom.ioobjects.streams.event.XSStaticXSEventStreamIOObject; + +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.OutputPort; +import com.rapidminer.operator.ports.metadata.GenerateNewMDRule; +import com.rapidminer.tools.LogService; + +public class StaticEventStreamToEventStreamOperator extends Operator { + + private InputPort inputStaticStream = getInputPorts().createPort("static stream", + XSStaticXSEventStreamIOObject.class); + private OutputPort outputAuthor = getOutputPorts().createPort("generator"); + private OutputPort outputStream = getOutputPorts().createPort("stream"); + + public StaticEventStreamToEventStreamOperator(OperatorDescription description) { + super(description); + getTransformer().addRule(new GenerateNewMDRule(outputAuthor, XSAuthorIOObject.class)); + getTransformer().addRule(new GenerateNewMDRule(outputStream, XSEventStreamIOObject.class)); + } + + @SuppressWarnings("unchecked") + @Override + public void doWork() throws OperatorException { + Logger logger = LogService.getRoot(); + logger.log(Level.INFO, "start do work Stream Generator"); + PluginContext context = RapidProMGlobalContext.instance() + .getFutureResultAwarePluginContext(XSStaticXSEventStreamToXSEventStreamPlugin.class); + Object[] authStream = XSStaticXSEventStreamToXSEventStreamPlugin.apply(context, + inputStaticStream.getData(XSStaticXSEventStreamIOObject.class).getArtifact()); + + outputAuthor.deliver(new XSAuthorIOObject((XSAuthor) authStream[0], context)); + outputStream.deliver(new XSEventStreamIOObject((XSEventStream) authStream[1], context)); + logger.log(Level.INFO, "end do work Stream Generator"); + } + +} diff --git a/src/main/java/org/rapidprom/operators/streams/util/PollXSRunnableOperator.java b/src/main/java/org/rapidprom/operators/streams/util/PollXSRunnableOperator.java new file mode 100644 index 0000000..5edc8a7 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/streams/util/PollXSRunnableOperator.java @@ -0,0 +1,46 @@ +package org.rapidprom.operators.streams.util; + +import org.processmining.stream.core.interfaces.XSRunnable; +import org.rapidprom.ioobjects.streams.XSRunnableIOObject; + +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.InputPortExtender; +import com.rapidminer.operator.ports.OutputPort; + +public class PollXSRunnableOperator extends Operator { + + private static final long SLEEP_TIME = 125; + + private final InputPort runnableInput = getInputPorts() + .createPort("runnable_to_poll", XSRunnableIOObject.class); + + private final InputPortExtender dependenciesPort = new 
InputPortExtender( + "objects_to_wait_for", getInputPorts(), null, false); + + private final OutputPort runnableOutput = getOutputPorts() + .createPort("generator"); + + public PollXSRunnableOperator(OperatorDescription description) { + super(description); + dependenciesPort.start(); + getTransformer().addPassThroughRule(runnableInput, runnableOutput); + } + + @Override + public void doWork() throws OperatorException { + XSRunnable runnable = (XSRunnable) runnableInput + .getData(XSRunnableIOObject.class).getArtifact(); + while (runnable.isRunning()) { + try { + Thread.sleep(SLEEP_TIME); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + runnableOutput.deliver(runnableInput.getData(XSRunnableIOObject.class)); + } + +} diff --git a/src/main/java/org/rapidprom/operators/streams/util/StartXSRunnableOperator.java b/src/main/java/org/rapidprom/operators/streams/util/StartXSRunnableOperator.java new file mode 100644 index 0000000..f87bc0d --- /dev/null +++ b/src/main/java/org/rapidprom/operators/streams/util/StartXSRunnableOperator.java @@ -0,0 +1,48 @@ +package org.rapidprom.operators.streams.util; + +import org.processmining.stream.core.interfaces.XSAuthor; +import org.processmining.stream.core.interfaces.XSRunnable; +import org.rapidprom.ioobjects.streams.XSAuthorIOObject; +import org.rapidprom.ioobjects.streams.XSRunnableIOObject; + +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.UserError; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.InputPortExtender; +import com.rapidminer.operator.ports.OutputPort; + +public class StartXSRunnableOperator extends Operator { + + private final InputPort runnableInput = getInputPorts() + .createPort("runnable_to_start", XSRunnableIOObject.class); + + private final InputPortExtender dependenciesPort = new InputPortExtender( + "objects_to_wait_for", getInputPorts(), null, false); + + private final OutputPort runnableOutput = getOutputPorts() + .createPort("runnable_passed_through"); + + public StartXSRunnableOperator(OperatorDescription description) { + super(description); + dependenciesPort.start(); + getTransformer().addPassThroughRule(runnableInput, runnableOutput); + } + + @Override + public void doWork() throws OperatorException { + for (InputPort i : dependenciesPort.getManagedPorts()) { + try { + ((XSRunnable) i.getData(XSRunnableIOObject.class).getArtifact()) + .startXSRunnable(); + } catch (UserError e) { + } + } + XSRunnable runnable = (XSAuthor) runnableInput + .getData(XSAuthorIOObject.class).getArtifact(); + runnable.startXSRunnable(); + runnableOutput.deliver(runnableInput.getData(XSAuthorIOObject.class)); + } + +} diff --git a/src/main/java/org/rapidprom/operators/streams/util/StopXSRunnableOperator.java b/src/main/java/org/rapidprom/operators/streams/util/StopXSRunnableOperator.java new file mode 100644 index 0000000..f83abe6 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/streams/util/StopXSRunnableOperator.java @@ -0,0 +1,48 @@ +package org.rapidprom.operators.streams.util; + +import org.processmining.stream.core.interfaces.XSAuthor; +import org.processmining.stream.core.interfaces.XSRunnable; +import org.rapidprom.ioobjects.streams.XSAuthorIOObject; +import org.rapidprom.ioobjects.streams.XSRunnableIOObject; + +import com.rapidminer.operator.Operator; +import com.rapidminer.operator.OperatorDescription; +import 
com.rapidminer.operator.OperatorException; +import com.rapidminer.operator.UserError; +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.InputPortExtender; +import com.rapidminer.operator.ports.OutputPort; + +public class StopXSRunnableOperator extends Operator { + + private final InputPort runnableInput = getInputPorts() + .createPort("runnable_to_stop", XSRunnableIOObject.class); + + private final InputPortExtender artifactsPort = new InputPortExtender( + "objects_to_stop", getInputPorts(), null, false); + + private final OutputPort runnableOutput = getOutputPorts() + .createPort("runnable_passed_through"); + + public StopXSRunnableOperator(OperatorDescription description) { + super(description); + artifactsPort.start(); + getTransformer().addPassThroughRule(runnableInput, runnableOutput); + } + + @Override + public void doWork() throws OperatorException { + for (InputPort i : artifactsPort.getManagedPorts()) { + try { + ((XSRunnable) i.getData(XSRunnableIOObject.class).getArtifact()) + .stopXSRunnable(); + } catch (UserError e) { + } + } + XSRunnable runnable = (XSAuthor) runnableInput + .getData(XSAuthorIOObject.class).getArtifact(); + runnable.stopXSRunnable(); + runnableOutput.deliver(runnableInput.getData(XSAuthorIOObject.class)); + } + +} diff --git a/src/main/java/org/rapidprom/operators/util/RapidProMMemoryCleanUpOperator.java b/src/main/java/org/rapidprom/operators/util/RapidProMMemoryCleanUpOperator.java new file mode 100644 index 0000000..1291ff1 --- /dev/null +++ b/src/main/java/org/rapidprom/operators/util/RapidProMMemoryCleanUpOperator.java @@ -0,0 +1,21 @@ +package org.rapidprom.operators.util; + +import org.rapidprom.external.connectors.prom.RapidProMGlobalContext; + +import com.rapidminer.operator.MemoryCleanUp; +import com.rapidminer.operator.OperatorDescription; +import com.rapidminer.operator.OperatorException; + +public class RapidProMMemoryCleanUpOperator extends MemoryCleanUp { + + public RapidProMMemoryCleanUpOperator(OperatorDescription description) { + super(description); + } + + @Override + public void doWork() throws OperatorException { + RapidProMGlobalContext.instance().getPluginContext().clear(); + super.doWork(); + } + +} diff --git a/src/main/java/org/rapidprom/parameter/ParameterTypeDynamicCategory.java b/src/main/java/org/rapidprom/parameter/ParameterTypeDynamicCategory.java new file mode 100644 index 0000000..f2ddbd9 --- /dev/null +++ b/src/main/java/org/rapidprom/parameter/ParameterTypeDynamicCategory.java @@ -0,0 +1,163 @@ +package org.rapidprom.parameter; + +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.MetaDataChangeListener; +import com.rapidminer.operator.ports.metadata.MetaData; +import com.rapidminer.parameter.MetaDataProvider; +import com.rapidminer.parameter.ParameterTypeCategory; +import com.rapidminer.tools.container.Pair; + +/** + * The ParameterTypeDynamicCategory, allows us to change the contents of a + * {@link ParameterTypeCategory}, depending on some inputPort's meta data. + * Additionally this class introduces the option to query, given the selected + * index, what the corresponding object is. + * + * @author svzelst + * + * @param + * indicates what underlying java object the user wants to choose. 
+ */ +public abstract class ParameterTypeDynamicCategory + extends ParameterTypeCategory { + + private static final long serialVersionUID = 5610913750316933718L; + + private final T[] defaultValues; + private final String[] defaultValuesToString; + + private final MetaDataProvider metaDataProvider; + + private T[] values = null; + private String[] valuesToString = new String[0]; + + public ParameterTypeDynamicCategory(String key, String description, + String[] defaultValuesToString, T[] defaultValues, + int defaultValueIndex, boolean expert, final InputPort inputPort) { + this(key, description, defaultValuesToString, defaultValues, + defaultValueIndex, expert, new MetaDataProvider() { + + @Override + public void addMetaDataChangeListener( + MetaDataChangeListener l) { + inputPort.registerMetaDataChangeListener(l); + + } + + @Override + public MetaData getMetaData() { + if (inputPort != null) { + return inputPort.getMetaData(); + } else { + return null; + } + } + + @Override + public void removeMetaDataChangeListener( + MetaDataChangeListener l) { + inputPort.removeMetaDataChangeListener(l); + + } + }); + } + + public ParameterTypeDynamicCategory(String key, String description, + String[] defaultValuesToString, T[] defaultValues, + int defaultValueIndex, boolean expert, + MetaDataProvider metaDataProvider) { + super(key, description, defaultValuesToString, defaultValueIndex, + expert); + this.defaultValues = defaultValues; + this.defaultValuesToString = defaultValuesToString; + this.valuesToString = defaultValuesToString; + this.values = defaultValues; + this.metaDataProvider = metaDataProvider; + } + + public int getIndexOf(String string) { + for (int i = 0; i < valuesToString.length; i++) { + if (valuesToString[i].equals(string)) { + return Integer.valueOf(i); + } + } + // try to interpret string as number + try { + return Integer.parseInt(string); + } catch (NumberFormatException e) { + return -1; + } + } + + public T valueOf(int index) throws IndexOutOfBoundsException { + if (index > values.length) { + throw new IndexOutOfBoundsException("The index is not defined"); + } + return values[index]; + } + + public String[] getValuesToString() { + return valuesToString; + } + + public void setValuesToString(String[] valuesToString) { + this.valuesToString = valuesToString; + } + + public T[] getDefaultValues() { + return defaultValues; + } + + public String[] getDefaultValuesToString() { + return defaultValuesToString; + } + + public void setValues(T[] values) { + this.values = values; + } + + public MetaDataProvider getMetaDataProvider() { + return metaDataProvider; + } + + @Override + public String[] getValues() { + Pair newValues = updateValues(); + valuesToString = newValues.getFirst(); + values = newValues.getSecond(); + return valuesToString; + } + + @Override + public String toString(Object value) { + try { + if (value == null) + return null; + int index = Integer.parseInt(value.toString()); + if (index >= valuesToString.length) + return ""; + return super.toString(valuesToString[index]); + } catch (NumberFormatException e) { + return super.toString(value); + } + } + + @Override + public int getIndex(String string) { + for (int i = 0; i < valuesToString.length; i++) { + if (valuesToString[i].equals(string)) { + return Integer.valueOf(i); + } + } + // try to interpret string as number + try { + return Integer.parseInt(string); + } catch (NumberFormatException e) { + // take the first of the *probably* new list + return 0; + } + } + + protected abstract Pair updateValues(); + +} diff 
--git a/src/main/java/org/rapidprom/parameter/ParameterTypeExampleSetAttributesDynamicCategory.java b/src/main/java/org/rapidprom/parameter/ParameterTypeExampleSetAttributesDynamicCategory.java new file mode 100644 index 0000000..e80b457 --- /dev/null +++ b/src/main/java/org/rapidprom/parameter/ParameterTypeExampleSetAttributesDynamicCategory.java @@ -0,0 +1,56 @@ +package org.rapidprom.parameter; + +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.metadata.AttributeMetaData; +import com.rapidminer.operator.ports.metadata.ExampleSetMetaData; +import com.rapidminer.operator.ports.metadata.MetaData; +import com.rapidminer.tools.container.Pair; + +public class ParameterTypeExampleSetAttributesDynamicCategory + extends ParameterTypeDynamicCategory { + + private static final long serialVersionUID = -6944684403198889196L; + + public ParameterTypeExampleSetAttributesDynamicCategory(String key, + String description, String[] categories, + String[] correspondingValues, int defaultValue, boolean expert, + final InputPort inputPort) { + super(key, description, categories, correspondingValues, defaultValue, + expert, inputPort); + } + + @Override + protected Pair updateValues() { + MetaData md = getMetaDataProvider().getMetaData(); + // sometimes, the metadata can be null although it is actually not. + // suspending the thread for a while appears to be overcoming this + // problem. + // this is an ugly fix, yet it works. The problem appears in RM 5 and RM + // 7. + if (md == null) { + try { + Thread.sleep(50); + } catch (InterruptedException e) { + e.printStackTrace(); + } + md = getMetaDataProvider().getMetaData(); + } + if (md != null && md instanceof ExampleSetMetaData) { + String[] categories = null; + ExampleSetMetaData mdc = (ExampleSetMetaData) md; + if (!mdc.getAllAttributes().isEmpty()) { + categories = new String[mdc.getAllAttributes().size()]; + int i = 0; + for (AttributeMetaData amd : mdc.getAllAttributes()) { + categories[i] = amd.getName(); + i++; + } + return new Pair(categories, categories); + } + } + // restore default + return new Pair(getDefaultValuesToString(), + getDefaultValues()); + } + +} diff --git a/src/main/java/org/rapidprom/parameter/ParameterTypeXEventClassifierCategory.java b/src/main/java/org/rapidprom/parameter/ParameterTypeXEventClassifierCategory.java new file mode 100644 index 0000000..596c985 --- /dev/null +++ b/src/main/java/org/rapidprom/parameter/ParameterTypeXEventClassifierCategory.java @@ -0,0 +1,48 @@ +package org.rapidprom.parameter; + +import org.deckfour.xes.classification.XEventClassifier; +import org.rapidprom.operators.ports.metadata.XLogIOObjectMetaData; + +import com.rapidminer.operator.ports.InputPort; +import com.rapidminer.operator.ports.metadata.MetaData; +import com.rapidminer.tools.container.Pair; + +public class ParameterTypeXEventClassifierCategory + extends ParameterTypeDynamicCategory { + + private static final long serialVersionUID = 2977722407280721507L; + + public ParameterTypeXEventClassifierCategory(String key, String description, + String[] categories, XEventClassifier[] correspondingValues, + int defaultValue, boolean expert, InputPort inputPort) { + super(key, description, categories, correspondingValues, defaultValue, + expert, inputPort); + } + + @Override + protected Pair updateValues() { + MetaData md = getMetaDataProvider().getMetaData(); + if (md != null && md instanceof XLogIOObjectMetaData) { + String[] categories = null; + XEventClassifier[] correspondingValues = null; + 
XLogIOObjectMetaData mdC = (XLogIOObjectMetaData) md; + if (!(mdC.getXEventClassifiers().isEmpty())) { + categories = new String[mdC.getXEventClassifiers().size()]; + correspondingValues = new XEventClassifier[mdC + .getXEventClassifiers().size()]; + int i = 0; + for (XEventClassifier e : mdC.getXEventClassifiers()) { + correspondingValues[i] = e; + categories[i] = e.toString(); + i++; + } + return new Pair(categories, + correspondingValues); + } + } + // restore defaults + return new Pair( + getDefaultValuesToString(), getDefaultValues()); + } + +} diff --git a/src/main/java/org/rapidprom/properties/RapidProMProperties.java b/src/main/java/org/rapidprom/properties/RapidProMProperties.java new file mode 100644 index 0000000..b67bfdb --- /dev/null +++ b/src/main/java/org/rapidprom/properties/RapidProMProperties.java @@ -0,0 +1,70 @@ +package org.rapidprom.properties; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Properties; + +/** + * Load RapidProM properties such as location of ProM packages etc. + * + * @author svzelst + */ +public class RapidProMProperties { + + private static String RAPIDPROM_PROPERTIES_FILE = "/org/rapidprom/resources/rapidprom.properties"; + private static RapidProMProperties instance = null; + private final Properties properties; + + private Deployment deployment = null; + + public enum Deployment { + DEVELOPMENT, LIVE; + } + + private RapidProMProperties() { + properties = setup(); + } + + public static RapidProMProperties instance() { + if (instance == null) { + instance = new RapidProMProperties(); + } + return instance; + } + + private Properties setup() { + Properties properties = new Properties(); + InputStream propertiesIS = RapidProMProperties.class + .getResourceAsStream(RAPIDPROM_PROPERTIES_FILE); + try { + properties.load(propertiesIS); + } catch (IOException e) { + e.printStackTrace(); + } + return properties; + } + + public Properties getProperties() { + return properties; + } + + public Deployment getDeployment() { + if (deployment == null) { + String deploymentProp = properties + .getProperty("deployment"); + if (deploymentProp != null) { + if (deploymentProp.equals("live")) { + deployment = Deployment.LIVE; + } else { + deployment = Deployment.DEVELOPMENT; + } + } + } + return deployment; + } + + public String getExtensionName() { + return properties.getProperty("extension.name"); + } + +} diff --git a/src/main/java/org/rapidprom/util/IOUtils.java b/src/main/java/org/rapidprom/util/IOUtils.java new file mode 100644 index 0000000..0d94a36 --- /dev/null +++ b/src/main/java/org/rapidprom/util/IOUtils.java @@ -0,0 +1,42 @@ +package org.rapidprom.util; + +import java.io.File; +import java.io.IOException; + +import com.rapidminer.operator.UserError; + +public class IOUtils { + + /** + * Creates a File object in some directory, given some name, and given some + * extention (which is possibly an enum). It specifically checks whether the + * directory path contains ".null" at the end which is a side-effect of + * RapidMiner's directory chooser. + * + * @param dirPath + * @param name + * @param format + * @return + * @throws UserError + * @throws IOException + */ + public static File prepareTargetFile(String dirPath, String name, + F format) throws UserError, IOException { + // only remove a ".null" if it is the last occurring element of the + // path. 
+ String nullStr = ".null"; + if (dirPath.length() > nullStr.length()) { + if (dirPath.substring(dirPath.length() - nullStr.length(), + dirPath.length()).contains(".null")) { + dirPath = dirPath.substring(0, + dirPath.length() - nullStr.length()); + } + } + if (!dirPath.endsWith(File.separator)) { + dirPath += File.separator; + } + dirPath += name + "." + format.toString(); + return new File(dirPath); + } + +} diff --git a/src/main/java/org/rapidprom/util/LpSolveUtils.java b/src/main/java/org/rapidprom/util/LpSolveUtils.java new file mode 100644 index 0000000..855047f --- /dev/null +++ b/src/main/java/org/rapidprom/util/LpSolveUtils.java @@ -0,0 +1,58 @@ +package org.rapidprom.util; + +import com.rapidminer.tools.SystemInfoUtilities.JVMArch; +import com.rapidminer.tools.SystemInfoUtilities.OperatingSystem; + +public class LpSolveUtils { + + private static final String BASE_PATH = "lpsolve/resources/"; + + public static final String LPSOLVE_JAR = "lpsolve55j.jar"; + public static final String BINARIES_FOLDER = "lib"; + + /** + * Returns the path to the binaries and jar file of LpSolve, based on OS and + * JVM Architecture. To get the jar file, use: String jar = + * getOSBasedLpSolvePath + LPSOLVE_JAR;. + * + * @param os + * @param jvmArch + * @return + */ + public static String getOSBasedLpSolvePath(OperatingSystem os, JVMArch jvmArch) { + return BASE_PATH + getOSFolder(os, jvmArch); + } + + public static String getOSFolder(OperatingSystem os, JVMArch jvmArch) { + String result = ""; + switch (os) { + case OSX: + result = "mac" + jvmArchToString(jvmArch) + "/"; + break; + case WINDOWS: + result = "win" + jvmArchToString(jvmArch) + "/"; + break; + case OTHER: + case SOLARIS: + case UNIX: + default: + result = "ux" + jvmArchToString(jvmArch) + "/"; + break; + } + return result; + } + + public static String jvmArchToString(JVMArch jvmArch) { + String res = ""; + switch (jvmArch) { + case SIXTY_FOUR: + res = "64"; + break; + case THIRTY_TWO: + default: + res = "32"; + break; + } + return res; + } +} diff --git a/src/main/java/org/rapidprom/util/ObjectUtils.java b/src/main/java/org/rapidprom/util/ObjectUtils.java new file mode 100644 index 0000000..0841d22 --- /dev/null +++ b/src/main/java/org/rapidprom/util/ObjectUtils.java @@ -0,0 +1,13 @@ +package org.rapidprom.util; + +public class ObjectUtils { + + public static String[] toString(T[] t) { + String[] str = new String[t.length]; + for (int i = 0; i < t.length; i++) { + str[i] = t[i].toString(); + } + return str; + } + +} diff --git a/src/main/java/org/rapidprom/util/RapidMinerUtils.java b/src/main/java/org/rapidprom/util/RapidMinerUtils.java new file mode 100644 index 0000000..5ee1492 --- /dev/null +++ b/src/main/java/org/rapidprom/util/RapidMinerUtils.java @@ -0,0 +1,22 @@ +package org.rapidprom.util; + +import org.rapidprom.properties.RapidProMProperties; + +import com.rapidminer.tools.plugin.Plugin; + +public class RapidMinerUtils { + + private static Plugin plugin = null; + + public static Plugin getRapidProMPlugin() { + if (plugin == null) { + for (Plugin plugin : Plugin.getAllPlugins()) { + if (plugin.getName().equals(RapidProMProperties.instance().getExtensionName())) { + return plugin; + } + } + } + return plugin; + } + +} diff --git a/src/main/java/org/rapidprom/util/XLogUtils.java b/src/main/java/org/rapidprom/util/XLogUtils.java new file mode 100644 index 0000000..465827d --- /dev/null +++ b/src/main/java/org/rapidprom/util/XLogUtils.java @@ -0,0 +1,262 @@ +package org.rapidprom.util; + +import java.util.ArrayList; +import 
java.util.Arrays; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.TreeSet; +import java.util.Map.Entry; + +import javax.swing.table.AbstractTableModel; + +import org.deckfour.xes.model.XAttribute; +import org.deckfour.xes.model.XAttributeBoolean; +import org.deckfour.xes.model.XAttributeContinuous; +import org.deckfour.xes.model.XAttributeDiscrete; +import org.deckfour.xes.model.XAttributeLiteral; +import org.deckfour.xes.model.XAttributeTimestamp; +import org.deckfour.xes.model.XEvent; +import org.deckfour.xes.model.XLog; +import org.deckfour.xes.model.XTrace; + +public class XLogUtils { + + public enum AttributeTypes {BOOLEAN, DISCRETE, CONTINUOUS, STRING, DATE} + + public static TableModelXLog convertLogToStringTable (XLog log, boolean timeIsSeconds) throws Exception { + final List columnNames = new ArrayList(); + final List> values = new ArrayList>(); + // go through log for values + ColumnNamesLog columnNames2 = XLogUtils.getColumnNames(log); + columnNames.addAll(columnNames2.getAttribsTrace()); + columnNames.addAll(columnNames2.getAttribsEvents()); + // generate mapping from string to column number + Map mapping = new HashMap(); + for (int i=0; i> iterator = t.getAttributes().entrySet().iterator(); + while (iterator.hasNext()) { + Entry next = iterator.next(); + String key = next.getKey(); + XAttribute value = next.getValue(); + String stringFromAttribute = getStringFromAttribute(value); + // put at right position + Integer integer = mapping.get("T:" + key); + if (integer!=null) { + valuesTrace[integer] = stringFromAttribute; + // try to find type + AttributeTypes type = getType(value); + if (columnTypesArray[integer] == null) { + columnTypesArray[integer] = type; + } + else if (!columnTypesArray[integer].equals(type)) { + // something wrong, basically should not happen + throw new Exception("type is different!"); + } + if (timeIsSeconds && type.equals(AttributeTypes.DATE)) { + XAttributeTimestamp ts = (XAttributeTimestamp) value; + valuesTrace[integer] = Long.toString(ts.getValue().getTime()); + } + else { + valuesTrace[integer] = stringFromAttribute; + } + } + } + // now for event attributes + for (XEvent e : t) { + Iterator> iterator2 = e.getAttributes().entrySet().iterator(); + String[] valuesEvent = new String[columnNames.size()]; + // copy values from trace + for (int i=0; i next = iterator2.next(); + String key = next.getKey(); + XAttribute value = next.getValue(); + String stringFromAttribute = getStringFromAttribute(value); + // put at right position + Integer integer = mapping.get("E:" + key); + if (integer!=null) { + // check if is time + AttributeTypes type = getType(value); + // try to find type + if (columnTypesArray[integer] == null) { + columnTypesArray[integer] = type; + } + else if (!columnTypesArray[integer].equals(type)) { + // something wrong, basically should not happen + throw new Exception("type is different!"); + } + + if (timeIsSeconds && type.equals(AttributeTypes.DATE)) { + XAttributeTimestamp ts = (XAttributeTimestamp) value; + valuesEvent[integer] = Long.toString(ts.getValue().getTime()); + } + else { + valuesEvent[integer] = stringFromAttribute; + } + } + } + // check whether any value is null + for (int i=0; i asList = Arrays.asList(valuesEvent); + values.add(asList); + } + } + List columnTypes = Arrays.asList(columnTypesArray); + XLogUtils utilsInst = new XLogUtils(); + return utilsInst.new TableModelXLog(columnNames, values, columnTypes); + } + + public 
class TableModelXLog extends AbstractTableModel { + + /** + * generated + */ + private static final long serialVersionUID = -6973564987055884713L; + private List columnNames = new ArrayList(); + private List> values = new ArrayList>(); + private List columnTypes = new ArrayList(); + + public TableModelXLog(List columnNames, List> values, List columnTypes) { + this.columnNames = columnNames; + this.values = values; + this.columnTypes = columnTypes; + } + + @Override + public int getColumnCount() { + return columnNames.size(); + } + + @Override + public String getColumnName(int column) { + return columnNames.get(column); + } + + public int getNameForColumn(String name) { + for (int i=0; i attribsTrace = new TreeSet(); + Set attribsEvents = new TreeSet(); + for (XTrace t : log) { + Iterator> iterator = t.getAttributes().entrySet().iterator(); + while (iterator.hasNext()) { + Entry next = iterator.next(); + attribsTrace.add("T:" + next.getKey()); + } + for (XEvent e : t) { + Iterator> iterator2 = e.getAttributes().entrySet().iterator(); + while (iterator2.hasNext()) { + Entry next = iterator2.next(); + attribsEvents.add("E:" + next.getKey()); + } + } + } + return new XLogUtils().new ColumnNamesLog(attribsTrace, attribsEvents); + } + + public static String getStringFromAttribute (XAttribute value) { + String valueString = ""; + if (value instanceof XAttributeLiteral) { + XAttributeLiteral attribLit = (XAttributeLiteral) value; + valueString = attribLit.getValue(); + } + else if (value instanceof XAttributeBoolean) { + XAttributeBoolean attribBool = (XAttributeBoolean) value; + valueString = Boolean.toString(attribBool.getValue()); + } + else if (value instanceof XAttributeContinuous) { + XAttributeContinuous attribContin = (XAttributeContinuous) value; + valueString = Double.toString(attribContin.getValue()); + } + else if (value instanceof XAttributeDiscrete) { + XAttributeDiscrete attribDisc = (XAttributeDiscrete) value; + valueString = Long.toString(attribDisc.getValue()); + } + else if (value instanceof XAttributeTimestamp) { + XAttributeTimestamp attribTimestamp = (XAttributeTimestamp) value; + valueString = attribTimestamp.getValue().toString(); + } + return valueString; + } + + public static AttributeTypes getType(XAttribute attribute){ + if(attribute instanceof XAttributeBoolean) + return AttributeTypes.BOOLEAN; + else if(attribute instanceof XAttributeDiscrete) + return AttributeTypes.DISCRETE; + else if(attribute instanceof XAttributeContinuous) + return AttributeTypes.CONTINUOUS; + else if(attribute instanceof XAttributeTimestamp) + return AttributeTypes.DATE; + else + return AttributeTypes.STRING; + } + + public class ColumnNamesLog { + + private Set attribsTrace = null; + private Set attribsEvents = null; + + ColumnNamesLog (Set attribsTrace, Set attribsEvents) { + this.attribsTrace = attribsTrace; + this.attribsEvents = attribsEvents; + } + + public Set getAttribsTrace() { + return attribsTrace; + } + + public void setAttribsTrace(Set attribsTrace) { + this.attribsTrace = attribsTrace; + } + + public Set getAttribsEvents() { + return attribsEvents; + } + + public void setAttribsEvents(Set attribsEvents) { + this.attribsEvents = attribsEvents; + } + + } +} diff --git a/src/main/resources/META-INF/icon.png b/src/main/resources/META-INF/icon.png new file mode 100644 index 0000000..1bbfe6a Binary files /dev/null and b/src/main/resources/META-INF/icon.png differ diff --git a/src/main/resources/org/rapidprom/resources/OperatorsRapidProM.xml 
b/src/main/resources/org/rapidprom/resources/OperatorsRapidProM.xml new file mode 100644 index 0000000..3de9654 --- /dev/null +++ b/src/main/resources/org/rapidprom/resources/OperatorsRapidProM.xml @@ -0,0 +1,300 @@ + + + + + analyze_with_woflan + org.rapidprom.operators.analysis.WoflanAnalysisOperator + magnifying_glass.png + + + show_pompom_view + org.rapidprom.operators.analysis.ShowPomPomViewOperator + magnifying_glass.png + + + animate_event_log_in_fuzzy_instance + org.rapidprom.operators.analysis.AnimateLogInFuzzyModelAnalysisOperator + magnifying_glass.png + + + repair_model + org.rapidprom.operators.analysis.RepairModelOperator + magnifying_glass.png + + + reduce_silent_transitions + org.rapidprom.operators.analysis.ReduceSilentTransitionsOperator + magnifying_glass.png + + + perform_predictions_of_business_process_features + org.rapidprom.operators.analysis.FeaturePredictionAnalysisOperator + magnifying_glass.png + + + mine_with_inductive_visual_miner + org.rapidprom.operators.analysis.InductiveVisualMinerOperator + magnifying_glass.png + + + case_data_extractor + org.rapidprom.operators.analysis.CaseDataExtractorOperator + magnifying_glass.png + + + + + replay_a_log_on_petri_net_for_conformance_analysis + org.rapidprom.operators.conformance.ConformanceAnalysisOperator + checks.png + + + measure_precision + org.rapidprom.operators.conformance.MeasurePrecisionAnalysisOperator + checks.png + + + replay_a_log_on_petri_net_for_performance_conformance_analysis + org.rapidprom.operators.conformance.PerformanceConformanceAnalysisOperator + checks.png + + + etc_1_align_precision + org.rapidprom.operators.conformance.ETCPrecisionOperator + checks.png + + + + + convert_petrinet_to_bpmn + org.rapidprom.operators.conversion.PetriNetToBpmnConversionOperator + objects_transform.png + + + transition_system_to_petrinet + org.rapidprom.operators.conversion.TransitionSystemtoPetriNetConversionOperator + objects_transform.png + + + construct_reachability_graph_of_a_petri_net + org.rapidprom.operators.conversion.PetriNetToReachabilityGraphConversionOperator + objects_transform.png + + + xlog_to_exampleset + org.rapidprom.operators.conversion.XLogToExampleSetConversionOperator + objects_transform.png + + + exampleset_to_xlog + org.rapidprom.operators.conversion.ExampleSetToXLogConversionOperator + objects_transform.png + + + convert_heuristics_net_into_petri_net + org.rapidprom.operators.conversion.HeuristicNetToPetriNetConversionOperator + objects_transform.png + + + convert_process_tree_to_petri_net + org.rapidprom.operators.conversion.ProcessTreeToPetriNetConversionOperator + objects_transform.png + + + + + alpha_miner + org.rapidprom.operators.discovery.AlphaMinerOperator + hardhat.png + + + ilp_miner + org.rapidprom.operators.discovery.ILPMinerOperator + hardhat.png + + + etm_miner + org.rapidprom.operators.discovery.ETMdMinerOperator + hardhat.png + + + heuristics_miner + org.rapidprom.operators.discovery.HeuristicsMinerOperator + hardhat.png + + + mine_petri_net_with_inductive_miner + org.rapidprom.operators.discovery.InductiveMinerPNOperator + hardhat.png + + + mine_process_tree_with_inductive_miner + org.rapidprom.operators.discovery.InductiveMinerPTOperator + hardhat.png + + + social_network_miner + org.rapidprom.operators.discovery.SocialNetworkMinerOperator + hardhat.png + + + mine_transition_system + org.rapidprom.operators.discovery.TransitionSystemMinerOperator + hardhat.png + + + mine_fuzzy_model + org.rapidprom.operators.discovery.FuzzyMinerOperator + hardhat.png + + + + + + 
export_petri_net + org.rapidprom.operators.io.ExportPetriNetOperator + inbox_out.png + + + export_log + org.rapidprom.operators.io.ExportXLogOperator + inbox_out.png + + + + + extract_log_from_file + org.rapidprom.operators.extract.ExtractXLogOperator + box_out.png + + + import_accepting_petri_net + org.rapidprom.operators.io.ImportAcceptingPetriNetOperator + inbox_into.png + + + import_xlog + org.rapidprom.operators.io.ImportXLogOperator + inbox_into.png + + + import_cpn + org.rapidprom.operators.io.ImportCPNModelOperator + inbox_into.png + + + import_petri_net + org.rapidprom.operators.io.ImportPetriNetOperator + inbox_into.png + + + + + add_noise_log + org.rapidprom.operators.logmanipulation.AddNoiseOperator + + + add_classifier_log + org.rapidprom.operators.logmanipulation.AddClassifierOperator + + + add_artificial_start_end_event_filter + org.rapidprom.operators.logmanipulation.AddArtificialStartEndEventOperator + + + add_trace_attributes_to_log + org.rapidprom.operators.logmanipulation.AddTraceAttributesToLogOperator + + + merge_two_event_logs + org.rapidprom.operators.logmanipulation.MergeTwoEventLogsOperator + + + add_event_attributes_to_log + org.rapidprom.operators.logmanipulation.AddEventAttributesToLogOperator + + + add_events_to_log + org.rapidprom.operators.logmanipulation.AddEventsToLogOperator + + + timestamp_sort + org.rapidprom.operators.logmanipulation.TimestampSortOperator + + + + + + event_stream_analysis_proj_rec_prec_apn + org.rapidprom.operators.streams.analysis.ProjRecPrecAPNStreamAnalyzerOperator + + + event_stream_analysis_alignments_apn + org.rapidprom.operators.streams.analysis.AlignmentAPNAnalyzerOperator + + + + + stream_alpha_miner_apn + org.rapidprom.operators.streams.discovery.StreamAlphaMinerAcceptingPNOperator + + + stream_inductive_miner_apn + org.rapidprom.operators.streams.discovery.StreamInductiveMinerAcceptingPNOperator + + + stream_inductive_miner_pt + org.rapidprom.operators.streams.discovery.StreamInductiveMinerProcessTreeOperator + + + + + export_static_event_stream + org.rapidprom.operators.io.ExportXSStaticXSEventStream + + + + + extract_static_event_stream_from_event_stream + org.rapidprom.operators.streams.extract.EventStreamToStaticEventStream + + + + + import_static_event_stream + org.rapidprom.operators.io.ImportXSStaticXSEventStreamOperator + + + + + stream_from_cpn + org.rapidprom.operators.streams.generators.CPNToEventStreamOperator + + + stream_from_static_event_stream + org.rapidprom.operators.streams.generators.StaticEventStreamToEventStreamOperator + + + + + poll_xsrunnable + org.rapidprom.operators.streams.util.PollXSRunnableOperator + + + start_xsrunnable + org.rapidprom.operators.streams.util.StartXSRunnableOperator + + + stop_xsrunnable + org.rapidprom.operators.streams.util.StopXSRunnableOperator + + + + + + rprom_free_memory + org.rapidprom.operators.util.RapidProMMemoryCleanUpOperator + wrench.png + + + diff --git a/src/main/resources/org/rapidprom/resources/groupsRapidProM.properties b/src/main/resources/org/rapidprom/resources/groupsRapidProM.properties new file mode 100644 index 0000000..b00ba09 --- /dev/null +++ b/src/main/resources/org/rapidprom/resources/groupsRapidProM.properties @@ -0,0 +1,7 @@ +# Use this file to specify colors of Operator groups and IOObjects + +# Operator group colors +#group.example_group.color = #ffffff + +# IOObjects +#io.org.rapidprom.ioobjects.ExampleIOObject.color = #ffffff diff --git a/src/main/resources/org/rapidprom/resources/i18n/ErrorsRapidProM.properties 
b/src/main/resources/org/rapidprom/resources/i18n/ErrorsRapidProM.properties new file mode 100644 index 0000000..84df379 --- /dev/null +++ b/src/main/resources/org/rapidprom/resources/i18n/ErrorsRapidProM.properties @@ -0,0 +1 @@ +# File used for error message I18N diff --git a/src/main/resources/org/rapidprom/resources/i18n/GUIRapidProM.properties b/src/main/resources/org/rapidprom/resources/i18n/GUIRapidProM.properties new file mode 100644 index 0000000..aca3278 --- /dev/null +++ b/src/main/resources/org/rapidprom/resources/i18n/GUIRapidProM.properties @@ -0,0 +1,5 @@ +# Used for I18N of GUI properties + +#gui.label.rapidprom.example.label = Example button! +#gui.label.rapidprom.example.tip = Tooltip example for the button. +#gui.label.rapidprom.example.icon = myIcon.png diff --git a/src/main/resources/org/rapidprom/resources/i18n/OperatorsDocRapidProM.xml b/src/main/resources/org/rapidprom/resources/i18n/OperatorsDocRapidProM.xml new file mode 100644 index 0000000..0499126 --- /dev/null +++ b/src/main/resources/org/rapidprom/resources/i18n/OperatorsDocRapidProM.xml @@ -0,0 +1,470 @@ + + + + + + + analysis + Analysis + + + + conformance + Conformance + + + + conversion + Conversion + + + + discovery + Discovery + + + + import + Import + + + + export + Export + + + + log_manipulation + Log Manipulation + + + + streams + Streams + + + + utility + Utility + + + + + + Calculate ETC 1-align precision + Calculate precision using 1-align ETC conformance + + etc_1_align_precision + + + Measure Model Precision based on alignments + Measures model precision and generalization based on pre-calculated alignments, as defined in: http://dx.doi.org/10.6100/IR770080 + For this operator, you need alignments (PNRepResult) as input. + measure_precision + + + Mine a sequential model + DO NOT USE THIS OPERATOR (Experimental) + + sequence_miner + + + + Import CPN Model + Import CPN Model + This operator imports a Coloured Petri net (CPN). These can be obtained from other tools such as CPN Tools. + import_cpn + + + Import Static Event Stream + Import a static event stream + Allows for importing a static event stream. Static event streams allow us to do repeated experiments using the same event stream. + import_static_event_stream + + + Add Source and Sink to Petri net + This operator creates a single artificial source place and a single artificial sink place in a Petri net + + add_source_sink + + + ActivityList to Petri net + This operator creates a sequential Petri net based on an activity name list extracted from a data table + + name_to_petrinet + + + Soundness analysis (WOFLAN) + This operator analyzes the soundness of a Petri net using the Woflan workflow diagnosis tool, as defined in: http://dx.doi.org/10.1007/3-540-44988-4_28 + For using this operator, you need a Petri net. + analyze_with_woflan + + + Feature Prediction (Decision Trees) + This operator performs Predictions of Business Process Features, as defined in http://dx.doi.org/10.1007/978-3-319-10172-9_16 + For using this operator, you need an event log. The actual tool will be launched when visualized. + perform_predictions_of_business_process_features + + + Conformance Checking + This operator replays a log on a Petri net and shows visual results, as described in http://dx.doi.org/10.6100/IR770080 + For using this operator, you need a Petri net and a log. 
+ replay_a_log_on_petri_net_for_conformance_analysis + + + Inductive Visual Miner + This operator creates a process model using the Inductive Visual Miner as described in: http://repository.tue.nl/25a7fd81-8bf4-4606-a903-d9b78b95cdd3 + For using this operator, you need an event log. + mine_with_inductive_visual_miner + + + Visualize Model using PomPom (visualization) + This operator shows a PomPom view over a Petri net. The PomPom operator visualizes a Petri net/EPC that emphasizes those parts of the process that correspond to high-frequent events in a given log. The visualizer will attempt to reduce low-frequent transitions/functions as much as possible. + For using this operator, you need a Petri net and a log. + show_pompom_view + + + Discover Process Data Flow (Decision Tree) + This operator transforms a Petri net into a Decision tree + For using this operator, you need a Petri net and a log. + discovery_of_the_process_data-flow_(decision-tree_miner) + + + Petri Net to BPMN + This operator transforms a Petri net into a BPMN diagram. The conversion used is described in http://dx.doi.org/10.1007/s10270-015-0502-0 + For using this operator, you need a Petri net as input. + convert_petrinet_to_bpmn + + + Transition System to Petri Net + This operator transforms a Transition System (Reachability Graph) into a Petri net, using Petrify: http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.17.8484 + For using this operator, you need a transition system (reachability graph). + transition_system_to_petrinet + + + Petri Net to Reachability Graph + This operator transforms a Petri net into a Transition System (Reachability Graph), using Petrify: http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.17.8484 + For using this operator, you need a Petri net. + construct_reachability_graph_of_a_petri_net + + + + Import Accepting Petri Net + Import an Accepting Petri Net + This operator imports an accepting Petri net (a Petri net with initial and final markings). + import_accepting_petri_net + + + + Import Petri Net + This operator is used to import a Petri net in PNML format + This operator can be used to read a PNML file. PNML files that are accepted are the ones that are produced by ProM itself or the ones that are produced by Yasper. As input a PNML file is needed. + import_petri_net + + + + Animate Log in Fuzzy Graph + This operator is used to animate (replay) an event log in a fuzzy graph as described in: http://dx.doi.org/10.6100/IR644335 + For using this operator, an event log and a Fuzzy Graph are necessary as inputs. + animate_event_log_in_fuzzy_instance + + + + Import Event Log + This operator can be used to import a ProM XES event log from a file. + This operator can be used to import a ProM XES event log file. + Note that only .xes files can be read. + import_xlog + + + + Extract Event Log + This operator can be used to read a ProM XES log file from a file object (useful for loops over file collections) + This operator can be used to read a ProM XES log file + extract_log_from_file + + + Sort chronologically + This operator sorts traces and events within traces by timestamp (increasing). + This operator sorts traces and events within traces by timestamp. + timestamp_sort + + + + Alpha Miner (Petri Net) + This operator can be used to execute the Alpha algorithm. + This operator can be used to execute the Alpha algorithm which provides a Petri Net and a Marking. As input a log file is needed. 
+ alpha_miner + + + + ILP Miner (Petri Net) + This operator can be used to execute the ILP Miner discovery algorithm. + This operator can be used to execute the ILP Miner algorithm which provides a Petri Net and a marking. As input a log file is needed. + ilp_miner + + + + Genetic Miner (Heuristics Net) + This operator can be used to execute the Genetic Miner discovery algorithm. + This operator can be used to execute the Genetic Miner algorithm which provides a Heuristics Net. As input a log file is needed. + genetic_miner + + + + Repair Model (Petri Net) + This operator can be used to execute the repair model algorithm as defined in: http://dx.doi.org/10.1016/j.is.2013.12.007 + This operator can be used to execute the repair model algorithm which provides a repaired Petri net as result. As input an event log and a Petri Net are needed. + repair_model + + + + Reduce Silent Transitions (Petri Net) + This operator can be used to reduce silent transitions using the Murata rules, as defined in: http://dx.doi.org/10.1109/5.24143. + This operator can be used to execute the reduce silent transitions algorithm which returns a Petri net. As input a Petri Net is needed. + reduce_silent_transitions + + + + Export Petri net + This operator can be used to export a Petri Net to a (E)PNML file. + This operator can be used to export a Petri Net to a (E)PNML file. As Petri Net is needed as an input. + export_petri_net + + + Export Event Log + This operator can be used to export an event log. + This operator can be used to export a event log. As log format XES, XES.GZ, MXML, and MXML.GZ are available. As input this operator needs an event log. + export_log + + + Export Static Event Stream + This operator can be used to export a static event stream. + + export_static_event_stream + + + Event Log to Data Table (Exampleset) + This operator can be used to convert a ProM log into an Exampleset (data table). + This operator can be used to convert a ProM log into an Exampleset. As input a log is needed. Each event of the log is converted into a single row in the Exampleset. Trace attributes are repeated in each row that is created for an event of that trace. + xlog_to_exampleset + + + + Data Table to Event Log (XLog) + This operator can be used to convert an Exampleset (data table) into an ProM log. + This operator can be used to convert an Exampleset into an ProM log. As input an Exampleset is needed. Each row of the exampleset is converted into a event belonging to a trace. For the conversion it is important that the following columns exist: T:concept:name (the concept name attribute of the trace), E:concept:name (the concept name attribute of the event), E:lifecycle:transition (the life cycle attribute of the event), E:org:resource (the resource attribute of the event), E:org:role (the role attribute of the event), E:org:group (the group attribute of the event), and E:time:time (the time attribute of the event). + exampleset_to_xlog + + + + Heuristics Net to Petri Net + This operator can be used to convert a Heuristics Net into a Petri Net. + This operator can be used to convert a Heuristics Net into a Petri Net. As input a Heuristics Net is needed. + convert_heuristics_net_into_petri_net + + + + Evolutionary Tree Miner (Process Tree) + Discovers a Process Tree from an event log using the Evolutionary Tree Miner described in: http://dx.doi.org/10.6100/IR780920. The Evolutionary Tree Miner (or ETM) is an evolutionary algorithm to discover a process model (in our case a process tree) from an event log. 
It does so by creating several process models, checking how good they are (w.r.t the event log) using several quality dimensions, and then slightly modifying the best ones in the hope that they will become even better. By repeating this, and always keeping a list of the best ones found so far, the ETM will discover better and better process models. The main advantage is that many quality dimensions can be used to define what a ‘better’ process model is. + Mine a Process Using the ETM Algorithm from an event log. + etm_miner + + + + Process Tree to Petri Net + Convert a Process Tree into a Petri Net. + Convert a Process Tree into a Petri Net. + convert_process_tree_to_petri_net + + + + Analyze Performance (Manifest) + A log is replayed on a petri net to get performance information. This operator projects performance measurements in a process model such as bottlenecks. + A log is replayed on a petri net to get performance and conformance information. + replay_a_log_on_petri_net_for_performance_conformance_analysis + + + + Heuristics Miner (Heuristics Net) + This operator can be used to execute the Heuristics Miner algorithm. + This operator can be used to execute the Heuristics Miner algorithm (defined in http://dx.doi.org/10.1109/CIDM.2011.5949453) which provides a Heuristics Net. As input a log file is needed. + heuristics_miner + + + + Inductive Miner (Petri Net) + This operator can be used to execute the Inductive Miner in order to discover a Petri Net as defined in: http://dx.doi.org/10.1007/978-3-642-38697-8_17. + This operator can be used to execute the Inductive Miner algorithm which provides a Petri Net. As input an event log is needed. + mine_petri_net_with_inductive_miner + + + + Inductive Miner (Process Tree) + This operator can be used to execute the Inductive Miner in order to discover a Process Tree as defined in: http://dx.doi.org/10.1007/978-3-642-38697-8_17. + This operator can be used to execute the Inductive Miner algorithm which provides a Process Tree. As input an event log is needed. + mine_process_tree_with_inductive_miner + + + + Social Network Miner (Social Network) + This plugin reads an event log and generates social networks that can be used as a starting point for SNA. We can apply several techniques to analyze the social networks, e.g., find interaction patterns, evaluate the role of an individual in an organization, etc. + An event log is requires as input + social_network_miner + + + Add Noise + This filter randomly adds noise to the log. + This operator can be used to add specific noise to a log. Check the parameter descriptions for more info on how noise is added. An event log is required as input. + add_noise_log + + + + Add Table Column to XLog + This operator adds a column of an example set to an XLog. + This operator adds a column of an example set to an XLog. As input a log file and an example set is needed. + add_table_column_to_log + + + + + Add Trace Attributes to Log + This operator adds trace attributes to a log. + This operator adds trace attributes (that are contained in an exampleset) to a log. For each trace only one row in the exampleset exists. In the exampleset one row needs to contain the (unique) case identifiers of the traces. As input a log file and an exampleset is needed. + add_trace_attributes_to_log + + + + Add Event Attributes to Log + This operator adds event attributes to a log. + This operator adds event attributes (that are contained in an exampleset) to a log. For each event only one row in the exampleset exists. 
In the exampleset one column needs to contain the case identifiers of the traces and another column needs to contain the (unique) event identifiers. As input a log file and an exampleset are needed. + add_event_attributes_to_log + + + + Add Events to Log + This operator adds events to a log. + This operator adds events (that are contained in an exampleset) to a log. For each event only one row in the exampleset exists. In the exampleset, separate columns need to contain the following information: case identifiers, concept:name of the events, lifecycle:transition of the events, time:timestamp of the events, org:resource of the events, org:role of the events, org:group of the events. If a column with this information is not present, no value needs to be filled in. As input a log file and an exampleset are needed. + add_events_to_log + + + + Merge Event Logs + This operator merges two event logs. + This operator merges two event logs by collecting all the traces into a consolidated event log. As input two logs are needed. + merge_two_event_logs + + + + Add Classifier to Event Log + This operator adds an event classifier to an event log. An event classifier defines how events are identified. + An event log is needed as input. + add_classifier_log + + + + Add Artificial Start and End Event + This operator adds a start and end event to each trace. + This operator adds a start and end event to each trace. As input a log is needed. + add_artificial_start_end_event_filter + + + + Transition System Miner (Transition System) + This operator mines a transition system based on the selected parameters, as defined in (http://dx.doi.org/10.1016/j.is.2010.09.001). + A log is required as input. + mine_transition_system + + + + Fuzzy Miner (Fuzzy Model) + This operator mines a fuzzy model as described in (http://dx.doi.org/10.6100/IR644335). This miner is best suited for flexible or unstructured processes, where other miners give very complex models as a result. + A log is required as input. + mine_fuzzy_model + + + + + Generate Event Stream from CPN + Generate an event stream from a CPN model. + + stream_from_cpn + + + Generate Event Stream from Static Event Stream + Generate an event stream from a static event stream. + + stream_from_static_event_stream + + + Stream Alpha Miner (Accepting Petri Net) + Discover a Petri net from an event stream using a stream-based implementation of the Alpha Miner. + stream_alpha_miner_apn + + + + Stream Inductive Miner (Accepting Petri Net) + Discover a Petri net from an event stream using a stream-based implementation of the Inductive Miner. + + stream_inductive_miner_apn + + + Stream Inductive Miner (Process Tree) + Discover a process tree from an event stream using a stream-based implementation of the Inductive Miner. + + stream_inductive_miner_pt + + + Event Stream Algorithm Analyzer (Projected Recall Precision, Accepting Petri Net) + Calculates model-to-model similarity for stream-based discovery algorithms. + Calculates model-to-model similarity of each generated model of each connected algorithm at each point in time w.r.t. a collection of reference models. The reported metrics are: recall and precision. + event_stream_analysis_proj_rec_prec_apn + + + Event Stream Algorithm Analyzer (Alignments, Accepting Petri Net) + Calculates alignments for stream-based discovery algorithms. + Calculates alignments of each generated model of each connected algorithm at each point in time w.r.t. a collection of reference event logs. 
The reported metrics are: replay-fitness, precision, and generalization. + event_stream_analysis_alignments_apn + + + Extract Static Event Stream + This operator allows for extracting a static event stream from a live event stream. + This operator allows for extracting a static event stream from a live event stream. The static event stream can be used repeatedly (i.e. for repeated experiments). The user needs to specify how many packets should be captured by the extractor. The primary input is an (inactive) event stream. As a secondary, optional, input an author object can be given. If the author has not been started yet, the operator will start the underlying author. + extract_static_event_stream_from_event_stream + + + Start Runnable + This operator allows starting a runnable entity from the stream framework (e.g., a generator, a stream algorithm, etc.) + This operator allows starting a runnable entity from the stream framework (e.g., a generator, a stream algorithm, etc.). It takes a runnable entity as an input and tries to start it. Optionally one can add multiple input objects such that starting the generator is postponed until these objects are ready. If the objects themselves are runnable, the operator will try to start them. Using this operator makes it possible to use stream-based operators within a loop environment. + start_xsrunnable + + + Stop Runnable + This operator allows stopping a runnable entity from the stream framework (e.g., a generator, a stream algorithm, etc.) + This operator allows stopping a runnable entity from the stream framework (e.g., a generator, a stream algorithm, etc.). It takes a runnable entity as an input and tries to stop it. Optionally one can add multiple input objects such that stopping the generator is postponed until these objects are ready. If the objects themselves are runnable, the operator will try to stop them. Using this operator makes it possible to use stream-based operators within a loop environment. + stop_xsrunnable + + + Poll Runnable + This operator allows polling a runnable entity from the stream framework (e.g., a generator, a stream algorithm, etc.) + This operator allows polling a runnable entity from the stream framework (e.g., a generator, a stream algorithm, etc.). The operator checks whether the entity is still active. If the entity has become inactive, the operator will pass the entity on as an output object. This operator can be used within repeated experiments involving stream-related operators. + poll_xsrunnable + + + + Case Data Extractor + This operator extracts data, such as relative occurrence, from the traces contained in an event log. + An event log is required as input. + case_data_extractor + + + + + + Free Memory (RapidProM) + This operator tries to free the memory used by RapidProM (and also RapidMiner itself). + This operator tries to free the memory used by RapidProM (and also RapidMiner itself). It does so by clearing the Plugin Context object (which originates from the ProM framework, i.e., the base of RapidProM). + rprom_free_memory + + + \ No newline at end of file diff --git a/src/main/resources/org/rapidprom/resources/i18n/SettingsRapidProM.properties b/src/main/resources/org/rapidprom/resources/i18n/SettingsRapidProM.properties new file mode 100644 index 0000000..d009ee9 --- /dev/null +++ b/src/main/resources/org/rapidprom/resources/i18n/SettingsRapidProM.properties @@ -0,0 +1,7 @@ +#This file is used to add preferences dialog setting names/descriptions. 
+ +#example_preferences_group.title = Examples +#example_preferences_group.description = + +#example.property.title = Do nothing? +#example.property.description = Controls whether nothing is done in this example. diff --git a/src/main/resources/org/rapidprom/resources/i18n/UserErrorMessagesRapidProM.properties b/src/main/resources/org/rapidprom/resources/i18n/UserErrorMessagesRapidProM.properties new file mode 100644 index 0000000..8b63818 --- /dev/null +++ b/src/main/resources/org/rapidprom/resources/i18n/UserErrorMessagesRapidProM.properties @@ -0,0 +1,6 @@ +# This file is used to define Operator UserError messages which are displayed when the code in an +# Operator detects wrong parameter settings or otherwise encounters a problem. + +#error.rapidprom.example_error.name = This is an example error name. +#error.rapidprom.example_error.short = This is a short description of the error with a parameter {0}. +#error.rapidprom.example_error.long = This is the long description of the error. diff --git a/src/main/resources/org/rapidprom/resources/ioobjectsRapidProM.xml b/src/main/resources/org/rapidprom/resources/ioobjectsRapidProM.xml new file mode 100644 index 0000000..6102f4d --- /dev/null +++ b/src/main/resources/org/rapidprom/resources/ioobjectsRapidProM.xml @@ -0,0 +1,182 @@ + + + org.rapidprom.ioobjectrenderers.AcceptingPetriNetIOObjectRenderer + + + org.rapidprom.ioobjectrenderers.CPNModelIOObjectRenderer + + + org.rapidprom.ioobjectrenderers.WoflanDiagnosisIOObjectRenderer + + + org.rapidprom.ioobjectrenderers.BPMNIOObjectRenderer + + + org.rapidprom.ioobjectrenderers.PredictorIOObjectRenderer + + + org.rapidprom.ioobjectrenderers.PomPomViewIOObjectRenderer + + + org.rapidprom.ioobjectrenderers.PNRepResultIOObjectModelRenderer + org.rapidprom.ioobjectrenderers.PNRepResultIOObjectLogRenderer + + + org.rapidprom.ioobjectrenderers.InteractiveMinerLauncherIOObjectRenderer + + + org.rapidprom.ioobjectrenderers.ReachabilityGraphIOObjectRenderer + + + org.rapidprom.ioobjectrenderers.FuzzyAnimationIOObjectRenderer + + + org.rapidprom.ioobjectrenderers.XLogIOObjectDefaultRenderer + org.rapidprom.ioobjectrenderers.XLogIOObjectNewDottedChartRenderer + org.rapidprom.ioobjectrenderers.XLogIOObjectOldDottedChartRenderer + org.rapidprom.ioobjectrenderers.XLogIOObjectExampleSetRenderer + + + + org.rapidprom.ioobjectrenderers.PetriNetIOObjectRenderer + + + + + + + org.rapidprom.ioobjectrenderers.HeuristicsNetIOObjectRenderer + + + + org.rapidprom.ioobjectrenderers.ProcessTreeIOObjectDefaultRenderer + org.rapidprom.ioobjectrenderers.ProcessTreeIOObjectDotRenderer + + + + org.rapidprom.ioobjectrenderers.ManifestIOObjectRenderer + + + + org.rapidprom.ioobjectrenderers.SocialNetworkIOObjectRenderer + + + + org.rapidprom.ioobjectrenderers.TransitionSystemIOObjectRenderer + + + + org.rapidprom.ioobjectrenderers.MetricsRepositoryIOObjectRenderer + + + + + org.rapidprom.ioobjectrenderers.streams.XSAuthorIOObjectRenderer + + + org.rapidprom.ioobjectrenderers.streams.event.XSEventStreamIOObjectRenderer + + + org.rapidprom.ioobjectrenderers.streams.XSReaderIOObjectRenderer + + + org.rapidprom.ioobjectrenderers.streams.XSReaderIOObjectRenderer + + + org.rapidprom.ioobjectrenderers.streams.XSReaderIOObjectRenderer + + + org.rapidprom.ioobjectrenderers.streams.XSStreamAnalyzerIOObjectRenderer + + + + + + + + + + + diff --git a/src/main/resources/org/rapidprom/resources/parserulesRapidProM.xml b/src/main/resources/org/rapidprom/resources/parserulesRapidProM.xml new file mode 100644 index 0000000..470e32e --- 
/dev/null +++ b/src/main/resources/org/rapidprom/resources/parserulesRapidProM.xml @@ -0,0 +1,5 @@ + + + + diff --git a/src/main/resources/org/rapidprom/resources/rapidprom.properties b/src/main/resources/org/rapidprom/resources/rapidprom.properties new file mode 100644 index 0000000..552a626 --- /dev/null +++ b/src/main/resources/org/rapidprom/resources/rapidprom.properties @@ -0,0 +1,2 @@ +extension.name=RapidProM +deployment=development \ No newline at end of file diff --git a/src/main/resources/org/rapidprom/resources/settingsRapidProM.xml b/src/main/resources/org/rapidprom/resources/settingsRapidProM.xml new file mode 100644 index 0000000..e52f524 --- /dev/null +++ b/src/main/resources/org/rapidprom/resources/settingsRapidProM.xml @@ -0,0 +1,10 @@ + + + + + diff --git a/src/main/resources/rapidprom/example_group/example_operator_key.xml b/src/main/resources/rapidprom/example_group/example_operator_key.xml new file mode 100644 index 0000000..f4cab70 --- /dev/null +++ b/src/main/resources/rapidprom/example_group/example_operator_key.xml @@ -0,0 +1,72 @@ + + + + + + Example Operator + This is an example Operator. It does nothing when being executed. + + + First paragraph. + Second paragraph. + + + + Input port description. + + + + + Output port description. + + + + + + + Description here + + + Description here + + + Description here + + + + + + + + Tutorial description here. + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/test-processes/README b/test-processes/README new file mode 100644 index 0000000..495a2d8 --- /dev/null +++ b/test-processes/README @@ -0,0 +1 @@ +Put test processes for new operators into this folder. Processes with a name that contains 'NOTEST' will be skipped.
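The operator descriptions above for "Data Table to Event Log (XLog)" and "Add Events to Log" rely on the standard XES attribute names (concept:name, lifecycle:transition, time:timestamp, org:resource, org:role, org:group). The snippet below is a rough, illustrative sketch, not the extension's actual conversion code, of how values from such columns are typically attached to traces and events through the public OpenXES API that the bundled ProM framework libraries provide; the class name and example values are made up.

// Illustrative sketch only: building an XLog with the standard XES extensions
// that the "Data Table to Event Log" / "Add Events to Log" descriptions refer to.
// Assumes OpenXES is on the classpath (it ships with the ProM framework libraries).
import java.util.Date;

import org.deckfour.xes.extension.std.XConceptExtension;
import org.deckfour.xes.extension.std.XLifecycleExtension;
import org.deckfour.xes.extension.std.XOrganizationalExtension;
import org.deckfour.xes.extension.std.XTimeExtension;
import org.deckfour.xes.factory.XFactory;
import org.deckfour.xes.factory.XFactoryRegistry;
import org.deckfour.xes.model.XEvent;
import org.deckfour.xes.model.XLog;
import org.deckfour.xes.model.XTrace;

public class XLogConversionSketch {

    public static XLog buildExampleLog() {
        XFactory factory = XFactoryRegistry.instance().currentDefault();
        XLog log = factory.createLog();

        // One trace per distinct case identifier (the T:concept:name column).
        XTrace trace = factory.createTrace();
        XConceptExtension.instance().assignName(trace, "case-42");

        // One event per row; the E:* columns map onto the standard XES extensions.
        XEvent event = factory.createEvent();
        XConceptExtension.instance().assignName(event, "Approve request");    // E:concept:name
        XLifecycleExtension.instance().assignStandardTransition(event,
                XLifecycleExtension.StandardModel.COMPLETE);                  // E:lifecycle:transition
        XOrganizationalExtension.instance().assignResource(event, "alice");   // E:org:resource
        XOrganizationalExtension.instance().assignRole(event, "manager");     // E:org:role
        XOrganizationalExtension.instance().assignGroup(event, "purchasing"); // E:org:group
        XTimeExtension.instance().assignTimestamp(event, new Date());         // E:time:time (XES time:timestamp)

        trace.add(event);
        log.add(trace);
        return log;
    }
}

A log built this way can then be written out with the "Export Event Log" operator or fed into any of the discovery operators documented above.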