[ONOS-7732] Automating switch workflow

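Adds workflow CLI commands (workflow, workflow-test, workflow-eventmap,
workflowstore, workplace) along with distributed stores for workplaces,
workflow contexts and context event maps.

Example CLI usage (a sketch; it assumes sample workflows such as
sample.workflow-0, referenced by workflow-test, are registered in the
running system):

    onos> workplace add wp1
    onos> workflow invoke sample.workflow-0@wp1
    onos> workflow eval sample.workflow-0@wp1
    onos> workplace print wp1
    onos> workflow-eventmap print
    onos> workflowstore
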
Change-Id: Ie047c34df2278bc2220cade744f51ca6950f48f6
diff --git a/apps/workflow/app/BUCK b/apps/workflow/app/BUCK
new file mode 100644
index 0000000..bc7e9af
--- /dev/null
+++ b/apps/workflow/app/BUCK
@@ -0,0 +1,17 @@
+COMPILE_DEPS = [
+    '//lib:CORE_DEPS',
+    '//lib:KRYO',
+    '//lib:JACKSON',
+    '//lib:jsch',
+    '//lib:org.apache.karaf.shell.console',
+    '//lib:jackson-core',
+    '//lib:jackson-annotations',
+    '//lib:jackson-databind',
+    '//cli:onos-cli',
+    '//core/store/serializers:onos-core-serializers',
+    '//apps/workflow/api:onos-apps-workflow-api',
+]
+
+osgi_jar(
+    deps = COMPILE_DEPS,
+)
diff --git a/apps/workflow/app/BUILD b/apps/workflow/app/BUILD
new file mode 100644
index 0000000..d5af5a2
--- /dev/null
+++ b/apps/workflow/app/BUILD
@@ -0,0 +1,10 @@
+COMPILE_DEPS = CORE_DEPS + KRYO + JACKSON + CLI + [
+    "//core/store/serializers:onos-core-serializers",
+    "//core/store/primitives:onos-core-primitives",
+    "//apps/workflow/api:onos-apps-workflow-api",
+]
+
+osgi_jar(
+    karaf_command_packages = ["org.onosproject.workflow.cli"],
+    deps = COMPILE_DEPS,
+)
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowCommand.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowCommand.java
new file mode 100644
index 0000000..b0f4168
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowCommand.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.cli;
+
+import com.fasterxml.jackson.databind.node.JsonNodeFactory;
+import org.apache.karaf.shell.commands.Argument;
+import org.apache.karaf.shell.commands.Command;
+import org.onosproject.cli.AbstractShellCommand;
+import org.onosproject.workflow.api.DefaultWorkflowDescription;
+import org.onosproject.workflow.api.WorkflowContext;
+import org.onosproject.workflow.api.WorkflowExecutionService;
+import org.onosproject.workflow.api.WorkflowService;
+import org.onosproject.workflow.api.WorkflowException;
+import org.onosproject.workflow.api.WorkplaceStore;
+
+import java.util.Objects;
+
+@Command(scope = "onos", name = "workflow", description = "workflow cli")
+public class WorkFlowCommand extends AbstractShellCommand {
+
+    static final String INVOKE = "invoke";
+    static final String EVAL   = "eval";
+
+
+
+    @Argument(index = 0, name = "cmd",
+            description = "command(" + INVOKE + "|" + EVAL + ")",
+            required = true)
+    private String cmd = null;
+
+    @Argument(index = 1, name = "name",
+            description = "workflow context name(workflow@workplace)",
+            required = true)
+    private String name = null;
+
+    @Override
+    protected void execute() {
+        if (Objects.isNull(cmd)) {
+            error("invalid cmd parameter");
+            return;
+        }
+
+        if (Objects.isNull(name)) {
+            error("invalid workflow context name");
+            return;
+        }
+
+        String[] tokens = name.split("@");
+        if (tokens.length != 2) {
+            error("invalid workflow context name(workflow@workplace)");
+            return;
+        }
+
+        String workflowId = tokens[0];
+        String workplace = tokens[1];
+
+        switch (cmd) {
+            case INVOKE:
+                invoke(workflowId, workplace);
+                break;
+            case EVAL:
+                eval(name);
+                break;
+            default:
+                print("Unsupported cmd: " + cmd);
+        }
+    }
+
+    /**
+     * Invokes workflow.
+     * @param workflowId workflow id
+     * @param workplaceName workplace name
+     */
+    private void invoke(String workflowId, String workplaceName) {
+
+        WorkflowService service = get(WorkflowService.class);
+        try {
+            DefaultWorkflowDescription wfDesc = DefaultWorkflowDescription.builder()
+                    .workplaceName(workplaceName)
+                    .id(workflowId)
+                    .data(JsonNodeFactory.instance.objectNode())
+                    .build();
+
+            service.invokeWorkflow(wfDesc);
+        } catch (WorkflowException e) {
+            error("Exception: " + e.getMessage());
+        }
+    }
+
+    /**
+     * Evaluates workflow context.
+     * @param workflowContextName workflow context name
+     */
+    private void eval(String workflowContextName) {
+        WorkplaceStore storService = get(WorkplaceStore.class);
+        WorkflowExecutionService execService = get(WorkflowExecutionService.class);
+
+        WorkflowContext context = storService.getContext(workflowContextName);
+        if (context == null) {
+            error("failed to find workflow context " + workflowContextName);
+            return;
+        }
+        execService.eval(workflowContextName);
+    }
+
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowCompleter.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowCompleter.java
new file mode 100644
index 0000000..9460c03
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowCompleter.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2019-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.cli;
+
+import com.google.common.collect.ImmutableList;
+import org.onosproject.cli.AbstractChoicesCompleter;
+
+import java.util.List;
+
+import static org.onosproject.workflow.cli.WorkFlowCommand.EVAL;
+import static org.onosproject.workflow.cli.WorkFlowCommand.INVOKE;
+
+
+/**
+ * Workflow command completer.
+ */
+public class WorkFlowCompleter extends AbstractChoicesCompleter {
+    @Override
+    protected List<String> choices() {
+        return ImmutableList.of(INVOKE, EVAL);
+    }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowCtxtNameCompleter.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowCtxtNameCompleter.java
new file mode 100644
index 0000000..a8bd53a
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowCtxtNameCompleter.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2019-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.cli;
+
+import org.onosproject.cli.AbstractChoicesCompleter;
+import org.onosproject.workflow.api.WorkplaceStore;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+import static org.onlab.osgi.DefaultServiceDirectory.getService;
+
+/**
+ * Workflow context name completer.
+ */
+public class WorkFlowCtxtNameCompleter extends AbstractChoicesCompleter {
+    @Override
+    protected List<String> choices() {
+        WorkplaceStore workplaceStore = getService(WorkplaceStore.class);
+        return workplaceStore.getContexts().stream()
+                .map(context -> context.name())
+                .collect(Collectors.toList());
+    }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowEventMapCommand.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowEventMapCommand.java
new file mode 100644
index 0000000..8eb0749
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowEventMapCommand.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.cli;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.karaf.shell.commands.Argument;
+import org.apache.karaf.shell.commands.Command;
+import org.onosproject.cli.AbstractShellCommand;
+import org.onosproject.workflow.api.ContextEventMapStore;
+import org.onosproject.workflow.api.WorkflowException;
+
+import java.util.Arrays;
+import java.util.Objects;
+
+@Command(scope = "onos", name = "workflow-eventmap", description = "workflow event map cli")
+public class WorkFlowEventMapCommand extends AbstractShellCommand {
+
+    @Argument(index = 0, name = "cmd", description = "command(print)", required = true)
+    private String cmd = null;
+
+    @Override
+    protected void execute() {
+        if (Objects.isNull(cmd)) {
+            error("invalid cmd parameter");
+            return;
+        }
+
+        ContextEventMapStore store = get(ContextEventMapStore.class);
+        try {
+            switch (cmd) {
+                case "print":
+                    JsonNode tree = store.asJsonTree();
+                    ObjectMapper mapper = new ObjectMapper();
+                    try {
+                        print(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(tree));
+                    } catch (JsonProcessingException e) {
+                        error("Exception: " + e.getMessage() + ", trace: " + Arrays.asList(e.getStackTrace()));
+                    }
+                    break;
+
+                default:
+                    print("Unsupported cmd: " + cmd);
+            }
+        } catch (WorkflowException e) {
+            error(e.getMessage() + ", trace: " + Arrays.asList(e.getStackTrace()));
+        }
+    }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowIdCompleter.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowIdCompleter.java
new file mode 100644
index 0000000..0f56184
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowIdCompleter.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2019-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.cli;
+
+import org.onosproject.cli.AbstractChoicesCompleter;
+import org.onosproject.workflow.api.WorkflowStore;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+import static org.onlab.osgi.DefaultServiceDirectory.getService;
+
+/**
+ * Workflow ID completer.
+ */
+public class WorkFlowIdCompleter extends AbstractChoicesCompleter {
+    @Override
+    protected List<String> choices() {
+        WorkflowStore workflowStore = getService(WorkflowStore.class);
+        return workflowStore.getAll().stream()
+                .map(workflow -> workflow.id().toString())
+                .collect(Collectors.toList());
+    }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowStoreCommand.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowStoreCommand.java
new file mode 100644
index 0000000..dcf38a6
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowStoreCommand.java
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.cli;
+
+import org.apache.karaf.shell.commands.Argument;
+import org.apache.karaf.shell.commands.Command;
+import org.onosproject.cli.AbstractShellCommand;
+import org.onosproject.workflow.api.Workflow;
+import org.onosproject.workflow.api.WorkflowStore;
+
+import java.util.Objects;
+import java.net.URI;
+
+@Command(scope = "onos", name = "workflowstore", description = "workflow store cli")
+public class WorkFlowStoreCommand extends AbstractShellCommand {
+
+    static final String RM = "rm";
+
+    @Argument(index = 0, name = "cmd",
+            description = "command(" + RM + ")", required = false)
+    private String cmd = null;
+
+    @Argument(index = 1, name = "id",
+            description = "workflow id(URI)", required = false)
+    private String id = null;
+
+    @Override
+    protected void execute() {
+
+        if (Objects.isNull(cmd)) {
+            printAllWorkflow();
+            return;
+        }
+
+        if (Objects.isNull(id)) {
+            print("invalid id");
+            return;
+        }
+
+        switch (cmd) {
+            case RM:
+                rmWorkflow(id);
+                break;
+            default:
+                print("Unsupported cmd: " + cmd);
+        }
+    }
+
+    private void rmWorkflow(String id) {
+        WorkflowStore workflowStore = get(WorkflowStore.class);
+        workflowStore.unregister(URI.create(id));
+    }
+
+    private void printAllWorkflow() {
+        WorkflowStore workflowStore = get(WorkflowStore.class);
+        for (Workflow workflow : workflowStore.getAll()) {
+            print(getWorkflowString(workflow));
+        }
+    }
+
+    private String getWorkflowString(Workflow workflow) {
+        return workflow.toString();
+    }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowStoreCompleter.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowStoreCompleter.java
new file mode 100644
index 0000000..c6b425f
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowStoreCompleter.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2019-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.cli;
+
+import com.google.common.collect.ImmutableList;
+import org.onosproject.cli.AbstractChoicesCompleter;
+
+import java.util.List;
+
+import static org.onosproject.workflow.cli.WorkFlowStoreCommand.RM;
+
+/**
+ * Workflow Store command completer.
+ */
+public class WorkFlowStoreCompleter extends AbstractChoicesCompleter {
+    @Override
+    protected List<String> choices() {
+        return ImmutableList.of(RM);
+    }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowTestCommand.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowTestCommand.java
new file mode 100644
index 0000000..c882e07
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowTestCommand.java
@@ -0,0 +1,152 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.cli;
+
+import com.fasterxml.jackson.databind.node.JsonNodeFactory;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import org.apache.karaf.shell.commands.Argument;
+import org.apache.karaf.shell.commands.Command;
+import org.onosproject.cli.AbstractShellCommand;
+import org.onosproject.workflow.api.DefaultWorkflowDescription;
+import org.onosproject.workflow.api.WorkflowException;
+import org.onosproject.workflow.api.WorkflowService;
+
+import java.util.Arrays;
+import java.util.Objects;
+
+/**
+ * Workflow test command.
+ */
+@Command(scope = "onos", name = "workflow-test", description = "workflow test cli")
+public class WorkFlowTestCommand extends AbstractShellCommand {
+
+    static final String INVOKE_SAMPLE = "invoke-sample";
+    static final String EXCEPTION_SAMPLE = "exception-sample";
+
+    @Argument(index = 0, name = "test-name",
+            description = "Test name (" + INVOKE_SAMPLE + " | " + EXCEPTION_SAMPLE + ")",
+            required = true)
+    private String testName = null;
+
+    @Argument(index = 1, name = "arg1",
+            description = "number of tests for (" + INVOKE_SAMPLE + " | " + EXCEPTION_SAMPLE + ")",
+            required = false)
+    private String arg1 = null;
+
+    @Override
+    protected void execute() {
+        if (Objects.isNull(testName)) {
+            error("invalid test-name parameter");
+            return;
+        }
+
+        switch (testName) {
+            case INVOKE_SAMPLE:
+                if (Objects.isNull(arg1)) {
+                    error("arg1 is required for test " + INVOKE_SAMPLE);
+                    return;
+                }
+
+                int num;
+                try {
+                    num = Integer.parseInt(arg1);
+                } catch (NumberFormatException e) {
+                    error("arg1 should be an integer value");
+                    return;
+                } catch (Exception e) {
+                    error(e.getMessage() + ", trace: " + Arrays.asList(e.getStackTrace()));
+                    return;
+                }
+
+                invokeSampleTest(num);
+                break;
+
+            case EXCEPTION_SAMPLE:
+                if (Objects.isNull(arg1)) {
+                    error("arg1 is required for test " + EXCEPTION_SAMPLE);
+                    return;
+                }
+                int count;
+                try {
+                    count = Integer.parseInt(arg1);
+                } catch (NumberFormatException e) {
+                    error("arg1 should be an integer value");
+                    return;
+                } catch (Exception e) {
+                    error(e.getMessage() + ", trace: " + Arrays.asList(e.getStackTrace()));
+                    return;
+                }
+
+                invokeExceptionTest(count);
+                break;
+
+            default:
+                print("Unsupported test-name: " + testName);
+        }
+    }
+
+    /**
+     * Workflow invoke sample test.
+     *
+     * @param num the number of test workplaces on which to invoke the sample workflows
+     */
+    private void invokeSampleTest(int num) {
+        for (int i = 0; i <= num; i++) {
+            String wpName = "test_name-" + i;
+            invoke("sample.workflow-0", wpName);
+            invoke("sample.workflow-1", wpName);
+            invoke("sample.workflow-2", wpName);
+        }
+    }
+
+    /**
+     * Workflow datatype exception test.
+     *
+     * @param num the number of workflows to test
+     */
+    private void invokeExceptionTest(int num) {
+        for (int i = 0; i <= num; i++) {
+            String wpName = "test-" + i;
+            invoke("sample.workflow-3", wpName);
+        }
+    }
+
+
+    /**
+     * Invokes workflow.
+     *
+     * @param workflowId    workflow id
+     * @param workplaceName workplace name
+     */
+    private void invoke(String workflowId, String workplaceName) {
+
+        WorkflowService service = get(WorkflowService.class);
+
+        ObjectNode dataModel = JsonNodeFactory.instance.objectNode();
+        dataModel.put("count", 0);
+
+        try {
+            DefaultWorkflowDescription wfDesc = DefaultWorkflowDescription.builder()
+                    .workplaceName(workplaceName)
+                    .id(workflowId)
+                    .data(dataModel)
+                    .build();
+            service.invokeWorkflow(wfDesc);
+        } catch (WorkflowException e) {
+            error(e.getMessage() + ", trace: " + Arrays.asList(e.getStackTrace()));
+        }
+    }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowTestCompleter.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowTestCompleter.java
new file mode 100644
index 0000000..d1fb7f4
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowTestCompleter.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2019-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.cli;
+
+import com.google.common.collect.ImmutableList;
+import org.onosproject.cli.AbstractChoicesCompleter;
+
+import java.util.List;
+
+import static org.onosproject.workflow.cli.WorkFlowTestCommand.EXCEPTION_SAMPLE;
+import static org.onosproject.workflow.cli.WorkFlowTestCommand.INVOKE_SAMPLE;
+
+/**
+ * Workflow test command completer.
+ */
+public class WorkFlowTestCompleter extends AbstractChoicesCompleter {
+    @Override
+    protected List<String> choices() {
+        return ImmutableList.of(INVOKE_SAMPLE, EXCEPTION_SAMPLE);
+    }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkplaceNameCompleter.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkplaceNameCompleter.java
new file mode 100644
index 0000000..5dd2f23
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkplaceNameCompleter.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2019-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.cli;
+
+import org.onosproject.cli.AbstractChoicesCompleter;
+import org.onosproject.workflow.api.WorkplaceStore;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+import static org.onlab.osgi.DefaultServiceDirectory.getService;
+
+/**
+ * Workplace name completer.
+ */
+public class WorkplaceNameCompleter extends AbstractChoicesCompleter {
+    @Override
+    protected List<String> choices() {
+        WorkplaceStore workplaceStore = getService(WorkplaceStore.class);
+        return workplaceStore.getWorkplaces().stream()
+                .map(workplace -> workplace.name())
+                .collect(Collectors.toList());
+    }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkplaceStoreCommand.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkplaceStoreCommand.java
new file mode 100644
index 0000000..f8c2310
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkplaceStoreCommand.java
@@ -0,0 +1,208 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.cli;
+
+import org.apache.karaf.shell.commands.Argument;
+import org.apache.karaf.shell.commands.Command;
+import org.apache.karaf.shell.commands.Option;
+import org.onosproject.cli.AbstractShellCommand;
+import org.onosproject.workflow.api.WorkflowContext;
+import org.onosproject.workflow.api.WorkflowException;
+import org.onosproject.workflow.api.WorkflowService;
+import org.onosproject.workflow.api.Workplace;
+import org.onosproject.workflow.api.DefaultWorkplaceDescription;
+import org.onosproject.workflow.api.WorkplaceStore;
+
+import java.util.Arrays;
+import java.util.Objects;
+
+@Command(scope = "onos", name = "workplace",
+        description = "workplace cli")
+public class WorkplaceStoreCommand extends AbstractShellCommand {
+
+    static final String ADD   = "add";
+    static final String RM    = "rm";
+    static final String CLEAR = "clear";
+    static final String PRINT = "print";
+
+    @Argument(index = 0, name = "cmd",
+            description = "command(" + ADD + "/" + RM + "/" + CLEAR + "/" + PRINT + ")",
+            required = false)
+    private String cmd = null;
+
+    @Argument(index = 1, name = "name",
+            description = "workplace name",
+            required = false)
+    private String name = null;
+
+    @Option(name = "-f", aliases = "--filter", description = "including filter",
+            required = false, multiValued = false)
+    private String inFilter = null;
+
+    @Option(name = "-e", aliases = "--excludefilter", description = "excluding filter",
+            required = false, multiValued = false)
+    private String exFilter = null;
+
+    @Override
+    protected void execute() {
+
+        if (Objects.isNull(cmd)) {
+            printAllWorkplace();
+            return;
+        }
+
+        switch (cmd) {
+            case ADD:
+                if (Objects.isNull(name)) {
+                    error("invalid name");
+                    return;
+                }
+                addEmptyWorkplace(name);
+                return;
+
+            case RM:
+                if (Objects.isNull(name)) {
+                    print("invalid name");
+                    return;
+                }
+                rmWorkplace(name);
+                break;
+
+            case CLEAR:
+                clearWorkplace();
+                break;
+
+            case PRINT:
+                if (Objects.isNull(name)) {
+                    print("invalid name");
+                    return;
+                }
+                printWorkplace(name);
+                break;
+
+            default:
+                print("Unsupported cmd: " + cmd);
+        }
+    }
+
+    /**
+     * Adds empty workplace.
+     * @param name workplace name
+     */
+    private void addEmptyWorkplace(String name) {
+        WorkflowService service = get(WorkflowService.class);
+        try {
+            DefaultWorkplaceDescription wpDesc = DefaultWorkplaceDescription.builder()
+                    .name(name)
+                    .build();
+            service.createWorkplace(wpDesc);
+        } catch (WorkflowException e) {
+            error(e.getMessage() + ", trace: " + Arrays.asList(e.getStackTrace()));
+        }
+    }
+
+    /**
+     * Clears all workplaces.
+     */
+    private void clearWorkplace() {
+        WorkflowService service = get(WorkflowService.class);
+        try {
+            service.clearWorkplace();
+        } catch (WorkflowException e) {
+            error(e.getMessage() + ", trace: " + Arrays.asList(e.getStackTrace()));
+        }
+    }
+
+    /**
+     * Removes workplace.
+     * @param name workplace name to remove
+     */
+    private void rmWorkplace(String name) {
+        WorkflowService service = get(WorkflowService.class);
+        try {
+            DefaultWorkplaceDescription wpDesc = DefaultWorkplaceDescription.builder()
+                    .name(name)
+                    .build();
+            service.removeWorkplace(wpDesc);
+        } catch (WorkflowException e) {
+            error(e.getMessage() + ", trace: " + Arrays.asList(e.getStackTrace()));
+        }
+    }
+
+    /**
+     * Prints workplace.
+     * @param name workplace name
+     */
+    private void printWorkplace(String name) {
+        WorkplaceStore workplaceStore = get(WorkplaceStore.class);
+        Workplace workplace = workplaceStore.getWorkplace(name);
+        if (Objects.isNull(workplace)) {
+            print("No such workplace: " + name);
+            return;
+        }
+        print(getWorkplaceString(workplace));
+    }
+
+    /**
+     * Prints all workplaces.
+     */
+    private void printAllWorkplace() {
+        WorkplaceStore workplaceStore = get(WorkplaceStore.class);
+        for (Workplace workplace : workplaceStore.getWorkplaces()) {
+            print(getWorkplaceString(workplace));
+            printWorkplaceContexts(workplaceStore, workplace.name());
+        }
+    }
+
+    /**
+     * Prints contexts of workplace.
+     * @param workplaceStore workplace store service
+     * @param workplaceName workplace name
+     */
+    private void printWorkplaceContexts(WorkplaceStore workplaceStore, String workplaceName) {
+        for (WorkflowContext context : workplaceStore.getWorkplaceContexts(workplaceName)) {
+            String str = context.toString();
+            // Print the context only when it matches the including filter (if set)
+            // and does not match the excluding filter (if set).
+            boolean included = Objects.isNull(inFilter) || str.contains(inFilter);
+            boolean excluded = Objects.nonNull(exFilter) && str.contains(exFilter);
+            if (included && !excluded) {
+                print(" - " + context);
+            }
+        }
+    }
+
+    /**
+     * Gets workplace string.
+     * @param workplace workplace
+     * @return workplace string
+     */
+    private String getWorkplaceString(Workplace workplace) {
+        return workplace.toString();
+    }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkplaceStoreCompleter.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkplaceStoreCompleter.java
new file mode 100644
index 0000000..6654340
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkplaceStoreCompleter.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2019-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.cli;
+
+import com.google.common.collect.ImmutableList;
+import org.onosproject.cli.AbstractChoicesCompleter;
+
+import java.util.List;
+
+import static org.onosproject.workflow.cli.WorkplaceStoreCommand.ADD;
+import static org.onosproject.workflow.cli.WorkplaceStoreCommand.CLEAR;
+import static org.onosproject.workflow.cli.WorkplaceStoreCommand.PRINT;
+import static org.onosproject.workflow.cli.WorkplaceStoreCommand.RM;
+
+/**
+ * Workplace store command completer.
+ */
+public class WorkplaceStoreCompleter extends AbstractChoicesCompleter {
+    @Override
+    protected List<String> choices() {
+        return ImmutableList.of(ADD, RM, CLEAR, PRINT);
+    }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/package-info.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/package-info.java
new file mode 100644
index 0000000..0383d04
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Workflow cli package.
+ */
+package org.onosproject.workflow.cli;
\ No newline at end of file
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/DistributedContextEventMapTreeStore.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/DistributedContextEventMapTreeStore.java
new file mode 100644
index 0000000..9554cd8
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/DistributedContextEventMapTreeStore.java
@@ -0,0 +1,247 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.impl;
+
+import com.fasterxml.jackson.databind.node.JsonNodeFactory;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import org.apache.felix.scr.annotations.Activate;
+import org.apache.felix.scr.annotations.Component;
+import org.apache.felix.scr.annotations.Deactivate;
+import org.apache.felix.scr.annotations.Reference;
+import org.apache.felix.scr.annotations.ReferenceCardinality;
+import org.onosproject.store.service.EventuallyConsistentMap;
+import org.onosproject.store.service.WallClockTimestamp;
+import org.onlab.util.KryoNamespace;
+import org.onosproject.core.ApplicationId;
+import org.onosproject.core.CoreService;
+import org.onosproject.store.serializers.KryoNamespaces;
+import org.onosproject.store.service.AsyncDocumentTree;
+import org.onosproject.store.service.DocumentPath;
+import org.onosproject.store.service.IllegalDocumentModificationException;
+import org.onosproject.store.service.NoSuchDocumentPathException;
+import org.onosproject.store.service.Ordering;
+import org.onosproject.store.service.Serializer;
+import org.onosproject.store.service.StorageService;
+import org.onosproject.store.service.Versioned;
+import org.onosproject.workflow.api.ContextEventMapStore;
+import org.onosproject.workflow.api.WorkflowException;
+import org.slf4j.Logger;
+
+import java.util.Map;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.Set;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
+
+import static org.slf4j.LoggerFactory.getLogger;
+
+@Component(immediate = true)
+public class DistributedContextEventMapTreeStore implements ContextEventMapStore {
+
+    protected static final Logger log = getLogger(DistributedContextEventMapTreeStore.class);
+
+    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+    private CoreService coreService;
+
+    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+    private StorageService storageService;
+
+    private ApplicationId appId;
+
+    private AsyncDocumentTree<String> eventMapTree;
+
+    private EventuallyConsistentMap<String, Set<String>> hintSetPerCxtMap;
+
+
+    @Activate
+    public void activate() {
+
+        appId = coreService.registerApplication("org.onosproject.contexteventmapstore");
+        log.info("appId=" + appId);
+
+        KryoNamespace eventMapNamespace = KryoNamespace.newBuilder()
+                .register(KryoNamespaces.API)
+                .build();
+
+        eventMapTree = storageService.<String>documentTreeBuilder()
+                .withSerializer(Serializer.using(eventMapNamespace))
+                .withName("context-event-map-store")
+                .withOrdering(Ordering.INSERTION)
+                .buildDocumentTree();
+
+        hintSetPerCxtMap = storageService.<String, Set<String>>eventuallyConsistentMapBuilder()
+                .withName("workflow-event-hint-per-cxt")
+                .withSerializer(eventMapNamespace)
+                .withTimestampProvider((k, v) -> new WallClockTimestamp())
+                .build();
+
+        log.info("Started");
+    }
+
+    @Deactivate
+    public void deactivate() {
+        eventMapTree.destroy();
+        hintSetPerCxtMap.destroy();
+        log.info("Stopped");
+    }
+
+    @Override
+    public void registerEventMap(String eventType, Set<String> eventHintSet,
+                                 String contextName, String programCounterString) throws WorkflowException {
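+        // Event map document tree layout, as built below:
+        //   /root/{eventType}/{eventHint}/{contextName} -> programCounterString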
+        for (String eventHint : eventHintSet) {
+            //Insert in eventCxtPerHintMap
+            DocumentPath dpathForCxt = DocumentPath.from(Lists.newArrayList(
+                    "root", eventType, eventHint, contextName));
+            String currentWorkletType = completeVersioned(eventMapTree.get(dpathForCxt));
+            if (currentWorkletType == null) {
+                complete(eventMapTree.createRecursive(dpathForCxt, programCounterString));
+            } else {
+                complete(eventMapTree.replace(dpathForCxt, programCounterString, currentWorkletType));
+            }
+            log.trace("RegisterEventMap for eventType:{}, eventSet:{}, contextName:{}, pc:{}",
+                     eventType, eventHintSet, contextName, programCounterString);
+
+        }
+        hintSetPerCxtMap.put(contextName, eventHintSet);
+        log.trace("RegisterEventMap in hintSetPerCxt for " +
+                         "eventType:{}, eventSet:{}, contextName:{}, pc:{}",
+                 eventType, eventHintSet, contextName, programCounterString);
+    }
+
+    @Override
+    public void unregisterEventMap(String eventType, String contextName)
+            throws WorkflowException {
+
+        Set<String> hints = hintSetPerCxtMap.get(contextName);
+        if (Objects.isNull(hints)) {
+            return;
+        }
+        for (String eventHint : hints) {
+            //Remove from eventCxtPerHintMap
+            complete(eventMapTree.removeNode(DocumentPath.from(Lists.newArrayList(
+                    "root", eventType, eventHint, contextName))));
+            log.trace("UnregisterEventMap from eventCxtPerHintMap for eventType:{}, eventSet:{}, contextName:{}",
+                     eventType, eventHint, contextName);
+        }
+        hintSetPerCxtMap.remove(contextName);
+    }
+
+
+    @Override
+    public Map<String, String> getEventMapByHint(String eventType, String eventHint) throws WorkflowException {
+        DocumentPath path = DocumentPath.from(
+                Lists.newArrayList("root", eventType, eventHint));
+        Map<String, Versioned<String>> contexts = complete(eventMapTree.getChildren(path));
+        Map<String, String> eventMap = Maps.newHashMap();
+        if (Objects.isNull(contexts)) {
+            return eventMap;
+        }
+
+        for (Map.Entry<String, Versioned<String>> entry : contexts.entrySet()) {
+            eventMap.put(entry.getKey(), entry.getValue().value());
+        }
+        log.trace("getEventMapByHint returns eventMap {} ", eventMap);
+        return eventMap;
+    }
+
+    @Override
+    public boolean isEventMapPresent(String contextName) {
+        Set<String> eventHintSet = hintSetPerCxtMap.get(contextName);
+        if (Objects.nonNull(eventHintSet)) {
+            log.trace("EventMap present for Context:{}", contextName);
+            return true;
+        } else {
+            log.trace("EventMap does not exist for Context:{}", contextName);
+            return false;
+        }
+    }
+
+
+    @Override
+    public Map<String, Versioned<String>> getChildren(String path) throws WorkflowException {
+        DocumentPath dpath = DocumentPath.from(path);
+        Map<String, Versioned<String>> entries = complete(eventMapTree.getChildren(dpath));
+        return entries;
+    }
+
+    @Override
+    public DocumentPath getDocumentPath(String path) throws WorkflowException {
+        DocumentPath dpath = DocumentPath.from(path);
+        return dpath;
+    }
+
+    @Override
+    public ObjectNode asJsonTree() throws WorkflowException {
+
+        DocumentPath rootPath = DocumentPath.from(Lists.newArrayList("root"));
+        Map<String, Versioned<String>> eventmap = complete(eventMapTree.getChildren(rootPath));
+
+        ObjectNode rootNode = JsonNodeFactory.instance.objectNode();
+
+        for (Map.Entry<String, Versioned<String>> eventTypeEntry : eventmap.entrySet()) {
+
+            String eventType = eventTypeEntry.getKey();
+
+            ObjectNode eventTypeNode = JsonNodeFactory.instance.objectNode();
+            rootNode.put(eventType, eventTypeNode);
+
+            DocumentPath eventTypePath = DocumentPath.from(Lists.newArrayList("root", eventType));
+            Map<String, Versioned<String>> hintmap = complete(eventMapTree.getChildren(eventTypePath));
+
+            for (Map.Entry<String, Versioned<String>> hintEntry : hintmap.entrySet()) {
+
+                String hint = hintEntry.getKey();
+
+                ObjectNode hintNode = JsonNodeFactory.instance.objectNode();
+                eventTypeNode.put(hint, hintNode);
+
+                DocumentPath hintPath = DocumentPath.from(Lists.newArrayList("root", eventType, hint));
+                Map<String, Versioned<String>> contextmap = complete(eventMapTree.getChildren(hintPath));
+
+                for (Map.Entry<String, Versioned<String>> ctxtEntry : contextmap.entrySet()) {
+                    hintNode.put(ctxtEntry.getKey(), ctxtEntry.getValue().value());
+                }
+            }
+        }
+
+        return rootNode;
+    }
+
+    private <T> T complete(CompletableFuture<T> future) throws WorkflowException {
+        try {
+            return future.get();
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+            throw new WorkflowException(e.getCause().getMessage());
+        } catch (ExecutionException e) {
+            if (e.getCause() instanceof IllegalDocumentModificationException) {
+                throw new WorkflowException("Node or parent does not exist or is root or is not a Leaf Node",
+                        e.getCause());
+            } else if (e.getCause() instanceof NoSuchDocumentPathException) {
+                return null;
+            } else {
+                throw new WorkflowException("Datastore operation failed", e.getCause());
+            }
+        }
+    }
+
+    private <T> T completeVersioned(CompletableFuture<Versioned<T>> future) throws WorkflowException {
+        return Optional.ofNullable(complete(future))
+                .map(Versioned::value)
+                .orElse(null);
+    }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/DistributedWorkplaceStore.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/DistributedWorkplaceStore.java
new file mode 100644
index 0000000..1361758
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/DistributedWorkplaceStore.java
@@ -0,0 +1,394 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.impl;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.BaseJsonNode;
+import com.fasterxml.jackson.databind.node.BigIntegerNode;
+import com.fasterxml.jackson.databind.node.BinaryNode;
+import com.fasterxml.jackson.databind.node.BooleanNode;
+import com.fasterxml.jackson.databind.node.ContainerNode;
+import com.fasterxml.jackson.databind.node.DecimalNode;
+import com.fasterxml.jackson.databind.node.DoubleNode;
+import com.fasterxml.jackson.databind.node.FloatNode;
+import com.fasterxml.jackson.databind.node.IntNode;
+import com.fasterxml.jackson.databind.node.JsonNodeCreator;
+import com.fasterxml.jackson.databind.node.JsonNodeFactory;
+import com.fasterxml.jackson.databind.node.JsonNodeType;
+import com.fasterxml.jackson.databind.node.LongNode;
+import com.fasterxml.jackson.databind.node.MissingNode;
+import com.fasterxml.jackson.databind.node.NullNode;
+import com.fasterxml.jackson.databind.node.NumericNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.fasterxml.jackson.databind.node.POJONode;
+import com.fasterxml.jackson.databind.node.ShortNode;
+import com.fasterxml.jackson.databind.node.TextNode;
+import com.fasterxml.jackson.databind.node.ValueNode;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Maps;
+import org.apache.felix.scr.annotations.Activate;
+import org.apache.felix.scr.annotations.Component;
+import org.apache.felix.scr.annotations.Deactivate;
+import org.apache.felix.scr.annotations.Reference;
+import org.apache.felix.scr.annotations.ReferenceCardinality;
+import org.onlab.util.KryoNamespace;
+import org.onosproject.core.ApplicationId;
+import org.onosproject.core.CoreService;
+import org.onosproject.workflow.api.DataModelTree;
+import org.onosproject.workflow.api.DefaultWorkplace;
+import org.onosproject.workflow.api.DefaultWorkflowContext;
+import org.onosproject.workflow.api.JsonDataModelTree;
+import org.onosproject.workflow.api.ProgramCounter;
+import org.onosproject.workflow.api.SystemWorkflowContext;
+import org.onosproject.workflow.api.WorkflowContext;
+import org.onosproject.workflow.api.WorkflowData;
+import org.onosproject.workflow.api.WorkflowState;
+import org.onosproject.workflow.api.Workplace;
+import org.onosproject.workflow.api.WorkflowDataEvent;
+import org.onosproject.workflow.api.WorkplaceStore;
+import org.onosproject.workflow.api.WorkplaceStoreDelegate;
+import org.onosproject.store.AbstractStore;
+import org.onosproject.store.serializers.KryoNamespaces;
+import org.onosproject.store.service.ConsistentMap;
+import org.onosproject.store.service.MapEvent;
+import org.onosproject.store.service.MapEventListener;
+import org.onosproject.store.service.Serializer;
+import org.onosproject.store.service.StorageException;
+import org.onosproject.store.service.StorageService;
+import org.onosproject.store.service.Versioned;
+import org.slf4j.Logger;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.slf4j.LoggerFactory.getLogger;
+
+@Component(immediate = true)
+public class DistributedWorkplaceStore
+    extends AbstractStore<WorkflowDataEvent, WorkplaceStoreDelegate> implements WorkplaceStore {
+
+    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+    protected CoreService coreService;
+
+    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+    protected StorageService storageService;
+
+    private ApplicationId appId;
+    private final Logger log = getLogger(getClass());
+
+    private final WorkplaceMapListener workplaceMapEventListener = new WorkplaceMapListener();
+    private ConsistentMap<String, WorkflowData> workplaceMap;
+    private Map<String, Workplace> localWorkplaceMap = Maps.newConcurrentMap();
+
+    private final WorkflowContextMapListener contextMapEventListener = new WorkflowContextMapListener();
+    private ConsistentMap<String, WorkflowData> contextMap;
+    private Map<String, WorkflowContext> localContextMap = Maps.newConcurrentMap();
+
+    private Map<String, Map<String, WorkflowContext>> localWorkplaceMemberMap = Maps.newConcurrentMap();
+
+    @Activate
+    public void activate() {
+
+        appId = coreService.registerApplication("org.onosproject.workplacestore");
+        log.info("appId=" + appId);
+
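+        // Kryo namespace for workflow data; the Jackson node types are registered
+        // so that JSON data models (JsonDataModelTree) can be serialized along
+        // with the workplaces and workflow contexts.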
+        KryoNamespace workplaceNamespace = KryoNamespace.newBuilder()
+                .register(KryoNamespaces.API)
+                .register(WorkflowData.class)
+                .register(Workplace.class)
+                .register(DefaultWorkplace.class)
+                .register(WorkflowContext.class)
+                .register(DefaultWorkflowContext.class)
+                .register(SystemWorkflowContext.class)
+                .register(WorkflowState.class)
+                .register(ProgramCounter.class)
+                .register(DataModelTree.class)
+                .register(JsonDataModelTree.class)
+                .register(List.class)
+                .register(ArrayList.class)
+                .register(JsonNode.class)
+                .register(ObjectNode.class)
+                .register(TextNode.class)
+                .register(LinkedHashMap.class)
+                .register(ArrayNode.class)
+                .register(BaseJsonNode.class)
+                .register(BigIntegerNode.class)
+                .register(BinaryNode.class)
+                .register(BooleanNode.class)
+                .register(ContainerNode.class)
+                .register(DecimalNode.class)
+                .register(DoubleNode.class)
+                .register(FloatNode.class)
+                .register(IntNode.class)
+                .register(JsonNodeType.class)
+                .register(LongNode.class)
+                .register(MissingNode.class)
+                .register(NullNode.class)
+                .register(NumericNode.class)
+                .register(POJONode.class)
+                .register(ShortNode.class)
+                .register(ValueNode.class)
+                .register(JsonNodeCreator.class)
+                .register(JsonNodeFactory.class)
+                .build();
+
+        localWorkplaceMap.clear();
+        workplaceMap = storageService.<String, WorkflowData>consistentMapBuilder()
+                .withSerializer(Serializer.using(workplaceNamespace))
+                .withName("workplace-map")
+                .withApplicationId(appId)
+                .build();
+        workplaceMap.addListener(workplaceMapEventListener);
+
+        localContextMap.clear();
+        contextMap = storageService.<String, WorkflowData>consistentMapBuilder()
+                .withSerializer(Serializer.using(workplaceNamespace))
+                .withName("workflow-context-map")
+                .withApplicationId(appId)
+                .build();
+        contextMap.addListener(contextMapEventListener);
+
+        workplaceMapEventListener.syncLocal();
+        contextMapEventListener.syncLocal();
+        log.info("Started");
+    }
+
+    @Deactivate
+    public void deactivate() {
+        workplaceMap.destroy();
+        localWorkplaceMap.clear();
+        contextMap.destroy();
+        localContextMap.clear();
+
+        log.info("Stopped");
+    }
+
+    @Override
+    public void registerWorkplace(String name, Workplace workplace) throws StorageException {
+        workplaceMap.put(name, workplace);
+    }
+
+    @Override
+    public void removeWorkplace(String name) throws StorageException {
+        removeWorkplaceContexts(name);
+        workplaceMap.remove(name);
+    }
+
+    @Override
+    public Workplace getWorkplace(String name) throws StorageException {
+        return localWorkplaceMap.get(name);
+    }
+
+    @Override
+    public Collection<Workplace> getWorkplaces() throws StorageException {
+        return ImmutableList.copyOf(localWorkplaceMap.values());
+    }
+
+    @Override
+    public void commitWorkplace(String name, Workplace workplace, boolean handleEvent) throws StorageException {
+        workplace.setTriggerNext(handleEvent);
+        if (workplaceMap.containsKey(name)) {
+            workplaceMap.replace(name, workplace);
+        } else {
+            registerWorkplace(name, workplace);
+        }
+    }
+
+    @Override
+    public void registerContext(String name, WorkflowContext context) throws StorageException {
+        contextMap.put(name, context);
+    }
+
+    @Override
+    public void removeContext(String name) throws StorageException {
+        contextMap.remove(name);
+    }
+
+    @Override
+    public WorkflowContext getContext(String name) throws StorageException {
+        return localContextMap.get(name);
+    }
+
+    @Override
+    public void commitContext(String name, WorkflowContext context, boolean handleEvent) throws StorageException {
+        context.setTriggerNext(handleEvent);
+        if (contextMap.containsKey(name)) {
+            contextMap.replace(name, context);
+        } else {
+            registerContext(name, context);
+        }
+    }
+
+    @Override
+    public Collection<WorkflowContext> getContexts() throws StorageException {
+        return ImmutableList.copyOf(localContextMap.values());
+    }
+
+    @Override
+    public Collection<WorkflowContext> getWorkplaceContexts(String workplaceName) {
+        Map<String, WorkflowContext> ctxMap = localWorkplaceMemberMap.get(workplaceName);
+        if (ctxMap == null) {
+            return Collections.emptyList();
+        }
+
+        return ImmutableList.copyOf(ctxMap.values());
+    }
+
+    @Override
+    public void removeWorkplaceContexts(String workplaceName) {
+        for (WorkflowContext ctx : getWorkplaceContexts(workplaceName)) {
+            removeContext(ctx.name());
+        }
+    }
+
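+    // Mirrors workplace-map events into the local cache and notifies the store delegate.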
+    private class WorkplaceMapListener implements MapEventListener<String, WorkflowData> {
+
+        @Override
+        public void event(MapEvent<String, WorkflowData> event) {
+
+            Workplace newWorkplace = (Workplace) Versioned.valueOrNull(event.newValue());
+            Workplace oldWorkplace = (Workplace) Versioned.valueOrNull(event.oldValue());
+
+            log.info("WorkplaceMap event: {}", event);
+            switch (event.type()) {
+                case INSERT:
+                    insert(newWorkplace);
+                    notifyDelegate(new WorkflowDataEvent(WorkflowDataEvent.Type.INSERT, newWorkplace));
+                    break;
+                case UPDATE:
+                    update(newWorkplace);
+                    notifyDelegate(new WorkflowDataEvent(WorkflowDataEvent.Type.UPDATE, newWorkplace));
+                    break;
+                case REMOVE:
+                    remove(oldWorkplace);
+                    notifyDelegate(new WorkflowDataEvent(WorkflowDataEvent.Type.REMOVE, oldWorkplace));
+                    break;
+                default:
+            }
+        }
+
+        private void insert(Workplace workplace) {
+            localWorkplaceMap.put(workplace.name(), workplace);
+        }
+
+        private void update(Workplace workplace) {
+            localWorkplaceMap.replace(workplace.name(), workplace);
+        }
+
+        private void remove(Workplace workplace) {
+            localWorkplaceMap.remove(workplace.name());
+        }
+
+        public void syncLocal() {
+            workplaceMap.values().stream().forEach(
+                    x -> insert((Workplace) (x.value()))
+            );
+        }
+    }
+
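+    // Mirrors workflow-context-map events into the local caches (including the
+    // per-workplace membership map) and notifies the store delegate.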
+    private class WorkflowContextMapListener implements MapEventListener<String, WorkflowData> {
+
+        @Override
+        public void event(MapEvent<String, WorkflowData> event) {
+
+            WorkflowContext newContext = (WorkflowContext) Versioned.valueOrNull(event.newValue());
+            WorkflowContext oldContext = (WorkflowContext) Versioned.valueOrNull(event.oldValue());
+
+            log.debug("WorkflowContext event: {}", event);
+            switch (event.type()) {
+                case INSERT:
+                    insert(newContext);
+                    notifyDelegate(new WorkflowDataEvent(WorkflowDataEvent.Type.INSERT, newContext));
+                    break;
+                case UPDATE:
+                    update(newContext);
+                    notifyDelegate(new WorkflowDataEvent(WorkflowDataEvent.Type.UPDATE, newContext));
+                    break;
+                case REMOVE:
+                    remove(oldContext);
+                    notifyDelegate(new WorkflowDataEvent(WorkflowDataEvent.Type.REMOVE, oldContext));
+                    break;
+                default:
+            }
+        }
+
+        /**
+         * Inserts a workflow context into the local maps.
+         * @param context workflow context
+         */
+        private void insert(WorkflowContext context) {
+            String workplaceName = context.workplaceName();
+            Map<String, WorkflowContext> ctxMap = localWorkplaceMemberMap.get(workplaceName);
+            if (ctxMap == null) {
+                ctxMap = new HashMap<>();
+                localWorkplaceMemberMap.put(workplaceName, ctxMap);
+            }
+            ctxMap.put(context.name(), context);
+
+            localContextMap.put(context.name(), context);
+        }
+
+        /**
+         * Updates a workflow context in the local maps.
+         * @param context workflow context
+         */
+        private void update(WorkflowContext context) {
+            String workplaceName = context.workplaceName();
+            Map<String, WorkflowContext> ctxMap = localWorkplaceMemberMap.get(workplaceName);
+            if (ctxMap == null) {
+                ctxMap = new HashMap<>();
+                localWorkplaceMemberMap.put(workplaceName, ctxMap);
+            }
+            ctxMap.put(context.name(), context);
+
+            localContextMap.put(context.name(), context);
+        }
+
+        /**
+         * Removes workflow context from local hash map.
+         * @param context workflow context
+         */
+        private void remove(WorkflowContext context) {
+            localContextMap.remove(context.name());
+
+            String workplaceName = context.workplaceName();
+            Map<String, WorkflowContext> ctxMap = localWorkplaceMemberMap.get(workplaceName);
+            if (ctxMap == null) {
+                log.error("remove-context: Failed to find workplace({}) in localWorkplaceMemberMap", workplaceName);
+                return;
+            }
+            ctxMap.remove(context.name());
+            if (ctxMap.isEmpty()) {
+                localWorkplaceMemberMap.remove(workplaceName, ctxMap);
+            }
+        }
+
+        /**
+         * Synchronizes local hash map.
+         */
+        public void syncLocal() {
+            contextMap.values().stream().forEach(
+                    x -> insert((WorkflowContext) (x.value()))
+            );
+        }
+    }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/ECWorkFlowStore.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/ECWorkFlowStore.java
new file mode 100644
index 0000000..8cd56ec
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/ECWorkFlowStore.java
@@ -0,0 +1,153 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.impl;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Sets;
+import org.apache.felix.scr.annotations.Activate;
+import org.apache.felix.scr.annotations.Component;
+import org.apache.felix.scr.annotations.Deactivate;
+import org.apache.felix.scr.annotations.Reference;
+import org.apache.felix.scr.annotations.ReferenceCardinality;
+import org.onlab.util.KryoNamespace;
+import org.slf4j.Logger;
+import org.onosproject.core.ApplicationId;
+import org.onosproject.core.CoreService;
+import org.onosproject.workflow.api.AbstractWorkflow;
+import org.onosproject.workflow.api.ImmutableListWorkflow;
+import org.onosproject.workflow.api.Workflow;
+import org.onosproject.workflow.api.WorkflowAttribute;
+import org.onosproject.workflow.api.WorkflowStore;
+import org.onosproject.net.group.GroupEvent;
+import org.onosproject.net.group.GroupStoreDelegate;
+import org.onosproject.store.AbstractStore;
+import org.onosproject.store.serializers.KryoNamespaces;
+import org.onosproject.store.service.EventuallyConsistentMap;
+import org.onosproject.store.service.StorageService;
+import org.onosproject.store.service.WallClockTimestamp;
+
+import java.net.URI;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import java.util.List;
+
+import static org.slf4j.LoggerFactory.getLogger;
+
+@Component(immediate = true)
+public class ECWorkFlowStore
+    extends AbstractStore<GroupEvent, GroupStoreDelegate> implements WorkflowStore {
+
+    private final Logger log = getLogger(getClass());
+
+    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+    protected CoreService coreService;
+
+    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+    protected StorageService storageService;
+
+    private ApplicationId appId;
+    private EventuallyConsistentMap<URI, Workflow> workflowStore;
+    private Set<ClassLoader> classloaders = Sets.newConcurrentHashSet();
+
+    @Activate
+    public void activate() {
+
+        appId = coreService.registerApplication("org.onosproject.nfconfig");
+        log.info("appId={}", appId);
+
+        KryoNamespace.Builder workflowSerializer = KryoNamespace.newBuilder()
+                .register(KryoNamespaces.API)
+                .register(URI.class)
+                .register(Workflow.class)
+                .register(AbstractWorkflow.class)
+                .register(ImmutableListWorkflow.class)
+                .register(List.class)
+                .register(ImmutableList.class)
+                .register(Class.class)
+                .register(WorkflowAttribute.class)
+                .register(Set.class)
+                .register(ImmutableSet.class)
+                .register(HashSet.class);
+
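+        // Workflows are replicated through an eventually consistent map; wall-clock
+        // timestamps resolve conflicts and anti-entropy runs every 5 seconds.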
+        workflowStore = storageService.<URI, Workflow>eventuallyConsistentMapBuilder()
+                .withName("workflow-workplaceStore")
+                .withSerializer(workflowSerializer)
+                .withAntiEntropyPeriod(5, TimeUnit.SECONDS)
+                .withTimestampProvider((k, v) -> new WallClockTimestamp())
+                .withTombstonesDisabled()
+                .build();
+
+        classloaders.add(this.getClass().getClassLoader());
+
+        log.info("Started");
+    }
+
+    @Deactivate
+    public void deactivate() {
+        workflowStore.destroy();
+
+        log.info("Stopped");
+    }
+
+    @Override
+    public void register(Workflow workflow) {
+        workflowStore.put(workflow.id(), workflow);
+    }
+
+    @Override
+    public void unregister(URI id) {
+        workflowStore.remove(id);
+    }
+
+    @Override
+    public Workflow get(URI id) {
+        return workflowStore.get(id);
+    }
+
+    @Override
+    public Collection<Workflow> getAll() {
+        return workflowStore.values();
+    }
+
+    @Override
+    public void registerLocal(ClassLoader loader) {
+        classloaders.add(loader);
+    }
+
+    @Override
+    public void unregisterLocal(ClassLoader loader) {
+        classloaders.remove(loader);
+    }
+
+    @Override
+    public Class getClass(String name) throws ClassNotFoundException {
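+        // Try each registered class loader; the first one able to load the class wins.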
+        for (ClassLoader loader : classloaders) {
+            Class cl = null;
+            try {
+                cl = loader.loadClass(name);
+            } catch (ClassNotFoundException e) {
+                // do nothing
+            }
+            if (cl != null) {
+                return cl;
+            }
+        }
+        throw new ClassNotFoundException(name);
+    }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/HandlerTaskAccumulator.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/HandlerTaskAccumulator.java
new file mode 100644
index 0000000..4651cd4
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/HandlerTaskAccumulator.java
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.impl;
+
+import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.ListMultimap;
+import org.onlab.util.AbstractAccumulator;
+import org.onosproject.workflow.api.HandlerTask;
+import org.onosproject.workflow.api.HandlerTaskBatchDelegate;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Timer;
+
+/**
+ * An accumulator for building batches of handler task operations. Only one batch should
+ * be in process per instance at a time.
+ */
+public class HandlerTaskAccumulator extends AbstractAccumulator<HandlerTask> {
+
+    private static final int DEFAULT_MAX_EVENTS = 5000;
+    private static final int DEFAULT_MAX_IDLE_MS = 10;
+    private static final int DEFAULT_MAX_BATCH_MS = 50;
+
+    private static final Timer TIMER = new Timer("onos-workflow-handlertask-batching");
+
+    private final HandlerTaskBatchDelegate delegate;
+
+    private volatile boolean ready;
+
+    /**
+     * Creates a handler-task operation accumulator.
+     *
+     * @param delegate the handler-task batch delegate
+     */
+    protected HandlerTaskAccumulator(HandlerTaskBatchDelegate delegate) {
+        super(TIMER, DEFAULT_MAX_EVENTS, DEFAULT_MAX_BATCH_MS, DEFAULT_MAX_IDLE_MS);
+        this.delegate = delegate;
+        // Assume that the delegate is ready at the start
+        ready = true; //TODO validate the assumption that delegate is ready
+    }
+
+    @Override
+    public void processItems(List<HandlerTask> items) {
+        ready = false;
+        delegate.execute(epoch(items));
+    }
+
+    /**
+     * Groups handler tasks into an epoch, keyed by workflow context name, so that
+     * tasks for the same context are processed as one batch.
+     * @param ops handler tasks
+     * @return collection of per-context handler task collections
+     */
+    private Collection<Collection<HandlerTask>> epoch(List<HandlerTask> ops) {
+
+        ListMultimap<String, HandlerTask> tasks = ArrayListMultimap.create();
+
+        // align event-tasks with context
+        for (HandlerTask op : ops) {
+            tasks.put(op.context().name(), op);
+        }
+
+        return tasks.asMap().values();
+    }
+
+    @Override
+    public boolean isReady() {
+        return ready;
+    }
+
+    /**
+     * Makes the accumulator ready to process the next batch.
+     */
+    public void ready() {
+        ready = true;
+    }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkFlowEngine.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkFlowEngine.java
new file mode 100644
index 0000000..8f1d109
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkFlowEngine.java
@@ -0,0 +1,843 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.impl;
+
+import com.fasterxml.jackson.databind.node.JsonNodeFactory;
+import org.apache.felix.scr.annotations.Activate;
+import org.apache.felix.scr.annotations.Component;
+import org.apache.felix.scr.annotations.Deactivate;
+import org.apache.felix.scr.annotations.Reference;
+import org.apache.felix.scr.annotations.ReferenceCardinality;
+import org.onosproject.cluster.ClusterService;
+import org.onosproject.cluster.LeadershipService;
+import org.onosproject.cluster.NodeId;
+import org.onosproject.core.ApplicationId;
+import org.onosproject.core.CoreService;
+import org.onosproject.store.service.StorageException;
+import org.onosproject.workflow.api.DefaultWorkplace;
+import org.onosproject.workflow.api.EventHintSupplier;
+import org.onosproject.workflow.api.EventTask;
+import org.onosproject.workflow.api.JsonDataModelInjector;
+import org.onosproject.workflow.api.JsonDataModelTree;
+import org.onosproject.workflow.api.ProgramCounter;
+import org.onosproject.workflow.api.SystemWorkflowContext;
+import org.onosproject.workflow.api.EventTimeoutTask;
+import org.onosproject.workflow.api.TimeoutTask;
+import org.onosproject.workflow.api.TimerChain;
+import org.onosproject.workflow.api.Worklet;
+import org.onosproject.workflow.api.Workflow;
+import org.onosproject.workflow.api.WorkflowContext;
+import org.onosproject.workflow.api.WorkflowData;
+import org.onosproject.workflow.api.ContextEventMapStore;
+import org.onosproject.workflow.api.WorkflowState;
+import org.onosproject.workflow.api.WorkflowStore;
+import org.onosproject.workflow.api.WorkflowBatchDelegate;
+import org.onosproject.workflow.api.WorkflowDataEvent;
+import org.onosproject.workflow.api.WorkflowDataListener;
+import org.onosproject.workflow.api.WorkflowException;
+import org.onosproject.workflow.api.HandlerTask;
+import org.onosproject.workflow.api.HandlerTaskBatchDelegate;
+import org.onosproject.workflow.api.Workplace;
+import org.onosproject.workflow.api.WorkplaceStore;
+import org.onosproject.workflow.api.WorkplaceStoreDelegate;
+import org.onosproject.workflow.api.WorkflowExecutionService;
+import org.onosproject.event.AbstractListenerManager;
+import org.onosproject.event.Event;
+import org.onosproject.net.intent.WorkPartitionService;
+import org.slf4j.Logger;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.UUID;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.stream.Collectors;
+
+import static java.util.concurrent.Executors.newFixedThreadPool;
+import static java.util.concurrent.Executors.newSingleThreadExecutor;
+import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor;
+import static org.onlab.util.Tools.groupedThreads;
+import static org.onosproject.workflow.api.WorkflowAttribute.REMOVE_AFTER_COMPLETE;
+import static org.slf4j.LoggerFactory.getLogger;
+
+@Component(immediate = true)
+public class WorkFlowEngine extends AbstractListenerManager<WorkflowDataEvent, WorkflowDataListener>
+        implements WorkflowExecutionService {
+
+    protected static final Logger log = getLogger(WorkFlowEngine.class);
+    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+    protected CoreService coreService;
+
+    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+    protected ClusterService clusterService;
+
+    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+    protected LeadershipService leadershipService;
+
+    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+    protected WorkPartitionService partitionService;
+
+    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+    protected WorkplaceStore workplaceStore;
+
+    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+    protected WorkflowStore workflowStore;
+
+    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+    protected ContextEventMapStore eventMapStore;
+
+    private final WorkplaceStoreDelegate workplaceStoreDelegate = this::post;
+
+    private final WorkflowBatchDelegate workflowBatchDelegate = new InternalWorkflowBatchDelegate();
+    private final WorkflowAccumulator workflowAccumulator = new WorkflowAccumulator(workflowBatchDelegate);
+
+    private final HandlerTaskBatchDelegate eventtaskBatchDelegate = new InternalHandlerTaskBatchDelegate();
+    private final HandlerTaskAccumulator eventtaskAccumulator = new HandlerTaskAccumulator(eventtaskBatchDelegate);
+
+    private ExecutorService workflowBatchExecutor;
+    private ExecutorService workflowExecutor;
+
+    private ExecutorService handlerTaskBatchExecutor;
+    private ExecutorService handlerTaskExecutor;
+
+    private static final int DEFAULT_WORKFLOW_THREADS = 12;
+    private static final int DEFAULT_EVENTTASK_THREADS = 12;
+    private static final int MAX_REGISTER_EVENTMAP_WAITS = 10;
+
+    private ScheduledExecutorService eventMapTriggerExecutor;
+
+    private TimerChain timerChain = new TimerChain();
+
+    private JsonDataModelInjector dataModelInjector = new JsonDataModelInjector();
+
+    public static final String APPID = "org.onosproject.workflow";
+    private ApplicationId appId;
+    private NodeId localNodeId;
+
+    @Activate
+    public void activate() {
+        appId = coreService.registerApplication(APPID);
+        workplaceStore.setDelegate(workplaceStoreDelegate);
+        localNodeId = clusterService.getLocalNode().id();
+        leadershipService.runForLeadership(appId.name());
+
+        workflowBatchExecutor = newSingleThreadExecutor(
+                groupedThreads("onos/workflow", "workflow-batch", log));
+        workflowExecutor = newFixedThreadPool(DEFAULT_WORKFLOW_THREADS,
+                groupedThreads("onos/workflow-exec", "worker-%d", log));
+        handlerTaskBatchExecutor = newSingleThreadExecutor(
+                groupedThreads("onos/workflow", "handlertask-batch", log));
+        handlerTaskExecutor = newFixedThreadPool(DEFAULT_EVENTTASK_THREADS,
+                groupedThreads("onos/handlertask-exec", "worker-%d", log));
+        eventMapTriggerExecutor = newSingleThreadScheduledExecutor(
+                groupedThreads("onos/workflow-engine", "eventmap-trigger-executor"));
+
+        (new WorkplaceWorkflow(this, workflowStore)).registerWorkflows();
+        JsonDataModelTree data = new JsonDataModelTree(JsonNodeFactory.instance.objectNode());
+        workplaceStore.registerWorkplace(Workplace.SYSTEM_WORKPLACE,
+                new DefaultWorkplace(Workplace.SYSTEM_WORKPLACE, data));
+
+        log.info("Started");
+    }
+
+    @Deactivate
+    public void deactivate() {
+        leadershipService.withdraw(appId.name());
+        workplaceStore.unsetDelegate(workplaceStoreDelegate);
+        workflowBatchExecutor.shutdown();
+        workflowExecutor.shutdown();
+        handlerTaskBatchExecutor.shutdown();
+        handlerTaskExecutor.shutdown();
+        eventMapTriggerExecutor.shutdown();
+        log.info("Stopped");
+    }
+
+    @Override
+    public void execInitWorklet(WorkflowContext context) {
+
+        Workflow workflow = workflowStore.get(context.workflowId());
+        if (workflow == null) {
+            log.error("Invalid workflow {}", context.workflowId());
+            return;
+        }
+
+        initWorkletExecution(context);
+        try {
+            Worklet initWorklet = workflow.init(context);
+            if (initWorklet != null) {
+
+                log.info("{} worklet.process:{}", context.name(), initWorklet.tag());
+                log.trace("{} context: {}", context.name(), context);
+
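+                // Inject the JSON data model into the worklet, run it, and pull any
+                // data model changes back (inhale) into the context.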
+                dataModelInjector.inject(initWorklet, context);
+                initWorklet.process(context);
+                dataModelInjector.inhale(initWorklet, context);
+
+                log.info("{} worklet.process(done): {}", context.name(), initWorklet.tag());
+                log.trace("{} context: {}", context.name(), context);
+            }
+
+        } catch (WorkflowException e) {
+            log.error("Exception: ", e);
+            context.setCause(e.getMessage());
+            context.setState(WorkflowState.EXCEPTION);
+            workplaceStore.commitContext(context.name(), context, false);
+            return;
+        }
+        // trigger the execution of next worklet.
+        workplaceStore.registerContext(context.name(), context);
+    }
+
+    @Override
+    public void eval(String contextName) {
+
+        final WorkflowContext latestContext = workplaceStore.getContext(contextName);
+        if (latestContext == null) {
+            log.error("Invalid workflow context {}", contextName);
+            return;
+        }
+
+        initWorkletExecution(latestContext);
+
+        workplaceStore.commitContext(latestContext.name(), latestContext, true);
+    }
+
+    @Override
+    public void eventMapTrigger(Event event, EventHintSupplier supplier) {
+
+        if (event.subject() instanceof SystemWorkflowContext) {
+            return;
+        }
+
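+        // Derive an event hint from the event, look up the contexts waiting on that
+        // hint, and queue an event task for each matching context.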
+        Map<String, String> eventMap;
+
+        String eventHint;
+        try {
+            eventHint = supplier.apply(event);
+        } catch (Throwable e) {
+            log.error("Exception: ", e);
+            return;
+        }
+        if (eventHint == null) {
+            // do nothing
+            log.error("Invalid eventHint, event: {}", event);
+            return;
+        }
+
+        try {
+            eventMap = eventMapStore.getEventMapByHint(event.getClass().getName(), eventHint);
+        } catch (WorkflowException e) {
+            log.error("Exception: ", e);
+            return;
+        }
+
+        if (Objects.isNull(eventMap) || eventMap.isEmpty()) {
+            // do nothing;
+            log.debug("Invalid eventMap, event: {}", event);
+            return;
+        }
+
+        for (Map.Entry<String, String> entry : eventMap.entrySet()) {
+            String contextName = entry.getKey();
+            String strProgramCounter = entry.getValue();
+            ProgramCounter pc;
+            try {
+                pc = ProgramCounter.valueOf(strProgramCounter);
+            } catch (IllegalArgumentException e) {
+                log.error("Exception: ", e);
+                return;
+            }
+
+            WorkflowContext context = workplaceStore.getContext(contextName);
+            if (Objects.isNull(context)) {
+                log.info("Invalid context: {}, event: {}", contextName, event);
+                continue;
+            }
+            EventTask eventtask = null;
+            try {
+                eventtask = EventTask.builder()
+                    .event(event)
+                    .eventHint(eventHint)
+                    .context(context)
+                    .programCounter(pc)
+                    .build();
+            } catch (WorkflowException e) {
+                log.error("Exception: ", e);
+            }
+
+            log.debug("eventtaskAccumulator.add: task: {}", eventtask);
+            if (!Objects.isNull(eventtask)) {
+                eventtaskAccumulator.add(eventtask);
+            }
+        }
+    }
+
+    @Override
+    public void registerEventMap(Class<? extends Event> eventType, Set<String> eventHintSet,
+                                 String contextName, String programCounterString) throws WorkflowException {
+        eventMapStore.registerEventMap(eventType.getName(), eventHintSet, contextName, programCounterString);
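+        // Wait (with increasing back-off) until the registration becomes visible in the
+        // distributed event map store.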
+        for (String eventHint : eventHintSet) {
+            for (int i = 0; i < MAX_REGISTER_EVENTMAP_WAITS; i++) {
+                Map<String, String> eventMap = eventMapStore.getEventMapByHint(eventType.getName(), eventHint);
+                if (eventMap != null && eventMap.containsKey(contextName)) {
+                    break;
+                }
+                try {
+                    log.info("sleep {}", i);
+                    Thread.sleep(10L * (i + 1));
+                } catch (InterruptedException e) {
+                    log.error("Exception: ", e);
+                    Thread.currentThread().interrupt();
+                }
+            }
+        }
+
+    }
+
+    @Override
+    protected void post(WorkflowDataEvent event) {
+
+        if (event.subject() == null || !isRelevant(event.subject())) {
+            log.debug("ignore event {}", event);
+            return;
+        }
+
+        // trigger next worklet selection
+        WorkflowData dataModelContainer = event.subject();
+        switch (event.type()) {
+            case INSERT:
+            case UPDATE:
+                if (dataModelContainer.triggerNext()) {
+                    log.debug("workflowAccumulator.add: {}", dataModelContainer);
+                    workflowAccumulator.add(dataModelContainer);
+                } else {
+                    log.debug("pass-workflowAccumulator.add: {}", dataModelContainer);
+                }
+                break;
+            case REMOVE:
+                break;
+            default:
+        }
+
+        // trigger EventTask for WorkflowDataEvent
+        eventMapTriggerExecutor.submit(
+                () -> eventMapTrigger(
+                        event,
+                        // event hint supplier
+                        (ev) -> {
+                            if (ev == null || ev.subject() == null) {
+                                return null;
+                            }
+
+                            if (ev.subject() instanceof WorkflowData) {
+                                return ((WorkflowData) ev.subject()).name();
+                            } else {
+                                return null;
+                            }
+                        }
+                )
+        );
+    }
+
+    /**
+     * Checks whether this workflow data job is relevant to this ONOS node.
+     * @param job workflow data
+     * @return true if this node is responsible for the given workflow data
+     */
+    private boolean isRelevant(WorkflowData job) {
+        // distributes event processing with work-partition
+        return partitionService.isMine(job.distributor(), this::stringHash);
+    }
+
+    /**
+     * Gets hash of the string.
+     * @param str string to get a hash
+     * @return hash value
+     */
+    public Long stringHash(String str) {
+        return UUID.nameUUIDFromBytes(str.getBytes()).getMostSignificantBits();
+    }
+
+    /**
+     * Class for handler task batch delegation.
+     */
+    private class InternalHandlerTaskBatchDelegate implements HandlerTaskBatchDelegate {
+        @Override
+        public void execute(Collection<Collection<HandlerTask>> operations) {
+            log.debug("Execute {} operation(s).", operations.size());
+
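+            // Fan the per-context task batches out to the handler task executor and
+            // mark the accumulator ready again once all batches have completed.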
+            CompletableFuture.runAsync(() -> {
+                List<CompletableFuture<Collection<HandlerTask>>> futures = operations.stream()
+                        .map(
+                                x -> CompletableFuture.completedFuture(x)
+                                        .thenApplyAsync(WorkFlowEngine.this::processHandlerTask, handlerTaskExecutor)
+                                        .exceptionally(e -> null)
+                        )
+                        .collect(Collectors.toList());
+
+                // wait for all futures to complete
+                futures.parallelStream().forEach(x -> x.join());
+
+            }, handlerTaskBatchExecutor).exceptionally(e -> {
+                log.error("Error submitting batches:", e);
+                return null;
+            }).thenRun(eventtaskAccumulator::ready);
+        }
+    }
+
+    /**
+     * Initializes worklet execution.
+     * @param context workflow context
+     */
+    private void initWorkletExecution(WorkflowContext context) {
+        context.setState(WorkflowState.RUNNING);
+        context.setCause("");
+        context.setWorkflowExecutionService(this);
+        context.setWorkflowStore(workflowStore);
+        context.setWorkplaceStore(workplaceStore);
+        context.waitCompletion(null, null, null, 0L);
+        context.setTriggerNext(false);
+    }
+
+    /**
+     * Processes handler tasks.
+     * @param tasks handler tasks
+     * @return handler tasks processed
+     */
+    private Collection<HandlerTask> processHandlerTask(Collection<HandlerTask> tasks) {
+
+        for (HandlerTask task : tasks) {
+            if (task instanceof EventTimeoutTask) {
+                execEventTimeoutTask((EventTimeoutTask) task);
+            } else if (task instanceof TimeoutTask) {
+                execTimeoutTask((TimeoutTask) task);
+            } else if (task instanceof EventTask) {
+                execEventTask((EventTask) task);
+            } else {
+                log.error("Unsupported handler task {}", task);
+            }
+        }
+
+        return null;
+    }
+
+    /**
+     * Executes event task.
+     * @param task event task
+     * @return event task
+     */
+    private EventTask execEventTask(EventTask task) {
+
+        if (!eventMapStore.isEventMapPresent(task.context().name())) {
+            log.trace("EventMap does not exist for task context: {}", task.context().name());
+            return task;
+        }
+
+        log.debug("execEventTask- task: {}, hash: {}", task, stringHash(task.context().distributor()));
+
+        WorkflowContext context = (WorkflowContext) (task.context());
+        Workflow workflow = workflowStore.get(context.workflowId());
+        if (workflow == null) {
+            log.error("Invalid workflow {}", context.workflowId());
+            return task;
+        }
+
+        WorkflowContext latestContext = workplaceStore.getContext(context.name());
+        if (latestContext == null) {
+            log.error("Invalid workflow context {}", context.name());
+            return task;
+        }
+
+        try {
+            if (!Objects.equals(latestContext.current(), task.programCounter())) {
+                log.error("Current worklet({}) does not match task work({}). Ignored.",
+                        latestContext.current(), task.programCounter());
+                return task;
+            }
+
+            Worklet worklet = workflow.getWorkletInstance(task.programCounter().workletType());
+            if (Worklet.Common.COMPLETED.equals(worklet) || Worklet.Common.INIT.equals(worklet)) {
+                log.error("Current worklet is {}, Ignored", worklet);
+                return task;
+            }
+
+            initWorkletExecution(latestContext);
+
+            log.info("{} worklet.isCompleted:{}", latestContext.name(), worklet.tag());
+            log.trace("{} task:{}, context: {}", latestContext.name(), task, latestContext);
+
+            dataModelInjector.inject(worklet, latestContext);
+            boolean completed = worklet.isCompleted(latestContext, task.event());
+            dataModelInjector.inhale(worklet, latestContext);
+
+            if (completed) {
+
+                log.info("{} worklet.isCompleted(true):{}", latestContext.name(), worklet.tag());
+                log.trace("{} task:{}, context: {}", latestContext.name(), task, latestContext);
+
+                eventMapStore.unregisterEventMap(
+                        task.eventType(), latestContext.name());
+
+                //completed case
+                //increase program counter
+                ProgramCounter pc = latestContext.current();
+                latestContext.setCurrent(workflow.increased(pc));
+
+                workplaceStore.commitContext(latestContext.name(), latestContext, true);
+                return null;
+            } else {
+
+                log.info("{} worklet.isCompleted(false):{}", latestContext.name(), worklet.tag());
+                log.trace("{} task:{}, context: {}", latestContext.name(), task, latestContext);
+
+                workplaceStore.commitContext(latestContext.name(), latestContext, false);
+            }
+
+        } catch (WorkflowException e) {
+            log.error("Exception: ", e);
+            latestContext.setCause(e.getMessage());
+            latestContext.setState(WorkflowState.EXCEPTION);
+            workplaceStore.commitContext(latestContext.name(), latestContext, false);
+        } catch (StorageException e) {
+            log.error("Exception: ", e);
+            // Do not commit the context when a StorageException occurs.
+        } catch (Exception e) {
+            log.error("Exception: ", e);
+            latestContext.setCause(e.getMessage());
+            latestContext.setState(WorkflowState.EXCEPTION);
+            workplaceStore.commitContext(latestContext.name(), latestContext, false);
+        }
+
+        return task;
+    }
+
+    /**
+     * Executes event timeout task.
+     * @param task event timeout task
+     * @return handler task
+     */
+    private HandlerTask execEventTimeoutTask(EventTimeoutTask task) {
+
+        if (!eventMapStore.isEventMapPresent(task.context().name())) {
+            log.trace("EventMap does not exist for task context: {}", task.context().name());
+            return task;
+        }
+
+        log.debug("execEventTimeoutTask- task: {}, hash: {}", task, stringHash(task.context().distributor()));
+
+        WorkflowContext context = (WorkflowContext) (task.context());
+        Workflow workflow = workflowStore.get(context.workflowId());
+        if (workflow == null) {
+            log.error("execEventTimeoutTask: Invalid workflow {}", context.workflowId());
+            return task;
+        }
+
+        WorkflowContext latestContext = workplaceStore.getContext(context.name());
+        if (latestContext == null) {
+            log.error("execEventTimeoutTask: Invalid workflow context {}", context.name());
+            return task;
+        }
+
+        try {
+            if (!Objects.equals(latestContext.current(), task.programCounter())) {
+                log.error("execEventTimeoutTask: Current worklet({}) does not match task work({}). Ignored.",
+                        latestContext.current(), task.programCounter());
+                return task;
+            }
+
+            Worklet worklet = workflow.getWorkletInstance(task.programCounter().workletType());
+            if (worklet == Worklet.Common.COMPLETED || worklet == Worklet.Common.INIT) {
+                log.error("execEventTimeoutTask: Current worklet is {}, Ignored", worklet);
+                return task;
+            }
+
+            initWorkletExecution(latestContext);
+
+            eventMapStore.unregisterEventMap(task.eventType(), latestContext.name());
+
+            log.info("{} worklet.timeout(for event):{}", latestContext.name(), worklet.tag());
+            log.trace("{} task:{}, context: {}", latestContext.name(), task, latestContext);
+
+            dataModelInjector.inject(worklet, latestContext);
+            worklet.timeout(latestContext);
+            dataModelInjector.inhale(worklet, latestContext);
+
+            log.info("{} worklet.timeout(for event)(done):{}", latestContext.name(), worklet.tag());
+            log.trace("{} task:{}, context: {}", latestContext.name(), task, latestContext);
+
+
+            workplaceStore.commitContext(latestContext.name(), latestContext, latestContext.triggerNext());
+
+        } catch (WorkflowException e) {
+            log.error("Exception: ", e);
+            latestContext.setCause(e.getMessage());
+            latestContext.setState(WorkflowState.EXCEPTION);
+            workplaceStore.commitContext(latestContext.name(), latestContext, false);
+        } catch (StorageException e) {
+            log.error("Exception: ", e);
+            // Do not commit the context when a StorageException occurs.
+        } catch (Exception e) {
+            log.error("Exception: ", e);
+            latestContext.setCause(e.getMessage());
+            latestContext.setState(WorkflowState.EXCEPTION);
+            workplaceStore.commitContext(latestContext.name(), latestContext, false);
+        }
+
+        return task;
+    }
+
+    /**
+     * Executes timeout task.
+     * @param task time out task
+     * @return handler task
+     */
+    private HandlerTask execTimeoutTask(TimeoutTask task) {
+
+        log.debug("execTimeoutTask- task: {}, hash: {}", task, stringHash(task.context().distributor()));
+
+        WorkflowContext context = (WorkflowContext) (task.context());
+        Workflow workflow = workflowStore.get(context.workflowId());
+        if (workflow == null) {
+            log.error("execTimeoutTask: Invalid workflow {}", context.workflowId());
+            return task;
+        }
+
+        WorkflowContext latestContext = workplaceStore.getContext(context.name());
+        if (latestContext == null) {
+            log.error("execTimeoutTask: Invalid workflow context {}", context.name());
+            return task;
+        }
+
+        try {
+            if (!Objects.equals(latestContext.current(), task.programCounter())) {
+                log.error("execTimeoutTask: Current worklet({}) does not match task work({}). Ignored.",
+                        latestContext.current(), task.programCounter());
+                return task;
+            }
+
+            Worklet worklet = workflow.getWorkletInstance(task.programCounter().workletType());
+            if (worklet == Worklet.Common.COMPLETED || worklet == Worklet.Common.INIT) {
+                log.error("execTimeoutTask: Current worklet is {}, Ignored", worklet);
+                return task;
+            }
+
+            initWorkletExecution(latestContext);
+
+            log.info("{} worklet.timeout:{}", latestContext.name(), worklet.tag());
+            log.trace("{} context: {}", latestContext.name(), latestContext);
+
+            dataModelInjector.inject(worklet, latestContext);
+            worklet.timeout(latestContext);
+            dataModelInjector.inhale(worklet, latestContext);
+
+            log.info("{} worklet.timeout(done):{}", latestContext.name(), worklet.tag());
+            log.trace("{} context: {}", latestContext.name(), latestContext);
+
+            workplaceStore.commitContext(latestContext.name(), latestContext, latestContext.triggerNext());
+
+        } catch (WorkflowException e) {
+            log.error("Exception: ", e);
+            latestContext.setCause(e.getMessage());
+            latestContext.setState(WorkflowState.EXCEPTION);
+            workplaceStore.commitContext(latestContext.name(), latestContext, false);
+        } catch (StorageException e) {
+            log.error("Exception: ", e);
+            // Do not commit the context when a StorageException occurs.
+        } catch (Exception e) {
+            log.error("Exception: ", e);
+            latestContext.setCause(e.getMessage());
+            latestContext.setState(WorkflowState.EXCEPTION);
+            workplaceStore.commitContext(latestContext.name(), latestContext, false);
+        }
+
+        return task;
+    }
+
+    /**
+     * Class for delegation of workflow batch execution.
+     */
+    private class InternalWorkflowBatchDelegate implements WorkflowBatchDelegate {
+        @Override
+        public void execute(Collection<WorkflowData> operations) {
+            log.debug("Execute {} operation(s).", operations.size());
+
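+            // Execute each workflow data item on the workflow executor and mark the
+            // accumulator ready again once all futures have completed.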
+            CompletableFuture.runAsync(() -> {
+                List<CompletableFuture<WorkflowData>> futures = operations.stream()
+                        .map(
+                                x -> CompletableFuture.completedFuture(x)
+                                .thenApplyAsync(WorkFlowEngine.this::execWorkflow, workflowExecutor)
+                                .exceptionally(e -> null)
+                        )
+                        .collect(Collectors.toList());
+
+                // wait for all futures to complete
+                futures.parallelStream().forEach(x -> x.join());
+
+            }, workflowBatchExecutor).exceptionally(e -> {
+                log.error("Error submitting batches:", e);
+                return null;
+            }).thenRun(workflowAccumulator::ready);
+        }
+    }
+
+    /**
+     * Executes workflow.
+     * @param dataModelContainer workflow data model container(workflow or workplace)
+     * @return the workflow data that was executed, or null if nothing was executed
+     */
+    private WorkflowData execWorkflow(WorkflowData dataModelContainer) {
+        if (dataModelContainer instanceof WorkflowContext) {
+            return execWorkflowContext((WorkflowContext) dataModelContainer);
+        } else if (dataModelContainer instanceof Workplace) {
+            return execWorkplace((Workplace) dataModelContainer);
+        } else {
+            log.error("Invalid context {}", dataModelContainer);
+            return null;
+        }
+    }
+
+    /**
+     * Executes workflow context.
+     * @param context workflow context
+     * @return workflow context
+     */
+    private WorkflowContext execWorkflowContext(WorkflowContext context) {
+
+        Workflow workflow = workflowStore.get(context.workflowId());
+        if (workflow == null) {
+            log.error("Invalid workflow {}", context.workflowId());
+            return null;
+        }
+
+        final WorkflowContext latestContext = workplaceStore.getContext(context.name());
+        if (latestContext == null) {
+            log.error("Invalid workflow context {}", context.name());
+            return null;
+        }
+
+        initWorkletExecution(latestContext);
+
+        try {
+            final ProgramCounter pc = workflow.next(latestContext);
+            final Worklet worklet = workflow.getWorkletInstance(pc.workletType());
+
+            if (worklet == Worklet.Common.INIT) {
+                log.error("workflow.next gave INIT. It cannot be executed (context: {})", context.name());
+                return latestContext;
+            }
+
+            latestContext.setCurrent(pc);
+            if (worklet == Worklet.Common.COMPLETED) {
+
+                if (workflow.attributes().contains(REMOVE_AFTER_COMPLETE)) {
+                    workplaceStore.removeContext(latestContext.name());
+                    return null;
+                } else {
+                    latestContext.setState(WorkflowState.IDLE);
+                    workplaceStore.commitContext(latestContext.name(), latestContext, false);
+                    return latestContext;
+                }
+            }
+
+            log.info("{} worklet.process:{}", latestContext.name(), worklet.tag());
+            log.trace("{} context: {}", latestContext.name(), latestContext);
+
+            dataModelInjector.inject(worklet, latestContext);
+            worklet.process(latestContext);
+            dataModelInjector.inhale(worklet, latestContext);
+
+            log.info("{} worklet.process(done): {}", latestContext.name(), worklet.tag());
+            log.trace("{} context: {}", latestContext.name(), latestContext);
+
+
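+            // If the worklet expects a completion event, register the event map and arm
+            // an optional event timeout; otherwise arm a plain timeout or advance the
+            // program counter right away.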
+            if (latestContext.completionEventType() != null) {
+                if (latestContext.completionEventGenerator() == null) {
+                    String msg = String.format("Invalid expected event(%s), generator(%s)",
+                            latestContext.completionEventType(),
+                            latestContext.completionEventGenerator());
+                    throw new WorkflowException(msg);
+                }
+
+                registerEventMap(latestContext.completionEventType(), latestContext.completionEventHints(),
+                        latestContext.name(), pc.toString());
+
+                latestContext.completionEventGenerator().apply();
+
+                if (latestContext.completionEventTimeout() != 0L) {
+                    final EventTimeoutTask eventTimeoutTask = EventTimeoutTask.builder()
+                            .context(latestContext)
+                            .programCounter(pc)
+                            .eventType(latestContext.completionEventType().getName())
+                            .eventHintSet(latestContext.completionEventHints())
+                            .build();
+                    timerChain.schedule(latestContext.completionEventTimeout(),
+                            () -> {
+                                eventtaskAccumulator.add(eventTimeoutTask);
+                            });
+                }
+            } else {
+                if (latestContext.completionEventTimeout() != 0L) {
+                    final TimeoutTask timeoutTask = TimeoutTask.builder()
+                            .context(latestContext)
+                            .programCounter(pc)
+                            .build();
+
+                    timerChain.schedule(latestContext.completionEventTimeout(),
+                            () -> {
+                                eventtaskAccumulator.add(timeoutTask);
+                            });
+                } else {
+                    //completed case
+                    // increase program counter
+                    latestContext.setCurrent(workflow.increased(pc));
+                }
+            }
+
+            workplaceStore.commitContext(latestContext.name(), latestContext, latestContext.triggerNext());
+
+        } catch (WorkflowException e) {
+            log.error("Exception: ", e);
+            latestContext.setCause(e.getMessage());
+            latestContext.setState(WorkflowState.EXCEPTION);
+            workplaceStore.commitContext(latestContext.name(), latestContext, false);
+        } catch (StorageException e) {
+            log.error("Exception: ", e);
+            // Do not commit the context when a StorageException occurs.
+        } catch (Exception e) {
+            log.error("Exception: ", e);
+            latestContext.setCause(e.getMessage());
+            latestContext.setState(WorkflowState.EXCEPTION);
+            workplaceStore.commitContext(latestContext.name(), latestContext, false);
+        }
+
+        return latestContext;
+    }
+
+    /**
+     * Executes workplace.
+     * @param workplace workplace
+     * @return workplace
+     */
+    private Workplace execWorkplace(Workplace workplace) {
+
+        return null;
+    }
+
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkflowAccumulator.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkflowAccumulator.java
new file mode 100644
index 0000000..d13b66f
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkflowAccumulator.java
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.impl;
+
+import com.google.common.collect.Maps;
+import org.onlab.util.AbstractAccumulator;
+import org.onosproject.workflow.api.WorkflowData;
+import org.onosproject.workflow.api.WorkflowBatchDelegate;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.Timer;
+
+/**
+ * An accumulator for building batches of workflow operations. Only one batch should
+ * be in process per instance at a time.
+ */
+public class WorkflowAccumulator extends AbstractAccumulator<WorkflowData> {
+
+    private static final int DEFAULT_MAX_EVENTS = 1000;
+    private static final int DEFAULT_MAX_IDLE_MS = 10;
+    private static final int DEFAULT_MAX_BATCH_MS = 50;
+
+    private static final Timer TIMER = new Timer("onos-workflow-op-batching");
+
+    private final WorkflowBatchDelegate delegate;
+
+    private volatile boolean ready;
+
+    /**
+     * Creates a workflow operation accumulator.
+     *
+     * @param delegate the workflow batch delegate
+     */
+    protected WorkflowAccumulator(WorkflowBatchDelegate delegate) {
+        super(TIMER, DEFAULT_MAX_EVENTS, DEFAULT_MAX_BATCH_MS, DEFAULT_MAX_IDLE_MS);
+        this.delegate = delegate;
+        // Assume that the delegate is ready at the start
+        ready = true; //TODO validate the assumption that delegate is ready
+    }
+
+    @Override
+    public void processItems(List<WorkflowData> items) {
+        ready = false;
+        delegate.execute(reduce(items));
+    }
+
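+    /**
+     * Reduces the batch so that only the latest workflow data per name survives.
+     * @param ops workflow data operations
+     * @return reduced collection of workflow data
+     */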
+    private Collection<WorkflowData> reduce(List<WorkflowData> ops) {
+        Map<String, WorkflowData> map = Maps.newHashMap();
+        for (WorkflowData op : ops) {
+            map.put(op.name(), op);
+        }
+        //TODO check the version... or maybe workplaceStore will handle this.
+        return map.values();
+    }
+
+    @Override
+    public boolean isReady() {
+        return ready;
+    }
+
+    /**
+     * Makes the accumulator ready to process the next batch.
+     */
+    public void ready() {
+        ready = true;
+    }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkflowManager.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkflowManager.java
new file mode 100644
index 0000000..66bd709
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkflowManager.java
@@ -0,0 +1,282 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.impl;
+
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.JsonNodeFactory;
+import com.fasterxml.jackson.databind.node.JsonNodeType;
+import com.fasterxml.jackson.databind.node.MissingNode;
+import org.apache.felix.scr.annotations.Activate;
+import org.apache.felix.scr.annotations.Component;
+import org.apache.felix.scr.annotations.Deactivate;
+import org.apache.felix.scr.annotations.Reference;
+import org.apache.felix.scr.annotations.ReferenceCardinality;
+import org.onosproject.net.config.NetworkConfigRegistry;
+import org.onosproject.net.config.NetworkConfigService;
+import org.onosproject.workflow.api.WorkflowService;
+import org.onosproject.workflow.api.WorkflowExecutionService;
+import org.onosproject.workflow.api.WorkplaceStore;
+import org.onosproject.workflow.api.WorkflowStore;
+import org.onosproject.workflow.api.WorkplaceDescription;
+import org.onosproject.workflow.api.WorkflowException;
+import org.onosproject.workflow.api.DefaultWorkplace;
+import org.onosproject.workflow.api.JsonDataModelTree;
+import org.onosproject.workflow.api.WorkflowDescription;
+import org.onosproject.workflow.api.Workplace;
+import org.onosproject.workflow.api.WorkflowDataModelException;
+import org.onosproject.workflow.api.Workflow;
+import org.onosproject.workflow.api.Worklet;
+import org.onosproject.workflow.api.WorkflowContext;
+import org.onosproject.workflow.api.JsonDataModel;
+import org.slf4j.Logger;
+
+import java.lang.annotation.Annotation;
+import java.lang.reflect.Field;
+import java.net.URI;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import static org.slf4j.LoggerFactory.getLogger;
+
+@Component(immediate = true)
+public class WorkflowManager implements WorkflowService {
+
+    protected static final Logger log = getLogger(WorkflowManager.class);
+
+    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+    private WorkflowExecutionService workflowExecutionService;
+
+    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+    protected WorkplaceStore workplaceStore;
+
+    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+    protected WorkflowStore workflowStore;
+
+    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+    private NetworkConfigService networkConfigService;
+
+    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+    private NetworkConfigRegistry networkConfigRegistry;
+
+    private WorkflowNetConfigListener netcfgListener;
+
+    @Activate
+    public void activate() {
+        netcfgListener = new WorkflowNetConfigListener(this);
+        networkConfigRegistry.registerConfigFactory(netcfgListener.getConfigFactory());
+        networkConfigService.addListener(netcfgListener);
+        log.info("Started");
+    }
+
+    @Deactivate
+    public void deactivate() {
+        networkConfigService.removeListener(netcfgListener);
+        networkConfigRegistry.unregisterConfigFactory(netcfgListener.getConfigFactory());
+        log.info("Stopped");
+    }
+
+    @Override
+    public void createWorkplace(WorkplaceDescription wpDesc) throws WorkflowException {
+        log.info("createWorkplace: {}", wpDesc);
+
+        JsonNode root;
+        if (wpDesc.data().isPresent()) {
+            root = wpDesc.data().get();
+        } else {
+            root = JsonNodeFactory.instance.objectNode();
+        }
+        DefaultWorkplace workplace =
+                new DefaultWorkplace(wpDesc.name(), new JsonDataModelTree(root));
+        workplaceStore.registerWorkplace(wpDesc.name(), workplace);
+    }
+
+    @Override
+    public void removeWorkplace(WorkplaceDescription wpDesc) throws WorkflowException {
+        log.info("removeWorkplace: {}", wpDesc);
+        // TODO: Remove the workflow contexts belonging to this workplace
+        workplaceStore.removeWorkplace(wpDesc.name());
+    }
+
+    @Override
+    public void clearWorkplace() throws WorkflowException {
+        log.info("clearWorkplace");
+        workplaceStore.getWorkplaces().stream()
+                .filter(wp -> !Objects.equals(wp.name(), Workplace.SYSTEM_WORKPLACE))
+                .forEach(wp -> workplaceStore.removeWorkplace(wp.name()));
+    }
+
+    @Override
+    public void invokeWorkflow(WorkflowDescription wfDesc) throws WorkflowException {
+        invokeWorkflow(wfDesc.toJson());
+    }
+
+    @Override
+    public void invokeWorkflow(JsonNode workflowDescJson) throws WorkflowException {
+        log.info("invokeWorkflow: {}", workflowDescJson);
+        Workplace workplace = workplaceStore.getWorkplace(Workplace.SYSTEM_WORKPLACE);
+        if (Objects.isNull(workplace)) {
+            throw new WorkflowException("Invalid system workplace");
+        }
+
+        Workflow workflow = workflowStore.get(URI.create(workflowDescJson.get("id").asText()));
+        if (Objects.isNull(workflow)) {
+            throw new WorkflowException("Invalid Workflow");
+        }
+
+        checkWorkflowSchema(workflow, workflowDescJson);
+
+        Workflow wfCreationWf = workflowStore.get(URI.create(WorkplaceWorkflow.WF_CREATE_WORKFLOW));
+        if (Objects.isNull(wfCreationWf)) {
+            throw new WorkflowException("Invalid workflow " + WorkplaceWorkflow.WF_CREATE_WORKFLOW);
+        }
+
+        WorkflowContext context = wfCreationWf.buildSystemContext(workplace, new JsonDataModelTree(workflowDescJson));
+        workflowExecutionService.execInitWorklet(context);
+    }
+
+    /**
+     * Checks whether the data model fields declared by the workflow's worklets match the
+     * types supplied in the workflow description JSON.
+     *
+     * @param workflow workflow to check against
+     * @param workflowDescJson workflow description JSON node
+     * @throws WorkflowException workflow exception
+     */
+    private void checkWorkflowSchema(Workflow workflow, JsonNode workflowDescJson) throws WorkflowException {
+
+        List<String> errors = new ArrayList<>();
+
+        JsonNode dataNode = workflowDescJson.get("data");
+        if (Objects.isNull(dataNode) || dataNode instanceof MissingNode) {
+            errors.add("workflow description JSON does not have 'data'");
+            throw new WorkflowDataModelException(workflow.id(), workflowDescJson, errors);
+        }
+
+        for (String workletType : workflow.getWorkletTypeList()) {
+
+            Worklet worklet = workflow.getWorkletInstance(workletType);
+            if (Worklet.Common.COMPLETED.equals(worklet) || Worklet.Common.INIT.equals(worklet)) {
+                continue;
+            }
+
+            Class<?> cls = worklet.getClass();
+            for (Field field : cls.getDeclaredFields()) {
+
+                if (field.isSynthetic()) {
+                    continue;
+                }
+
+                for (Annotation annotation : field.getAnnotations()) {
+
+                    if (annotation instanceof JsonDataModel) {
+
+                        JsonDataModel jsonDataModel = (JsonDataModel) annotation;
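+                        // Extracts the first path segment of the data model path
+                        // (e.g. "/count" -> "count") to look it up under the 'data' node.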
+                        Matcher matcher = Pattern.compile("(\\w+)").matcher(jsonDataModel.path());
+                        if (!matcher.find()) {
+                            throw new WorkflowException(
+                                    "Invalid Json Data Model Path(" + jsonDataModel.path() + ") in " + worklet.tag());
+                        }
+                        String path = matcher.group(1);
+
+                        Optional<String> optError =
+                                getJsonNodeDataError(dataNode, worklet, field, path, jsonDataModel.optional());
+
+                        if (optError.isPresent()) {
+                            errors.add(optError.get());
+                        }
+                    }
+                }
+            }
+        }
+
+        if (!errors.isEmpty()) {
+            throw new WorkflowDataModelException(workflow.id(), workflowDescJson, errors);
+        }
+    }
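+
+    /*
+     * Illustrative workflow description JSON accepted by the schema check above for a worklet
+     * declaring '@JsonDataModel(path = MODEL_COUNT) Integer intCount' with MODEL_COUNT = "/count"
+     * (see SampleWorklet1). Only "id" and "data" are inspected here; any further fields (such as
+     * the target workplace) follow DefaultWorkflowDescription, which is not shown in this change:
+     *
+     *   {
+     *     "id": "sample.workflow-0",
+     *     "data": { "count": 0 }
+     *   }
+     */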
+
+    private Optional<String> getJsonNodeDataError(
+            JsonNode dataNode, Worklet worklet, Field field, String path, boolean isOptional) throws WorkflowException {
+
+        // Checking the existence of path in dataNode
+        JsonNode pathNode = dataNode.get(path);
+        if (Objects.isNull(pathNode) || pathNode instanceof MissingNode) {
+
+            if (isOptional) {
+                return Optional.empty();
+
+            } else {
+                return Optional.of("data doesn't have '" + path + "' in worklet<" + worklet.tag() + ">");
+            }
+        }
+
+        // Checking the type of path
+        JsonNodeType type = pathNode.getNodeType();
+
+        if (Objects.isNull(type)) {
+            throw new WorkflowException("Invalid type for " + pathNode);
+        }
+
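+        // The JSON node type must be assignable to the Java type of the annotated field.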
+        switch (type) {
+            case NUMBER:
+                if (!(field.getType().isAssignableFrom(Integer.class))) {
+                    return Optional.of("'" + path + "<NUMBER>' cannot be assigned to " +
+                            field.getName() + "<" + field.getType() + "> in worklet<" + worklet.tag() + ">");
+                }
+                break;
+            case STRING:
+                if (!(field.getType().isAssignableFrom(String.class))) {
+                    return Optional.of("'" + path + "<STRING>' cannot be assigned to " +
+                            field.getName() + "<" + field.getType() + "> in worklet<" + worklet.tag() + ">");
+                }
+                break;
+            case BOOLEAN:
+                if (!(field.getType().isAssignableFrom(Boolean.class))) {
+                    return Optional.of("'" + path + "<BOOLEAN>' cannot be assigned to " +
+                            field.getName() + "<" + field.getType() + "> in worklet<" + worklet.tag() + ">");
+                }
+                break;
+            case OBJECT:
+                if (!(field.getType().isAssignableFrom(JsonNode.class))) {
+                    return Optional.of("'" + path + "<OBJECT>' cannot be assigned to " +
+                            field.getName() + "<" + field.getType() + "> in worklet<" + worklet.tag() + ">");
+                }
+                break;
+            case ARRAY:
+                if (!(field.getType().isAssignableFrom(ArrayNode.class))) {
+                    return Optional.of("'" + path + "<ARRAY>' cannot be assigned to " +
+                            field.getName() + "<" + field.getType() + "> in worklet<" + worklet.tag() + ">");
+                }
+                break;
+            default:
+                return Optional.of("'" + path + "<" + type + ">' is not supported");
+        }
+
+        return Optional.empty();
+    }
+
+    @Override
+    public void terminateWorkflow(WorkflowDescription wfDesc) throws WorkflowException {
+        log.info("terminateWorkflow: {}", wfDesc);
+        if (Objects.nonNull(workplaceStore.getContext(wfDesc.workflowContextName()))) {
+            workplaceStore.removeContext(wfDesc.workflowContextName());
+        }
+    }
+}
\ No newline at end of file
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkflowNetConfig.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkflowNetConfig.java
new file mode 100644
index 0000000..2bca555
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkflowNetConfig.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.impl;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import org.onosproject.core.ApplicationId;
+import org.onosproject.net.config.Config;
+import org.onosproject.workflow.api.DefaultRpcDescription;
+import org.onosproject.workflow.api.RpcDescription;
+import org.onosproject.workflow.api.WorkflowException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+
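+/*
+ * Illustrative network configuration consumed by this class. The "rpc" array is read via the
+ * "/rpc" pointer below. The "op" and "params" field names and the parameter payloads are
+ * assumptions (RpcDescription and the description classes are not part of this hunk), and the
+ * application name is a placeholder:
+ *
+ *   "apps": {
+ *     "<workflow-app-name>": {
+ *       "workflow": {
+ *         "rpc": [
+ *           { "op": "workplace.create", "params": { "name": "workplace-1" } },
+ *           { "op": "workflow.invoke",
+ *             "params": { "id": "sample.workflow-0", "data": { "count": 0 } } }
+ *         ]
+ *       }
+ *     }
+ *   }
+ */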
+public class WorkflowNetConfig extends Config<ApplicationId> {
+
+    private static final Logger log = LoggerFactory.getLogger(WorkflowNetConfig.class);
+
+    /**
+     * JSON pointer to the workflow RPC array within this configuration.
+     */
+    private static final String RPC_PTR = "/rpc";
+
+    public Collection<RpcDescription> getRpcDescriptions() throws WorkflowException {
+
+        JsonNode node = object.at(RPC_PTR);
+        if (!(node instanceof ArrayNode)) {
+            throw new WorkflowException("invalid rpc for " + object);
+        }
+        ArrayNode rpcArrayNode = (ArrayNode) node;
+
+        List<RpcDescription> rpcDescriptions = new ArrayList<>();
+        for (JsonNode rpcNode : rpcArrayNode) {
+            rpcDescriptions.add(DefaultRpcDescription.valueOf(rpcNode));
+        }
+
+        return rpcDescriptions;
+    }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkflowNetConfigListener.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkflowNetConfigListener.java
new file mode 100644
index 0000000..bb7f875
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkflowNetConfigListener.java
@@ -0,0 +1,140 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.impl;
+
+import org.onosproject.core.ApplicationId;
+import org.onosproject.net.config.ConfigFactory;
+import org.onosproject.net.config.NetworkConfigEvent;
+import org.onosproject.net.config.NetworkConfigListener;
+import org.onosproject.net.config.basics.SubjectFactories;
+import org.onosproject.workflow.api.DefaultWorkflowDescription;
+import org.onosproject.workflow.api.RpcDescription;
+import org.onosproject.workflow.api.WorkflowService;
+import org.onosproject.workflow.api.WorkflowException;
+import org.onosproject.workflow.api.DefaultWorkplaceDescription;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.ScheduledExecutorService;
+
+import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor;
+import static org.onlab.util.Tools.groupedThreads;
+
+public class WorkflowNetConfigListener implements NetworkConfigListener {
+
+    private static final Logger log = LoggerFactory.getLogger(WorkflowNetConfigListener.class);
+
+    public static final String CONFIG_KEY = "workflow";
+    public static final String EXECUTOR_GROUPNAME = "onos/workflow-netcfg";
+    public static final String EXECUTOR_PATTERN = "netcfg-event-handler";
+
+    private final ConfigFactory<ApplicationId, WorkflowNetConfig> configFactory =
+            new ConfigFactory<ApplicationId, WorkflowNetConfig>(
+                    SubjectFactories.APP_SUBJECT_FACTORY, WorkflowNetConfig.class, CONFIG_KEY) {
+                @Override
+                public WorkflowNetConfig createConfig() {
+                    return new WorkflowNetConfig();
+                }
+            };
+
+    private final WorkflowService workflowService;
+
+    private final ScheduledExecutorService executor =
+            newSingleThreadScheduledExecutor(groupedThreads(EXECUTOR_GROUPNAME, EXECUTOR_PATTERN));
+
+    public WorkflowNetConfigListener(WorkflowService workflowService) {
+        this.workflowService = workflowService;
+    }
+
+    public ConfigFactory<ApplicationId, WorkflowNetConfig> getConfigFactory() {
+        return configFactory;
+    }
+
+    @Override
+    public boolean isRelevant(NetworkConfigEvent event) {
+        return event.config().isPresent() && event.config().get() instanceof WorkflowNetConfig;
+    }
+
+    @Override
+    public void event(NetworkConfigEvent event) {
+        log.info("Configuration event: {}", event);
+        switch (event.type()) {
+            case CONFIG_ADDED:
+            case CONFIG_UPDATED:
+                if (!event.config().isPresent()) {
+                    log.error("No configuration found");
+                    return;
+                }
+                WorkflowNetConfig config = (WorkflowNetConfig) event.config().get();
+
+                // Single-threaded executor, so no locking is required
+                executor.execute(new Handler(workflowService, config));
+                break;
+            default:
+                break;
+        }
+    }
+
+    public static class Handler implements Runnable {
+
+        private final WorkflowService workflowService;
+        private final WorkflowNetConfig config;
+
+        public Handler(WorkflowService workflowService, WorkflowNetConfig config) {
+            this.workflowService = workflowService;
+            this.config = config;
+        }
+
+        @Override
+        public void run() {
+
+            try {
+                Collection<RpcDescription> rpcs = config.getRpcDescriptions();
+                log.info("" + rpcs);
+                for (RpcDescription rpc : rpcs) {
+                    if (!rpcMap.containsKey(rpc.op())) {
+                        log.error("Invalid RPC: {}", rpc);
+                        continue;
+                    }
+
+                    rpcMap.get(rpc.op()).apply(this.workflowService, rpc);
+                }
+            } catch (WorkflowException e) {
+                log.error("Exception: ", e);
+            }
+        }
+    }
+
+    @FunctionalInterface
+    public interface RpcCall {
+        void apply(WorkflowService workflowService, RpcDescription rpcDesc) throws WorkflowException;
+    }
+
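+    // Dispatch table mapping netcfg RPC operation names to WorkflowService calls.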
+    private static final Map<String, RpcCall> rpcMap = new HashMap<>();
+    static {
+        rpcMap.put("workplace.create",
+                (service, desc) -> service.createWorkplace(DefaultWorkplaceDescription.valueOf(desc.params())));
+        rpcMap.put("workplace.remove",
+                (service, desc) -> service.removeWorkplace(DefaultWorkplaceDescription.valueOf(desc.params())));
+        rpcMap.put("workflow.invoke",
+                (service, desc) -> service.invokeWorkflow(desc.params()));
+        rpcMap.put("workflow.terminate",
+                (service, desc) -> service.terminateWorkflow(DefaultWorkflowDescription.valueOf(desc.params())));
+    }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkplaceWorkflow.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkplaceWorkflow.java
new file mode 100644
index 0000000..219436c
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkplaceWorkflow.java
@@ -0,0 +1,222 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.impl;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.BooleanNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import org.onosproject.event.Event;
+import org.onosproject.workflow.api.AbstractWorklet;
+import org.onosproject.workflow.api.DefaultWorkflowContext;
+import org.onosproject.workflow.api.DefaultWorkplace;
+import org.onosproject.workflow.api.ImmutableListWorkflow;
+import org.onosproject.workflow.api.JsonDataModelTree;
+import org.onosproject.workflow.api.SystemWorkflowContext;
+import org.onosproject.workflow.api.Workflow;
+import org.onosproject.workflow.api.WorkflowAttribute;
+import org.onosproject.workflow.api.WorkflowContext;
+import org.onosproject.workflow.api.WorkflowData;
+import org.onosproject.workflow.api.WorkflowDataEvent;
+import org.onosproject.workflow.api.DefaultWorkflowDescription;
+import org.onosproject.workflow.api.WorkflowDescription;
+import org.onosproject.workflow.api.WorkflowException;
+import org.onosproject.workflow.api.WorkflowExecutionService;
+import org.onosproject.workflow.api.WorkflowStore;
+import org.onosproject.workflow.api.Workplace;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.net.URI;
+import java.util.Objects;
+
+import static org.onosproject.workflow.api.WorkflowDataEvent.Type.INSERT;
+
+public class WorkplaceWorkflow {
+
+    private static final Logger log = LoggerFactory.getLogger(WorkplaceWorkflow.class);
+
+    private WorkflowExecutionService workflowExecutionService;
+    private WorkflowStore workflowStore;
+
+    public WorkplaceWorkflow(WorkflowExecutionService workflowExecutionService,
+                             WorkflowStore workflowStore) {
+        this.workflowExecutionService = workflowExecutionService;
+        this.workflowStore = workflowStore;
+    }
+
+    public static final String WF_CREATE_WORKFLOW = "workplace.create-workflow";
+
+    public void registerWorkflows() {
+
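+        // System workflow that creates the target workplace (if missing) and then registers the
+        // requested workflow context; it is removed once it completes.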
+        Workflow workflow = ImmutableListWorkflow.builder()
+                .id(URI.create(WF_CREATE_WORKFLOW))
+                .attribute(WorkflowAttribute.REMOVE_AFTER_COMPLETE)
+                .init(ChangeDistributor.class.getName())
+                .chain(CreateWorkplace.class.getName())
+                .chain(CreateWorkflowContext.class.getName())
+                .build();
+        workflowStore.register(workflow);
+    }
+
+    public abstract static class AbsWorkflowWorklet extends AbstractWorklet {
+
+        protected WorkflowDescription getWorkflowDesc(WorkflowContext context) throws WorkflowException {
+
+            JsonNode root = ((JsonDataModelTree) context.data()).root();
+            return DefaultWorkflowDescription.valueOf(root);
+        }
+    }
+
+    public static class ChangeDistributor extends AbsWorkflowWorklet {
+
+        @Override
+        public void process(WorkflowContext context) throws WorkflowException {
+
+            String workplaceName = getWorkflowDesc(context).workplaceName();
+            // Sets the workflow job distribution hash so that this workflow is executed on the
+            // same cluster node that handles this workplace's tasks.
+            ((SystemWorkflowContext) context).setDistributor(workplaceName);
+
+            context.completed();
+        }
+    }
+
+    public static class CreateWorkplace extends AbsWorkflowWorklet {
+
+        @Override
+        public boolean isNext(WorkflowContext context) throws WorkflowException {
+
+            WorkflowDescription wfDesc = getWorkflowDesc(context);
+
+            Workplace workplace = context.workplaceStore().getWorkplace(wfDesc.workplaceName());
+            return Objects.isNull(workplace);
+        }
+
+        @Override
+        public void process(WorkflowContext context) throws WorkflowException {
+
+            WorkflowDescription wfDesc = getWorkflowDesc(context);
+
+            // creates workplace with empty data model
+            DefaultWorkplace workplace = new DefaultWorkplace(wfDesc.workplaceName(), new JsonDataModelTree());
+            log.info("registerWorkplace {}", workplace);
+            context.waitCompletion(WorkflowDataEvent.class, wfDesc.workplaceName(),
+                    () -> context.workplaceStore().registerWorkplace(wfDesc.workplaceName(), workplace),
+                    60000L
+            );
+        }
+
+        @Override
+        public boolean isCompleted(WorkflowContext context, Event event) throws WorkflowException {
+
+            if (!(event instanceof WorkflowDataEvent)) {
+                return false;
+            }
+
+            WorkflowDataEvent wfEvent = (WorkflowDataEvent) event;
+            WorkflowData wfData = wfEvent.subject();
+
+            WorkflowDescription wfDesc = getWorkflowDesc(context);
+
+            if (wfData instanceof Workplace
+                    && Objects.equals(wfData.name(), wfDesc.workplaceName())
+                    && wfEvent.type() == INSERT) {
+                log.info("isCompleted(true): event:{}, context:{}, workplace:{}",
+                        event, context, wfDesc.workplaceName());
+                return true;
+            } else {
+                log.info("isCompleted(false) event:{}, context:{}, workplace:{}",
+                        event, context, wfDesc.workplaceName());
+                return false;
+            }
+        }
+
+        @Override
+        public void timeout(WorkflowContext context) throws WorkflowException {
+            if (!isNext(context)) {
+                context.completed(); // Complete the worklet's job on timeout
+            } else {
+                super.timeout(context);
+            }
+        }
+    }
+
+    public static class CreateWorkflowContext extends AbsWorkflowWorklet {
+
+        private static final String SUBMITTED = "submitted";
+
+        private boolean isSubmitted(WorkflowContext context) throws WorkflowException {
+            JsonNode node = ((JsonDataModelTree) context.data()).nodeAt("/" + SUBMITTED);
+            if (!(node instanceof BooleanNode)) {
+                return false;
+            }
+            return node.asBoolean();
+        }
+
+        private void submitTrue(WorkflowContext context) throws WorkflowException {
+            JsonNode root = ((JsonDataModelTree) context.data()).root();
+            if (!(root instanceof ObjectNode)) {
+                throw new WorkflowException("Invalid root node for " + context);
+            }
+            ((ObjectNode) root).put(SUBMITTED, true);
+        }
+
+        @Override
+        public boolean isNext(WorkflowContext context) throws WorkflowException {
+
+            WorkflowDescription wfDesc = getWorkflowDesc(context);
+
+            String contextName = DefaultWorkflowContext.nameBuilder(wfDesc.id(), wfDesc.workplaceName());
+            if (Objects.isNull(context.workplaceStore().getContext(contextName))) {
+                return (!isSubmitted(context));
+            } else {
+                return false;
+            }
+        }
+
+        @Override
+        public void process(WorkflowContext context) throws WorkflowException {
+
+            WorkflowDescription wfDesc = getWorkflowDesc(context);
+
+            Workplace workplace = context.workplaceStore().getWorkplace(wfDesc.workplaceName());
+            if (Objects.isNull(workplace)) {
+
+                log.error("Failed to find workplace with " + wfDesc.workplaceName());
+                throw new WorkflowException("Failed to find workplace with " + wfDesc.workplaceName());
+            }
+
+            Workflow workflow = context.workflowStore().get(wfDesc.id());
+            if (Objects.isNull(workflow)) {
+                throw new WorkflowException("Failed to find workflow with " + wfDesc.id());
+            }
+
+            //String contextName = DefaultWorkflowContext.nameBuilder(wfDesc.id(), wfDesc.workplaceName());
+            String contextName = wfDesc.workflowContextName();
+            if (Objects.nonNull(context.workplaceStore().getContext(contextName))) {
+                throw new WorkflowException(contextName + " already exists");
+            }
+
+            JsonDataModelTree subTree = new JsonDataModelTree(wfDesc.data());
+            WorkflowContext buildingContext = workflow.buildContext(workplace, subTree);
+            log.info("registerContext {}", buildingContext.name());
+            context.workplaceStore().registerContext(buildingContext.name(), buildingContext);
+            submitTrue(context);
+
+            context.completed();
+        }
+    }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/example/SampleWorkflow.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/example/SampleWorkflow.java
new file mode 100644
index 0000000..9da6d60
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/example/SampleWorkflow.java
@@ -0,0 +1,345 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.impl.example;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.JsonNodeType;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import org.apache.felix.scr.annotations.Activate;
+import org.apache.felix.scr.annotations.Component;
+import org.apache.felix.scr.annotations.Deactivate;
+import org.apache.felix.scr.annotations.Reference;
+import org.apache.felix.scr.annotations.ReferenceCardinality;
+import org.onosproject.net.device.DeviceService;
+import org.onosproject.workflow.api.AbstractWorklet;
+import org.onosproject.workflow.api.DataModelTree;
+import org.onosproject.workflow.api.ImmutableListWorkflow;
+import org.onosproject.workflow.api.JsonDataModel;
+import org.onosproject.workflow.api.JsonDataModelTree;
+import org.onosproject.workflow.api.Workflow;
+import org.onosproject.workflow.api.WorkflowContext;
+import org.onosproject.workflow.api.WorkflowException;
+import org.onosproject.workflow.api.WorkflowExecutionService;
+import org.onosproject.workflow.api.WorkflowStore;
+import org.onosproject.workflow.api.WorkplaceStore;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.net.URI;
+
+/**
+ * Class for sample workflow.
+ */
+@Component(immediate = true)
+public class SampleWorkflow {
+
+    private static final Logger log = LoggerFactory.getLogger(SampleWorkflow.class);
+
+    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+    protected WorkflowStore workflowStore;
+
+    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+    protected WorkplaceStore workplaceStore;
+
+    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+    protected WorkflowExecutionService workflowExecutionService;
+
+    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+    protected DeviceService deviceService;
+
+
+    @Activate
+    public void activate() {
+        log.info("Activated");
+
+        registerWorkflows();
+
+    }
+
+    @Deactivate
+    public void deactivate() {
+        log.info("Deactivated");
+    }
+
+    /**
+     * Registers example workflows.
+     */
+    private void registerWorkflows() {
+        // registering class-loader
+        workflowStore.registerLocal(this.getClass().getClassLoader());
+
+        // registering new workflow definition
+        URI uri = URI.create("sample.workflow-0");
+        Workflow workflow = ImmutableListWorkflow.builder()
+                .id(uri)
+                .chain(SampleWorklet1.class.getName())
+                .chain(SampleWorklet2.class.getName())
+                .chain(SampleWorklet3.class.getName())
+                .chain(SampleWorklet4.class.getName())
+                .chain(SampleWorklet5.class.getName())
+                .build();
+        workflowStore.register(workflow);
+
+        // registering new workflow definition
+        uri = URI.create("sample.workflow-1");
+        workflow = ImmutableListWorkflow.builder()
+                .id(uri)
+                .chain(SampleWorklet3.class.getName())
+                .chain(SampleWorklet2.class.getName())
+                .chain(SampleWorklet1.class.getName())
+                .chain(SampleWorklet4.class.getName())
+                .chain(SampleWorklet5.class.getName())
+                .build();
+        workflowStore.register(workflow);
+
+        // registering new workflow definition
+        uri = URI.create("sample.workflow-2");
+        workflow = ImmutableListWorkflow.builder()
+                .id(uri)
+                .chain(SampleWorklet1.class.getName())
+                .chain(SampleWorklet3.class.getName())
+                .chain(SampleWorklet2.class.getName())
+                .chain(SampleWorklet4.class.getName())
+                .chain(SampleWorklet5.class.getName())
+                .build();
+        workflowStore.register(workflow);
+
+        // registering new workflow definition
+        uri = URI.create("sample.workflow-3");
+        workflow = ImmutableListWorkflow.builder()
+                .id(uri)
+                .chain(SampleWorklet6.class.getName())
+                .build();
+        workflowStore.register(workflow);
+
+    }
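+
+    /*
+     * The sample workflows above expect an integer "/count" entry in the invoking description's
+     * "data" section (see the schema check in WorkflowManager), e.g. "data": { "count": 0 };
+     * each worklet then records its step under "/sample/job" as it runs.
+     */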
+
+    /**
+     * Abstract class for sample worklet.
+     */
+    public abstract static class AbsSampleWorklet extends AbstractWorklet {
+
+        protected static final String MODEL_SAMPLE_JOB = "/sample/job";
+        protected static final String MODEL_COUNT = "/count";
+
+        /**
+         * Constructor for sample worklet.
+         */
+        protected AbsSampleWorklet() {
+
+        }
+
+        /**
+         * Allocates the sample job node in the data model if missing, and returns it.
+         * @param context workflow context
+         * @return json object node
+         * @throws WorkflowException workflow exception
+         */
+        protected ObjectNode allocOrGetModel(WorkflowContext context) throws WorkflowException {
+
+            JsonDataModelTree tree = (JsonDataModelTree) context.data();
+            JsonNode params = tree.root();
+
+            if (params.at(MODEL_SAMPLE_JOB).getNodeType() == JsonNodeType.MISSING) {
+                tree.alloc(MODEL_SAMPLE_JOB, DataModelTree.Nodetype.MAP);
+            }
+            return (ObjectNode) params.at(MODEL_SAMPLE_JOB);
+        }
+
+        /**
+         * Gets data model.
+         * @param context workflow context
+         * @return json object node
+         * @throws WorkflowException workflow exception
+         */
+        protected ObjectNode getDataModel(WorkflowContext context) throws WorkflowException {
+            DataModelTree tree = context.data();
+            return ((JsonDataModelTree) tree.subtree(MODEL_SAMPLE_JOB)).rootObject();
+        }
+
+        /**
+         * Sleeps for 'ms' milliseconds.
+         * @param ms milliseconds to sleep
+         */
+        protected void sleep(long ms) {
+            try {
+                Thread.sleep(ms);
+            } catch (InterruptedException e) {
+                log.error("Exception: ", e);
+                Thread.currentThread().interrupt();
+            }
+        }
+    }
+
+    /**
+     * Class for sample worklet-1.
+     */
+    public static class SampleWorklet1 extends AbsSampleWorklet {
+
+        @JsonDataModel(path = MODEL_COUNT)
+        Integer intCount;
+
+        @Override
+        public void process(WorkflowContext context) throws WorkflowException {
+            ObjectNode node = getDataModel(context);
+            node.put("work1", "done");
+            log.info("workflow-process {}-{}", context.workplaceName(), this.getClass().getSimpleName());
+            sleep(30);
+
+            context.completed(); // Complete the worklet's job within process()
+        }
+
+        @Override
+        public boolean isNext(WorkflowContext context) throws WorkflowException {
+            ObjectNode node = allocOrGetModel(context);
+            log.info("workflow-isNext {}-{}", context.workplaceName(), this.getClass().getSimpleName());
+            sleep(30);
+            return !node.has("work1");
+
+        }
+    }
+
+    /**
+     * Class for sample worklet-2 (using timeout).
+     */
+    public static class SampleWorklet2 extends AbsSampleWorklet {
+
+        @JsonDataModel(path = MODEL_COUNT)
+        Integer intCount;
+
+        @Override
+        public void process(WorkflowContext context) throws WorkflowException {
+            ObjectNode node = getDataModel(context);
+            node.put("work2", "done");
+            log.info("workflow-process {}-{}", context.workplaceName(), this.getClass().getSimpleName());
+            sleep(50);
+            intCount++;
+
+            context.waitFor(50L); // A timeout will occur after 50 milliseconds
+        }
+
+        @Override
+        public void timeout(WorkflowContext context) throws WorkflowException {
+            context.completed(); // Complete the worklet's job on timeout
+        }
+
+        @Override
+        public boolean isNext(WorkflowContext context) throws WorkflowException {
+            ObjectNode node = allocOrGetModel(context);
+            log.info("workflow-isNext {}-{}", context.workplaceName(), this.getClass().getSimpleName());
+            sleep(50);
+            return !node.has("work2");
+        }
+    }
+
+    public static class SampleWorklet3 extends AbsSampleWorklet {
+
+        @JsonDataModel(path = MODEL_COUNT)
+        Integer intCount;
+
+        @Override
+        public void process(WorkflowContext context) throws WorkflowException {
+            ObjectNode node = getDataModel(context);
+            node.put("work3", "done");
+            log.info("workflow-process {}-{}", context.workplaceName(), this.getClass().getSimpleName());
+            sleep(10);
+            intCount++;
+            context.completed();
+        }
+
+        @Override
+        public boolean isNext(WorkflowContext context) throws WorkflowException {
+            ObjectNode node = allocOrGetModel(context);
+            log.info("workflow-isNext {}-{}", context.workplaceName(), this.getClass().getSimpleName());
+            sleep(10);
+            return !node.has("work3");
+        }
+    }
+
+    public static class SampleWorklet4 extends AbsSampleWorklet {
+
+        @JsonDataModel(path = MODEL_COUNT)
+        Integer intCount;
+
+        @Override
+        public void process(WorkflowContext context) throws WorkflowException {
+            ObjectNode node = getDataModel(context);
+            node.put("work4", "done");
+            log.info("workflow-process {}-{}", context.workplaceName(), this.getClass().getSimpleName());
+            sleep(10);
+            intCount++;
+            context.completed();
+        }
+
+        @Override
+        public boolean isNext(WorkflowContext context) throws WorkflowException {
+            ObjectNode node = allocOrGetModel(context);
+            log.info("workflow-isNext {}-{}", context.workplaceName(), this.getClass().getSimpleName());
+            sleep(10);
+            return !node.has("work4");
+        }
+    }
+
+    public static class SampleWorklet5 extends AbsSampleWorklet {
+
+        @JsonDataModel(path = MODEL_COUNT)
+        Integer intCount;
+
+        @Override
+        public void process(WorkflowContext context) throws WorkflowException {
+            ObjectNode node = getDataModel(context);
+            node.put("work5", "done");
+            log.info("workflow-process {}-{}", context.workplaceName(), this.getClass().getSimpleName());
+            sleep(10);
+            intCount++;
+            context.completed();
+        }
+
+        @Override
+        public boolean isNext(WorkflowContext context) throws WorkflowException {
+            ObjectNode node = allocOrGetModel(context);
+            log.info("workflow-isNext {}-{}", context.workplaceName(), this.getClass().getSimpleName());
+            sleep(10);
+            return !node.has("work5");
+        }
+    }
+
+    /**
+     * Class for sample worklet-6, used to exercise the workflow data model exception.
+     */
+    public static class SampleWorklet6 extends AbsSampleWorklet {
+
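+        // Declares a String for the numeric "/count" model on purpose, so that the schema check
+        // in WorkflowManager reports a type mismatch for this workflow.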
+        @JsonDataModel(path = MODEL_COUNT)
+        String str;
+
+        @Override
+        public void process(WorkflowContext context) throws WorkflowException {
+            ObjectNode node = getDataModel(context);
+            node.put("work6", "done");
+            log.info("workflow-process {}-{}", context.workplaceName(), this.getClass().getSimpleName());
+            sleep(10);
+            context.completed();
+        }
+
+        @Override
+        public boolean isNext(WorkflowContext context) throws WorkflowException {
+            ObjectNode node = allocOrGetModel(context);
+            log.info("workflow-isNext {}-{}", context.workplaceName(), this.getClass().getSimpleName());
+            sleep(10);
+            return !node.has("work6");
+        }
+    }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/example/package-info.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/example/package-info.java
new file mode 100644
index 0000000..2fbc9549a
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/example/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Workflow example package.
+ */
+package org.onosproject.workflow.impl.example;
\ No newline at end of file
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/package-info.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/package-info.java
new file mode 100644
index 0000000..f82c452
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Workflow implementation package.
+ */
+package org.onosproject.workflow.impl;
\ No newline at end of file