[ONOS-7732] Automating switch workflow: api, app, and sample workflows
Change-Id: Iee87d4fe6cf61c1f8904d1d77df5f913a712b64a
diff --git a/apps/workflow/app/BUCK b/apps/workflow/app/BUCK
new file mode 100644
index 0000000..aa0e62a
--- /dev/null
+++ b/apps/workflow/app/BUCK
@@ -0,0 +1,16 @@
+COMPILE_DEPS = [
+ '//lib:CORE_DEPS',
+ '//lib:KRYO',
+ '//lib:jsch',
+ '//lib:org.apache.karaf.shell.console',
+ '//lib:jackson-core',
+ '//lib:jackson-annotations',
+ '//lib:jackson-databind',
+ '//cli:onos-cli',
+ '//core/store/serializers:onos-core-serializers',
+ '//apps/workflow/api:onos-apps-workflow-api',
+]
+
+osgi_jar_with_tests(
+ deps = COMPILE_DEPS,
+)
diff --git a/apps/workflow/app/BUILD b/apps/workflow/app/BUILD
new file mode 100644
index 0000000..97ef1b4
--- /dev/null
+++ b/apps/workflow/app/BUILD
@@ -0,0 +1,8 @@
+COMPILE_DEPS = CORE_DEPS + KRYO + JACKSON + CLI + [
+ "//core/store/serializers:onos-core-serializers",
+ "//apps/workflow/api:onos-apps-workflow-api",
+]
+
+osgi_jar_with_tests(
+ deps = COMPILE_DEPS,
+)
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowCommand.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowCommand.java
new file mode 100644
index 0000000..94b099b
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowCommand.java
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.cli;
+
+import com.fasterxml.jackson.databind.node.JsonNodeFactory;
+import org.apache.karaf.shell.commands.Argument;
+import org.apache.karaf.shell.commands.Command;
+import org.onosproject.cli.AbstractShellCommand;
+import org.onosproject.workflow.api.DefaultWorkflowDescription;
+import org.onosproject.workflow.api.WorkflowService;
+import org.onosproject.workflow.api.WorkflowException;
+
+import java.util.Arrays;
+import java.util.Objects;
+
+@Command(scope = "onos", name = "workflow", description = "workflow cli")
+public class WorkFlowCommand extends AbstractShellCommand {
+
+ @Argument (index = 0, name = "cmd", description = "command(invoke)", required = true)
+ private String cmd = null;
+
+ @Argument (index = 1, name = "id", description = "workflow id(URI)", required = true)
+ private String id = null;
+
+ @Argument (index = 2, name = "name", description = "workplace name", required = true)
+ private String name = null;
+
+ @Override
+ protected void execute() {
+ if (Objects.isNull(cmd)) {
+ error("invalid cmd parameter");
+ return;
+ }
+
+ switch (cmd) {
+ case "invoke":
+ if (Objects.isNull(id)) {
+ error("invalid workflow id parameter");
+ return;
+ }
+
+ if (Objects.isNull(name)) {
+ error("invalid workplace name parameter");
+ return;
+ }
+
+ invoke(id, name);
+ break;
+ default:
+ print("Unsupported cmd: " + cmd);
+ }
+ }
+
+ /**
+ * Invokes workflow.
+ * @param workflowId workflow id
+ * @param workplaceName workplace name
+ */
+ private void invoke(String workflowId, String workplaceName) {
+
+ WorkflowService service = get(WorkflowService.class);
+ DefaultWorkflowDescription wfDesc = DefaultWorkflowDescription.builder()
+ .workplaceName(workplaceName)
+ .id(workflowId)
+ .data(JsonNodeFactory.instance.objectNode())
+ .build();
+ try {
+ service.invokeWorkflow(wfDesc);
+ } catch (WorkflowException e) {
+ error(e.getMessage() + ", trace: " + Arrays.asList(e.getStackTrace()));
+ }
+ }
+}
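As a usage sketch, the command above would be run from the ONOS shell as follows (the workplace name is hypothetical; "sample.workflow-0" is the sample workflow id used by the test command later in this change):

    onos> workflow invoke sample.workflow-0 my-workplace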
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowEventMapCommand.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowEventMapCommand.java
new file mode 100644
index 0000000..0a8e492
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowEventMapCommand.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.cli;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.karaf.shell.commands.Argument;
+import org.apache.karaf.shell.commands.Command;
+import org.onosproject.cli.AbstractShellCommand;
+import org.onosproject.workflow.api.ContextEventMapStore;
+import org.onosproject.workflow.api.WorkflowException;
+
+import java.util.Arrays;
+import java.util.Objects;
+
+@Command(scope = "onos", name = "workflow-eventmap", description = "workflow event map cli")
+public class WorkFlowEventMapCommand extends AbstractShellCommand {
+
+ @Argument (index = 0, name = "cmd", description = "command(print)", required = true)
+ private String cmd = null;
+
+ @Override
+ protected void execute() {
+ if (Objects.isNull(cmd)) {
+ error("invalid cmd parameter");
+ return;
+ }
+
+ ContextEventMapStore store = get(ContextEventMapStore.class);
+ try {
+ switch (cmd) {
+ case "print":
+ JsonNode tree = store.asJsonTree();
+ ObjectMapper mapper = new ObjectMapper();
+ try {
+ print(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(tree));
+ } catch (JsonProcessingException e) {
+ error(e.getMessage() + ", trace: " + Arrays.asList(e.getStackTrace()));
+ }
+ break;
+
+ default:
+ print("Unsupported cmd: " + cmd);
+ }
+ } catch (WorkflowException e) {
+ error(e.getMessage() + ", trace: " + Arrays.asList(e.getStackTrace()));
+ }
+ }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowStoreCommand.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowStoreCommand.java
new file mode 100644
index 0000000..820d141
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowStoreCommand.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.cli;
+
+import org.apache.karaf.shell.commands.Argument;
+import org.apache.karaf.shell.commands.Command;
+import org.onosproject.cli.AbstractShellCommand;
+import org.onosproject.workflow.api.Workflow;
+import org.onosproject.workflow.api.WorkflowStore;
+
+import java.util.Objects;
+import java.net.URI;
+
+@Command(scope = "onos", name = "workflowstore", description = "workflow store cli")
+public class WorkFlowStoreCommand extends AbstractShellCommand {
+
+ @Argument (index = 0, name = "cmd", description = "command(rm)", required = false)
+ private String cmd = null;
+
+ @Argument (index = 1, name = "id", description = "workflow id(URI)", required = false)
+ private String id = null;
+
+ @Override
+ protected void execute() {
+
+ if (Objects.isNull(cmd)) {
+ printAllWorkflow();
+ return;
+ }
+
+ if (Objects.isNull(id)) {
+ print("invalid id");
+ return;
+ }
+
+ switch (cmd) {
+ case "rm":
+ rmWorkflow(id);
+ break;
+ default:
+ print("Unsupported cmd: " + cmd);
+ }
+ }
+
+ private void rmWorkflow(String id) {
+ WorkflowStore workflowStore = get(WorkflowStore.class);
+ workflowStore.unregister(URI.create(id));
+ }
+
+ private void printAllWorkflow() {
+ WorkflowStore workflowStore = get(WorkflowStore.class);
+ for (Workflow workflow : workflowStore.getAll()) {
+ print(getWorkflowString(workflow));
+ }
+ }
+
+ private String getWorkflowString(Workflow workflow) {
+ return workflow.toString();
+ }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowTestCommand.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowTestCommand.java
new file mode 100644
index 0000000..090dad0
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkFlowTestCommand.java
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.cli;
+
+import com.fasterxml.jackson.databind.node.JsonNodeFactory;
+import org.apache.karaf.shell.commands.Argument;
+import org.apache.karaf.shell.commands.Command;
+import org.onosproject.cli.AbstractShellCommand;
+import org.onosproject.workflow.api.DefaultWorkflowDescription;
+import org.onosproject.workflow.api.WorkflowException;
+import org.onosproject.workflow.api.WorkflowService;
+
+import java.util.Arrays;
+import java.util.Objects;
+
+@Command(scope = "onos", name = "workflow-test", description = "workflow test cli")
+public class WorkFlowTestCommand extends AbstractShellCommand {
+
+ @Argument (index = 0, name = "cmd", description = "command(invoke)", required = true)
+ private String cmd = null;
+
+ @Argument (index = 1, name = "number", description = "number of test", required = true)
+ private String number = null;
+
+ @Override
+ protected void execute() {
+ if (Objects.isNull(cmd)) {
+ error("invalid cmd parameter");
+ return;
+ }
+
+ switch (cmd) {
+ case "invoke":
+ if (Objects.isNull(number)) {
+ error("invalid number of test");
+ return;
+ }
+
+ int num;
+ try {
+ num = Integer.parseInt(number);
+ } catch (Exception e) {
+ error(e.getMessage() + ", trace: " + Arrays.asList(e.getStackTrace()));
+ return;
+ }
+
+ test(num);
+ break;
+ default:
+ print("Unsupported cmd: " + cmd);
+ }
+ }
+
+ /**
+ * Invokes workflow.
+ * @param workflowId workflow id
+ * @param workplaceName workplace name
+ */
+ private void invoke(String workflowId, String workplaceName) {
+
+ WorkflowService service = get(WorkflowService.class);
+ DefaultWorkflowDescription wfDesc = DefaultWorkflowDescription.builder()
+ .workplaceName(workplaceName)
+ .id(workflowId)
+ .data(JsonNodeFactory.instance.objectNode())
+ .build();
+ try {
+ service.invokeWorkflow(wfDesc);
+ } catch (WorkflowException e) {
+ error(e.getMessage() + "trace: " + Arrays.asList(e.getStackTrace()));
+ }
+ }
+
+ /**
+ * Workflow invoke test.
+ * @param num the number of workflows to test
+ */
+ private void test(int num) {
+ for (int i = 0; i <= num; i++) {
+ String wpName = "test-" + i;
+ invoke("sample.workflow-0", wpName);
+ }
+ }
+}
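A matching shell usage sketch; per the loop above, each invocation targets the sample "sample.workflow-0" workflow on workplaces named "test-0", "test-1", and so on:

    onos> workflow-test invoke 10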
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkplaceStoreCommand.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkplaceStoreCommand.java
new file mode 100644
index 0000000..ded5882
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/WorkplaceStoreCommand.java
@@ -0,0 +1,195 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.cli;
+
+import org.apache.karaf.shell.commands.Argument;
+import org.apache.karaf.shell.commands.Command;
+import org.apache.karaf.shell.commands.Option;
+import org.onosproject.cli.AbstractShellCommand;
+import org.onosproject.workflow.api.WorkflowContext;
+import org.onosproject.workflow.api.WorkflowException;
+import org.onosproject.workflow.api.WorkflowService;
+import org.onosproject.workflow.api.Workplace;
+import org.onosproject.workflow.api.DefaultWorkplaceDescription;
+import org.onosproject.workflow.api.WorkplaceStore;
+
+import java.util.Arrays;
+import java.util.Objects;
+
+@Command(scope = "onos", name = "workplace",
+ description = "workplace cli")
+public class WorkplaceStoreCommand extends AbstractShellCommand {
+
+ @Argument (index = 0, name = "cmd", description = "command(add/rm/clear/print)", required = false)
+ private String cmd = null;
+
+ @Argument (index = 1, name = "name", description = "workspace name", required = false)
+ private String name = null;
+
+ @Option(name = "-f", aliases = "--filter", description = "including filter",
+ required = false, multiValued = false)
+ private String inFilter = null;
+
+ @Option(name = "-e", aliases = "--excludefilter", description = "excluding filter",
+ required = false, multiValued = false)
+ private String exFilter = null;
+
+ @Override
+ protected void execute() {
+
+ if (Objects.isNull(cmd)) {
+ printAllWorkplace();
+ return;
+ }
+
+ switch (cmd) {
+ case "add":
+ if (Objects.isNull(name)) {
+ error("invalid name");
+ return;
+ }
+ addEmptyWorkplace(name);
+ return;
+
+ case "rm":
+ if (Objects.isNull(name)) {
+ print("invalid name");
+ return;
+ }
+ rmWorkplace(name);
+ break;
+
+ case "clear":
+ clearWorkplace();
+ break;
+
+ case "print":
+ if (Objects.isNull(name)) {
+ print("invalid name");
+ return;
+ }
+ printWorkplace(name);
+ break;
+
+ default:
+ print("Unsupported cmd: " + cmd);
+ }
+ }
+
+ /**
+ * Adds empty workplace.
+ * @param name workplace name
+ */
+ private void addEmptyWorkplace(String name) {
+ WorkflowService service = get(WorkflowService.class);
+ DefaultWorkplaceDescription wpDesc = DefaultWorkplaceDescription.builder()
+ .name(name)
+ .build();
+ try {
+ service.createWorkplace(wpDesc);
+ } catch (WorkflowException e) {
+ error(e.getMessage() + ", trace: " + Arrays.asList(e.getStackTrace()));
+ }
+ }
+
+ /**
+ * Clears all workplaces.
+ */
+ private void clearWorkplace() {
+ WorkflowService service = get(WorkflowService.class);
+ try {
+ service.clearWorkplace();
+ } catch (WorkflowException e) {
+ error(e.getMessage() + ", trace: " + Arrays.asList(e.getStackTrace()));
+ }
+ }
+
+ /**
+ * Removes workplace.
+ * @param name workplace name to remove
+ */
+ private void rmWorkplace(String name) {
+ WorkflowService service = get(WorkflowService.class);
+ DefaultWorkplaceDescription wpDesc = DefaultWorkplaceDescription.builder()
+ .name(name)
+ .build();
+ try {
+ service.removeWorkplace(wpDesc);
+ } catch (WorkflowException e) {
+ error(e.getMessage() + ", trace: " + Arrays.asList(e.getStackTrace()));
+ }
+ }
+
+ /**
+ * Prints workplace.
+ * @param name workplace name
+ */
+ private void printWorkplace(String name) {
+ WorkplaceStore workplaceStore = get(WorkplaceStore.class);
+ Workplace workplace = workplaceStore.getWorkplace(name);
+ print(getWorkplaceString(workplace));
+ }
+
+ /**
+ * Prints all workplaces.
+ */
+ private void printAllWorkplace() {
+ WorkplaceStore workplaceStore = get(WorkplaceStore.class);
+ for (Workplace workplace : workplaceStore.getWorkplaces()) {
+ print(getWorkplaceString(workplace));
+ printWorkplaceContexts(workplaceStore, workplace.name());
+ }
+ }
+
+ /**
+ * Prints contexts of workplace.
+ * @param workplaceStore workplace store service
+ * @param workplaceName workplace name
+ */
+ private void printWorkplaceContexts(WorkplaceStore workplaceStore, String workplaceName) {
+ for (WorkflowContext context : workplaceStore.getWorkplaceContexts(workplaceName)) {
+ String str = context.toString();
+ if (Objects.nonNull(inFilter) && !str.contains(inFilter)) {
+ continue;
+ }
+ if (Objects.nonNull(exFilter) && str.contains(exFilter)) {
+ continue;
+ }
+ print(" - " + context);
+ }
+ }
+
+ /**
+ * Gets workplace string.
+ * @param workplace workplace
+ * @return workplace string
+ */
+ private String getWorkplaceString(Workplace workplace) {
+ return workplace.toString();
+ }
+}
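Usage sketches for the workplace command (the workplace name and filter strings are hypothetical); with no arguments it prints every workplace and its contexts, optionally narrowed by the include (-f) and exclude (-e) filters:

    onos> workplace add guest-workplace
    onos> workplace print guest-workplace
    onos> workplace -f guest-workplace -e EXCEPTION
    onos> workplace rm guest-workplace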
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/package-info.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/package-info.java
new file mode 100644
index 0000000..0383d04
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/cli/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Workflow cli package.
+ */
+package org.onosproject.workflow.cli;
\ No newline at end of file
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/DistributedContextEventMapTreeStore.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/DistributedContextEventMapTreeStore.java
new file mode 100644
index 0000000..b7c7366
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/DistributedContextEventMapTreeStore.java
@@ -0,0 +1,198 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.impl;
+
+import com.fasterxml.jackson.databind.node.JsonNodeFactory;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import org.apache.felix.scr.annotations.Activate;
+import org.apache.felix.scr.annotations.Component;
+import org.apache.felix.scr.annotations.Deactivate;
+import org.apache.felix.scr.annotations.Reference;
+import org.apache.felix.scr.annotations.ReferenceCardinality;
+import org.apache.felix.scr.annotations.Service;
+import org.onlab.util.KryoNamespace;
+import org.onosproject.core.ApplicationId;
+import org.onosproject.core.CoreService;
+import org.onosproject.store.serializers.KryoNamespaces;
+import org.onosproject.store.service.AsyncDocumentTree;
+import org.onosproject.store.service.DocumentPath;
+import org.onosproject.store.service.IllegalDocumentModificationException;
+import org.onosproject.store.service.NoSuchDocumentPathException;
+import org.onosproject.store.service.Ordering;
+import org.onosproject.store.service.Serializer;
+import org.onosproject.store.service.StorageService;
+import org.onosproject.store.service.Versioned;
+import org.onosproject.workflow.api.ContextEventMapStore;
+import org.onosproject.workflow.api.WorkflowException;
+import org.slf4j.Logger;
+
+import java.util.Map;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
+
+import static org.slf4j.LoggerFactory.getLogger;
+
+@Component(immediate = true)
+@Service
+public class DistributedContextEventMapTreeStore implements ContextEventMapStore {
+
+ protected static final Logger log = getLogger(DistributedContextEventMapTreeStore.class);
+
+ @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+ private CoreService coreService;
+
+ @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+ private StorageService storageService;
+
+ private ApplicationId appId;
+
+ private AsyncDocumentTree<String> eventMapTree;
+
+ @Activate
+ public void activate() {
+
+ appId = coreService.registerApplication("org.onosproject.contexteventmapstore");
+ log.info("appId=" + appId);
+
+ KryoNamespace eventMapNamespace = KryoNamespace.newBuilder()
+ .register(KryoNamespaces.API)
+ .build();
+
+ eventMapTree = storageService.<String>documentTreeBuilder()
+ .withSerializer(Serializer.using(eventMapNamespace))
+ .withName("context-event-map-store")
+ .withOrdering(Ordering.INSERTION)
+ .buildDocumentTree();
+ log.info("Started");
+ }
+
+ @Deactivate
+ public void deactivate() {
+ eventMapTree.destroy();
+ log.info("Stopped");
+ }
+
+ @Override
+ public void registerEventMap(String eventType, String eventHint,
+ String contextName, String workletType) throws WorkflowException {
+ DocumentPath dpath = DocumentPath.from(Lists.newArrayList("root", eventType, eventHint, contextName));
+ String currentWorkletType = completeVersioned(eventMapTree.get(dpath));
+ if (currentWorkletType == null) {
+ complete(eventMapTree.createRecursive(dpath, workletType));
+ } else {
+ complete(eventMapTree.replace(dpath, workletType, currentWorkletType));
+ }
+ }
+
+ @Override
+ public void unregisterEventMap(String eventType, String eventHint, String contextName) throws WorkflowException {
+ DocumentPath contextPath = DocumentPath.from(Lists.newArrayList("root", eventType, eventHint, contextName));
+ complete(eventMapTree.removeNode(contextPath));
+ }
+
+ @Override
+ public Map<String, String> getEventMap(String eventType, String eventHint) throws WorkflowException {
+ DocumentPath path = DocumentPath.from(Lists.newArrayList("root", eventType, eventHint));
+ Map<String, Versioned<String>> contexts = complete(eventMapTree.getChildren(path));
+ Map<String, String> eventMap = Maps.newHashMap();
+ if (Objects.isNull(contexts)) {
+ return eventMap;
+ }
+
+ for (Map.Entry<String, Versioned<String>> entry : contexts.entrySet()) {
+ eventMap.put(entry.getKey(), entry.getValue().value());
+ }
+ return eventMap;
+ }
+
+ @Override
+ public Map<String, Versioned<String>> getChildren(String path) throws WorkflowException {
+ DocumentPath dpath = DocumentPath.from(path);
+ Map<String, Versioned<String>> entries = complete(eventMapTree.getChildren(dpath));
+ return entries;
+ }
+
+ @Override
+ public DocumentPath getDocumentPath(String path) throws WorkflowException {
+ DocumentPath dpath = DocumentPath.from(path);
+ return dpath;
+ }
+
+ @Override
+ public ObjectNode asJsonTree() throws WorkflowException {
+
+ DocumentPath rootPath = DocumentPath.from(Lists.newArrayList("root"));
+ Map<String, Versioned<String>> eventmap = complete(eventMapTree.getChildren(rootPath));
+
+ ObjectNode rootNode = JsonNodeFactory.instance.objectNode();
+
+ for (Map.Entry<String, Versioned<String>> eventTypeEntry : eventmap.entrySet()) {
+
+ String eventType = eventTypeEntry.getKey();
+
+ ObjectNode eventTypeNode = JsonNodeFactory.instance.objectNode();
+ rootNode.put(eventType, eventTypeNode);
+
+ DocumentPath eventTypePath = DocumentPath.from(Lists.newArrayList("root", eventType));
+ Map<String, Versioned<String>> hintmap = complete(eventMapTree.getChildren(eventTypePath));
+
+ for (Map.Entry<String, Versioned<String>> hintEntry : hintmap.entrySet()) {
+
+ String hint = hintEntry.getKey();
+
+ ObjectNode hintNode = JsonNodeFactory.instance.objectNode();
+ eventTypeNode.put(hint, hintNode);
+
+ DocumentPath hintPath = DocumentPath.from(Lists.newArrayList("root", eventType, hint));
+ Map<String, Versioned<String>> contextmap = complete(eventMapTree.getChildren(hintPath));
+
+ for (Map.Entry<String, Versioned<String>> ctxtEntry : contextmap.entrySet()) {
+ hintNode.put(ctxtEntry.getKey(), ctxtEntry.getValue().value());
+ }
+ }
+ }
+
+ return rootNode;
+ }
+
+ private <T> T complete(CompletableFuture<T> future) throws WorkflowException {
+ try {
+ return future.get();
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ throw new WorkflowException(e.getCause().getMessage());
+ } catch (ExecutionException e) {
+ if (e.getCause() instanceof IllegalDocumentModificationException) {
+ throw new WorkflowException("Node or parent does not exist or is root or is not a Leaf Node",
+ e.getCause());
+ } else if (e.getCause() instanceof NoSuchDocumentPathException) {
+ return null;
+ } else {
+ throw new WorkflowException("Datastore operation failed", e.getCause());
+ }
+ }
+ }
+
+ private <T> T completeVersioned(CompletableFuture<Versioned<T>> future) throws WorkflowException {
+ return Optional.ofNullable(complete(future))
+ .map(Versioned::value)
+ .orElse(null);
+ }
+}
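For illustration, a minimal sketch of how a client might drive this store (the event class, hint, context, and worklet names are hypothetical). Entries live in the document tree as root/<eventType>/<eventHint>/<contextName> = <workletType>, which is also the JSON shape returned by asJsonTree():

    // Sketch only: 'store' is an injected ContextEventMapStore reference.
    void registerSampleMapping(ContextEventMapStore store) throws WorkflowException {
        store.registerEventMap("org.onosproject.net.device.DeviceEvent", "of:0000000000000001",
                "sample-context", "org.example.SampleWorklet");
        Map<String, String> contextToWorklet =
                store.getEventMap("org.onosproject.net.device.DeviceEvent", "of:0000000000000001");
        // contextToWorklet now maps "sample-context" to "org.example.SampleWorklet"
        store.unregisterEventMap("org.onosproject.net.device.DeviceEvent",
                "of:0000000000000001", "sample-context");
    }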
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/DistributedWorkplaceStore.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/DistributedWorkplaceStore.java
new file mode 100644
index 0000000..ca8a454
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/DistributedWorkplaceStore.java
@@ -0,0 +1,394 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.impl;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.BaseJsonNode;
+import com.fasterxml.jackson.databind.node.BigIntegerNode;
+import com.fasterxml.jackson.databind.node.BinaryNode;
+import com.fasterxml.jackson.databind.node.BooleanNode;
+import com.fasterxml.jackson.databind.node.ContainerNode;
+import com.fasterxml.jackson.databind.node.DecimalNode;
+import com.fasterxml.jackson.databind.node.DoubleNode;
+import com.fasterxml.jackson.databind.node.FloatNode;
+import com.fasterxml.jackson.databind.node.IntNode;
+import com.fasterxml.jackson.databind.node.JsonNodeCreator;
+import com.fasterxml.jackson.databind.node.JsonNodeFactory;
+import com.fasterxml.jackson.databind.node.JsonNodeType;
+import com.fasterxml.jackson.databind.node.LongNode;
+import com.fasterxml.jackson.databind.node.MissingNode;
+import com.fasterxml.jackson.databind.node.NullNode;
+import com.fasterxml.jackson.databind.node.NumericNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.fasterxml.jackson.databind.node.POJONode;
+import com.fasterxml.jackson.databind.node.ShortNode;
+import com.fasterxml.jackson.databind.node.TextNode;
+import com.fasterxml.jackson.databind.node.ValueNode;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Maps;
+import org.apache.felix.scr.annotations.Activate;
+import org.apache.felix.scr.annotations.Component;
+import org.apache.felix.scr.annotations.Deactivate;
+import org.apache.felix.scr.annotations.Reference;
+import org.apache.felix.scr.annotations.ReferenceCardinality;
+import org.apache.felix.scr.annotations.Service;
+import org.onlab.util.KryoNamespace;
+import org.onosproject.core.ApplicationId;
+import org.onosproject.core.CoreService;
+import org.onosproject.workflow.api.DataModelTree;
+import org.onosproject.workflow.api.DefaultWorkplace;
+import org.onosproject.workflow.api.DefaultWorkflowContext;
+import org.onosproject.workflow.api.JsonDataModelTree;
+import org.onosproject.workflow.api.SystemWorkflowContext;
+import org.onosproject.workflow.api.WorkflowContext;
+import org.onosproject.workflow.api.WorkflowData;
+import org.onosproject.workflow.api.WorkflowState;
+import org.onosproject.workflow.api.Workplace;
+import org.onosproject.workflow.api.WorkflowDataEvent;
+import org.onosproject.workflow.api.WorkplaceStore;
+import org.onosproject.workflow.api.WorkplaceStoreDelegate;
+import org.onosproject.store.AbstractStore;
+import org.onosproject.store.serializers.KryoNamespaces;
+import org.onosproject.store.service.ConsistentMap;
+import org.onosproject.store.service.MapEvent;
+import org.onosproject.store.service.MapEventListener;
+import org.onosproject.store.service.Serializer;
+import org.onosproject.store.service.StorageException;
+import org.onosproject.store.service.StorageService;
+import org.onosproject.store.service.Versioned;
+import org.slf4j.Logger;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.slf4j.LoggerFactory.getLogger;
+
+@Component(immediate = true)
+@Service
+public class DistributedWorkplaceStore
+ extends AbstractStore<WorkflowDataEvent, WorkplaceStoreDelegate> implements WorkplaceStore {
+
+ @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+ protected CoreService coreService;
+
+ @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+ protected StorageService storageService;
+
+ private ApplicationId appId;
+ private final Logger log = getLogger(getClass());
+
+ private final WorkplaceMapListener workplaceMapEventListener = new WorkplaceMapListener();
+ private ConsistentMap<String, WorkflowData> workplaceMap;
+ private Map<String, Workplace> localWorkplaceMap = Maps.newConcurrentMap();
+
+ private final WorkflowContextMapListener contextMapEventListener = new WorkflowContextMapListener();
+ private ConsistentMap<String, WorkflowData> contextMap;
+ private Map<String, WorkflowContext> localContextMap = Maps.newConcurrentMap();
+
+ private Map<String, Map<String, WorkflowContext>> localWorkplaceMemberMap = Maps.newConcurrentMap();
+
+ @Activate
+ public void activate() {
+
+ appId = coreService.registerApplication("org.onosproject.workplacestore");
+ log.info("appId=" + appId);
+
+ KryoNamespace workplaceNamespace = KryoNamespace.newBuilder()
+ .register(KryoNamespaces.API)
+ .register(WorkflowData.class)
+ .register(Workplace.class)
+ .register(DefaultWorkplace.class)
+ .register(WorkflowContext.class)
+ .register(DefaultWorkflowContext.class)
+ .register(SystemWorkflowContext.class)
+ .register(WorkflowState.class)
+ .register(DataModelTree.class)
+ .register(JsonDataModelTree.class)
+ .register(List.class)
+ .register(ArrayList.class)
+ .register(JsonNode.class)
+ .register(ObjectNode.class)
+ .register(TextNode.class)
+ .register(LinkedHashMap.class)
+ .register(ArrayNode.class)
+ .register(BaseJsonNode.class)
+ .register(BigIntegerNode.class)
+ .register(BinaryNode.class)
+ .register(BooleanNode.class)
+ .register(ContainerNode.class)
+ .register(DecimalNode.class)
+ .register(DoubleNode.class)
+ .register(FloatNode.class)
+ .register(IntNode.class)
+ .register(JsonNodeType.class)
+ .register(LongNode.class)
+ .register(MissingNode.class)
+ .register(NullNode.class)
+ .register(NumericNode.class)
+ .register(POJONode.class)
+ .register(ShortNode.class)
+ .register(ValueNode.class)
+ .register(JsonNodeCreator.class)
+ .register(JsonNodeFactory.class)
+ .build();
+
+ localWorkplaceMap.clear();
+ workplaceMap = storageService.<String, WorkflowData>consistentMapBuilder()
+ .withSerializer(Serializer.using(workplaceNamespace))
+ .withName("workplace-map")
+ .withApplicationId(appId)
+ .build();
+ workplaceMap.addListener(workplaceMapEventListener);
+
+ localContextMap.clear();
+ contextMap = storageService.<String, WorkflowData>consistentMapBuilder()
+ .withSerializer(Serializer.using(workplaceNamespace))
+ .withName("workflow-context-map")
+ .withApplicationId(appId)
+ .build();
+ contextMap.addListener(contextMapEventListener);
+
+ workplaceMapEventListener.syncLocal();
+ contextMapEventListener.syncLocal();
+ log.info("Started");
+ }
+
+ @Deactivate
+ public void deactivate() {
+ workplaceMap.destroy();
+ localWorkplaceMap.clear();
+ contextMap.destroy();
+ localContextMap.clear();
+
+ log.info("Stopped");
+ }
+
+ @Override
+ public void registerWorkplace(String name, Workplace workplace) throws StorageException {
+ workplaceMap.put(name, workplace);
+ }
+
+ @Override
+ public void removeWorkplace(String name) throws StorageException {
+ removeWorkplaceContexts(name);
+ workplaceMap.remove(name);
+ }
+
+ @Override
+ public Workplace getWorkplace(String name) throws StorageException {
+ return localWorkplaceMap.get(name);
+ }
+
+ @Override
+ public Collection<Workplace> getWorkplaces() throws StorageException {
+ return ImmutableList.copyOf(localWorkplaceMap.values());
+ }
+
+ @Override
+ public void commitWorkplace(String name, Workplace workplace, boolean handleEvent) throws StorageException {
+ workplace.setTriggerNext(handleEvent);
+ if (workplaceMap.containsKey(name)) {
+ workplaceMap.replace(name, workplace);
+ } else {
+ registerWorkplace(name, workplace);
+ }
+ }
+
+ @Override
+ public void registerContext(String name, WorkflowContext context) throws StorageException {
+ contextMap.put(name, context);
+ }
+
+ @Override
+ public void removeContext(String name) throws StorageException {
+ contextMap.remove(name);
+ }
+
+ @Override
+ public WorkflowContext getContext(String name) throws StorageException {
+ return localContextMap.get(name);
+ }
+
+ @Override
+ public void commitContext(String name, WorkflowContext context, boolean handleEvent) throws StorageException {
+ context.setTriggerNext(handleEvent);
+ if (contextMap.containsKey(name)) {
+ contextMap.replace(name, context);
+ } else {
+ registerContext(name, context);
+ }
+ }
+
+ @Override
+ public Collection<WorkflowContext> getContexts() throws StorageException {
+ return ImmutableList.copyOf(localContextMap.values());
+ }
+
+ @Override
+ public Collection<WorkflowContext> getWorkplaceContexts(String workplaceName) {
+ Map<String, WorkflowContext> ctxMap = localWorkplaceMemberMap.get(workplaceName);
+ if (ctxMap == null) {
+ return Collections.emptyList();
+ }
+
+ return ImmutableList.copyOf(ctxMap.values());
+ }
+
+ @Override
+ public void removeWorkplaceContexts(String workplaceName) {
+ for (WorkflowContext ctx : getWorkplaceContexts(workplaceName)) {
+ removeContext(ctx.name());
+ }
+ }
+
+ private class WorkplaceMapListener implements MapEventListener<String, WorkflowData> {
+
+ @Override
+ public void event(MapEvent<String, WorkflowData> event) {
+
+ Workplace newWorkplace = (Workplace) Versioned.valueOrNull(event.newValue());
+ Workplace oldWorkplace = (Workplace) Versioned.valueOrNull(event.oldValue());
+
+ log.info("WorkplaceMap event: {}", event);
+ switch (event.type()) {
+ case INSERT:
+ insert(newWorkplace);
+ notifyDelegate(new WorkflowDataEvent(WorkflowDataEvent.Type.INSERT, newWorkplace));
+ break;
+ case UPDATE:
+ update(newWorkplace);
+ notifyDelegate(new WorkflowDataEvent(WorkflowDataEvent.Type.UPDATE, newWorkplace));
+ break;
+ case REMOVE:
+ remove(oldWorkplace);
+ notifyDelegate(new WorkflowDataEvent(WorkflowDataEvent.Type.REMOVE, oldWorkplace));
+ break;
+ default:
+ }
+ }
+
+ private void insert(Workplace workplace) {
+ localWorkplaceMap.put(workplace.name(), workplace);
+ }
+
+ private void update(Workplace workplace) {
+ localWorkplaceMap.replace(workplace.name(), workplace);
+ }
+
+ private void remove(Workplace workplace) {
+ localWorkplaceMap.remove(workplace.name());
+ }
+
+ public void syncLocal() {
+ workplaceMap.values().stream().forEach(
+ x -> insert((Workplace) (x.value()))
+ );
+ }
+ }
+
+ private class WorkflowContextMapListener implements MapEventListener<String, WorkflowData> {
+
+ @Override
+ public void event(MapEvent<String, WorkflowData> event) {
+
+ WorkflowContext newContext = (WorkflowContext) Versioned.valueOrNull(event.newValue());
+ WorkflowContext oldContext = (WorkflowContext) Versioned.valueOrNull(event.oldValue());
+
+ log.info("WorkflowContext event: {}", event);
+ switch (event.type()) {
+ case INSERT:
+ insert(newContext);
+ notifyDelegate(new WorkflowDataEvent(WorkflowDataEvent.Type.INSERT, newContext));
+ break;
+ case UPDATE:
+ update(newContext);
+ notifyDelegate(new WorkflowDataEvent(WorkflowDataEvent.Type.UPDATE, newContext));
+ break;
+ case REMOVE:
+ remove(oldContext);
+ notifyDelegate(new WorkflowDataEvent(WorkflowDataEvent.Type.REMOVE, oldContext));
+ break;
+ default:
+ }
+ }
+
+ /**
+ * Inserts a workflow context into the local maps.
+ * @param context workflow context
+ */
+ private void insert(WorkflowContext context) {
+ String workplaceName = context.workplaceName();
+ Map<String, WorkflowContext> ctxMap = localWorkplaceMemberMap.get(workplaceName);
+ if (ctxMap == null) {
+ ctxMap = new HashMap<>();
+ localWorkplaceMemberMap.put(workplaceName, ctxMap);
+ }
+ ctxMap.put(context.name(), context);
+
+ localContextMap.put(context.name(), context);
+ }
+
+ /**
+ * Updates a workflow context in the local maps.
+ * @param context workflow context
+ */
+ private void update(WorkflowContext context) {
+ String workplaceName = context.workplaceName();
+ Map<String, WorkflowContext> ctxMap = localWorkplaceMemberMap.get(workplaceName);
+ if (ctxMap == null) {
+ ctxMap = new HashMap<>();
+ localWorkplaceMemberMap.put(workplaceName, ctxMap);
+ }
+ ctxMap.put(context.name(), context);
+
+ localContextMap.put(context.name(), context);
+ }
+
+ /**
+ * Removes a workflow context from the local maps.
+ * @param context workflow context
+ */
+ private void remove(WorkflowContext context) {
+ localContextMap.remove(context.name());
+
+ String workplaceName = context.workplaceName();
+ Map<String, WorkflowContext> ctxMap = localWorkplaceMemberMap.get(workplaceName);
+ if (ctxMap == null) {
+ log.error("remove-context: Failed to find workplace({}) in localWorkplaceMemberMap", workplaceName);
+ return;
+ }
+ ctxMap.remove(context.name());
+ if (ctxMap.size() == 0) {
+ localWorkplaceMemberMap.remove(workplaceName, ctxMap);
+ }
+ }
+
+ /**
+ * Synchronizes local hash map.
+ */
+ public void syncLocal() {
+ contextMap.values().stream().forEach(
+ x -> insert((WorkflowContext) (x.value()))
+ );
+ }
+ }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/ECWorkFlowStore.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/ECWorkFlowStore.java
new file mode 100644
index 0000000..cd7df77
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/ECWorkFlowStore.java
@@ -0,0 +1,155 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.impl;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Sets;
+import org.apache.felix.scr.annotations.Activate;
+import org.apache.felix.scr.annotations.Component;
+import org.apache.felix.scr.annotations.Deactivate;
+import org.apache.felix.scr.annotations.Reference;
+import org.apache.felix.scr.annotations.ReferenceCardinality;
+import org.apache.felix.scr.annotations.Service;
+import org.onlab.util.KryoNamespace;
+import org.onosproject.core.ApplicationId;
+import org.onosproject.core.CoreService;
+import org.onosproject.workflow.api.AbstractWorkflow;
+import org.onosproject.workflow.api.ImmutableListWorkflow;
+import org.onosproject.workflow.api.Workflow;
+import org.onosproject.workflow.api.WorkflowAttribute;
+import org.onosproject.workflow.api.WorkflowStore;
+import org.onosproject.net.group.GroupEvent;
+import org.onosproject.net.group.GroupStoreDelegate;
+import org.onosproject.store.AbstractStore;
+import org.onosproject.store.serializers.KryoNamespaces;
+import org.onosproject.store.service.EventuallyConsistentMap;
+import org.onosproject.store.service.StorageService;
+import org.onosproject.store.service.WallClockTimestamp;
+import org.slf4j.Logger;
+
+import java.net.URI;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import java.util.List;
+
+import static org.slf4j.LoggerFactory.getLogger;
+
+@Component(immediate = true)
+@Service
+public class ECWorkFlowStore
+ extends AbstractStore<GroupEvent, GroupStoreDelegate> implements WorkflowStore {
+
+ private final Logger log = getLogger(getClass());
+
+ @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+ protected CoreService coreService;
+
+ @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+ protected StorageService storageService;
+
+ private ApplicationId appId;
+ private EventuallyConsistentMap<URI, Workflow> workflowStore;
+ private Set<ClassLoader> classloaders = Sets.newConcurrentHashSet();
+
+ @Activate
+ public void activate() {
+
+ appId = coreService.registerApplication("org.onosproject.nfconfig");
+ log.info("appId=" + appId);
+
+ KryoNamespace.Builder workflowSerializer = KryoNamespace.newBuilder()
+ .register(KryoNamespaces.API)
+ .register(URI.class)
+ .register(Workflow.class)
+ .register(AbstractWorkflow.class)
+ .register(ImmutableListWorkflow.class)
+ .register(List.class)
+ .register(ImmutableList.class)
+ .register(Class.class)
+ .register(WorkflowAttribute.class)
+ .register(Set.class)
+ .register(ImmutableSet.class)
+ .register(HashSet.class);
+
+ workflowStore = storageService.<URI, Workflow>eventuallyConsistentMapBuilder()
+ .withName("workflow-workplaceStore")
+ .withSerializer(workflowSerializer)
+ .withAntiEntropyPeriod(5, TimeUnit.SECONDS)
+ .withTimestampProvider((k, v) -> new WallClockTimestamp())
+ .withTombstonesDisabled()
+ .build();
+
+ classloaders.add(this.getClass().getClassLoader());
+
+ log.info("Started");
+ }
+
+ @Deactivate
+ public void deactivate() {
+ workflowStore.destroy();
+
+ log.info("Stopped");
+ }
+
+ @Override
+ public void register(Workflow workflow) {
+ workflowStore.put(workflow.id(), workflow);
+ }
+
+ @Override
+ public void unregister(URI id) {
+ workflowStore.remove(id);
+ }
+
+ @Override
+ public Workflow get(URI id) {
+ return workflowStore.get(id);
+ }
+
+ @Override
+ public Collection<Workflow> getAll() {
+ return workflowStore.values();
+ }
+
+ @Override
+ public void registerLocal(ClassLoader loader) {
+ classloaders.add(loader);
+ }
+
+ @Override
+ public void unregisterLocal(ClassLoader loader) {
+ classloaders.remove(loader);
+ }
+
+ @Override
+ public Class getClass(String name) throws ClassNotFoundException {
+ for (ClassLoader loader : classloaders) {
+ Class cl = null;
+ try {
+ cl = loader.loadClass(name);
+ } catch (ClassNotFoundException e) {
+ // do nothing
+ }
+ if (cl != null) {
+ return cl;
+ }
+ }
+ throw new ClassNotFoundException(name);
+ }
+}
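A brief lookup sketch against the store above; "sample.workflow-0" is the id used by the workflow-test command, and get(WorkflowStore.class) mirrors how the CLI commands in this change obtain the service:

    WorkflowStore store = get(WorkflowStore.class);          // e.g. inside an AbstractShellCommand
    Workflow workflow = store.get(URI.create("sample.workflow-0"));
    Collection<Workflow> all = store.getAll();
    store.unregister(URI.create("sample.workflow-0"));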
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/HandlerTaskAccumulator.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/HandlerTaskAccumulator.java
new file mode 100644
index 0000000..4651cd4
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/HandlerTaskAccumulator.java
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.impl;
+
+import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.ListMultimap;
+import org.onlab.util.AbstractAccumulator;
+import org.onosproject.workflow.api.HandlerTask;
+import org.onosproject.workflow.api.HandlerTaskBatchDelegate;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Timer;
+
+/**
+ * An accumulator for building batches of event task operations. Only one batch should
+ * be in process per instance at a time.
+ */
+public class HandlerTaskAccumulator extends AbstractAccumulator<HandlerTask> {
+
+ private static final int DEFAULT_MAX_EVENTS = 5000;
+ private static final int DEFAULT_MAX_IDLE_MS = 10;
+ private static final int DEFAULT_MAX_BATCH_MS = 50;
+
+ private static final Timer TIMER = new Timer("onos-workflow-handlertask-batching");
+
+ private final HandlerTaskBatchDelegate delegate;
+
+ private volatile boolean ready;
+
+ /**
+ * Creates an event-task operation accumulator.
+ *
+ * @param delegate the event-task batch delegate
+ */
+ protected HandlerTaskAccumulator(HandlerTaskBatchDelegate delegate) {
+ super(TIMER, DEFAULT_MAX_EVENTS, DEFAULT_MAX_BATCH_MS, DEFAULT_MAX_IDLE_MS);
+ this.delegate = delegate;
+ // Assume that the delegate is ready to process a batch at the start
+ ready = true; //TODO validate the assumption that delegate is ready
+ }
+
+ @Override
+ public void processItems(List<HandlerTask> items) {
+ ready = false;
+ delegate.execute(epoch(items));
+ }
+
+ /**
+ * Groups handler tasks into an epoch, keyed by workflow context name.
+ * @param ops handler tasks
+ * @return collection of per-context collections of handler tasks
+ */
+ private Collection<Collection<HandlerTask>> epoch(List<HandlerTask> ops) {
+
+ ListMultimap<String, HandlerTask> tasks = ArrayListMultimap.create();
+
+ // align event-tasks with context
+ for (HandlerTask op : ops) {
+ tasks.put(op.context().name(), op);
+ }
+
+ return tasks.asMap().values();
+ }
+
+ @Override
+ public boolean isReady() {
+ return ready;
+ }
+
+ /**
+ * Makes the accumulator ready to process the next batch.
+ */
+ public void ready() {
+ ready = true;
+ }
+}
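A minimal sketch of how the accumulator is meant to be driven (the delegate body is a placeholder, and because the constructor is protected this would live alongside the class, as WorkFlowEngine does below): tasks added within one batching window are handed to the delegate as a single epoch, split into per-context collections:

    HandlerTaskBatchDelegate delegate = new HandlerTaskBatchDelegate() {
        @Override
        public void execute(Collection<Collection<HandlerTask>> batches) {
            // each inner collection holds the tasks accumulated for one workflow context
        }
    };
    HandlerTaskAccumulator accumulator = new HandlerTaskAccumulator(delegate);
    // accumulator.add(task) can then be called as events arrive; isReady()/ready() gate the next batch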
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkFlowEngine.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkFlowEngine.java
new file mode 100644
index 0000000..26e0bc9
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkFlowEngine.java
@@ -0,0 +1,740 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.impl;
+
+import com.fasterxml.jackson.databind.node.JsonNodeFactory;
+import com.google.common.collect.Lists;
+import org.apache.felix.scr.annotations.Activate;
+import org.apache.felix.scr.annotations.Component;
+import org.apache.felix.scr.annotations.Deactivate;
+import org.apache.felix.scr.annotations.Reference;
+import org.apache.felix.scr.annotations.ReferenceCardinality;
+import org.apache.felix.scr.annotations.Service;
+import org.onosproject.cluster.ClusterService;
+import org.onosproject.cluster.LeadershipService;
+import org.onosproject.cluster.NodeId;
+import org.onosproject.core.ApplicationId;
+import org.onosproject.core.CoreService;
+import org.onosproject.workflow.api.DefaultWorkplace;
+import org.onosproject.workflow.api.EventHintSupplier;
+import org.onosproject.workflow.api.EventTask;
+import org.onosproject.workflow.api.JsonDataModelTree;
+import org.onosproject.workflow.api.SystemWorkflowContext;
+import org.onosproject.workflow.api.EventTimeoutTask;
+import org.onosproject.workflow.api.TimeoutTask;
+import org.onosproject.workflow.api.TimerChain;
+import org.onosproject.workflow.api.Worklet;
+import org.onosproject.workflow.api.Workflow;
+import org.onosproject.workflow.api.WorkflowContext;
+import org.onosproject.workflow.api.WorkflowData;
+import org.onosproject.workflow.api.ContextEventMapStore;
+import org.onosproject.workflow.api.WorkflowState;
+import org.onosproject.workflow.api.WorkflowStore;
+import org.onosproject.workflow.api.WorkflowBatchDelegate;
+import org.onosproject.workflow.api.WorkflowDataEvent;
+import org.onosproject.workflow.api.WorkflowDataListener;
+import org.onosproject.workflow.api.WorkflowException;
+import org.onosproject.workflow.api.HandlerTask;
+import org.onosproject.workflow.api.HandlerTaskBatchDelegate;
+import org.onosproject.workflow.api.Workplace;
+import org.onosproject.workflow.api.WorkplaceStore;
+import org.onosproject.workflow.api.WorkplaceStoreDelegate;
+import org.onosproject.workflow.api.WorkflowExecutionService;
+import org.onosproject.event.AbstractListenerManager;
+import org.onosproject.event.Event;
+import org.onosproject.net.intent.WorkPartitionService;
+import org.slf4j.Logger;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.UUID;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.stream.Collectors;
+
+import static java.util.concurrent.Executors.newFixedThreadPool;
+import static java.util.concurrent.Executors.newSingleThreadExecutor;
+import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor;
+import static org.onlab.util.Tools.groupedThreads;
+import static org.onosproject.workflow.api.WorkflowAttribute.REMOVE_AFTER_COMPLETE;
+import static org.slf4j.LoggerFactory.getLogger;
+
+@Component(immediate = true)
+@Service
+public class WorkFlowEngine extends AbstractListenerManager<WorkflowDataEvent, WorkflowDataListener>
+ implements WorkflowExecutionService {
+
+ protected static final Logger log = getLogger(WorkFlowEngine.class);
+ @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+ protected CoreService coreService;
+
+ @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+ protected ClusterService clusterService;
+
+ @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+ protected LeadershipService leadershipService;
+
+ @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+ protected WorkPartitionService partitionService;
+
+ @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+ protected WorkplaceStore workplaceStore;
+
+ @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+ protected WorkflowStore workflowStore;
+
+ @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+ protected ContextEventMapStore eventMapStore;
+
+ private final WorkplaceStoreDelegate workplaceStoreDelegate = this::post;
+
+ private final WorkflowBatchDelegate workflowBatchDelegate = new InternalWorkflowBatchDelegate();
+ private final WorkflowAccumulator workflowAccumulator = new WorkflowAccumulator(workflowBatchDelegate);
+
+ private final HandlerTaskBatchDelegate eventtaskBatchDelegate = new InternalHandlerTaskBatchDelegate();
+ private final HandlerTaskAccumulator eventtaskAccumulator = new HandlerTaskAccumulator(eventtaskBatchDelegate);
+
+ private ExecutorService workflowBatchExecutor;
+ private ExecutorService workflowExecutor;
+
+ private ExecutorService handlerTaskBatchExecutor;
+ private ExecutorService handlerTaskExecutor;
+
+ private static final int DEFAULT_WORKFLOW_THREADS = 12;
+ private static final int DEFAULT_EVENTTASK_THREADS = 12;
+ private static final int MAX_REGISTER_EVENTMAP_WAITS = 10;
+
+ private ScheduledExecutorService eventMapTriggerExecutor;
+
+ private TimerChain timerChain = new TimerChain();
+
+ public static final String APPID = "org.onosproject.workflow";
+ private ApplicationId appId;
+ private NodeId localNodeId;
+
+ @Activate
+ public void activate() {
+ appId = coreService.registerApplication(APPID);
+ workplaceStore.setDelegate(workplaceStoreDelegate);
+ localNodeId = clusterService.getLocalNode().id();
+ leadershipService.runForLeadership(appId.name());
+
+ workflowBatchExecutor = newSingleThreadExecutor(
+ groupedThreads("onos/workflow", "workflow-batch", log));
+ workflowExecutor = newFixedThreadPool(DEFAULT_WORKFLOW_THREADS,
+ groupedThreads("onos/workflow-exec", "worker-%d", log));
+ handlerTaskBatchExecutor = newSingleThreadExecutor(
+ groupedThreads("onos/workflow", "handlertask-batch", log));
+ handlerTaskExecutor = newFixedThreadPool(DEFAULT_EVENTTASK_THREADS,
+ groupedThreads("onos/handlertask-exec", "worker-%d", log));
+ eventMapTriggerExecutor = newSingleThreadScheduledExecutor(
+ groupedThreads("onos/workflow-engine", "eventmap-trigger-executor"));
+
+ (new WorkplaceWorkflow(this, workflowStore)).registerWorkflows();
+ JsonDataModelTree data = new JsonDataModelTree(JsonNodeFactory.instance.objectNode());
+ workplaceStore.registerWorkplace(Workplace.SYSTEM_WORKPLACE,
+ new DefaultWorkplace(Workplace.SYSTEM_WORKPLACE, data));
+
+ log.info("Started");
+ }
+
+ @Deactivate
+ public void deactivate() {
+ leadershipService.withdraw(appId.name());
+ workplaceStore.unsetDelegate(workplaceStoreDelegate);
+ workflowBatchExecutor.shutdown();
+ workflowExecutor.shutdown();
+ handlerTaskBatchExecutor.shutdown();
+ handlerTaskExecutor.shutdown();
+ eventMapTriggerExecutor.shutdown();
+ log.info("Stopped");
+ }
+
+ @Override
+ public void execInitWorklet(WorkflowContext context) {
+
+ Workflow workflow = workflowStore.get(context.workflowId());
+ if (workflow == null) {
+ log.error("Invalid workflow {}", context.workflowId());
+ return;
+ }
+
+ initWorkletExecution(context);
+ try {
+ Worklet initWorklet = workflow.init(context);
+ if (initWorklet != null) {
+ initWorklet.process(context);
+ }
+
+ } catch (WorkflowException e) {
+ log.error("Exception: {}, trace: {}", e, Lists.newArrayList(e.getStackTrace()));
+ context.setCause(e.getMessage());
+ context.setState(WorkflowState.EXCEPTION);
+ workplaceStore.commitContext(context.name(), context, false);
+ return;
+ }
+ // trigger the execution of next worklet.
+ workplaceStore.registerContext(context.name(), context);
+ }
+
+ @Override
+ public void eventMapTrigger(Event event, EventHintSupplier supplier) {
+
+ if (event.subject() instanceof SystemWorkflowContext) {
+ return;
+ }
+
+ Map<String, String> eventMap;
+
+ String eventHint;
+ try {
+ eventHint = supplier.apply(event);
+ } catch (Throwable e) {
+ log.error("Exception: {}, trace: {}", e, Lists.newArrayList(e.getStackTrace()));
+ return;
+ }
+ if (eventHint == null) {
+ // do nothing
+ log.error("Invalid eventHint, event: {}", event);
+ return;
+ }
+
+ try {
+ eventMap = eventMapStore.getEventMap(event.getClass().getName(), eventHint);
+ } catch (WorkflowException e) {
+ log.error("Exception: {}, trace: {}", e, Lists.newArrayList(e.getStackTrace()));
+ return;
+ }
+
+ if (Objects.isNull(eventMap) || eventMap.isEmpty()) {
+ // do nothing;
+ log.debug("Invalid eventMap, event: {}", event);
+ return;
+ }
+
+ for (Map.Entry<String, String> entry : eventMap.entrySet()) {
+ String contextName = entry.getKey();
+ String workletType = entry.getValue();
+ WorkflowContext context = workplaceStore.getContext(contextName);
+ if (Objects.isNull(context)) {
+ log.info("Invalid context: {}, event: {}", contextName, event);
+ continue;
+ }
+ EventTask eventtask = EventTask.builder()
+ .event(event)
+ .eventHint(eventHint)
+ .context(context)
+ .workletType(workletType)
+ .build();
+
+ log.info("eventtaskAccumulator.add: task: {}", eventtask);
+ eventtaskAccumulator.add(eventtask);
+ }
+ }
+
+ @Override
+ public void registerEventMap(Class<? extends Event> eventType, String eventHint,
+ String contextName, String workletType) throws WorkflowException {
+ eventMapStore.registerEventMap(eventType.getName(), eventHint, contextName, workletType);
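+ // Wait (bounded retries) until the registration becomes visible in the distributed
+ // event map store, so that a completion event generated right after this call is not missed.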
+ for (int i = 0; i < MAX_REGISTER_EVENTMAP_WAITS; i++) {
+ Map<String, String> eventMap = eventMapStore.getEventMap(eventType.getName(), eventHint);
+ if (eventMap != null && eventMap.containsKey(contextName)) {
+ return;
+ }
+ try {
+ log.info("sleep {}", i);
+ Thread.sleep(10L * (i + 1));
+ } catch (InterruptedException e) {
+ log.error("Interrupted while waiting for event map registration", e);
+ Thread.currentThread().interrupt();
+ }
+ }
+ }
+
+ @Override
+ protected void post(WorkflowDataEvent event) {
+
+ if (event.subject() == null || !isRelevant(event.subject())) {
+ log.debug("ignore event {}", event);
+ return;
+ }
+
+ // trigger next worklet selection
+ WorkflowData dataModelContainer = event.subject();
+ switch (event.type()) {
+ case INSERT:
+ case UPDATE:
+ if (dataModelContainer.triggerNext()) {
+ log.info("workflowAccumulator.add: {}", dataModelContainer);
+ workflowAccumulator.add(dataModelContainer);
+ } else {
+ log.debug("pass-workflowAccumulator.add: {}", dataModelContainer);
+ }
+ break;
+ case REMOVE:
+ break;
+ default:
+ }
+
+ // trigger EventTask for WorkflowDataEvent
+ eventMapTriggerExecutor.submit(
+ () -> eventMapTrigger(
+ event,
+ // event hint supplier
+ (ev) -> {
+ if (ev == null || ev.subject() == null) {
+ return null;
+ }
+
+ if (ev.subject() instanceof WorkflowData) {
+ return ((WorkflowData) ev.subject()).name();
+ } else {
+ return null;
+ }
+ }
+ )
+ );
+ }
+
+ /**
+ * Checks whether this workflow data job is relevant to this ONOS node.
+ * @param job workflow data
+ * @return true if this node is responsible for handling the given workflow data
+ */
+ private boolean isRelevant(WorkflowData job) {
+ // distributes event processing with work-partition
+ return partitionService.isMine(job.distributor(), this::stringHash);
+ }
+
+ /**
+ * Gets hash of the string.
+ * @param str string to get a hash
+ * @return hash value
+ */
+ public Long stringHash(String str) {
+ return UUID.nameUUIDFromBytes(str.getBytes()).getMostSignificantBits();
+ }
+
+ /**
+ * Class for handler task batch delegation.
+ */
+ private class InternalHandlerTaskBatchDelegate implements HandlerTaskBatchDelegate {
+ @Override
+ public void execute(Collection<Collection<HandlerTask>> operations) {
+ log.debug("Execute {} operation(s).", operations.size());
+
+ CompletableFuture.runAsync(() -> {
+ List<CompletableFuture<Collection<HandlerTask>>> futures = operations.stream()
+ .map(
+ x -> CompletableFuture.completedFuture(x)
+ .thenApplyAsync(WorkFlowEngine.this::processHandlerTask, handlerTaskExecutor)
+ .exceptionally(e -> null)
+ )
+ .collect(Collectors.toList());
+
+ // wait for the completion of the futures
+ futures.parallelStream().forEach(x -> x.join());
+
+ }, handlerTaskBatchExecutor).exceptionally(e -> {
+ log.error("Error submitting batches:", e);
+ return null;
+ }).thenRun(eventtaskAccumulator::ready);
+ }
+ }
+
+ /**
+ * Initializes worklet execution.
+ * @param context workflow context
+ */
+ private void initWorkletExecution(WorkflowContext context) {
+ context.setState(WorkflowState.RUNNING);
+ context.setCause("");
+ context.setWorkflowExecutionService(this);
+ context.setWorkflowStore(workflowStore);
+ context.setWorkplaceStore(workplaceStore);
+ context.waitCompletion(null, null, null, 0L);
+ context.setTriggerNext(false);
+ }
+
+ /**
+ * Processes handler tasks.
+ * @param tasks handler tasks
+ * @return handler tasks processed
+ */
+ private Collection<HandlerTask> processHandlerTask(Collection<HandlerTask> tasks) {
+
+ for (HandlerTask task : tasks) {
+ if (task instanceof EventTimeoutTask) {
+ execEventTimeoutTask((EventTimeoutTask) task);
+ } else if (task instanceof TimeoutTask) {
+ execTimeoutTask((TimeoutTask) task);
+ } else if (task instanceof EventTask) {
+ execEventTask((EventTask) task);
+ } else {
+ log.error("Unsupported handler task {}", task);
+ }
+ }
+
+ return null;
+ }
+
+ /**
+ * Executes event task.
+ * @param task event task
+ * @return event task
+ */
+ private EventTask execEventTask(EventTask task) {
+
+ Map<String, String> eventMap = null;
+ try {
+ eventMap = eventMapStore.getEventMap(task.event().getClass().getName(), task.eventHint());
+ } catch (WorkflowException e) {
+ log.error("Exception: {}, trace: {}", e, Lists.newArrayList(e.getStackTrace()));
+ return task;
+ }
+
+ if (Objects.isNull(eventMap) || eventMap.isEmpty()) {
+ return task;
+ }
+
+ if (Objects.isNull(eventMap.get(task.context().name()))) {
+ return task;
+ }
+
+ log.debug("execEventTask- task: {}, hash: {}", task, stringHash(task.context().distributor()));
+
+ WorkflowContext context = (WorkflowContext) (task.context());
+ Workflow workflow = workflowStore.get(context.workflowId());
+ if (workflow == null) {
+ log.error("Invalid workflow {}", context.workflowId());
+ return task;
+ }
+
+ WorkflowContext latestContext = workplaceStore.getContext(context.name());
+ if (latestContext == null) {
+ log.error("Invalid workflow context {}", context.name());
+ return task;
+ }
+
+ try {
+ Worklet worklet = workflow.getWorkletInstance(task.workletType());
+ if (!Objects.equals(latestContext.current(), worklet.tag())) {
+ log.error("Current worklet({}) does not match task worklet({}). Ignored.",
+ latestContext.current(), worklet.tag());
+ return task;
+ }
+
+ if (worklet == Worklet.Common.COMPLETED || worklet == Worklet.Common.INIT) {
+ log.error("Current worklet is {}, Ignored", worklet);
+ return task;
+ }
+
+ initWorkletExecution(latestContext);
+
+ log.info("processHandlerTask.isCompleted-task:{}, latest:{}", task, latestContext);
+ if (worklet.isCompleted(latestContext, task.event())) {
+ eventMapStore.unregisterEventMap(
+ task.eventType(), task.eventHint(), latestContext.name());
+
+ workplaceStore.commitContext(latestContext.name(), latestContext, true);
+ return null;
+ } else {
+ workplaceStore.commitContext(latestContext.name(), latestContext, false);
+ }
+
+ } catch (WorkflowException e) {
+ log.error("Exception: {}, trace: {}", e, Lists.newArrayList(e.getStackTrace()));
+ latestContext.setCause(e.getMessage());
+ latestContext.setState(WorkflowState.EXCEPTION);
+ workplaceStore.commitContext(latestContext.name(), latestContext, false);
+ }
+
+ return task;
+ }
+
+ /**
+ * Executes event timeout task.
+ * @param task event timeout task
+ * @return handler task
+ */
+ private HandlerTask execEventTimeoutTask(EventTimeoutTask task) {
+
+ Map<String, String> eventMap = null;
+ try {
+ eventMap = eventMapStore.getEventMap(task.eventType(), task.eventHint());
+ } catch (WorkflowException e) {
+ log.error("execEventTimeoutTask: Exception: {}, trace: {}",
+ e, Lists.newArrayList(e.getStackTrace()));
+ return task;
+ }
+
+ if (Objects.isNull(eventMap) || eventMap.isEmpty()) {
+ return task;
+ }
+
+ if (Objects.isNull(eventMap.get(task.context().name()))) {
+ return task;
+ }
+
+ log.debug("execEventTimeoutTask- task: {}, hash: {}", task, stringHash(task.context().distributor()));
+
+ WorkflowContext context = task.context();
+ Workflow workflow = workflowStore.get(context.workflowId());
+ if (workflow == null) {
+ log.error("execEventTimeoutTask: Invalid workflow {}", context.workflowId());
+ return task;
+ }
+
+ WorkflowContext latestContext = workplaceStore.getContext(context.name());
+ if (latestContext == null) {
+ log.error("execEventTimeoutTask: Invalid workflow context {}", context.name());
+ return task;
+ }
+
+ try {
+ Worklet worklet = workflow.getWorkletInstance(task.workletType());
+ if (!Objects.equals(latestContext.current(), worklet.tag())) {
+ log.error("execEventTimeoutTask: Current worklet({}) does not match task worklet({}). Ignored.",
+ latestContext.current(), worklet.tag());
+ return task;
+ }
+
+ if (worklet == Worklet.Common.COMPLETED || worklet == Worklet.Common.INIT) {
+ log.error("execEventTimeoutTask: Current worklet is {}, Ignored", worklet);
+ return task;
+ }
+
+ initWorkletExecution(latestContext);
+
+ log.info("execEventTimeoutTask.timeout-task:{}, latest:{}", task, latestContext);
+ eventMapStore.unregisterEventMap(
+ task.eventType(), task.eventHint(), latestContext.name());
+
+ worklet.timeout(latestContext);
+ workplaceStore.commitContext(latestContext.name(), latestContext, latestContext.triggerNext());
+
+ } catch (WorkflowException e) {
+ log.error("Exception: {}, trace: {}", e, Lists.newArrayList(e.getStackTrace()));
+ latestContext.setCause(e.getMessage());
+ latestContext.setState(WorkflowState.EXCEPTION);
+ workplaceStore.commitContext(latestContext.name(), latestContext, false);
+ }
+
+ return task;
+ }
+
+ /**
+ * Executes timeout task.
+ * @param task time out task
+ * @return handler task
+ */
+ private HandlerTask execTimeoutTask(TimeoutTask task) {
+
+ log.debug("execTimeoutTask- task: {}, hash: {}", task, stringHash(task.context().distributor()));
+
+ WorkflowContext context = (WorkflowContext) (task.context());
+ Workflow workflow = workflowStore.get(context.workflowId());
+ if (workflow == null) {
+ log.error("execTimeoutTask: Invalid workflow {}", context.workflowId());
+ return task;
+ }
+
+ WorkflowContext latestContext = workplaceStore.getContext(context.name());
+ if (latestContext == null) {
+ log.error("execTimeoutTask: Invalid workflow context {}", context.name());
+ return task;
+ }
+
+ try {
+ Worklet worklet = workflow.getWorkletInstance(task.workletType());
+ if (!Objects.equals(latestContext.current(), worklet.tag())) {
+ log.error("execTimeoutTask: Current worklet({}) does not match task worklet({}). Ignored.",
+ latestContext.current(), worklet.tag());
+ return task;
+ }
+
+ if (worklet == Worklet.Common.COMPLETED || worklet == Worklet.Common.INIT) {
+ log.error("execTimeoutTask: Current worklet is {}, Ignored", worklet);
+ return task;
+ }
+
+ initWorkletExecution(latestContext);
+
+ worklet.timeout(latestContext);
+ workplaceStore.commitContext(latestContext.name(), latestContext, latestContext.triggerNext());
+
+ } catch (WorkflowException e) {
+ log.error("Exception: {}, trace: {}", e, Lists.newArrayList(e.getStackTrace()));
+ latestContext.setCause(e.getMessage());
+ latestContext.setState(WorkflowState.EXCEPTION);
+ workplaceStore.commitContext(latestContext.name(), latestContext, false);
+ }
+
+ return task;
+ }
+
+ /**
+ * Class for delegation of workflow batch execution.
+ */
+ private class InternalWorkflowBatchDelegate implements WorkflowBatchDelegate {
+ @Override
+ public void execute(Collection<WorkflowData> operations) {
+ log.debug("Execute {} operation(s).", operations.size());
+
+ CompletableFuture.runAsync(() -> {
+ List<CompletableFuture<WorkflowData>> futures = operations.stream()
+ .map(
+ x -> CompletableFuture.completedFuture(x)
+ .thenApplyAsync(WorkFlowEngine.this::execWorkflow, workflowExecutor)
+ .exceptionally(e -> null)
+ )
+ .collect(Collectors.toList());
+
+ // wait for the completion of the futures
+ futures.parallelStream().forEach(x -> x.join());
+
+ }, workflowBatchExecutor).exceptionally(e -> {
+ log.error("Error submitting batches:", e);
+ return null;
+ }).thenRun(workflowAccumulator::ready);
+ }
+ }
+
+ /**
+ * Executes workflow.
+ * @param dataModelContainer workflow data model container (workflow context or workplace)
+ * @return the processed workflow data
+ */
+ private WorkflowData execWorkflow(WorkflowData dataModelContainer) {
+ if (dataModelContainer instanceof WorkflowContext) {
+ return execWorkflowContext((WorkflowContext) dataModelContainer);
+ } else if (dataModelContainer instanceof Workplace) {
+ return execWorkplace((Workplace) dataModelContainer);
+ } else {
+ log.error("Invalid context {}", dataModelContainer);
+ return null;
+ }
+ }
+
+ /**
+ * Executes workflow context.
+ * @param context workflow context
+ * @return workflow context
+ */
+ private WorkflowContext execWorkflowContext(WorkflowContext context) {
+
+ Workflow workflow = workflowStore.get(context.workflowId());
+ if (workflow == null) {
+ log.error("Invalid workflow {}", context.workflowId());
+ return null;
+ }
+
+ final WorkflowContext latestContext = workplaceStore.getContext(context.name());
+ if (latestContext == null) {
+ log.error("Invalid workflow context {}", context.name());
+ return null;
+ }
+
+ initWorkletExecution(latestContext);
+
+ try {
+ final Worklet worklet = workflow.next(latestContext);
+
+ if (worklet == Worklet.Common.INIT) {
+ log.error("workflow.next gave INIT. It cannot be executed (context: {})", context.name());
+ return latestContext;
+ }
+
+ latestContext.setCurrent(worklet);
+ if (worklet == Worklet.Common.COMPLETED) {
+
+ if (workflow.attributes().contains(REMOVE_AFTER_COMPLETE)) {
+ workplaceStore.removeContext(latestContext.name());
+ return null;
+ } else {
+ latestContext.setState(WorkflowState.IDLE);
+ workplaceStore.commitContext(latestContext.name(), latestContext, false);
+ return latestContext;
+ }
+ }
+
+ log.info("execWorkflowContext.process:{}, {}", worklet.tag(), latestContext);
+ worklet.process(latestContext);
+
+ if (latestContext.completionEventType() != null) {
+ if (latestContext.completionEventGenerator() == null) {
+ String msg = String.format("Invalid expected event(%s), generator(%s)",
+ latestContext.completionEventType(),
+ latestContext.completionEventGenerator());
+ throw new WorkflowException(msg);
+ }
+
+ registerEventMap(latestContext.completionEventType(), latestContext.completionEventHint(),
+ latestContext.name(), worklet.tag());
+
+ latestContext.completionEventGenerator().apply();
+
+ if (latestContext.completionEventTimeout() != 0L) {
+ final EventTimeoutTask eventTimeoutTask = EventTimeoutTask.builder()
+ .context(latestContext)
+ .workletType(worklet.tag())
+ .eventType(latestContext.completionEventType().getName())
+ .eventHint(latestContext.completionEventHint())
+ .build();
+ timerChain.schedule(latestContext.completionEventTimeout(),
+ () -> {
+ eventtaskAccumulator.add(eventTimeoutTask);
+ });
+ }
+ } else {
+ if (latestContext.completionEventTimeout() != 0L) {
+ final TimeoutTask timeoutTask = TimeoutTask.builder()
+ .context(latestContext)
+ .workletType(worklet.tag())
+ .build();
+
+ timerChain.schedule(latestContext.completionEventTimeout(),
+ () -> {
+ eventtaskAccumulator.add(timeoutTask);
+ });
+ }
+ }
+
+ workplaceStore.commitContext(latestContext.name(), latestContext, latestContext.triggerNext());
+
+ } catch (WorkflowException e) {
+ log.error("Exception: {}, trace: {}", e, Lists.newArrayList(e.getStackTrace()));
+ latestContext.setCause(e.getMessage());
+ latestContext.setState(WorkflowState.EXCEPTION);
+ workplaceStore.commitContext(latestContext.name(), latestContext, false);
+ }
+
+ return latestContext;
+ }
+
+ /**
+ * Executes workplace.
+ * @param workplace workplace
+ * @return workplace
+ */
+ private Workplace execWorkplace(Workplace workplace) {
+
+ return null;
+ }
+
+}
\ No newline at end of file
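For reference, a minimal sketch of the completion-event pattern that execWorkflowContext(), execEventTask() and execEventTimeoutTask() drive. The worklet below is hypothetical (not part of this change) and only assumes the Worklet/WorkflowContext API already used by CreateWorkplace and the sample worklets.

import org.onosproject.event.Event;
import org.onosproject.workflow.api.AbstractWorklet;
import org.onosproject.workflow.api.WorkflowContext;
import org.onosproject.workflow.api.WorkflowDataEvent;
import org.onosproject.workflow.api.WorkflowException;

public class WaitForDataWorklet extends AbstractWorklet {

    @Override
    public void process(WorkflowContext context) throws WorkflowException {
        // Arm completion: the engine registers an event map entry for
        // (WorkflowDataEvent, hint), runs the generator, and schedules an
        // EventTimeoutTask when the timeout is non-zero. For WorkflowDataEvent,
        // the hint is matched against the event subject's name().
        context.waitCompletion(WorkflowDataEvent.class, "some-workflow-data-name",
                () -> { /* kick off the asynchronous operation here */ },
                60000L);
    }

    @Override
    public boolean isCompleted(WorkflowContext context, Event event) throws WorkflowException {
        // Called from execEventTask(); returning true unregisters the event map
        // entry and commits the context with triggerNext = true.
        return event instanceof WorkflowDataEvent;
    }

    @Override
    public void timeout(WorkflowContext context) throws WorkflowException {
        // Called from execEventTimeoutTask() when no matching event arrives in time.
        context.completed();
    }
}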
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkflowAccumulator.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkflowAccumulator.java
new file mode 100644
index 0000000..d13b66f
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkflowAccumulator.java
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.impl;
+
+import com.google.common.collect.Maps;
+import org.onlab.util.AbstractAccumulator;
+import org.onosproject.workflow.api.WorkflowData;
+import org.onosproject.workflow.api.WorkflowBatchDelegate;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.Timer;
+
+/**
+ * An accumulator for building batches of workflow operations. Only one batch should
+ * be in process per instance at a time.
+ */
+public class WorkflowAccumulator extends AbstractAccumulator<WorkflowData> {
+
+ private static final int DEFAULT_MAX_EVENTS = 1000;
+ private static final int DEFAULT_MAX_IDLE_MS = 10;
+ private static final int DEFAULT_MAX_BATCH_MS = 50;
+
+ private static final Timer TIMER = new Timer("onos-workflow-op-batching");
+
+ private final WorkflowBatchDelegate delegate;
+
+ private volatile boolean ready;
+
+ /**
+ * Creates a workflow operation accumulator.
+ *
+ * @param delegate the workflow batch delegate
+ */
+ protected WorkflowAccumulator(WorkflowBatchDelegate delegate) {
+ super(TIMER, DEFAULT_MAX_EVENTS, DEFAULT_MAX_BATCH_MS, DEFAULT_MAX_IDLE_MS);
+ this.delegate = delegate;
+ // Assume that the delegate is ready at the start
+ ready = true; //TODO validate the assumption that delegate is ready
+ }
+
+ @Override
+ public void processItems(List<WorkflowData> items) {
+ ready = false;
+ delegate.execute(reduce(items));
+ }
+
+ private Collection<WorkflowData> reduce(List<WorkflowData> ops) {
+ Map<String, WorkflowData> map = Maps.newHashMap();
+ for (WorkflowData op : ops) {
+ map.put(op.name(), op);
+ }
+ //TODO check the version... or maybe workplaceStore will handle this.
+ return map.values();
+ }
+
+ @Override
+ public boolean isReady() {
+ return ready;
+ }
+
+ /**
+ * Makes the accumulator ready to process the next batch.
+ */
+ public void ready() {
+ ready = true;
+ }
+}
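A minimal wiring sketch (hypothetical, placed in the same package because the constructor is protected) showing the contract between a producer, this accumulator and its batch delegate: processItems() hands a de-duplicated batch to the delegate, and the accumulator stays closed until the delegate calls ready() again.

package org.onosproject.workflow.impl;

import java.util.Collection;
import org.onosproject.workflow.api.WorkflowBatchDelegate;
import org.onosproject.workflow.api.WorkflowData;

class WorkflowAccumulatorSketch {

    private WorkflowAccumulator accumulator;

    void wire() {
        WorkflowBatchDelegate delegate = new WorkflowBatchDelegate() {
            @Override
            public void execute(Collection<WorkflowData> operations) {
                // process the batch (the engine does this asynchronously on worker threads),
                // then re-open the accumulator for the next batch
                accumulator.ready();
            }
        };
        accumulator = new WorkflowAccumulator(delegate);
        // Producers call add(); items are de-duplicated by name() in reduce() and
        // flushed by the max-events / max-batch / max-idle thresholds.
        // accumulator.add(workflowData);
    }
}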
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkflowManager.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkflowManager.java
new file mode 100644
index 0000000..804716e
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkflowManager.java
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.impl;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.JsonNodeFactory;
+import org.apache.felix.scr.annotations.Activate;
+import org.apache.felix.scr.annotations.Component;
+import org.apache.felix.scr.annotations.Deactivate;
+import org.apache.felix.scr.annotations.Reference;
+import org.apache.felix.scr.annotations.ReferenceCardinality;
+import org.apache.felix.scr.annotations.Service;
+import org.onosproject.net.config.NetworkConfigRegistry;
+import org.onosproject.net.config.NetworkConfigService;
+import org.onosproject.workflow.api.DefaultWorkplace;
+import org.onosproject.workflow.api.JsonDataModelTree;
+import org.onosproject.workflow.api.Workflow;
+import org.onosproject.workflow.api.WorkflowContext;
+import org.onosproject.workflow.api.WorkflowDescription;
+import org.onosproject.workflow.api.WorkflowException;
+import org.onosproject.workflow.api.WorkflowService;
+import org.onosproject.workflow.api.WorkflowExecutionService;
+import org.onosproject.workflow.api.WorkflowStore;
+import org.onosproject.workflow.api.Workplace;
+import org.onosproject.workflow.api.WorkplaceDescription;
+import org.onosproject.workflow.api.WorkplaceStore;
+import org.slf4j.Logger;
+
+import java.net.URI;
+import java.util.Objects;
+
+import static org.slf4j.LoggerFactory.getLogger;
+
+@Component(immediate = true)
+@Service
+public class WorkflowManager implements WorkflowService {
+
+ protected static final Logger log = getLogger(WorkflowManager.class);
+
+ @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+ private WorkflowExecutionService workflowExecutionService;
+
+ @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+ protected WorkplaceStore workplaceStore;
+
+ @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+ protected WorkflowStore workflowStore;
+
+ @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+ private NetworkConfigService networkConfigService;
+
+ @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+ private NetworkConfigRegistry networkConfigRegistry;
+
+ private WorkflowNetConfigListener netcfgListener;
+
+ @Activate
+ public void activate() {
+ netcfgListener = new WorkflowNetConfigListener(this);
+ networkConfigRegistry.registerConfigFactory(netcfgListener.getConfigFactory());
+ networkConfigService.addListener(netcfgListener);
+ log.info("Started");
+ }
+
+ @Deactivate
+ public void deactivate() {
+ networkConfigService.removeListener(netcfgListener);
+ networkConfigRegistry.unregisterConfigFactory(netcfgListener.getConfigFactory());
+ log.info("Stopped");
+ }
+
+ @Override
+ public void createWorkplace(WorkplaceDescription wpDesc) throws WorkflowException {
+ log.info("createWorkplace: {}", wpDesc);
+
+ JsonNode root;
+ if (wpDesc.data().isPresent()) {
+ root = wpDesc.data().get();
+ } else {
+ root = JsonNodeFactory.instance.objectNode();
+ }
+ DefaultWorkplace workplace =
+ new DefaultWorkplace(wpDesc.name(), new JsonDataModelTree(root));
+ workplaceStore.registerWorkplace(wpDesc.name(), workplace);
+ }
+
+ @Override
+ public void removeWorkplace(WorkplaceDescription wpDesc) throws WorkflowException {
+ log.info("removeWorkplace: {}", wpDesc);
+ //TODO: Remove workflow contexts belonging to this workplace
+ workplaceStore.removeWorkplace(wpDesc.name());
+ }
+
+ @Override
+ public void clearWorkplace() throws WorkflowException {
+ log.info("clearWorkplace");
+ workplaceStore.getWorkplaces().stream()
+ .filter(wp -> !Objects.equals(wp.name(), Workplace.SYSTEM_WORKPLACE))
+ .forEach(wp -> workplaceStore.removeWorkplace(wp.name()));
+ }
+
+ @Override
+ public void invokeWorkflow(WorkflowDescription wfDesc) throws WorkflowException {
+ invokeWorkflow(wfDesc.toJson());
+ }
+
+ @Override
+ public void invokeWorkflow(JsonNode workflowDescJson) throws WorkflowException {
+ log.info("invokeWorkflow: {}", workflowDescJson);
+
+ Workplace workplace = workplaceStore.getWorkplace(Workplace.SYSTEM_WORKPLACE);
+ if (Objects.isNull(workplace)) {
+ throw new WorkflowException("Invalid system workplace");
+ }
+
+ Workflow workflow = workflowStore.get(URI.create(WorkplaceWorkflow.WF_CREATE_WORKFLOW));
+ if (Objects.isNull(workflow)) {
+ throw new WorkflowException("Invalid workflow " + WorkplaceWorkflow.WF_CREATE_WORKFLOW);
+ }
+
+ WorkflowContext context = workflow.buildSystemContext(workplace, new JsonDataModelTree(workflowDescJson));
+ workflowExecutionService.execInitWorklet(context);
+ }
+
+ @Override
+ public void terminateWorkflow(WorkflowDescription wfDesc) throws WorkflowException {
+ log.info("terminateWorkflow: {}", wfDesc);
+ if (Objects.nonNull(workplaceStore.getContext(wfDesc.workflowContextName()))) {
+ workplaceStore.removeContext(wfDesc.workflowContextName());
+ }
+ }
+}
\ No newline at end of file
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkflowNetConfig.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkflowNetConfig.java
new file mode 100644
index 0000000..2bca555
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkflowNetConfig.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.impl;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import org.onosproject.core.ApplicationId;
+import org.onosproject.net.config.Config;
+import org.onosproject.workflow.api.DefaultRpcDescription;
+import org.onosproject.workflow.api.RpcDescription;
+import org.onosproject.workflow.api.WorkflowException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+
+public class WorkflowNetConfig extends Config<ApplicationId> {
+
+ private static final Logger log = LoggerFactory.getLogger(WorkflowNetConfig.class);
+
+ /**
+ * Workflow RPC pointer.
+ */
+ private static final String RPC_PTR = "/rpc";
+
+ public Collection<RpcDescription> getRpcDescriptions() throws WorkflowException {
+
+ JsonNode node = object.at(RPC_PTR);
+ if (!(node instanceof ArrayNode)) {
+ throw new WorkflowException("invalid rpc for " + object);
+ }
+ ArrayNode rpcArrayNode = (ArrayNode) node;
+
+ List<RpcDescription> rpcDescriptions = new ArrayList<>();
+ for (JsonNode rpcNode : rpcArrayNode) {
+ rpcDescriptions.add(DefaultRpcDescription.valueOf(rpcNode));
+ }
+
+ return rpcDescriptions;
+ }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkflowNetConfigListener.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkflowNetConfigListener.java
new file mode 100644
index 0000000..59a57e9
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkflowNetConfigListener.java
@@ -0,0 +1,140 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.impl;
+
+import org.onosproject.core.ApplicationId;
+import org.onosproject.net.config.ConfigFactory;
+import org.onosproject.net.config.NetworkConfigEvent;
+import org.onosproject.net.config.NetworkConfigListener;
+import org.onosproject.net.config.basics.SubjectFactories;
+import org.onosproject.workflow.api.DefaultWorkflowDescription;
+import org.onosproject.workflow.api.RpcDescription;
+import org.onosproject.workflow.api.WorkflowService;
+import org.onosproject.workflow.api.WorkflowException;
+import org.onosproject.workflow.api.DefaultWorkplaceDescription;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.ScheduledExecutorService;
+
+import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor;
+import static org.onlab.util.Tools.groupedThreads;
+
+public class WorkflowNetConfigListener implements NetworkConfigListener {
+
+ private static final Logger log = LoggerFactory.getLogger(WorkflowNetConfigListener.class);
+
+ public static final String CONFIG_KEY = "workflow";
+ public static final String EXECUTOR_GROUPNAME = "onos/workflow-netcfg";
+ public static final String EXECUTOR_PATTERN = "netcfg-event-handler";
+
+ private final ConfigFactory<ApplicationId, WorkflowNetConfig> configFactory =
+ new ConfigFactory<ApplicationId, WorkflowNetConfig>(
+ SubjectFactories.APP_SUBJECT_FACTORY, WorkflowNetConfig.class, CONFIG_KEY) {
+ @Override
+ public WorkflowNetConfig createConfig() {
+ return new WorkflowNetConfig();
+ }
+ };
+
+ private final WorkflowService workflowService;
+
+ private final ScheduledExecutorService executor =
+ newSingleThreadScheduledExecutor(groupedThreads(EXECUTOR_GROUPNAME, EXECUTOR_PATTERN));
+
+ public WorkflowNetConfigListener(WorkflowService workflowService) {
+ this.workflowService = workflowService;
+ }
+
+ public ConfigFactory<ApplicationId, WorkflowNetConfig> getConfigFactory() {
+ return configFactory;
+ }
+
+ @Override
+ public boolean isRelevant(NetworkConfigEvent event) {
+ return true;
+ }
+
+ @Override
+ public void event(NetworkConfigEvent event) {
+ log.info("Configuration event: {}", event);
+ switch (event.type()) {
+ case CONFIG_ADDED:
+ case CONFIG_UPDATED:
+ if (!event.config().isPresent()) {
+ log.error("No configuration found");
+ return;
+ }
+ WorkflowNetConfig config = (WorkflowNetConfig) event.config().get();
+
+ // Single-thread executor (locking is not required)
+ executor.execute(new Handler(workflowService, config));
+ break;
+ default:
+ break;
+ }
+ }
+
+ public static class Handler implements Runnable {
+
+ private WorkflowService workflowService;
+ private WorkflowNetConfig config;
+
+ public Handler(WorkflowService workflowService, WorkflowNetConfig config) {
+ this.workflowService = workflowService;
+ this.config = config;
+ }
+
+ @Override
+ public void run() {
+
+ try {
+ Collection<RpcDescription> rpcs = config.getRpcDescriptions();
+ log.info("{}", rpcs);
+ for (RpcDescription rpc : rpcs) {
+ if (!rpcMap.containsKey(rpc.op())) {
+ log.error("Invalid RPC: {}", rpc);
+ continue;
+ }
+
+ rpcMap.get(rpc.op()).apply(this.workflowService, rpc);
+ }
+ } catch (WorkflowException e) {
+ log.error("Exception occurred while handling workflow netcfg RPCs", e);
+ }
+ }
+ }
+
+ @FunctionalInterface
+ public interface RpcCall {
+ void apply(WorkflowService workflowService, RpcDescription rpcDesc) throws WorkflowException;
+ }
+
+ private static Map<String, RpcCall> rpcMap = new HashMap<>();
+ static {
+ rpcMap.put("workplace.create",
+ (service, desc) -> service.createWorkplace(DefaultWorkplaceDescription.valueOf(desc.params())));
+ rpcMap.put("workplace.remove",
+ (service, desc) -> service.removeWorkplace(DefaultWorkplaceDescription.valueOf(desc.params())));
+ rpcMap.put("workflow.invoke",
+ (service, desc) -> service.invokeWorkflow(desc.params()));
+ rpcMap.put("workflow.terminate",
+ (service, desc) -> service.terminateWorkflow(DefaultWorkflowDescription.valueOf(desc.params())));
+ }
+}
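For illustration, a hedged sketch of the netcfg payload shape this listener handles: getRpcDescriptions() reads an array at "/rpc" and each entry is dispatched through rpcMap by its op value. Only the op strings come from rpcMap; the "op"/"params" field names and everything inside "params" are assumptions, since DefaultRpcDescription and the description parsers are defined outside this change set.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

class WorkflowNetConfigSample {

    // Hypothetical payload; field names other than the rpcMap op values are assumptions.
    static final String SAMPLE = "{"
            + " \"rpc\": ["
            + "   { \"op\": \"workplace.create\", \"params\": { \"name\": \"wp-1\" } },"
            + "   { \"op\": \"workflow.invoke\",  \"params\": { \"id\": \"sample.workflow-0\", \"workplace\": \"wp-1\" } }"
            + " ]"
            + "}";

    static JsonNode parse() throws Exception {
        // Parses the sample into the JsonNode shape that WorkflowNetConfig exposes via object.at("/rpc")
        return new ObjectMapper().readTree(SAMPLE);
    }
}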
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkplaceWorkflow.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkplaceWorkflow.java
new file mode 100644
index 0000000..af62aaa
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/WorkplaceWorkflow.java
@@ -0,0 +1,213 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.impl;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.BooleanNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import org.onosproject.event.Event;
+import org.onosproject.workflow.api.AbstractWorklet;
+import org.onosproject.workflow.api.DefaultWorkflowContext;
+import org.onosproject.workflow.api.DefaultWorkplace;
+import org.onosproject.workflow.api.ImmutableListWorkflow;
+import org.onosproject.workflow.api.JsonDataModelTree;
+import org.onosproject.workflow.api.SystemWorkflowContext;
+import org.onosproject.workflow.api.Workflow;
+import org.onosproject.workflow.api.WorkflowAttribute;
+import org.onosproject.workflow.api.WorkflowContext;
+import org.onosproject.workflow.api.WorkflowData;
+import org.onosproject.workflow.api.WorkflowDataEvent;
+import org.onosproject.workflow.api.DefaultWorkflowDescription;
+import org.onosproject.workflow.api.WorkflowDescription;
+import org.onosproject.workflow.api.WorkflowException;
+import org.onosproject.workflow.api.WorkflowExecutionService;
+import org.onosproject.workflow.api.WorkflowStore;
+import org.onosproject.workflow.api.Workplace;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.net.URI;
+import java.util.Objects;
+
+import static org.onosproject.workflow.api.WorkflowDataEvent.Type.INSERT;
+
+public class WorkplaceWorkflow {
+
+ private static final Logger log = LoggerFactory.getLogger(WorkplaceWorkflow.class);
+
+ private WorkflowExecutionService workflowExecutionService;
+ private WorkflowStore workflowStore;
+
+ public WorkplaceWorkflow(WorkflowExecutionService workflowExecutionService,
+ WorkflowStore workflowStore) {
+ this.workflowExecutionService = workflowExecutionService;
+ this.workflowStore = workflowStore;
+ }
+
+ public static final String WF_CREATE_WORKFLOW = "workplace.create-workflow";
+
+ public void registerWorkflows() {
+
+ Workflow workflow = ImmutableListWorkflow.builder()
+ .id(URI.create(WF_CREATE_WORKFLOW))
+ .attribute(WorkflowAttribute.REMOVE_AFTER_COMPLETE)
+ .init(ChangeDistributor.class.getName())
+ .chain(CreateWorkplace.class.getName())
+ .chain(CreateWorkflowContext.class.getName())
+ .build();
+ workflowStore.register(workflow);
+ }
+
+ public abstract static class AbsWorkflowWorklet extends AbstractWorklet {
+
+ protected WorkflowDescription getWorkflowDesc(WorkflowContext context) throws WorkflowException {
+
+ JsonNode root = ((JsonDataModelTree) context.data()).root();
+ return DefaultWorkflowDescription.valueOf(root);
+ }
+ }
+
+ public static class ChangeDistributor extends AbsWorkflowWorklet {
+
+ @Override
+ public void process(WorkflowContext context) throws WorkflowException {
+
+ String workplaceName = getWorkflowDesc(context).workplaceName();
+ // Sets the workflow job distribution hash so that this workflow is executed on the
+ // same cluster node as the other tasks of this workplace.
+ ((SystemWorkflowContext) context).setDistributor(workplaceName);
+
+ context.completed();
+ }
+ }
+
+ public static class CreateWorkplace extends AbsWorkflowWorklet {
+
+ @Override
+ public boolean isNext(WorkflowContext context) throws WorkflowException {
+
+ WorkflowDescription wfDesc = getWorkflowDesc(context);
+
+ Workplace workplace = context.workplaceStore().getWorkplace(wfDesc.workplaceName());
+ return Objects.isNull(workplace);
+ }
+
+ @Override
+ public void process(WorkflowContext context) throws WorkflowException {
+
+ WorkflowDescription wfDesc = getWorkflowDesc(context);
+
+ // creates workplace with empty data model
+ DefaultWorkplace workplace = new DefaultWorkplace(wfDesc.workplaceName(), new JsonDataModelTree());
+ log.info("registerWorkplace {}", workplace);
+ context.waitCompletion(WorkflowDataEvent.class, wfDesc.workplaceName(),
+ () -> context.workplaceStore().registerWorkplace(wfDesc.workplaceName(), workplace),
+ 60000L
+ );
+ }
+
+ @Override
+ public boolean isCompleted(WorkflowContext context, Event event) throws WorkflowException {
+
+ if (!(event instanceof WorkflowDataEvent)) {
+ return false;
+ }
+
+ WorkflowDataEvent wfEvent = (WorkflowDataEvent) event;
+ WorkflowData wfData = wfEvent.subject();
+
+ WorkflowDescription wfDesc = getWorkflowDesc(context);
+
+ if (wfData instanceof Workplace
+ && Objects.equals(wfData.name(), wfDesc.workplaceName())
+ && wfEvent.type() == INSERT) {
+ log.info("isCompleted(true): event:{}, context:{}, workplace:{}",
+ event, context, wfDesc.workplaceName());
+ return true;
+ } else {
+ log.info("isCompleted(false) event:{}, context:{}, workplace:{}",
+ event, context, wfDesc.workplaceName());
+ return false;
+ }
+ }
+ }
+
+ public static class CreateWorkflowContext extends AbsWorkflowWorklet {
+
+ private static final String SUBMITTED = "submitted";
+
+ private boolean isSubmitted(WorkflowContext context) throws WorkflowException {
+ JsonNode node = ((JsonDataModelTree) context.data()).nodeAt("/" + SUBMITTED);
+ if (!(node instanceof BooleanNode)) {
+ return false;
+ }
+ return node.asBoolean();
+ }
+
+ private void submitTrue(WorkflowContext context) throws WorkflowException {
+ JsonNode root = ((JsonDataModelTree) context.data()).root();
+ if (!(root instanceof ObjectNode)) {
+ throw new WorkflowException("Invalid root node for " + context);
+ }
+ ((ObjectNode) root).put(SUBMITTED, true);
+ }
+
+ @Override
+ public boolean isNext(WorkflowContext context) throws WorkflowException {
+
+ WorkflowDescription wfDesc = getWorkflowDesc(context);
+
+ String contextName = DefaultWorkflowContext.nameBuilder(wfDesc.id(), wfDesc.workplaceName());
+ if (Objects.isNull(context.workplaceStore().getContext(contextName))) {
+ return (!isSubmitted(context));
+ } else {
+ return false;
+ }
+ }
+
+ @Override
+ public void process(WorkflowContext context) throws WorkflowException {
+
+ WorkflowDescription wfDesc = getWorkflowDesc(context);
+
+ Workplace workplace = context.workplaceStore().getWorkplace(wfDesc.workplaceName());
+ if (Objects.isNull(workplace)) {
+
+ log.error("Failed to find workplace with " + wfDesc.workplaceName());
+ throw new WorkflowException("Failed to find workplace with " + wfDesc.workplaceName());
+ }
+
+ Workflow workflow = context.workflowStore().get(wfDesc.id());
+ if (Objects.isNull(workflow)) {
+ throw new WorkflowException("Failed to find workflow with " + wfDesc.id());
+ }
+
+ //String contextName = DefaultWorkflowContext.nameBuilder(wfDesc.id(), wfDesc.workplaceName());
+ String contextName = wfDesc.workflowContextName();
+ if (Objects.nonNull(context.workplaceStore().getContext(contextName))) {
+ throw new WorkflowException(contextName + " exists already");
+ }
+
+ JsonDataModelTree subTree = new JsonDataModelTree(wfDesc.data());
+ WorkflowContext buildingContext = workflow.buildContext(workplace, subTree);
+ log.info("registerContext {}", buildingContext.name());
+ context.workplaceStore().registerContext(buildingContext.name(), buildingContext);
+ submitTrue(context);
+
+ context.completed();
+ }
+ }
+}
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/example/SampleWorkflow.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/example/SampleWorkflow.java
new file mode 100644
index 0000000..a1ca9cc
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/example/SampleWorkflow.java
@@ -0,0 +1,287 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.onosproject.workflow.impl.example;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.JsonNodeType;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import org.apache.felix.scr.annotations.Activate;
+import org.apache.felix.scr.annotations.Component;
+import org.apache.felix.scr.annotations.Deactivate;
+import org.apache.felix.scr.annotations.Reference;
+import org.apache.felix.scr.annotations.ReferenceCardinality;
+import org.onosproject.net.device.DeviceService;
+import org.onosproject.workflow.api.AbstractWorklet;
+import org.onosproject.workflow.api.DataModelTree;
+import org.onosproject.workflow.api.ImmutableListWorkflow;
+import org.onosproject.workflow.api.JsonDataModelTree;
+import org.onosproject.workflow.api.Workflow;
+import org.onosproject.workflow.api.WorkflowContext;
+import org.onosproject.workflow.api.WorkflowException;
+import org.onosproject.workflow.api.WorkflowExecutionService;
+import org.onosproject.workflow.api.WorkflowStore;
+import org.onosproject.workflow.api.WorkplaceStore;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.net.URI;
+
+/**
+ * Class for sample workflow.
+ */
+@Component(immediate = true)
+public class SampleWorkflow {
+
+ private static final Logger log = LoggerFactory.getLogger(SampleWorkflow.class);
+
+ @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+ protected WorkflowStore workflowStore;
+
+ @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+ protected WorkplaceStore workplaceStore;
+
+ @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+ protected WorkflowExecutionService workflowExecutionService;
+
+ @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
+ protected DeviceService deviceService;
+
+
+ @Activate
+ public void activate() {
+ log.info("Activated");
+
+ registerWorkflows();
+
+ }
+
+ @Deactivate
+ public void deactivate() {
+ log.info("Deactivated");
+ }
+
+ /**
+ * Registers example workflows.
+ */
+ private void registerWorkflows() {
+ // registering class-loader
+ workflowStore.registerLocal(this.getClass().getClassLoader());
+
+ // registering new workflow definition
+ URI uri = URI.create("sample.workflow-0");
+ Workflow workflow = ImmutableListWorkflow.builder()
+ .id(uri)
+ .chain(SampleWorklet1.class.getName())
+ .chain(SampleWorklet2.class.getName())
+ .chain(SampleWorklet3.class.getName())
+ .chain(SampleWorklet4.class.getName())
+ .chain(SampleWorklet5.class.getName())
+ .build();
+ workflowStore.register(workflow);
+
+ // registering new workflow definition
+ uri = URI.create("sample.workflow-1");
+ workflow = ImmutableListWorkflow.builder()
+ .id(uri)
+ .chain(SampleWorklet3.class.getName())
+ .chain(SampleWorklet2.class.getName())
+ .chain(SampleWorklet1.class.getName())
+ .chain(SampleWorklet4.class.getName())
+ .chain(SampleWorklet5.class.getName())
+ .build();
+ workflowStore.register(workflow);
+
+ // registering new workflow definition
+ uri = URI.create("sample.workflow-2");
+ workflow = ImmutableListWorkflow.builder()
+ .id(uri)
+ .chain(SampleWorklet1.class.getName())
+ .chain(SampleWorklet3.class.getName())
+ .chain(SampleWorklet2.class.getName())
+ .chain(SampleWorklet4.class.getName())
+ .chain(SampleWorklet5.class.getName())
+ .build();
+ workflowStore.register(workflow);
+ }
+
+ /**
+ * Abstract class for sample worklet.
+ */
+ public abstract static class AbsSampleWorklet extends AbstractWorklet {
+
+ protected static final String SAMPLE_DATAMODEL_PTR = "/sample/job";
+
+ /**
+ * Constructor for sample worklet.
+ */
+ protected AbsSampleWorklet() {
+
+ }
+
+ /**
+ * Allocates or gets data model.
+ * @param context workflow context
+ * @return json object node
+ * @throws WorkflowException workflow exception
+ */
+ protected ObjectNode allocOrGetModel(WorkflowContext context) throws WorkflowException {
+
+ JsonDataModelTree tree = (JsonDataModelTree) context.data();
+ JsonNode params = tree.root();
+
+ if (params.at(SAMPLE_DATAMODEL_PTR).getNodeType() == JsonNodeType.MISSING) {
+ tree.alloc(SAMPLE_DATAMODEL_PTR, DataModelTree.Nodetype.MAP);
+ }
+ return (ObjectNode) params.at(SAMPLE_DATAMODEL_PTR);
+ }
+
+ /**
+ * Gets data model.
+ * @param context workflow context
+ * @return json object node
+ * @throws WorkflowException workflow exception
+ */
+ protected ObjectNode getDataModel(WorkflowContext context) throws WorkflowException {
+ DataModelTree tree = context.data();
+ return ((JsonDataModelTree) tree.subtree(SAMPLE_DATAMODEL_PTR)).rootObject();
+ }
+
+ /**
+ * Sleeps for 'ms' milliseconds.
+ * @param ms milliseconds to sleep
+ */
+ protected void sleep(long ms) {
+ try {
+ Thread.sleep(ms);
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
+ /**
+ * Class for sample worklet-1.
+ */
+ public static class SampleWorklet1 extends AbsSampleWorklet {
+ @Override
+ public void process(WorkflowContext context) throws WorkflowException {
+ ObjectNode node = getDataModel(context);
+ node.put("work1", "done");
+ log.info("workflow-process {}-{}", context.workplaceName(), this.getClass().getSimpleName());
+ sleep(30);
+
+ context.completed(); // Complete the worklet's job within process()
+ }
+
+ @Override
+ public boolean isNext(WorkflowContext context) throws WorkflowException {
+ ObjectNode node = allocOrGetModel(context);
+ log.info("workflow-isNext {}-{}", context.workplaceName(), this.getClass().getSimpleName());
+ sleep(30);
+ return !node.has("work1");
+
+ }
+ }
+
+ /**
+ * Class for sample worklet-2 (using timeout).
+ */
+ public static class SampleWorklet2 extends AbsSampleWorklet {
+ @Override
+ public void process(WorkflowContext context) throws WorkflowException {
+ ObjectNode node = getDataModel(context);
+ node.put("work2", "done");
+ log.info("workflow-process {}-{}", context.workplaceName(), this.getClass().getSimpleName());
+ sleep(50);
+
+ context.waitFor(50L); // Timeout will occur after 50 milliseconds.
+ }
+
+ @Override
+ public void timeout(WorkflowContext context) throws WorkflowException {
+ context.completed(); // Complete the worklet's job on timeout
+ }
+
+ @Override
+ public boolean isNext(WorkflowContext context) throws WorkflowException {
+ ObjectNode node = allocOrGetModel(context);
+ log.info("workflow-isNext {}-{}", context.workplaceName(), this.getClass().getSimpleName());
+ sleep(50);
+ return !node.has("work2");
+ }
+ }
+
+ public static class SampleWorklet3 extends AbsSampleWorklet {
+ @Override
+ public void process(WorkflowContext context) throws WorkflowException {
+ ObjectNode node = getDataModel(context);
+ node.put("work3", "done");
+ log.info("workflow-process {}-{}", context.workplaceName(), this.getClass().getSimpleName());
+ sleep(10);
+
+ context.completed();
+ }
+
+ @Override
+ public boolean isNext(WorkflowContext context) throws WorkflowException {
+ ObjectNode node = allocOrGetModel(context);
+ log.info("workflow-isNext {}-{}", context.workplaceName(), this.getClass().getSimpleName());
+ sleep(10);
+ return !node.has("work3");
+ }
+ }
+
+ public static class SampleWorklet4 extends AbsSampleWorklet {
+ @Override
+ public void process(WorkflowContext context) throws WorkflowException {
+ ObjectNode node = getDataModel(context);
+ node.put("work4", "done");
+ log.info("workflow-process {}-{}", context.workplaceName(), this.getClass().getSimpleName());
+ sleep(10);
+
+ context.completed();
+ }
+
+ @Override
+ public boolean isNext(WorkflowContext context) throws WorkflowException {
+ ObjectNode node = allocOrGetModel(context);
+ log.info("workflow-isNext {}-{}", context.workplaceName(), this.getClass().getSimpleName());
+ sleep(10);
+ return !node.has("work4");
+ }
+ }
+
+ public static class SampleWorklet5 extends AbsSampleWorklet {
+ @Override
+ public void process(WorkflowContext context) throws WorkflowException {
+ ObjectNode node = getDataModel(context);
+ node.put("work5", "done");
+ log.info("workflow-process {}-{}", context.workplaceName(), this.getClass().getSimpleName());
+ sleep(10);
+
+ context.completed();
+ }
+
+ @Override
+ public boolean isNext(WorkflowContext context) throws WorkflowException {
+ ObjectNode node = allocOrGetModel(context);
+ log.info("workflow-isNext {}-{}", context.workplaceName(), this.getClass().getSimpleName());
+ sleep(10);
+ return !node.has("work5");
+ }
+ }
+
+}
\ No newline at end of file
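For reference, a minimal sketch of how one more sample workflow could be registered inside registerWorkflows(), reusing the worklets above in yet another order; the URI is hypothetical and any unused id would do.

// Hypothetical additional registration following the pattern above,
// placed at the end of registerWorkflows() where uri and workflow are already declared.
uri = URI.create("sample.workflow-3");
workflow = ImmutableListWorkflow.builder()
        .id(uri)
        .chain(SampleWorklet5.class.getName())
        .chain(SampleWorklet1.class.getName())
        .build();
workflowStore.register(workflow);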
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/example/package-info.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/example/package-info.java
new file mode 100644
index 0000000..2fbc9549a
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/example/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Workflow example package.
+ */
+package org.onosproject.workflow.impl.example;
\ No newline at end of file
diff --git a/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/package-info.java b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/package-info.java
new file mode 100644
index 0000000..f82c452
--- /dev/null
+++ b/apps/workflow/app/src/main/java/org/onosproject/workflow/impl/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2018-present Open Networking Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Workflow implementation package.
+ */
+package org.onosproject.workflow.impl;
\ No newline at end of file
diff --git a/apps/workflow/app/src/main/resources/OSGI-INF/blueprint/shell-config.xml b/apps/workflow/app/src/main/resources/OSGI-INF/blueprint/shell-config.xml
new file mode 100644
index 0000000..cee835a
--- /dev/null
+++ b/apps/workflow/app/src/main/resources/OSGI-INF/blueprint/shell-config.xml
@@ -0,0 +1,20 @@
+<blueprint xmlns="http://www.osgi.org/xmlns/blueprint/v1.0.0">
+
+ <command-bundle xmlns="http://karaf.apache.org/xmlns/shell/v1.1.0">
+ <command>
+ <action class="org.onosproject.workflow.cli.WorkFlowStoreCommand"></action>
+ </command>
+ <command>
+ <action class="org.onosproject.workflow.cli.WorkplaceStoreCommand"></action>
+ </command>
+ <command>
+ <action class="org.onosproject.workflow.cli.WorkFlowCommand"></action>
+ </command>
+ <command>
+ <action class="org.onosproject.workflow.cli.WorkFlowEventMapCommand"></action>
+ </command>
+ <command>
+ <action class="org.onosproject.workflow.cli.WorkFlowTestCommand"></action>
+ </command>
+ </command-bundle>
+</blueprint>