Prechádzať zdrojové kódy

Cleanup repo. Dependencies and (static) build are handled by Nix.

Joeri Exelmans 3 rokov pred
rodič
commit
709fd31967
50 zmenil súbory, kde vykonal 5384 pridanie a 4147 odobranie
  1. 0 6
      .gitmodules
  2. 0 186
      lib/HistoryDAG.mjs
  3. 0 175
      lib/screenshare.js
  4. 0 332
      lib/screenshare2.js
  5. 0 266
      lib/test_HistoryDAG.mjs
  6. 0 42
      lib/uitools.js
  7. 0 48
      lib/versioning/DisabledCells.js
  8. 0 256
      lib/versioning/History.js
  9. 0 37
      lib/versioning/README.txt
  10. 0 56
      lib/versioning/SelectionHandler.js
  11. 0 7
      lib/versioning/build_client.sh
  12. 0 1
      lib/versioning/draghandler_statechart.drawio
  13. 0 268
      lib/versioning/test_History.js
  14. 57 0
      nix/default.nix
  15. 16 0
      nix/npm_deps/README.txt
  16. 17 0
      nix/npm_deps/default.nix
  17. 588 0
      nix/npm_deps/node-env.nix
  18. 201 0
      nix/npm_deps/node-packages.nix
  19. 9 0
      nix/npm_deps/package.json
  20. 39 0
      nix/static-webapp-builder.sh
  21. 0 1
      sccd
  22. 14 0
      scripts/README.md
  23. 20 0
      scripts/build_collab_plugin.sh
  24. 7 0
      scripts/build_static_app.sh
  25. 10 0
      scripts/dev_server.sh
  26. 1 26
      shell.nix
  27. 7 0
      src/collab/README.txt
  28. 0 0
      src/collab/client/DragHandler.js
  29. 0 0
      src/collab/client/GhostOverlays.js
  30. 0 0
      src/collab/client/UserColors.js
  31. 16 11
      lib/versioning/client.js
  32. 9 8
      src/main/webapp/plugins/cdf/versioning.js
  33. 0 0
      src/collab/client/statechart_src/client_statechart.drawio
  34. BIN
      src/collab/client/statechart_src/client_statechart.pdf
  35. 1 1
      lib/versioning/client.xml
  36. 19 11
      lib/versioning/run_server.js
  37. 2 2
      src/main/webapp/js/app.min.js
  38. 3 8
      src/main/webapp/js/diagramly/App.js
  39. 166 166
      src/main/webapp/js/stencils.min.js
  40. 4182 0
      src/main/webapp/myPlugins/collab.js
  41. 0 0
      src/main/webapp/myPlugins/cursor.svg
  42. 0 0
      src/main/webapp/myPlugins/ftgpm.js
  43. 0 0
      src/main/webapp/myPlugins/svg-viewport.js
  44. 0 94
      src/main/webapp/plugins/cdf/ftgpm.js
  45. 0 29
      src/main/webapp/plugins/cdf/logevents.js
  46. 0 41
      src/main/webapp/plugins/cdf/messaging.js
  47. 0 41
      src/main/webapp/plugins/cdf/screenshare.js
  48. 0 75
      src/main/webapp/plugins/cdf/sendshapes.js
  49. 0 1952
      src/main/webapp/plugins/cdf/versioning.browser.js
  50. 0 1
      websockets

+ 0 - 6
.gitmodules

@@ -1,9 +1,3 @@
-[submodule "websockets"]
-	path = websockets
-	url = git@github.com:joeriexelmans/websockets.git
 [submodule "mxgraph"]
 	path = mxgraph
 	url = git@github.com:joeriexelmans/mxgraph.git
-[submodule "sccd"]
-	path = sccd
-	url = git@msdl.uantwerpen.be:arys/sccd.git

+ 0 - 186
lib/HistoryDAG.mjs

@@ -1,186 +0,0 @@
-"use strict";
-
-// HistoryItems only record the operation that was carried out (by someone, somewhere), and its parents and children in the DAG.
-// They do not contain any other information (is the operation included in 'our' state, or was it undone/rolled back/in conflict with another operation). This allows the same HistoryItem to be added to multiple HistoryDAGs: An operation may be 'visible' in one HistoryDAG, but not in another (e.g. to simulate that it has not yet been received through the network). The main practical benefit of this is testing.
-export class HistoryItem {
-  constructor(value, parents) {
-    this.value = value; // details on the operation that was carried out. 
-    this.parents = parents; // array of HistoryItem, order has no meaning
-
-    // DAG, but doubly-linked:
-    this.children = [];
-    for (const parent of parents) {
-      parent.children.push(this);
-    }
-  }
-}
-
-export class HistoryDAG {
-  // Properties of callbacks:
-  //  - undo/redo:
-  //      undo(A) will only be called if redo(A) was the most recent of undo/redo to be called on A.
-  //      redo(A) will only be called if undo(A) was the most recent of undo/redo to be called on A, or if no call undo(A) or redo(A) was made yet.
-  //      If we are in state S, and redo(A) brings us to state S', then subsequent undo(A) brings us to state S.
-  //      If we are in state S, and undo(B) brings us to state S', then subsequent redo(B) brings us to state S.
-  //  - isConflict defines a binary relation with the following properties:
-  //       symmetric:  isConflict(A,B) => isConflict(B,A)
-  //       transitive: isConflict(A,B) && isConflict(B,C) => isConflict(A,C)
-  //      2 operations are in conflict when the order in which the operations are executed ('redone') may lead to a different state.
-  //      isConflict will never be called with the same operation, hence isConflict(A,A) is allowed to return any result.
-  //  - resolve defines a total ordering on conflicting operations and always returns the 'greatest' (or smallest) element of 2:
-  //       resolve(A,B) == B && resolve(B,C) == C => resolve(A,C) == C
-  constructor(redo, undo, isConflict, resolve, check_preconditions = false) {
-    this.heads = []; // Array but order has no meaning. If heads consists of multiple items, then those items are concurrent.
-
-    if (check_preconditions) {
-      // Perform internal assertions at the cost of performance. Can be disabled in production.
-      this.done = new Set();
-      this.redo = item => {
-        redo(item);
-        if (this.done.has(item)) {
-          throw new Error("Precondition failed: Already redone");
-        }
-        this.done.add(item);
-      };
-      this.undo = item => {
-        undo(item);
-        if (!this.done.has(item)) {
-          throw new Error("Precondition failed: Already undone");
-        }
-        this.done.delete(item);
-      };
-    } else {
-      this.redo = redo;
-      this.undo = undo;
-    }
-    this.isConflict = isConflict;
-    this.resolve = resolve;
-
-    this.lost = new Set(); // Items that lost a conflict in favor of some other item
-    this.won = new Map();
-  }
-
-  // Appends newItem to the History DAG, and advances HEADs to include the new item.
-  add(newItem) {
-    for (const parent of newItem.parents) {
-      const i = this.heads.indexOf(parent);
-      if (i >= 0) {
-        // remove parent from heads, newItem will replace it
-        this.heads.splice(i, 1);
-      }
-    }
-
-    const conflictingItems = [];
-    for (const concurrentItem of iterTopoAncestors(this.heads, item => !isAncestor2(item, newItem))) {
-      // console.log(newItem.value.tag, "is concurrent with", concurrentItem.value.tag)
-      if (this.isConflict(concurrentItem, newItem)) {
-        if (!this.lost.has(concurrentItem)) {
-          conflictingItems.push(concurrentItem);
-        }
-      }
-    }
-
-
-    if (conflictingItems.length > 0) {
-      // console.log("conflictingItems:", conflictingItems.map(item=>item.value.tag))
-
-      // Only the last item in the array is the 'original conflict', because it is the deepest item in our child-first topologically ordered visit.
-      // All other items are guaranteed to be descendants of the first item.
-      const conflictingItem = conflictingItems[conflictingItems.length-1];
-
-      const wonFrom = this.won.get(conflictingItem);
-      if (wonFrom && isAncestor2(wonFrom, newItem)) {
-        // Special case: concurrentItem has already won from an ancestor of newItem
-        // -> as a result, newItem loses from concurrentItem
-
-        // console.log(newItem.value.tag, "loses against", winner.value.tag);
-        // nothing to be done
-      }
-      else {
-        const winner = this.resolve(conflictingItem, newItem);
-        const loser = winner === newItem ? conflictingItem : newItem;
-
-        if (winner === newItem) {
-          // console.log(newItem.value.tag, "wins against", loser.value.tag);
-          // Rollback:
-          for (const item of conflictingItems) {
-            // if (!this.lost.has(item))
-              this.undo(item);
-          }
-          this.redo(newItem);
-        } else {
-          // console.log(newItem.value.tag, "loses against", winner.value.tag);
-          // nothing to be done
-        }
-        this.lost.add(loser);
-        this.won.set(winner, loser);
-      }
-    } else {
-      // no conflict
-      this.redo(newItem);
-    }
-
-    this.heads.push(newItem);
-  }
-}
-
-
-// Is parent ancestor of child?
-// Performs DFS search from child to parent, following 'parents' links.
-// Can be slow if child has many ancestors.
-export function isAncestor(parent, child) {
-  if (parent === child) {
-    return true;
-  }
-  for (const p of child.parents) {
-    if (isAncestor(parent, p)) {
-      return true;
-    }
-  }
-  return false;
-}
-
-// Is parent ancestor of child?
-// Performs DFS search from parent to child, following 'children' links.
-// Best performance when the parent does not have many descendants.
-export function isAncestor2(parent, child) {
-  if (parent === child) {
-    return true;
-  }
-  for (const c of parent.children) {
-    if (isAncestor2(c, child)) {
-      return true;
-    }
-  }
-  return false;
-}
-
-
-// Generator yielding ancestors of startItems in topological order, child-first.
-export function* iterTopoAncestors(startItems, filter) {
-  const visited = new Set();
-
-  function* recurse(item) {
-    yield item;
-
-    visited.add(item);
-
-    const nextRoundItems = item.parents.filter(parent => {
-      // only visit each item once:
-      if (visited.has(parent)) return false;
-      for (const child of parent.children) {
-        // must have visited all children before:
-        if (!visited.has(child)) return false;
-      }
-      return filter(parent);
-    });
-
-    for (const item of nextRoundItems) {
-      yield* recurse(item);
-    }
-  }
-
-  for (const item of startItems) {
-    yield* recurse(item);
-  }
-}

+ 0 - 175
lib/screenshare.js

@@ -1,175 +0,0 @@
-const decodeCells = xmlString => {
-  const parsedXml = new DOMParser().parseFromString(xmlString, "text/xml").firstElementChild;
-  const codec = new mxCodec();
-  return codec.decode(parsedXml);
-};
-
-const encodeCells = cells => {
-    const codec = new mxCodec();
-    const encoded = codec.encode(cells);
-    return new XMLSerializer().serializeToString(encoded);
-};
-
-class ScreenShare {
-  constructor(client, graph, confirm, alert) {
-    this.graph = graph;
-    this.sharingWith = {};
-    this.confirm = confirm;
-    this.alert = alert;
-
-    const shareEvent = serializer => {
-      // return new event listener for mxEventSource:
-      return (source, eventObj) => {
-        const props = serializer(eventObj.properties);
-        if (props !== undefined) {
-          Object.keys(this.sharingWith).forEach(peer => {
-            this.p2p.send(peer, "push_edit", {
-              event: eventObj.name,
-              props,
-            }, (err, data) => {
-              if (err) {}
-              else {}
-            });
-          })
-        }
-      };
-    };
-
-    // We do not use these events because they are fired in the middle of an edit transaction (instead of at the end)
-    // wrong: CELLS_ADDED(cells: Array[mxCell], parent: mxCell, index: int, absolute: bool)
-    // wrong: CELLS_MOVED(cells: Array[mxCell], dx, dy)
-    // wrong: CELLS_REMOVED(cells: Array[mxCell])
-
-    // right: MOVE_CELLS(cells: Array{mxCell], clone: bool, dx, dy, target: mxCell, event: PointerEvent})
-    this.graph.addListener(mxEvent.MOVE_CELLS, shareEvent(({cells, target, clone, dx, dy}) => {
-      const data = {
-        targetId: target ? target.id : null,
-        clone, dx, dy,
-      }
-      if (clone) {
-        // this is also true when a new cell was added
-        return {
-          ...data,
-          cellsXml: encodeCells(cells),
-        }
-      } else {
-        return {
-          ...data,
-          cellIds: cells.map(cell => cell.id),
-        }
-      }
-    }))
-
-    this.graph.addListener(mxEvent.REMOVE_CELLS, shareEvent(({cells, includeEdges}) => {
-      if (cells.length > 0) {
-        // for some reason moving cells causes REMOVE_CELLS to be fired with 0 cells - do not share the event in this case :)
-        return {
-          cellIds: cells.map(cell => cell.id),
-          includeEdges,
-        }
-      }
-    }))
-
-    this.graph.addListener(mxEvent.RESIZE_CELLS, shareEvent(({cells, bounds, previous}) => {
-      return {
-        cellIds: cells.map(cell => cell.id),
-        boundsXml: encodeCells(bounds),
-      }
-    }))
-
-    // edge: mxCell
-    // source: boolean (whether source of edge (=directed) was connected, or target)
-    // terminal: mxCell to which connect happened (potentially undefined)
-    // previous: mxCell from which disconnect happened (potentially undefined)
-    this.graph.addListener(mxEvent.CONNECT_CELL, shareEvent(({edge, source, terminal, previous}) => {
-      return {
-        edgeId: edge.id,
-        edgeGeometryXml: encodeCells(edge.geometry),
-        source,
-        terminalId: terminal ? terminal.id : null,
-        previousId: previous ? previous.id : null,
-      }
-    }))
-
-    // CELLS_RESIZED(cells: .., bounds: mxRectangle{x,y,width,height}, previous: Array[mxGeometry{x,y,width,height, ...}])
-    // CELL_CONNECTED(edge, mxCell, source: bool, [terminal: mxCell,] [previous: mxCell])
-    //   source: whether source of edge connected
-    //   terminal: cell connected to
-    //   previous: cell disconnected from
-
-    // Handler for incoming requests from other peers
-    this.p2p = new PeerToPeer(client, {
-      // incoming request from another peer
-      "push_edit": (from, {event, props}, reply) => {
-        if (this.sharingWith[from]) {
-          // received edit from other peer
-          this.graph.setEventsEnabled(false);
-          ({
-            [mxEvent.MOVE_CELLS]: ({cellsXml, cellIds, targetId, clone, dx, dy}) => {
-              const target = targetId !== null ? this.graph.model.cells[targetId] : undefined;
-              if (clone) {
-                const cells = decodeCells(cellsXml);
-                const result = this.graph.moveCells(cells,
-                  0, 0, // dx, dy - the cells themselves already contain the correct offset. using dx and dy would position them at twice their position-vector
-                  false, // clone - not necessary because decodeCells already created the cells, we want to simply "move" them to the target. if true, the cells would be given new ids
-                  target);
-              } else {
-                const cells = cellIds.map(id => this.graph.model.cells[id]);
-                this.graph.moveCells(cells, dx, dy, clone, target);
-              }
-            },
-            [mxEvent.REMOVE_CELLS]: ({cellIds, includeEdges}) => {
-              const cells = cellIds.map(id => this.graph.model.cells[id]);
-              this.graph.removeCells(cells, includeEdges);
-            },
-            [mxEvent.RESIZE_CELLS]: ({cellIds, boundsXml}) => {
-              const cells = cellIds.map(id => this.graph.model.cells[id]);
-              const bounds = decodeCells(boundsXml);
-              this.graph.resizeCells(cells, bounds,
-                false); // recurse
-            },
-            [mxEvent.CONNECT_CELL]: ({edgeId, edgeGeometryXml, source, terminalId, previousId}) => {
-              const edge = this.graph.model.cells[edgeId];
-              const geometry = decodeCells(edgeGeometryXml);
-              const terminal = terminalId !== null ? this.graph.model.cells[terminalId] : undefined;
-              const previous = previousId !== null ? this.graph.model.cells[previousId] : undefined;
-              edge.setGeometry(geometry);
-              this.graph.connectCell(edge, terminal, source,
-                null); // "constraint". the docs say: optional <mxConnectionConstraint>. are we missing something here?
-            },
-          }[event])(props);
-          this.graph.setEventsEnabled(true);
-          reply(); // acknowledge
-        }
-      },
-      "init_screenshare": (from, graphSerialized, reply) => {
-        const yes = () => {
-          const doc = mxUtils.parseXml(graphSerialized);
-          const codec = new mxCodec(doc);
-          codec.decode(doc.documentElement, graph.model);
-          this.sharingWith[from] = true;
-          reply(); // acknowledge
-          this.alert("You are now <b>screen sharing</b> with " + shortUUID(from));
-        };
-        const no = () => {
-          reply("denied")
-        };
-        this.confirm(`Peer ${shortUUID(from)} wants to <b>screen share</b>.<br />Your diagram will be erased and replaced by his/hers.<br /><br />Accept?`, yes, no);
-      },
-    });
-  }
-
-  initshare(peer) {
-    const codec = new mxCodec();
-    const graphSerialized = mxUtils.getXml(codec.encode(this.graph.model));
-    this.p2p.send(peer, "init_screenshare", graphSerialized, (err, data) => {
-      if (err) {
-        this.alert(err)
-      }
-      else {
-        this.alert("Accepted: You are now <b>screen sharing</b> with " + shortUUID(peer));
-        this.sharingWith[peer] = true;
-      }
-    });
-  }
-}

+ 0 - 332
lib/screenshare2.js

@@ -1,332 +0,0 @@
-ScreenShare = (function() {
-
-  function encode(cells) {
-    const codec = new mxCodec();
-    const encoded = codec.encode(cells);
-    return mxUtils.getXml(encoded);
-  };
-
-
-  return class {
-
-    decode(xmlString) {
-      const parsedXml = mxUtils.parseXml(xmlString).documentElement;
-      const codec = new mxCodec();
-      codec.lookup = id => this.graph.model.cells[id];
-      return codec.decode(parsedXml);
-    }
-
-    constructor(client, peers, graph, undoManager, confirm, alert) {
-      this.graph = graph;
-      this.undoManager = undoManager;
-      this.sharingWith = null;
-      this.confirm = confirm;
-      this.alert = alert;
-
-      const otherPeerEndScreenshare = peer => {
-        if (this.sharingWith === peer) {
-          this.alert(`Peer ${shortUUID(peer)} left. You are alone again.`);
-          this.sharingWith = null;
-        }
-      }
-
-      peers.on('leave', otherPeerEndScreenshare);
-
-      const share = (what, data) => {
-        if (this.sharingWith) {
-          this.p2p.send(this.sharingWith, what, data,
-            err => { if (err) console.log("ignoring err:", err) });
-        }
-      }
-
-      let listenerEnabled = true;
-
-      this.undoManager.addListener(null, (source, eventObj) => {
-        if (listenerEnabled) {
-          if (eventObj.properties.edit) {
-            const {changes, redone, undone, significant} = eventObj.properties.edit;
-            share("undoEvent", {
-              encodedChanges: changes.map(c => encode(c)),
-              redone,
-              undone,
-              significant,
-            });            
-          }
-        }
-      });
-
-      this.graph.selectionModel.addListener(mxEvent.CHANGE, (source, eventObj) => {
-        if (listenerEnabled) {
-          const {added, removed} = eventObj.properties;
-          share("selectionEvent", {
-            addedIds: removed ? removed.map(cell => cell.id) : [],
-            removedIds: added ? added.map(cell => cell.id) : [],
-          });
-        }
-      });
-
-      // Locking
-
-      const locked = {}; // map cell id => mxCellHighlight
-
-      const lockCell = cell => {
-        const highlight = locked[cell.id];
-        if (!highlight) {
-          const highlight = new mxCellHighlight(this.graph, "#7700ff", 6);
-          highlight.highlight(this.graph.view.getState(cell));
-          locked[cell.id] = highlight;
-        }
-      };
-      const unlockCell = cell => {
-        const highlight = locked[cell.id];
-        if (highlight) {
-          highlight.destroy();
-          delete locked[cell.id]
-        }
-      }
-
-      // Locking part #1: Intercepting mxGraph.fireMouseEvent
-      const oldFireMouseEvent = this.graph.fireMouseEvent;
-      this.graph.fireMouseEvent = function(evtName, me, sender) {
-        if (me.state && locked[me.state.cell.id]) {
-          // clicked shape is locked
-          return;
-        }
-        oldFireMouseEvent.apply(this, arguments);
-      }
-      // Locking part #2: Ignore double clicks on locked cells
-      const oldDblClick = this.graph.dblClick;
-      this.graph.dblClick = function(evt, cell) {
-        if (cell && locked[cell.id]) {
-          // clicked shape is locked
-          return;
-        }
-        oldDblClick.apply(this, arguments);
-      }
-      // Locking part #3: Protect locked cells from ever being selected
-      const oldMxSelectionChange = mxSelectionChange; // override constructor :)
-      mxSelectionChange = function(selectionModel, added, removed) {
-        oldMxSelectionChange.apply(this, arguments);
-        if (this.added) {
-          this.added = this.added.filter(cell => !locked[cell.id]);
-        }
-      }
-      mxSelectionChange.prototype = oldMxSelectionChange.prototype;
-
-      // mxGraphHandler overrides to get previews of moving shapes
-      // These overrides wrap the original implementations and additionally send messages to the "screensharee".
-      // The screensharee uses this messages to draw previews at his side.
-
-      // Begin of move
-      const oldStart = this.graph.graphHandler.start;
-      this.graph.graphHandler.start = function(cell, x, y, cells) {
-        oldStart.apply(this, arguments);
-        // cells that will be moved on our side
-        cells = this.graph.graphHandler.getCells(cell);
-        share("graphHandlerStart", {
-          cellIds: cells.map(cell => cell.id),
-          x, y
-        });
-      };
-      // Redraw operation, caused by mouseMove event, during move
-      const oldUpdateLivePreview = this.graph.graphHandler.updateLivePreview;
-      this.graph.graphHandler.updateLivePreview = function(dx, dy) {
-        oldUpdateLivePreview.apply(this, arguments);
-        share("graphHandlerUpdateLivePreview", {dx, dy});
-      }
-      // End of move
-      const oldReset = this.graph.graphHandler.reset;
-      this.graph.graphHandler.reset = function() {
-        oldReset.apply(this, arguments);
-        share("graphHandlerReset", null); // no data
-      };
-
-
-      //// VERTEX HANDLER OVERRIDES -  a broken attempt at previewing resizing shapes ....
-
-      // const oldVertexStart = mxVertexHandler.prototype.start;
-      // mxVertexHandler.prototype.start = function(x, y, index) {
-      //   console.log("begin resize", this, x, y , index);
-      //   oldVertexStart.apply(this, arguments);
-      //   shareFunctionCall("vertexHandlerStart", {
-      //     cellId: this.state.cell.id,
-      //     x, y,
-      //     index, // number (0-7) of resize handle pressed
-      //   });
-      // }
-
-      // const oldVertexReset = mxVertexHandler.prototype.reset;
-      // mxVertexHandler.prototype.reset = function() {
-      //   console.log("reset resize");
-      //   oldVertexReset.apply(this, arguments);
-      //   shareFunctionCall("vertexHandlerReset", {
-      //     cellId: this.state.cell.id,
-      //   });
-      // }
-
-      // const oldVertexUpdateLivePreview = mxVertexHandler.prototype.updateLivePreview;
-      // mxVertexHandler.prototype.updateLivePreview = function(me) {
-      //   console.log("update resize preview", me);
-      //   oldVertexUpdateLivePreview.apply(this, arguments);
-      //   shareFunctionCall("vertexHandlerUpdateLivePreview", {
-      //     cellId: this.state.cell.id,
-      //     bounds: {
-      //       x: this.bounds.x,
-      //       y: this.bounds.y,
-      //       width: this.bounds.width,
-      //       height: this.bounds.height,
-      //     },
-      //   });
-      // }
-
-      // Handler for incoming requests from other peers
-      this.p2p = new PeerToPeer(client, {
-
-        // Handlers for received mxGraphHandler messages that we sent above.
-
-        // mxGraphHandler (moving cells)
-        "graphHandlerStart": (from, {cellIds, x, y}, reply) => {
-          if (this.sharingWith === from) {
-            // the mxGraphHandler will determine the cells to move based on the current selection
-            // a hack within a hack - we temporarily override 'getCells':
-            const oldGetCells = this.graph.graphHandler.getCells;
-            this.graph.graphHandler.getCells = function(initialCell) {
-              return cellIds.map(id => this.graph.model.cells[id]);
-            }
-            oldStart.apply(this.graph.graphHandler, [
-              null, // 'cells' - this argument isn't important since we overrided getCells
-              x, y,
-              null,
-            ]);
-            this.graph.graphHandler.checkPreview(); // force some stuff to happen
-            this.graph.graphHandler.getCells = oldGetCells; // restore override
-          }
-          reply();
-        },
-        "graphHandlerUpdateLivePreview": (from, {dx,dy}, reply) => {
-          if (this.sharingWith === from) {
-            oldUpdateLivePreview.apply(this.graph.graphHandler, [dx, dy]);
-          }
-          reply();
-        },
-        "graphHandlerReset": (from, _, reply) => {
-          if (this.sharingWith === from) {
-            oldReset.apply(this.graph.graphHandler, []);
-          }
-          reply();
-        },
-
-        "undoEvent": (from, {encodedChanges, undone, redone, significant}, reply) => {
-          if (this.sharingWith === from) {
-            try {
-              listenerEnabled = false;
-              // Undoable Edit happened at other peer
-              const changes = encodedChanges.map(encoded => {
-                const change = this.decode(encoded);
-                change.model = this.graph.model;
-                return change
-              });
-              if (undone) {
-                this.undoManager.undo();
-              }
-              else if (redone) {
-                this.undoManager.redo();
-              }
-              else {
-                // Probably not necessary to wrap in update-transaction, but won't do harm:
-                this.graph.model.beginUpdate();
-                changes.forEach(change => this.graph.model.execute(change));
-                this.graph.model.endUpdate();
-              }
-            }
-            finally {
-              listenerEnabled = true;
-              reply(); // acknowledge
-            }
-          }
-        },
-
-        "selectionEvent": (from, {addedIds, removedIds}, reply) => {
-          if (this.sharingWith === from) {
-            try {
-              listenerEnabled = false;
-              // Selection changed at other peer - lock selected cells
-              const removed = removedIds.map(id => this.graph.model.cells[id]);
-              const added = addedIds.map(id => this.graph.model.cells[id]);
-              removed.forEach(unlockCell);
-              added.forEach(lockCell);
-            }
-            finally {
-              listenerEnabled = true;
-              reply(); // acknowledge
-            }
-          }
-        },
-
-        // Received Screen Share request
-        "init_screenshare": (from, {graphSerialized, selectedCellIds}, reply) => {
-          const yes = () => {
-            const doc = mxUtils.parseXml(graphSerialized);
-            const codec = new mxCodec(doc);
-            codec.decode(doc.documentElement, this.graph.model);
-            selectedCellIds.forEach(id => lockCell(this.graph.model.cells[id]));
-            this.sharingWith = from;
-            reply(); // acknowledge
-            this.alert("You are now <b>screen sharing</b> with " + shortUUID(from));
-          };
-          const no = () => {
-            reply("denied")
-          };
-          this.confirm(`Peer ${shortUUID(from)} wants to <b>screen share</b>.<br />Your diagram will be erased and replaced by his/hers.<br /><br />Accept?`, yes, no);
-        },
-
-        "end_screenshare": (from, data, reply) => {
-          reply();
-          otherPeerEndScreenshare(from);
-        }
-      });
-
-    }
-
-    initshare(peer) {
-      const doIt = () => {
-        const graphSerialized = encode(this.graph.model);
-        const selectedCellIds = this.graph.getSelectionCells().map(cell => cell.id);
-        this.p2p.send(peer, "init_screenshare", {
-          graphSerialized,
-          selectedCellIds,
-        }, (err, data) => {
-          if (err) {
-            if (err === "denied") {
-              this.alert(`Peer ${peer} <b>denied</b> your sharing request :(`);
-            } else {
-              this.alert("Error sending screenshare request: " + err);
-            }
-          }
-          else {
-            this.alert("Accepted: You are now <b>screen sharing</b> with " + shortUUID(peer));
-            this.sharingWith = peer;
-          }
-        });
-        this.alert("Request sent. Awaiting response.")
-      }
-
-      if (this.sharingWith && this.sharingWith !== peer) {
-        // first, end earlier screenshare
-        const yes = () => {
-          this.p2p.send(this.sharingWith, "end_screenshare", null, (err, data) => {
-            // don't care about response
-            doIt();
-          })
-        };
-        const no = () => {
-          // do nothing
-        }
-        this.confirm(`To screenshare with peer ${shortUUID(peer)}, you <b>first have to end your screenshare</b> with peer ${shortUUID(this.sharingWith)}.<br /><br />OK?`,
-          yes, no);
-      } else {
-        doIt();
-      }
-    }
-  }
-})();

+ 0 - 266
lib/test_HistoryDAG.mjs

@@ -1,266 +0,0 @@
-"use strict";
-
-// Should work in browser but only tested with NodeJS
-
-import { HistoryItem, HistoryDAG, isAncestor2, iterTopoAncestors } from "./HistoryDAG.mjs";
-
-{
-  // Reinventing the wheel:
-
-  function assert(expr, msg) {
-    class AssertionError extends Error {
-      constructor(msg) {
-        super(msg);
-      }
-    }
-    if (!expr) {
-      // console.log(...arguments);
-      throw new AssertionError(msg);
-    }
-  }
-  function deepEqual(val1, val2) {
-    if (typeof(val1) !== typeof(val2)) return false;
-
-    if ((val1 === null) !== (val2 === null)) return false;
-
-    switch (typeof(val1)) {
-      case 'object':
-        for (var p in val2) {
-          if (val1[p] === undefined) return false;
-        }
-        for (var p in val1) {
-          if (!deepEqual(val1[p], val2[p])) return false;
-        }
-        return true;
-      case 'array':
-        if (val1.length !== val2.length) return false;
-        for (let i=0; i<val1.length; ++i)
-          if (!deepEqual(val1[i], val2[i])) return false;
-        return true;
-      default:
-        return val1 === val2;
-    }
-  }
-  function setsEqual(set1, set2) {
-    if (set1.size !== set2.size) return false;
-    for (const a of set1) if (!set2.has(a)) return false;
-    return true;
-  }
-  function mapsEqual(map1, map2) {
-    if (map1.size !== map2.size) return false;
-    for (const [key,value] of map1) {
-      if (value !== map2.get(key)) return false;
-      return true;
-    }
-  }
-
-
-
-
-  function runTest(verbose) {
-
-    function info() {
-      if (verbose)
-        console.log(...arguments);
-    }
-
-    const largestWins = (item1, item2) => item1.value.tag > item2.value.tag ? item1 : item2;
-    const smallestWins = (item1, item2) => item1.value.tag > item2.value.tag ? item2 : item1;
-
-    function getConcurrentItems(heads, newItem) {
-      return new Set(iterTopoAncestors(heads, item => !isAncestor2(item, newItem)));
-    }
-
-    function makeDAG(trace, resolveCb) {
-      const state = new Map();
-      function redo(item) {
-        item.prevState = state.get(item.value.target);
-        state.set(item.value.target, item.value.tag);
-        info('redo', item.value.tag);
-        trace.push({event: 'redo', tag: item.value.tag});
-      }
-      function undo(item) {
-        state.set(item.value.target, item.prevState);
-        info('undo', item.value.tag);
-        trace.push({event: 'undo', tag: item.value.tag});
-      }
-      function isConflict(item1, item2) {
-        return item1.value.target === item2.value.target
-      }
-      function resolve(item1, item2) {
-        const winner = resolveCb(item1, item2);
-        info('conflict', item1.value.tag, item2.value.tag, "winner:", winner.value.tag);
-        const loser = item1.value.tag <= item2.value.tag ? item1 : item2;
-        trace.push({event:'resolve', winner: winner.value.tag, loser: loser.value.tag});
-        return winner;
-      }
-      return [new HistoryDAG(
-        redo, undo, isConflict, resolve, // callbacks
-        true // check_preconditions
-        ), state];
-    }
-
-    function exec(insertionOrder, resolveCb) {
-      const trace = [];
-      const [dag, state] = makeDAG(trace, resolveCb);
-      insertionOrder.forEach(item => {
-        info("adding ", item.value.tag)
-        dag.add(item);
-      });
-      return [dag, state, trace];
-    }
-
-
-    {
-      const a = new HistoryItem({tag: 'a', target:1}, []);
-      const b = new HistoryItem({tag: 'b', target:2}, []);
-      const c = new HistoryItem({tag: 'c', target:1}, [a,b]);
-      const d = new HistoryItem({tag: 'd', target:1}, [a,b]);
-      const e = new HistoryItem({tag: 'e', target:1}, []);
-
-      {
-        // items concurrent with 'd':
-        const concurrentItems = getConcurrentItems([c], d);
-        assert(setsEqual(concurrentItems, new Set([c])));
-      }
-
-      {
-        // items concurrent with 'e':
-        const concurrentItems = getConcurrentItems([c,d], e);
-        assert(setsEqual(concurrentItems, new Set([a,b,c,d])));
-      }
-
-
-      {
-        info("Test case: abcde.....")
-
-        const [dag, state, trace] = exec([a,b,c,d,e], largestWins);
-
-        const expectedTrace = [
-          // inserting a:
-          {event: 'redo', tag: 'a'},
-          // inserting b:
-          {event: 'redo', tag: 'b'},
-          // inserting c:
-          {event: 'redo', tag: 'c'},
-          // inserting d:
-          {event: 'resolve', winner: 'd', loser: 'c'},
-          {event: 'undo', tag: 'c'},
-          {event: 'redo', tag: 'd'},
-          // inserting e:
-          {event: 'resolve', winner: 'e', loser: 'a'},
-          {event: 'undo', tag: 'd'},
-          {event: 'undo', tag: 'a'},
-          {event: 'redo', tag: 'e'},
-        ];
-
-        info("expectedTrace trace:", expectedTrace);
-        info("actual trace:", trace);
-
-        // slow but simple deep compare:
-        assert(deepEqual(trace, expectedTrace), "unexpected trace");
-      }
-    }
-
-    function runTest(insertionOrders, resolveCb) {
-      let first = true;
-      let firstDAG, firstState;
-      for (const order of insertionOrders) {
-        info("trying insertion order:", order.map(item=>item.value.tag));
-        const [dag, state] = exec(order, resolveCb);
-        info("state:", state);
-        info("dag.done:", [...dag.done].map(item=>item.value.tag));
-        if (first) {
-          firstDAG = dag;
-          firstState = state;
-          first = false;
-        } else {
-          assert(setsEqual(firstDAG.done, dag.done) && mapsEqual(firstState, state), "Insertion order should not affect eventual state.");
-        }
-      }
-    }
-
-    {
-      const a = new HistoryItem({tag: 'a', target:1}, []);
-      const b = new HistoryItem({tag: 'b', target:2}, []);
-      const c = new HistoryItem({tag: 'c', target:1}, [a,b]);
-      const d = new HistoryItem({tag: 'd', target:2}, [a,b]);
-      const e = new HistoryItem({tag: 'e', target:2}, [c]); // conflict with d
-      const f = new HistoryItem({tag: 'f', target:1}, [c]);
-      const g = new HistoryItem({tag: 'g', target:1}, [e,f]);
-      const h = new HistoryItem({tag: 'h', target:3}, [e,f]);
-      const i = new HistoryItem({tag: 'i', target:2}, [g]); // conflict with d
-
-      {
-        const concurrentItems = getConcurrentItems([h,i], d);
-        assert(setsEqual(concurrentItems, new Set([i, g, h, e, f, c])));
-      }
-
-      {
-        const concurrentItems = getConcurrentItems([h,d], i);
-        // info("concurrentItems:", concurrentItems);
-        assert(setsEqual(concurrentItems, new Set([d, h])));
-      }
-
-      const insertionOrders = [
-        // largestWins: e wins from d.
-        // smallestWins: d wins from e. next, d wins from i
-        [a,b,c,e,f,g,h,d,i],
-
-        // largestWins: i wins from d.
-        // smallestWins: d wins from i.
-        [a,b,c,e,f,g,h,i,d],
-
-        // no idea about these, but they are legal insertion orders so they should give the same results:
-        [b,a,d,c,e,f,h,g,i],
-        [a,b,d,c,e,f,h,g,i],
-        [a,b,d,c,e,f,g,i,h],
-        [b,a,d,c,e,f,g,i,h],
-      ]
-
-
-      info("Test case: largestWins...")
-      runTest(insertionOrders, largestWins);
-      info("Test case: smallestWins...")
-      runTest(insertionOrders, smallestWins);
-    }
-
-    {
-      const a = new HistoryItem({tag: 'a', target:1}, []);
-      const b = new HistoryItem({tag: 'b', target:2}, []);
-      const c = new HistoryItem({tag: 'c', target:1}, [a,b]);
-      const d = new HistoryItem({tag: 'd', target:2}, [a,b]);
-      const e = new HistoryItem({tag: 'e', target:2}, [c]); // conflict with d
-      const f = new HistoryItem({tag: 'f', target:1}, [c]);
-      const g = new HistoryItem({tag: 'g', target:1}, [e,f]);
-      const h = new HistoryItem({tag: 'h', target:3}, [e,f]);
-      const zero = new HistoryItem({tag: '0', target:2}, [g]); // conflict with d
-
-      const insertionOrders = [
-        // largestWins: e wins from d.
-        // smallestWins: d wins from e. next, d wins from i
-        [a,b,c,e,f,g,h,d,zero],
-
-        // largestWins: i wins from d.
-        // smallestWins: d wins from i.
-        [a,b,c,e,f,g,h,zero,d],
-
-        // no idea about these, but they are legal insertion orders so they should give the same results:
-        [b,a,d,c,e,f,h,g,zero],
-        [a,b,d,c,e,f,h,g,zero],
-        [a,b,d,c,e,f,g,zero,h],
-        [b,a,d,c,e,f,g,zero,h],
-      ]
-
-      info("Test case: largestWins...")
-      runTest(insertionOrders, largestWins);
-      info("Test case: smallestWins...")
-      runTest(insertionOrders, smallestWins);      
-    }
-
-  }
-
-  runTest(true); // may throw
-  console.log("OK");
-
-}

+ 0 - 42
lib/uitools.js

@@ -1,42 +0,0 @@
-class UiTools {
-
-  constructor(ui) {
-    this.ui = ui;
-  }
-
-  yesNo(msg, yesCallback, noCallback) {
-    const yesButton = mxUtils.button("Yes", () => {
-      this.ui.hideDialog();
-      yesCallback();
-    });
-    yesButton.className = 'geBtn';
-    const noButton = mxUtils.button("No", () => {
-      this.ui.hideDialog();
-      noCallback();
-    });
-    noButton.className = 'geBtn';
-    this._showpopup(msg, [yesButton, noButton]);
-  }
-
-  msgBox(msg) {
-    const ok = mxUtils.button("OK", () => {
-      this.ui.hideDialog();
-    });
-    ok.className = 'geBtn gePrimaryBtn';
-    this._showpopup(msg, [ok]);
-  }
-
-  _showpopup(msg, buttonlist) {
-    const popupDiv = document.createElement('div');
-    popupDiv.innerHTML = msg;
-    const buttonsDiv = document.createElement('div')
-    buttonlist.forEach(b => buttonsDiv.appendChild(b));
-    buttonsDiv.style.marginTop = '20px';
-    popupDiv.appendChild(buttonsDiv);
-    popupDiv.style.textAlign = 'center';
-    this.ui.showDialog(popupDiv,
-      250, 130, // w, h
-      false, // modal
-      false); // closable
-  }
-}

+ 0 - 48
lib/versioning/DisabledCells.js

@@ -1,48 +0,0 @@
-
-class DisabledCells {
-  constructor() {
-    this.disabledCells = new Set();
-  }
-
-  add(cell) {
-    this.disabledCells.add(cell);
-  }
-
-  delete(cell) {
-    this.disabledCells.delete(cell);
-  }
-
-  install(graph) {
-    const self = this;
-    // part #1: Intercepting mxGraph.fireMouseEvent
-    const oldFireMouseEvent = graph.fireMouseEvent;
-    graph.fireMouseEvent = function(evtName, me, sender) {
-      if (me.state && self.disabledCells.has(me.state.cell.id)) {
-        // clicked shape is disabled
-        return;
-      }
-      oldFireMouseEvent.apply(this, arguments);
-    }
-
-    // part #2: Ignore double clicks on disabled cells
-    const oldDblClick = graph.dblClick;
-    graph.dblClick = function(evt, cell) {
-      if (cell && self.disabledCells.has(cell.id)) {
-        // clicked shape is disabled
-        return;
-      }
-      oldDblClick.apply(this, arguments);
-    }
-    // part #3: Protect disabled cells from ever being selected
-    const oldMxSelectionChange = mxSelectionChange; // override constructor :)
-    mxSelectionChange = function(selectionModel, added, removed) {
-      oldMxSelectionChange.apply(this, arguments);
-      if (this.added) {
-        this.added = this.added.filter(cell => !self.disabledCells.has(cell.id));
-      }
-    }
-    mxSelectionChange.prototype = oldMxSelectionChange.prototype;
-  }
-}
-
-module.exports = { DisabledCells };

+ 0 - 256
lib/versioning/History.js

@@ -1,256 +0,0 @@
-"use strict";
-
-const { v4: uuidv4 } = require("uuid");
-
-class Operation {
-  constructor(id, detail) {
-    this.id = id;
-    this.detail = detail;
-  }
-  // Basically replaces JS references by IDs.
-  // Result can be JSON'd with constant time+space complexity. Useful for sharing an edit over the network.
-  serialize(op) {
-    const self = this; // workaround
-    return {
-      id: this.id,
-      detail: Object.fromEntries(
-        (function*() {
-          for (const [key, {value, parent, depth}] of self.detail.entries()) {
-            yield [key, {
-              value,
-              parentId: parent.id,
-              depth,
-            }];
-          }
-        })()),
-    }
-  }
-}
-
-class Context {
-  constructor(fetchCallback) {
-    // Must be a function taking a single 'id' parameter, returning a Promise resolving to the serialized operation with the given id.
-    this.fetchCallback = fetchCallback;
-
-    // "Global" stuff. Operations have GUIDs but can also be shared between Histories. For instance, the 'initial' operation is the common root of all model histories. We could have put these things in a global variable, but that would make it more difficult to mock 'remoteness' (separate contexts) in tests.
-    this.initialOp = new Operation("0", new Map()); // The parent of all parentless Operations. Root of all histories.
-    this.ops = new Map(); // contains all pending or resolved operation-requests; mapping from operation-id to Promise resolving to Operation.
-    this.ops.set(this.initialOp.id, Promise.resolve(this.initialOp));
-  }
-
-  // Get a promise resolving to the Operation with given ID. Fetches the operation (and recursively its dependencies) if necessary. Resolves when the operation and all its dependencies are present. Idempotent.
-  requestOperation(id) {
-    let promise = this.ops.get(id);
-    if (promise === undefined) {
-      promise = this.fetchCallback(id).then(serialized => this._awaitParents(serialized));
-      this.ops.set(id, promise);
-    }
-    return promise;
-  }
-
-  // Similar to requestOperation, but instead the argument is an already fetched/received operation. Missing dependencies are (recursively) fetched, if necessary. Resolves when the operation and all its dependencies are present. Idempotent.
-  receiveOperation(serialized) {
-    let promise = this.ops.get(serialized.id);
-    if (promise === undefined) {
-      promise = this._awaitParents(serialized);
-      this.ops.set(serialized.id, promise);
-    }
-    return promise;
-  }
-
-  // Internal function. Do not use directly.
-  async _awaitParents({id, detail}) {
-    const dependencies = Object.entries(detail).map(async ([key, {value, parentId, depth}]) => {
-      return [key, {
-        value,
-        parent: await this.requestOperation(parentId),
-        depth,
-      }];
-    });
-    return new Operation(id, new Map(await Promise.all(dependencies)));
-  }
-}
-
-class History {
-  constructor(context, setState, resolve) {
-    this.context = context;
-
-    // callbacks
-    this.setState = setState;
-    this.resolve = resolve;
-
-    this.heads = new Map(); // HEAD ptrs; mapping from key to Operation
-
-    this.ops = new Map(); // Operations (winning and losing) that happened within this History.
-    this.ops.set(context.initialOp.id, context.initialOp);
-
-    this.childrenMapping = new Map(); // mapping from operation to object mapping key to current winning child.
-  }
-
-  _getHead(key) {
-    const op = this.heads.get(key);
-    if (op !== undefined) {
-      return {
-        op,
-        depth: op.detail.get(key).depth,
-      };
-    };
-    return {
-      op: this.context.initialOp,
-      depth: 0,
-    };
-  }
-
-  _update_head(op) {
-    for (const [key, {value}] of op.detail.entries()) {
-      this.heads.set(key, op);
-    }
-  }
-
-  _update_state(op) {
-    for (const [key, {value}] of op.detail.entries()) {
-      this.setState(key, value);
-    }
-  }
-
-  _setChild(parent, key, child) {
-    let childMap = this.childrenMapping.get(parent);
-    if (childMap === undefined) {
-      childMap = {};
-      this.childrenMapping.set(parent, childMap);
-    }
-    childMap[key] = child;
-  }
-
-  _getChild(parent, key) {
-    let childMap = this.childrenMapping.get(parent);
-    if (childMap === undefined) return;
-    return childMap[key];
-  }
-
-  // To be called when a new user operation has happened locally.
-  // The new operation advances HEADs.
-  new(v, updateState=true) {
-    const newId = uuidv4();
-    const detail = new Map(Object.entries(v).map(([key,value]) => {
-      const {op: parent, depth} = this._getHead(key);
-      return [key, {
-        value,
-        parent,
-        depth: depth + 1,
-      }];
-    }));
-    const newOp = new Operation(newId, detail);
-    for (const [key, {parent}] of detail.entries()) {
-      this._setChild(parent, key, newOp);
-    }
-    this._update_head(newOp);
-    if (updateState) {
-      this._update_state(newOp);
-    }
-
-    this.context.ops.set(newId, Promise.resolve(newOp));
-    this.ops.set(newId, newOp);
-
-    return newOp;
-  }
-
-  // Idempotent.
-  autoMerge(op) {
-    if (this.ops.has(op.id)) {
-      // Already merged -> skip
-      // console.log('skip (already merged)', op.id)
-      return;
-    }
-
-    let exec = true;
-    for (const [key, {parent}] of op.detail.entries()) {
-      if (!this.ops.has(parent.id)) {
-        // Update this History with operation's dependencies first
-        this.autoMerge(parent);
-      }
-
-      // Check if there's a concurrent sibling with whom there is a conflict
-      const sibling = this._getChild(parent, key);
-      if (sibling) {
-        // Conflict
-        if (this.resolve(op, sibling)) {
-          // console.log("conflict: op wins")
-          const visited = new Set();
-          const rollback = op => {
-            visited.add(op); // Children form a DAG, with possible 'diamond' shapes -> prevent same operation from being visited more than once.
-            for (const [key, {parent}] of op.detail.entries()) {
-              // recurse, child-first
-              const child = this._getChild(op, key);
-              if (child && !visited.has(child)) {
-                // (DFS) recursion
-                rollback(child);
-              }
-              // rollback
-              if (parent === this.context.initialOp) {
-                // Invariant: HEADs never contains initialOp
-                this.heads.delete(key);
-                this.setState(key, undefined);
-              } else {
-                this.heads.set(key, parent);
-                this.setState(key, parent.detail.get(key).value);
-              }
-            }
-          };
-          // Received operation wins conflict - state must be rolled back before executing it
-          rollback(sibling);
-        } else {
-          // Received operation loses conflict - nothing to be done
-          // console.log("conflict: op loses")
-          exec = false;
-          continue;
-        }
-      } else {
-        // console.log('no conflict')
-      }
-      // won (or no conflict):
-      this._setChild(parent, key, op);
-      if (parent !== this._getHead(key).op) {
-        // only execute received operation if it advances HEAD
-        exec = false;
-      }
-    }
-
-    if (exec) {
-      this._update_head(op);
-      this._update_state(op);
-    }
-
-    this.ops.set(op.id, op);
-  }
-
-  // Shorthand
-  async receiveAndMerge(serializedOp) {
-    const op = await this.context.receiveOperation(serializedOp);
-    this.autoMerge(op);
-    return op;
-  }
-
-  // Get operations in history in a sequence, such that any operation's dependencies precede it in the list. To reproduce the state of this History, operations can be executed in the returned order (front to back), and are guaranteed to not give conflicts.
-  getOpsSequence() {
-    const added = new Set([this.context.initialOp]);
-    const visiting = new Set();
-    const seq = [];
-    const visit = op => {
-      if (!added.has(op)) {
-        visiting.add(op);
-        for (const [key, {parent}] of op.detail.entries()) {
-          visit(parent);
-        }
-        seq.push(op);
-        added.add(op);
-      }
-    }
-    for (const op of this.heads.values()) {
-      visit(op);
-    }
-    return seq;
-  }
-}
-
-module.exports = { Context, History, uuidv4 };

+ 0 - 37
lib/versioning/README.txt

@@ -1,37 +0,0 @@
-Synchronous collaboration for drawio.
-
-
-Steps to run server
--------------------
-
- - Install NodeJS and NPM
-
- - In 'lib' dir, run:
-      cd lib
-      npm i serve-handler ws uuid
-
- - Setup server state directory.
-      mkdir /desired/path
-      mkdir /desired/path/ops
-      mkdir /desired/path/branches
-
- - In root dir, run:
-      DRAWIOSTATEDIR=/desired/path node lib/versioning/run_server.js
-
-   The server will also act as a static file server, hosting the drawio web app. Default port is 8700.
-
-
-Steps to build + run client
----------------------------
-
- - Build SCCD model:
-      cd lib/versioning
-      python -m sccd.compiler.sccdc -l javascript -p eventloop client.xml -o client.js
-
- - Browserify plugin:
-      cd src/main/webapp/plugins/cdf
-      browserify versioning.js > versioning.browser.js
-
- - Make sure server is running and navigate to
-      http://localhost:8700/src/main/webapp?dev=1&p=versioning
-

+ 0 - 56
lib/versioning/SelectionHandler.js

@@ -1,56 +0,0 @@
-class SelectionHandler {
-  constructor(getUserId, userColors) {
-    this.userColors = userColors;
-    this.getUserId = getUserId;
-    this.map = new Map();
-  }
-
-  install(graph, controller) {
-    graph.selectionModel.addListener(mxEvent.CHANGE, (source, eventObj) => {
-      if (listenerEnabled) {
-        const {added, removed} = eventObj.properties;
-        controller.addInput(
-          "broadcast",
-          // "selection_change",
-          "in",
-          [{
-            userId: this.getUserId(),
-            addedIds: removed ? removed.map(cell => cell.id) : [],
-            removedIds: added ? added.map(cell => cell.id) : [],
-          }],
-          controller.wallclockToSimtime(),
-        );
-      }
-    });
-
-    controller.addMyOwnOutputListener({
-      'add': event => {
-        if (event.name === "selection_change") {
-          const [{userId, addedIds, removedIds}] = event.parameters;
-          const color = this.userColors.getColor(userId);
-
-          for (const cellId of addedIds) {
-            const cell = graph.model.cells[cellId];
-            const highlight = new mxCellHighlight(graph, color,
-              6); // width
-            highlight.highlight(graph.view.getState(cell));
-            this.map.set(cellId, highlight);
-          }
-
-          for (const cellId of removedIds) {
-            const cell = graph.model.cells[cellId];
-            const highlight = this.map.get(cellId);
-            highlight.destroy();
-            this.map.delete(cellId);
-          }
-        }
-      }
-    })
-  }
-
-  clearAll() {
-    this.map.forEach(highlight => highlight.destroy());
-    this.map.clear();
-  }
-
-}

+ 0 - 7
lib/versioning/build_client.sh

@@ -1,7 +0,0 @@
-#!/bin/sh
-echo "Compiling statechart..."
-python -m sccd.compiler.sccdc -l javascript -p eventloop -o client.js client.xml
-echo "Browserify plugin..."
-browserify ../../src/main/webapp/plugins/cdf/versioning.js > ../../src/main/webapp/plugins/cdf/versioning.browser.js
-
-

Rozdielové dáta súboru neboli zobrazené, pretože súbor je príliš veľký
+ 0 - 1
lib/versioning/draghandler_statechart.drawio


+ 0 - 268
lib/versioning/test_History.js

@@ -1,268 +0,0 @@
-"use strict";
-
-// Should work in browser but only tested with NodeJS v14.16.1
-
-const { Context, History } = require("./History.js");
-
-// From: https://stackoverflow.com/a/43260158
-// returns all the permutations of a given array
-function perm(xs) {
-  let ret = [];
-
-  for (let i = 0; i < xs.length; i = i + 1) {
-    let rest = perm(xs.slice(0, i).concat(xs.slice(i + 1)));
-
-    if(!rest.length) {
-      ret.push([xs[i]])
-    } else {
-      for(let j = 0; j < rest.length; j = j + 1) {
-        ret.push([xs[i]].concat(rest[j]))
-      }
-    }
-  }
-  return ret;
-}
-
-// Reinventing the wheel:
-
-class AssertionError extends Error {
-  constructor(msg) {
-    super(msg);
-  }
-}
-function assert(expr, msg) {
-  if (!expr) {
-    // console.log(...arguments);
-    throw new AssertionError(msg);
-  }
-}
-
-function deepEqual(val1, val2) {
-  if (typeof(val1) !== typeof(val2)) return false;
-
-  if ((val1 === null) !== (val2 === null)) return false;
-
-  switch (typeof(val1)) {
-    case 'object':
-      for (var p in val2) {
-        if (val1[p] === undefined) return false;
-      }
-      for (var p in val1) {
-        if (!deepEqual(val1[p], val2[p])) return false;
-      }
-      return true;
-    case 'array':
-      if (val1.length !== val2.length) return false;
-      for (let i=0; i<val1.length; ++i)
-        if (!deepEqual(val1[i], val2[i])) return false;
-      return true;
-    default:
-      return val1 === val2;
-  }
-}
-
-
-// Test:
-
-
-async function runTest(verbose) {
-
-  function info() {
-    if (verbose) console.log(...arguments);
-  }
-
-  function resolve(op1, op2) {
-    // info("resolve...", props1, props2)
-    if (op1.detail.get('geometry').value !== op2.detail.get('geometry').value) {
-      return op1.detail.get('geometry').value > op2.detail.get('geometry').value;
-    }
-    return op1.detail.get('style').value > op2.detail.get('style').value;
-  }
-
-  function createAppState(label) {
-    const state = {};
-
-    function setState(prop, val) {
-      state[prop] = val;
-      info("  ", label, "state =", state);
-    }
-    
-    return {setState, state};
-  }
-
-  function createHistory(label, context) {
-    const {setState, state} = createAppState(label);
-    // const context = new Context(requestCallback); // simulate 'remoteness' by creating a new context for every History.
-
-    const history = new History(context, setState, resolve);
-    return {history, state};
-  }
-
-  {
-    info("\nTest case: Add local operations (no concurrency) in random order.\n")
-
-    const local = new Context();
-
-    info("insertions...")
-    const {history: expectedHistory, state: expectedState} = createHistory("expected", local);
-    const insertions = [
-      /* 0: */ expectedHistory.new({geometry: 1, style: 1}),
-      /* 1: */ expectedHistory.new({geometry: 2}), // depends on 0
-      /* 2: */ expectedHistory.new({style: 2}), // depends on 0
-    ];
-
-    const permutations = perm(insertions);
-    for (const insertionOrder of permutations) {
-      info("permutation...")
-      const {history: actualHistory, state: actualState} = createHistory("actual", local);
-      // Sequential
-      for (const op of insertionOrder) {
-        actualHistory.autoMerge(op);
-      }
-      console.log("expected:", expectedState, "actual:", actualState)
-      assert(deepEqual(expectedState, actualState));
-    }
-  }
-
-  function noFetch() {
-    throw new AssertionError("Did not expect fetch");
-  }
-
-  {
-    info("\nTest case: Multi-user without conflict\n")
-
-    // Local and remote are just names for our histories.
-    const localContext = new Context(noFetch);
-    const remoteContext = new Context(noFetch);
-
-    const {history: localHistory,  state: localState } = createHistory("local", localContext);
-    const {history: remoteHistory, state: remoteState} = createHistory("remote", remoteContext);
-
-    const localOp1 = localHistory.new({geometry: 1});
-    await remoteHistory.receiveAndMerge(localOp1.serialize());
-
-    console.log("11")
-
-    const remoteOp2 = remoteHistory.new({geometry: 2}); // happens after (hence, overwrites) op1
-    await localHistory.receiveAndMerge(remoteOp2.serialize());
-
-    assert(deepEqual(localState, remoteState));
-  }
-
-  {
-    info("\nTest case: Concurrency with conflict\n")
-
-    const localContext = new Context(noFetch);
-    const remoteContext = new Context(noFetch);
-
-    const {history: localHistory, state: localState} = createHistory("local", localContext);
-    const {history: remoteHistory, state: remoteState} = createHistory("remote", remoteContext);
-
-    const localOp1 = localHistory.new({geometry: 1});
-    const remoteOp2 = remoteHistory.new({geometry: 2});
-
-    await localHistory.receiveAndMerge(remoteOp2.serialize());
-    await remoteHistory.receiveAndMerge(localOp1.serialize());
-
-    assert(deepEqual(localState, remoteState));
-  }
-
-  {
-    info("\nTest case: Concurrency with conflict (2)\n")
-
-    const localContext = new Context(noFetch);
-    const remoteContext = new Context(noFetch);
-
-    const {history: localHistory, state: localState} = createHistory("local", localContext);
-    const {history: remoteHistory, state: remoteState} = createHistory("remote", remoteContext);
-
-    info("localHistory insert...")
-    const localOp1 = localHistory.new({geometry: 1});
-    const localOp2 = localHistory.new({geometry: 4});
-
-    info("remoteHistory insert...")
-    const remoteOp3 = remoteHistory.new({geometry: 2});
-    const remoteOp4 = remoteHistory.new({geometry: 3});
-
-    info("localHistory receive...")
-    await localHistory.receiveAndMerge(remoteOp3.serialize()); // op3 wins from op1 -> op2 and op1 undone
-    await localHistory.receiveAndMerge(remoteOp4.serialize()); // buffered
-
-    info("remoteHistory receive...")
-    await remoteHistory.receiveAndMerge(((localOp1.serialize()))); // op1 loses from op3
-    await remoteHistory.receiveAndMerge(((localOp2.serialize()))); // no conflict
-
-    assert(deepEqual(localState, remoteState));
-  }
-
-  {
-    info("\nTest case: Fetch\n")
-
-    const fetched = [];
-
-    async function fetchFromLocal(id) {
-      // console.log("fetching", id)
-      fetched.push(id);
-      return localContext.ops.get(id).then(op => op.serialize());
-    }
-
-    const localContext = new Context(noFetch);
-    const remoteContext = new Context(fetchFromLocal);
-
-    const {history: localHistory, state: localState} = createHistory("local", localContext);
-
-    const localOps = [
-      localHistory.new({geometry:1}),                       // [0] (no deps)
-      localHistory.new({geometry:2, style: 3}),             // [1], depends on [0]
-      localHistory.new({style: 4}),                         // [2], depends on [1]
-      localHistory.new({geometry: 5, style: 6, parent: 7}), // [3], depends on [1], [2]
-      localHistory.new({parent: 8}),                        // [4], depends on [3]
-      localHistory.new({terminal: 9}),                      // [5] (no deps)
-    ];
-
-    // when given [2], should fetch [1], then [0]
-    await remoteContext.receiveOperation(localOps[2].serialize());
-    assert(deepEqual(fetched, [localOps[1].id, localOps[0].id]));
-
-    // when given [5], should not fetch anything
-    await remoteContext.receiveOperation(localOps[5].serialize());
-    assert(deepEqual(fetched, [localOps[1].id, localOps[0].id]));
-
-    // when given [4], should fetch [3]. (already have [0-2] from previous step)
-    await remoteContext.receiveOperation(localOps[4].serialize());
-    assert(deepEqual(fetched, [localOps[1].id, localOps[0].id, localOps[3].id]));
-  }
-
-  {
-    info("\nTest case: Get as sequence\n")
-
-    const {history} = createHistory("local", new Context(noFetch));
-
-    const ops = [
-      history.new({x:1, y:1}), // 0
-      history.new({x:2}),      // 1 depends on 0
-      history.new({y:2}),      // 2 depends on 0
-      history.new({x:3, z:3}), // 3 depends on 1
-      history.new({a:4}),      // 4
-      history.new({a:5}),      // 5 depends on 4
-      history.new({a:6, z:6}), // 6 depends on 5, 3
-    ];
-
-    const seq = history.getOpsSequence();
-    console.log(seq.map(op => op.serialize()));
-
-    assert(seq.indexOf(ops[1]) > seq.indexOf(0));
-    assert(seq.indexOf(ops[2]) > seq.indexOf(0));
-    assert(seq.indexOf(ops[3]) > seq.indexOf(1));
-    assert(seq.indexOf(ops[5]) > seq.indexOf(4));
-    assert(seq.indexOf(ops[6]) > seq.indexOf(5));
-    assert(seq.indexOf(ops[6]) > seq.indexOf(3));
-  }
-}
-
-runTest(/* verbose: */ true).then(() => {
-  console.log("OK");
-}, err => {
-  console.log(err);
-  process.exit(1);
-});

+ 57 - 0
nix/default.nix

@@ -0,0 +1,57 @@
+{ pkgs ? import <nixpkgs> {} }:
+
+let
+  # dependency: SCCD compiler
+  sccd = builtins.fetchGit {
+    url = "https://msdl.uantwerpen.be/git/arys/sccd.git";
+    ref = "dev";
+    rev = "8e63f96438fdc84e74389daa11bcd4bb332ecedb";
+  };
+
+  sccdJsRuntimePath = "${sccd.outPath}/docs/runtimes/javascript";
+
+  # dependency: Dependency-aware Operation History (DOH)
+  doh = import (builtins.fetchGit {
+    url = "https://msdl.uantwerpen.be/git/jexelmans/doh";
+    ref = "master";
+    rev = "ef92a53aff52bd62756071f5e69a035375731b82";
+  }) {};
+
+  # 
+  npmDeps = import ./npm_deps {};
+
+in rec {
+  # get a development shell
+  shell = pkgs.mkShell {
+    buildInputs = [
+      # for SCCD
+      pkgs.python39
+
+      # to run a test server (for development)
+      pkgs.nodejs-slim # no NPM :)
+
+      pkgs.nodePackages.browserify
+
+      # to build (produce minified JS)
+      pkgs.ant
+      pkgs.jre8_headless
+    ];
+
+    # find our NPM dependencies (for development and for building a Docker container)
+    NODE_PATH = "${npmDeps.nodeDependencies}/lib/node_modules:${doh.package}/lib/node_modules:${sccdJsRuntimePath}";
+
+    # find SCCD compiler
+    PYTHONPATH = sccd.outPath;
+  };
+
+  staticWebApp = derivation {
+    name = "drawio-static";
+    system = builtins.currentSystem;
+
+    coreutils = pkgs.coreutils;
+    src_webapp = ../src/main/webapp;
+
+    builder = "${pkgs.bash}/bin/bash";
+    args = [ ./static-webapp-builder.sh ];
+  };
+}

+ 16 - 0
nix/npm_deps/README.txt

@@ -0,0 +1,16 @@
+This is NOT an actual (valid) NPM package!
+
+The file 'package.json' only serves as a declaration of NPM dependencies, and serves as input to Node2Nix, which outputs a bunch of Nix derivations. These derivations in turn make Nix (instead of 'npm') fetch the NPM dependencies and put them in the Nix store, instead of in a 'node_modules' folder (in-place, yuck!).
+
+After changing 'package.json', simply re-run 'node2nix' in order to update the generated *.nix files.
+
+BTW, even though node2nix generates Nix expressions for a whole lotta things:
+  - building a package (based on package.json)
+  - building a distributable tarball
+  - entering a development shell
+  - building the dependencies of a package
+we ONLY use the Nix expression to do the last option (building the dependencies). Building a package or redistributable tarball just makes no sense (recall that this is NOT an NPM package), and entering a development shell does not seem to work very well (it overwrites our NODE_PATH completely).
+
+
+
+If you do not want to use Nix, probably you can run 'npm i' in this directory, which will create a node_modules folder in-place. Then you add the node_modules folder to NODE_PATH environment variable.

+ 17 - 0
nix/npm_deps/default.nix

@@ -0,0 +1,17 @@
+# This file has been generated by node2nix 1.9.0. Do not edit!
+
+{pkgs ? import <nixpkgs> {
+    inherit system;
+  }, system ? builtins.currentSystem, nodejs ? pkgs."nodejs-12_x"}:
+
+let
+  nodeEnv = import ./node-env.nix {
+    inherit (pkgs) stdenv lib python2 runCommand writeTextFile writeShellScript;
+    inherit pkgs nodejs;
+    libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
+  };
+in
+import ./node-packages.nix {
+  inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit;
+  inherit nodeEnv;
+}

+ 588 - 0
nix/npm_deps/node-env.nix

@@ -0,0 +1,588 @@
+# This file originates from node2nix
+
+{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile, writeShellScript}:
+
+let
+  # Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
+  utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;
+
+  python = if nodejs ? python then nodejs.python else python2;
+
+  # Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
+  tarWrapper = runCommand "tarWrapper" {} ''
+    mkdir -p $out/bin
+
+    cat > $out/bin/tar <<EOF
+    #! ${stdenv.shell} -e
+    $(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
+    EOF
+
+    chmod +x $out/bin/tar
+  '';
+
+  # Function that generates a TGZ file from a NPM project
+  buildNodeSourceDist =
+    { name, version, src, ... }:
+
+    stdenv.mkDerivation {
+      name = "node-tarball-${name}-${version}";
+      inherit src;
+      buildInputs = [ nodejs ];
+      buildPhase = ''
+        export HOME=$TMPDIR
+        tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
+      '';
+      installPhase = ''
+        mkdir -p $out/tarballs
+        mv $tgzFile $out/tarballs
+        mkdir -p $out/nix-support
+        echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
+      '';
+    };
+
+  # Common shell logic
+  installPackage = writeShellScript "install-package" ''
+    installPackage() {
+      local packageName=$1 src=$2
+
+      local strippedName
+
+      local DIR=$PWD
+      cd $TMPDIR
+
+      unpackFile $src
+
+      # Make the base dir in which the target dependency resides first
+      mkdir -p "$(dirname "$DIR/$packageName")"
+
+      if [ -f "$src" ]
+      then
+          # Figure out what directory has been unpacked
+          packageDir="$(find . -maxdepth 1 -type d | tail -1)"
+
+          # Restore write permissions to make building work
+          find "$packageDir" -type d -exec chmod u+x {} \;
+          chmod -R u+w "$packageDir"
+
+          # Move the extracted tarball into the output folder
+          mv "$packageDir" "$DIR/$packageName"
+      elif [ -d "$src" ]
+      then
+          # Get a stripped name (without hash) of the source directory.
+          # On old nixpkgs it's already set internally.
+          if [ -z "$strippedName" ]
+          then
+              strippedName="$(stripHash $src)"
+          fi
+
+          # Restore write permissions to make building work
+          chmod -R u+w "$strippedName"
+
+          # Move the extracted directory into the output folder
+          mv "$strippedName" "$DIR/$packageName"
+      fi
+
+      # Change to the package directory to install dependencies
+      cd "$DIR/$packageName"
+    }
+  '';
+
+  # Bundle the dependencies of the package
+  #
+  # Only include dependencies if they don't exist. They may also be bundled in the package.
+  includeDependencies = {dependencies}:
+    lib.optionalString (dependencies != []) (
+      ''
+        mkdir -p node_modules
+        cd node_modules
+      ''
+      + (lib.concatMapStrings (dependency:
+        ''
+          if [ ! -e "${dependency.name}" ]; then
+              ${composePackage dependency}
+          fi
+        ''
+      ) dependencies)
+      + ''
+        cd ..
+      ''
+    );
+
+  # Recursively composes the dependencies of a package
+  composePackage = { name, packageName, src, dependencies ? [], ... }@args:
+    builtins.addErrorContext "while evaluating node package '${packageName}'" ''
+      installPackage "${packageName}" "${src}"
+      ${includeDependencies { inherit dependencies; }}
+      cd ..
+      ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
+    '';
+
+  pinpointDependencies = {dependencies, production}:
+    let
+      pinpointDependenciesFromPackageJSON = writeTextFile {
+        name = "pinpointDependencies.js";
+        text = ''
+          var fs = require('fs');
+          var path = require('path');
+
+          function resolveDependencyVersion(location, name) {
+              if(location == process.env['NIX_STORE']) {
+                  return null;
+              } else {
+                  var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
+
+                  if(fs.existsSync(dependencyPackageJSON)) {
+                      var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
+
+                      if(dependencyPackageObj.name == name) {
+                          return dependencyPackageObj.version;
+                      }
+                  } else {
+                      return resolveDependencyVersion(path.resolve(location, ".."), name);
+                  }
+              }
+          }
+
+          function replaceDependencies(dependencies) {
+              if(typeof dependencies == "object" && dependencies !== null) {
+                  for(var dependency in dependencies) {
+                      var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
+
+                      if(resolvedVersion === null) {
+                          process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
+                      } else {
+                          dependencies[dependency] = resolvedVersion;
+                      }
+                  }
+              }
+          }
+
+          /* Read the package.json configuration */
+          var packageObj = JSON.parse(fs.readFileSync('./package.json'));
+
+          /* Pinpoint all dependencies */
+          replaceDependencies(packageObj.dependencies);
+          if(process.argv[2] == "development") {
+              replaceDependencies(packageObj.devDependencies);
+          }
+          replaceDependencies(packageObj.optionalDependencies);
+
+          /* Write the fixed package.json file */
+          fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
+        '';
+      };
+    in
+    ''
+      node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
+
+      ${lib.optionalString (dependencies != [])
+        ''
+          if [ -d node_modules ]
+          then
+              cd node_modules
+              ${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
+              cd ..
+          fi
+        ''}
+    '';
+
+  # Recursively traverses all dependencies of a package and pinpoints all
+  # dependencies in the package.json file to the versions that are actually
+  # being used.
+
+  pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
+    ''
+      if [ -d "${packageName}" ]
+      then
+          cd "${packageName}"
+          ${pinpointDependencies { inherit dependencies production; }}
+          cd ..
+          ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
+      fi
+    '';
+
+  # Extract the Node.js source code which is used to compile packages with
+  # native bindings
+  nodeSources = runCommand "node-sources" {} ''
+    tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
+    mv node-* $out
+  '';
+
+  # Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
+  addIntegrityFieldsScript = writeTextFile {
+    name = "addintegrityfields.js";
+    text = ''
+      var fs = require('fs');
+      var path = require('path');
+
+      function augmentDependencies(baseDir, dependencies) {
+          for(var dependencyName in dependencies) {
+              var dependency = dependencies[dependencyName];
+
+              // Open package.json and augment metadata fields
+              var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
+              var packageJSONPath = path.join(packageJSONDir, "package.json");
+
+              if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
+                  console.log("Adding metadata fields to: "+packageJSONPath);
+                  var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
+
+                  if(dependency.integrity) {
+                      packageObj["_integrity"] = dependency.integrity;
+                  } else {
+                      packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
+                  }
+
+                  if(dependency.resolved) {
+                      packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
+                  } else {
+                      packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
+                  }
+
+                  if(dependency.from !== undefined) { // Adopt from property if one has been provided
+                      packageObj["_from"] = dependency.from;
+                  }
+
+                  fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
+              }
+
+              // Augment transitive dependencies
+              if(dependency.dependencies !== undefined) {
+                  augmentDependencies(packageJSONDir, dependency.dependencies);
+              }
+          }
+      }
+
+      if(fs.existsSync("./package-lock.json")) {
+          var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
+
+          if(![1, 2].includes(packageLock.lockfileVersion)) {
+             process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
+             process.exit(1);
+          }
+
+          if(packageLock.dependencies !== undefined) {
+              augmentDependencies(".", packageLock.dependencies);
+          }
+      }
+    '';
+  };
+
+  # Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
+  reconstructPackageLock = writeTextFile {
+    name = "addintegrityfields.js";
+    text = ''
+      var fs = require('fs');
+      var path = require('path');
+
+      var packageObj = JSON.parse(fs.readFileSync("package.json"));
+
+      var lockObj = {
+          name: packageObj.name,
+          version: packageObj.version,
+          lockfileVersion: 1,
+          requires: true,
+          dependencies: {}
+      };
+
+      function augmentPackageJSON(filePath, dependencies) {
+          var packageJSON = path.join(filePath, "package.json");
+          if(fs.existsSync(packageJSON)) {
+              var packageObj = JSON.parse(fs.readFileSync(packageJSON));
+              dependencies[packageObj.name] = {
+                  version: packageObj.version,
+                  integrity: "sha1-000000000000000000000000000=",
+                  dependencies: {}
+              };
+              processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies);
+          }
+      }
+
+      function processDependencies(dir, dependencies) {
+          if(fs.existsSync(dir)) {
+              var files = fs.readdirSync(dir);
+
+              files.forEach(function(entry) {
+                  var filePath = path.join(dir, entry);
+                  var stats = fs.statSync(filePath);
+
+                  if(stats.isDirectory()) {
+                      if(entry.substr(0, 1) == "@") {
+                          // When we encounter a namespace folder, augment all packages belonging to the scope
+                          var pkgFiles = fs.readdirSync(filePath);
+
+                          pkgFiles.forEach(function(entry) {
+                              if(stats.isDirectory()) {
+                                  var pkgFilePath = path.join(filePath, entry);
+                                  augmentPackageJSON(pkgFilePath, dependencies);
+                              }
+                          });
+                      } else {
+                          augmentPackageJSON(filePath, dependencies);
+                      }
+                  }
+              });
+          }
+      }
+
+      processDependencies("node_modules", lockObj.dependencies);
+
+      fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
+    '';
+  };
+
+  prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
+    let
+      forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
+    in
+    ''
+        # Pinpoint the versions of all dependencies to the ones that are actually being used
+        echo "pinpointing versions of dependencies..."
+        source $pinpointDependenciesScriptPath
+
+        # Patch the shebangs of the bundled modules to prevent them from
+        # calling executables outside the Nix store as much as possible
+        patchShebangs .
+
+        # Deploy the Node.js package by running npm install. Since the
+        # dependencies have been provided already by ourselves, it should not
+        # attempt to install them again, which is good, because we want to make
+        # it Nix's responsibility. If it needs to install any dependencies
+        # anyway (e.g. because the dependency parameters are
+        # incomplete/incorrect), it fails.
+        #
+        # The other responsibilities of NPM are kept -- version checks, build
+        # steps, postprocessing etc.
+
+        export HOME=$TMPDIR
+        cd "${packageName}"
+        runHook preRebuild
+
+        ${lib.optionalString bypassCache ''
+          ${lib.optionalString reconstructLock ''
+            if [ -f package-lock.json ]
+            then
+                echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
+                echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
+                rm package-lock.json
+            else
+                echo "No package-lock.json file found, reconstructing..."
+            fi
+
+            node ${reconstructPackageLock}
+          ''}
+
+          node ${addIntegrityFieldsScript}
+        ''}
+
+        npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild
+
+        if [ "''${dontNpmInstall-}" != "1" ]
+        then
+            # NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
+            rm -f npm-shrinkwrap.json
+
+            npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} install
+        fi
+    '';
+
+  # Builds and composes an NPM package including all its dependencies
+  buildNodePackage =
+    { name
+    , packageName
+    , version
+    , dependencies ? []
+    , buildInputs ? []
+    , production ? true
+    , npmFlags ? ""
+    , dontNpmInstall ? false
+    , bypassCache ? false
+    , reconstructLock ? false
+    , preRebuild ? ""
+    , dontStrip ? true
+    , unpackPhase ? "true"
+    , buildPhase ? "true"
+    , meta ? {}
+    , ... }@args:
+
+    let
+      extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ];
+    in
+    stdenv.mkDerivation ({
+      name = "${name}-${version}";
+      buildInputs = [ tarWrapper python nodejs ]
+        ++ lib.optional (stdenv.isLinux) utillinux
+        ++ lib.optional (stdenv.isDarwin) libtool
+        ++ buildInputs;
+
+      inherit nodejs;
+
+      inherit dontStrip; # Stripping may fail a build for some package deployments
+      inherit dontNpmInstall preRebuild unpackPhase buildPhase;
+
+      compositionScript = composePackage args;
+      pinpointDependenciesScript = pinpointDependenciesOfPackage args;
+
+      passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
+
+      installPhase = ''
+        source ${installPackage}
+
+        # Create and enter a root node_modules/ folder
+        mkdir -p $out/lib/node_modules
+        cd $out/lib/node_modules
+
+        # Compose the package and all its dependencies
+        source $compositionScriptPath
+
+        ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
+
+        # Create symlink to the deployed executable folder, if applicable
+        if [ -d "$out/lib/node_modules/.bin" ]
+        then
+            ln -s $out/lib/node_modules/.bin $out/bin
+        fi
+
+        # Create symlinks to the deployed manual page folders, if applicable
+        if [ -d "$out/lib/node_modules/${packageName}/man" ]
+        then
+            mkdir -p $out/share
+            for dir in "$out/lib/node_modules/${packageName}/man/"*
+            do
+                mkdir -p $out/share/man/$(basename "$dir")
+                for page in "$dir"/*
+                do
+                    ln -s $page $out/share/man/$(basename "$dir")
+                done
+            done
+        fi
+
+        # Run post install hook, if provided
+        runHook postInstall
+      '';
+
+      meta = {
+        # default to Node.js' platforms
+        platforms = nodejs.meta.platforms;
+      } // meta;
+    } // extraArgs);
+
+  # Builds a node environment (a node_modules folder and a set of binaries)
+  buildNodeDependencies =
+    { name
+    , packageName
+    , version
+    , src
+    , dependencies ? []
+    , buildInputs ? []
+    , production ? true
+    , npmFlags ? ""
+    , dontNpmInstall ? false
+    , bypassCache ? false
+    , reconstructLock ? false
+    , dontStrip ? true
+    , unpackPhase ? "true"
+    , buildPhase ? "true"
+    , ... }@args:
+
+    let
+      extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
+    in
+      stdenv.mkDerivation ({
+        name = "node-dependencies-${name}-${version}";
+
+        buildInputs = [ tarWrapper python nodejs ]
+          ++ lib.optional (stdenv.isLinux) utillinux
+          ++ lib.optional (stdenv.isDarwin) libtool
+          ++ buildInputs;
+
+        inherit dontStrip; # Stripping may fail a build for some package deployments
+        inherit dontNpmInstall unpackPhase buildPhase;
+
+        includeScript = includeDependencies { inherit dependencies; };
+        pinpointDependenciesScript = pinpointDependenciesOfPackage args;
+
+        passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
+
+        installPhase = ''
+          source ${installPackage}
+
+          mkdir -p $out/${packageName}
+          cd $out/${packageName}
+
+          source $includeScriptPath
+
+          # Create fake package.json to make the npm commands work properly
+          cp ${src}/package.json .
+          chmod 644 package.json
+          ${lib.optionalString bypassCache ''
+            if [ -f ${src}/package-lock.json ]
+            then
+                cp ${src}/package-lock.json .
+            fi
+          ''}
+
+          # Go to the parent folder to make sure that all packages are pinpointed
+          cd ..
+          ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
+
+          ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
+
+          # Expose the executables that were installed
+          cd ..
+          ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
+
+          mv ${packageName} lib
+          ln -s $out/lib/node_modules/.bin $out/bin
+        '';
+      } // extraArgs);
+
+  # Builds a development shell
+  buildNodeShell =
+    { name
+    , packageName
+    , version
+    , src
+    , dependencies ? []
+    , buildInputs ? []
+    , production ? true
+    , npmFlags ? ""
+    , dontNpmInstall ? false
+    , bypassCache ? false
+    , reconstructLock ? false
+    , dontStrip ? true
+    , unpackPhase ? "true"
+    , buildPhase ? "true"
+    , ... }@args:
+
+    let
+      nodeDependencies = buildNodeDependencies args;
+    in
+    stdenv.mkDerivation {
+      name = "node-shell-${name}-${version}";
+
+      buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
+      buildCommand = ''
+        mkdir -p $out/bin
+        cat > $out/bin/shell <<EOF
+        #! ${stdenv.shell} -e
+        $shellHook
+        exec ${stdenv.shell}
+        EOF
+        chmod +x $out/bin/shell
+      '';
+
+      # Provide the dependencies in a development shell through the NODE_PATH environment variable
+      inherit nodeDependencies;
+      shellHook = lib.optionalString (dependencies != []) ''
+        export NODE_PATH="${nodeDependencies}/lib/node_modules:$NODE_PATH"
+        export PATH="${nodeDependencies}/bin:$PATH"
+      '';
+    };
+in
+{
+  buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
+  buildNodePackage = lib.makeOverridable buildNodePackage;
+  buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
+  buildNodeShell = lib.makeOverridable buildNodeShell;
+}

+ 201 - 0
nix/npm_deps/node-packages.nix

@@ -0,0 +1,201 @@
+# This file has been generated by node2nix 1.9.0. Do not edit!
+
+{nodeEnv, fetchurl, fetchgit, nix-gitignore, stdenv, lib, globalBuildInputs ? []}:
+
+let
+  sources = {
+    "balanced-match-1.0.2" = {
+      name = "balanced-match";
+      packageName = "balanced-match";
+      version = "1.0.2";
+      src = fetchurl {
+        url = "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz";
+        sha512 = "3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==";
+      };
+    };
+    "brace-expansion-1.1.11" = {
+      name = "brace-expansion";
+      packageName = "brace-expansion";
+      version = "1.1.11";
+      src = fetchurl {
+        url = "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz";
+        sha512 = "iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==";
+      };
+    };
+    "bytes-3.0.0" = {
+      name = "bytes";
+      packageName = "bytes";
+      version = "3.0.0";
+      src = fetchurl {
+        url = "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz";
+        sha1 = "d32815404d689699f85a4ea4fa8755dd13a96048";
+      };
+    };
+    "concat-map-0.0.1" = {
+      name = "concat-map";
+      packageName = "concat-map";
+      version = "0.0.1";
+      src = fetchurl {
+        url = "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz";
+        sha1 = "d8a96bd77fd68df7793a73036a3ba0d5405d477b";
+      };
+    };
+    "content-disposition-0.5.2" = {
+      name = "content-disposition";
+      packageName = "content-disposition";
+      version = "0.5.2";
+      src = fetchurl {
+        url = "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.2.tgz";
+        sha1 = "0cf68bb9ddf5f2be7961c3a85178cb85dba78cb4";
+      };
+    };
+    "fast-url-parser-1.1.3" = {
+      name = "fast-url-parser";
+      packageName = "fast-url-parser";
+      version = "1.1.3";
+      src = fetchurl {
+        url = "https://registry.npmjs.org/fast-url-parser/-/fast-url-parser-1.1.3.tgz";
+        sha1 = "f4af3ea9f34d8a271cf58ad2b3759f431f0b318d";
+      };
+    };
+    "mime-db-1.33.0" = {
+      name = "mime-db";
+      packageName = "mime-db";
+      version = "1.33.0";
+      src = fetchurl {
+        url = "https://registry.npmjs.org/mime-db/-/mime-db-1.33.0.tgz";
+        sha512 = "BHJ/EKruNIqJf/QahvxwQZXKygOQ256myeN/Ew+THcAa5q+PjyTTMMeNQC4DZw5AwfvelsUrA6B67NKMqXDbzQ==";
+      };
+    };
+    "mime-types-2.1.18" = {
+      name = "mime-types";
+      packageName = "mime-types";
+      version = "2.1.18";
+      src = fetchurl {
+        url = "https://registry.npmjs.org/mime-types/-/mime-types-2.1.18.tgz";
+        sha512 = "lc/aahn+t4/SWV/qcmumYjymLsWfN3ELhpmVuUFjgsORruuZPVSwAQryq+HHGvO/SI2KVX26bx+En+zhM8g8hQ==";
+      };
+    };
+    "minimatch-3.0.4" = {
+      name = "minimatch";
+      packageName = "minimatch";
+      version = "3.0.4";
+      src = fetchurl {
+        url = "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz";
+        sha512 = "yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==";
+      };
+    };
+    "path-is-inside-1.0.2" = {
+      name = "path-is-inside";
+      packageName = "path-is-inside";
+      version = "1.0.2";
+      src = fetchurl {
+        url = "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz";
+        sha1 = "365417dede44430d1c11af61027facf074bdfc53";
+      };
+    };
+    "path-to-regexp-2.2.1" = {
+      name = "path-to-regexp";
+      packageName = "path-to-regexp";
+      version = "2.2.1";
+      src = fetchurl {
+        url = "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-2.2.1.tgz";
+        sha512 = "gu9bD6Ta5bwGrrU8muHzVOBFFREpp2iRkVfhBJahwJ6p6Xw20SjT0MxLnwkjOibQmGSYhiUnf2FLe7k+jcFmGQ==";
+      };
+    };
+    "punycode-1.4.1" = {
+      name = "punycode";
+      packageName = "punycode";
+      version = "1.4.1";
+      src = fetchurl {
+        url = "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz";
+        sha1 = "c0d5a63b2718800ad8e1eb0fa5269c84dd41845e";
+      };
+    };
+    "range-parser-1.2.0" = {
+      name = "range-parser";
+      packageName = "range-parser";
+      version = "1.2.0";
+      src = fetchurl {
+        url = "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz";
+        sha1 = "f49be6b487894ddc40dcc94a322f611092e00d5e";
+      };
+    };
+    "serve-handler-6.1.3" = {
+      name = "serve-handler";
+      packageName = "serve-handler";
+      version = "6.1.3";
+      src = fetchurl {
+        url = "https://registry.npmjs.org/serve-handler/-/serve-handler-6.1.3.tgz";
+        sha512 = "FosMqFBNrLyeiIDvP1zgO6YoTzFYHxLDEIavhlmQ+knB2Z7l1t+kGLHkZIDN7UVWqQAmKI3D20A6F6jo3nDd4w==";
+      };
+    };
+    "uuid-8.3.2" = {
+      name = "uuid";
+      packageName = "uuid";
+      version = "8.3.2";
+      src = fetchurl {
+        url = "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz";
+        sha512 = "+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==";
+      };
+    };
+    "ws-8.4.2" = {
+      name = "ws";
+      packageName = "ws";
+      version = "8.4.2";
+      src = fetchurl {
+        url = "https://registry.npmjs.org/ws/-/ws-8.4.2.tgz";
+        sha512 = "Kbk4Nxyq7/ZWqr/tarI9yIt/+iNNFOjBXEWgTb4ydaNHBNGgvf2QHbS9fdfsndfjFlFwEd4Al+mw83YkaD10ZA==";
+      };
+    };
+  };
+  args = {
+    name = "drawio-npm";
+    packageName = "drawio-npm";
+    version = "0.0.1";
+    src = ./.;
+    dependencies = [
+      sources."balanced-match-1.0.2"
+      sources."brace-expansion-1.1.11"
+      sources."bytes-3.0.0"
+      sources."concat-map-0.0.1"
+      sources."content-disposition-0.5.2"
+      sources."fast-url-parser-1.1.3"
+      sources."mime-db-1.33.0"
+      sources."mime-types-2.1.18"
+      sources."minimatch-3.0.4"
+      sources."path-is-inside-1.0.2"
+      sources."path-to-regexp-2.2.1"
+      sources."punycode-1.4.1"
+      sources."range-parser-1.2.0"
+      sources."serve-handler-6.1.3"
+      sources."uuid-8.3.2"
+      sources."ws-8.4.2"
+    ];
+    buildInputs = globalBuildInputs;
+    meta = {
+    };
+    production = true;
+    bypassCache = true;
+    reconstructLock = true;
+  };
+in
+{
+  args = args;
+  sources = sources;
+  tarball = nodeEnv.buildNodeSourceDist args;
+  package = nodeEnv.buildNodePackage args;
+  shell = nodeEnv.buildNodeShell args;
+  nodeDependencies = nodeEnv.buildNodeDependencies (lib.overrideExisting args {
+    src = stdenv.mkDerivation {
+      name = args.name + "-package-json";
+      src = nix-gitignore.gitignoreSourcePure [
+        "*"
+        "!package.json"
+        "!package-lock.json"
+      ] args.src;
+      dontBuild = true;
+      installPhase = "mkdir -p $out; cp -r ./* $out;";
+    };
+  });
+}

+ 9 - 0
nix/npm_deps/package.json

@@ -0,0 +1,9 @@
+{
+  "name": "drawio-npm",
+  "version": "0.0.1",
+  "dependencies": {
+    "serve-handler": "^6.1.3",
+    "uuid": "^8.3.2",
+    "ws": "^8.4.2"
+  }
+}

+ 39 - 0
nix/static-webapp-builder.sh

@@ -0,0 +1,39 @@
+# Nix will execute this script with bash when asked to build the static web app.
+
+# We assume that minified sources (which are checked into the git repo) have already been built.
+# (the scripts in the toplevel 'scripts' directory take care of this)
+
+# This 'build' only depends on 2 things
+#   - 'coreutils' (mkdir, cp)
+#   - src/main/webapp to copy things from
+
+export PATH="$coreutils/bin"
+
+# The environment variable 'out' points to the file or directory that is the result of the build. We aim to create a directory that contains the static part of the web app:
+mkdir $out
+
+cd $src_webapp # = src/main/webapp
+
+cp -r images $out/images
+cp -r js $out/js
+cp -r math $out/math
+cp -r mxgraph $out/mxgraph # contains minified version of mxGraph
+cp -r plugins $out/plugins
+cp -r resources $out/resources
+cp -r styles $out/styles
+
+cp disableUpdate.js $out/disableUpdate.js
+cp dropbox.html $out/dropbox.html
+cp electronFilesWorker.js $out/electronFilesWorker.js
+cp electron.js $out/electron.js
+cp export3.html $out/export3.html
+cp export-fonts.css $out/export-fonts.css
+cp favicon.ico $out/favicon.ico
+cp github.html $out/github.html
+cp gitlab.html $out/gitlab.html
+cp index.html $out/index.html
+cp onedrive3.html $out/onedrive3.html
+cp open.html $out/open.html
+cp service-worker.js $out/service-worker.js
+cp shortcuts.svg $out/shortcuts.svg
+cp vsdxImporter.html $out/vsdxImporter.html

+ 0 - 1
sccd

@@ -1 +0,0 @@
-Subproject commit 322ec37a0bded4d9ea57a7662dfccc5fe2872ed6

+ 14 - 0
scripts/README.md

@@ -0,0 +1,14 @@
+The scripts in this directory are to be executed in a development shell.
+
+The command
+```
+nix-shell
+```
+will create a shell with all dependencies resolved and environment variables set correctly.
+
+
+It is also possible to run only a single script in a non-interactive shell. For instance
+```
+nix-shell --run scripts/dev_server.sh
+```
+will start a development server.

+ 20 - 0
scripts/build_collab_plugin.sh

@@ -0,0 +1,20 @@
+# Build the collaboration plugin.
+
+echo "Compiling statechart..."
+
+INPUTFILE=src/collab/client/statechart_src/client_statechart.xml
+OUTPUTFILE=src/collab/client/client_compiled.js
+
+echo "Transforming $INPUTFILE to $OUTPUTFILE"
+
+python -m sccd.compiler.sccdc -l nodejs -p eventloop -o $OUTPUTFILE $INPUTFILE
+
+
+echo "Browserify plugin..."
+
+INPUTFILE=src/collab/client/plugin.js
+OUTPUTFILE=src/main/webapp/myPlugins/collab.js
+
+echo "Transforming $INPUTFILE to $OUTPUTFILE"
+
+browserify $INPUTFILE > $OUTPUTFILE # Put result in 'myPlugins' folder of webapp

+ 7 - 0
scripts/build_static_app.sh

@@ -0,0 +1,7 @@
+# Build the static part of the draw.io app.
+# Following draw.io conventions, build output is placed in the source code tree, and checked into git.
+# This makes it impossible to run the script automatically as part of a Nix build, because in Nix, sources are immutable. It is therefore the responsibility of the developer to manually run this script after making changes.
+
+pushd etc/build
+ant -f build.xml
+popd

+ 10 - 0
scripts/dev_server.sh

@@ -0,0 +1,10 @@
+# Run a development server to debug the app.
+#
+# Depends on development shell.
+#
+# Run with:
+#   nix-shell --run scripts/dev_server.sh
+
+pushd src/main/webapp # serve this directory
+node ../../collab/server/run_server.js
+popd

+ 1 - 26
shell.nix

@@ -1,28 +1,3 @@
 { pkgs ? import <nixpkgs> {} }:
-  pkgs.mkShell {
-    buildInputs = [
-      # to run tests and to run demo server
-      pkgs.nodejs
-      pkgs.nodePackages.npm
 
-      # To build plugin
-      pkgs.nodePackages.browserify
-
-      # to run standalone app
-      pkgs.electron
-
-      # to build (produce minified JS)
-      pkgs.ant
-      pkgs.jre8_headless
-
-      # for SCCD
-      pkgs.python39
-      pkgs.python39Packages.websockets
-    ];
-    # environment variable for standalone app. ignored by browser.
-    shellHook = ''
-      export DRAWIO_ENV=dev
-      #export PYTHONPATH=$PYTHONPATH:~/cdf/repos/drawio/sccd/sccd
-    '';
-    PYTHONPATH = ./sccd;
-}
+(import ./nix {}).shell

+ 7 - 0
src/collab/README.txt

@@ -0,0 +1,7 @@
+This directory contains an implementation of an operation-based versioning system with draw.io.
+
+It consists of 2 parts:
+
+   - The client, which is a plugin for draw.io. The 'client' directory contains the source code for this plugin. The plugin needs to be built with 'browserify', and the output (a single .js file) is put into 'main/webapp/myPlugins'.
+
+   - The server, which is a NodeJS script. The server can be run from a development shell, or it can be built into a Docker container.

lib/versioning/DragHandler.js → src/collab/client/DragHandler.js


lib/versioning/GhostOverlays.js → src/collab/client/GhostOverlays.js


lib/versioning/UserColors.js → src/collab/client/UserColors.js


+ 16 - 11
lib/versioning/client.js

@@ -1,16 +1,25 @@
 /*
-Date: Fri Oct 22 13:09:20 2021
+Date: Fri Jan 28 15:35:51 2022
 
 Model author: Joeri Exelmans
 Model name: client
 Model description:
-Example of a browser-based WebSocket client with heartbeats
+Statechart model implementing much of the 'client' part of our protocol for synchronous collaboration sessions (joining, leaving, auto-reconnecting, broadcasting and receiving operations, and heartbeats to monitor connection health).
 */
 
 
 // package client
-let client = {};
-(function() {
+const {
+                RuntimeClassBase,
+                StatechartSemantics,
+                ObjectManagerBase,
+                EventLoopControllerBase,
+                State,
+                ParallelState,
+                Transition,
+                Event,
+                Association,
+            } = require("statecharts_core");
 
 let Main = function(controller, uiState, myId, getMyName) {
     RuntimeClassBase.call(this, controller);
@@ -798,8 +807,7 @@ Main.prototype.initializeStatechart = function() {
     RuntimeClassBase.prototype.initializeStatechart.call(this);
 };
 
-// Add symbol 'Main' to package 'client'
-client.Main = Main;
+module.exports.Main = Main;
 
 let ObjectManager = function(controller) {
     ObjectManagerBase.call(this, controller);
@@ -823,8 +831,7 @@ ObjectManager.prototype.instantiate = function(className, constructParams) {
     return instance;
 };
 
-// Add symbol 'ObjectManager' to package 'client'
-client.ObjectManager = ObjectManager;
+module.exports.ObjectManager = ObjectManager;
 
 let Controller = function(uiState, myId, getMyName, eventLoopCallbacks, finishedCallback, behindScheduleCallback) {
     if (finishedCallback === undefined) finishedCallback = null;
@@ -842,6 +849,4 @@ Controller.prototype = new Object();
     }
 })();
 
-// Add symbol 'Controller' to package 'client'
-client.Controller = Controller;
-})();
+module.exports.Controller = Controller;

+ 9 - 8
src/main/webapp/plugins/cdf/versioning.js

@@ -3,12 +3,12 @@
 
 Draw.loadPlugin(async function(ui) {
 
-  const {Context, History, uuidv4} = require("../../../../../lib/versioning/History.js");
-  const {UserColors} = require("../../../../../lib/versioning/UserColors.js")
-  const {DragHandler} = require("../../../../../lib/versioning/DragHandler.js");
-  const {GhostOverlays} = require("../../../../../lib/versioning/GhostOverlays.js");
-  await loadScript("../../../sccd/docs/runtimes/javascript/statecharts_core.js");
-  await loadScript("../../../lib/versioning/client.js");
+  const {Context, History, uuidv4} = require("doh");
+  const {UserColors} = require("./UserColors.js")
+  const {DragHandler} = require("./DragHandler.js");
+  const {GhostOverlays} = require("./GhostOverlays.js");
+  const {Controller: StatechartController} = require("./client_compiled.js");
+  const {JsEventLoop} = require("statecharts_core");
 
   const myId = uuidv4();
 
@@ -400,7 +400,7 @@ Draw.loadPlugin(async function(ui) {
     return myName;
   }
 
-  const controller = new client.Controller(uiState, myId, getMyName, new JsEventLoop());
+  const controller = new StatechartController(uiState, myId, getMyName, new JsEventLoop());
 
   const selectionCounter = new Map();
   const userSelection = new Map();
@@ -453,7 +453,7 @@ Draw.loadPlugin(async function(ui) {
   const dragHandler = new DragHandler(controller);
   dragHandler.install(graph);
 
-  const ghostOverlays = new GhostOverlays(graph, userColors, "../../../lib/versioning/resources/cursor.svg");
+  const ghostOverlays = new GhostOverlays(graph, userColors, "myPlugins/cursor.svg");
 
   graph.addMouseListener({
     mouseDown(graph, event) {
@@ -638,6 +638,7 @@ Draw.loadPlugin(async function(ui) {
 
 
   // HACK: disable undo
+  
   mxUndoManager.prototype.canUndo = function() {
     return false;
   };

lib/versioning/client_statechart.drawio → src/collab/client/statechart_src/client_statechart.drawio


BIN
src/collab/client/statechart_src/client_statechart.pdf


+ 1 - 1
lib/versioning/client.xml

@@ -1,6 +1,6 @@
 <?xml version="1.0" ?>
 <diagram author="Joeri Exelmans" name="client">
-  <description>Example of a browser-based WebSocket client with heartbeats</description>
+  <description>Statechart model implementing much of the 'client' part of our protocol for synchronous collaboration sessions (joining, leaving, auto-reconnecting, broadcasting and receiving operations, and heartbeats to monitor connection health).</description>
   <top></top>
   <inport name="in"/>
   <outport name="out"/>

+ 19 - 11
lib/versioning/run_server.js

@@ -16,6 +16,15 @@ const stateDir = process.env.DRAWIOSTATEDIR || ".";
 async function startServer() {
   const opsDir = path.join(stateDir, 'ops');
   const sessionDir = path.join(stateDir, 'sessions');
+  try {
+    // make state directories if necessary
+    await Promise.all([
+      fs.mkdir(opsDir, {recursive: true}),
+      fs.mkdir(sessionDir, {recursive: true}),
+    ]);
+  } catch (e) {
+    // ignore
+  }
   try {
     process.stdout.write("Can read/write in directory '" + opsDir + "' ? ...");
     await fs.access(opsDir, fsConstants.R_OK | fsConstants.W_OK);
@@ -42,12 +51,12 @@ async function startServer() {
     // More details here: https://github.com/vercel/serve-handler#options
     console.log(request.method, request.url)
     return handler(request, response, {
-      "headers": [
+      headers: [
         {
-          "source": "**/*.js",
-          "headers": [{
-            "key": "Cache-Control",
-            "value": "max-age=0",
+          source: "**/*.js",
+          headers: [{
+            key: "Cache-Control",
+            value: "max-age=0",
           }],
         }],
       });
@@ -184,9 +193,9 @@ async function startServer() {
               throw e;
             }
           });
-          const opIds = new Array(session.length / IDLENGTH);
+          const opIds = new Array(session.length / (IDLENGTH+1));
           for (let i=0; i<opIds.length; i+=1) {
-            const offset = i * IDLENGTH;
+            const offset = i * (IDLENGTH+1);
             opIds[i] = session.slice(offset, offset + IDLENGTH);
           }
           const ops = await Promise.all(opIds.map(async id => ({id, detail: await opsDB.readJSON(id)})));
@@ -196,11 +205,10 @@ async function startServer() {
           return new JoinedHandler(sessionId);
         }
         else if (req.type === "new_share") {
-          console.log("HEY");
           const { reqId, ops } = req;
           const sessionId = uuidv4();
           await Promise.all(ops.map(({id, detail}) => opsDB.writeJSON(id, detail)));
-          await sessionDB.write(sessionId, ops.map(op=>op.id).join(''));
+          await sessionDB.write(sessionId, ops.map(op=>op.id).join('\n'));
           socket.sendJSON({type: "ack", reqId, sessionId});
 
           this.close();
@@ -241,7 +249,7 @@ async function startServer() {
           // await asyncSleep(3000);
           const { reqId, op: {id, detail} } = req;
           await opsDB.writeJSON(id, detail);
-          await sessionDB.append(this.session.id, id); // Creates file if it doesn't exist yet
+          await sessionDB.append(this.session.id, id+'\n'); // Creates file if it doesn't exist yet
           // Best effort broadcast to all subscribers
           this.session.broadcast({type: "pub_edit", op: {id, detail}}, socket);
           // Send ACK
@@ -312,7 +320,7 @@ async function startServer() {
   });
 
   httpServer.listen(port);
-  console.log("Listening on", port);
+  console.log("Listening on http://localhost:" + port);
 }
 
 

Rozdielové dáta súboru neboli zobrazené, pretože súbor je príliš veľký
+ 2 - 2
src/main/webapp/js/app.min.js


+ 3 - 8
src/main/webapp/js/diagramly/App.js

@@ -331,14 +331,9 @@ App.pluginRegistry = {
 	'tags': 'plugins/tags.js',
 
 
-	'logevents': 'plugins/cdf/logevents.js',
-	'ftgpm': 'plugins/cdf/ftgpm.js',
-	'ftgpm-edit': 'plugins/cdf/ftgpm-edit.js',
-	// 'versioning': 'plugins/cdf/versioning.js',
-	'versioning': 'plugins/cdf/versioning.browser.js',
-	'sendshapes': 'plugins/cdf/sendshapes.js',
-	'screenshare': 'plugins/cdf/screenshare.js',
-	'svg-viewport': 'plugins/cdf/svg-viewport.js',
+	'ftgpm': 'myPlugins/ftgpm.js',
+	'svg-viewport': 'myPlugins/svg-viewport.js',
+	'collab': 'myPlugins/collab.js',
 };
 
 App.publicPlugin = [];

Rozdielové dáta súboru neboli zobrazené, pretože súbor je príliš veľký
+ 166 - 166
src/main/webapp/js/stencils.min.js


Rozdielové dáta súboru neboli zobrazené, pretože súbor je príliš veľký
+ 4182 - 0
src/main/webapp/myPlugins/collab.js


lib/versioning/resources/cursor.svg → src/main/webapp/myPlugins/cursor.svg


src/main/webapp/plugins/cdf/ftgpm-edit.js → src/main/webapp/myPlugins/ftgpm.js


src/main/webapp/plugins/cdf/svg-viewport.js → src/main/webapp/myPlugins/svg-viewport.js


Rozdielové dáta súboru neboli zobrazené, pretože súbor je príliš veľký
+ 0 - 94
src/main/webapp/plugins/cdf/ftgpm.js


+ 0 - 29
src/main/webapp/plugins/cdf/logevents.js

@@ -1,29 +0,0 @@
-Draw.loadPlugin(function(ui) {
-  const graph = ui.editor.graph;
-
-  const new_handler = (label) => {
-    return (source, eventObj) => {
-      if (eventObj.name == mxEvent.FIRE_MOUSE_EVENT)
-        return;
-
-      console.log(label + " emit ", eventObj);
-    };
-  }
-
-  // ui.editor.graph.model.addListener(null, new_handler("mxGraphModel"));
-  // ui.editor.graph.selectionModel.addListener(null, new_handler("mxSelectionModel"));
-
-  const loggers = {}
-
-  window.ui = ui;
-
-  window.logEvents = (eventSource, name, event) => {
-    logger = new_handler(name);
-    eventSource.addListener(event, logger);
-    loggers[name] = logger
-  };
-
-  window.unlogEvents = (eventSource, name) => {
-    eventSource.removeListener(loggers[name]);
-  };
-})

+ 0 - 41
src/main/webapp/plugins/cdf/messaging.js

@@ -1,41 +0,0 @@
-async function getMessagingClient(ui)
-{
-  // Load scripts in the right order
-  const p = Promise.all([
-      loadScript("../../../websockets/common.js"),
-      loadScript("../../../websockets/uuidv4.min.js"),
-    ]).then(() => loadScript("../../../websockets/client.js"));
-
-  // Display Connected/Disconnected status on toolbar
-  ui.toolbar.addSeparator();
-  const statusDiv = document.createElement('div');
-  statusDiv.classList.add("geLabel");
-  statusDiv.style = "white-space: nowrap; position: relative;";
-  const status = document.createTextNode("Disconnected");
-  statusDiv.appendChild(status);
-  ui.toolbar.container.appendChild(statusDiv);
-
-  let websocketOrigin;
-  if (document.location.protocol === "https:") {
-    websocketOrigin = "wss://" + document.location.origin.substring(8);
-  } else {
-    websocketOrigin = "ws://" + document.location.origin.substring(7);
-  }
-
-  await p;
-
-  const ourId = uuidv4();
-  const client = new Client(websocketOrigin + "/websocket", ourId);
-
-  client.on('disconnected', () => {
-    status.textContent = "Disconnected";
-  })
-
-  client.on('receivePush', (what, data) => {
-    if (what === "peers") {
-      status.textContent = "Connected as Peer " + shortUUID(client.uuid);
-    }
-  });
-
-  return client;
-}

+ 0 - 41
src/main/webapp/plugins/cdf/screenshare.js

@@ -1,41 +0,0 @@
-Draw.loadPlugin(async function(ui) {
-
-  window.ui = ui; // global variable for debugging
-  const graph = ui.editor.graph;
-
-  await Promise.all([
-    loadScript("plugins/cdf/messaging.js"),
-    loadScript("../../../lib/screenshare2.js"),
-    loadScript("../../../lib/uitools.js"),
-  ]);
-
-  client = await getMessagingClient(ui);
-
-  const peers = new Peers(client);
-
-  const uiTools = new UiTools(ui);
-
-  const screenShare = new ScreenShare(client, peers, graph, ui.editor.undoManager, uiTools.yesNo.bind(uiTools), ui.showAlert.bind(ui));
-
-  client.connect();
-
-  // UI stuff
-  ui.toolbar.addSeparator();
-  ui.menus.put('screenshare', new Menu(function(menu, parent) {
-    const peerList = peers.getPeers();
-    if (peerList.length > 0) {
-      peerList.forEach(peer => {
-        menu.addItem("Peer " + shortUUID(peer), screenShare.sharingWith === peer  ? Editor.checkmarkImage : null, () => screenShare.initshare(peer), menu);
-      });
-    } else {
-      menu.addItem("No peers ", null, null, menu, null, false);
-    }
-  }))
-  const screenshareMenu = ui.toolbar.addMenu('', "Share screen with another user", true, 'screenshare');
-  screenshareMenu.style.width = '100px';
-  screenshareMenu.showDisabled = true;
-  screenshareMenu.style.whiteSpace = 'nowrap';
-  screenshareMenu.style.position = 'relative';
-  screenshareMenu.style.overflow = 'hidden';
-  screenshareMenu.innerHTML = "Screen Share" + ui.toolbar.dropdownImageHtml;
-});

+ 0 - 75
src/main/webapp/plugins/cdf/sendshapes.js

@@ -1,75 +0,0 @@
-Draw.loadPlugin(async function(ui) {
-
-  const p = loadScript("plugins/cdf/messaging.js");
-
-  const graph = ui.editor.graph;
-
-  console.log(ui)
-
-  let peers = [];
-
-  const oldFactoryMethod = graph.popupMenuHandler.factoryMethod;
-
-  const newFactoryMethod = function(menu, cell, evt) {
-    // build default context menu
-    oldFactoryMethod.apply(this, arguments);
-
-    // add submenu at the end
-    const selectedCells = graph.getSelectionCells()
-    if (selectedCells.length > 0) {
-      console.log("selected:", selectedCells);
-      const codec = new mxCodec();
-      const encoded = codec.encode(selectedCells);
-
-      const submenu = menu.addItem("Send to", null);
-      menu.addItem("You are peer " + shortUUID(you), null, null, submenu, null, false);
-      if (peers.length > 0) {
-        peers.forEach(peer => {
-          menu.addItem("Peer " + shortUUID(peer), null, function() {
-            const serializedShapes = new XMLSerializer().serializeToString(encoded);
-            p2p.send(peer, "shapes", serializedShapes, (err, data) => {
-              if (err) {
-                alert("Error:" + err + " (see console for details)");
-                console.log("err", err, "data", data)
-              }
-            })
-          }, submenu);
-        })
-      } else {
-        menu.addItem("No peers", null, null, submenu, null, false);
-      }
-    }
-  }
-
-  await p;
-  client = await getMessagingClient(ui);
-
-  client.on('disconnected', () => {
-    graph.popupMenuHandler.factoryMethod = oldFactoryMethod;
-  })
-
-  client.on('receivePush', (what, data) => {
-    if (what === "peers") {
-      you = data.you;
-      peers = data.peers.filter(p => p !== you);
-      graph.popupMenuHandler.factoryMethod = newFactoryMethod;
-    }
-  });
-
-  const p2p = new PeerToPeer(client, {
-    // incoming request from another peer
-    "shapes": (from, data, reply) => {
-      console.log("received shapes from peer", from);
-      console.log("data:", data);
-
-      const parsedXml = new DOMParser().parseFromString(data, "text/xml").firstElementChild;
-      const codec = new mxCodec();
-      const cells = codec.decode(parsedXml);
-      graph.importCells(cells);
-
-      reply(); // acknowledge
-    },
-  });
-
-  client.connect();
-});

Rozdielové dáta súboru neboli zobrazené, pretože súbor je príliš veľký
+ 0 - 1952
src/main/webapp/plugins/cdf/versioning.browser.js


+ 0 - 1
websockets

@@ -1 +0,0 @@
-Subproject commit 16d96da04a049fa1753e77b560bbbde0350feb7b