}
public void register(INode node) {
- // We use ths size of this map to determine whether updates are needed, this is done in AWT thread
+ // We use the size of this map to determine whether updates are needed, this is done in AWT thread
synchronized(requesters) {
if(requesters.size() == 0) {
if(state.compareAndSet(false, true)) {
- ThreadUtils.getNonBlockingWorkExecutor().scheduleAtFixedRate(this, 0, 500, TimeUnit.MILLISECONDS);
+ ThreadUtils.getNonBlockingWorkExecutor().scheduleWithFixedDelay(this, 0, 500, TimeUnit.MILLISECONDS);
}
}
ICanvasContext context = DiagramNodeUtil.getPossibleCanvasContext((G2DNode)node);
StandardAssertedGraphPropertyVariable ass = (StandardAssertedGraphPropertyVariable)property;
if("dataDefinitions".equals(ass.property.name) || "commands".equals(ass.property.name) || "pollingFunction".equals(ass.property.name)) {
storePropertyValueAndExceptions(graph, parent, ass.property.name, property, map);
+ continue;
}
- continue;
}
Resource predicate = property.getPossiblePredicateResource(graph);
if(predicate != null) {
StandardAssertedGraphPropertyVariable ass = (StandardAssertedGraphPropertyVariable)property;
if("dataDefinitions".equals(ass.property.name) || "commands".equals(ass.property.name) || "pollingFunction".equals(ass.property.name)) {
result.add(ass.property.name);
+ continue;
}
- continue;
}
Resource predicate = property.getPossiblePredicateResource(graph);
if(predicate != null) {
// long delay = 1000 / 25; this sounds quite frequent
long delay = 1000 / 10;
lastTrigger = System.currentTimeMillis();
- timer.scheduleAtFixedRate(task, delay, delay, TimeUnit.MILLISECONDS);
+ timer.scheduleWithFixedDelay(task, delay, delay, TimeUnit.MILLISECONDS);
}
@HintListener(Class = Hints.class, Field = "KEY_CANVAS_BOUNDS")
return;
long interval = getInterval();
- future = ThreadUtils.getNonBlockingWorkExecutor().scheduleAtFixedRate(onTimer, DEFAULT_INTERVAL, interval, TimeUnit.MILLISECONDS);
+ future = ThreadUtils.getNonBlockingWorkExecutor().scheduleWithFixedDelay(onTimer, DEFAULT_INTERVAL, interval, TimeUnit.MILLISECONDS);
}
private void cancelTimer() {
*/
public class CSVFormatter {
+ /**
+ * This is the tolerance used to decide whether or not the last data point of
+ * the exported items is included in the exported material. If
+ * <code>0 <= (t - t(lastDataPoint)) < {@value #RESAMPLING_END_TIMESTAMP_INCLUSION_TOLERANCE}</code>
+ * is true, then the last exported data point will be
+ * <code>lastDataPoint</code>, with timestamp <code>t(lastDataPoint)</code> even
+ * if <code>t > t(lastDataPoint)</code>.
+ *
+ * <p>
+ * This works around problems where floating point inaccuracy causes a data
+ * point to be left out from the export when it would be fair for the user
+ * to expect the exported data to contain a point with time stamp
+ * <code>9.999999999999996</code> when sampling with time-step <code>1.0</code>
+ * starting from time <code>0.0</code>.
+ */
+ private static final double RESAMPLING_END_TIMESTAMP_INCLUSION_TOLERANCE = 1e-13;
+
List<Item> items = new ArrayList<Item>();
double from = -Double.MAX_VALUE;
double end = Double.MAX_VALUE;
BigDecimal bigTime = new BigDecimal(String.valueOf(time));
BigDecimal bigTimeStep = new BigDecimal(String.valueOf(timeStep));
+ // Loop kill-switch for the case where timeStep > 0
+ boolean breakAfterNextWrite = false;
+
+// System.out.println("time: " + time);
+// System.out.println("timeStep: " + timeStep);
+// System.out.println("_end: " + Double.toString(_end));
+
for (Item i : items) i.iter.gotoTime(time);
do {
if ( monitor!=null && monitor.isCanceled() ) return;
sb.append( lineFeed );
- // Read next values, and the following times
- if ( timeStep>0.0 ) {
- bigTime = bigTime.add(bigTimeStep);
- time = bigTime.doubleValue();
+ if (breakAfterNextWrite)
+ break;
+
+ // Read next values, and the following times
+ if ( timeStep>0.0 ) {
+ bigTime = bigTime.add(bigTimeStep);
+ time = bigTime.doubleValue();
+
+ // gitlab #529: prevent last data point from getting dropped
+ // due to small imprecisions in re-sampling mode.
+ double diff = time - _end;
+ if (diff > 0 && diff <= RESAMPLING_END_TIMESTAMP_INCLUSION_TOLERANCE) {
+ time = _end;
+ breakAfterNextWrite = true;
+ // Take floating point inaccuracy into account when re-sampling
+ // to prevent the last data point from being left out if there
+ // is small-enough imprecision in the last data point time stamp
+ // to be considered negligible compared to expected stepped time.
+ }
+
} else {
// Get smallest end time that is larger than current time
Double nextTime = null;
if(contains(i, time)) hasMore = true;
}
+ //System.out.println("hasMore @ " + time + " (" + bigTime + ") = " + hasMore);
if(!hasMore) break;
} while (time<=_end);
Variable firstRepresentedParent = findFirstParentWithRepresentation(graph, parameter, STR);
if (firstRepresentedParent == null)
return null;
- Resource realParentComposite = graph.getPossibleObject(firstRepresentedParent.getRepresents(graph), L0.PartOf);
- if (realParentComposite == null)
- return null;
- isInsideStructure = graph.hasStatement(realParentComposite, STR.Defines);
+ Resource representedParent = firstRepresentedParent.getRepresents(graph);
+ Resource representedParentType = graph.getPossibleType(representedParent, STR.Component);
+ if (representedParentType != null && graph.isInstanceOf(representedParentType, STR.ProceduralComponentType)) {
+ isInsideStructure = !parameter.equals(firstRepresentedParent);
+ } else {
+ Resource realParentComposite = graph.getPossibleObject(representedParent, L0.PartOf);
+ if (realParentComposite == null)
+ return null;
+ isInsideStructure = graph.hasStatement(realParentComposite, STR.Defines);
+ }
Variable firstParentComposite = findFirstParentComposite(graph, firstRepresentedParent, STR);
if (firstParentComposite != null) {