gerrit.simantics Code Review - simantics/platform.git/commitdiff
Merge "Some fixes for FileImportService to throw exceptions forward"
author     Jani Simomaa <jani.simomaa@vtt.fi>            Mon, 13 Feb 2017 17:32:45 +0000 (19:32 +0200)
committer  Gerrit Code Review <gerrit2@www.simantics.org>  Mon, 13 Feb 2017 17:32:45 +0000 (19:32 +0200)
15 files changed:
bundles/org.simantics.acorn/src/org/simantics/acorn/ClusterManager.java
bundles/org.simantics.acorn/src/org/simantics/acorn/GraphClientImpl2.java
bundles/org.simantics.acorn/src/org/simantics/acorn/HeadState.java
bundles/org.simantics.acorn/src/org/simantics/acorn/HeadState1.java [new file with mode: 0644]
bundles/org.simantics.acorn/src/org/simantics/acorn/internal/AcornDatabase.java
bundles/org.simantics.acorn/src/org/simantics/acorn/lru/LRU.java
bundles/org.simantics.acorn/src/org/simantics/acorn/lru/LRUObject.java
bundles/org.simantics.browsing.ui.model/src/org/simantics/browsing/ui/model/labels/LabelContribution.java
bundles/org.simantics.db.layer0/src/org/simantics/db/layer0/util/Layer0Utils.java
bundles/org.simantics.graph.compiler/src/org/simantics/graph/compiler/internal/resourceFiles/FilterCRWriter.java [new file with mode: 0644]
bundles/org.simantics.graph.compiler/src/org/simantics/graph/compiler/internal/resourceFiles/ResourceFile.java
bundles/org.simantics.scl.db/scl/Simantics/DB.scl
bundles/org.simantics.simulation/META-INF/MANIFEST.MF
bundles/org.simantics.simulation/src/org/simantics/simulation/project/ExperimentManager.java
features/org.simantics.ui.workbench.feature/feature.xml

index 22629d4e64237ab63c6fc89a12f0754087254edb..40c5de37e8c1991e6e5ce846e2b90e7b651b6c5e 100644 (file)
@@ -2,9 +2,11 @@ package org.simantics.acorn;
 
 import java.io.IOException;
 import java.math.BigInteger;
+import java.nio.file.CopyOption;
 import java.nio.file.DirectoryStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
+import java.nio.file.StandardCopyOption;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -22,6 +24,7 @@ import org.simantics.acorn.lru.ClusterLRU;
 import org.simantics.acorn.lru.ClusterStreamChunk;
 import org.simantics.acorn.lru.FileInfo;
 import org.simantics.acorn.lru.LRU;
+import org.simantics.databoard.file.RuntimeIOException;
 import org.simantics.db.ClusterCreator;
 import org.simantics.db.Database.Session.ClusterIds;
 import org.simantics.db.Database.Session.ResourceSegment;
@@ -33,6 +36,7 @@ import org.simantics.db.impl.ClusterSupport;
 import org.simantics.db.procore.cluster.ClusterTraits;
 import org.simantics.db.service.ClusterSetsSupport;
 import org.simantics.db.service.ClusterUID;
+import org.simantics.utils.FileUtils;
 import org.simantics.utils.threads.logger.ITask;
 import org.simantics.utils.threads.logger.ThreadLogger;
 import org.slf4j.Logger;
@@ -134,6 +138,119 @@ public class ClusterManager {
        private AtomicBoolean safeToMakeSnapshot = new AtomicBoolean(true);
        private IllegalAcornStateException cause;
        
+       public synchronized void purge(ServiceLocator locator) throws IllegalAcornStateException {
+               
+           try {
+
+               // Schedule writing of all data to disk
+               refreshHeadState();
+               // Wait for files to be written
+               synchronizeWorkingDirectory();
+               
+               String currentDir = workingDirectory.getFileName().toString();
+               Path baseline = workingDirectory.resolveSibling(currentDir + "_baseline");
+               
+               Files.createDirectories(baseline);
+               
+               for(String clusterKey : state.clusters) {
+                       String[] parts1 = clusterKey.split("#");
+                       String[] parts = parts1[0].split("\\.");
+                       String readDirName = parts1[1];
+                       if(!readDirName.equals(currentDir)) {
+                               String fileName = parts[0] + "." + parts[1] + ".cluster";
+                               Path from = dbFolder.resolve(readDirName).resolve(fileName);
+                               Path to = baseline.resolve(fileName);
+                               System.err.println("purge copies " + from + "  => " + to);
+                               Files.copy(from, to, StandardCopyOption.COPY_ATTRIBUTES);
+                               long first = new BigInteger(parts[0], 16).longValue();
+                               long second = new BigInteger(parts[1], 16).longValue();
+                               ClusterUID uuid = ClusterUID.make(first, second);
+                               ClusterInfo info = clusterLRU.getWithoutMutex(uuid);
+                               info.moveTo(baseline);
+                       }
+               }
+               
+               for (String fileKey : state.files) {
+                       String[] parts = fileKey.split("#");
+                       String readDirName = parts[1];
+                       if(!readDirName.equals(currentDir)) {
+                               String fileName = parts[0] + ".extFile";
+                               Path from = dbFolder.resolve(readDirName).resolve(fileName);
+                               Path to = baseline.resolve(fileName);
+                               System.err.println("purge copies " + from + "  => " + to);
+                               Files.copy(from, to, StandardCopyOption.COPY_ATTRIBUTES);
+                               FileInfo info = fileLRU.getWithoutMutex(parts[0]);
+                               info.moveTo(baseline);
+                       }
+               }
+               
+               for (String fileKey : state.stream) {
+                       String[] parts = fileKey.split("#");
+                       String readDirName = parts[1];
+                       if(!readDirName.equals(currentDir)) {
+                               ClusterStreamChunk chunk = streamLRU.purge(parts[0]);
+                               System.err.println("purge removes " + chunk);
+                       }
+               }
+               
+               // Change sets
+               for (String fileKey : state.cs) {
+                       String[] parts = fileKey.split("#");
+                       String readDirName = parts[1];
+                       if(!readDirName.equals(currentDir)) {
+                               Long revisionId = Long.parseLong(parts[0]);
+                               ChangeSetInfo info = csLRU.purge(revisionId);
+                               System.err.println("purge removes " + info);
+                       }
+//                     Path readDir = dbFolder.resolve(parts[1]);
+//                     Long revisionId = Long.parseLong(parts[0]);
+//                     int offset = Integer.parseInt(parts[2]);
+//                     int length = Integer.parseInt(parts[3]);
+//                     ChangeSetInfo info = new ChangeSetInfo(csLRU, readDir, revisionId, offset, length);
+//                     csLRU.map(info);
+               }               
+               
+               state.tailChangeSetId = state.headChangeSetId;
+               
+               makeSnapshot(locator, true);
+               
+               Files.walk(dbFolder, 1).filter(Files::isDirectory).forEach(f -> tryPurgeDirectory(f));
+
+           } catch (IllegalAcornStateException e) {
+               notSafeToMakeSnapshot(e);
+               throw e;
+           } catch (IOException e) {
+               IllegalAcornStateException e1 = new IllegalAcornStateException(e);
+               notSafeToMakeSnapshot(e1);
+               throw e1;
+           } catch (AcornAccessVerificationException e) {
+               IllegalAcornStateException e1 = new IllegalAcornStateException(e);
+               notSafeToMakeSnapshot(e1);
+               throw e1;
+           }
+
+       }
+       
+       void tryPurgeDirectory(Path f) {
+
+               String currentDir = f.getFileName().toString();
+               if(currentDir.endsWith("db"))
+                       return;
+
+               if(currentDir.endsWith("_baseline"))
+                       currentDir = currentDir.replace("_baseline", "");
+
+               int ordinal = Integer.parseInt(currentDir);
+               if(ordinal < mainState.headDir - 1) {
+                       System.err.println("purge deletes " + f);
+                       FileUtils.deleteDir(f.toFile());
+               }
+               
+       }
+
        public synchronized boolean makeSnapshot(ServiceLocator locator, boolean fullSave) throws IllegalAcornStateException {
            try {
            if (!safeToMakeSnapshot.get())
@@ -221,9 +338,13 @@ public class ClusterManager {
                csLRU.persist(state.cs);
        }
        
-       private void persistHeadState() throws IOException {
+       private void synchronizeWorkingDirectory() throws IOException {
                // Sync current working directory
                Files.walk(workingDirectory, 1).filter(Files::isRegularFile).forEach(FileIO::uncheckedSyncPath);
+       }
+       
+       private void persistHeadState() throws IOException {
+               synchronizeWorkingDirectory();
                state.save(workingDirectory);
                mainState.headDir++;
        }
@@ -583,4 +704,8 @@ public class ClusterManager {
         this.cause = t;
     }
 
+    public long getTailChangeSetId() {
+       return state.tailChangeSetId;
+    }
+    
 }
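
The purge above relies on the key formats persisted in HeadState: cluster keys look like "<high>.<low>#<dirName>" with the UID halves in hexadecimal, while the file, stream and change-set loops split on the same '#' separator to recover the directory name. A minimal decoding sketch of the cluster case, mirroring the loop above (the key value and the helper name are illustrative only):

    // Sketch: decode a persisted cluster key the way purge() does
    // (assumes java.math.BigInteger and org.simantics.db.service.ClusterUID, both imported above)
    static ClusterUID decodeClusterKey(String clusterKey) {      // e.g. "1a2b.3c4d#7" (hypothetical value)
        String[] parts1 = clusterKey.split("#");                 // parts1[1] is the directory the cluster was last written to
        String[] parts  = parts1[0].split("\\.");                // hex halves of the cluster UID
        long first  = new BigInteger(parts[0], 16).longValue();
        long second = new BigInteger(parts[1], 16).longValue();
        return ClusterUID.make(first, second);                   // purge() then copies "<high>.<low>.cluster" into the _baseline directory
    }
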
index 904427b6d617f6682c78547db1dde17a419dff48..acf8b6e659bfe7de8393b4a046ab32b365bc300d 100644 (file)
@@ -725,29 +725,67 @@ public class GraphClientImpl2 implements Database.Session {
     public boolean rolledback() {
         return clusters.rolledback();
     }
+    
+    public void purge() throws IllegalAcornStateException {
+        clusters.purge(locator);
+    }
 
-       
-       
-       
-       
-       
-       
-       
-       
-       
-       
-       ////////////////////////
-       
-       
-       
-       
-       
-       
-       
-       
-       
-       
-       
-       
+    public void purgeDatabase() {
+       
+           if (isClosing || unexpectedClose)
+               return;
+           
+               saver.execute(new Runnable() {
+
+                       @Override
+                       public void run() {
+                               Transaction tr = null;
+                               try {
+                                       // First take a write transaction
+                                       tr = askWriteTransaction(-1);
+                                       // Then make sure that MainProgram is idling
+                                       mainProgram.mutex.acquire();
+                                       try {
+                                               synchronized(mainProgram) {
+                                                       if(mainProgram.operations.isEmpty()) {
+                                                               purge();
+                                                       } else {
+                                                               // MainProgram is becoming busy again - delay snapshotting
+                                                               return;
+                                                       }
+                                               }
+                                       } finally {
+                                               mainProgram.mutex.release();
+                                       }
+                               } catch (IllegalAcornStateException | ProCoreException e) {
+                                       Logger.defaultLogError("Purge failed", e);
+                                       unexpectedClose = true;
+                               } catch (InterruptedException e) {
+                                       Logger.defaultLogError("Purge interrupted", e);
+                               } finally {
+                                       try {
+                                               if(tr != null)
+                                                       endTransaction(tr.getTransactionId());
+                                               if (unexpectedClose) {
+                                                       LifecycleSupport support = getServiceLocator().getService(LifecycleSupport.class);
+                                                       try {
+                                                               support.close();
+                                                       } catch (DatabaseException e1) {
+                                                               Logger.defaultLogError("Failed to close database as a safety measure due to failed purge", e1);
+                                                       }
+                                               }
+                                       } catch (ProCoreException e) {
+                                               Logger.defaultLogError("Failed to end purge write transaction", e);
+                                       }
+                               }
+                       }
+               });
+       
+    }
+    
+    public long getTailChangeSetId() {
+       return clusters.getTailChangeSetId();
+    }
+    
 }
 
index dd8703c1fc689e0ed0bbfc1968e6730913d74750..fd38bc98542052bbdb2e762c65d49a4cad4f3585 100644 (file)
@@ -12,6 +12,7 @@ import java.util.Arrays;
 
 import org.simantics.acorn.exception.InvalidHeadStateException;
 import org.simantics.databoard.Bindings;
+import org.simantics.databoard.adapter.AdapterConstructionException;
 import org.simantics.databoard.binding.mutable.MutableVariant;
 import org.simantics.databoard.serialization.Serializer;
 import org.simantics.databoard.util.binary.BinaryMemory;
@@ -31,6 +32,8 @@ public class HeadState {
     public ArrayList<String> cs = new ArrayList<>();
 //    public ArrayList<String> ccs = new ArrayList<String>();
 
+    public long tailChangeSetId = 1;
+
     public static HeadState load(Path directory) throws InvalidHeadStateException {
         Path f = directory.resolve(HEAD_STATE);
         
@@ -51,6 +54,11 @@ public class HeadState {
                 return object;
             }
         } catch (IOException i) {
+               Throwable cause = i.getCause();
+               if(cause instanceof AdapterConstructionException) {
+                       HeadState1 old = HeadState1.load(directory);
+                       return old.migrate();
+               }
             return new HeadState();
 //            throw new InvalidHeadStateException(i);
         } catch (NoSuchAlgorithmException e) {
diff --git a/bundles/org.simantics.acorn/src/org/simantics/acorn/HeadState1.java b/bundles/org.simantics.acorn/src/org/simantics/acorn/HeadState1.java
new file mode 100644 (file)
index 0000000..d58a286
--- /dev/null
@@ -0,0 +1,117 @@
+package org.simantics.acorn;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.ArrayList;
+import java.util.Arrays;
+
+import org.simantics.acorn.exception.InvalidHeadStateException;
+import org.simantics.databoard.Bindings;
+import org.simantics.databoard.binding.mutable.MutableVariant;
+import org.simantics.databoard.serialization.Serializer;
+import org.simantics.databoard.util.binary.BinaryMemory;
+
+public class HeadState1 {
+
+    public static final String HEAD_STATE = "head.state";
+    public static final String SHA_1 = "SHA-1";
+    
+    public int headChangeSetId = 0;
+    public long transactionId = 1;
+    public long reservedIds = 3;
+
+    public ArrayList<String> clusters = new ArrayList<>();
+    public ArrayList<String> files = new ArrayList<>();
+    public ArrayList<String> stream = new ArrayList<>();
+    public ArrayList<String> cs = new ArrayList<>();
+    
+    public HeadState migrate() {
+       HeadState state = new HeadState();
+       state.headChangeSetId = headChangeSetId;
+       state.transactionId = transactionId;
+       state.reservedIds = reservedIds;
+       state.clusters = clusters;
+       state.files = files;
+       state.stream = stream;
+       state.cs = cs;
+       return state;
+    }
+
+    public static HeadState1 load(Path directory) throws InvalidHeadStateException {
+        Path f = directory.resolve(HEAD_STATE);
+        
+        try {
+            byte[] bytes = Files.readAllBytes(f);
+            MessageDigest sha1 = MessageDigest.getInstance(SHA_1);
+            int digestLength = sha1.getDigestLength();
+            
+            sha1.update(bytes, digestLength, bytes.length - digestLength);
+            byte[] newChecksum = sha1.digest();
+            if (!Arrays.equals(newChecksum, Arrays.copyOfRange(bytes, 0, digestLength))) {
+                throw new InvalidHeadStateException(
+                        "Checksum " + Arrays.toString(newChecksum) + " does not match expected "
+                                + Arrays.toString(Arrays.copyOfRange(bytes, 0, digestLength)) + " for " + f.toAbsolutePath());
+            }
+            try (ByteArrayInputStream bais = new ByteArrayInputStream(bytes, digestLength, bytes.length - digestLength)) {
+                HeadState1 object = (HeadState1) org.simantics.databoard.Files.readFile(bais, Bindings.getBindingUnchecked(HeadState1.class));
+                return object;
+            }
+        } catch (IOException i) {
+            return new HeadState1();
+        } catch (NoSuchAlgorithmException e) {
+            throw new Error("SHA-1 Algorithm not found", e);
+        } catch (Throwable t) {
+            throw new InvalidHeadStateException(t);
+        }
+    }
+    
+    public void save(Path directory) throws IOException {
+        Path f = directory.resolve(HEAD_STATE);
+        try {
+            BinaryMemory rf = new BinaryMemory(4096);
+            try {
+                MutableVariant v = new MutableVariant(Bindings.getBindingUnchecked(HeadState1.class), this);
+                Serializer s = Bindings.getSerializerUnchecked( Bindings.VARIANT );
+                s.serialize(rf, v);
+            } finally {
+                rf.close();
+            }
+            
+            byte[] bytes = rf.toByteBuffer().array();
+            
+            MessageDigest sha1 = MessageDigest.getInstance(SHA_1);
+            sha1.update(bytes);
+            byte[] checksum = sha1.digest();
+            
+            try (OutputStream out = Files.newOutputStream(f)) {
+                out.write(checksum);
+                out.write(bytes);
+            }
+            FileIO.syncPath(f);
+        } catch (NoSuchAlgorithmException e) {
+            throw new Error("SHA-1 digest not found, should not happen", e);
+        }
+    }
+
+    public static void validateHeadStateIntegrity(Path headState) throws InvalidHeadStateException, IOException {
+        try {
+            byte[] bytes = Files.readAllBytes(headState);
+            MessageDigest sha1 = MessageDigest.getInstance(SHA_1);
+            int digestLength = sha1.getDigestLength();
+            sha1.update(bytes, digestLength, bytes.length - digestLength);
+            byte[] newChecksum = sha1.digest();
+            if (!Arrays.equals(newChecksum, Arrays.copyOfRange(bytes, 0, digestLength))) {
+                throw new InvalidHeadStateException(
+                        "Checksum " + Arrays.toString(newChecksum) + " does not match expected "
+                                + Arrays.toString(Arrays.copyOfRange(bytes, 0, digestLength)) + " for " + headState.toAbsolutePath());
+            }
+        } catch (NoSuchAlgorithmException e) {
+            throw new Error("SHA-1 digest not found, should not happen", e);
+        }
+    }
+}
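
With this fallback in place, HeadState.load first tries the new format and, when deserialization fails with an AdapterConstructionException, reads the legacy record through HeadState1.load and converts it with migrate(), which leaves tailChangeSetId at its default of 1. A minimal caller-side sketch (the method name and directory parameter are hypothetical):

    // Sketch: callers keep using HeadState.load; legacy head.state files are migrated transparently
    static long readTailChangeSetId(java.nio.file.Path dbDir) throws InvalidHeadStateException {
        HeadState head = HeadState.load(dbDir);   // falls back to HeadState1.load(dbDir).migrate() for old files
        return head.tailChangeSetId;              // 1 for a freshly migrated legacy state
    }
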
index be505c6039712e322ce80ffa653c1b4c6babac13..1025cc6a5dac0a2e6d5b940b1ad0d17ba971fa00 100644 (file)
@@ -19,7 +19,10 @@ import org.simantics.db.Database;
 import org.simantics.db.DatabaseUserAgent;
 import org.simantics.db.ServiceLocator;
 import org.simantics.db.common.utils.Logger;
+import org.simantics.db.exception.SDBException;
+import org.simantics.db.server.DatabaseStartException;
 import org.simantics.db.server.ProCoreException;
+import org.simantics.db.server.internal.InternalException;
 
 /**
  * @author Tuukka Lehtonen
@@ -27,6 +30,8 @@ import org.simantics.db.server.ProCoreException;
 public class AcornDatabase implements Database {
 
     private final Path folder;
+    
+    private GraphClientImpl2 currentClient;
 
     private DatabaseUserAgent userAgent;
 
@@ -190,22 +195,22 @@ public class AcornDatabase implements Database {
 
     @Override
     public void purgeDatabase() throws ProCoreException {
-        // TODO: implement
-        throw new UnsupportedOperationException();
+       if(currentClient == null) throw new IllegalStateException("No current session.");
+       currentClient.purgeDatabase();
     }
 
     @Override
     public long serverGetTailChangeSetId() throws ProCoreException {
-        // "We have it all"
-        // But after purging we don't so beware.
-        // TODO: beware for purge
-        return 1;
+       if(currentClient == null) throw new IllegalStateException("No current session.");
+       return currentClient.getTailChangeSetId();
     }
 
     @Override
     public Session newSession(ServiceLocator locator) throws ProCoreException {
         try {
-            return new GraphClientImpl2(this, folder, locator);
+               if(currentClient != null) throw new DatabaseStartException(folder.toFile(), "A session is already running. Only one session is supported.");
+               currentClient = new GraphClientImpl2(this, folder, locator); 
+            return currentClient;
         } catch (IOException e) {
             throw new ProCoreException(e);
         }
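
AcornDatabase now remembers its single GraphClientImpl2 session, so purgeDatabase() and serverGetTailChangeSetId() are only meaningful after newSession has been called, and a second newSession throws DatabaseStartException. A minimal usage sketch under that assumption (error handling elided; the purge runs asynchronously on the client's saver thread, so the tail id only advances once it has completed):

    // Sketch: purge through the Database interface; requires the one allowed open session
    static void purgeAndReport(Database db, ServiceLocator locator) throws ProCoreException {
        Database.Session session = db.newSession(locator);   // keep this session open while purging
        db.purgeDatabase();                                   // scheduled asynchronously; skipped if the client is closing
        System.out.println("tail change set id: " + db.serverGetTailChangeSetId());
    }
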
index 323d66d3df12ea34d2d429293801eeacfc95a23b..40dbad0397db0ac83639decc09f9661d022fc3a1 100644 (file)
@@ -148,6 +148,12 @@ public class LRU<MapKey,MapValue extends LRUObject<MapKey, MapValue>> {
                }
        }
 
+       
+       
+       public MapValue purge(MapKey id) {
+               return map.remove(id);
+       }
+
        public MapValue get(MapKey key) throws AcornAccessVerificationException {
                
                if(VERIFY) verifyAccess();
index 3194d591e0b37e8712e8cabf3cc1afcb9549a754..508c951067dedc3df895d938b3a0e84f9dda771c 100644 (file)
@@ -243,5 +243,9 @@ public abstract class LRUObject<MapKey, MapValue extends LRUObject<MapKey, MapVa
                if(VERIFY) verifyAccess();
                return readDirectory;
        }
+
+       public void moveTo(Path path) {
+               readDirectory = path;
+       }
        
 }
\ No newline at end of file
index e8a7b62227db8d40ab51bb5ea978af1bb20dc322..25f0d054d5b55670c934940c993048fbcb728c79 100644 (file)
@@ -24,6 +24,7 @@ import org.simantics.browsing.ui.model.tests.Test;
 import org.simantics.browsing.ui.model.visuals.VisualsContribution;
 import org.simantics.db.ReadGraph;
 import org.simantics.db.exception.DatabaseException;
+import org.simantics.db.layer0.exception.PendingVariableException;
 
 /**
  * Produces labels for nodes of given node type.
@@ -31,7 +32,7 @@ import org.simantics.db.exception.DatabaseException;
  */
 public class LabelContribution extends VisualsContribution {
     LabelRule labelRule;
-    
+
     public LabelContribution(NodeType nodeType, Test test, LabelRule labelRule, double priority) throws InvalidContribution {
         super(nodeType, test, priority);
         if(!labelRule.isCompatible(
@@ -40,7 +41,7 @@ public class LabelContribution extends VisualsContribution {
             throw new InvalidContribution("Label rule is not compatible with the content type.");
         this.labelRule = labelRule;
     }
-       
+
     /**
      * Returns a label for the node or null, if contribution is
      * not suitable for the input.
@@ -48,15 +49,15 @@ public class LabelContribution extends VisualsContribution {
     public Map<String, String> getLabel(ReadGraph graph, NodeContext context) {
         Object content = context.getConstant(BuiltinKeys.INPUT);
         try {
-            if(test == null || test.test(graph, content))            
+            if(test == null || test.test(graph, content))
                 return labelRule.getLabel(graph, content);
             else
                 return null;
+        } catch(PendingVariableException e) {
+            return Collections.singletonMap(ColumnKeys.SINGLE, "");
         } catch(DatabaseException e) {
             ErrorLogger.defaultLogError(e);
-               //Logger.defaultLogError(e);
-            // TODO reconsider
             return Collections.singletonMap(ColumnKeys.SINGLE, "");
         }
-    }    
+    }
 }
index 888e5fe4e312af10d98af326966b6e2ad42e7894..538f22478d00f1708a4b0c89e2059adae754f1a4 100644 (file)
@@ -23,6 +23,7 @@ import java.util.Set;
 import java.util.TreeSet;
 
 import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.NullProgressMonitor;
 import org.eclipse.core.runtime.SubMonitor;
 import org.simantics.databoard.Bindings;
 import org.simantics.databoard.Datatypes;
@@ -1234,6 +1235,10 @@ public class Layer0Utils {
        return Layer0.getInstance(graph).String;
     }
 
+    public static void emptyTrashBin() throws ServiceException {
+        emptyTrashBin(new NullProgressMonitor());
+    }
+
     public static void emptyTrashBin(IProgressMonitor monitor) throws ServiceException {
         emptyTrashBin(monitor, SimanticsInternal.getSession(), SimanticsInternal.getProject());
     }
@@ -1281,15 +1286,31 @@ public class Layer0Utils {
                 return;
             mon.subTask("Purging Database");
             mon.newChild(1000);
-            XSupport xs = session.getService(XSupport.class);
-            xs.purge();
+            purgeDatabase(monitor, session);
         } catch (CancelTransactionException e) {
             // Ignore.
         } catch (DatabaseException e) {
             throw new ServiceException(e);
         }
     }
-    
+
+    public static void purgeDatabase() throws ServiceException {
+       purgeDatabase(new NullProgressMonitor());
+    }
+
+    public static void purgeDatabase(final IProgressMonitor monitor) throws ServiceException {
+       purgeDatabase(monitor, SimanticsInternal.getSession());
+    }
+
+    public static void purgeDatabase(final IProgressMonitor monitor, Session session) throws ServiceException {
+       try {
+               XSupport xs = session.getService(XSupport.class);
+               xs.purge();
+       } catch (DatabaseException e) {
+               throw new ServiceException(e);
+       }
+    }
+
     public static Resource getSingleDomainOf(ReadGraph graph, Resource type, Resource target) throws DatabaseException {
        Resource result = null;
        for(Resource candidate : getDomainOf(graph, type).values()) {
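
The new no-argument overloads make both maintenance operations callable without supplying a progress monitor; they run against the current Simantics session and surface database failures as ServiceException. A minimal sketch (the wrapper method name is hypothetical):

    // Sketch: empty the trash bin, then purge, using the new no-monitor overloads
    static void cleanUpDatabase() throws ServiceException {
        Layer0Utils.emptyTrashBin();    // equivalent to emptyTrashBin(new NullProgressMonitor())
        Layer0Utils.purgeDatabase();    // delegates to XSupport.purge() on the current session
    }
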
diff --git a/bundles/org.simantics.graph.compiler/src/org/simantics/graph/compiler/internal/resourceFiles/FilterCRWriter.java b/bundles/org.simantics.graph.compiler/src/org/simantics/graph/compiler/internal/resourceFiles/FilterCRWriter.java
new file mode 100644 (file)
index 0000000..2e96675
--- /dev/null
@@ -0,0 +1,47 @@
+package org.simantics.graph.compiler.internal.resourceFiles;
+
+import java.io.FilterWriter;
+import java.io.IOException;
+import java.io.Writer;
+
+public class FilterCRWriter extends FilterWriter {
+
+    public FilterCRWriter(Writer out) {
+        super(out);
+    }
+    
+    @Override
+    public void write(int c) throws IOException {
+        if(c != '\r')
+            out.write(c);
+    }
+    
+    @Override
+    public void write(char[] cbuf, int off, int len) throws IOException {
+        int begin = 0;
+        for(int i=0;i<len;++i) {
+            if(cbuf[off+i] == '\r') {
+                if(i > begin)
+                    out.write(cbuf, off+begin, i-begin);
+                begin = i+1;
+            }
+        }
+        if(len > begin)
+            out.write(cbuf, off+begin, len-begin);
+    }
+    
+    @Override
+    public void write(String str, int off, int len) throws IOException {
+        int begin = 0;
+        for(int i=0;i<len;++i) {
+            if(str.charAt(off+i) == '\r') {
+                if(i > begin)
+                    out.write(str, off+begin, i-begin);
+                begin = i+1;
+            }
+        }
+        if(len > begin)
+            out.write(str, off+begin, len-begin);
+    }
+
+}
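
FilterCRWriter drops every carriage return it is asked to write and forwards the remaining characters to the wrapped Writer (the clean segments go straight to out to avoid re-entering the filter), so generated resource files stay LF-only regardless of the platform's line separator. A minimal usage sketch (java.io imports assumed; the helper name is illustrative):

    // Sketch: CRLF input is written through as LF-only
    static String stripCarriageReturns(String text) throws IOException {
        StringWriter sink = new StringWriter();
        try (Writer w = new FilterCRWriter(sink)) {
            w.write(text);              // e.g. "a\r\nb\r\n" ends up as "a\nb\n" in the sink
        }
        return sink.toString();
    }
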
index 25758343d59751d3dd2a5d28d2aac8d950485ebe..d9d155037e1eadd3b624cca948bcd95394676afb 100644 (file)
@@ -6,6 +6,7 @@ import java.io.OutputStreamWriter;
 import java.io.StringWriter;
 import java.io.Writer;
 import java.net.URL;
+import java.nio.charset.Charset;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
@@ -19,6 +20,8 @@ import freemarker.template.DefaultObjectWrapper;
 import freemarker.template.Template;
 
 public class ResourceFile implements IResourceFile {
+    private final static Charset UTF8 = Charset.forName("UTF-8");
+    
        String packageName;
        String className;
        List<ResourceRef> resources;
@@ -88,8 +91,8 @@ public class ResourceFile implements IResourceFile {
        @Override
        public InputStream getContent() {
                StringWriter writer = new StringWriter();
-               write(writer);
-               return new ByteArrayInputStream(writer.toString().getBytes());
+               write(new FilterCRWriter(writer));
+               return new ByteArrayInputStream(writer.toString().getBytes(UTF8));
        }
        
        /* (non-Javadoc)
index 280f6ebbf7286397a867b8f96e67fdfbc9c8df04..e42af1878214e75fdf90378fdafa57631b33aa57 100644 (file)
@@ -312,6 +312,8 @@ importJava "org.simantics.db.layer0.util.Layer0Utils" where
     sortByCluster :: [a] -> (a->Resource) -> <ReadGraph> [a]
     makeSynchronous :: Boolean -> <ReadGraph> ()
     listOntologies :: () -> <ReadGraph> [Resource]
+    emptyTrashBin :: () -> <Proc> ()
+    purgeDatabase :: () -> <Proc> ()
 
     @private
     @JavaName copyTo
index 53d5638b242daabe6e2f682d3514091163a1a2cd..f1ba40ed48468bdc5d834b193578a12c894beede 100644 (file)
@@ -12,7 +12,8 @@ Require-Bundle: org.simantics.ui;bundle-version="1.0.0",
  org.simantics.modeling.ontology;bundle-version="1.2.0",
  org.simantics.fastlz;bundle-version="1.2.1",
  org.apache.commons.compress;bundle-version="1.7.0",
- org.simantics.lz4;bundle-version="1.3.0"
+ org.simantics.lz4;bundle-version="1.3.0",
+ org.slf4j.api;bundle-version="1.7.0"
 Export-Package: org.simantics.simulation,
  org.simantics.simulation.data,
  org.simantics.simulation.experiment,
index ec56854751f55bc5d5666cb29422b6a7ff10e184..613346b1147830521336c31d7a64c761e0f6da60 100644 (file)
@@ -37,12 +37,16 @@ import org.simantics.simulation.experiment.IExperimentListener;
 import org.simantics.simulation.model.IModel;
 import org.simantics.ui.workbench.WorkbenchShutdownService;
 import org.simantics.utils.datastructures.ListenerList;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Simple local ExperimentManager implementation
  */
 public class ExperimentManager implements IExperimentManager {
 
+    private static final Logger LOGGER = LoggerFactory.getLogger(ExperimentManager.class);
+
     CopyOnWriteArrayList<IExperimentManagerListener> listeners = new CopyOnWriteArrayList<IExperimentManagerListener>();
     ListenerList<IExperiment> experiments = new ListenerList<IExperiment>(IExperiment.class);
     IExperiment activeExperiment;
@@ -218,9 +222,9 @@ public class ExperimentManager implements IExperimentManager {
 
             if (!listeners.isEmpty()) {
                 // Some clients are leaking listeners. Shame on them.
-                System.err.println("ExperimentManager still contains the following listeners after disposal:");
+                LOGGER.warn("ExperimentManager still contains the following listeners after disposal:");
                 for (IExperimentManagerListener listener : listeners)
-                    System.err.println("\t" + listener);
+                    LOGGER.warn("\t" + listener);
             }
         }
     }
index dfa20f265dd3b23f97fcda08e4b721993edac93b..b234acd5cc5bf988c7073d2c48370e383fecb052 100644 (file)
          version="0.0.0"
          unpack="false"/>
 
+  <plugin
+         id="org.eclipse.jetty.http"
+         download-size="0"
+         install-size="0"
+         version="0.0.0"
+         unpack="false"/>
+
+   <plugin
+         id="org.eclipse.jetty.server"
+         download-size="0"
+         install-size="0"
+         version="0.0.0"
+         unpack="false"/>
+
+   <plugin
+         id="org.eclipse.jetty.servlet"
+         download-size="0"
+         install-size="0"
+         version="0.0.0"
+         unpack="false"/>
+
+   <plugin
+         id="org.eclipse.jetty.util"
+         download-size="0"
+         install-size="0"
+         version="0.0.0"
+         unpack="false"/>
+
+   <plugin
+         id="org.eclipse.jetty.io"
+         download-size="0"
+         install-size="0"
+         version="0.0.0"
+         unpack="false"/>
+
+   <plugin
+         id="org.eclipse.jetty.security"
+         download-size="0"
+         install-size="0"
+         version="0.0.0"
+         unpack="false"/>
+
+   <plugin
+         id="org.eclipse.ui.cheatsheets"
+         download-size="0"
+         install-size="0"
+         version="0.0.0"
+         unpack="false"/>
+
+   <plugin
+         id="javax.servlet"
+         download-size="0"
+         install-size="0"
+         version="0.0.0"
+         unpack="false"/>
+
 </feature>