import java.util.Collection;
import java.util.Formatter;
import java.util.Locale;
+import java.util.TreeMap;
import org.simantics.databoard.Bindings;
import org.simantics.databoard.Files;
}
public static TransferableGraph1 read(InputStream stream) throws AdaptException, IOException {
- DataContainer container = DataContainers.readFile(new DataInputStream(stream));
- stream.close();
- return (TransferableGraph1)container.content.getValue(TransferableGraph1.BINDING);
+ try (InputStream in = stream) {
+ DataContainer container = DataContainers.readFile(new DataInputStream(in));
+ return (TransferableGraph1)container.content.getValue(TransferableGraph1.BINDING);
+ }
}
public static InputStream write(TransferableGraph1 tg) throws BindingException, IOException {
Binding binding = TransferableGraph1.BINDING;
int hashCode = binding.hashValue(tg);
- tg.extensions.put(Extensions.CACHED_HASHCODE, new Variant(Bindings.INTEGER, hashCode));
- try {
- byte[] buffer = DataContainers.writeFile(
- new DataContainer("graph", 1, new Variant(TransferableGraph1.BINDING, tg))
- );
- return new ByteArrayInputStream(buffer);
- } finally {
- tg.extensions.remove(Extensions.CACHED_HASHCODE);
- }
+ TreeMap<String, Variant> metadata = new TreeMap<>();
+ metadata.put(Extensions.CACHED_HASHCODE, new Variant(Bindings.INTEGER, hashCode));
+ byte[] buffer = DataContainers.writeFile(
+ new DataContainer("graph", 1, metadata, new Variant(binding, tg))
+ );
+ return new ByteArrayInputStream(buffer);
+ }
+
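+ /**
+ * Writes the given transferable graph into <code>file</code> as a data container
+ * in format "graph", version 1, storing the computed hash code in the container
+ * metadata under {@link Extensions#CACHED_HASHCODE}.
+ */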
+ public static void write(File file, TransferableGraph1 tg) throws BindingException, IOException {
+ Binding binding = TransferableGraph1.BINDING;
+ int hashCode = binding.hashValue(tg);
+ TreeMap<String, Variant> metadata = new TreeMap<>();
+ metadata.put(Extensions.CACHED_HASHCODE, new Variant(Bindings.INTEGER, hashCode));
+ DataContainers.writeFile(file, new DataContainer("graph", 1, metadata, new Variant(binding, tg)));
}
public static CompilationResult compile(
/**
* Used for storing a cached hash code computed from a
- * {@link TransferableGraph1} instance <em>without</em> this cached hashcode
- * key,value pair in the <code>extensions</code> map.
+ * {@link TransferableGraph1} instance in the metadata map of the
+ * <code>DataContainer</code> containing the TG.
*/
public final static String CACHED_HASHCODE = "cached.hashCode";
*******************************************************************************/
package org.simantics.project.management;
+import java.util.function.Supplier;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import org.simantics.databoard.Bindings;
-import org.simantics.databoard.binding.Binding;
import org.simantics.databoard.binding.error.BindingException;
import org.simantics.databoard.binding.error.RuntimeBindingException;
import org.simantics.db.ReadGraph;
import org.simantics.db.exception.DatabaseException;
import org.simantics.graph.representation.TransferableGraph1;
import org.simantics.layer0.DatabaseManagementResource;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* GraphBundle represents a bundle graph that may exist in memory
*/
public class GraphBundle implements Comparable<GraphBundle> {
+ private static final Logger LOGGER = LoggerFactory.getLogger(GraphBundle.class);
+
/** Versioned Id pattern */
static String ID_PATTERN_STRING = "[a-zA-Z_0-9\\-]+(?:\\.[a-zA-Z_0-9\\-]+)*";
static String VERSION_PATTERN_STRING = "(\\d+).(\\d+).(\\d+).([a-zA-Z_0-9\\-]+)";
/** User-friendly name */
String name;
+ /** If {@link #graph} is null then this may be defined to fetch the data on-demand */
+ Supplier<TransferableGraph1> graphSource;
+
/** Actual graph */
TransferableGraph1 graph;
boolean immutable = true;
GraphBundle() {}
-
+
public GraphBundle(String name, TransferableGraph1 data, String versionedId)
- throws RuntimeBindingException {
- try {
+ throws RuntimeBindingException {
+ try {
// Assert version id is correct
Matcher m = VERSIONED_ID_PATTERN.matcher(versionedId);
if (!m.matches()) {
throw new IllegalArgumentException("Illegal VersionId \""+versionedId+"\", <id>/<major.minor.micro.qualifier> is expected.");
}
-
- Binding binding = Bindings.getBindingUnchecked( TransferableGraph1.class );
-
+
this.name = name;
- this.graph = data;
- this.hashcode = data != null ? binding.hashValue( data ) : 0;
+ this.graph = data;
+ this.hashcode = hash(data);
this.id = m.group(1);
this.major = Integer.valueOf( m.group(2) );
this.minor = Integer.valueOf( m.group(3) );
} catch (BindingException e) {
// Unexpected
throw new RuntimeBindingException(e);
- }
+ }
}
-
+
public GraphBundle(String name, TransferableGraph1 data, String id, String version)
- throws RuntimeBindingException {
+ throws RuntimeBindingException {
Matcher m = ID_PATTERN.matcher(id);
if (!m.matches())
throw new IllegalArgumentException("Illegal Id, got \""+id+"\"");
m = VERSION_PATTERN.matcher(version);
if (!m.matches())
throw new IllegalArgumentException("Illegal Version, got \""+id+"\", <id>/<major.minor.micro.qualifier> is expected.");
- try {
- Binding binding = Bindings.getBindingUnchecked( TransferableGraph1.class );
+ try {
this.name = name;
- this.graph = data;
- this.hashcode = binding.hashValue( data );
+ this.graph = data;
+ this.hashcode = hash(data);
this.id = id;
this.major = Integer.valueOf( m.group(1) );
this.minor = Integer.valueOf( m.group(2) );
throw new RuntimeBindingException(e);
}
}
-
+
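+ /**
+ * Constructs a graph bundle whose transferable graph data is loaded lazily
+ * through <code>source</code>. The hash code is not computed from the data but
+ * taken from the given pre-computed value.
+ */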
+ public GraphBundle(String name, Supplier<TransferableGraph1> source, int hashCode, String id, String version) {
+ Matcher m = ID_PATTERN.matcher(id);
+ if (!m.matches())
+ throw new IllegalArgumentException("Illegal Id, got \""+id+"\"");
+ m = VERSION_PATTERN.matcher(version);
+ if (!m.matches())
+ throw new IllegalArgumentException("Illegal Version, got \""+version+"\", <major.minor.micro.qualifier> is expected.");
+ this.name = name;
+ this.graphSource = source;
+ this.hashcode = hashCode;
+ this.id = id;
+ this.major = Integer.valueOf( m.group(1) );
+ this.minor = Integer.valueOf( m.group(2) );
+ this.service = Integer.valueOf( m.group(3) );
+ if (m.group(4) != null) {
+ this.qualifier = m.group(4);
+ }
+ }
+
+ private int hash(TransferableGraph1 data) throws BindingException {
+ return data == null ? 0 : TransferableGraph1.BINDING.hashValue( data );
+ }
+
public String getName() {
return name;
}
*/
public TransferableGraph1 getGraph() {
if (graph == null) {
- ReadGraph g = Transaction.readGraph();
- if (g == null)
- throw new IllegalStateException("No read transaction available");
- try {
- Binding tg_binding = Bindings.getBindingUnchecked( TransferableGraph1.class );
- DatabaseManagementResource DatabaseManagement = DatabaseManagementResource.getInstance(g);
- graph = g.getRelatedValue(resource, DatabaseManagement.HasFile, tg_binding);
- } catch (DatabaseException e) {
- e.printStackTrace();
+ if (graphSource != null) {
+ graph = graphSource.get();
+ }
+ if (graph == null) {
+ ReadGraph g = Transaction.readGraph();
+ if (g == null)
+ throw new IllegalStateException("No read transaction available");
+ try {
+ graph = readTg(g);
+ } catch (DatabaseException e) {
+ LOGGER.error("Failed to read transferable graph from " + resource, e);
+ }
}
}
return graph;
graph = processor.syncRequest(new ResourceRead<TransferableGraph1>(resource) {
@Override
public TransferableGraph1 perform(ReadGraph graph) throws DatabaseException {
- Binding tg_binding = Bindings.getBindingUnchecked( TransferableGraph1.class );
- DatabaseManagementResource DatabaseManagement = DatabaseManagementResource.getInstance(graph);
- return graph.getRelatedValue(resource, DatabaseManagement.HasFile, tg_binding);
+ return readTg(graph);
}
});
} catch (DatabaseException e) {
- e.printStackTrace();
+ LOGGER.error("Failed to read transferable graph from " + resource, e);
}
}
return graph;
}
-
+
+ private TransferableGraph1 readTg(ReadGraph graph) throws DatabaseException {
+ DatabaseManagementResource DatabaseManagement = DatabaseManagementResource.getInstance(graph);
+ return graph.getRelatedValue(resource, DatabaseManagement.HasFile, TransferableGraph1.BINDING);
+ }
+
public int getHashcode() {
return hashcode;
}
import java.text.SimpleDateFormat;
import java.util.Date;
+import java.util.function.Supplier;
import org.eclipse.equinox.p2.metadata.IVersionedId;
import org.eclipse.equinox.p2.metadata.Version;
VersionedId vid;
GraphBundleEx(GraphBundle e) {
+ this.graphSource = e.graphSource;
this.graph = e.graph;
this.resource = e.resource;
this.hashcode = e.hashcode;
this.immutable = isImmutable;
}
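+ /**
+ * Constructs an on-demand graph bundle from a p2 versioned id; the transferable
+ * graph is fetched through <code>source</code> only when first requested.
+ */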
+ public GraphBundleEx(String name, Supplier<TransferableGraph1> source, int hashValue, IVersionedId vid, boolean isImmutable)
+ throws RuntimeBindingException
+ {
+ super(name, source, hashValue, vid.getId(), vid.getVersion().getSegment(0).toString()+"."+vid.getVersion().getSegment(1).toString()+"."+vid.getVersion().getSegment(2).toString()+"."+vid.getVersion().getSegment(3).toString());
+ this.vid = new VersionedId(id, vid.getVersion());
+ this.immutable = isImmutable;
+ }
+
public GraphBundleEx(String name, TransferableGraph1 data, IVersionedId vid)
throws RuntimeBindingException
{
import java.io.BufferedInputStream;
import java.io.Closeable;
-import java.io.DataInput;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Enumeration;
+import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
-import java.util.concurrent.CompletableFuture;
-import java.util.concurrent.ExecutionException;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.function.Supplier;
import java.util.jar.Attributes;
import java.util.jar.Manifest;
import java.util.stream.Collectors;
import org.eclipse.equinox.p2.metadata.Version;
import org.eclipse.equinox.p2.metadata.VersionedId;
import org.osgi.framework.Bundle;
+import org.simantics.databoard.Bindings;
+import org.simantics.databoard.adapter.AdaptException;
import org.simantics.databoard.binding.Binding;
import org.simantics.databoard.binding.mutable.Variant;
import org.simantics.databoard.container.DataContainer;
import org.simantics.databoard.container.DataContainers;
+import org.simantics.databoard.container.FormatHandler;
import org.simantics.graph.compiler.CompilationResult;
import org.simantics.graph.compiler.GraphCompiler;
import org.simantics.graph.compiler.GraphCompilerPreferences;
import org.simantics.graph.compiler.ValidationMode;
+import org.simantics.graph.representation.Extensions;
import org.simantics.graph.representation.TransferableGraph1;
import org.simantics.ltk.FileSource;
import org.simantics.ltk.ISource;
import org.simantics.ltk.Problem;
import org.simantics.scl.reflection.OntologyVersions;
+import org.simantics.utils.datastructures.ArrayMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public static Bundle[] getBundles() {
return PlatformActivator.getContext().getBundles();
}
-
+
/**
* Get the manifest file of a bundle
*
public static Manifest getManifest(Bundle bundle) throws IOException {
URL url = bundle.getEntry("META-INF/MANIFEST.MF");
if (url==null) return null;
- InputStream is = url.openStream();
- try {
- return new Manifest(is);
- } finally {
- is.close();
+ try (InputStream is = url.openStream()) {
+ return new Manifest(is);
}
}
-
+
/**
* Get the manifest file of a bundle
*
public static Manifest getSimanticsManifest(Bundle bundle) throws IOException {
URL url = bundle.getEntry("META-INF/SIMANTICS.MF");
if (url==null) return null;
- InputStream is = url.openStream();
- try {
- return new Manifest(is);
- } finally {
- is.close();
+ try (InputStream is = url.openStream()) {
+ return new Manifest(is);
}
}
for (Entry<Object, Object> entry2 : attributes.entrySet()) {
Object key = entry2.getKey();
if (key.toString().contains("Installable-Unit")) {
- String bid = entry2.getValue().toString();
+ String bid = entry2.getValue().toString();
list.add( bid );
}
}
- }
+ }
}
-
+
/**
* Get all transferable graphs in the platform
*
org.osgi.framework.Version osgiVersion = bundle.getVersion();
Version p2Version = Version.createOSGi(osgiVersion.getMajor(), osgiVersion.getMinor(), osgiVersion.getMicro(), osgiVersion.getQualifier());
String id = bundle.getSymbolicName();
-
+
TGInfo info = new TGInfo();
info.location = e.nextElement();
info.bundle = bundle;
//ignore
}
}
-
+
private static File copyResource(URL url, File targetFile) throws IOException, FileNotFoundException {
FileOutputStream os = null;
InputStream is = null;
uncheckedClose(is);
}
}
-
+
private static File extractLib(URL libURL, String libName) throws FileNotFoundException, IOException {
String tmpDirStr = System.getProperty("java.io.tmpdir");
if (tmpDirStr == null)
File libFile = new File(tmpDir, libName);
return copyResource(libURL, libFile);
}
-
+
private static File url2file(URL url, String fileName) {
if ("file".equals(url.getProtocol())) {
try {
}
return null;
}
-
+
public static void compile(Bundle b) throws Exception {
-
- Collection<ISource> sources = new ArrayList<ISource>();
- Collection<TransferableGraph1> dependencies = new ArrayList<TransferableGraph1>();
-
+
+ Collection<ISource> sources = new ArrayList<>();
+ Collection<TransferableGraph1> dependencies = new ArrayList<>();
+
for (Bundle b2 : getBundles()) {
if(b.equals(b2)) continue;
URL url = b2.getEntry("graph.tg");
File graphFile = url2file(FileLocator.resolve(b2.getEntry("/graph.tg")), b2.toString());
dependencies.add(GraphCompiler.read(graphFile));
}
-
+
File bundleFile = FileLocator.getBundleFile(b);
if(bundleFile.isDirectory()) {
File folder = new File(bundleFile, "dynamicGraph");
for(File f : folder.listFiles(new FilenameFilter() {
-
+
@Override
public boolean accept(File dir, String name) {
return name.endsWith(".pgraph");
}
-
+
})) {
sources.add(new FileSource(f));
}
- }
-
+ }
+
// System.out.println("source is " + tmpFile.getAbsolutePath());
-
+
final StringBuilder errorStringBuilder = new StringBuilder();
GraphCompilerPreferences prefs = new GraphCompilerPreferences();
prefs.validate = true;
prefs.validateResourceHasType = ValidationMode.ERROR;
String currentLayer0Version = OntologyVersions.getInstance().currentOntologyVersion("http://www.simantics.org/Layer0-0.0");
CompilationResult result = GraphCompiler.compile(currentLayer0Version, sources, dependencies, null, prefs);
-
+
for(Problem problem : result.getErrors())
errorStringBuilder.append(problem.getLocation() + ": " + problem.getDescription() + "\n");
for(Problem problem : result.getWarnings())
if(errorStringBuilder.length() > 0) {
LOGGER.error(errorStringBuilder.toString());
} else {
- DataContainers.writeFile(new File(bundleFile, "graph.tg"),
- new DataContainer("graph", 1, new Variant(TransferableGraph1.BINDING, result.getGraph())));
+ GraphCompiler.write(new File(bundleFile, "graph.tg"), result.getGraph());
}
-
+
}
-
+
/**
* Compile all dynamic ontologies in the Platform
*
}
}
}
-
+
/**
* Get all graphs in the Platform
*
* @param collection
* @throws IOException
*/
- public static Collection<GraphBundle> getAllGraphs() throws IOException {
- CompletableFuture<Object> f = new CompletableFuture<>();
- Bundle[] bundles = getBundles();
- Collection<GraphBundle> gbundles = Arrays.stream(bundles).map(t -> { // this could be done in parallel in the future?
- if (f.isCompletedExceptionally())
- return null;
- try {
- return PlatformUtil.getGraph(t);
- } catch (IOException e) {
- if (LOGGER.isDebugEnabled())
- LOGGER.debug("Could not get graph {}", t, e);
- f.completeExceptionally(e);
- return null;
- }
- }).filter(Objects::nonNull).collect(Collectors.toList());
- if (f.isCompletedExceptionally()) {
- try {
- f.get();
- } catch (ExecutionException | InterruptedException e) {
- throw (IOException) e.getCause();
- }
- }
- return gbundles;
- }
+ public static Collection<GraphBundle> getAllGraphs() throws IOException {
+ AtomicReference<IOException> problem = new AtomicReference<>();
+
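+ // Bundle graphs are read in parallel; the first IOException is recorded,
+ // remaining bundles are skipped and the recorded exception is rethrown below.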
+ Collection<GraphBundle> gbundles = Arrays.stream(getBundles())
+ .parallel()
+ .map(b -> {
+ try {
+ return problem.get() == null ? getGraph(b) : null;
+ } catch (IOException e) {
+ if (LOGGER.isDebugEnabled())
+ LOGGER.debug("Could not get graph from bundle {}", b, e);
+ problem.set(e);
+ return null;
+ }
+ })
+ .filter(Objects::nonNull)
+ .collect(Collectors.toList());
+
+ if (problem.get() != null)
+ throw problem.get();
+ return gbundles;
+ }
/**
* Get bundle
if (bundle == null) return null;
return getGraph( bundle );
}
-
+
/**
* Read the graph in a graph bundle. Graph is read from "graph.tg" file in the root.
*
*/
public static GraphBundleEx getGraph(Bundle bundle) throws IOException {
URL url = bundle.getEntry("graph.tg");
-
- if (url==null) return null;
- InputStream is = url.openStream();
- // NOTE: this is vital for performance.
- is = new BufferedInputStream(is, 128*1024);
+ if (url == null)
+ return null;
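+ // Prefer the on-demand variant that only reads the data container header;
+ // fall back to reading the complete transferable graph.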
+ GraphBundleEx result = tryGetOnDemandGraph(bundle, url);
+ return result != null ? result : getCompleteGraph(bundle, url);
+ }
+
+ private static GraphBundleEx getCompleteGraph(Bundle bundle, URL url) throws IOException {
try {
- DataInput dis = new DataInputStream(is);
- // or
- // dis = new InputStreamReadable(is, <max limit>) to protect from OOM
-
- org.simantics.databoard.container.DataContainer container =
- DataContainers.readFile(dis);
-
- Binding binding = TransferableGraph1.BINDING;
- TransferableGraph1 graph = (TransferableGraph1)container.content.getValue(binding);
-// TransferableGraph1 graph = (TransferableGraph1) Files.readFile(is, binding);
-// System.out.println("getGraph(" + bundle.getSymbolicName() + "): read transferable graph in " + (System.nanoTime()-start)*1e-6 + "ms");
- org.osgi.framework.Version osgiVersion = bundle.getVersion();
- Version p2Version = Version.createOSGi(osgiVersion.getMajor(), osgiVersion.getMinor(), osgiVersion.getMicro(), osgiVersion.getQualifier());
String id = bundle.getSymbolicName();
- VersionedId vid = new VersionedId(id, p2Version);
- String name = (String) bundle.getHeaders().get("Bundle-Name");
- if (name == null) name = id;
- String immutable = (String) bundle.getHeaders().get("Immutable");
- boolean isImmutable =
- immutable != null ?
- "true".equals(immutable) :
- true;
-
-// System.out.println("getGraph(" + bundle.getSymbolicName() + "): before hashcode calculation in " + (System.nanoTime()-start)*1e-6 + "ms");
- GraphBundleEx entry = new GraphBundleEx(name, graph, vid, isImmutable);
-// System.out.println("getGraph(" + bundle.getSymbolicName() + "): completed in " + (System.nanoTime()-start)*1e-6 + "ms");
- return entry;
+ return new GraphBundleEx(
+ getBundleName(bundle, id),
+ readTG(url),
+ new VersionedId(id, toP2Version(bundle)),
+ isImmutable(bundle));
} catch (Exception e) {
throw new IOException("Problem loading graph.tg from bundle " + bundle.getSymbolicName(), e);
} catch (Error e) {
LOGGER.error("Serious problem loading graph.tg from bundle " + bundle.getSymbolicName(), e);
throw e;
- } finally {
- is.close();
}
}
+ /**
+ * Attempts to create the graph bundle by reading only the data container header
+ * of the bundle's "graph.tg". If the header contains a cached hash code, the
+ * transferable graph itself is left to be loaded on demand.
+ *
+ * @param bundle
+ * @param url location of the bundle's "graph.tg" entry
+ * @return on-demand graph bundle, or <tt>null</tt> if no cached hash code is available
+ * @throws IOException
+ */
+ private static GraphBundleEx tryGetOnDemandGraph(Bundle bundle, URL url) throws IOException {
+ try {
+ Integer cachedHash = readCachedHash(url);
+ if (LOGGER.isDebugEnabled())
+ LOGGER.debug("Read cached hash code from {}: {}", bundle, cachedHash);
+ if (cachedHash == null)
+ return null;
+
+ Supplier<TransferableGraph1> graphSource = () -> {
+ try {
+ return readTG(url);
+ } catch (Exception e) {
+ throw new RuntimeException("Problem loading graph.tg from bundle " + bundle.getSymbolicName(), e);
+ } catch (Error e) {
+ LOGGER.error("Serious problem loading graph.tg from bundle " + bundle.getSymbolicName(), e);
+ throw e;
+ }
+ };
+
+ String id = bundle.getSymbolicName();
+
+ return new GraphBundleEx(
+ getBundleName(bundle, id),
+ graphSource,
+ cachedHash,
+ new VersionedId(id, toP2Version(bundle)),
+ isImmutable(bundle));
+ } catch (Exception e) {
+ throw new IOException("Problem loading graph.tg from bundle " + bundle.getSymbolicName(), e);
+ }
+ }
+
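+ // Format handlers for DataContainers.readFile, keyed by "<format>:<version>".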
+ @SuppressWarnings("unchecked")
+ private static Map<String, FormatHandler<TransferableGraph1>> handlers = ArrayMap.make(
+ new String[] {
+ "graph:1"
+ },
+ new FormatHandler<TransferableGraph1>() {
+ @Override
+ public Binding getBinding() {
+ return TransferableGraph1.BINDING;
+ }
+ @Override
+ public TransferableGraph1 process(DataContainer container) throws Exception {
+ return (TransferableGraph1) container.content.getValue(TransferableGraph1.BINDING);
+ }
+ });
+
+ private static TransferableGraph1 readTG(InputStream is) throws Exception {
+ // For an unknown reason this is totally broken when running the TestSCLOsgi
+ // in the SDK Tycho build. It returns incomplete results because the
+ // ReadableByteChannel used by ByteFileReader starts returning 0 unexpectedly.
+// try (TransferableGraphFileReader reader = new TransferableGraphFileReader(is)) {
+// return reader.readTG();
+// }
+ return DataContainers.readFile(new DataInputStream(is), handlers);
+ }
+
+ private static TransferableGraph1 readTG(URL url) throws Exception {
+ try (InputStream is = url.openStream()) {
+ return readTG(is);
+ }
+ }
+
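+ /**
+ * Reads only the data container header (format, version and metadata) from the
+ * given URL without deserializing the transferable graph content.
+ */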
+ private static DataContainer readHeader(URL url) throws IOException {
+ try (InputStream is = url.openStream()) {
+ return DataContainers.readHeader(new DataInputStream(new BufferedInputStream(is, 1 << 14)));
+ }
+ }
+
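+ /**
+ * Returns the hash code stored under {@link Extensions#CACHED_HASHCODE} in the
+ * data container metadata, or <tt>null</tt> if no such entry is present.
+ */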
+ private static Integer readCachedHash(URL url) throws IOException, AdaptException {
+ DataContainer header = readHeader(url);
+ Variant hashCode = header.metadata.get(Extensions.CACHED_HASHCODE);
+ return hashCode != null ? (Integer) hashCode.getValue(Bindings.INTEGER) : null;
+ }
+
+ private static Version toP2Version(Bundle bundle) {
+ org.osgi.framework.Version osgiVersion = bundle.getVersion();
+ return Version.createOSGi(osgiVersion.getMajor(), osgiVersion.getMinor(), osgiVersion.getMicro(), osgiVersion.getQualifier());
+ }
+
+ private static String getBundleName(Bundle bundle, String id) {
+ String name = (String) bundle.getHeaders().get("Bundle-Name");
+ return name != null ? name : id;
+ }
+
+ private static boolean isImmutable(Bundle bundle) {
+ String immutable = (String) bundle.getHeaders().get("Immutable");
+ return immutable != null ? "true".equals(immutable) : true;
+ }
+
public static class TGInfo {
public Bundle bundle;
public URL location;
}
}
- private static void copy(File file, ZipOutputStream zout) throws IOException {
+ public static void copy(File file, OutputStream out) throws IOException {
try (InputStream in = new FileInputStream(file)) {
- copy(in, zout);
+ copy(in, out);
}
}
</run>
</application>
</extension>
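+ <!-- Headless application that packages a workspace database into a baseline
+ ZIP archive (see org.simantics.BaselineCreatorApplication). -->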
+ <extension
+ id="BaselineCreator"
+ name="Baseline Creator"
+ point="org.eclipse.core.runtime.applications">
+ <application
+ cardinality="singleton-global"
+ thread="main"
+ visible="true">
+ <run
+ class="org.simantics.BaselineCreatorApplication">
+ </run>
+ </application>
+ </extension>
</plugin>
--- /dev/null
+package org.simantics;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.URL;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
+
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.IStatus;
+import org.eclipse.core.runtime.NullProgressMonitor;
+import org.eclipse.core.runtime.Platform;
+import org.eclipse.core.runtime.Status;
+import org.eclipse.equinox.app.IApplication;
+import org.eclipse.equinox.app.IApplicationContext;
+import org.eclipse.osgi.service.datalocation.Location;
+import org.simantics.application.arguments.Arguments;
+import org.simantics.application.arguments.IArgumentFactory;
+import org.simantics.application.arguments.IArgumentFactory.StringArgumentFactory;
+import org.simantics.application.arguments.IArgumentFactory.NoValueArgumentFactory;
+import org.simantics.application.arguments.IArguments;
+import org.simantics.application.arguments.SimanticsArguments;
+import org.simantics.internal.Activator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @author Tuukka Lehtonen
+ * @since 1.34.0
+ */
+public class BaselineCreatorApplication implements IApplication {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(BaselineCreatorApplication.class);
+
+ private static final IArgumentFactory<String> OUTPUT = new StringArgumentFactory("-o");
+ private static final IArgumentFactory<Boolean> VERBOSE = new NoValueArgumentFactory("-v");
+
+ IArgumentFactory<?>[] accepted = {
+ SimanticsArguments.RECOVERY_POLICY_FIX_ERRORS,
+ SimanticsArguments.ONTOLOGY_RECOVERY_POLICY_REINSTALL,
+ SimanticsArguments.DISABLE_INDEX,
+ SimanticsArguments.DATABASE_ID,
+ OUTPUT,
+ VERBOSE,
+ };
+
+ private static String currentLocalDateTimeStamp() {
+ return LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HHmm"));
+ }
+
+ private static Path constructOutputPath(Path workspace, IArguments parsedArgs) {
+ if (parsedArgs.contains(OUTPUT)) {
+ return workspace.resolve(parsedArgs.get(OUTPUT));
+ } else {
+ return workspace.resolve(workspace.getFileName().toString() + "-" + currentLocalDateTimeStamp() + ".zip");
+ }
+ }
+
+ private static Path getInstanceLocation() throws CoreException, IOException {
+ Location l = Platform.getInstanceLocation();
+ if (l == null || l.isReadOnly())
+ throw new CoreException(new Status(IStatus.ERROR, Activator.PLUGIN_ID,
+ "Workspace not defined. Use -data <path> argument to define where to place the baselining workspace."));
+
+ URL workspaceUrl = l.getURL();
+ Path workspacePath = new File(workspaceUrl.getPath()).toPath();
+ Files.createDirectories(workspacePath);
+ return workspacePath;
+ }
+
+ @Override
+ public Object start(IApplicationContext context) throws Exception {
+ try {
+ Path workspace = getInstanceLocation();
+
+ String[] args = (String[]) context.getArguments().get("application.args");
+ IArguments parsedArgs = Arguments.parse(args, accepted);
+
+ Path output = constructOutputPath(workspace, parsedArgs);
+
+ // Create database and indexes
+ IProgressMonitor progress = parsedArgs.contains(VERBOSE)
+ ? new TimingProgressMonitor()
+ : new NullProgressMonitor();
+ Simantics.startUpHeadless(parsedArgs, progress);
+ Simantics.shutdown(progress);
+
+ // Create the baseline package file
+ Path actualOutput = DatabaseBaselines.packageBaseline(workspace, output);
+ System.out.println("OK " + actualOutput.toAbsolutePath());
+
+ return IApplication.EXIT_OK;
+ } catch (Exception e) {
+ LOGGER.error("Baseline creation failed.", e);
+ throw e;
+ } finally {
+ System.exit(0);
+ }
+ }
+
+ @Override
+ public void stop() {
+ }
+
+}
--- /dev/null
+package org.simantics;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.time.Instant;
+import java.time.ZoneId;
+import java.time.format.DateTimeFormatter;
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipFile;
+import java.util.zip.ZipOutputStream;
+
+import org.simantics.utils.FileUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @author Tuukka Lehtonen
+ * @since 1.34.0
+ */
+public class DatabaseBaselines {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(DatabaseBaselines.class);
+
+ private static final boolean REQUIRE_INDEX_IN_BASELINE = false;
+
+ private static final String DB_DIRECTORY = "db"; //$NON-NLS-1$
+ private static final String INDEX_DIRECTORY = ".metadata/.plugins/org.simantics.db.indexing"; //$NON-NLS-1$
+
+ private static final DateTimeFormatter TIMESTAMP_FORMAT = DateTimeFormatter.ofPattern("d. MMM yyyy HH:mm:ss");
+
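+ /**
+ * Packages the database directory (and the index directory, if it exists) of
+ * <code>fromWorkspace</code> into a ZIP archive at <code>packageFile</code>.
+ *
+ * @return path of the created package file
+ */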
+ public static Path packageBaseline(Path fromWorkspace, Path packageFile) throws IOException {
+ return compressZip(fromWorkspace, collectBaselinePaths(fromWorkspace), packageFile);
+ }
+
+ private static List<Path> collectBaselinePaths(Path workspace) throws IOException {
+ Path dbPath = workspace.resolve(DB_DIRECTORY);
+ Path indexPath = workspace.resolve(INDEX_DIRECTORY);
+
+ if (!Files.isDirectory(dbPath))
+ throw new IllegalArgumentException("workspace database directory " + dbPath + " does not exist");
+
+ List<Path> paths = Files.walk(dbPath).collect(Collectors.toList());
+ if (Files.isDirectory(indexPath)) {
+ List<Path> indexPaths = Files.walk(indexPath).collect(Collectors.toList());
+ paths.addAll(indexPaths);
+ } else {
+ if (REQUIRE_INDEX_IN_BASELINE)
+ throw new IllegalArgumentException("workspace database index directory " + indexPath + " does not exist");
+ }
+ return paths;
+ }
+
+ private static Path compressZip(Path relativeRoot, List<Path> paths, Path zipFile) throws IOException {
+ if (LOGGER.isDebugEnabled())
+ LOGGER.debug("Compressing " + paths.size() + " path entries into ZIP file " + zipFile);
+ try (ZipOutputStream zout = new ZipOutputStream(Files.newOutputStream(zipFile))) {
+ compressZip(relativeRoot, zout, paths);
+ return zipFile;
+ } finally {
+ if (LOGGER.isDebugEnabled())
+ LOGGER.debug("Compressed " + paths.size() + " entries into " + zipFile);
+ }
+ }
+
+ private static void compressZip(Path relativeRoot, ZipOutputStream zout, List<Path> paths) throws IOException {
+ for (Path p : paths) {
+ Path rp = relativeRoot.relativize(p);
+ // ZIP entry names always use '/' as the separator, regardless of host platform.
+ String name = rp.toString().replace('\\', '/');
+ if (Files.isDirectory(p)) {
+ name = name.endsWith("/") ? name : name + "/";
+ zout.putNextEntry(new ZipEntry(name));
+ } else {
+ zout.putNextEntry(new ZipEntry(name));
+ FileUtils.copy(p.toFile(), zout);
+ zout.closeEntry();
+ }
+ }
+ }
+
+ public static byte[] baselineIndicatorContents(Path path) throws IOException {
+ return String.format("%s%n%s%n",
+ path.toString(),
+ Instant.now().atZone(ZoneId.systemDefault()).format(TIMESTAMP_FORMAT))
+ .getBytes("UTF-8");
+ }
+
+ public static void validateWorkspaceForBaselineInitialization(Path workspaceLocation) throws PlatformException {
+ try {
+ Path db = workspaceLocation.resolve(DB_DIRECTORY);
+ if (Files.exists(db))
+ throw new PlatformException("Database location " + db + " already exists. Cannot re-initialize workspace from baseline.");
+ if (REQUIRE_INDEX_IN_BASELINE) {
+ Path index = workspaceLocation.resolve(INDEX_DIRECTORY);
+ if (!Files.exists(index) || !isEmptyDirectory(index))
+ throw new PlatformException("Index location " + index + " already exists. Cannot re-initialize workspace from baseline.");
+ }
+ } catch (IOException e) {
+ throw new PlatformException("Failed to validate workspace for baseline initialization", e);
+ }
+ }
+
+ private static boolean isEmptyDirectory(Path dir) throws IOException {
+ return Files.walk(dir).count() == 1;
+ }
+
+ public static void validateBaselineFile(Path baseline) throws PlatformException {
+ try (ZipFile zip = new ZipFile(baseline.toFile())) {
+ ZipEntry db = zip.getEntry(DB_DIRECTORY);
+ if (db == null)
+ throw new PlatformException("Baseline archive does not contain database directory '" + DB_DIRECTORY + "'");
+
+ if (REQUIRE_INDEX_IN_BASELINE) {
+ ZipEntry index = zip.getEntry(INDEX_DIRECTORY);
+ if (index == null)
+ throw new PlatformException("Baseline archive does not contain database index directory '" + INDEX_DIRECTORY + "'");
+ }
+ } catch (IOException e) {
+ throw new PlatformException("Failed to validate baseline archive " + baseline, e);
+ }
+ }
+
+ public static void main(String[] args) throws IOException {
+ packageBaseline(Paths.get("D:/temp/desktop/workspace"), Paths.get("d:/temp/desktop/workspace/baseline.zip"));
+ }
+
+}
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
-import java.time.Instant;
-import java.time.ZoneId;
-import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Set;
import java.util.TreeMap;
import java.util.UUID;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipFile;
import org.eclipse.core.runtime.ILog;
import org.eclipse.core.runtime.IProduct;
public void synchronizeOntologies(IProgressMonitor progressMonitor, OntologyRecoveryPolicy ontologyPolicy, boolean requireSynchronize) throws PlatformException {
- if (progressMonitor == null) progressMonitor = new NullProgressMonitor();
-
- final DatabaseManagement mgmt = new DatabaseManagement();
+ SubMonitor monitor = SubMonitor.convert(progressMonitor, 100);
+ monitor.setTaskName("Compile dynamic ontologies");
PlatformUtil.compileAllDynamicOntologies();
String message = "Asserting all ontologies are installed";
LOGGER.info(message);
- progressMonitor.setTaskName(message);
- final Map<GraphBundleRef, GraphBundleEx> platformTGs = new HashMap<GraphBundleRef, GraphBundleEx>();
+ monitor.setTaskName(message);
+
+ DatabaseManagement mgmt = new DatabaseManagement();
+ Map<GraphBundleRef, GraphBundleEx> platformTGs = new HashMap<>();
try {
// Get a list of bundles installed into the database
message = "find installed bundles from database";
- progressMonitor.subTask(message);
+ monitor.subTask(message);
LOGGER.info(message);
- Map<GraphBundleRef, GraphBundleEx> installedTGs = new HashMap<GraphBundleRef, GraphBundleEx>();
+ Map<GraphBundleRef, GraphBundleEx> installedTGs = new HashMap<>();
for (GraphBundle b : session.syncRequest( mgmt.GraphBundleQuery )) {
installedTGs.put(GraphBundleRef.of(b), GraphBundleEx.extend(b));
}
// Get a list of all bundles in the platform (Bundle Context)
message = "load all transferable graphs from platform";
- progressMonitor.subTask(message);
+ monitor.subTask(message);
LOGGER.info(message);
Collection<GraphBundle> tgs = PlatformUtil.getAllGraphs();
message = "extend bundles to compile versions";
- progressMonitor.subTask(message);
+ monitor.subTask(message);
LOGGER.info(message);
for (GraphBundle b : tgs) {
GraphBundleEx gbe = GraphBundleEx.extend(b);
// Compile a list of TGs that need to be installed or reinstalled in the database
message = "check bundle reinstallation demand";
- progressMonitor.subTask(message);
+ monitor.subTask(message);
LOGGER.info(message);
- List<GraphBundleEx> installTGs = new ArrayList<GraphBundleEx>();
+ List<GraphBundleEx> installTGs = new ArrayList<>();
// Create list of TGs to update, <newTg, oldTg>
- Map<GraphBundleEx,GraphBundleEx> reinstallTGs = new TreeMap<GraphBundleEx,GraphBundleEx>();
+ Map<GraphBundleEx,GraphBundleEx> reinstallTGs = new TreeMap<>();
for (Entry<GraphBundleRef, GraphBundleEx> e : platformTGs.entrySet()) {
GraphBundleRef key = e.getKey();
GraphBundleEx platformBundle = e.getValue();
if (ontologyPolicy == OntologyRecoveryPolicy.Merge) {
message = "Merging ontology changes";
- progressMonitor.subTask(message);
+ monitor.subTask(message);
LOGGER.info(message);
// Sort missing TGs into install order
GraphDependencyAnalyzer<GraphBundle> analyzer = new GraphDependencyAnalyzer<GraphBundle>();
log.log(new Status(IStatus.INFO, Activator.PLUGIN_ID, "Merging new version of "+tg.toString()));
startTransaction(session, true);
-
+
//delta.print();
try {
-
-
long[] resourceArray = TransferableGraphs.applyDelta(writeGraph(), oldResources, delta);
tg.setResourceArray(resourceArray);
mgmt.setGraphBundleEntry(tg);
if (mergedOntologies)
DatabaseIndexing.deleteAllIndexes();
}
-
- TimeLogger.log("Ontologies synchronized.");
-
}
session.getService(XSupport.class).setServiceMode(false, false);
}
- progressMonitor.worked(20);
+ monitor.worked(100);
} catch (IOException e) {
throw new PlatformException(e);
} catch (DatabaseException e) {
if (!Files.isRegularFile(baseline))
throw new PlatformException("Specified database baseline archive " + baseline + " does not exist. Cannot initialize workspace database.");
- validateBaselineFile(baseline);
- validateWorkspaceForBaselineInitialization(workspaceLocation);
+ DatabaseBaselines.validateBaselineFile(baseline);
+ DatabaseBaselines.validateWorkspaceForBaselineInitialization(workspaceLocation);
try {
Files.createDirectories(workspaceLocation);
FileUtils.extractZip(baseline.toFile(), workspaceLocation.toFile());
- Files.write(baselineIndicatorFile, baselineIndicatorContents(baselineIndicatorFile));
+ Files.write(baselineIndicatorFile, DatabaseBaselines.baselineIndicatorContents(baselineIndicatorFile));
return true;
} catch (IOException e) {
throw new PlatformException(e);
}
}
- private static final DateTimeFormatter TIMESTAMP_FORMAT = DateTimeFormatter.ofPattern("d. MMM yyyy HH:mm:ss");
-
- private static byte[] baselineIndicatorContents(Path path) throws IOException {
- return String.format("%s%n%s%n",
- path.toString(),
- Instant.now().atZone(ZoneId.systemDefault()).format(TIMESTAMP_FORMAT))
- .getBytes("UTF-8");
- }
-
- private void validateWorkspaceForBaselineInitialization(Path workspaceLocation) throws PlatformException {
- try {
- Path db = workspaceLocation.resolve("db");
- if (Files.exists(db))
- throw new PlatformException("Database location " + db + " already exists. Cannot re-initialize workspace from baseline.");
- Path index = workspaceLocation.resolve(".metadata/.plugins/org.simantics.db.indexing");
- if (!Files.exists(index) || !isEmptyDirectory(index))
- throw new PlatformException("Index location " + index + " already exists. Cannot re-initialize workspace from baseline.");
- } catch (IOException e) {
- throw new PlatformException("Failed to validate workspace for baseline initialization", e);
- }
- }
-
- private static boolean isEmptyDirectory(Path dir) throws IOException {
- return Files.walk(dir).count() == 1;
- }
-
- private void validateBaselineFile(Path baseline) throws PlatformException {
- try (ZipFile zip = new ZipFile(baseline.toFile())) {
- ZipEntry db = zip.getEntry("db");
- if (db == null)
- throw new PlatformException("Baseline archive does not contain database directory 'db'");
- ZipEntry index = zip.getEntry(".metadata/.plugins/org.simantics.db.indexing");
- if (index == null)
- throw new PlatformException("Baseline archive does not contain database index directory '.metadata/.plugins/org.simantics.db.indexing'");
- } catch (IOException e) {
- throw new PlatformException("Failed to validate baseline archive " + baseline, e);
- }
- }
-
/**
* Start-up the platform. The procedure consists of 8 steps. Once everything
* is up and running, all fields are set properly.
// 0.2 Clear VariableRepository.repository static map which holds references to SessionImplDb
VariableRepository.clear();
-
+
// 0.3 Handle baseline database before opening db
+ @SuppressWarnings("unused")
boolean usingBaseline = handleBaselineDatabase();
-
+
// 1. Assert there is a database at <workspace>/db
SessionDescriptor sessionDescriptor = setupDatabase(databaseDriverId, monitor.newChild(200, SubMonitor.SUPPRESS_NONE), workspacePolicy, userAgent);
session = sessionDescriptor.getSession();
TimeLogger.log("Database setup complete");
- // 1.1 Delete all indexes if we cannot be certain they are up-to-date
- // A full index rebuild will be done later, before project activation.
+ // 2. Delete all indexes if we cannot be certain they are up-to-date
+ // A full index rebuild will be done later, before project activation.
XSupport support = session.getService(XSupport.class);
if (support.rolledback()) {
try {
throw new PlatformException(e);
}
}
-
- // 2. Assert all graphs, and correct versions, are installed to the database
- if(!usingBaseline) {
- synchronizeOntologies(monitor.newChild(400, SubMonitor.SUPPRESS_NONE), ontologyPolicy, requireSynchronize);
- TimeLogger.log("Synchronized ontologies");
- }
+
+ // 3. Assert all graphs, and correct versions, are installed to the database
+ synchronizeOntologies(monitor.newChild(400, SubMonitor.SUPPRESS_NONE), ontologyPolicy, requireSynchronize);
+ TimeLogger.log("Synchronized ontologies");
// 4. Assert simantics.cfg exists
boolean installProject = assertConfiguration(monitor.newChild(25, SubMonitor.SUPPRESS_NONE),workspacePolicy);
}
if(loadProject) {
- TimeLogger.log("Load projects");
+ TimeLogger.log("Load project");
+ monitor.setTaskName("Load project");
project = Projects.loadProject(sessionContext.getSession(), SimanticsPlatform.INSTANCE.projectResource);
- monitor.worked(100);
-
sessionContext.setHint(ProjectKeys.KEY_PROJECT, project);
+ monitor.worked(100);
TimeLogger.log("Loading projects complete");
+ monitor.setTaskName("Activate project");
project.activate();
- TimeLogger.log("Project activated");
monitor.worked(100);
+ TimeLogger.log("Project activated");
}
} catch (DatabaseException e) {
<plugin>
<groupId>org.simantics</groupId>
<artifactId>graph-builder-maven-plugin</artifactId>
- <version>0.0.7</version>
+ <version>0.0.9</version>
<executions>
<execution>
<goals><goal>compile-graphs</goal></goals>