-package org.simantics.district.imports;\r
-\r
-import java.io.IOException;\r
-import java.nio.file.Files;\r
-import java.nio.file.Path;\r
-import java.util.ArrayList;\r
-import java.util.HashMap;\r
-import java.util.Iterator;\r
-import java.util.List;\r
-import java.util.Map;\r
-\r
-import org.apache.commons.csv.CSVFormat;\r
-import org.apache.commons.csv.CSVParser;\r
-import org.apache.commons.csv.CSVRecord;\r
-import org.simantics.db.Resource;\r
-\r
-public class DistrictImportUtils {\r
-\r
- private DistrictImportUtils() { }\r
- \r
- \r
- public static Resource importCSVAsLayer(Path csvFile) throws IOException {\r
- \r
- \r
- try (CSVParser parser = CSVFormat.DEFAULT.withFirstRecordAsHeader().parse(Files.newBufferedReader(csvFile))) {\r
- Map<String, Integer> header = parser.getHeaderMap();\r
- System.out.println(header);\r
- }\r
- return null;\r
- }\r
- \r
- public static Map<String, Integer> readCSVHeader(Path source, char delimiter, boolean firstAsHeader) throws IOException {\r
- return readCSVHeader(source, CSVFormat.newFormat(delimiter), firstAsHeader);\r
- }\r
- \r
- public static Map<String, Integer> readCSVHeader(Path source, CSVFormat format, boolean firstAsHeader) throws IOException {\r
- if (firstAsHeader)\r
- format = format.withFirstRecordAsHeader();\r
- try (CSVParser parser = format.parse(Files.newBufferedReader(source))) {\r
- return parser.getHeaderMap();\r
- }\r
- }\r
-\r
-\r
- public static Map<String, Character> getSupportedCSVDelimiterFormats() {\r
- Map<String, Character> delimiters = new HashMap<>();\r
- delimiters.put("COMMA", ',');\r
- delimiters.put("SEMICOMMA", ';');\r
- return delimiters;\r
- }\r
-\r
- public static List<Map<String, String>> readRows(Path source, char delimiter, boolean firstAsHeader, int amount) throws IOException {\r
- return readRows(source, CSVFormat.newFormat(delimiter), firstAsHeader, amount);\r
- }\r
-\r
- public static List<Map<String, String>> readRows(Path source, CSVFormat format, boolean firstAsHeader, int amount) throws IOException {\r
- if (firstAsHeader)\r
- format = format.withFirstRecordAsHeader();\r
- try (CSVParser parser = format.parse(Files.newBufferedReader(source))) {\r
- int start = 0;\r
- List<Map<String, String>> results = new ArrayList<>(amount);\r
- Iterator<CSVRecord> iter = parser.iterator();\r
- while (start < amount && iter.hasNext()) {\r
- CSVRecord record = iter.next();\r
- results.add(record.toMap());\r
- start++;\r
- }\r
- return results;\r
- }\r
- }\r
-\r
-\r
- public static Map<CSVHeader, List<String>> readCSVHeaderAndRows(Path source, char delimiter, boolean firstAsHeader, int amount) throws IOException {\r
- Map<CSVHeader, List<String>> results = new HashMap<>();\r
- CSVFormat format = CSVFormat.newFormat(delimiter);\r
- if (firstAsHeader)\r
- format = format.withFirstRecordAsHeader();\r
- try (CSVParser parser = format.parse(Files.newBufferedReader(source))) {\r
- Map<String, Integer> headers = parser.getHeaderMap();\r
- if (headers != null && !headers.isEmpty()) {\r
- for (int index = 0; index < headers.size(); index++) {\r
- for (String head : headers.keySet()) {\r
- results.put(new CSVHeader(head, index), new ArrayList<>());\r
- }\r
- }\r
- }\r
- \r
- Iterator<CSVRecord> records = parser.iterator();\r
- int rows = 0;\r
- while (rows < amount && records.hasNext()) {\r
- CSVRecord record = records.next();\r
- for (int j = 0; j < record.size(); j++) {\r
- String value = record.get(j);\r
- String header = Integer.toString(j);\r
- CSVHeader csvHeader = new CSVHeader(header, j);\r
- List<String> vals = results.get(csvHeader);\r
- if (vals == null) {\r
- vals = new ArrayList<>();\r
- results.put(csvHeader, vals);\r
- }\r
- vals.add(value);\r
- }\r
- rows++;\r
- }\r
- }\r
- return results;\r
- }\r
- \r
- public static class CSVHeader {\r
-\r
- private final String header;\r
- private final int index;\r
- \r
- public CSVHeader(String header, int index) {\r
- this.header = header;\r
- this.index = index;\r
- }\r
-\r
- public String getHeader() {\r
- return header;\r
- }\r
-\r
- public int getIndex() {\r
- return index;\r
- }\r
- \r
- @Override\r
- public int hashCode() {\r
- final int prime = 31;\r
- int result = 1;\r
- result = prime * result + ((header == null) ? 0 : header.hashCode());\r
- result = prime * result + index;\r
- return result;\r
- }\r
-\r
- @Override\r
- public boolean equals(Object obj) {\r
- if (this == obj)\r
- return true;\r
- if (obj == null)\r
- return false;\r
- if (getClass() != obj.getClass())\r
- return false;\r
- CSVHeader other = (CSVHeader) obj;\r
- if (header == null) {\r
- if (other.header != null)\r
- return false;\r
- } else if (!header.equals(other.header))\r
- return false;\r
- if (index != other.index)\r
- return false;\r
- return true;\r
- }\r
- }\r
-\r
-}\r
+package org.simantics.district.imports;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Function;
+
+import org.apache.commons.csv.CSVFormat;
+import org.apache.commons.csv.CSVParser;
+import org.apache.commons.csv.CSVRecord;
+import org.geotools.geometry.DirectPosition2D;
+import org.geotools.referencing.CRS;
+import org.opengis.geometry.DirectPosition;
+import org.opengis.geometry.MismatchedDimensionException;
+import org.opengis.referencing.FactoryException;
+import org.opengis.referencing.NoSuchAuthorityCodeException;
+import org.opengis.referencing.crs.CoordinateReferenceSystem;
+import org.opengis.referencing.operation.MathTransform;
+import org.opengis.referencing.operation.TransformException;
+import org.simantics.Simantics;
+import org.simantics.databoard.Bindings;
+import org.simantics.db.ReadGraph;
+import org.simantics.db.Resource;
+import org.simantics.db.WriteGraph;
+import org.simantics.db.common.request.ObjectsWithType;
+import org.simantics.db.common.request.UniqueRead;
+import org.simantics.db.common.request.WriteRequest;
+import org.simantics.db.exception.DatabaseException;
+import org.simantics.db.layer0.util.Layer0Utils;
+import org.simantics.diagram.stubs.DiagramResource;
+import org.simantics.district.network.DNEdgeBuilder;
+import org.simantics.district.network.DistrictNetworkUtil;
+import org.simantics.district.network.ontology.DistrictNetworkResource;
+import org.simantics.layer0.Layer0;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.simantics.district.network.DistrictNetworkUtil.ResourceVertex;
+
+import com.vividsolutions.jts.geom.Envelope;
+import com.vividsolutions.jts.index.quadtree.Quadtree;
+
+public class DistrictImportUtils {
+
    // Static utility class — prevent instantiation.
    private DistrictImportUtils() { }

    private static final Logger LOGGER = LoggerFactory.getLogger(DistrictImportUtils.class);
+
+ public static Resource importCSVAsLayer(Path csvFile) throws IOException {
+
+ try (CSVParser parser = CSVFormat.DEFAULT.withFirstRecordAsHeader().parse(Files.newBufferedReader(csvFile))) {
+ Map<String, Integer> header = parser.getHeaderMap();
+ System.out.println(header);
+ }
+ return null;
+ }
+
+ public static Map<String, Integer> readCSVHeader(Path source, char delimiter, boolean firstAsHeader) throws IOException {
+ return readCSVHeader(source, CSVFormat.newFormat(delimiter), firstAsHeader);
+ }
+
+ public static Map<String, Integer> readCSVHeader(Path source, CSVFormat format, boolean firstAsHeader) throws IOException {
+ if (firstAsHeader)
+ format = format.withFirstRecordAsHeader();
+ try (CSVParser parser = format.parse(Files.newBufferedReader(source))) {
+ return parser.getHeaderMap();
+ }
+ }
+
+ public static Map<String, Character> getSupportedCSVDelimiterFormats() {
+ Map<String, Character> delimiters = new HashMap<>();
+ delimiters.put("Comma", ',');
+ delimiters.put("Semicolon", ';');
+ delimiters.put("Tabulator", '\t');
+ return delimiters;
+ }
+
+ public static List<Map<String, String>> readRows(Path source, CSVFormat format, boolean firstAsHeader, int amount) throws IOException {
+ if (firstAsHeader)
+ format = format.withFirstRecordAsHeader();
+ try (CSVParser parser = format.parse(Files.newBufferedReader(source))) {
+ int start = 0;
+ List<Map<String, String>> results = new ArrayList<>(amount);
+ Iterator<CSVRecord> iter = parser.iterator();
+ while (start < amount && iter.hasNext()) {
+ CSVRecord record = iter.next();
+ results.add(record.toMap());
+ start++;
+ }
+ return results;
+ }
+ }
+
+ public static List<CSVRecord> readRows(Path source, char delim, boolean firstAsHeader, int rowAmount) throws IOException {
+ List<CSVRecord> results = new ArrayList<>();
+ AtomicInteger count = new AtomicInteger(0);
+ consumeCSV(source, delim, firstAsHeader, t -> {
+ results.add(t);
+ int current = count.incrementAndGet();
+ return current < rowAmount;
+ });
+ return results;
+ }
+
+ public static void consumeCSV(Path source, char delim, boolean firstAsHeader, Function<CSVRecord, Boolean> consumer) throws IOException {
+ CSVFormat format = CSVFormat.newFormat(delim);
+ if (firstAsHeader) {
+ format = format.withFirstRecordAsHeader();
+ }
+ try (CSVParser parser = format.parse(Files.newBufferedReader(source))) {
+ Iterator<CSVRecord> records = parser.iterator();
+ while (records.hasNext()) {
+ Boolean cont = consumer.apply(records.next());
+ if (!cont) {
+ break;
+ }
+ }
+ }
+ }
+
+
+ public static Map<CSVHeader, List<String>> readCSVHeaderAndRows(Path source, char delimiter, boolean firstAsHeader, int amount) throws IOException {
+ Map<CSVHeader, List<String>> results = new HashMap<>();
+ CSVFormat format = CSVFormat.newFormat(delimiter);
+ if (firstAsHeader)
+ format = format.withFirstRecordAsHeader();
+ try (CSVParser parser = format.parse(Files.newBufferedReader(source))) {
+ Map<String, Integer> headers = parser.getHeaderMap();
+ if (headers != null && !headers.isEmpty()) {
+ for (int index = 0; index < headers.size(); index++) {
+ for (String head : headers.keySet()) {
+ results.put(new CSVHeader(head, index), new ArrayList<>());
+ }
+ }
+ }
+
+ Iterator<CSVRecord> records = parser.iterator();
+ int rows = 0;
+ while (rows < amount && records.hasNext()) {
+ CSVRecord record = records.next();
+ for (int j = 0; j < record.size(); j++) {
+ String value = record.get(j);
+ String header = Integer.toString(j);
+ CSVHeader csvHeader = new CSVHeader(header, j);
+ List<String> vals = results.get(csvHeader);
+ if (vals == null) {
+ vals = new ArrayList<>();
+ results.put(csvHeader, vals);
+ }
+ vals.add(value);
+ }
+ rows++;
+ }
+ }
+ return results;
+ }
+
+ public static class CSVHeader {
+
+ private final String header;
+ private final int index;
+
+ public CSVHeader(String header, int index) {
+ this.header = header;
+ this.index = index;
+ }
+
+ public String getHeader() {
+ return header;
+ }
+
+ public int getIndex() {
+ return index;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + ((header == null) ? 0 : header.hashCode());
+ result = prime * result + index;
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj)
+ return true;
+ if (obj == null)
+ return false;
+ if (getClass() != obj.getClass())
+ return false;
+ CSVHeader other = (CSVHeader) obj;
+ if (header == null) {
+ if (other.header != null)
+ return false;
+ } else if (!header.equals(other.header))
+ return false;
+ if (index != other.index)
+ return false;
+ return true;
+ }
+ }
+
+ public static Collection<String> readDistinctValuesOfColumn(Path source, char delim, int mappingIndex) throws IOException {
+ Set<String> results = new HashSet<>();
+ CSVFormat format = CSVFormat.newFormat(delim);
+ try (CSVParser parser = format.parse(Files.newBufferedReader(source))) {
+ Iterator<CSVRecord> records = parser.iterator();
+ if (records.hasNext())
+ records.next();
+ while (records.hasNext()) {
+ CSVRecord row = records.next();
+ String value = row.get(mappingIndex);
+ results.add(value);
+ }
+ }
+ return results;
+ }
+
    /**
     * Imports district network vertices from the CSV source configured in the
     * given import model, creating one vertex per CSV row on the model's
     * parent diagram.
     * <p>
     * If the model specifies a source CRS, coordinates are transformed to
     * EPSG:4326; the resulting coordinates are stored as (longitude, latitude)
     * via {@link #flipAxes(double[])}. The whole import runs as one write
     * transaction with dependency indexing disabled for performance.
     *
     * @param model import configuration: source file, delimiter, column
     *              mappings, component mappings, source CRS and parent diagram
     * @throws NoSuchAuthorityCodeException if the configured source CRS code is unknown
     * @throws FactoryException if the CRS transform cannot be constructed
     * @throws DatabaseException if the write transaction fails
     */
    public static void importVertices(CSVImportModel model) throws NoSuchAuthorityCodeException, FactoryException, DatabaseException {

        Path csvFile = model.getSource();
        char delim = model.getDelimiter();

        // Column indices resolved by the import UI; -1 means "not mapped".
        int xCoordColumnIndex = model.getXCoordIndex();
        int yCoordColumnIndex = model.getYCoordIndex();
        int zCoordColumnIndex = model.getZCoordIndex();
        int altElevationIndex = model.getAlternativeElevationIndex();
        int supplyTempColumnIndex = model.getSupplyTempIndex();
        int returnTempColumnIndex = model.getReturnTempIndex();
        int supplyPressureColumnIndex = model.getSupplyPressureIndex();
        int returnPressureColumnIndex = model.getReturnPressureIndex();
        int dpIndex = model.getDeltaPressureIndex();
        int dtIndex = model.getDeltaTemperatureIndex();
        int heatPowerIndex = model.getHeatPowerIndex();
        int peakPowerIndex = model.getPeakPowerIndex();
        int valvePositionIndex = model.getValvePositionIndx();
        int nominalHeadMIndex = model.getNominalHeadMIndex();
        int nominalHeadBIndex = model.getNominalHeadBIndex();
        int nominalFlowIndex = model.getNominalFlowIndex();
        int maximumHeadMIndex = model.getMaximumHeadMIndex();
        int heatLoadDsIndex = model.getHeatLoadDsIndex();
        int massFlowIndex = model.getMassFlowIndex();
        int volFlowIndex = model.getVolFlowIndex();
        int velocityIndex = model.getVelocityIndex();
        int flowAreaIndex = model.getFlowAreaIndex();
        int nominalPressureLossIndex = model.getNominalPressureLossIndex();
        int addressIndex = model.getAddressIndex();
        int regionIndex = model.getRegionIndex();

        int mappingColumn = model.getComponentMappingIndex();
        int idColumn = model.getIdIndex();

        String sourceEPSGCRS = model.getSourceCRS();

        MathTransform transform = null;
        boolean doTransform = false;
        // if sourceEPSGCRS is empty || null then ignore transformation
        if (sourceEPSGCRS != null && !sourceEPSGCRS.isEmpty()) {
            CoordinateReferenceSystem sourceCRS = CRS.decode(sourceEPSGCRS);
            CoordinateReferenceSystem targetCRS = CRS.decode("EPSG:4326");
            transform = CRS.findMathTransform(sourceCRS, targetCRS, true);
            doTransform = true;
        }
        // Effectively-final copies for capture in the lambda below.
        final boolean actualDoTransform = doTransform;
        final MathTransform actualTransform = transform;

        Simantics.getSession().syncRequest(new WriteRequest() {

            @Override
            public void perform(WriteGraph graph) throws DatabaseException {
                try {
                    // Disable dependency indexing for the bulk write; re-enabled in finally.
                    Layer0Utils.setDependenciesIndexingDisabled(graph, true);
                    graph.markUndoPoint();

                    DistrictNetworkResource DN = DistrictNetworkResource.getInstance(graph);

                    DistrictImportUtils.consumeCSV(csvFile, delim, true, (Function<CSVRecord, Boolean>) row -> {
                        try {
                            String mappingValue = row.get(mappingColumn);

                            // NOTE(review): parse failures of x/y are NOT caught here —
                            // a malformed coordinate aborts the whole import. Confirm intended.
                            String xCoords = row.get(xCoordColumnIndex);
                            String yCoords = row.get(yCoordColumnIndex);
                            double xCoord = Double.parseDouble(xCoords);
                            double yCoord = Double.parseDouble(yCoords);

                            // Elevation is optional: missing column or empty cell -> 0.
                            double z = 0;
                            if (zCoordColumnIndex != -1) {
                                String zs = row.get(zCoordColumnIndex);

                                if (!zs.isEmpty()) {
                                    try {
                                        z = Double.parseDouble(zs);
                                    } catch (NumberFormatException e1) {
                                        throw new DatabaseException(e1);
                                    }
                                }
                            }

                            double[] coords;
                            if (actualDoTransform) {
                                DirectPosition2D targetPos = new DirectPosition2D();
                                DirectPosition2D sourcePos = new DirectPosition2D(xCoord, yCoord);
                                DirectPosition res = actualTransform.transform(sourcePos, targetPos);
                                coords = res.getCoordinate();
                            } else {
                                coords = new double[] { xCoord, yCoord };
                            }

                            // Switch to (longitude, latitude)
                            flipAxes(coords);

                            Resource vertex = DistrictNetworkUtil.createVertex(graph, model.getParentDiagram(), coords, z, model.getComponentMappings().get(mappingValue));

                            writeStringValue(graph, row, idColumn, vertex, DN.HasId);

                            writeValue(graph, row, altElevationIndex, vertex, DN.Vertex_HasAltElevation);

                            // Optional numeric/string attributes; unmapped (-1) or empty cells are skipped.
                            writeValue(graph, row, supplyTempColumnIndex, vertex, DN.Vertex_HasSupplyTemperature);
                            writeValue(graph, row, returnTempColumnIndex, vertex, DN.Vertex_HasReturnTemperature);
                            writeValue(graph, row, supplyPressureColumnIndex, vertex, DN.Vertex_HasSupplyPressure);
                            writeValue(graph, row, returnPressureColumnIndex, vertex, DN.Vertex_HasReturnPressure);
                            writeValue(graph, row, dpIndex, vertex, DN.Vertex_HasDeltaPressure);
                            writeValue(graph, row, dtIndex, vertex, DN.Vertex_HasDeltaTemperature);
                            writeValue(graph, row, heatPowerIndex, vertex, DN.Vertex_HasHeatPower);
                            writeValue(graph, row, peakPowerIndex, vertex, DN.Vertex_HasPeakPower);
                            writeValue(graph, row, valvePositionIndex, vertex, DN.Vertex_HasValvePosition);
                            writeValue(graph, row, nominalHeadMIndex, vertex, DN.Vertex_HasNominalHeadM);
                            writeValue(graph, row, nominalHeadBIndex, vertex, DN.Vertex_HasNominalHeadB);
                            writeValue(graph, row, nominalFlowIndex, vertex, DN.Vertex_HasNominalFlow);
                            writeValue(graph, row, maximumHeadMIndex, vertex, DN.Vertex_HasMaximumHeadM);
                            writeValue(graph, row, heatLoadDsIndex, vertex, DN.Vertex_HasHeatLoadDs);
                            writeValue(graph, row, massFlowIndex, vertex, DN.Vertex_HasMassFlow);
                            writeValue(graph, row, volFlowIndex, vertex, DN.Vertex_HasVolFlow);
                            writeValue(graph, row, velocityIndex, vertex, DN.Vertex_HasVelocity);
                            writeValue(graph, row, flowAreaIndex, vertex, DN.Vertex_HasFlowArea);
                            writeValue(graph, row, nominalPressureLossIndex, vertex, DN.Vertex_HasNominalPressureLoss);
                            writeStringValue(graph, row, addressIndex, vertex, DN.Vertex_HasAddress);
                            writeStringValue(graph, row, regionIndex, vertex, DN.HasRegion);
                        } catch (DatabaseException | MismatchedDimensionException | TransformException e) {
                            // Wrap checked exceptions so they can cross the Function boundary.
                            throw new RuntimeException(e);
                        }
                        return true;
                    });

                } catch (IOException e) {
                    LOGGER.error("Could not import", e);
                    throw new DatabaseException(e);
                } finally {
                    Layer0Utils.setDependenciesIndexingDisabled(graph, false);
                }
            }
        });
    }
+
    /**
     * Imports district network edges (pipes) from the CSV source configured in
     * the given import model into the model's parent diagram.
     * <p>
     * Rows with an id already seen earlier in the file are skipped
     * (duplicate suppression via {@code writtenIds}). Start/end coordinates
     * are optionally transformed to EPSG:4326 and stored as
     * (longitude, latitude). Existing diagram vertices are first indexed into
     * a {@link Quadtree} (expanded by half the edge padding) so
     * {@link DNEdgeBuilder} can snap edge endpoints to nearby vertices.
     *
     * @param model import configuration: source file, delimiter, column
     *              mappings, component mappings, source CRS, edge padding and
     *              parent diagram
     * @throws NoSuchAuthorityCodeException if the configured source CRS code is unknown
     * @throws FactoryException if the CRS transform cannot be constructed
     * @throws DatabaseException if a database request fails
     */
    public static void importEdges(CSVImportModel model) throws NoSuchAuthorityCodeException, FactoryException, DatabaseException {

        Path csvFile = model.getSource();
        char delim = model.getDelimiter();

        // Ids of edges already written; used to skip duplicate CSV rows.
        Set<String> writtenIds = new HashSet<>();

        // Column indices resolved by the import UI; -1 means "not mapped".
        int startXCoordColumnIndex = model.getStartXCoordIndex();
        int startYCoordColumnIndex = model.getStartYCoordIndex();
        int startZValueColumnIndex = model.getStartZCoordIndex();
        int endXCoordColumnIndex = model.getEndXCoordIndex();
        int endYCoordColumnIndex = model.getEndYCoordIndex();
        int endZValueColumnIndex = model.getEndZCoordIndex();
        int diameterColumnIndex= model.getDiameterIndex();
        int outerDiameterColumnIndex = model.getOuterDiamterIndex();
        int nominalMassFlowIndex = model.getNominalMassFlowIndex();
        int tGroundIndex = model.gettGroundIndex();
        int edgeFlowAreaIndex = model.getEdgeFlowAreaIndex();
        int kReturnIndex = model.getkReturnIndex();
        int kSupplyIndex = model.getkSupplyIndex();
        int lengthIndex = model.getLengthIndex();
        int detailedGeometryIndex = model.getDetailedGeometryIndex();
        int regionIndex = model.getRegionIndex();

        int mappingColumn = model.getComponentMappingIndex();
        int idColumn = model.getIdIndex();

        double padding = model.getEdgePadding();

        String sourceEPSGCRS = model.getSourceCRS();

        MathTransform transform = null;
        boolean doTransform = false;
        // if sourceEPSGCRS is empty || null then ignore transformation
        if (sourceEPSGCRS != null && !sourceEPSGCRS.isEmpty()) {
            CoordinateReferenceSystem sourceCRS = CRS.decode(sourceEPSGCRS);
            CoordinateReferenceSystem targetCRS = CRS.decode("EPSG:4326");
            transform = CRS.findMathTransform(sourceCRS, targetCRS, true);
            doTransform = true;
        }
        // Effectively-final copies for capture in the lambdas below.
        final boolean actualDoTransform = doTransform;
        final MathTransform actualTransform = transform;

        double halfPadding = padding / 2;

        // Index all existing vertices of the diagram into a quadtree; each
        // vertex is inserted with an envelope expanded by halfPadding so
        // nearby edge endpoints can find it.
        Quadtree vv = Simantics.getSession().syncRequest(new UniqueRead<Quadtree>() {

            @Override
            public Quadtree perform(ReadGraph graph) throws DatabaseException {
                Collection<Resource> vertices = graph.syncRequest(new ObjectsWithType(model.getParentDiagram(), Layer0.getInstance(graph).ConsistsOf, DistrictNetworkResource.getInstance(graph).Vertex));
                Quadtree vv = new Quadtree();
                for (Resource vertex : vertices) {
                    double[] coords = graph.getRelatedValue2(vertex, DiagramResource.getInstance(graph).HasLocation, Bindings.DOUBLE_ARRAY);
                    double x1 = coords[0] - halfPadding;
                    double y1= coords[1] - halfPadding;
                    double x2 = coords[0] + halfPadding;
                    double y2= coords[1] + halfPadding;
                    Envelope e = new Envelope(x1, x2, y1, y2);
                    vv.insert(e, new ResourceVertex(vertex, coords, true));
                }
                return vv;
            }
        });

        Simantics.getSession().syncRequest(new WriteRequest() {

            @Override
            public void perform(WriteGraph graph) throws DatabaseException {
                try {
                    // Disable dependency indexing for the bulk write; re-enabled in finally.
                    Layer0Utils.setDependenciesIndexingDisabled(graph, true);
                    graph.markUndoPoint();

                    DistrictNetworkResource DN = DistrictNetworkResource.getInstance(graph);

                    DistrictImportUtils.consumeCSV(csvFile, delim, true, row -> {
                        try {

                            String idValue = row.get(idColumn);
                            if (!writtenIds.contains(idValue)) {
                                writtenIds.add(idValue);
                                String mappingValue = row.get(mappingColumn);

                                String startXCoords = row.get(startXCoordColumnIndex);
                                String startYCoords = row.get(startYCoordColumnIndex);
                                String startZCoords = row.get(startZValueColumnIndex);
                                String endXCoords = row.get(endXCoordColumnIndex);
                                String endYCoords = row.get(endYCoordColumnIndex);
                                String endZCoords = row.get(endZValueColumnIndex);

                                // NOTE(review): parse failures here are not caught and
                                // will abort the import via NumberFormatException.
                                double startXCoord = Double.parseDouble(startXCoords); // make negative
                                double startYCoord = Double.parseDouble(startYCoords);
                                double startZCoord = Double.parseDouble(startZCoords);

                                double endXCoord = Double.parseDouble(endXCoords); // make negative
                                double endYCoord = Double.parseDouble(endYCoords);
                                double endZCoord = Double.parseDouble(endZCoords);

                                double[] startCoords;
                                double[] endCoords;
                                if (actualDoTransform) {
                                    DirectPosition2D startTargetPos = new DirectPosition2D();
                                    DirectPosition2D startSourcePos = new DirectPosition2D(startXCoord, startYCoord);
                                    DirectPosition startRes = actualTransform.transform(startSourcePos, startTargetPos);
                                    startCoords = startRes.getCoordinate();

                                    DirectPosition2D endTargetPos = new DirectPosition2D();
                                    DirectPosition2D endSourcePos = new DirectPosition2D(endXCoord, endYCoord);
                                    DirectPosition endRes = actualTransform.transform(endSourcePos, endTargetPos);
                                    endCoords = endRes.getCoordinate();
                                } else {
                                    startCoords = new double[] { startXCoord , startYCoord };
                                    endCoords = new double[] { endXCoord , endYCoord };
                                }

                                // Switch to (longitude, latitude)
                                flipAxes(startCoords);
                                flipAxes(endCoords);

                                // Edge creation may snap endpoints to existing vertices found in the quadtree.
                                Optional<Resource> oedge = DNEdgeBuilder.create(graph, vv, model.getParentDiagram(), model.getComponentMappings().get(mappingValue), startCoords, startZCoord, endCoords, endZCoord, new double[0], padding, true);
                                if (oedge.isPresent()) {
                                    Resource edge = oedge.get();

                                    writeStringValue(graph, row, idColumn, edge, DN.HasId);

                                    // Optional attributes; unmapped (-1) or empty cells are skipped.
                                    writeValue(graph, row, diameterColumnIndex, edge, DN.Edge_HasDiameter);
                                    writeValue(graph, row, outerDiameterColumnIndex, edge, DN.Edge_HasOuterDiameter);
                                    writeValue(graph, row, nominalMassFlowIndex, edge, DN.Edge_HasNominalMassFlow);
                                    writeValue(graph, row, tGroundIndex, edge, DN.Edge_HasTGround);
                                    writeValue(graph, row, kReturnIndex, edge, DN.Edge_HasKReturn);
                                    writeValue(graph, row, kSupplyIndex, edge, DN.Edge_HasKSupply);
                                    writeValue(graph, row, edgeFlowAreaIndex, edge, DN.Edge_HasFlowArea);
                                    writeValue(graph, row, lengthIndex, edge, DN.Edge_HasLength);
                                    writeStringValue(graph, row, regionIndex, edge, DN.HasRegion);
                                    writeDoubleArrayFromString(graph, row, detailedGeometryIndex, edge, DN.Edge_HasGeometry, actualTransform);
                                }
                            }
                            return true;
                        } catch (DatabaseException | MismatchedDimensionException | TransformException e) {
                            // Wrap checked exceptions so they can cross the Function boundary.
                            throw new RuntimeException(e);
                        }
                    });
                } catch (IOException e) {
                    // NOTE(review): unlike importVertices, the IOException is only
                    // logged here, not rethrown — confirm this asymmetry is intended.
                    LOGGER.error("Could not import edges {}", model.getSource(), e);
                } finally {
                    Layer0Utils.setDependenciesIndexingDisabled(graph, false);
                }
            }
        });
    }
+
+ private static void flipAxes(double[] coords) {
+ double tmp = coords[0];
+ coords[0] = coords[1];
+ coords[1] = tmp;
+ }
+
+ private static void writeValue(WriteGraph graph, CSVRecord row, int index, Resource subject, Resource relation) throws DatabaseException {
+ if (index != -1) {
+ String stringValue = row.get(index);
+ if (!stringValue.isEmpty()) {
+ try {
+ if (stringValue.startsWith("\"") && stringValue.endsWith("\"")) {
+ stringValue = stringValue.substring(1, stringValue.length() - 1);
+ }
+ graph.claimLiteral(subject, relation, Double.parseDouble(stringValue), Bindings.DOUBLE);
+ } catch (NumberFormatException e) {
+ LOGGER.error("Could not parse {} {} {} {}", row, index, subject, relation, e);
+ //throw new DatabaseException(e);
+ }
+ }
+ }
+ }
+
+ private static void writeStringValue(WriteGraph graph, CSVRecord row, int index, Resource subject, Resource relation) throws DatabaseException {
+ if (index != -1) {
+ String stringValue = row.get(index);
+ if (!stringValue.isEmpty()) {
+ try {
+ graph.claimLiteral(subject, relation, stringValue, Bindings.STRING);
+ } catch (NumberFormatException e) {
+ throw new DatabaseException(e);
+ }
+ }
+ }
+ }
+
+ private static void writeDoubleArrayFromString(WriteGraph graph, CSVRecord row, int index, Resource subject, Resource relation, MathTransform actualTransform) throws DatabaseException, MismatchedDimensionException, TransformException {
+ if (index != -1) {
+ String stringValue = row.get(index);
+ if (!stringValue.isEmpty()) {
+ if (stringValue.startsWith("\"") && stringValue.endsWith("\"")) {
+ stringValue = stringValue.substring(1, stringValue.length() - 1);
+ }
+ String[] coordPairs = stringValue.split(";");
+ ArrayList<Double> dd = new ArrayList<>(coordPairs.length * 2);
+ for (int i = 0; i < coordPairs.length; i++) {
+ String coordPair = coordPairs[i];
+ String[] p = coordPair.split(" ");
+ double x = Double.parseDouble(p[0]);
+ double y = Double.parseDouble(p[1]);
+ if (actualTransform != null) {
+ DirectPosition2D targetPos = new DirectPosition2D();
+ DirectPosition2D sourcePos = new DirectPosition2D(y, x);
+ DirectPosition res = actualTransform.transform(sourcePos, targetPos);
+ double[] coords = res.getCoordinate();
+ x = coords[1];
+ y = coords[0];
+ }
+ dd.add(x);
+ dd.add(y);
+ }
+ double[] detailedGeometryCoords = new double[dd.size()];
+ for (int i = 0; i < dd.size(); i++) {
+ double d = dd.get(i);
+ detailedGeometryCoords[i] = d;
+ }
+ try {
+ graph.claimLiteral(subject, relation, detailedGeometryCoords, Bindings.DOUBLE_ARRAY);
+ } catch (NumberFormatException e) {
+ throw new DatabaseException(e);
+ }
+ }
+ }
+ }
+}