// Source: gerrit.simantics code review — simantics/platform.git
// Path: bundles/org.simantics.db.procore/src/fi/vtt/simantics/procore/internal/ClusteringAlgorithmImpl.java
// Commit context: "Fixes for non-compiling code after merge 5930811a7" — the class body below was disabled (commented out) as part of that fix.
1 /*******************************************************************************\r
2  * Copyright (c) 2007, 2010 Association for Decentralized Information Management\r
3  * in Industry THTH ry.\r
4  * All rights reserved. This program and the accompanying materials\r
5  * are made available under the terms of the Eclipse Public License v1.0\r
6  * which accompanies this distribution, and is available at\r
7  * http://www.eclipse.org/legal/epl-v10.html\r
8  *\r
9  * Contributors:\r
10  *     VTT Technical Research Centre of Finland - initial API and implementation\r
11  *******************************************************************************/\r
12 package fi.vtt.simantics.procore.internal;\r
13 //package fi.vtt.simantics.procore.internal2;\r
14 //\r
15 //import java.util.ArrayList;\r
16 //import java.util.Collection;\r
17 //import java.util.HashMap;\r
18 //import java.util.HashSet;\r
19 //import java.util.Map;\r
20 //\r
21 //import org.simantics.db.ReadGraph;\r
22 //import org.simantics.db.Resource;\r
23 //import org.simantics.db.WriteGraph;\r
24 //import org.simantics.db.common.queries.QueryProvider;\r
25 //import org.simantics.db.queries.QuerySupport;\r
26 //import org.simantics.utils.datastructures.Pair;\r
27 //\r
28 //import fi.vtt.simantics.procore.internal2.ClusteringInformation.ReclusterIterator;\r
29 //\r
30 //class ClusteringAlgorithmImpl implements ClusteringAlgorithm {\r
31 //\r
32 //    HashMap<Integer, NewCluster> assignment;\r
33 //\r
34 //    class NewCluster {\r
35 //        int existing;\r
36 //        int root;\r
37 //        int size = 0;\r
38 //        long id = 0;\r
39 //        ArrayList<Integer> ids = new ArrayList<Integer>();\r
40 //        public NewCluster(int existing, int root) {\r
41 //            this.existing = existing;\r
42 //            this.root = root;\r
43 //        }\r
44 //        public void grow(Integer id) {\r
45 //            size++;\r
46 //            assignment.put(id, this);\r
47 //            ids.add(id);\r
48 //        }\r
49 //        public void merge(NewCluster other) {\r
50 //            assert(other != this);\r
51 //            for(int i : other.ids) grow(i);\r
52 //            other.size = 0;\r
53 //        }\r
54 //        public int size() {\r
55 //            return size;\r
56 //        }\r
57 //    }\r
58 //\r
59 //    class CoverageNode {\r
60 //\r
61 //        public int seed;\r
62 //        public int lastCoverage = 1;\r
63 //        public int coverage = 1;\r
64 //\r
65 //        public CoverageNode(int id) {\r
66 //            this.seed = id;\r
67 //        }\r
68 //\r
69 //    }\r
70 //\r
71 //    int instanceOf;\r
72 //    int consistsOf;\r
73 //    int dependsOn;\r
74 //\r
75 //    HashSet<Integer> properties;\r
76 //    HashSet<Integer> depends;\r
77 //    HashSet<Integer> unknown;\r
78 //\r
79 //    class Statement {\r
80 //        final public int subject;\r
81 //        final public int predicate;\r
82 //        final public int object;\r
83 //        public Statement(int s, int p, int o) {\r
84 //            //System.out.println("new Statement(" + s + "," + p + "," + o + ")");\r
85 //            subject = s;\r
86 //            predicate = p;\r
87 //            object = o;\r
88 //        }\r
89 //    };\r
90 //\r
91 //    private void computeCoverages(HashMap<Integer, CoverageNode> newNodes, QuerySupport core, ArrayList<Statement> statements) {\r
92 //\r
93 //        for(int i=0;i<5;i++) {\r
94 //            for(CoverageNode n : newNodes.values()) {\r
95 //                n.lastCoverage = n.coverage;\r
96 //                n.coverage = 1;\r
97 //            }\r
98 //            for(Statement s : statements) {\r
99 //                if(depends.contains(s.predicate)) {\r
100 //                    CoverageNode sn = newNodes.get(s.subject);\r
101 //                    CoverageNode on = newNodes.get(s.object);\r
102 //                    if(sn != null && on != null) {\r
103 //                        sn.coverage += on.lastCoverage;\r
104 //                    }\r
105 //                }\r
106 //            }\r
107 //        }\r
108 //\r
109 //    }\r
110 //\r
111 //    private final int MAX_COVERAGE = 5000;\r
112 //    private final int MAX_CLUSTER = 5000;\r
113 //    private final int MAX_CLUSTER_2 = 15000;\r
114 //\r
115 //    private void clusterNode(CoverageNode node, HashMap<Integer, CoverageNode> newNodes, QuerySupport core, HashMap<Integer, Collection<Integer>> deps, HashSet<CoverageNode> visited, NewCluster topCluster, NewCluster currentCluster, ArrayList<NewCluster> clusters) {\r
116 //\r
117 //        assert(node != null);\r
118 //\r
119 //        if(visited.contains(node)) return;\r
120 //        visited.add(node);\r
121 //\r
122 //        if(node.coverage > MAX_COVERAGE) {\r
123 //            topCluster.grow(node.seed);\r
124 //        } else {\r
125 //            currentCluster.grow(node.seed);\r
126 //        }\r
127 //\r
128 //        Collection<Integer> dc = deps.get(node.seed);\r
129 //        if(dc == null) return;\r
130 //\r
131 //        for(Integer i : dc) {\r
132 //\r
133 //            CoverageNode sn = newNodes.get(i);\r
134 //            if(sn != null) {\r
135 //\r
136 //                //System.out.println("traverse: " + node.coverage + " " + sn.coverage + " " + currentCluster.size());\r
137 //\r
138 //                if(node.coverage > MAX_COVERAGE && sn.coverage < MAX_COVERAGE) {\r
139 //\r
140 //                    if(currentCluster.size() > MAX_CLUSTER) {\r
141 ////                        System.out.println("new cluster: " + node.coverage + " " + sn.coverage + " " + currentCluster.size());\r
142 //                        currentCluster = new NewCluster(0, node.seed);\r
143 //                        clusters.add(currentCluster);\r
144 //                    } else {\r
145 ////                        System.out.println("continue cluster: " + node.coverage + " " + sn.coverage + " " + currentCluster.size());\r
146 //                    }\r
147 //\r
148 //                }\r
149 //\r
150 //                clusterNode(sn, newNodes, core, deps, visited, topCluster, currentCluster, clusters);\r
151 //\r
152 //            }\r
153 //\r
154 //        }\r
155 //\r
156 //    }\r
157 //\r
158 //    private void combineExistingSiblings(ArrayList<NewCluster> clusters, ClusteringSupport support) {\r
159 //\r
160 //        HashMap<Integer, ArrayList<NewCluster>> siblings = new HashMap<Integer, ArrayList<NewCluster>>();\r
161 //        for(NewCluster cluster : clusters) {\r
162 //\r
163 //            if(cluster.size() < MAX_CLUSTER && cluster.existing > 0) {\r
164 //\r
165 //                ArrayList<NewCluster> list = siblings.get(cluster.existing);\r
166 //                if(list == null) {\r
167 //                    list = new ArrayList<NewCluster>();\r
168 //                    siblings.put(cluster.existing, list);\r
169 //                }\r
170 //                list.add(cluster);\r
171 //\r
172 //            }\r
173 //\r
174 //        }\r
175 //\r
176 //        for(ArrayList<NewCluster> list : siblings.values()) {\r
177 //\r
178 //            if(list.size() < 2) continue;\r
179 //\r
180 ////            System.out.println("Processing shared root with  " + list.size() + " new clusters.");\r
181 //\r
182 //            NewCluster current = null;\r
183 //\r
184 //            for(NewCluster cluster : list) {\r
185 //\r
186 //                if(current == null) {\r
187 //                    current = cluster;\r
188 //                } else {\r
189 //                    //System.out.println("Merging to sibling cluster " + current.size + " <-> " + cluster.size);\r
190 //                    current.merge(cluster);\r
191 //                }\r
192 //\r
193 //                if(current.size > MAX_CLUSTER) {\r
194 //                    current.id = support.newClusterId();\r
195 //                    current = null;\r
196 //                }\r
197 //\r
198 //            }\r
199 //\r
200 //        }\r
201 //\r
202 //    }\r
203 //\r
204 //\r
205 //   private void combineRootSiblings(ArrayList<NewCluster> clusters, ClusteringSupport support) {\r
206 //\r
207 //       HashMap<Integer, ArrayList<NewCluster>> siblings = new HashMap<Integer, ArrayList<NewCluster>>();\r
208 //       for(NewCluster cluster : clusters) {\r
209 //\r
210 //           if(cluster.size() < MAX_CLUSTER) {\r
211 //\r
212 //               ArrayList<NewCluster> list = siblings.get(cluster.root);\r
213 //               if(list == null) {\r
214 //                   list = new ArrayList<NewCluster>();\r
215 //                   siblings.put(cluster.root, list);\r
216 //               }\r
217 //               list.add(cluster);\r
218 //\r
219 //           }\r
220 //\r
221 //       }\r
222 //\r
223 //       for(ArrayList<NewCluster> list : siblings.values()) {\r
224 //\r
225 //           if(list.size() < 2) continue;\r
226 //\r
227 ////           System.out.println("Processing shared root with  " + list.size() + " new clusters.");\r
228 //\r
229 //           NewCluster current = null;\r
230 //\r
231 //           for(NewCluster cluster : list) {\r
232 //\r
233 //               if(current == null) {\r
234 //                   current = cluster;\r
235 //               } else {\r
236 ////                   System.out.println("Merging to sibling cluster " + current.size + " <-> " + cluster.size);\r
237 //                   current.merge(cluster);\r
238 //               }\r
239 //\r
240 //               if(current.size > MAX_CLUSTER) {\r
241 //                   current.id = support.newClusterId();\r
242 //                   current = null;\r
243 //               }\r
244 //\r
245 //           }\r
246 //\r
247 //       }\r
248 //\r
249 //   }\r
250 //\r
251 //    private void cluster(HashMap<Integer, Integer> roots, HashMap<Integer, CoverageNode> newNodes, ClusteringSupport support, QuerySupport core, ReadGraph graph, HashMap<Integer, Collection<Integer>> deps) {\r
252 //\r
253 //        ArrayList<NewCluster> clusters = new ArrayList<NewCluster>();\r
254 //        HashSet<CoverageNode> visited = new HashSet<CoverageNode>();\r
255 //        for(Map.Entry<Integer, Integer> e : roots.entrySet()) {\r
256 //            NewCluster topCluster = new NewCluster(e.getValue(), e.getKey());\r
257 //            NewCluster currentCluster = new NewCluster(e.getValue(), e.getKey());\r
258 //            clusterNode(newNodes.get(e.getKey()), newNodes, core, deps, visited, topCluster, currentCluster, clusters);\r
259 //            if(topCluster.size > 0) clusters.add(topCluster);\r
260 //            if(currentCluster.size > 0) clusters.add(currentCluster);\r
261 //        }\r
262 //\r
263 ////        System.out.println("Initial clustering produced " + clusters.size() + " clusters.");\r
264 //\r
265 //        combineRootSiblings(clusters, support);\r
266 //        combineExistingSiblings(clusters, support);\r
267 //\r
268 //        for(NewCluster cluster : clusters) {\r
269 //\r
270 //            if(cluster.size() > 0 && cluster.size() < MAX_CLUSTER) {\r
271 //\r
272 //                if(!newNodes.containsKey(cluster.existing)) {\r
273 //\r
274 //                    Collection<Resource> siblings2 = graph.getObjects(core.getResource(cluster.root), graph.getBuiltins().DependsOn);\r
275 //\r
276 //                    for(Resource sibling : siblings2) {\r
277 //\r
278 //                        if(newNodes.get(core.getId(sibling)) == null) {\r
279 //\r
280 //                            long existing = support.getCluster(sibling);\r
281 //                            long existingSize = support.getClusterSizeCache(existing);\r
282 //                            if(existingSize < MAX_CLUSTER_2) {\r
283 //                                cluster.id = existing;\r
284 //                                System.out.println("Merging to existing cluster " + existing + " with size " + existingSize);\r
285 //                            } else {\r
286 //                                System.out.println(" -sibling too large (" + existingSize + ")");\r
287 //                            }\r
288 //\r
289 //                        }\r
290 //\r
291 //                    }\r
292 //\r
293 //                }\r
294 //\r
295 //            }\r
296 //\r
297 //            if(cluster.size > 0 && cluster.id == 0) {\r
298 //                cluster.id = support.newClusterId();\r
299 //            }\r
300 //\r
301 //        }\r
302 //\r
303 ////        System.out.println("Clustering report:");\r
304 //\r
305 //        int total = 0;\r
306 //        int totalClusters = 0;\r
307 //        for(NewCluster c : clusters) {\r
308 //            if(c.size() > 0) {\r
309 ////                System.out.println("-" + c.size() + " elements - id = " + c.id);\r
310 //                total += c.size();\r
311 //                totalClusters++;\r
312 //            }\r
313 //        }\r
314 //\r
315 //        //System.out.println("Total of " + total + " elements in " + totalClusters + " clusters.");\r
316 //\r
317 //    }\r
318 //\r
319 //    @Override\r
320 //    public void recluster(ClusteringInformation info, ClusteringSupport clusteringSupport, ReadGraph graph, QuerySupport querySupport, QueryProvider queryProvider) {\r
321 //\r
322 ////        Collection<Integer> resources = new ArrayList<Integer>();\r
323 ////        ReclusterIterator it = info.getReclusterIterator();\r
324 ////        if(it == null) return;\r
325 ////\r
326 ////        while(it.hasNext()) {\r
327 ////            it.advance();\r
328 ////            resources.add(it.getReclusterResourceId());\r
329 ////        }\r
330 ////\r
331 ////        ArrayList<Statement> statements = new ArrayList<Statement>();\r
332 ////        AddedStatmentsIterator it2 = info.getAddedStatmentsIterator();\r
333 ////        while(it2.hasNext()) {\r
334 ////            it2.advance();\r
335 ////            statements.add(new Statement(it2.getAddedSubjectId(), it2.getAddedPredicateId(), it2.getAddedObjectId()));\r
336 ////        }\r
337 ////\r
338 ////        //System.out.println("Clustering " + resources.size() + " new resources,  " + statements.size() + " new statements.");\r
339 ////\r
340 ////        try {\r
341 ////\r
342 ////            HashMap<Integer, CoverageNode> newNodes = new HashMap<Integer, CoverageNode>();\r
343 ////\r
344 ////            instanceOf = querySupport.getId(graph.getBuiltins().InstanceOf);\r
345 ////            consistsOf = querySupport.getId(graph.getBuiltins().ConsistsOf);\r
346 ////            dependsOn = querySupport.getId(graph.getBuiltins().DependsOn);\r
347 ////\r
348 ////            assignment = new HashMap<Integer, NewCluster>();\r
349 ////            properties = new HashSet<Integer>();\r
350 ////            depends = new HashSet<Integer>();\r
351 ////            unknown = new HashSet<Integer>();\r
352 ////\r
353 ////            depends.add(consistsOf);\r
354 ////\r
355 ////            for(Integer r : resources) {\r
356 ////                newNodes.put(r, new CoverageNode(r));\r
357 ////            }\r
358 ////\r
359 ////            for(Statement s : statements) {\r
360 ////\r
361 ////                if(unknown.contains(s.predicate)) continue;\r
362 ////                if(depends.contains(s.predicate)) continue;\r
363 ////                if(properties.contains(s.predicate)) continue;\r
364 ////                if(s.predicate == instanceOf) continue;\r
365 ////                if(s.predicate == consistsOf) continue;\r
366 ////\r
367 ////                if(graph.isSubrelationOf(querySupport.getResource(s.predicate), graph.getBuiltins().HasProperty)) {\r
368 ////                    properties.add(s.predicate);\r
369 ////                } else if(graph.isSubrelationOf(querySupport.getResource(s.predicate), graph.getBuiltins().DependsOn)) {\r
370 ////                    depends.add(s.predicate);\r
371 ////                } else {\r
372 ////                    unknown.add(s.predicate);\r
373 ////                }\r
374 ////\r
375 ////            }\r
376 ////\r
377 ////            depends.addAll(properties);\r
378 ////\r
379 ////            HashSet<Integer> roots = new HashSet<Integer>();\r
380 ////            for(Integer r : resources) roots.add(r);\r
381 ////\r
382 ////            HashMap<Integer, Collection<Integer>> deps = new HashMap<Integer, Collection<Integer>>();\r
383 ////\r
384 ////            for(Statement s : statements) {\r
385 ////\r
386 ////                if(depends.contains(s.predicate)) {\r
387 ////                    if(newNodes.containsKey(s.subject)) roots.remove(s.object);\r
388 ////                    Collection<Integer> coll = deps.get(s.subject);\r
389 ////                    if(coll == null) {\r
390 ////                        coll = new ArrayList<Integer>();\r
391 ////                        deps.put(s.subject, coll);\r
392 ////                    }\r
393 ////                    coll.add(s.object);\r
394 ////                }\r
395 ////\r
396 ////            }\r
397 ////\r
398 //////            System.out.println("" + roots.size() + " roots.");\r
399 ////\r
400 ////            for(Statement s : statements) {\r
401 ////\r
402 ////                if(roots.contains(s.object) && s.predicate == instanceOf && newNodes.containsKey(s.subject)) {\r
403 ////                    roots.remove(s.object);\r
404 ////                    Collection<Integer> coll = deps.get(s.subject);\r
405 ////                    if(coll == null) {\r
406 ////                        deps.put(s.subject, new SingletonCollection<Integer>(s.object));\r
407 ////                    } else {\r
408 ////                        coll.add(s.object);\r
409 ////                    }\r
410 ////                }\r
411 ////\r
412 ////            }\r
413 //\r
414 ////            System.out.println("" + roots.size() + " roots.");\r
415 //\r
416 ////            HashMap<Integer,Integer> roots2 = new HashMap<Integer,Integer>();\r
417 ////            for(Statement s : statements) {\r
418 ////                if(depends.contains(s.predicate)) {\r
419 ////                    if(roots.contains(s.object)) {\r
420 ////                        roots2.put(s.object, s.subject);\r
421 ////                    }\r
422 ////                }\r
423 ////            }\r
424 //\r
425 ////            for(Integer i : roots) {\r
426 ////                System.out.println("root");\r
427 ////                for(StatementImpl2 s2 : statements) {\r
428 ////                    int sub = core.getId(s2.getSubject());\r
429 ////                    if(sub == i) {\r
430 ////                        System.out.println("-" + g.adapt(s2.getPredicate(), g.getBuiltins().HasStringRepresentation));\r
431 ////                    }\r
432 ////                }\r
433 ////            }\r
434 //\r
435 ////            System.out.println("" + roots.size() + " roots after parent search.");\r
436 //\r
437 ////            System.out.println("-found " + properties.size() + " property relations");\r
438 ////            for(Integer i : properties) {\r
439 ////                System.out.println("-" + graph.adapt(querySupport.getResource(i), graph.getBuiltins().HasStringRepresentation));\r
440 ////            }\r
441 ////            System.out.println("-found " + depends.size() + " depends on relations");\r
442 ////            for(Integer i : depends) {\r
443 ////                System.out.println("-" + graph.adapt(querySupport.getResource(i), graph.getBuiltins().HasStringRepresentation));\r
444 ////            }\r
445 ////            System.out.println("-found " + unknown.size() + " other relations");\r
446 ////            for(Integer i : unknown) {\r
447 ////                System.out.println("-" + graph.adapt(querySupport.getResource(i), graph.getBuiltins().HasStringRepresentation));\r
448 ////            }\r
449 //\r
450 ////            computeCoverages(newNodes, querySupport, statements);\r
451 ////            cluster(roots2, newNodes, clusteringSupport, querySupport, graph, deps);\r
452 //\r
453 ////            System.out.println("finished clustering");\r
454 //\r
455 ////            long cid = clusteringSupport.newClusterId();\r
456 //\r
457 //            long defaultCluster = 0;\r
458 //            ReclusterIterator it3 = info.getReclusterIterator();\r
459 //            while(it3.hasNext()) {\r
460 //              int id = it3.getReclusterResourceId();\r
461 //\r
462 //              Long cluster = assignment2.get(id);\r
463 //              if(cluster == null) {\r
464 //                  if(defaultCluster == 0) defaultCluster = clusteringSupport.newClusterId();\r
465 //                  cluster = defaultCluster;\r
466 //              }\r
467 //\r
468 //              it3.setReclusterResourceCluster(cluster);\r
469 ////                it3.setReclusterResourceCluster(cid);\r
470 //                it3.advance();\r
471 //\r
472 ////                    if(newContexts.contains(id)) {\r
473 ////                    it3.setReclusterResourceCluster(clusteringSupport.newClusterId());\r
474 ////                    it3.advance();\r
475 ////                } else {\r
476 ////                    NewCluster t = assignment.get(id);\r
477 ////                    it3.setReclusterResourceCluster(t.id);\r
478 ////                    it3.advance();\r
479 ////                }\r
480 //\r
481 //            }\r
482 //\r
483 ////        } catch(Throwable t) {\r
484 ////\r
485 ////            t.printStackTrace();\r
486 ////\r
487 ////        }\r
488 //\r
489 //    }\r
490 //\r
491 //    private HashMap<Resource, Pair<Resource, Integer>> contextCache = new HashMap<Resource, Pair<Resource, Integer>>();\r
492 //\r
493 //    HashSet<Integer> newContexts;\r
494 //\r
495 //    HashMap<Integer, Long> assignment2;\r
496 //\r
497 //    @Override\r
498 //    public void createContexts(HashMap<Resource, Resource> newResources, WriteGraph g, QuerySupport querySupport, ClusteringSupport clusteringSupport) {\r
499 //\r
500 //        newContexts = new HashSet<Integer>();\r
501 //        assignment2 = new HashMap<Integer, Long>();\r
502 //\r
503 //        HashMap<Resource, Collection<Resource>> contexts = new HashMap<Resource, Collection<Resource>>();\r
504 //        for(Map.Entry<Resource, Resource> entry : newResources.entrySet()) {\r
505 //\r
506 //            assert(entry.getKey() != null);\r
507 //            assert(entry.getValue() != null);\r
508 //\r
509 //            Collection<Resource> coll = contexts.get(entry.getValue());\r
510 //            if(coll == null) {\r
511 //                coll = new ArrayList<Resource>();\r
512 //                contexts.put(entry.getValue(), coll);\r
513 //            }\r
514 //            coll.add(entry.getKey());\r
515 //\r
516 //        }\r
517 //\r
518 ////        long newClusterId = clusteringSupport.newClusterId();\r
519 ////\r
520 //        for(Map.Entry<Resource, Collection<Resource>> entry : contexts.entrySet()) {\r
521 //\r
522 //            Resource context = g.getBuiltins().RootLibrary;\r
523 //            Resource type = entry.getKey();\r
524 //\r
525 //            Resource typeContext = null;\r
526 //            long fill = 10000;\r
527 //\r
528 //            long assignedClusterId = 0;\r
529 //\r
530 ////            Collection<Resource> ctxs = g.getObjects(context, type);\r
531 //\r
532 //            Pair<Resource, Integer> contextPair = contextCache.get(type);\r
533 //            if(contextPair != null) {\r
534 //\r
535 //              typeContext = contextPair.first;\r
536 //              fill = contextPair.second;\r
537 //              assignedClusterId = clusteringSupport.getCluster(typeContext);\r
538 //\r
539 //            } else {\r
540 //\r
541 ////                    System.out.println("No existing context found in " + context.getResourceId() + " for type " + type.getResourceId());\r
542 //\r
543 //            }\r
544 //\r
545 ////            for(Resource ctx : ctxs) {\r
546 ////                long clusterId = clusteringSupport.getCluster(ctx);\r
547 ////                long size = clusteringSupport.getClusterSizeCache(clusterId);\r
548 ////                if(size < 500000) {\r
549 ////                    typeContext = ctx;\r
550 ////                    fill = (long)(10000.0 * ((double)size / 500000.0));\r
551 ////                    System.out.println("Append to existing context "  + clusteringSupport.getCluster(ctx) + "(res=" + typeContext.getResourceId() + ") with size " + clusteringSupport.getClusterSize(clusteringSupport.getCluster(ctx)));\r
552 ////                    assignedClusterId = clusterId;\r
553 ////                    break;\r
554 ////                } else {\r
555 ////                    System.out.println("Context cluster size was " + clusteringSupport.getClusterSize(clusteringSupport.getCluster(ctx)));\r
556 ////                }\r
557 ////            }\r
558 ////\r
559 ////            if(ctxs.size() == 0) System.out.println("No contexts found in " + context.getResourceId() + " for type " + type.getResourceId());\r
560 //\r
561 //            for(Resource newResource : entry.getValue()) {\r
562 //\r
563 //                if(fill >= 10000) {\r
564 //                    typeContext = g.newResource();\r
565 //                    g.addStatement(context, type, type, typeContext);\r
566 //                    g.addStatement(typeContext, g.getBuiltins().Inherits, g.getBuiltins().SupertypeOf, type);\r
567 //                    newContexts.add(querySupport.getId(typeContext));\r
568 //                    fill = 0;\r
569 //                    assignedClusterId = clusteringSupport.newClusterId();\r
570 //                    assignment2.put(querySupport.getId(typeContext), assignedClusterId);\r
571 //                }\r
572 //\r
573 //                assignment2.put(querySupport.getId(newResource), assignedClusterId);\r
574 //\r
575 //                g.addStatement(typeContext, g.getBuiltins().HasInstance, g.getBuiltins().InstanceOf, newResource);\r
576 //                fill++;\r
577 //\r
578 //            }\r
579 //\r
580 //            contextCache.put(type, new Pair<Resource, Integer>(typeContext, (int)fill));\r
581 //\r
582 //        }\r
583 //\r
584 //    }\r
585 //\r
586 //}