source: josm/trunk/src/org/openstreetmap/josm/data/validation/tests/DuplicateNode.java@11627

Last change on this file since 11627 was 11627, checked in by Don-vip, 7 years ago

fix #3346 - drastically improve the performance of fixing duplicate nodes by:

  • caching data sources area computation (see the sketch below)
  • moving layer invalidation from UndoRedoHandler.addNoRedraw to UndoRedoHandler.add
  • avoiding any EDT call when building tag conflict dialog if it's not meant to be displayed
Property svn:eol-style set to native
File size: 14.2 KB
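The first bullet above refers to memoizing the expensive union of the data source areas instead of recomputing it on every merge. The snippet below is only an illustrative sketch of that caching pattern; DataSourceAreaCache and its members are hypothetical names, not the actual JOSM code from this changeset. The DuplicateNode.java source itself follows.

// Illustrative sketch only: cache an expensive Area computation and invalidate it on change.
import java.awt.geom.Area;
import java.util.ArrayList;
import java.util.List;

class DataSourceAreaCache {
    private final List<Area> sources = new ArrayList<>();
    private Area cachedUnion; // recomputed lazily, cleared whenever the sources change

    synchronized void addSource(Area a) {
        sources.add(a);
        cachedUnion = null; // invalidate the cache on modification
    }

    synchronized Area getUnion() {
        if (cachedUnion == null) {
            Area union = new Area();
            for (Area a : sources) {
                union.add(a); // Area.add computes the union of the two shapes
            }
            cachedUnion = union;
        }
        return new Area(cachedUnion); // defensive copy for callers
    }
}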
// License: GPL. For details, see LICENSE file.
package org.openstreetmap.josm.data.validation.tests;

import static org.openstreetmap.josm.tools.I18n.tr;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import org.openstreetmap.josm.Main;
import org.openstreetmap.josm.actions.MergeNodesAction;
import org.openstreetmap.josm.command.Command;
import org.openstreetmap.josm.data.coor.LatLon;
import org.openstreetmap.josm.data.osm.AbstractPrimitive;
import org.openstreetmap.josm.data.osm.Hash;
import org.openstreetmap.josm.data.osm.Node;
import org.openstreetmap.josm.data.osm.OsmPrimitive;
import org.openstreetmap.josm.data.osm.OsmPrimitiveType;
import org.openstreetmap.josm.data.osm.Storage;
import org.openstreetmap.josm.data.osm.Way;
import org.openstreetmap.josm.data.validation.Severity;
import org.openstreetmap.josm.data.validation.Test;
import org.openstreetmap.josm.data.validation.TestError;
import org.openstreetmap.josm.gui.progress.ProgressMonitor;
import org.openstreetmap.josm.tools.MultiMap;

/**
 * Tests if there are duplicate nodes
 *
 * @author frsantos
 */
public class DuplicateNode extends Test {

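    /**
     * Hash strategy for the storage of potential duplicates: a node (or the list of nodes
     * already collected at one position) is hashed by its coordinates, rounded either to
     * OSM precision or to the configured precision grid, so nodes at the same position collide.
     */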
    private static class NodeHash implements Hash<Object, Object> {

        private final double precision = Main.pref.getDouble("validator.duplicatenodes.precision", 0.);

        private LatLon roundCoord(LatLon coor) {
            return new LatLon(
                    Math.round(coor.lat() / precision) * precision,
                    Math.round(coor.lon() / precision) * precision
                    );
        }

        @SuppressWarnings("unchecked")
        private LatLon getLatLon(Object o) {
            if (o instanceof Node) {
                LatLon coor = ((Node) o).getCoor();
                if (coor == null)
                    return null;
                if (precision == 0)
                    return coor.getRoundedToOsmPrecision();
                return roundCoord(coor);
            } else if (o instanceof List<?>) {
                LatLon coor = ((List<Node>) o).get(0).getCoor();
                if (coor == null)
                    return null;
                if (precision == 0)
                    return coor.getRoundedToOsmPrecision();
                return roundCoord(coor);
            } else
                throw new AssertionError();
        }

        @Override
        public boolean equals(Object k, Object t) {
            LatLon coorK = getLatLon(k);
            LatLon coorT = getLatLon(t);
            return coorK == coorT || (coorK != null && coorT != null && coorK.equals(coorT));
        }

        @Override
        public int getHashCode(Object k) {
            LatLon coorK = getLatLon(k);
            return coorK == null ? 0 : coorK.hashCode();
        }
    }

    protected static final int DUPLICATE_NODE = 1;
    protected static final int DUPLICATE_NODE_MIXED = 2;
    protected static final int DUPLICATE_NODE_OTHER = 3;
    protected static final int DUPLICATE_NODE_BUILDING = 10;
    protected static final int DUPLICATE_NODE_BOUNDARY = 11;
    protected static final int DUPLICATE_NODE_HIGHWAY = 12;
    protected static final int DUPLICATE_NODE_LANDUSE = 13;
    protected static final int DUPLICATE_NODE_NATURAL = 14;
    protected static final int DUPLICATE_NODE_POWER = 15;
    protected static final int DUPLICATE_NODE_RAILWAY = 16;
    protected static final int DUPLICATE_NODE_WATERWAY = 17;

    private static final String[] TYPES = {
            "none", "highway", "railway", "waterway", "boundary", "power", "natural", "landuse", "building"};

    /** The map of potential duplicates.
     *
     * If there is exactly one node for a given position, the map contains a pair &lt;position, Node&gt;.
     * If there are multiple nodes for a given position, the map contains a pair
     * &lt;position, List&lt;Node&gt;&gt;.
     */
    private Storage<Object> potentialDuplicates;

    /**
     * Constructor
     */
    public DuplicateNode() {
        super(tr("Duplicated nodes"),
                tr("This test checks that there are no nodes at the very same location."));
    }

    @Override
    public void startTest(ProgressMonitor monitor) {
        super.startTest(monitor);
        potentialDuplicates = new Storage<>(new NodeHash());
    }

    @SuppressWarnings("unchecked")
    @Override
    public void endTest() {
        for (Object v: potentialDuplicates) {
            if (v instanceof Node) {
                // just one node at this position. Nothing to report as error
                continue;
            }

            // multiple nodes at the same position -> check if all nodes have a distinct elevation
            List<Node> nodes = (List<Node>) v;
            Set<String> eles = new HashSet<>();
            for (Node n : nodes) {
                String ele = n.get("ele");
                if (ele != null) {
                    eles.add(ele);
                }
            }
            if (eles.size() == nodes.size()) {
                // All nodes at this position have a distinct elevation.
                // This is normal in some particular cases (for example, geodesic points in France)
                // Do not report this as an error
                continue;
            }

            // report errors
            errors.addAll(buildTestErrors(this, nodes));
        }
        super.endTest();
        potentialDuplicates = null;
    }

    /**
     * Returns the list of "duplicate nodes" errors for the given selection of nodes and parent test
     * @param parentTest The parent test of returned errors
     * @param nodes The node selection to look into
     * @return the list of "duplicate nodes" errors
     */
    public List<TestError> buildTestErrors(Test parentTest, List<Node> nodes) {
        List<TestError> errors = new ArrayList<>();

        MultiMap<Map<String, String>, OsmPrimitive> mm = new MultiMap<>();
        for (Node n: nodes) {
            mm.put(n.getKeys(), n);
        }

        Map<String, Boolean> typeMap = new HashMap<>();

        // check whether we have multiple nodes at the same position with the same tag set
        for (Iterator<Map<String, String>> it = mm.keySet().iterator(); it.hasNext();) {
            Set<OsmPrimitive> primitives = mm.get(it.next());
            if (primitives.size() > 1) {

                for (String type: TYPES) {
                    typeMap.put(type, Boolean.FALSE);
                }

                for (OsmPrimitive p : primitives) {
                    if (p.getType() == OsmPrimitiveType.NODE) {
                        Node n = (Node) p;
                        List<OsmPrimitive> lp = n.getReferrers();
                        for (OsmPrimitive sp: lp) {
                            if (sp.getType() == OsmPrimitiveType.WAY) {
                                boolean typed = false;
                                Way w = (Way) sp;
                                Map<String, String> keys = w.getKeys();
                                for (String type: typeMap.keySet()) {
                                    if (keys.containsKey(type)) {
                                        typeMap.put(type, Boolean.TRUE);
                                        typed = true;
                                    }
                                }
                                if (!typed) {
                                    typeMap.put("none", Boolean.TRUE);
                                }
                            }
                        }
                    }
                }

                long nbType = typeMap.entrySet().stream().filter(Entry::getValue).count();

                if (nbType > 1) {
                    errors.add(TestError.builder(parentTest, Severity.WARNING, DUPLICATE_NODE_MIXED)
                            .message(tr("Mixed type duplicated nodes"))
                            .primitives(primitives)
                            .build());
                } else if (typeMap.get("highway")) {
                    errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_HIGHWAY)
                            .message(tr("Highway duplicated nodes"))
                            .primitives(primitives)
                            .build());
                } else if (typeMap.get("railway")) {
                    errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_RAILWAY)
                            .message(tr("Railway duplicated nodes"))
                            .primitives(primitives)
                            .build());
                } else if (typeMap.get("waterway")) {
                    errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_WATERWAY)
                            .message(tr("Waterway duplicated nodes"))
                            .primitives(primitives)
                            .build());
                } else if (typeMap.get("boundary")) {
                    errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_BOUNDARY)
                            .message(tr("Boundary duplicated nodes"))
                            .primitives(primitives)
                            .build());
                } else if (typeMap.get("power")) {
                    errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_POWER)
                            .message(tr("Power duplicated nodes"))
                            .primitives(primitives)
                            .build());
                } else if (typeMap.get("natural")) {
                    errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_NATURAL)
                            .message(tr("Natural duplicated nodes"))
                            .primitives(primitives)
                            .build());
                } else if (typeMap.get("building")) {
                    errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_BUILDING)
                            .message(tr("Building duplicated nodes"))
                            .primitives(primitives)
                            .build());
                } else if (typeMap.get("landuse")) {
                    errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_LANDUSE)
                            .message(tr("Landuse duplicated nodes"))
                            .primitives(primitives)
                            .build());
                } else {
                    errors.add(TestError.builder(parentTest, Severity.WARNING, DUPLICATE_NODE_OTHER)
                            .message(tr("Other duplicated nodes"))
                            .primitives(primitives)
                            .build());
                }
                it.remove();
            }
        }

        // check whether we have multiple nodes at the same position with differing tag sets
        if (!mm.isEmpty()) {
            List<OsmPrimitive> duplicates = new ArrayList<>();
            for (Set<OsmPrimitive> l: mm.values()) {
                duplicates.addAll(l);
            }
            if (duplicates.size() > 1) {
                errors.add(TestError.builder(parentTest, Severity.WARNING, DUPLICATE_NODE)
                        .message(tr("Nodes at same position"))
                        .primitives(duplicates)
                        .build());
            }
        }
        return errors;
    }

    @SuppressWarnings("unchecked")
    @Override
    public void visit(Node n) {
        if (n.isUsable()) {
            if (potentialDuplicates.get(n) == null) {
                // In most cases there is just one node at a given position. We avoid creating
                // an extra object and simply remember the node itself at this position.
                potentialDuplicates.put(n);
            } else if (potentialDuplicates.get(n) instanceof Node) {
                // We have an additional node at the same position. Create an extra
                // object (a list) to keep track of all nodes at this position.
                Node n1 = (Node) potentialDuplicates.get(n);
                List<Node> nodes = new ArrayList<>(2);
                nodes.add(n1);
                nodes.add(n);
                potentialDuplicates.put(nodes);
            } else if (potentialDuplicates.get(n) instanceof List<?>) {
                // We already have multiple nodes at the same position; just add this one.
                List<Node> nodes = (List<Node>) potentialDuplicates.get(n);
                nodes.add(n);
            }
        }
    }

    /**
     * Merge the nodes into one.
     * Copied from UtilsPlugin.MergePointsAction
     */
    @Override
    public Command fixError(TestError testError) {
        Collection<OsmPrimitive> sel = new LinkedList<>(testError.getPrimitives());
        Set<Node> nodes = new LinkedHashSet<>(OsmPrimitive.getFilteredList(sel, Node.class));

        // Filter nodes that have already been deleted (see #5764 and #5773)
        nodes.removeIf(AbstractPrimitive::isDeleted);

        // Merge only if at least 2 nodes remain
        if (nodes.size() >= 2) {
            // Use first existing node or first node if all nodes are new
            Node target = null;
            for (Node n: nodes) {
                if (!n.isNew()) {
                    target = n;
                    break;
                }
            }
            if (target == null) {
                target = nodes.iterator().next();
            }

            if (Command.checkOutlyingOrIncompleteOperation(nodes, Collections.singleton(target)) == Command.IS_OK)
                return MergeNodesAction.mergeNodes(Main.getLayerManager().getEditLayer(), nodes, target);
        }

        return null; // undoRedo handling done in mergeNodes
    }

    @Override
    public boolean isFixable(TestError testError) {
        if (!(testError.getTester() instanceof DuplicateNode)) return false;
        // never merge nodes with different tags.
        if (testError.getCode() == DUPLICATE_NODE) return false;
        // cannot merge nodes outside download area
        final Iterator<? extends OsmPrimitive> it = testError.getPrimitives().iterator();
        if (!it.hasNext() || it.next().isOutsideDownloadArea()) return false;
        // everything else is ok to merge
        return true;
    }
}
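
For orientation, the sketch below shows how a validator test like this one is exercised: instantiate it, call startTest(), feed each node through visit(), finish with endTest(), and read the collected errors. The harness class is hypothetical and not part of the repository file; it assumes an initialized JOSM environment (preferences and i18n set up), since NodeHash reads a preference when the test starts.

// Illustrative harness only -- assumes a running/initialized JOSM environment.
import org.openstreetmap.josm.data.coor.LatLon;
import org.openstreetmap.josm.data.osm.Node;
import org.openstreetmap.josm.data.validation.TestError;
import org.openstreetmap.josm.data.validation.tests.DuplicateNode;
import org.openstreetmap.josm.gui.progress.NullProgressMonitor;

public final class DuplicateNodeExample {
    public static void main(String[] args) {
        DuplicateNode test = new DuplicateNode();
        test.startTest(NullProgressMonitor.INSTANCE);

        // Two untagged nodes at the same coordinates should be reported as duplicates.
        test.visit(new Node(new LatLon(48.0, 2.0)));
        test.visit(new Node(new LatLon(48.0, 2.0)));

        test.endTest();
        for (TestError e : test.getErrors()) {
            System.out.println(e.getMessage());
        }
    }
}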