source: josm/trunk/src/org/openstreetmap/josm/data/validation/tests/DuplicateNode.java@ 11007

Last change on this file since 11007 was 11007, checked in by Don-vip, 8 years ago

fix #13643 - prevent creation of DuplicateNode errors with empty list of primitives, improve unit test

  • Property svn:eol-style set to native
File size: 15.6 KB
Line 
1// License: GPL. For details, see LICENSE file.
2package org.openstreetmap.josm.data.validation.tests;
3
4import static org.openstreetmap.josm.tools.I18n.marktr;
5import static org.openstreetmap.josm.tools.I18n.tr;
6
7import java.util.ArrayList;
8import java.util.Collection;
9import java.util.Collections;
10import java.util.HashMap;
11import java.util.HashSet;
12import java.util.Iterator;
13import java.util.LinkedHashSet;
14import java.util.LinkedList;
15import java.util.List;
16import java.util.Map;
17import java.util.Map.Entry;
18import java.util.Set;
19
20import org.openstreetmap.josm.Main;
21import org.openstreetmap.josm.actions.MergeNodesAction;
22import org.openstreetmap.josm.command.Command;
23import org.openstreetmap.josm.data.coor.LatLon;
24import org.openstreetmap.josm.data.osm.Hash;
25import org.openstreetmap.josm.data.osm.Node;
26import org.openstreetmap.josm.data.osm.OsmPrimitive;
27import org.openstreetmap.josm.data.osm.OsmPrimitiveType;
28import org.openstreetmap.josm.data.osm.Storage;
29import org.openstreetmap.josm.data.osm.Way;
30import org.openstreetmap.josm.data.validation.Severity;
31import org.openstreetmap.josm.data.validation.Test;
32import org.openstreetmap.josm.data.validation.TestError;
33import org.openstreetmap.josm.gui.progress.ProgressMonitor;
34import org.openstreetmap.josm.tools.CheckParameterUtil;
35import org.openstreetmap.josm.tools.MultiMap;
36
37/**
38 * Tests if there are duplicate nodes
39 *
40 * @author frsantos
41 */
42public class DuplicateNode extends Test {
43
44 private static class NodeHash implements Hash<Object, Object> {
45
46 private final double precision = Main.pref.getDouble("validator.duplicatenodes.precision", 0.);
47
48 private LatLon roundCoord(LatLon coor) {
49 return new LatLon(
50 Math.round(coor.lat() / precision) * precision,
51 Math.round(coor.lon() / precision) * precision
52 );
53 }
54
55 @SuppressWarnings("unchecked")
56 private LatLon getLatLon(Object o) {
57 if (o instanceof Node) {
58 LatLon coor = ((Node) o).getCoor();
59 if (coor == null)
60 return null;
61 if (precision == 0)
62 return coor.getRoundedToOsmPrecision();
63 return roundCoord(coor);
64 } else if (o instanceof List<?>) {
65 LatLon coor = ((List<Node>) o).get(0).getCoor();
66 if (coor == null)
67 return null;
68 if (precision == 0)
69 return coor.getRoundedToOsmPrecision();
70 return roundCoord(coor);
71 } else
72 throw new AssertionError();
73 }
74
75 @Override
76 public boolean equals(Object k, Object t) {
77 LatLon coorK = getLatLon(k);
78 LatLon coorT = getLatLon(t);
79 return coorK == coorT || (coorK != null && coorT != null && coorK.equals(coorT));
80 }
81
82 @Override
83 public int getHashCode(Object k) {
84 LatLon coorK = getLatLon(k);
85 return coorK == null ? 0 : coorK.hashCode();
86 }
87 }
88
    /**
     * Specialized error for duplicated nodes. Guarantees at construction time
     * that the reported set of primitives is not empty (see #13643).
     */
    private static class DuplicateNodeTestError extends TestError {
        DuplicateNodeTestError(Test parentTest, Severity severity, String msg, int code, Set<OsmPrimitive> primitives) {
            super(parentTest, severity, tr("Duplicated nodes"), tr(msg), msg, code, primitives);
            // validated after super() because Java requires the super call first
            CheckParameterUtil.ensureThat(!primitives.isEmpty(), "Empty primitives: " + msg);
        }
    }
95
    /** Error code: nodes at the same position with differing tag sets (never auto-merged). */
    protected static final int DUPLICATE_NODE = 1;
    /** Error code: same-tags duplicates referenced by ways of several different types. */
    protected static final int DUPLICATE_NODE_MIXED = 2;
    /** Error code: same-tags duplicates with no typed parent way. */
    protected static final int DUPLICATE_NODE_OTHER = 3;
    /** Error code: same-tags duplicates referenced by building ways. */
    protected static final int DUPLICATE_NODE_BUILDING = 10;
    /** Error code: same-tags duplicates referenced by boundary ways. */
    protected static final int DUPLICATE_NODE_BOUNDARY = 11;
    /** Error code: same-tags duplicates referenced by highway ways. */
    protected static final int DUPLICATE_NODE_HIGHWAY = 12;
    /** Error code: same-tags duplicates referenced by landuse ways. */
    protected static final int DUPLICATE_NODE_LANDUSE = 13;
    /** Error code: same-tags duplicates referenced by natural ways. */
    protected static final int DUPLICATE_NODE_NATURAL = 14;
    /** Error code: same-tags duplicates referenced by power ways. */
    protected static final int DUPLICATE_NODE_POWER = 15;
    /** Error code: same-tags duplicates referenced by railway ways. */
    protected static final int DUPLICATE_NODE_RAILWAY = 16;
    /** Error code: same-tags duplicates referenced by waterway ways. */
    protected static final int DUPLICATE_NODE_WATERWAY = 17;

    /** Way keys used to classify duplicated nodes by the type of their parent ways. */
    private static final String[] TYPES = {
        "none", "highway", "railway", "waterway", "boundary", "power", "natural", "landuse", "building"};

    /** The map of potential duplicates.
     *
     * If there is exactly one node for a given pos, the map includes a pair &lt;pos, Node&gt;.
     * If there are multiple nodes for a given pos, the map includes a pair
     * &lt;pos, List&lt;Node&gt;&gt; (see {@code visit(Node)}).
     */
    private Storage<Object> potentialDuplicates;

    /**
     * Constructs a new {@code DuplicateNode} test.
     */
    public DuplicateNode() {
        super(tr("Duplicated nodes"),
                tr("This test checks that there are no nodes at the very same location."));
    }
126
    @Override
    public void startTest(ProgressMonitor monitor) {
        super.startTest(monitor);
        // Fresh storage for every run, hashed by (rounded) node position: each
        // slot holds either a single Node or a List<Node> sharing one position.
        potentialDuplicates = new Storage<>(new NodeHash());
    }
132
133 @SuppressWarnings("unchecked")
134 @Override
135 public void endTest() {
136 for (Object v: potentialDuplicates) {
137 if (v instanceof Node) {
138 // just one node at this position. Nothing to report as error
139 continue;
140 }
141
142 // multiple nodes at the same position -> check if all nodes have a distinct elevation
143 List<Node> nodes = (List<Node>) v;
144 Set<String> eles = new HashSet<>();
145 for (Node n : nodes) {
146 String ele = n.get("ele");
147 if (ele != null) {
148 eles.add(ele);
149 }
150 }
151 if (eles.size() == nodes.size()) {
152 // All nodes at this position have a distinct elevation.
153 // This is normal in some particular cases (for example, geodesic points in France)
154 // Do not report this as an error
155 continue;
156 }
157
158 // report errors
159 errors.addAll(buildTestErrors(this, nodes));
160 }
161 super.endTest();
162 potentialDuplicates = null;
163 }
164
165 /**
166 * Returns the list of "duplicate nodes" errors for the given selection of node and parent test
167 * @param parentTest The parent test of returned errors
168 * @param nodes The nodes selction to look into
169 * @return the list of "duplicate nodes" errors
170 */
171 public List<TestError> buildTestErrors(Test parentTest, List<Node> nodes) {
172 List<TestError> errors = new ArrayList<>();
173
174 MultiMap<Map<String, String>, OsmPrimitive> mm = new MultiMap<>();
175 for (Node n: nodes) {
176 mm.put(n.getKeys(), n);
177 }
178
179 Map<String, Boolean> typeMap = new HashMap<>();
180
181 // check whether we have multiple nodes at the same position with the same tag set
182 for (Iterator<Map<String, String>> it = mm.keySet().iterator(); it.hasNext();) {
183 Set<OsmPrimitive> primitives = mm.get(it.next());
184 if (primitives.size() > 1) {
185
186 for (String type: TYPES) {
187 typeMap.put(type, Boolean.FALSE);
188 }
189
190 for (OsmPrimitive p : primitives) {
191 if (p.getType() == OsmPrimitiveType.NODE) {
192 Node n = (Node) p;
193 List<OsmPrimitive> lp = n.getReferrers();
194 for (OsmPrimitive sp: lp) {
195 if (sp.getType() == OsmPrimitiveType.WAY) {
196 boolean typed = false;
197 Way w = (Way) sp;
198 Map<String, String> keys = w.getKeys();
199 for (String type: typeMap.keySet()) {
200 if (keys.containsKey(type)) {
201 typeMap.put(type, Boolean.TRUE);
202 typed = true;
203 }
204 }
205 if (!typed) {
206 typeMap.put("none", Boolean.TRUE);
207 }
208 }
209 }
210 }
211 }
212
213 long nbType = typeMap.entrySet().stream().filter(Entry::getValue).count();
214
215 if (nbType > 1) {
216 errors.add(new DuplicateNodeTestError(
217 parentTest,
218 Severity.WARNING,
219 marktr("Mixed type duplicated nodes"),
220 DUPLICATE_NODE_MIXED,
221 primitives
222 ));
223 } else if (typeMap.get("highway")) {
224 errors.add(new DuplicateNodeTestError(
225 parentTest,
226 Severity.ERROR,
227 marktr("Highway duplicated nodes"),
228 DUPLICATE_NODE_HIGHWAY,
229 primitives
230 ));
231 } else if (typeMap.get("railway")) {
232 errors.add(new DuplicateNodeTestError(
233 parentTest,
234 Severity.ERROR,
235 marktr("Railway duplicated nodes"),
236 DUPLICATE_NODE_RAILWAY,
237 primitives
238 ));
239 } else if (typeMap.get("waterway")) {
240 errors.add(new DuplicateNodeTestError(
241 parentTest,
242 Severity.ERROR,
243 marktr("Waterway duplicated nodes"),
244 DUPLICATE_NODE_WATERWAY,
245 primitives
246 ));
247 } else if (typeMap.get("boundary")) {
248 errors.add(new DuplicateNodeTestError(
249 parentTest,
250 Severity.ERROR,
251 marktr("Boundary duplicated nodes"),
252 DUPLICATE_NODE_BOUNDARY,
253 primitives
254 ));
255 } else if (typeMap.get("power")) {
256 errors.add(new DuplicateNodeTestError(
257 parentTest,
258 Severity.ERROR,
259 marktr("Power duplicated nodes"),
260 DUPLICATE_NODE_POWER,
261 primitives
262 ));
263 } else if (typeMap.get("natural")) {
264 errors.add(new DuplicateNodeTestError(
265 parentTest,
266 Severity.ERROR,
267 marktr("Natural duplicated nodes"),
268 DUPLICATE_NODE_NATURAL,
269 primitives
270 ));
271 } else if (typeMap.get("building")) {
272 errors.add(new DuplicateNodeTestError(
273 parentTest,
274 Severity.ERROR,
275 marktr("Building duplicated nodes"),
276 DUPLICATE_NODE_BUILDING,
277 primitives
278 ));
279 } else if (typeMap.get("landuse")) {
280 errors.add(new DuplicateNodeTestError(
281 parentTest,
282 Severity.ERROR,
283 marktr("Landuse duplicated nodes"),
284 DUPLICATE_NODE_LANDUSE,
285 primitives
286 ));
287 } else {
288 errors.add(new DuplicateNodeTestError(
289 parentTest,
290 Severity.WARNING,
291 marktr("Other duplicated nodes"),
292 DUPLICATE_NODE_OTHER,
293 primitives
294 ));
295 }
296 it.remove();
297 }
298 }
299
300 // check whether we have multiple nodes at the same position with differing tag sets
301 if (!mm.isEmpty()) {
302 List<OsmPrimitive> duplicates = new ArrayList<>();
303 for (Set<OsmPrimitive> l: mm.values()) {
304 duplicates.addAll(l);
305 }
306 if (duplicates.size() > 1) {
307 errors.add(new TestError(
308 parentTest,
309 Severity.WARNING,
310 tr("Nodes at same position"),
311 DUPLICATE_NODE,
312 duplicates
313 ));
314 }
315 }
316 return errors;
317 }
318
319 @SuppressWarnings("unchecked")
320 @Override
321 public void visit(Node n) {
322 if (n.isUsable()) {
323 if (potentialDuplicates.get(n) == null) {
324 // in most cases there is just one node at a given position. We
325 // avoid to create an extra object and add remember the node
326 // itself at this position
327 potentialDuplicates.put(n);
328 } else if (potentialDuplicates.get(n) instanceof Node) {
329 // we have an additional node at the same position. Create an extra
330 // object to keep track of the nodes at this position.
331 //
332 Node n1 = (Node) potentialDuplicates.get(n);
333 List<Node> nodes = new ArrayList<>(2);
334 nodes.add(n1);
335 nodes.add(n);
336 potentialDuplicates.put(nodes);
337 } else if (potentialDuplicates.get(n) instanceof List<?>) {
338 // we have multiple nodes at the same position.
339 //
340 List<Node> nodes = (List<Node>) potentialDuplicates.get(n);
341 nodes.add(n);
342 }
343 }
344 }
345
346 /**
347 * Merge the nodes into one.
348 * Copied from UtilsPlugin.MergePointsAction
349 */
350 @Override
351 public Command fixError(TestError testError) {
352 if (!isFixable(testError)) return null;
353 // Diamond operator does not work with Java 9 here
354 @SuppressWarnings("unused")
355 Collection<OsmPrimitive> sel = new LinkedList<OsmPrimitive>(testError.getPrimitives());
356 Set<Node> nodes = new LinkedHashSet<>(OsmPrimitive.getFilteredList(sel, Node.class));
357
358 // Filter nodes that have already been deleted (see #5764 and #5773)
359 for (Iterator<Node> it = nodes.iterator(); it.hasNext();) {
360 if (it.next().isDeleted()) {
361 it.remove();
362 }
363 }
364
365 // Merge only if at least 2 nodes remain
366 if (nodes.size() >= 2) {
367 // Use first existing node or first node if all nodes are new
368 Node target = null;
369 for (Node n: nodes) {
370 if (!n.isNew()) {
371 target = n;
372 break;
373 }
374 }
375 if (target == null) {
376 target = nodes.iterator().next();
377 }
378
379 if (Command.checkOutlyingOrIncompleteOperation(nodes, Collections.singleton(target)) == Command.IS_OK)
380 return MergeNodesAction.mergeNodes(Main.getLayerManager().getEditLayer(), nodes, target);
381 }
382
383 return null; // undoRedo handling done in mergeNodes
384 }
385
386 @Override
387 public boolean isFixable(TestError testError) {
388 if (!(testError.getTester() instanceof DuplicateNode)) return false;
389 // never merge nodes with different tags.
390 if (testError.getCode() == DUPLICATE_NODE) return false;
391 // cannot merge nodes outside download area
392 if (testError.getPrimitives().iterator().next().isOutsideDownloadArea()) return false;
393 // everything else is ok to merge
394 return true;
395 }
396}
Note: See TracBrowser for help on using the repository browser.