1 | // License: GPL. For details, see LICENSE file.
|
---|
2 | package org.openstreetmap.josm.data.validation.tests;
|
---|
3 |
|
---|
4 | import static org.openstreetmap.josm.data.validation.tests.CrossingWays.HIGHWAY;
|
---|
5 | import static org.openstreetmap.josm.data.validation.tests.CrossingWays.RAILWAY;
|
---|
6 | import static org.openstreetmap.josm.data.validation.tests.CrossingWays.WATERWAY;
|
---|
7 | import static org.openstreetmap.josm.tools.I18n.tr;
|
---|
8 |
|
---|
9 | import java.util.ArrayList;
|
---|
10 | import java.util.Collections;
|
---|
11 | import java.util.HashMap;
|
---|
12 | import java.util.Iterator;
|
---|
13 | import java.util.LinkedHashSet;
|
---|
14 | import java.util.List;
|
---|
15 | import java.util.Map;
|
---|
16 | import java.util.Map.Entry;
|
---|
17 | import java.util.Objects;
|
---|
18 | import java.util.Set;
|
---|
19 | import java.util.stream.Collectors;
|
---|
20 |
|
---|
21 | import org.openstreetmap.josm.actions.MergeNodesAction;
|
---|
22 | import org.openstreetmap.josm.command.Command;
|
---|
23 | import org.openstreetmap.josm.data.coor.LatLon;
|
---|
24 | import org.openstreetmap.josm.data.osm.Hash;
|
---|
25 | import org.openstreetmap.josm.data.osm.Node;
|
---|
26 | import org.openstreetmap.josm.data.osm.OsmPrimitive;
|
---|
27 | import org.openstreetmap.josm.data.osm.OsmPrimitiveType;
|
---|
28 | import org.openstreetmap.josm.data.osm.Storage;
|
---|
29 | import org.openstreetmap.josm.data.osm.Way;
|
---|
30 | import org.openstreetmap.josm.data.validation.Severity;
|
---|
31 | import org.openstreetmap.josm.data.validation.Test;
|
---|
32 | import org.openstreetmap.josm.data.validation.TestError;
|
---|
33 | import org.openstreetmap.josm.gui.progress.ProgressMonitor;
|
---|
34 | import org.openstreetmap.josm.spi.preferences.Config;
|
---|
35 | import org.openstreetmap.josm.tools.MultiMap;
|
---|
36 |
|
---|
37 | /**
|
---|
38 | * Tests if there are duplicate nodes
|
---|
39 | *
|
---|
40 | * @author frsantos
|
---|
41 | */
|
---|
42 | public class DuplicateNode extends Test {
|
---|
43 |
|
---|
44 | private static class NodeHash implements Hash<Object, Object> {
|
---|
45 |
|
---|
46 | private final double precision = Config.getPref().getDouble("validator.duplicatenodes.precision", 0.);
|
---|
47 |
|
---|
48 | private LatLon roundCoord(LatLon coor) {
|
---|
49 | return new LatLon(
|
---|
50 | Math.round(coor.lat() / precision) * precision,
|
---|
51 | Math.round(coor.lon() / precision) * precision
|
---|
52 | );
|
---|
53 | }
|
---|
54 |
|
---|
55 | @SuppressWarnings("unchecked")
|
---|
56 | private LatLon getLatLon(Object o) {
|
---|
57 | if (o instanceof Node) {
|
---|
58 | LatLon coor = ((Node) o).getCoor();
|
---|
59 | if (coor == null)
|
---|
60 | return null;
|
---|
61 | if (precision == 0)
|
---|
62 | return coor.getRoundedToOsmPrecision();
|
---|
63 | return roundCoord(coor);
|
---|
64 | } else if (o instanceof List<?>) {
|
---|
65 | LatLon coor = ((List<Node>) o).get(0).getCoor();
|
---|
66 | if (coor == null)
|
---|
67 | return null;
|
---|
68 | if (precision == 0)
|
---|
69 | return coor.getRoundedToOsmPrecision();
|
---|
70 | return roundCoord(coor);
|
---|
71 | } else
|
---|
72 | throw new AssertionError();
|
---|
73 | }
|
---|
74 |
|
---|
75 | @Override
|
---|
76 | public boolean equals(Object k, Object t) {
|
---|
77 | LatLon coorK = getLatLon(k);
|
---|
78 | LatLon coorT = getLatLon(t);
|
---|
79 | return coorK == coorT || (coorK != null && coorT != null && coorK.equals(coorT));
|
---|
80 | }
|
---|
81 |
|
---|
82 | @Override
|
---|
83 | public int getHashCode(Object k) {
|
---|
84 | LatLon coorK = getLatLon(k);
|
---|
85 | return coorK == null ? 0 : coorK.hashCode();
|
---|
86 | }
|
---|
87 | }
|
---|
88 |
|
---|
    /** Error code: nodes at the same position but with differing tag sets */
    protected static final int DUPLICATE_NODE = 1;
    /** Error code: duplicated nodes referred to by ways of several different types */
    protected static final int DUPLICATE_NODE_MIXED = 2;
    /** Error code: duplicated nodes whose parent ways match none of the known types */
    protected static final int DUPLICATE_NODE_OTHER = 3;
    /** Error code: duplicated nodes on a building way */
    protected static final int DUPLICATE_NODE_BUILDING = 10;
    /** Error code: duplicated nodes on a boundary way */
    protected static final int DUPLICATE_NODE_BOUNDARY = 11;
    /** Error code: duplicated nodes on a highway */
    protected static final int DUPLICATE_NODE_HIGHWAY = 12;
    /** Error code: duplicated nodes on a landuse way */
    protected static final int DUPLICATE_NODE_LANDUSE = 13;
    /** Error code: duplicated nodes on a natural way */
    protected static final int DUPLICATE_NODE_NATURAL = 14;
    /** Error code: duplicated nodes on a power way */
    protected static final int DUPLICATE_NODE_POWER = 15;
    /** Error code: duplicated nodes on a railway */
    protected static final int DUPLICATE_NODE_RAILWAY = 16;
    /** Error code: duplicated nodes on a waterway */
    protected static final int DUPLICATE_NODE_WATERWAY = 17;

    /** Way types (tag keys) tracked when classifying duplicated nodes; "none" flags untyped parent ways. */
    private static final String[] TYPES = {
            "none", HIGHWAY, RAILWAY, WATERWAY, "boundary", "power", "natural", "landuse", "building"};

    /** The map of potential duplicates.
     *
     * If there is exactly one node for a given pos, the map includes a pair {@code <pos, Node>}.
     * If there are multiple nodes for a given pos, the map includes a pair
     * {@code <pos, List<Node>>}
     */
    private Storage<Object> potentialDuplicates;
|
---|
111 |
|
---|
    /**
     * Constructs a new {@code DuplicateNode} test with its localized name and description.
     */
    public DuplicateNode() {
        super(tr("Duplicated nodes"),
                tr("This test checks that there are no nodes at the very same location."));
    }
|
---|
119 |
|
---|
    @Override
    public void startTest(ProgressMonitor monitor) {
        super.startTest(monitor);
        // Fresh position-keyed storage for each run; filled by visit(Node),
        // evaluated and released in endTest().
        potentialDuplicates = new Storage<>(new NodeHash());
    }
|
---|
125 |
|
---|
126 | @SuppressWarnings("unchecked")
|
---|
127 | @Override
|
---|
128 | public void endTest() {
|
---|
129 | for (Object v: potentialDuplicates) {
|
---|
130 | if (v instanceof Node) {
|
---|
131 | // just one node at this position. Nothing to report as error
|
---|
132 | continue;
|
---|
133 | }
|
---|
134 |
|
---|
135 | // multiple nodes at the same position -> check if all nodes have a distinct elevation
|
---|
136 | List<Node> nodes = (List<Node>) v;
|
---|
137 | Set<String> eles = nodes.stream()
|
---|
138 | .map(n -> n.get("ele"))
|
---|
139 | .filter(Objects::nonNull)
|
---|
140 | .collect(Collectors.toSet());
|
---|
141 | if (eles.size() == nodes.size()) {
|
---|
142 | // All nodes at this position have a distinct elevation.
|
---|
143 | // This is normal in some particular cases (for example, geodesic points in France)
|
---|
144 | // Do not report this as an error
|
---|
145 | continue;
|
---|
146 | }
|
---|
147 |
|
---|
148 | // report errors
|
---|
149 | errors.addAll(buildTestErrors(this, nodes));
|
---|
150 | }
|
---|
151 | super.endTest();
|
---|
152 | potentialDuplicates = null;
|
---|
153 | }
|
---|
154 |
|
---|
155 | /**
|
---|
156 | * Returns the list of "duplicate nodes" errors for the given selection of node and parent test
|
---|
157 | * @param parentTest The parent test of returned errors
|
---|
158 | * @param nodes The nodes selction to look into
|
---|
159 | * @return the list of "duplicate nodes" errors
|
---|
160 | */
|
---|
161 | public List<TestError> buildTestErrors(Test parentTest, List<Node> nodes) {
|
---|
162 | List<TestError> errors = new ArrayList<>();
|
---|
163 |
|
---|
164 | MultiMap<Map<String, String>, OsmPrimitive> mm = new MultiMap<>();
|
---|
165 | for (Node n: nodes) {
|
---|
166 | mm.put(n.getKeys(), n);
|
---|
167 | }
|
---|
168 |
|
---|
169 | Map<String, Boolean> typeMap = new HashMap<>();
|
---|
170 |
|
---|
171 | // check whether we have multiple nodes at the same position with the same tag set
|
---|
172 | for (Iterator<Map<String, String>> it = mm.keySet().iterator(); it.hasNext();) {
|
---|
173 | Set<OsmPrimitive> primitives = mm.get(it.next());
|
---|
174 | if (primitives.size() > 1) {
|
---|
175 |
|
---|
176 | for (String type: TYPES) {
|
---|
177 | typeMap.put(type, Boolean.FALSE);
|
---|
178 | }
|
---|
179 |
|
---|
180 | for (OsmPrimitive p : primitives) {
|
---|
181 | if (p.getType() == OsmPrimitiveType.NODE) {
|
---|
182 | Node n = (Node) p;
|
---|
183 | List<OsmPrimitive> lp = n.getReferrers();
|
---|
184 | for (OsmPrimitive sp: lp) {
|
---|
185 | if (sp.getType() == OsmPrimitiveType.WAY) {
|
---|
186 | boolean typed = false;
|
---|
187 | Way w = (Way) sp;
|
---|
188 | Map<String, String> keys = w.getKeys();
|
---|
189 | for (Iterator<Entry<String, Boolean>> itt = typeMap.entrySet().iterator(); itt.hasNext();) {
|
---|
190 | Entry<String, Boolean> e = itt.next();
|
---|
191 | if (keys.containsKey(e.getKey())) {
|
---|
192 | e.setValue(Boolean.TRUE);
|
---|
193 | typed = true;
|
---|
194 | }
|
---|
195 | }
|
---|
196 | if (!typed) {
|
---|
197 | typeMap.put("none", Boolean.TRUE);
|
---|
198 | }
|
---|
199 | }
|
---|
200 | }
|
---|
201 | }
|
---|
202 | }
|
---|
203 |
|
---|
204 | long nbType = typeMap.entrySet().stream().filter(Entry::getValue).count();
|
---|
205 |
|
---|
206 | if (nbType > 1) {
|
---|
207 | errors.add(TestError.builder(parentTest, Severity.WARNING, DUPLICATE_NODE_MIXED)
|
---|
208 | .message(tr("Mixed type duplicated nodes"))
|
---|
209 | .primitives(primitives)
|
---|
210 | .build());
|
---|
211 | } else if (typeMap.get(HIGHWAY)) {
|
---|
212 | errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_HIGHWAY)
|
---|
213 | .message(tr("Highway duplicated nodes"))
|
---|
214 | .primitives(primitives)
|
---|
215 | .build());
|
---|
216 | } else if (typeMap.get(RAILWAY)) {
|
---|
217 | errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_RAILWAY)
|
---|
218 | .message(tr("Railway duplicated nodes"))
|
---|
219 | .primitives(primitives)
|
---|
220 | .build());
|
---|
221 | } else if (typeMap.get(WATERWAY)) {
|
---|
222 | errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_WATERWAY)
|
---|
223 | .message(tr("Waterway duplicated nodes"))
|
---|
224 | .primitives(primitives)
|
---|
225 | .build());
|
---|
226 | } else if (typeMap.get("boundary")) {
|
---|
227 | errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_BOUNDARY)
|
---|
228 | .message(tr("Boundary duplicated nodes"))
|
---|
229 | .primitives(primitives)
|
---|
230 | .build());
|
---|
231 | } else if (typeMap.get("power")) {
|
---|
232 | errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_POWER)
|
---|
233 | .message(tr("Power duplicated nodes"))
|
---|
234 | .primitives(primitives)
|
---|
235 | .build());
|
---|
236 | } else if (typeMap.get("natural")) {
|
---|
237 | errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_NATURAL)
|
---|
238 | .message(tr("Natural duplicated nodes"))
|
---|
239 | .primitives(primitives)
|
---|
240 | .build());
|
---|
241 | } else if (typeMap.get("building")) {
|
---|
242 | errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_BUILDING)
|
---|
243 | .message(tr("Building duplicated nodes"))
|
---|
244 | .primitives(primitives)
|
---|
245 | .build());
|
---|
246 | } else if (typeMap.get("landuse")) {
|
---|
247 | errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_LANDUSE)
|
---|
248 | .message(tr("Landuse duplicated nodes"))
|
---|
249 | .primitives(primitives)
|
---|
250 | .build());
|
---|
251 | } else {
|
---|
252 | errors.add(TestError.builder(parentTest, Severity.WARNING, DUPLICATE_NODE_OTHER)
|
---|
253 | .message(tr("Other duplicated nodes"))
|
---|
254 | .primitives(primitives)
|
---|
255 | .build());
|
---|
256 | }
|
---|
257 | it.remove();
|
---|
258 | }
|
---|
259 | }
|
---|
260 |
|
---|
261 | // check whether we have multiple nodes at the same position with differing tag sets
|
---|
262 | if (!mm.isEmpty()) {
|
---|
263 | List<OsmPrimitive> duplicates = new ArrayList<>();
|
---|
264 | for (Set<OsmPrimitive> l: mm.values()) {
|
---|
265 | duplicates.addAll(l);
|
---|
266 | }
|
---|
267 | if (duplicates.size() > 1) {
|
---|
268 | errors.add(TestError.builder(parentTest, Severity.WARNING, DUPLICATE_NODE)
|
---|
269 | .message(tr("Nodes at same position"))
|
---|
270 | .primitives(duplicates)
|
---|
271 | .build());
|
---|
272 | }
|
---|
273 | }
|
---|
274 | return errors;
|
---|
275 | }
|
---|
276 |
|
---|
277 | @SuppressWarnings("unchecked")
|
---|
278 | @Override
|
---|
279 | public void visit(Node n) {
|
---|
280 | if (n.isUsable()) {
|
---|
281 | if (potentialDuplicates.get(n) == null) {
|
---|
282 | // in most cases there is just one node at a given position. We
|
---|
283 | // avoid to create an extra object and add remember the node
|
---|
284 | // itself at this position
|
---|
285 | potentialDuplicates.put(n);
|
---|
286 | } else if (potentialDuplicates.get(n) instanceof Node) {
|
---|
287 | // we have an additional node at the same position. Create an extra
|
---|
288 | // object to keep track of the nodes at this position.
|
---|
289 | //
|
---|
290 | Node n1 = (Node) potentialDuplicates.get(n);
|
---|
291 | List<Node> nodes = new ArrayList<>(2);
|
---|
292 | nodes.add(n1);
|
---|
293 | nodes.add(n);
|
---|
294 | potentialDuplicates.put(nodes);
|
---|
295 | } else if (potentialDuplicates.get(n) instanceof List<?>) {
|
---|
296 | // we have multiple nodes at the same position.
|
---|
297 | //
|
---|
298 | List<Node> nodes = (List<Node>) potentialDuplicates.get(n);
|
---|
299 | nodes.add(n);
|
---|
300 | }
|
---|
301 | }
|
---|
302 | }
|
---|
303 |
|
---|
304 | /**
|
---|
305 | * Merge the nodes into one.
|
---|
306 | * Copied from UtilsPlugin.MergePointsAction
|
---|
307 | */
|
---|
308 | @Override
|
---|
309 | public Command fixError(TestError testError) {
|
---|
310 | final Set<Node> nodes = testError.primitives(Node.class)
|
---|
311 | // Filter nodes that have already been deleted (see #5764 and #5773)
|
---|
312 | .filter(n -> !n.isDeleted())
|
---|
313 | .collect(Collectors.toCollection(LinkedHashSet::new));
|
---|
314 |
|
---|
315 | // Merge only if at least 2 nodes remain
|
---|
316 | if (nodes.size() >= 2) {
|
---|
317 | // Use first existing node or first node if all nodes are new
|
---|
318 | Node target = nodes.stream()
|
---|
319 | .filter(n -> !n.isNew())
|
---|
320 | .findFirst()
|
---|
321 | .orElseGet(() -> nodes.iterator().next());
|
---|
322 |
|
---|
323 | if (Command.checkOutlyingOrIncompleteOperation(nodes, Collections.singleton(target)) == Command.IS_OK)
|
---|
324 | return MergeNodesAction.mergeNodes(nodes, target);
|
---|
325 | }
|
---|
326 |
|
---|
327 | return null; // undoRedo handling done in mergeNodes
|
---|
328 | }
|
---|
329 |
|
---|
330 | @Override
|
---|
331 | public boolean isFixable(TestError testError) {
|
---|
332 | if (!(testError.getTester() instanceof DuplicateNode)) return false;
|
---|
333 | // never merge nodes with different tags.
|
---|
334 | if (testError.getCode() == DUPLICATE_NODE) return false;
|
---|
335 | // cannot merge nodes outside download area
|
---|
336 | final Iterator<? extends OsmPrimitive> it = testError.getPrimitives().iterator();
|
---|
337 | return it.hasNext() && !it.next().isOutsideDownloadArea();
|
---|
338 | // everything else is ok to merge
|
---|
339 | }
|
---|
340 | }
|
---|