1 | // License: GPL. For details, see LICENSE file.
|
---|
2 | package org.openstreetmap.josm.data.validation.tests;
|
---|
3 |
|
---|
4 | import static org.openstreetmap.josm.data.validation.tests.CrossingWays.HIGHWAY;
|
---|
5 | import static org.openstreetmap.josm.data.validation.tests.CrossingWays.RAILWAY;
|
---|
6 | import static org.openstreetmap.josm.data.validation.tests.CrossingWays.WATERWAY;
|
---|
7 | import static org.openstreetmap.josm.tools.I18n.tr;
|
---|
8 |
|
---|
9 | import java.util.ArrayList;
|
---|
10 | import java.util.HashMap;
|
---|
11 | import java.util.Iterator;
|
---|
12 | import java.util.LinkedHashSet;
|
---|
13 | import java.util.List;
|
---|
14 | import java.util.Map;
|
---|
15 | import java.util.Map.Entry;
|
---|
16 | import java.util.Objects;
|
---|
17 | import java.util.Set;
|
---|
18 | import java.util.stream.Collectors;
|
---|
19 | import java.util.stream.Stream;
|
---|
20 |
|
---|
21 | import org.openstreetmap.josm.actions.MergeNodesAction;
|
---|
22 | import org.openstreetmap.josm.command.Command;
|
---|
23 | import org.openstreetmap.josm.data.coor.LatLon;
|
---|
24 | import org.openstreetmap.josm.data.osm.Hash;
|
---|
25 | import org.openstreetmap.josm.data.osm.Node;
|
---|
26 | import org.openstreetmap.josm.data.osm.OsmPrimitive;
|
---|
27 | import org.openstreetmap.josm.data.osm.Storage;
|
---|
28 | import org.openstreetmap.josm.data.osm.Way;
|
---|
29 | import org.openstreetmap.josm.data.validation.Severity;
|
---|
30 | import org.openstreetmap.josm.data.validation.Test;
|
---|
31 | import org.openstreetmap.josm.data.validation.TestError;
|
---|
32 | import org.openstreetmap.josm.gui.progress.ProgressMonitor;
|
---|
33 | import org.openstreetmap.josm.spi.preferences.Config;
|
---|
34 | import org.openstreetmap.josm.tools.MultiMap;
|
---|
35 |
|
---|
36 | /**
|
---|
37 | * Tests if there are duplicate nodes
|
---|
38 | *
|
---|
39 | * @author frsantos
|
---|
40 | */
|
---|
41 | public class DuplicateNode extends Test {
|
---|
42 |
|
---|
43 | protected static class NodeHash implements Hash<Object, Object> {
|
---|
44 |
|
---|
45 | /**
|
---|
46 | * Rounding on OSM server and via {@link LatLon#roundToOsmPrecision} sometimes differs in the last digit by 1.
|
---|
47 | * Thus, for the duplicate node test, we reduce the precision by one to find errors before uploading.
|
---|
48 | * @see LatLon#MAX_SERVER_INV_PRECISION
|
---|
49 | */
|
---|
50 | private final double precision =
|
---|
51 | 1 / Config.getPref().getDouble("validator.duplicatenodes.precision", LatLon.MAX_SERVER_PRECISION * 10);
|
---|
52 |
|
---|
53 | /**
|
---|
54 | * Returns the rounded coordinated according to {@link #precision}
|
---|
55 | * @see LatLon#roundToOsmPrecision
|
---|
56 | */
|
---|
57 | protected LatLon roundCoord(LatLon coor) {
|
---|
58 | return new LatLon(
|
---|
59 | Math.round(coor.lat() * precision) / precision,
|
---|
60 | Math.round(coor.lon() * precision) / precision
|
---|
61 | );
|
---|
62 | }
|
---|
63 |
|
---|
64 | @SuppressWarnings("unchecked")
|
---|
65 | private LatLon getLatLon(Object o) {
|
---|
66 | if (o instanceof Node) {
|
---|
67 | LatLon coor = ((Node) o).getCoor();
|
---|
68 | if (coor == null)
|
---|
69 | return null;
|
---|
70 | if (precision == 0)
|
---|
71 | return coor.getRoundedToOsmPrecision();
|
---|
72 | return roundCoord(coor);
|
---|
73 | } else if (o instanceof List<?>) {
|
---|
74 | LatLon coor = ((List<Node>) o).get(0).getCoor();
|
---|
75 | if (coor == null)
|
---|
76 | return null;
|
---|
77 | if (precision == 0)
|
---|
78 | return coor.getRoundedToOsmPrecision();
|
---|
79 | return roundCoord(coor);
|
---|
80 | } else
|
---|
81 | throw new AssertionError();
|
---|
82 | }
|
---|
83 |
|
---|
84 | @Override
|
---|
85 | public boolean equals(Object k, Object t) {
|
---|
86 | LatLon coorK = getLatLon(k);
|
---|
87 | LatLon coorT = getLatLon(t);
|
---|
88 | return coorK == coorT || (coorK != null && coorT != null && coorK.equals(coorT));
|
---|
89 | }
|
---|
90 |
|
---|
91 | @Override
|
---|
92 | public int getHashCode(Object k) {
|
---|
93 | LatLon coorK = getLatLon(k);
|
---|
94 | return coorK == null ? 0 : coorK.hashCode();
|
---|
95 | }
|
---|
96 | }
|
---|
97 |
|
---|
    /** Error code: nodes at the same position but with differing tag sets. */
    protected static final int DUPLICATE_NODE = 1;
    /** Error code: identically-tagged duplicates whose parent ways are of several different types. */
    protected static final int DUPLICATE_NODE_MIXED = 2;
    /** Error code: identically-tagged duplicates not matching any specific way type below. */
    protected static final int DUPLICATE_NODE_OTHER = 3;
    /** Error code: identically-tagged duplicates on building ways. */
    protected static final int DUPLICATE_NODE_BUILDING = 10;
    /** Error code: identically-tagged duplicates on boundary ways. */
    protected static final int DUPLICATE_NODE_BOUNDARY = 11;
    /** Error code: identically-tagged duplicates on highways. */
    protected static final int DUPLICATE_NODE_HIGHWAY = 12;
    /** Error code: identically-tagged duplicates on landuse ways. */
    protected static final int DUPLICATE_NODE_LANDUSE = 13;
    /** Error code: identically-tagged duplicates on natural ways. */
    protected static final int DUPLICATE_NODE_NATURAL = 14;
    /** Error code: identically-tagged duplicates on power ways. */
    protected static final int DUPLICATE_NODE_POWER = 15;
    /** Error code: identically-tagged duplicates on railways. */
    protected static final int DUPLICATE_NODE_RAILWAY = 16;
    /** Error code: identically-tagged duplicates on waterways. */
    protected static final int DUPLICATE_NODE_WATERWAY = 17;

    /** Way keys used to classify duplicates in {@link #buildTestErrors}; "none" flags parent ways carrying no such key. */
    private static final String[] TYPES = {
            "none", HIGHWAY, RAILWAY, WATERWAY, "boundary", "power", "natural", "landuse", "building"};
|
---|
112 |
|
---|
    /**
     * The map of potential duplicates.
     *
     * If there is exactly one node for a given position, the map contains a pair
     * {@code <pos, Node>}. If there are multiple nodes for a given position, it
     * contains a pair {@code <pos, List<Node>>}.
     */
    private Storage<Object> potentialDuplicates;
|
---|
120 |
|
---|
    /**
     * Constructs a new {@code DuplicateNode} test.
     */
    public DuplicateNode() {
        super(tr("Duplicated nodes"),
                tr("This test checks that there are no nodes at the very same location."));
    }
|
---|
128 |
|
---|
    @Override
    public void startTest(ProgressMonitor monitor) {
        super.startTest(monitor);
        // The Storage keyed by NodeHash groups visited nodes by their (rounded) position.
        potentialDuplicates = new Storage<>(new NodeHash());
    }
|
---|
134 |
|
---|
135 | @SuppressWarnings("unchecked")
|
---|
136 | @Override
|
---|
137 | public void endTest() {
|
---|
138 | for (Object v: potentialDuplicates) {
|
---|
139 | if (v instanceof Node) {
|
---|
140 | // just one node at this position. Nothing to report as error
|
---|
141 | continue;
|
---|
142 | }
|
---|
143 |
|
---|
144 | // multiple nodes at the same position -> check if all nodes have a distinct elevation
|
---|
145 | List<Node> nodes = (List<Node>) v;
|
---|
146 | Set<String> eles = nodes.stream()
|
---|
147 | .map(n -> n.get("ele"))
|
---|
148 | .filter(Objects::nonNull)
|
---|
149 | .collect(Collectors.toSet());
|
---|
150 | if (eles.size() == nodes.size()) {
|
---|
151 | // All nodes at this position have a distinct elevation.
|
---|
152 | // This is normal in some particular cases (for example, geodesic points in France)
|
---|
153 | // Do not report this as an error
|
---|
154 | continue;
|
---|
155 | }
|
---|
156 |
|
---|
157 | // report errors
|
---|
158 | errors.addAll(buildTestErrors(this, nodes));
|
---|
159 | }
|
---|
160 | super.endTest();
|
---|
161 | potentialDuplicates = null;
|
---|
162 | }
|
---|
163 |
|
---|
    /**
     * Returns the list of "duplicate nodes" errors for the given selection of node and parent test
     * @param parentTest The parent test of returned errors
     * @param nodes The nodes selection to look into
     * @return the list of "duplicate nodes" errors
     */
    public List<TestError> buildTestErrors(Test parentTest, List<Node> nodes) {
        List<TestError> errors = new ArrayList<>();

        // Group the nodes by their complete tag set: only identically-tagged nodes
        // can safely be auto-merged later on.
        MultiMap<Map<String, String>, Node> mm = new MultiMap<>();
        for (Node n: nodes) {
            mm.put(n.getKeys(), n);
        }

        // Per-group flags, keyed by the entries of TYPES; reused across iterations.
        Map<String, Boolean> typeMap = new HashMap<>();

        // check whether we have multiple nodes at the same position with the same tag set
        for (Iterator<Map<String, String>> it = mm.keySet().iterator(); it.hasNext();) {
            Set<Node> primitives = mm.get(it.next());
            if (primitives.size() > 1) {

                // Reset the classification flags for this group of duplicates.
                for (String type: TYPES) {
                    typeMap.put(type, Boolean.FALSE);
                }

                // Flag every way type (highway, railway, ...) found on any parent way of
                // any duplicate node; "none" records parent ways without a known type key.
                for (Node n : primitives) {
                    for (Way w: n.getParentWays()) {
                        boolean typed = false;
                        Map<String, String> keys = w.getKeys();
                        for (Entry<String, Boolean> e : typeMap.entrySet()) {
                            if (keys.containsKey(e.getKey())) {
                                e.setValue(Boolean.TRUE);
                                typed = true;
                            }
                        }
                        if (!typed) {
                            typeMap.put("none", Boolean.TRUE);
                        }
                    }
                }

                // Choose error code and severity from the way types involved;
                // the order of this if/else chain defines the classification priority.
                long nbType = typeMap.entrySet().stream().filter(Entry::getValue).count();
                final TestError.Builder builder;
                if (nbType > 1) {
                    builder = TestError.builder(parentTest, Severity.WARNING, DUPLICATE_NODE_MIXED)
                            .message(tr("Mixed type duplicated nodes"));
                } else if (Boolean.TRUE.equals(typeMap.get(HIGHWAY))) {
                    builder = TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_HIGHWAY)
                            .message(tr("Highway duplicated nodes"));
                } else if (Boolean.TRUE.equals(typeMap.get(RAILWAY))) {
                    builder = TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_RAILWAY)
                            .message(tr("Railway duplicated nodes"));
                } else if (Boolean.TRUE.equals(typeMap.get(WATERWAY))) {
                    builder = TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_WATERWAY)
                            .message(tr("Waterway duplicated nodes"));
                } else if (Boolean.TRUE.equals(typeMap.get("boundary"))) {
                    builder = TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_BOUNDARY)
                            .message(tr("Boundary duplicated nodes"));
                } else if (Boolean.TRUE.equals(typeMap.get("power"))) {
                    builder = TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_POWER)
                            .message(tr("Power duplicated nodes"));
                } else if (Boolean.TRUE.equals(typeMap.get("natural"))) {
                    builder = TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_NATURAL)
                            .message(tr("Natural duplicated nodes"));
                } else if (Boolean.TRUE.equals(typeMap.get("building"))) {
                    builder = TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_BUILDING)
                            .message(tr("Building duplicated nodes"));
                } else if (Boolean.TRUE.equals(typeMap.get("landuse"))) {
                    builder = TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_LANDUSE)
                            .message(tr("Landuse duplicated nodes"));
                } else {
                    builder = TestError.builder(parentTest, Severity.WARNING, DUPLICATE_NODE_OTHER)
                            .message(tr("Other duplicated nodes"));
                }
                errors.add(builder.primitives(primitives).build());
                // Remove the handled group so that only differing-tag-set groups remain below.
                it.remove();
            }
        }

        // check whether we have multiple nodes at the same position with differing tag sets
        if (!mm.isEmpty()) {
            List<OsmPrimitive> duplicates = new ArrayList<>();
            for (Set<Node> l: mm.values()) {
                duplicates.addAll(l);
            }
            if (duplicates.size() > 1) {
                errors.add(TestError.builder(parentTest, Severity.WARNING, DUPLICATE_NODE)
                        .message(tr("Nodes at same position"))
                        .primitives(duplicates)
                        .build());
            }
        }
        return errors;
    }
|
---|
258 |
|
---|
259 | @SuppressWarnings("unchecked")
|
---|
260 | @Override
|
---|
261 | public void visit(Node n) {
|
---|
262 | if (n.isUsable()) {
|
---|
263 | Object old = potentialDuplicates.get(n);
|
---|
264 | if (old == null) {
|
---|
265 | // in most cases there is just one node at a given position. We
|
---|
266 | // avoid to create an extra object and add remember the node
|
---|
267 | // itself at this position
|
---|
268 | potentialDuplicates.put(n);
|
---|
269 | } else if (old instanceof Node) {
|
---|
270 | // we have an additional node at the same position. Create an extra
|
---|
271 | // object to keep track of the nodes at this position.
|
---|
272 | //
|
---|
273 | potentialDuplicates.put(Stream.of((Node) old, n).collect(Collectors.toList()));
|
---|
274 | } else {
|
---|
275 | // we have more than two nodes at the same position.
|
---|
276 | //
|
---|
277 | ((List<Node>) old).add(n);
|
---|
278 | }
|
---|
279 | }
|
---|
280 | }
|
---|
281 |
|
---|
282 | /**
|
---|
283 | * Merge the nodes into one.
|
---|
284 | * Copied from UtilsPlugin.MergePointsAction
|
---|
285 | */
|
---|
286 | @Override
|
---|
287 | public Command fixError(TestError testError) {
|
---|
288 | if (!isFixable(testError))
|
---|
289 | return null;
|
---|
290 | final Set<Node> nodes = testError.primitives(Node.class)
|
---|
291 | // Filter nodes that have already been deleted (see #5764 and #5773)
|
---|
292 | .filter(n -> !n.isDeleted())
|
---|
293 | .collect(Collectors.toCollection(LinkedHashSet::new));
|
---|
294 |
|
---|
295 | // Use first existing node or first node if all nodes are new
|
---|
296 | Node target = nodes.stream()
|
---|
297 | .filter(n -> !n.isNew())
|
---|
298 | .findFirst()
|
---|
299 | .orElseGet(() -> nodes.iterator().next());
|
---|
300 |
|
---|
301 | return MergeNodesAction.mergeNodes(nodes, target);
|
---|
302 | }
|
---|
303 |
|
---|
304 | @Override
|
---|
305 | public boolean isFixable(TestError testError) {
|
---|
306 | if (!(testError.getTester() instanceof DuplicateNode)) return false;
|
---|
307 | // never merge nodes with different tags.
|
---|
308 | if (testError.getCode() == DUPLICATE_NODE) return false;
|
---|
309 | // cannot merge nodes outside download area
|
---|
310 | return testError.getPrimitives().stream().filter(p -> !p.isDeleted()).count() > 1
|
---|
311 | && Command.checkOutlyingOrIncompleteOperation(testError.getPrimitives(), null) == Command.IS_OK;
|
---|
312 | // everything else is ok to merge
|
---|
313 | }
|
---|
314 | }
|
---|