001// License: GPL. For details, see LICENSE file.
002package org.openstreetmap.josm.data.validation.tests;
003
004import static org.openstreetmap.josm.data.validation.tests.CrossingWays.HIGHWAY;
005import static org.openstreetmap.josm.data.validation.tests.CrossingWays.RAILWAY;
006import static org.openstreetmap.josm.data.validation.tests.CrossingWays.WATERWAY;
007import static org.openstreetmap.josm.tools.I18n.tr;
008
009import java.util.ArrayList;
010import java.util.Collection;
011import java.util.Collections;
012import java.util.HashMap;
013import java.util.HashSet;
014import java.util.Iterator;
015import java.util.LinkedHashSet;
016import java.util.LinkedList;
017import java.util.List;
018import java.util.Map;
019import java.util.Map.Entry;
020import java.util.Set;
021
022import org.openstreetmap.josm.actions.MergeNodesAction;
023import org.openstreetmap.josm.command.Command;
024import org.openstreetmap.josm.data.coor.LatLon;
025import org.openstreetmap.josm.data.osm.AbstractPrimitive;
026import org.openstreetmap.josm.data.osm.Hash;
027import org.openstreetmap.josm.data.osm.Node;
028import org.openstreetmap.josm.data.osm.OsmPrimitive;
029import org.openstreetmap.josm.data.osm.OsmPrimitiveType;
030import org.openstreetmap.josm.data.osm.Storage;
031import org.openstreetmap.josm.data.osm.Way;
032import org.openstreetmap.josm.data.validation.Severity;
033import org.openstreetmap.josm.data.validation.Test;
034import org.openstreetmap.josm.data.validation.TestError;
035import org.openstreetmap.josm.gui.progress.ProgressMonitor;
036import org.openstreetmap.josm.spi.preferences.Config;
037import org.openstreetmap.josm.tools.MultiMap;
038
039/**
040 * Tests if there are duplicate nodes
041 *
042 * @author frsantos
043 */
044public class DuplicateNode extends Test {
045
046    private static class NodeHash implements Hash<Object, Object> {
047
048        private final double precision = Config.getPref().getDouble("validator.duplicatenodes.precision", 0.);
049
050        private LatLon roundCoord(LatLon coor) {
051            return new LatLon(
052                    Math.round(coor.lat() / precision) * precision,
053                    Math.round(coor.lon() / precision) * precision
054                    );
055        }
056
057        @SuppressWarnings("unchecked")
058        private LatLon getLatLon(Object o) {
059            if (o instanceof Node) {
060                LatLon coor = ((Node) o).getCoor();
061                if (coor == null)
062                    return null;
063                if (precision == 0)
064                    return coor.getRoundedToOsmPrecision();
065                return roundCoord(coor);
066            } else if (o instanceof List<?>) {
067                LatLon coor = ((List<Node>) o).get(0).getCoor();
068                if (coor == null)
069                    return null;
070                if (precision == 0)
071                    return coor.getRoundedToOsmPrecision();
072                return roundCoord(coor);
073            } else
074                throw new AssertionError();
075        }
076
077        @Override
078        public boolean equals(Object k, Object t) {
079            LatLon coorK = getLatLon(k);
080            LatLon coorT = getLatLon(t);
081            return coorK == coorT || (coorK != null && coorT != null && coorK.equals(coorT));
082        }
083
084        @Override
085        public int getHashCode(Object k) {
086            LatLon coorK = getLatLon(k);
087            return coorK == null ? 0 : coorK.hashCode();
088        }
089    }
090
    /** Error code: nodes at the same position but with differing tag sets. */
    protected static final int DUPLICATE_NODE = 1;
    /** Error code: duplicated nodes referenced by ways of several different types. */
    protected static final int DUPLICATE_NODE_MIXED = 2;
    /** Error code: duplicated nodes whose parent ways match none of the known types. */
    protected static final int DUPLICATE_NODE_OTHER = 3;
    /** Error code: duplicated nodes on a building way. */
    protected static final int DUPLICATE_NODE_BUILDING = 10;
    /** Error code: duplicated nodes on a boundary way. */
    protected static final int DUPLICATE_NODE_BOUNDARY = 11;
    /** Error code: duplicated nodes on a highway. */
    protected static final int DUPLICATE_NODE_HIGHWAY = 12;
    /** Error code: duplicated nodes on a landuse way. */
    protected static final int DUPLICATE_NODE_LANDUSE = 13;
    /** Error code: duplicated nodes on a natural way. */
    protected static final int DUPLICATE_NODE_NATURAL = 14;
    /** Error code: duplicated nodes on a power way. */
    protected static final int DUPLICATE_NODE_POWER = 15;
    /** Error code: duplicated nodes on a railway. */
    protected static final int DUPLICATE_NODE_RAILWAY = 16;
    /** Error code: duplicated nodes on a waterway. */
    protected static final int DUPLICATE_NODE_WATERWAY = 17;

    /** Way tag keys used to categorize duplicated nodes; "none" flags parent ways matching no key. */
    private static final String[] TYPES = {
            "none", HIGHWAY, RAILWAY, WATERWAY, "boundary", "power", "natural", "landuse", "building"};

    /** The map of potential duplicates, keyed by (rounded) position via {@link NodeHash}.
     *
     * If there is exactly one node for a given pos, the map stores the {@link Node} itself.
     * If there are multiple nodes for a given pos, the map stores a {@code List<Node>}
     * of all nodes at that position (see {@link #visit(Node)}).
     */
    private Storage<Object> potentialDuplicates;
113
    /**
     * Constructs a new {@code DuplicateNode} test.
     */
    public DuplicateNode() {
        super(tr("Duplicated nodes"),
                tr("This test checks that there are no nodes at the very same location."));
    }
121
    @Override
    public void startTest(ProgressMonitor monitor) {
        super.startTest(monitor);
        // Fresh storage per run; NodeHash buckets entries by their (rounded) position.
        potentialDuplicates = new Storage<>(new NodeHash());
    }
127
128    @SuppressWarnings("unchecked")
129    @Override
130    public void endTest() {
131        for (Object v: potentialDuplicates) {
132            if (v instanceof Node) {
133                // just one node at this position. Nothing to report as error
134                continue;
135            }
136
137            // multiple nodes at the same position -> check if all nodes have a distinct elevation
138            List<Node> nodes = (List<Node>) v;
139            Set<String> eles = new HashSet<>();
140            for (Node n : nodes) {
141                String ele = n.get("ele");
142                if (ele != null) {
143                    eles.add(ele);
144                }
145            }
146            if (eles.size() == nodes.size()) {
147                // All nodes at this position have a distinct elevation.
148                // This is normal in some particular cases (for example, geodesic points in France)
149                // Do not report this as an error
150                continue;
151            }
152
153            // report errors
154            errors.addAll(buildTestErrors(this, nodes));
155        }
156        super.endTest();
157        potentialDuplicates = null;
158    }
159
160    /**
161     * Returns the list of "duplicate nodes" errors for the given selection of node and parent test
162     * @param parentTest The parent test of returned errors
163     * @param nodes The nodes selction to look into
164     * @return the list of "duplicate nodes" errors
165     */
166    public List<TestError> buildTestErrors(Test parentTest, List<Node> nodes) {
167        List<TestError> errors = new ArrayList<>();
168
169        MultiMap<Map<String, String>, OsmPrimitive> mm = new MultiMap<>();
170        for (Node n: nodes) {
171            mm.put(n.getKeys(), n);
172        }
173
174        Map<String, Boolean> typeMap = new HashMap<>();
175
176        // check whether we have multiple nodes at the same position with the same tag set
177        for (Iterator<Map<String, String>> it = mm.keySet().iterator(); it.hasNext();) {
178            Set<OsmPrimitive> primitives = mm.get(it.next());
179            if (primitives.size() > 1) {
180
181                for (String type: TYPES) {
182                    typeMap.put(type, Boolean.FALSE);
183                }
184
185                for (OsmPrimitive p : primitives) {
186                    if (p.getType() == OsmPrimitiveType.NODE) {
187                        Node n = (Node) p;
188                        List<OsmPrimitive> lp = n.getReferrers();
189                        for (OsmPrimitive sp: lp) {
190                            if (sp.getType() == OsmPrimitiveType.WAY) {
191                                boolean typed = false;
192                                Way w = (Way) sp;
193                                Map<String, String> keys = w.getKeys();
194                                for (String type: typeMap.keySet()) {
195                                    if (keys.containsKey(type)) {
196                                        typeMap.put(type, Boolean.TRUE);
197                                        typed = true;
198                                    }
199                                }
200                                if (!typed) {
201                                    typeMap.put("none", Boolean.TRUE);
202                                }
203                            }
204                        }
205                    }
206                }
207
208                long nbType = typeMap.entrySet().stream().filter(Entry::getValue).count();
209
210                if (nbType > 1) {
211                    errors.add(TestError.builder(parentTest, Severity.WARNING, DUPLICATE_NODE_MIXED)
212                            .message(tr("Mixed type duplicated nodes"))
213                            .primitives(primitives)
214                            .build());
215                } else if (typeMap.get(HIGHWAY)) {
216                    errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_HIGHWAY)
217                            .message(tr("Highway duplicated nodes"))
218                            .primitives(primitives)
219                            .build());
220                } else if (typeMap.get(RAILWAY)) {
221                    errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_RAILWAY)
222                            .message(tr("Railway duplicated nodes"))
223                            .primitives(primitives)
224                            .build());
225                } else if (typeMap.get(WATERWAY)) {
226                    errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_WATERWAY)
227                            .message(tr("Waterway duplicated nodes"))
228                            .primitives(primitives)
229                            .build());
230                } else if (typeMap.get("boundary")) {
231                    errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_BOUNDARY)
232                            .message(tr("Boundary duplicated nodes"))
233                            .primitives(primitives)
234                            .build());
235                } else if (typeMap.get("power")) {
236                    errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_POWER)
237                            .message(tr("Power duplicated nodes"))
238                            .primitives(primitives)
239                            .build());
240                } else if (typeMap.get("natural")) {
241                    errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_NATURAL)
242                            .message(tr("Natural duplicated nodes"))
243                            .primitives(primitives)
244                            .build());
245                } else if (typeMap.get("building")) {
246                    errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_BUILDING)
247                            .message(tr("Building duplicated nodes"))
248                            .primitives(primitives)
249                            .build());
250                } else if (typeMap.get("landuse")) {
251                    errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_LANDUSE)
252                            .message(tr("Landuse duplicated nodes"))
253                            .primitives(primitives)
254                            .build());
255                } else {
256                    errors.add(TestError.builder(parentTest, Severity.WARNING, DUPLICATE_NODE_OTHER)
257                            .message(tr("Other duplicated nodes"))
258                            .primitives(primitives)
259                            .build());
260                }
261                it.remove();
262            }
263        }
264
265        // check whether we have multiple nodes at the same position with differing tag sets
266        if (!mm.isEmpty()) {
267            List<OsmPrimitive> duplicates = new ArrayList<>();
268            for (Set<OsmPrimitive> l: mm.values()) {
269                duplicates.addAll(l);
270            }
271            if (duplicates.size() > 1) {
272                errors.add(TestError.builder(parentTest, Severity.WARNING, DUPLICATE_NODE)
273                        .message(tr("Nodes at same position"))
274                        .primitives(duplicates)
275                        .build());
276            }
277        }
278        return errors;
279    }
280
281    @SuppressWarnings("unchecked")
282    @Override
283    public void visit(Node n) {
284        if (n.isUsable()) {
285            if (potentialDuplicates.get(n) == null) {
286                // in most cases there is just one node at a given position. We
287                // avoid to create an extra object and add remember the node
288                // itself at this position
289                potentialDuplicates.put(n);
290            } else if (potentialDuplicates.get(n) instanceof Node) {
291                // we have an additional node at the same position. Create an extra
292                // object to keep track of the nodes at this position.
293                //
294                Node n1 = (Node) potentialDuplicates.get(n);
295                List<Node> nodes = new ArrayList<>(2);
296                nodes.add(n1);
297                nodes.add(n);
298                potentialDuplicates.put(nodes);
299            } else if (potentialDuplicates.get(n) instanceof List<?>) {
300                // we have multiple nodes at the same position.
301                //
302                List<Node> nodes = (List<Node>) potentialDuplicates.get(n);
303                nodes.add(n);
304            }
305        }
306    }
307
308    /**
309     * Merge the nodes into one.
310     * Copied from UtilsPlugin.MergePointsAction
311     */
312    @Override
313    public Command fixError(TestError testError) {
314        Collection<OsmPrimitive> sel = new LinkedList<>(testError.getPrimitives());
315        Set<Node> nodes = new LinkedHashSet<>(OsmPrimitive.getFilteredList(sel, Node.class));
316
317        // Filter nodes that have already been deleted (see #5764 and #5773)
318        nodes.removeIf(AbstractPrimitive::isDeleted);
319
320        // Merge only if at least 2 nodes remain
321        if (nodes.size() >= 2) {
322            // Use first existing node or first node if all nodes are new
323            Node target = null;
324            for (Node n: nodes) {
325                if (!n.isNew()) {
326                    target = n;
327                    break;
328                }
329            }
330            if (target == null) {
331                target = nodes.iterator().next();
332            }
333
334            if (Command.checkOutlyingOrIncompleteOperation(nodes, Collections.singleton(target)) == Command.IS_OK)
335                return MergeNodesAction.mergeNodes(nodes, target);
336        }
337
338        return null; // undoRedo handling done in mergeNodes
339    }
340
341    @Override
342    public boolean isFixable(TestError testError) {
343        if (!(testError.getTester() instanceof DuplicateNode)) return false;
344        // never merge nodes with different tags.
345        if (testError.getCode() == DUPLICATE_NODE) return false;
346        // cannot merge nodes outside download area
347        final Iterator<? extends OsmPrimitive> it = testError.getPrimitives().iterator();
348        return it.hasNext() && !it.next().isOutsideDownloadArea();
349        // everything else is ok to merge
350    }
351}