Mirror of https://github.com/FoggedLens/iD.git, synced 2026-02-13 01:02:58 +00:00
Move the location index and resolver into a global coreLocations
@@ -4,6 +4,7 @@ export { coreDifference } from './difference';
export { coreGraph } from './graph';
export { coreHistory } from './history';
export { coreLocalizer, t, localizer } from './localizer';
export { coreLocations } from './locations';
export { prefs } from './preferences';
export { coreTree } from './tree';
export { coreUploader } from './uploader';
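Callers can reach the new location index either through the `coreLocations` factory re-exported here, or through the shared `locationManager` singleton created in `locations.js`. A minimal sketch of both access paths (illustrative only; import paths depend on the calling module):

import { coreLocations } from './core';               // factory: builds an independent index
import { locationManager } from './core/locations';   // shared singleton used throughout iD

const myIndex = coreLocations();                           // e.g. for an isolated index in tests
locationManager.mergeLocationSets([{ include: ['us'] }]);  // typical use of the singleton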
209 modules/core/locations.js Normal file
@@ -0,0 +1,209 @@
import LocationConflation from '@ideditor/location-conflation';
import whichPolygon from 'which-polygon';
import calcArea from '@mapbox/geojson-area';
import { utilArrayChunk } from '../util';

let _mainLocations = coreLocations(); // singleton
export { _mainLocations as locationManager };

//
// `coreLocations` maintains an internal index of all the boundaries/geofences used by iD.
// It's used by presets, community index, and background imagery to know where in the world these things are valid.
// These geofences should be defined by `locationSet` objects:
//
//   let locationSet = {
//     include: [ Array of locations ],
//     exclude: [ Array of locations ]
//   };
//
// For more info on the location-conflation and country-coder projects, see:
//   https://github.com/ideditor/location-conflation
//   https://github.com/ideditor/country-coder
//
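//
// For example (an illustrative locationSet, not taken from this commit), a feature that is
// valid in the United States but not in Puerto Rico could be expressed as:
//
//   let locationSet = { include: ['us'], exclude: ['pr'] };
//
// Each entry may be anything location-conflation understands: a country-coder code like
// 'us' or 'Q30', a custom '*.geojson' id, or a point (see the projects linked above).
//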
export function coreLocations() {
  let _this = {};
  let _resolvedFeatures = {};                // cache of *resolved* locationSet features
  let _loco = new LocationConflation();      // instance of a location-conflation resolver
  let _wp = whichPolygon({ features: [] });  // instance of a which-polygon index

  let _queue = [];
  let _deferred = new Set();
  let _inProcess;

  //
  // `mergeCustomGeoJSON`
  // Accepts a FeatureCollection-like object containing custom locations
  // Each feature must have a filename-like `id`, for example: `something.geojson`
  //
  //  {
  //    "type": "FeatureCollection",
  //    "features": [
  //      {
  //        "type": "Feature",
  //        "id": "philly_metro.geojson",
  //        "properties": { … },
  //        "geometry": { … }
  //      }
  //    ]
  //  }
  //
  _this.mergeCustomGeoJSON = (fc) => {
    if (fc && fc.type === 'FeatureCollection' && Array.isArray(fc.features)) {
      fc.features.forEach(feature => {
        feature.properties = feature.properties || {};
        let props = feature.properties;

        // Get `id` from either `id` or `properties`
        let id = feature.id || props.id;
        if (!id || !/^\S+\.geojson$/i.test(id)) return;

        // Ensure `id` exists and is lowercase
        id = id.toLowerCase();
        feature.id = id;
        props.id = id;

        // Ensure `area` property exists
        if (!props.area) {
          const area = calcArea.geometry(feature.geometry) / 1e6;  // m² to km²
          props.area = Number(area.toFixed(2));
        }

        _loco._cache[id] = feature;
      });
    }
  };
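
  // Usage sketch (illustrative, not from the committed file): register a custom boundary so
  // that locationSets can refer to it by filename afterwards:
  //
  //   locationManager.mergeCustomGeoJSON({
  //     type: 'FeatureCollection',
  //     features: [
  //       { type: 'Feature', id: 'philly_metro.geojson', properties: {}, geometry: { /* … */ } }
  //     ]
  //   });
  //   // from here on, { include: ['philly_metro.geojson'] } is a resolvable locationSet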

  //
  // `mergeLocationSets`
  // Accepts an Array of locationSets to merge into the index
  // Returns a Promise fulfilled when the resolving/indexing has been completed
  // This can take several seconds, but happens in the background during browser idle time
  //
  _this.mergeLocationSets = (locationSets) => {
    if (!Array.isArray(locationSets)) return Promise.reject('nothing to do');

    // Resolve all locationSets -> geojson, processing data in chunks
    //
    // Because this will happen during idle callbacks, we want to choose a chunk size
    // that won't make the browser stutter too badly. LocationSets that are a simple
    // country coder include will resolve instantly, but ones that involve complex
    // include/exclude operations will take some milliseconds longer.
    //
    // Some discussion and performance results on these tickets:
    //   https://github.com/ideditor/location-conflation/issues/26
    //   https://github.com/osmlab/name-suggestion-index/issues/4784#issuecomment-742003434
    _queue = _queue.concat(utilArrayChunk(locationSets, 200));

    // Everything after here will be deferred.
    if (!_inProcess) {
      _inProcess = processQueue()
        .then(() => {   // rebuild the which-polygon index
          _wp = whichPolygon({ features: Object.values(_resolvedFeatures) });
          _inProcess = null;
        });
    }
    return _inProcess;


    function processQueue() {
      if (!_queue.length) return Promise.resolve();

      // console.log(`queue length ${_queue.length}`);
      const chunk = _queue.pop();
      return new Promise(resolvePromise => {
        const handle = window.requestIdleCallback(() => {
          _deferred.delete(handle);
          // const t0 = performance.now();
          chunk.forEach(resolveLocationSet);
          // const t1 = performance.now();
          // console.log('chunk processed in ' + (t1 - t0) + ' ms');
          resolvePromise();
        });
        _deferred.add(handle);
      })
      .then(() => processQueue());
    }


    function resolveLocationSet(locationSet) {
      try {
        const resolved = _loco.resolveLocationSet(locationSet);
        const locationSetID = resolved.id;
        if (!resolved.feature.geometry.coordinates.length || !resolved.feature.properties.area) {
          throw new Error(`locationSet ${locationSetID} resolves to an empty feature.`);
        }
        if (!_resolvedFeatures[locationSetID]) {   // First time seeing this locationSet feature
          let feature = JSON.parse(JSON.stringify(resolved.feature));  // deep clone
          feature.id = locationSetID;   // Important: always use the locationSet `id` (`+[Q30]`), not the feature `id` (`Q30`)
          feature.properties.id = locationSetID;
          _resolvedFeatures[locationSetID] = feature;  // insert into cache
        }
      } catch (err) { /* ignore? */ }
    }
  };
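
  // Usage sketch (illustrative): queue locationSets for background resolution, then query
  // the rebuilt index once the returned Promise settles:
  //
  //   locationManager.mergeLocationSets([
  //     { include: ['us'] },
  //     { include: ['us'], exclude: ['pr'] },
  //     { include: ['new_jersey.geojson'] }    // assumes the custom feature was merged above
  //   ])
  //   .then(() => {
  //     // the which-polygon index has been rebuilt; query() and locationsAt() see the new features
  //   });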


  //
  // `locationSetID`
  // Return a locationSetID for a given locationSet (fallback to the 'world')
  //
  _this.locationSetID = (locationSet) => {
    let locationSetID;
    try {
      locationSetID = _loco.validateLocationSet(locationSet).id;
    } catch (err) {
      locationSetID = '+[Q2]';   // the world
    }
    return locationSetID;
  };


  //
  // `feature`
  // Return the GeoJSON feature for a given locationSetID (fallback to 'world')
  //
  _this.feature = (locationSetID) => _resolvedFeatures[locationSetID] || _resolvedFeatures['+[Q2]'];
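
  // Usage sketch (illustrative): look up the stable id for a locationSet, then fetch its
  // resolved GeoJSON feature (both fall back to the '+[Q2]' world entry):
  //
  //   const id = locationManager.locationSetID({ include: ['us'] });   // e.g. '+[Q30]'
  //   const feature = locationManager.feature(id);                     // resolved GeoJSON Feature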


  //
  // `query`
  // Execute a query directly against which-polygon
  // https://github.com/mapbox/which-polygon
  // Arguments
  //   `loc`: the [lon,lat] location to query,
  //   `multi`: `true` to return all results, `false` to return only the first result
  // Returns
  //   Array of GeoJSON *properties* for the locationSet features that exist at `loc`
  //
  _this.query = (loc, multi) => _wp(loc, multi);

  //
  // `locationsAt`
  // Convenience method to find all the locationSets valid at the given location.
  // Arguments
  //   `loc`: the [lon,lat] location to query
  // Returns
  //   An Object mapping locationSetIDs to areas (in km²):
  //   {
  //     "+[Q2]": 511207893.3958111,
  //     "+[Q30]": 21817019.17,
  //     "+[new_jersey.geojson]": 22390.77,
  //     …
  //   }
  //
  _this.locationsAt = (loc) => {
    let result = {};
    _wp(loc, true).forEach(prop => result[prop.id] = prop.area);
    return result;
  };
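
  // Usage sketch (illustrative): find everything that covers a coordinate. For a point in
  // New Jersey, the result could look like the Object shown above:
  //
  //   const props = locationManager.query([-74.4, 40.5], true);    // Array of feature properties
  //   const here  = locationManager.locationsAt([-74.4, 40.5]);    // { '+[Q2]': …, '+[Q30]': …, … }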

  // Direct access to the location-conflation resolver
  _this.loco = () => _loco;

  // Direct access to the which-polygon index
  _this.wp = () => _wp;


  return _this;
}

@@ -1,16 +1,15 @@
import { dispatch as d3_dispatch } from 'd3-dispatch';

import LocationConflation from '@ideditor/location-conflation';
import whichPolygon from 'which-polygon';

import { prefs } from '../core/preferences';
import { fileFetcher } from '../core/file_fetcher';
import { locationManager } from '../core/locations';

import { osmNodeGeometriesForTags, osmSetAreaKeys, osmSetPointTags, osmSetVertexTags } from '../osm/tags';
import { presetCategory } from './category';
import { presetCollection } from './collection';
import { presetField } from './field';
import { presetPreset } from './preset';
import { utilArrayChunk, utilArrayUniq, utilRebind } from '../util';
import { utilArrayUniq, utilRebind } from '../util';

export { presetCategory };
export { presetCollection };
@@ -48,22 +47,15 @@ export function presetIndex() {
  let _fields = {};
  let _categories = {};
  let _universal = [];
  let _customFeatures = {};
  let _resolvedFeatures = {};      // cache of all locationSet Features
  let _addablePresetIDs = null;    // Set of preset IDs that the user can add
  let _recents;
  let _favorites;

  let _deferred = new Set();
  let _queue = [];

  // Index of presets by (geometry, tag key).
  let _geometryIndex = { point: {}, vertex: {}, line: {}, area: {}, relation: {} };

  let _loco;
  let _featureIndex;
  let _loadPromise;


  _this.ensureLoaded = () => {
    if (_loadPromise) return _loadPromise;
@@ -96,24 +88,26 @@ export function presetIndex() {
  //   featureCollection: {}
  // }
  _this.merge = (d) => {

    // Cancel any existing deferred work - we'll end up redoing it after this merge
    _queue = [];
    Array.from(_deferred).forEach(handle => {
      window.cancelIdleCallback(handle);
      _deferred.delete(handle);
    });
    let newFields = [];
    let newPresets = [];
    let newLocationSets = [];

    // Merge Fields
    if (d.fields) {
      Object.keys(d.fields).forEach(fieldID => {
        const f = d.fields[fieldID];
        let f = d.fields[fieldID];

        if (f) {   // add or replace
          _fields[fieldID] = presetField(fieldID, f);
          if (!_fields[fieldID].locationSet) {
            _fields[fieldID].locationSet = { include: ['Q2'] };  // default worldwide
            _fields[fieldID].locationSetID = '+[Q2]';
          f = presetField(fieldID, f);
          if (f.locationSet) {
            newFields.push(f);
            newLocationSets.push(f.locationSet);
          } else {
            f.locationSet = { include: ['Q2'] };  // default worldwide
            f.locationSetID = '+[Q2]';
          }
          _fields[fieldID] = f;

        } else {   // remove
          delete _fields[fieldID];
        }
@@ -123,14 +117,20 @@ export function presetIndex() {
    // Merge Presets
    if (d.presets) {
      Object.keys(d.presets).forEach(presetID => {
        const p = d.presets[presetID];
        let p = d.presets[presetID];

        if (p) {   // add or replace
          const isAddable = !_addablePresetIDs || _addablePresetIDs.has(presetID);
          _presets[presetID] = presetPreset(presetID, p, isAddable, _fields, _presets);
          if (!_presets[presetID].locationSet) {
            _presets[presetID].locationSet = { include: ['Q2'] };  // default worldwide
            _presets[presetID].locationSetID = '+[Q2]';
          p = presetPreset(presetID, p, isAddable, _fields, _presets);
          if (p.locationSet) {
            newPresets.push(p);
            newLocationSets.push(p.locationSet);
          } else {
            p.locationSet = { include: ['Q2'] };  // default worldwide
            p.locationSetID = '+[Q2]';
          }
          _presets[presetID] = p;

        } else {   // remove (but not if it's a fallback)
          const existing = _presets[presetID];
          if (existing && !existing.isFallback()) {
@@ -191,85 +191,20 @@ export function presetIndex() {

    // Merge Custom Features
    if (d.featureCollection && Array.isArray(d.featureCollection.features)) {
      d.featureCollection.features.forEach(feature => {
        const featureID = feature.id || (feature.properties && feature.properties.id);
        if (featureID) {   // add or replace
          _customFeatures[featureID] = feature;
        }
      });
      locationManager.mergeCustomGeoJSON(d.featureCollection);
    }

    // Replace LocationConflation resolver if new customFeatures have been added
    // (Would be nice in a future version to be able to add new custom features to it, rather than replacing entirely)
    if (!_loco || d.featureCollection) {
      _loco = new LocationConflation({ type: 'FeatureCollection', features: Object.values(_customFeatures) });
      resolveLocationSet({ include: ['Q2'] });   // resolve the default "world" feature
    // Resolve all locationSet features.
    // When done, assign the locationSetIDs (we use these to quickly test where the preset/field is valid).
    if (newLocationSets.length) {
      locationManager.mergeLocationSets(newLocationSets)
        .then(() => {
          newFields.forEach(f => f.locationSetID = locationManager.locationSetID(f.locationSet));
          newPresets.forEach(p => p.locationSetID = locationManager.locationSetID(p.locationSet));
        });
    }

    // Resolve all features -> geojson, processing data in chunks
    let toResolve = Object.values(_presets).concat(Object.values(_fields))
      .filter(d => !d.locationSetID);

    _queue = utilArrayChunk(toResolve, 250);

    // Everything after here will be deferred.
    processQueue()
      .then(() => {   // Rebuild feature index
        _featureIndex = whichPolygon({ type: 'FeatureCollection', features: Object.values(_resolvedFeatures) });
      });

    return _this;


    function processQueue() {
      if (!_queue.length) return Promise.resolve();

      const chunk = _queue.pop();
      return new Promise(resolvePromise => {
        const handle = window.requestIdleCallback(() => {
          _deferred.delete(handle);
          resolveLocationSets(chunk);
          resolvePromise();
        });
        _deferred.add(handle);
      })
      .then(() => processQueue());
    }


    function resolveLocationSets(items) {
      if (!Array.isArray(items)) return;
      items.forEach(item => {
        let locationSet = item.locationSet || { include: ['Q2'] };   // fallback to world
        let locationSetID;

        try {
          locationSetID = resolveLocationSet(locationSet);
        } catch (err) {
          locationSet = { include: ['Q2'] };   // fallback to world
          locationSetID = '+[Q2]';
        }
        // store this info with the preset/field
        item.locationSet = locationSet;
        item.locationSetID = locationSetID;
      });
    }

    function resolveLocationSet(locationSet) {
      const resolved = _loco.resolveLocationSet(locationSet);
      const locationSetID = resolved.id;
      if (!resolved.feature.geometry.coordinates.length || !resolved.feature.properties.area) {
        throw new Error(`locationSet ${locationSetID} resolves to an empty feature.`);
      }
      if (!_resolvedFeatures[locationSetID]) {   // First time seeing this locationSet feature
        let feature = JSON.parse(JSON.stringify(resolved.feature));  // deep clone
        feature.id = locationSetID;   // Important: always use the locationSet `id` (`+[Q30]`), not the feature `id` (`Q30`)
        feature.properties.id = locationSetID;
        _resolvedFeatures[locationSetID] = feature;  // insert into cache
      }
      return locationSetID;
    }

  };
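With the queue and resolver removed from `presetIndex`, each preset and field now carries just a `locationSet` and the `locationSetID` assigned above by `locationManager`. The comment in the new merge code ("we use these to quickly test where the preset/field is valid") suggests a lookup along these lines; this is an illustrative sketch, not code from the commit, and `presetsValidAt` is a hypothetical helper:

function presetsValidAt(loc, presets) {
  const validHere = locationManager.locationsAt(loc);   // { locationSetID: area, … }
  // a preset is usable here once its locationSetID resolves to a locationSet covering `loc`
  return presets.filter(preset => preset.locationSetID in validHere);
}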


@@ -1,6 +1,7 @@
import { t } from '../core/localizer';
import { matcher } from 'name-suggestion-index';
import * as countryCoder from '@ideditor/country-coder';
import LocationConflation from '@ideditor/location-conflation';
import { matcher as Matcher } from 'name-suggestion-index';

import { presetManager } from '../presets';
import { fileFetcher } from '../core/file_fetcher';
@@ -23,35 +24,55 @@ export function validationOutdatedTags() {
  // and `_nsi` will not be available at first, so the data on early tiles
  // may not have tags validated fully.

  // initialize deprecated tags array
  // fetch deprecated tags
  fileFetcher.get('deprecated')
    .then(d => _dataDeprecated = d)
    .catch(() => { /* ignore */ });

  fileFetcher.get('nsi_brands')
    .then(d => {
      _nsi = {
        brands: d.brands,
        matcher: matcher(),
        wikidata: {},
        wikipedia: {}
      };

      // initialize name-suggestion-index matcher
      _nsi.matcher.buildMatchIndex(d.brands);
  // console.log('NSI: start fetching..');
  // // fetch the name-suggestion-index data
  // Promise.all([
  //   fileFetcher.get('nsi_data'),
  //   fileFetcher.get('nsi_features'),
  //   fileFetcher.get('nsi_generics'),
  //   fileFetcher.get('nsi_replacements'),
  //   fileFetcher.get('nsi_trees')
  // ])
  // .then(vals => {
  //   _nsi = {
  //     data: vals[0].nsi,
  //     features: vals[1],
  //     generics: vals[2].genericWords,
  //     replacements: vals[3].replacements,
  //     trees: vals[4].trees
  //   };

      // index all known wikipedia and wikidata tags
      Object.keys(d.brands).forEach(kvnd => {
        const brand = d.brands[kvnd];
        const wd = brand.tags['brand:wikidata'];
        const wp = brand.tags['brand:wikipedia'];
        if (wd) { _nsi.wikidata[wd] = kvnd; }
        if (wp) { _nsi.wikipedia[wp] = kvnd; }
      });
  // console.log('NSI: done fetching..');
  // console.log('NSI: start indexing..');

      return _nsi;
    })
    .catch(() => { /* ignore */ });
  // _nsi.loco = new LocationConflation(_nsi.features);
  // _nsi.matcher = Matcher();
  // _nsi.matcher.buildMatchIndex(_nsi.data);
  // _nsi.matcher.buildLocationIndex(_nsi.data, _nsi.loco);

  // console.log('NSI: done indexing..');

  // // initialize name-suggestion-index matcher
  // // _nsi.matcher.buildMatchIndex(d.brands);

  // // index all known wikipedia and wikidata tags
  // // Object.keys(d.brands).forEach(kvnd => {
  // //   const brand = d.brands[kvnd];
  // //   const wd = brand.tags['brand:wikidata'];
  // //   const wp = brand.tags['brand:wikipedia'];
  // //   if (wd) { _nsi.wikidata[wd] = kvnd; }
  // //   if (wp) { _nsi.wikipedia[wp] = kvnd; }
  // // });

  // return _nsi;
  // })
  // .catch(() => { /* ignore */ });


  function oldTagIssues(entity, graph) {


@@ -44,6 +44,7 @@
  "dependencies": {
    "@ideditor/country-coder": "^4.0.0",
    "@ideditor/location-conflation": "~0.7.0",
    "@mapbox/geojson-area": "^0.2.2",
    "@mapbox/geojson-rewind": "^0.5.0",
    "@mapbox/sexagesimal": "1.2.0",
    "@mapbox/togeojson": "0.16.0",
@@ -126,4 +127,4 @@
  "browserslist": [
    "> 0.2%, last 6 major versions, Firefox ESR, IE 11, maintained node versions"
  ]
}
}