@inproceedings{damenEtAl:2008, address = {Montpellier, France}, author = {Damen, Jonathan and van Kreveld, Marc and Spaan, Bert}, booktitle = {Proceedings of 12th ICA Workshop on Generalization and Multiple Representation}, title = {{High Quality Building Generalization by Extending the Morphological Operators}}, url = {https://kartographie.geo.tu-dresden.de/downloads/ica-gen/workshop2008/04\_Damen\_et\_al.pdf}, year = {2008} } @phdthesis{bereuter:phd, author = {Bereuter, Pia}, school = {University of Z\"{u}rich}, title = {{Quadtree-based Real-time Point Generalisation for Web and Mobile Mapping}}, year = {2015} } @article{liuEtAl:2014, abstract = {{Scale reduction from source to target maps inevitably leads to conflicts of map symbols in cartography and geographic information systems (GIS). Displacement is one of the most important map generalization operators and it can be used to resolve the problems that arise from conflict among two or more map objects. In this paper, we propose a combined approach based on a constrained Delaunay triangulation (CDT) skeleton and an improved elastic beam algorithm for automated building displacement. In this approach, map data sets are first partitioned. Then the displacement operation is conducted in each partition as a cyclic and iterative process of conflict detection and resolution. In the iteration, the skeleton of the gap spaces is extracted using the CDT. It then serves as an enhanced data model to detect conflicts and construct the proximity graph. Then, the proximity graph is adjusted using local grouping information. Under the action of forces derived from the detected conflicts, the proximity graph is deformed using the improved elastic beam algorithm. In this way, buildings are displaced to find an optimal compromise between related cartographic constraints. To validate this approach, two topographic map data sets (i.e., urban and suburban areas) were tested. The results were reasonable with respect to each constraint when the density of the map was not extremely high. In summary, the improvements include (1) an automated parameter-setting method for elastic beams, (2) explicit enforcement of the positional accuracy constraint, added by introducing drag forces, (3) preservation of local building groups through displacement over an adjusted proximity graph, and (4) an iterative strategy that is more likely to resolve the proximity conflicts than the one used in the existing elastic beam algorithm.}}, author = {Liu, Yuangang and Guo, Qingsheng and Sun, Yageng and Ma, Xiaoya}, day = {3}, doi = {10.1371/journal.pone.0113953}, journal = {PLOS ONE}, month = dec, number = {12}, pages = {e113953}, publisher = {Public Library of Science}, title = {{A Combined Approach to Cartographic Displacement for Buildings Based on Skeleton and Improved Elastic Beam Algorithm}}, url = {http://dx.doi.org/10.1371/journal.pone.0113953}, volume = {9}, year = {2014} } @article{mackaness:purves:2001, author = {Mackaness, W. A. and Purves, R. S.}, journal = {Algorithmica}, doi = {10.1007/s00453-001-0007-9}, number = {2}, pages = {302--311}, publisher = {Springer-Verlag}, title = {{Automated Displacement for Large Numbers of Discrete Map Objects}}, url = {http://dx.doi.org/10.1007/s00453-001-0007-9}, volume = {30}, year = {2001} } @article{bereuter:2013, abstract = {{With a focus on mobile and web mapping, we propose several algorithms for on-the-fly generalization of point data, such as points of interest (POIs) or large point collections.
In order to achieve real-time performance, we use a quadtree data structure. With their hierarchical subdivision structure and progressive levels of detail, indices of the quadtree family lend themselves as auxiliary data structures to support algorithms for generalization operations, including selection, simplification, aggregation, and displacement of point data. The spatial index can further be used to generate several local and global measures that can then serve to make educated guesses on the density and proximity of points across map scales, and thus enable control of the operation of the generalization algorithms. An implementation of the proposed algorithms has shown that, thanks to the quadtree index, real-time performance can be achieved even for large point sets. Furthermore, the quadtree data structure can be extended into a caching structure, which can be used to store pre-computed generalizations; thus, a desired level of detail (LOD) can simply be retrieved from cache.}}, author = {Bereuter, Pia and Weibel, Robert}, day = {1}, doi = {10.1080/15230406.2013.779779}, journal = {Cartography and Geographic Information Science}, month = sep, number = {4}, pages = {271--281}, publisher = {Taylor \& Francis}, title = {{Real-time generalization of point data in mobile and web mapping using quadtrees}}, url = {http://dx.doi.org/10.1080/15230406.2013.779779}, volume = {40}, year = {2013} } @inproceedings{savino:touya:2015, abstract = {{Unlike road or river networks, railway networks lack automatic generalization methods able to properly handle the detailed networks provided in current geo-datasets such as OpenStreetMap. This paper proposes methods to automatically identify key structures of railway networks, such as parallel main tracks, or fan and pack patterns inside large train stations. Then, algorithms based on the detected structures are proposed to generalize the railway networks. The algorithms are tested on real datasets, including OpenStreetMap data.}}, address = {Rio de Janeiro, Brazil}, author = {Savino, Sandro and Touya, Guillaume}, booktitle = {Proceedings of 27th International Cartographic Conference}, day = {23-28}, month = aug, organization = {ICA}, title = {{Automatic Structure Detection and Generalization of Railway Networks}}, year = {2015} } @article{aiEtAl:2014, abstract = {{To meet the requirements of multi-scale mapping in maritime applications, marine charts need to be produced at various levels of detail (LOD) using map generalization. As a prominent geographic feature, the coastline has to be generalized considering the geomorphologic characteristics rather than from a pure geometric perspective. Morphologic and domain-specific constraints (e.g., safety) should be incorporated in designing a coastline generalization algorithm. Motivated by the generalization of ria coastlines, this article proposes a simplification algorithm that is specific to coastlines. An analysis of ria coasts results in several morphologic constraints that have to be satisfied in coastline generalization, such as the dendritic pattern of estuaries. To satisfy these constraints, a hierarchical estuary tree model is first established by Delaunay triangulation, which helps to represent the dendritic pattern of ria coastlines. Minor estuaries are then deleted to achieve a reasonable coastline simplification. To imitate manual generalization, an indicator is designed to calculate the importance of estuaries in a context-dependent manner.
By comparison with a well-known bend simplification algorithm, we show that the presented method maintains the dendritic pattern of the coastline, is free from self-intersection, and also guarantees navigation safety. This article also demonstrates that the proposed approach is applicable to coastlines in general.}}, author = {Ai, Tinghua and Zhou, Qi and Zhang, Xiang and Huang, Yafeng and Zhou, Mengjie}, day = {3}, doi = {10.1080/01490419.2014.903215}, journal = {Marine Geodesy}, month = apr, number = {2}, pages = {167--186}, publisher = {Taylor \& Francis}, title = {{A Simplification of Ria Coastline with Geomorphologic Characteristics Preserved}}, url = {http://dx.doi.org/10.1080/01490419.2014.903215}, volume = {37}, year = {2014} } @article{aiEtAl:2015, abstract = {{In map generalization, the displacement operation attempts to resolve proximity conflicts to guarantee map legibility. Owing to the limited representation space, conflicts may occur between both the same and different features under different contexts. A successful displacement should settle multiple conflicts, suppress the generation of secondary conflicts after moving some objects, and preserve the distribution patterns. The effect of displacement can be understood as a force that pushes related objects away with properties of propagation and distance decay. This study borrows the idea of vector fields from physics and establishes a vector field model to handle the displacement of multiple conflicts in building generalization. A scalar field is first constructed based on a Delaunay triangulation skeleton to partition the buildings being examined (e.g., a street block). Then, we build a vector field to conduct displacement measurements through the detection of conflicts from multiple sources. The direction and magnitude of the displacement force are computed based on an iso-line model of the vector field. The experiment shows that this global method can settle multiple conflicts and preserve the spatial relations and important building patterns.}}, author = {Ai, Tinghua and Zhang, Xiang and Zhou, Qi and Yang, Min}, day = {3}, doi = {10.1080/13658816.2015.1019886}, journal = {International Journal of Geographical Information Science}, month = aug, number = {8}, pages = {1310--1331}, publisher = {Taylor \& Francis}, title = {{A vector field model to handle the displacement of multiple conflicts in building generalization}}, url = {http://dx.doi.org/10.1080/13658816.2015.1019886}, volume = {29}, year = {2015} } @inproceedings{wangEtAl:2014, abstract = {{Map generalization is commonly used in many GIS applications to produce maps with less detail so as to reduce the mapping complexity. Different from common simplification strategies, which simplify individual geometry objects separately, in this paper we consider the problem of generalizing geometry objects under the topological constraints among the geometries and given constraining points. We propose a Cross-line algorithm to simplify the map while preserving the topological constraints.
The proposed algorithm is extensively evaluated on five real map datasets and large synthetic datasets, and the results show that our proposed approach can greatly simplify the map with extremely high correctness and excellent efficiency.}}, address = {New York, NY, USA}, author = {Wang, Yuwei and Guo, Danhuai and Liu, Kuien and Xiong, Yan}, booktitle = {Proceedings of the 22nd ACM SIGSPATIAL International Conference on Advances in Geographic Information Systems}, doi = {10.1145/2666310.2666420}, isbn = {978-1-4503-3131-9}, location = {Dallas, Texas}, pages = {609--612}, publisher = {ACM}, series = {SIGSPATIAL '14}, title = {{A Fast Algorithm of Geometry Generalization}}, url = {http://dx.doi.org/10.1145/2666310.2666420}, year = {2014} } @incollection{StanislawskiEtAl:2014, abstract = {{This chapter summarises cartographic generalisation operators used to generalise geospatial data. It includes a review of recent approaches that have been tested or implemented to generalise networks, points, or groups. Emphasis is placed on recent advances that permit additional flexibility to tailor generalisation processing in particular geographic contexts, and to permit more advanced types of reasoning about spatial conflicts, preservation of specific feature characteristics, and local variations in geometry, content and enriched attribution. Rather than an exhaustive review of generalisation operators, the chapter devotes more attention to operators associated with network generalisation, which illustrates well the logic behind map generalisation developments. Three case studies demonstrate the application of operators to road thinning, to river network and braid pruning, and to hierarchical point elimination. The chapter closes with some summary comments and future directions.}}, author = {Stanislawski, Lawrence V. and Buttenfield, Barbara P. and Bereuter, Pia and Savino, Sandro and Brewer, Cynthia A.}, booktitle = {Abstracting Geographic Information in a Data Rich World}, doi = {10.1007/978-3-319-00203-3\_6}, editor = {Burghardt, Dirk and Duch\^{e}ne, C\'{e}cile and Mackaness, William}, pages = {157--195}, publisher = {Springer International Publishing}, series = {Lecture Notes in Geoinformation and Cartography}, title = {{Generalisation Operators}}, url = {http://dx.doi.org/10.1007/978-3-319-00203-3\_6}, year = {2014} } @inproceedings{touya:girres:2014, address = {Vienna, Austria}, author = {Touya, Guillaume and Girres, Jean-Fran\c{c}ois}, booktitle = {Proceedings of 17th ICA Workshop on Generalisation and Multiple Representation}, day = {23}, month = sep, organization = {ICA}, title = {{Generalising Unusual Map Themes from OpenStreetMap}}, year = {2014} } @article{suEtAl:1998, abstract = {{``Collapse'' is an essential operation for the manipulation of area features in digital data generalization. This operation can be categorized into two types: complete collapse and partial collapse. The former comprises two further types: area-to-point and area-to-line collapse. In this paper, a set of algebraic models built upon the operators in mathematical morphology is described for the area-to-line collapse and partial collapse operations. For the area-to-line collapse operation, a modified skeleton algorithm is presented. For the partial collapse operation, a procedure is designed which consists of a set of operations, i.e., skeletonization, separation of areal and linear parts, simplification of areas, and an overlay operation.
These models are tested using real map data sets.}}, author = {Su, Bo and Li, Zhilin and Lodwick, Graham}, journal = {GeoInformatica}, doi = {10.1023/A:1009757422454}, number = {4}, pages = {359--382}, publisher = {Kluwer Academic Publishers}, title = {{Morphological Models for the Collapse of Area Features in Digital Map Generalization}}, url = {http://dx.doi.org/10.1023/A:1009757422454}, volume = {2}, year = {1998} } @article{burghardt:2005, abstract = {{A major focus of research in recent years has been the development of algorithms for automated line smoothing. However, combination of the algorithms with other generalization operators is a challenging problem. In this research a key aim was to extend a snakes optimization approach, allowing displacement of lines, to also be used for line smoothing. Furthermore, automated selection of control parameters is important for fully automated solutions. An existing approach based on line segmentation was used to control the selection of smoothing parameters dependent on object characteristics. Additionally a new typification routine is presented, which uses the same preprocessed analysis for the segmentation of lines to find suitable candidates from curve bends. The typification is realized by deleting undersized bends and emphasizing the remaining curve bends. The main results of this research are two new algorithms for line generalization, where the importance of the line smoothing algorithm lies in the use of an optimization approach which can also be used for line displacement.}}, address = {Hingham, MA, USA}, author = {Burghardt, Dirk}, doi = {10.1007/s10707-005-1283-3}, issn = {1384-6175}, journal = {GeoInformatica}, number = {3}, pages = {237--252}, publisher = {Kluwer Academic Publishers}, title = {{Controlled Line Smoothing by Snakes}}, url = {http://dx.doi.org/10.1007/s10707-005-1283-3}, volume = {9}, year = {2005} } @article{dutton:1999, abstract = {{This paper examines some assumptions and results of cartographic line simplification in the digital realm, focusing upon two major aspects of map generalization: scale-specificity and the concept of characteristic points. These are widely regarded as critical controls to generalization but, in our estimation, they are rarely well considered or properly applied. First, a look at how scale and shape are treated in various research papers identifies some important conceptual and methodological issues that either have been misconstrued or inadequately treated. We then conduct an empirical analysis with a set of line generalization experiments that control resolution, detail, and sinuosity using four source datasets. The tests yield about 100 different versions of two island coastlines digitized at two scales, exploring systematically the consequences of linking scale with spatial resolution as well as a variety of point selection strategies. The generalized results are displayed (at scale and enlarged) along with some basic statistics. We conclude with a set of observations on the need for more careful and robust evaluations of strategies for point selection and for at-scale verification of visual quality.
The results indicate that regardless of which methods are used, sensitivity to scale and sinuosity can and must be built into all digital map generalization toolkits.}}, author = {Dutton, Geoffrey}, day = {1}, doi = {10.1559/152304099782424929}, journal = {Cartography and Geographic Information Science}, month = jan, number = {1}, pages = {33--54}, publisher = {Taylor \& Francis}, title = {{Scale, Sinuosity, and Point Selection in Digital Line Generalization}}, url = {http://dx.doi.org/10.1559/152304099782424929}, volume = {26}, year = {1999} } @inproceedings{szombara:2013, address = {Dresden, Germany}, author = {Szombara, Stanislaw}, booktitle = {Proceedings of 16th ICA Workshop on Generalisation and Multiple Representation}, month = aug, title = {{Unambiguous Collapse Operator of Digital Cartographic Generalisation}}, url = {http://generalisation.icaci.org/images/files/workshop/workshop2013/genemappro2013\_submission\_20.pdf}, year = {2013} } @inproceedings{schwartges:2013, address = {Dresden, Germany}, author = {Schwartges, N. and Allerkamp, D. and Haunert, Jan-Henrik and Wolff, Alexander}, booktitle = {Proceedings of 16th ICA Workshop on Generalisation and Multiple Representation}, day = {23-24}, month = aug, organization = {ICA}, title = {{Optimizing Active Ranges for Point Selection in Dynamic Maps}}, url = {http://generalisation.icaci.org/images/files/workshop/workshop2013/genemappro2013\_submission\_5.pdf}, year = {2013} } @article{korpi:ahonen:2013, abstract = {{Map mashups are often visually chaotic and methods for solving this chaos are required. We introduce a set of clutter reduction criteria for evaluating methods to reduce clutter in map mashups. We present a synthesis of cartographic generalisation operators for point data and clutter reduction methods used in information visualisation and evaluate the methods against the criteria. The resulting evaluation table can be used in finding suitable clutter reduction methods for cases of map mashups with different primary criteria, and more specifically in finding methods that cover each other's limitations.}}, author = {Korpi, Jari and Ahonen-Rainio, Paula}, doi = {10.1179/1743277413y.0000000065}, issn = {0008-7041}, journal = {The Cartographic Journal}, month = aug, number = {3}, pages = {257--265}, publisher = {Maney Publishing}, title = {{Clutter Reduction Methods for Point Symbols in Map Mashups}}, url = {http://dx.doi.org/10.1179/1743277413y.0000000065}, volume = {50}, year = {2013} } @mastersthesis{ruas:1988, author = {Ruas, Anne}, school = {ENSG}, title = {{G\'{e}n\'{e}ralisation d'immeubles}}, year = {1988} } @article{raposo:2013, abstract = {{A new method of cartographic line simplification is presented. Regular hexagonal tessellations are used to sample lines for simplification, where hexagon width, reflecting sampling fidelity, is varied in proportion to target scale and drawing resolution. Tesserae constitute loci at which new sets of vertices are defined by vertex clustering quantization, and these vertices are used to compose simplified lines retaining only visually resolvable detail at target scale. Hexagon scaling is informed by the Nyquist--Shannon sampling theorem. The hexagonal quantization algorithm is also compared to an implementation of the Li--Openshaw raster-vector algorithm, which undertakes a similar process using square raster cells.
Lines produced by either algorithm using like tessera widths are compared for fidelity to the original line in two ways: Hausdorff distances to the original lines are statistically analyzed, and simplified lines are presented against input lines for visual inspection. Results show that hexagonal quantization offers advantages over square tessellations for vertex clustering line simplification in that simplified lines are significantly less displaced from input lines. Visual inspection suggests lines produced by hexagonal quantization retain informative geographical shapes for greater differences in scale than do those produced by quantization in square cells. This study yields a scale-specific cartographic line simplification algorithm, following Li and Openshaw's natural principle, which is readily applicable to cartographic linework. Open-source Java code implementing the hexagonal quantization algorithm is available online.}}, author = {Raposo, Paulo}, day = {1}, doi = {10.1080/15230406.2013.803707}, journal = {Cartography and Geographic Information Science}, month = nov, number = {5}, pages = {427--443}, publisher = {Taylor \& Francis}, title = {{Scale-specific automated line simplification by vertex clustering on a hexagonal tessellation}}, url = {http://dx.doi.org/10.1080/15230406.2013.803707}, volume = {40}, year = {2013} } @inproceedings{Bereuter2011, address = {Paris, France}, author = {Bereuter, Pia and Weibel, Robert}, booktitle = {25th International Cartographic Conference}, month = jul, pages = {10}, title = {{A Diagnostic Toolbox for Assessing Point Data Generalisation Algorithms}}, year = {2011} } @article{andersonTarver:2011, abstract = {{This paper describes a method to approach area-patch problems in model generalization using fuzzy set theory. The area-patch problem identifies polygons with the same semantics (feature code) but varying geometry and spatial distribution. Area-patch generalization can be understood as a pattern recognition problem with a variety of viable solutions, constrained by the purposes of the generalized output. The focus lies with identifying geometric textures associated with groups of swamps collectively identified as archipelagos in preparation for generalization. In this research, polygonal swamp/marsh features as area-patches are considered, and constraints are defined in part by USGS data production specifications. The method developed will identify archipelagos for swamps and marshes in an NHD High-Resolution subbasin dataset; the study area for this work spans the Florida-Georgia border. Fuzzy membership functions are assigned to swamp/marsh polygons based on area, inter-polygon distance and number of neighbors within a predefined distance. These attributes are combined in a fuzzy overlay to derive degrees of membership that rank each polygon for possible aggregation into archipelagos. The final aggregation is based on 'alpha cut' thresholding. A sensitivity analysis evaluates the impact of different fuzzy overlay operators and varying alpha-cuts on the resulting pattern. To validate the change in geometric properties (area, area/perimeter ratios) and overall texture from the original scale to the target scale, a comparison is made between our approach and a solution that does not account for archipelagos.
Preliminary findings indicate that a fuzzy set approach allows for the identification of area-patches that would otherwise not be delineated in a generalization solution.}}, author = {Anderson-Tarver, Chris and Leyk, Stefan and Buttenfield, Barbara P.}, day = {1}, doi = {10.1559/15230406382137}, journal = {Cartography and Geographic Information Science}, month = jan, number = {2}, pages = {137--145}, publisher = {Taylor \& Francis}, title = {{Fuzzy Modeling of Geometric Textures for Identifying Archipelagos in Area-Patch Generalization}}, url = {http://dx.doi.org/10.1559/15230406382137}, volume = {38}, year = {2011} } @inproceedings{zhang:tian:1997, address = {Stockholm, Sweden}, author = {Zhang, L. and Tian, Z.}, booktitle = {Proceedings of the 18th ICA/ACI International Cartographic Conference}, pages = {830--835}, title = {{Refinement of Douglas-Peucker algorithm to move the segments toward only one side}}, year = {1997} } @article{li:openshaw:1993, abstract = {{Generalization is a major research theme in geographic information systems. This paper describes a natural principle for the objective generalization of digital map data and other spatial data. First, the basic concepts related to generalization are reexamined and the key relationships between conventional cartographic generalization and automated digital generalization are discussed. Some of the previous approaches that have been used for generalization are evaluated to provide a context for developing what is termed a natural principle for objective generalization. How the natural principle works is compared with some methods that are currently used for generalization purposes. Finally, some examples of applying this principle to cartographic features are given.}}, author = {Li, Zhilin and Openshaw, Stan}, doi = {10.1559/152304093782616779}, issn = {1523-0406}, journal = {Cartography and Geographic Information Systems}, month = jan, number = {1}, pages = {19--29}, publisher = {Cartography and Geographic Information Society}, title = {{A Natural Principle for the Objective Generalization of Digital Maps}}, url = {http://dx.doi.org/10.1559/152304093782616779}, volume = {20}, year = {1993} } @inproceedings{raposo:2010, address = {Orlando, Florida, USA}, author = {Raposo, Paulo}, booktitle = {Proceedings of the ASPRS/CaGIS 2010 Fall Specialty Conference (AutoCarto)}, title = {{Piece by Piece: A Method of Cartographic Line Generalization Using Regular Hexagonal Tessellation}}, year = {2010} } @article{christensen:2000, abstract = {{The hydrography of Cat Island, a rather small, albeit interesting feature in the Gulf of Mexico, is the subject of six generalizations executed with a procedure that replicates and extends Julian Perkal's proposal for an objective generalization. The automated procedure, based on the two operations known as waterlining and Medial-Axis Transformation, is shown to lead to results that should be similar to those achieved by a trained cartographer working with the traditional manual method. Three aspects of generalization are covered by the procedure: line simplification in general, and dropping and aggregation of features of simple configurations. As regards Perkal's original objective, this presentation proves that it can be achieved by a combination of waterlining operations. The Medial-Axis Transformation, a concept developed within Pattern Recognition, is proposed as the best choice for spanning Perkal's boundary zones.
The six generalizations are offered as evidence that the method preserves the general shape of features for lower scale factors and yields simple and compact shapes for larger factors. However, two problems were encountered that suggest that the method still needs further development in order to maintain its fully automated character; or, alternatively, the method may need to be interlaced with the occasional use of interactive aids. The first of those two problems is encountered in attempting to bridge boundary zones of complex configurations. The second problem arises from the specifications for an autonomous feature dropping procedure and with the need to invoke it. Both problems are considered in some depth.}}, author = {Christensen, Albert H. J.}, doi = {10.1179/000870400787320860}, issn = {0008-7041}, journal = {The Cartographic Journal}, month = jun, number = {1}, pages = {19--28}, publisher = {Maney Publishing}, title = {{Line Generalization by Waterlining And Medial-Axis Transformation. Successes and Issues in an Implementation of Perkal's Proposal}}, url = {http://dx.doi.org/10.1179/000870400787320860}, volume = {37}, year = {2000} } @inproceedings{barillot:2001, address = {Beijing, China}, author = {Barillot, Xavier and Hangou\"{e}t, Jean-Fran\c{c}ois and Kadri-Dahmani, H.}, booktitle = {Proceedings of 20th International Cartographic Conference (ICC'01)}, title = {{Generalisation of the Douglas and Peucker Algorithm for Cartographic Applications}}, year = {2001} } @inproceedings{zaksek:podobnikar:2005, abstract = {{The objective of DEM generalization is the generation of a smooth elevation layer with a lower resolution than the original data, which should be appropriate for a smaller scale relief presentation. Moreover, it is desired that the generalized DEM has appropriate data accuracy, including its geomorphological domain, which means that a terrain skeleton has to be preserved. The terrain skeleton was extracted and then used as input data for interpolation to the specified resolution in the described approach. No advanced methods were used for the terrain skeleton extraction, because the hypothesis of this study was that high-quality DEM generalization is also possible when using basic GIS operations and tools only.}}, address = {La Coru\~{n}a, Spain}, author = {Zaksek, K. and Podobnikar, T.}, booktitle = {Proceedings of 8th ICA Workshop on Generalisation and Multiple Representation}, title = {{An Effective DEM Generalization with Basic GIS Operations}}, url = {http://aci.ign.fr/Acoruna/Papers/Zaksek\_Podobnikar.pdf}, year = {2005} } @article{fei:he:2009, abstract = {{This article reviews the formation of the 3-D Douglas--Peucker algorithm on the basis of analyzing the nature of the 2-D algorithm and further studies the application of the 3-D method to the automated global generalization of digital elevation models (DEMs). Compared to the general 3-D Douglas--Peucker algorithm put forward by the authors in the previous publication, further improvements and expansion of the algorithm have been included in this article, namely, (1) for randomly distributed points, the loneliness index of the current point has been taken as the dynamic weight factor of the point--plane distance, so as to improve the selection of the feature points; (2) aiming at the mass volume of the regular square grids, which form the majority of DEM data, three measures have been suggested in this article for the improvement of the efficiency of the automated generalization.
Experiments have proven that these measures can greatly improve the efficiency of the DEM generalization with satisfactory results and have offered us the practical possibility of on-the-fly global generalization of DEMs with a huge volume of data.}}, author = {Fei, Lifan and He, Jin}, day = {3}, doi = {10.1080/13658810701703001}, journal = {International Journal of Geographical Information Science}, month = aug, number = {6}, pages = {703--718}, publisher = {Taylor \& Francis}, title = {{A three-dimensional Douglas \& Peucker algorithm and its application to automated generalization of DEMs}}, url = {http://dx.doi.org/10.1080/13658810701703001}, volume = {23}, year = {2009} } @incollection{meijers:2011, abstract = {{We employ a batch generalization process for obtaining a variable-scale planar partition. We describe an algorithm to simplify the boundary lines after a map generalization operation (either a merge or a split operation) has been applied on a polygonal area and its neighbours. The simplification is performed simultaneously on the resulting boundaries of the new polygonal areas that replace the areas that were processed. As the simplification strategy has to keep the planar partition valid, we define what we consider to be a valid planar partition (among other requirements, no zero-sized areas and no unwanted intersections in the boundary polylines). Furthermore, we analyse the effects of the line simplification for the content of the data structures in which the planar partition is stored.}}, address = {Berlin, Heidelberg}, author = {Meijers, Martijn}, booktitle = {Advancing Geoinformation Science for a Changing World}, chapter = {17}, doi = {10.1007/978-3-642-19789-5\_17}, editor = {Geertman, Stan and Reinhardt, Wolfgang and Toppen, Fred}, isbn = {978-3-642-19788-8}, pages = {337--358}, publisher = {Springer Berlin Heidelberg}, series = {Lecture Notes in Geoinformation and Cartography}, title = {{Simultaneous \& topologically-safe line simplification for a variable-scale planar partition}}, url = {http://dx.doi.org/10.1007/978-3-642-19789-5\_17}, volume = {1}, year = {2011} } @inproceedings{haunert:wolff:2010, abstract = {{We present an optimization approach to simplify sets of building footprints represented as polygons. We simplify each polygonal ring by selecting a subsequence of its original edges; the vertices of the simplified ring are defined by intersections of consecutive (and possibly extended) edges in the selected sequence. Our aim is to minimize the number of all output edges subject to a user-defined error tolerance. Since we earlier showed that the problem is NP-hard when requiring non-intersecting simple polygons as output, we cannot hope for an efficient, exact algorithm. Therefore, we present an efficient algorithm for a relaxed problem and an integer program (IP) that allows us to solve the original problem with existing software. Our IP is large, since it has $O(m^6)$ constraints, where $m$ is the number of input edges. In order to keep the running time small, we first consider a subset of only $O(m)$ constraints. The choice of the constraints ensures some basic properties of the solution. Constraints that were neglected are added during optimization whenever they become violated by a new solution encountered. Using this approach we simplified a set of 144 buildings with a total of 2056 edges in 4.1 seconds on a standard desktop PC; the simplified building set contained 762 edges. During optimization, the number of constraints increased by a mere 13\%.
We also show how to apply cartographic quality measures in our method and discuss their effects on examples.}}, address = {New York, NY, USA}, author = {Haunert, Jan-Henrik and Wolff, Alexander}, booktitle = {Proceedings of the 18th SIGSPATIAL International Conference on Advances in Geographic Information Systems}, doi = {10.1145/1869790.1869819}, isbn = {978-1-4503-0428-3}, location = {San Jose, California}, pages = {192--201}, publisher = {ACM}, series = {GIS '10}, title = {{Optimal and topologically safe simplification of building footprints}}, url = {http://dx.doi.org/10.1145/1869790.1869819}, year = {2010} } @incollection{regnauld:mcmaster:2007, abstract = {{Most of the research in generalisation assumes that the process can be broken down into a series of logical operations that can be classified, according to the type of geometry of the feature, into what we call generalisation operators. For instance, a smoothing operator is designed for linear features, while an amalgamation operator works on areal features. This chapter provides an overview of what has been achieved so far towards creating a comprehensive set of generalisation operators. It contains discussions related to the classification of these operators, and how different classifications have been defined to suit different contexts (such as raster vs. vector data, or 2D vs. Digital Elevation Model data); it proposes a generic list of generalisation operators, and a detailed list of implementations of these operators for different types of features. This provides a virtual toolbox that can be used when designing automatic generalisation solutions. The chapter concludes by discussing the changing nature of algorithms and operators in response to technological developments and changing contexts of use.}}, author = {Regnauld, Nicolas and McMaster, Robert C.}, booktitle = {Generalisation of Geographic Information}, doi = {10.1016/b978-008045374-3/50005-3}, editor = {Mackaness, William A. and Ruas, Anne and Sarjakoski, L. Tina}, isbn = {978-0-08-045374-3}, pages = {37--66}, publisher = {Elsevier}, title = {{A Synoptic View of Generalisation Operators}}, url = {http://dx.doi.org/10.1016/b978-008045374-3/50005-3}, year = {2007} } @article{nickerson:1988, author = {Nickerson, Bradford G.}, journal = {Cartographica}, number = {3}, pages = {15--66}, title = {{Automated Cartographic Generalization for Linear Features}}, volume = {25}, year = {1988} } @article{lecordixEtAl:1997, author = {Lecordix, Fran\c{c}ois and Plazanet, Corinne and Lagrange, Jean-Philippe}, doi = {10.1023/A:1009736628698}, journal = {GeoInformatica}, number = {2}, pages = {161--182}, title = {{A Platform for Research in Generalization: Application to Caricature}}, url = {http://dx.doi.org/10.1023/A:1009736628698}, volume = {1}, year = {1997} } @phdthesis{fritsch:1997, author = {Fritsch, Emmanuel}, school = {Universit\'{e} Marne-la-Vall\'{e}e}, title = {{Repr\'{e}sentations de la g\'{e}om\'{e}trie et des contraintes cartographiques pour la g\'{e}n\'{e}ralisation du lin\'{e}aire routier}}, year = {1997} } @article{christensen:1999, author = {Christensen, A. H.
J.}, journal = {Cartography and Geographic Information Science}, number = {1}, pages = {19--32}, title = {{Cartographic Line Generalization with Waterlines and Medial-Axes}}, volume = {26}, year = {1999} } @article{mcmaster:1989, abstract = {{The cartographic generalization of vector data in digital format involves six distinct processes, including simplification, smoothing, enhancement, displacement, merging, and omission. Although the research agenda has addressed each of the six elements individually, little consideration has been given to the geometric interaction of the components. This paper proposes a conceptual model, based on a sequential set of five procedures – or transformations – for the processing of linear digital data. The geometric interaction of simplification and smoothing algorithms is especially emphasized. The first process involves cleaning the digital file, whereby digitizing errors and duplicate coordinate pairs are eliminated. This is followed by a simple smoothing–normally based on weighted-averaging–designed to eliminate the 'gridding' constraints of the digitizing tablet or other encoding device. A third manipulation involves what is called database simplification where a sequential approach (such as Lang tolerancing) is applied in order to prepare the data for storage. At this stage the major concern is with minimizing the geometric modifications of the data, such as areal displacement. For display at a reduced scale, a fourth manipulation in the model utilizes the Douglas routine. At this level, the significant consideration is on critical point selection and, to a lesser extent, error minimization. The fifth and final manipulation entails additional smoothing, such as the application of splines for aesthetic considerations. It is proposed that such a sequential processing of line data (based on this model) will (1) improve the quality of the original digital information, (2) result in minimal database storage with minimal error, and (3) produce the most aesthetically acceptable generalization possible at greatly reduced scales.}},
author = {McMaster, Robert B.}, day = {1}, doi = {10.3138/c213-3627-90x7-lr15}, journal = {Cartographica: The International Journal for Geographic Information and Geovisualization}, month = oct, number = {1}, pages = {101--121}, title = {{The Integration Of Simplification And Smoothing Algorithms In Line Generalization}}, url = {http://dx.doi.org/10.3138/c213-3627-90x7-lr15}, volume = {26}, year = {1989} } @article{visvalingam:1993, author = {Visvalingam, M. and Wyatt, J. D.}, journal = {The Cartographic Journal}, number = {1}, pages = {46--51}, title = {{Line Generalization by Repeated Elimination of Points}}, volume = {30}, year = {1993} } @inproceedings{Schylberg:1992, author = {Schylberg, L.}, booktitle = {Proceedings of ISPRS'92}, location = {Washington, USA}, organization = {ISPRS}, pages = {135--138}, title = {{Cartographic amalgamation of area objects}}, url = {http://www.isprs.org/proceedings/XXIX/congress/part4/135\_XXIX-part4.pdf}, year = {1992} } @phdthesis{hangouet:1998, author = {Hangou\"{e}t, Jean-Fran\c{c}ois}, school = {Universit\'{e} Marne-la-Vall\'{e}e}, title = {{Approche et m\'{e}thodes pour l'automatisation de la g\'{e}n\'{e}ralisation cartographique; application en bord de ville}}, year = {1998} } @phdthesis{regnauld:1998, author = {Regnauld, Nicolas}, school = {Universit\'{e} de Provence}, title = {{G\'{e}n\'{e}ralisation du b\^{a}ti: Structure spatiale de type graphe et repr\'{e}sentation cartographique}}, year = {1998} } @inproceedings{fan:meng:2010, abstract = {{In cartography, a number of methods for the simplification or generalization of building ground plans have been developed. They are mainly focused on preserving and enhancing the properties of buildings like right angles or parallelism. Few methods can handle ground plans of buildings with complex geometrical forms, i.e., non-rectangular, non-parallel shapes and long narrow angles. This paper presents a generic approach which can simplify ground plans with arbitrary shapes. The algorithm is implemented and tested for a large data set. The test shows our approach can provide good results given a predefined threshold. Moreover, the algorithm is very efficient.}}, author = {Fan, Hongchao and Meng, Liqiu}, booktitle = {13th Workshop of the ICA commission on Generalisation and Multiple Representation}, location = {Zurich, Switzerland}, organization = {ICA}, title = {{A generic approach for simplification of building ground plan}}, year = {2010} } @article{suEtAl:1997, abstract = {{Generalization is a fundamental function in GIS.
It has been an important research theme for many years in cartography and GIS. A number of generalization operations have been identified; however, most of them, especially those rule-based operations, remain at the conceptual level. This paper describes a set of mathematical (algebraic) models for area aggregation based on the operators developed in mathematical morphology. In this paper, the process of area aggregation is decomposed into two components, viz., combination and shape refinement, and algebraic models for both components are developed. These are demonstrated using various examples. The models provide a mathematical basis for area aggregation in digital generalization of map and other spatial data. The results show that these algebraic models have the potential for successful application.}}, author = {Su, Bo and Li, Zhilin and Lodwick, Graham and Muller, Jean-Claude}, doi = {10.1080/136588197242374}, journal = {International Journal of Geographical Information Science}, number = {3}, pages = {233--246}, publisher = {Taylor \& Francis}, title = {{Algebraic models for the aggregation of area features based upon morphological operators}}, url = {http://dx.doi.org/10.1080/136588197242374}, volume = {11}, year = {1997} } @inproceedings{sester:2001, author = {Sester, Monika}, booktitle = {6th ICA Workshop on progress in automated map generalisation}, location = {Beijing, China}, organization = {ICA}, title = {{Kohonen Feature Nets for Typification}}, url = {http://ica.ign.fr/BDpubli/pbeijing2001/papers/sester\_v0.pdf}, year = {2001} } @inproceedings{sester:brenner:2000, author = {Sester, Monika and Brenner, Claus}, booktitle = {1st International Conference on Geographic Information Science}, location = {Savannah, USA}, pages = {21--22}, title = {{Typification based on Kohonen Feature Nets}}, year = {2000} } @article{jonesEtAl:1995, abstract = {{Automation of map generalization requires facilities to monitor the spatial relationships and interactions among multiple map objects. An experimental map generalization system has been developed which addresses this issue by representing spatial objects within a simplicial data structure (SDS) based on constrained Delaunay triangulation of the source data. Geometric generalization operators that have been implemented include object exaggeration, collapse, amalgamation, boundary reduction and displacement. The generalization operators exploit a set of primitive SDS functions to determine topological and proximal relationships, measure map objects, apply transformations, and detect and resolve spatial conflicts. Proximal search functions are used for efficient analysis of the structure and dimensions of the intervening spaces between map objects. Because geometric generalization takes place within a fully triangulated representation of the map surface, the presence of overlap conflicts, resulting from individual operators, can be detected due to the introduction of singularities in the triangulation, the structure of which is used to generate displacement vectors to resolve the conflict. Examples of the application of the implemented operators are described and illustrated using large scale topographic map data.}}, author = {Jones, Christopher B. and Bundy, Geraint L.
and Ware, Mark J.}, doi = {10.1559/152304095782540221}, issn = {1523-0406}, journal = {Cartography and Geographic Information Systems}, month = oct, number = {4}, pages = {317--331}, publisher = {Cartography and Geographic Information Society}, title = {{Map Generalization with a Triangulated Data Structure}}, url = {http://dx.doi.org/10.1559/152304095782540221}, volume = {22}, year = {1995} } @article{airault:1996, author = {Airault, Sylvain}, journal = {Revue Internationale de G\'{e}omatique}, number = {2-3}, pages = {203--217}, title = {{De la base de donn\'{e}es \`{a} la carte : une approche globale pour l'\'{e}quarrissage de b\^{a}timents}}, volume = {6}, year = {1996} } @inproceedings{lichtner:1979, author = {Lichtner, W.}, booktitle = {Geo-Processing 1}, pages = {183--199}, title = {{Computer-assisted processes of cartographic generalisation in topographic maps}}, year = {1979} } @inproceedings{weihuaEtAl:2008, author = {Weihua, D. and Jiping, L. and Qinsheng, G.}, booktitle = {ISPRS Commission II, WG II/3}, location = {Beijing, China}, title = {{Visualizing Schematic Maps Through Generalization Based on Adaptive Regular Square Grid Model}}, year = {2008} } @inproceedings{haunert:wolff:2008, author = {Haunert, Jan-Henrik and Wolff, A.}, booktitle = {ISPRS Commission II, WG II/3}, location = {Beijing, China}, organization = {ISPRS}, title = {{Optimal Simplification of Building Ground Plans}}, year = {2008} } @techreport{agent:1999, author = {{AGENT Project}}, institution = {EU}, title = {{Selection of Basic Algorithms}}, url = {http://agent.ign.fr/deliverable/DD2.pdf}, year = {1999} } @inproceedings{haunert:sester:2004, author = {Haunert, Jan-Henrik and Sester, Monika}, booktitle = {6th ICA Workshop on progress in automated map generalisation}, location = {Leicester, UK}, organization = {ICA}, title = {{Using the Straight Skeleton for Generalisation in a Multiple Representation Environment}}, url = {http://aci.ign.fr/Leicester/paper/Haunert-v2-ICAWorkshop.pdf}, year = {2004} } @inproceedings{dougenik:1980, author = {Dougenik, J.}, booktitle = {Auto-Carto 4}, organization = {ACSM-ASPRS}, pages = {304--311}, title = {{WHIRLPOOL: A geometric processor for polygon coverage data}}, url = {http://mapcontext.com/autocarto/proceedings/auto-carto-4-vol-2/pdf/whirlpool-a-geometric-processor-for-polygon-coverage-data.pdf}, year = {1980} } @article{douglas:peucker:1973, abstract = {{All digitizing methods, as a general rule, record lines with far more data than is necessary for accurate graphic reproduction or for computer analysis. Two algorithms to reduce the number of points required to represent the line and, if desired, produce caricatures, are presented and compared with the most promising methods so far suggested. Line reduction will form a major part of automated generalization.}}, author = {Douglas, David H.
and Peucker, Thomas K.}, day = {1}, doi = {10.3138/FM57-6770-U75U-7727}, journal = {Cartographica: The International Journal for Geographic Information and Geovisualization}, month = oct, number = {2}, pages = {112--122}, title = {{Algorithms for the Reduction of the Number of Points Required to Represent a Digitized Line or its Caricature}}, url = {http://dx.doi.org/10.3138/FM57-6770-U75U-7727}, volume = {10}, year = {1973} } @techreport{ruas:2001, author = {Ruas, Anne}, institution = {OEEPE Official Publication No. 39}, keywords = {algorithms, evaluation, generalisation, software}, title = {{Automatic generalisation project: Learning process from interactive generalisation}}, year = {2001} } @inproceedings{peterEtAl:2008, author = {Peter, M. and Haala, N. and Fritsch, D.}, booktitle = {ISPRS Commission II, WG II/3}, location = {Beijing, China}, organization = {ISPRS}, title = {{Preserving Ground Plan and Fa\c{c}ade Lines for 3D Building Generalization}}, year = {2008} } @inbook{meng:forberg:2007, author = {Meng, Liqiu and Forberg, Andrea}, booktitle = {The Generalisation of Geographic Information: Models and Applications}, chapter = {11}, editor = {Mackaness, William A. and Ruas, Anne and Sarjakoski, L. Tina}, publisher = {Elsevier}, title = {{3D Building Generalisation}}, year = {2007} } @inproceedings{forberg:2004, author = {Forberg, Andrea}, booktitle = {6th ICA Workshop on progress in automated map generalisation}, location = {Leicester, UK}, organization = {ICA}, title = {{Simplification of 3D building data}}, year = {2004} } @inproceedings{Olszewski:2005, author = {Olszewski, Robert}, booktitle = {22nd International Cartographic Conference}, location = {La Coru\~{n}a, Spain}, organization = {ICA}, title = {{Utilisation of Artificial Intelligence Methods and Neurofuzzy Algorithms in the Process of Digital Terrain Model Generalization}}, year = {2005} } @inproceedings{kada:2007, author = {Kada, Martin}, booktitle = {23rd International Cartographic Conference}, location = {Moscow, Russia}, organization = {ICA}, title = {{3D Building Generalisation by Roof Simplification and Typification}}, year = {2007} } @inproceedings{kada:2005, author = {Kada, Martin}, booktitle = {22nd International Cartographic Conference}, location = {La Coru\~{n}a, Spain}, organization = {ICA}, title = {{3D Building Generalization}}, year = {2005} } @incollection{ai:zhang:2007, abstract = {{This paper presents a model for building cluster distribution analysis based on the Delaunay triangulation skeleton. The skeleton connection within the gap area among the building polygons obtains a special geometric construction similar to a Voronoi diagram that spatially partitions the gap area equally. Each building polygon is surrounded by a partitioning polygon which can be regarded as the growth region of the inner building. Based on this model, several cluster structure variables can be computed, such as the distribution density, the topological neighbour, the adjacent distance and the adjacent direction. Considering the constraints of position accuracy, statistical area balance and orthogonal shape in building generalization, the study presents a progressive algorithm of building cluster aggregation, including the conflict detection (where), the object (who) displacement and the geometric combination (how).
The algorithm has been realized in a generalization system and some experimental illustrations are provided in the paper.}}, author = {Ai, Tinghua and Zhang, Xiang}, doi = {10.1007/978-3-540-72385-1\_9}, booktitle = {The European Information Society}, keywords = {amalgamation, collapse, gen-operators, skeleton, spatial-analysis, urban}, pages = {153--170}, title = {{The Aggregation of Urban Building Clusters Based on the Skeleton Partitioning of Gap Space}}, url = {http://dx.doi.org/10.1007/978-3-540-72385-1\_9}, year = {2007} } @article{burghardt:cecconi:4826626, abstract = {{This paper describes an approach for the typification of buildings using a mesh-simplification technique. The approach is adapted from the area of computer graphics and was originally developed for surface reconstruction and mesh simplification. The main goal was to develop an algorithm which creates fast and reproducible results. The typification procedure is modelled as a two-stage process, with the steps 'positioning' and 'representation'. While the positioning step determines the number and the position of the building objects based on Delaunay triangulation, the representation step is used to calculate the size and orientation of the replacement buildings. The results presented show the important influence of weights during the positioning step to control the object distribution. The proposed parameters are the number of objects as well as several object characteristics such as size, shape, orientation, and semantics. The approach has to be extended if building alignments are also to be preserved. Further applications are imaginable, for instance icon placement on dynamic maps.}}, address = {Bristol, PA, USA}, author = {Burghardt, D. and Cecconi, A.}, day = {1}, doi = {10.1080/13658810600912323}, issn = {1365-8816}, journal = {International Journal of Geographical Information Science}, month = mar, number = {3}, pages = {283--298}, publisher = {Taylor \& Francis, Inc.}, title = {{Mesh simplification for building typification}}, url = {http://dx.doi.org/10.1080/13658810600912323}, volume = {21}, year = {2007} } @inproceedings{christensen:2003, author = {Christensen, A. H. J.}, booktitle = {21st International Cartographic Conference}, location = {Durban, South Africa}, pages = {146--153}, title = {{Two Experiments on Stream Network Generalization}}, year = {2003} } @inproceedings{heEtAl:2001, author = {He, Z. and Chen, T. and Pang, X. and Guo, L.}, booktitle = {20th International Cartographic Conference}, location = {Beijing, China}, organization = {ICA}, title = {{The Cartographic Generalization of Hydrographic Feature Based on the Fractal Geometry}}, year = {2001} } @book{li:2007, author = {Li, Zhilin}, isbn = {0-8493-9072-9}, publisher = {CRC Press, Taylor \& Francis}, series = {Environmental Engineering}, title = {{Algorithmic Foundation of Multi-Scale Spatial Representation}}, year = {2007} } @article{haunert:sester:2008, abstract = {{Skeletonization of polygons is a technique which is often applied to problems of cartography and geographic information science. In particular, it is needed for generalization tasks such as the collapse of small or narrow areas that are negligible for a certain scale. Different skeleton operators can be used for such tasks. One of them is the straight skeleton, which was rediscovered by computer scientists several years ago after decades of neglect. Its full range of practicability and its benefits for cartographic applications have not been revealed yet.
Based on the straight skeleton, an area collapse that preserves topological constraints, as well as a partial area collapse, can be performed. An automatic method for the derivation of road centerlines from a cadastral dataset, which uses special characteristics of the straight skeleton, is shown.}}, author = {Haunert, Jan-Henrik and Sester, Monika}, day = {19}, doi = {10.1007/s10707-007-0028-x}, journal = {GeoInformatica}, month = jun, number = {2}, pages = {169--191}, publisher = {Springer US}, title = {{Area Collapse and Road Centerlines based on Straight Skeletons}}, url = {http://dx.doi.org/10.1007/s10707-007-0028-x}, volume = {12}, year = {2008} } @inproceedings{thom:2005, author = {Thom, Stuart}, booktitle = {8th ICA Workshop on Generalisation and Multiple Representation}, location = {La Coru\~{n}a, Spain}, title = {{A Strategy for Collapsing OS Integrated Transport Network Dual Carriageways}}, url = {http://aci.ign.fr/Acoruna/Papers/Thom.pdf}, year = {2005} } @article{regnauld:2001, abstract = {{Cartographic generalization aims to represent geographical information on a map whose specifications are different from those of the original database. Generalization often implies scale reduction, which generates legibility problems. To be readable at smaller scale, geographical objects often need to be enlarged, which generates problems of overlapping features or map congestion. To manage this problem with respect to buildings, we present a method of selection based on the typification principle that creates a result with fewer objects, but preserves the initial pattern of distribution. For this we use a graph of proximity on the building set, which is analysed and segmented with respect to various criteria, taken from gestalt theory. This analysis provides geographical information that is attached to each group of buildings, such as the mean size of buildings, shape of the group, and density. This information is independent of scale. The information from the analysis stage is used to define methods to represent the groups at the target scale. The aim is to preserve the pattern as far as possible, and to preserve similarities and differences between the groups with regard to density, size and orientation of buildings. We present some results that have been obtained using the platform Strat\`{e}ge, developed in the COGIT laboratory at the Institut G\'{e}ographique National, Paris.}}, author = {Regnauld, N.}, day = {21}, doi = {10.1007/s00453-001-0008-8}, journal = {Algorithmica}, month = dec, number = {2}, pages = {312--333}, title = {{Contextual Building Typification in Automated Map Generalization}}, url = {http://dx.doi.org/10.1007/s00453-001-0008-8}, volume = {30}, year = {2001} } @inproceedings{huEtAl:2007, abstract = {{This paper proposes a novel approach to selective omission for streets based on mesh density. For meshes whose density exceeds a threshold, the highest-density mesh is taken out in turn and one of its road segments is eliminated; the mesh is then merged with an adjacent mesh. The road segment to be eliminated is the least important of all segments on the boundary of the mesh, according to parameters reflecting the importance of road segments. In this study, different mesh types have different thresholds, which preserves the density differences. The process of eliminating road segments and merging meshes ensures the network connectivity.
The proposed approach considers topological, geometric and semantic properties of the street network. A street network was selectively generalized using this approach, and the selection results are good.}}, address = {Washington, DC, USA}, author = {Hu, Yungang and Chen, Jun and Li, Zhilin and Zhao, Renliang}, booktitle = {ICIG '07: Proceedings of the Fourth International Conference on Image and Graphics}, doi = {10.1109/icig.2007.168}, isbn = {0-7695-2929-1}, pages = {903--908}, publisher = {IEEE Computer Society}, title = {{Selection of Streets Based on Mesh Density for Digital Map Generalization}}, url = {http://dx.doi.org/10.1109/icig.2007.168}, year = {2007} } @article{ruas:1998, abstract = {{The automation of the map design process through map generalisation continues to be a challenging area of research. It is acknowledged that a diverse range of techniques are applied during the process of map generalisation, and these have been mirrored by the creation of a range of algorithms that mimic these discrete operations (such as typification, aggregation, selection). This paper discusses in detail one such algorithm that resolves conflict among objects through displacement. Perhaps more critical than the algorithm itself is the stage prior to the application of displacement (identification, modelling), and the phase after application (the evaluation). It is argued that these two stages are absolutely critical to the successful design of automated systems. The paper begins with a review of other approaches to displacement and then describes a methodology that encompasses detection, resolution through displacement, and evaluation. This methodology has been implemented in Strat\`{e}ge, an object-oriented expert system devoted to contextual generalisation. Details of the implementation are given, and results are illustrated using 'real' geographical data. The results are evaluated and the applicability of the entire methodology is discussed in the broader context of other map generalisation algorithms, based on the explicit representation of constraints.}}, author = {Ruas, Anne}, doi = {10.1080/136588198241509}, journal = {International Journal of Geographical Information Science}, number = {8}, pages = {789--803}, publisher = {Taylor \& Francis}, title = {{A method for building displacement in automated map generalisation}}, url = {http://dx.doi.org/10.1080/136588198241509}, volume = {12}, year = {1998} } @article{allouche:moulin:2005, abstract = {{Empirical observations of the way cartographers deal with generalization problems lead to the hypothesis that they first detect patterns of anomalies in the cartographic data set and then eliminate the anomalies by transforming the data. Automatically identifying patterns of anomalies on the map is a difficult task when using GIS functions or traditional algorithmic approaches. Techniques based on neural networks have been widely used in artificial intelligence to solve pattern-recognition problems. In this paper, we explore how Kohonen-type neural networks can be used to deal with map generalization applications in which the main problem is to identify high-density regions that include cartographic elements of the same type. We also propose an algorithm to replace the cartographic elements located in a region by its surrounding polygon. The use of this type of neural network permitted us to generate different levels of grouping according to the chosen zoom scale on the map.
These levels correspond to a multiple representation of the generalized cartographic elements. As an illustration, we apply our approach to the automatic replacement of a group of houses, represented as a set of very close points in the original data set, by a polygon representing the corresponding urban area in the generalized map.}}, author = {Allouche, M. K. and Moulin, B.}, doi = {10.1080/13658810500161211}, journal = {International Journal of Geographical Information Science}, number = {8}, pages = {899--914}, publisher = {Taylor \& Francis}, title = {{Amalgamation in cartographic generalization using Kohonen's feature nets}}, url = {http://www.informaworld.com/smpp/content\~{}content=a723823704\~{}db=all}, volume = {19}, year = {2005} } @incollection{anders:2006, abstract = {{In this paper, the detection and typification of grid structures in building groups is described. Typification is a generalization operation that replaces a large number of similar objects by a smaller number of objects, while preserving the global structure of the object distribution. The typification approach is based on three processes. First, the grid structures are detected based on the so-called relative neighborhood graph. Second, the detected grid structures are regularized by a least-squares adjustment of an affine or Helmert transformation. The third process is the reduction or simplification of the grid structure, which can be done using the same affine or Helmert transformation approach.}}, author = {Anders, Karl-Heinrich}, booktitle = {Progress in Spatial Data Handling}, doi = {10.1007/3-540-35589-8\_40}, editor = {Riedl, Andreas and Kainz, Wolfgang and Elmes, Gregory A.}, isbn = {978-3-540-35588-5}, pages = {633--642}, publisher = {Springer}, title = {{Grid Typification}}, url = {http://dx.doi.org/10.1007/3-540-35589-8\_40}, year = {2006} } @article{regnauld:revell:2007, author = {Regnauld, Nicolas and Revell, Patrick}, doi = {10.1179/000870407x241782}, issn = {0008-7041}, journal = {The Cartographic Journal}, month = aug, number = {3}, pages = {239--250}, publisher = {Maney Publishing}, title = {{Automatic Amalgamation of Buildings for Producing Ordnance Survey 1:50 000 Scale Maps}}, url = {http://dx.doi.org/10.1179/000870407x241782}, volume = {44}, year = {2007} }