Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

github.com/mapsme/omim.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorAlex Zolotarev <deathbaba@gmail.com>2011-03-20 06:20:31 +0300
committerAlex Zolotarev <alex@maps.me>2015-09-23 01:13:54 +0300
commit3d2498ee2119e667580a54074126f7eff12f5aca (patch)
treecb35082737d6075de58b78ae5a1fe2b5b62cdcff /generator
parent1e7be338c611ee9f6ea8a2c139b2eb9ac242207f (diff)
- Created [generator], [generator_tests] and moved indexer_tool to [generator_tool]
Diffstat (limited to 'generator')
-rw-r--r--generator/classif_routine.cpp70
-rw-r--r--generator/classif_routine.hpp9
-rw-r--r--generator/data_cache_file.hpp254
-rw-r--r--generator/data_generator.cpp158
-rw-r--r--generator/data_generator.hpp8
-rw-r--r--generator/feature_bucketer.hpp126
-rw-r--r--generator/feature_generator.cpp331
-rw-r--r--generator/feature_generator.hpp59
-rw-r--r--generator/feature_merger.cpp31
-rw-r--r--generator/feature_merger.hpp30
-rw-r--r--generator/feature_sorter.cpp428
-rw-r--r--generator/feature_sorter.hpp54
-rw-r--r--generator/first_pass_parser.hpp107
-rw-r--r--generator/generator.pro44
-rw-r--r--generator/generator_tests/feature_bucketer_test.cpp65
-rw-r--r--generator/generator_tests/generator_tests.pro23
-rw-r--r--generator/generator_tool/generator_tool.cpp197
-rw-r--r--generator/generator_tool/generator_tool.pro22
-rw-r--r--generator/grid_generator.cpp192
-rw-r--r--generator/grid_generator.hpp8
-rw-r--r--generator/kml_parser.cpp298
-rw-r--r--generator/kml_parser.hpp28
-rw-r--r--generator/osm2type.cpp652
-rw-r--r--generator/osm2type.hpp20
-rw-r--r--generator/osm_element.hpp608
-rw-r--r--generator/polygonizer.hpp203
-rw-r--r--generator/statistics.cpp157
-rw-r--r--generator/statistics.hpp76
-rw-r--r--generator/update_generator.cpp117
-rw-r--r--generator/update_generator.hpp8
-rw-r--r--generator/world_map_generator.hpp201
-rw-r--r--generator/xml_element.cpp82
-rw-r--r--generator/xml_element.hpp49
33 files changed, 4715 insertions, 0 deletions
diff --git a/generator/classif_routine.cpp b/generator/classif_routine.cpp
new file mode 100644
index 0000000000..dd9a6248e7
--- /dev/null
+++ b/generator/classif_routine.cpp
@@ -0,0 +1,70 @@
+#include "classif_routine.hpp"
+#include "osm2type.hpp"
+
+#include "../indexer/classificator.hpp"
+#include "../indexer/drawing_rules.hpp"
+
+//#include "../indexer/scales.hpp"
+
+//#include "../coding/reader.hpp"
+
+//#include "../base/logging.hpp"
+
+//#include "../std/stdio.hpp"
+
+//#include "../base/start_mem_debug.hpp"
+
+
+namespace classificator
+{
+ void parse_osm_types(int start, int end, string const & path)
+ {
+ for (int i = start; i <= end; ++i)
+ {
+ char buf[5] = { 0 };
+ sprintf(buf, "%d", i);
+
+ string const inFile = path + buf + ".xml";
+ ftype::ParseOSMTypes(inFile.c_str(), i);
+ }
+ }
+
+ void GenerateAndWrite(string const & path)
+ {
+    // Experimental - add drawing rules in the program.
+ //string const fullName = path + "drawing_rules.bin";
+ //drule::ReadRules(fullName.c_str());
+
+ //int const color = 0;
+ //double const pixWidth = 1.5;
+ //for (int i = 0; i <= scales::GetUpperScale(); ++i)
+ //{
+ // size_t const ind = drule::rules().AddLineRule(i, color, pixWidth);
+ // LOG_SHORT(LINFO, ("Scale = ", i, "; Index = ", ind));
+ //}
+
+ //drule::WriteRules(fullName.c_str());
+
+ //return;
+
+ // 1. generic types
+ parse_osm_types(0, 11, path + "styles/caption-z");
+ parse_osm_types(6, 17, path + "styles/osm-map-features-z");
+
+ // 2. POI (not used)
+ //parse_osm_types(12, 17, path + "styles/osm-POI-features-z");
+
+ // 3. generate map
+ string const inFile = path + "styles/mapswithme.xml";
+ for (int i = 0; i <= 17; ++i)
+ ftype::ParseOSMTypes(inFile.c_str(), i);
+
+ drule::WriteRules(string(path + "drawing_rules.bin").c_str());
+ classif().PrintClassificator(string(path + "classificator.txt").c_str());
+ }
+
+ void PrepareForFeatureGeneration()
+ {
+ classif().SortClassificator();
+ }
+}
diff --git a/generator/classif_routine.hpp b/generator/classif_routine.hpp
new file mode 100644
index 0000000000..0dc158a783
--- /dev/null
+++ b/generator/classif_routine.hpp
@@ -0,0 +1,9 @@
+#pragma once
+
+#include "../std/string.hpp"
+
+namespace classificator
+{
+ void GenerateAndWrite(string const & dir);
+ void PrepareForFeatureGeneration();
+}
diff --git a/generator/data_cache_file.hpp b/generator/data_cache_file.hpp
new file mode 100644
index 0000000000..086b70804b
--- /dev/null
+++ b/generator/data_cache_file.hpp
@@ -0,0 +1,254 @@
+#pragma once
+
+#include "../indexer/file_reader_stream.hpp"
+#include "../indexer/file_writer_stream.hpp"
+#include "../indexer/osm_decl.hpp"
+
+#include "../coding/file_reader.hpp"
+#include "../coding/file_writer.hpp"
+
+#include "../base/logging.hpp"
+
+#include "../std/utility.hpp"
+#include "../std/vector.hpp"
+#include "../std/algorithm.hpp"
+#include "../std/limits.hpp"
+#include "../std/exception.hpp"
+
+
+/// Classes for reading and writing arbitrary data in a file, with a map of
+/// offsets kept in memory for fast lookup by a user-supplied id.
+namespace cache
+{
+ namespace detail
+ {
+ template <class TFile, class TValue> class file_map_t
+ {
+ typedef pair<uint64_t, TValue> element_t;
+ typedef vector<element_t> id_cont_t;
+ id_cont_t m_memory;
+ TFile m_file;
+
+ static const size_t s_max_count = 1024;
+
+ struct element_less_t
+ {
+ bool operator() (element_t const & r1, element_t const & r2) const
+ {
+ return ((r1.first == r2.first) ? r1.second < r2.second : r1.first < r2.first);
+ }
+ bool operator() (element_t const & r1, uint64_t r2) const
+ {
+ return (r1.first < r2);
+ }
+ bool operator() (uint64_t r1, element_t const & r2) const
+ {
+ return (r1 < r2.first);
+ }
+ };
+
+ size_t uint64_to_size(uint64_t v)
+ {
+ ASSERT ( v < numeric_limits<size_t>::max(), ("Value to long for memory address : ", v) );
+ return static_cast<size_t>(v);
+ }
+
+ public:
+ file_map_t(string const & name) : m_file(name.c_str()) {}
+
+ string get_name() const { return m_file.GetName(); }
+
+ void flush_to_file()
+ {
+ if (!m_memory.empty())
+ {
+ m_file.Write(&m_memory[0], m_memory.size() * sizeof(element_t));
+ m_memory.clear();
+ }
+ }
+
+ void read_to_memory()
+ {
+ m_memory.clear();
+ uint64_t const fileSize = m_file.Size();
+ if (fileSize == 0) return;
+
+ LOG_SHORT(LINFO, ("Reading offsets started in file ", get_name()));
+
+ try
+ {
+ m_memory.resize(uint64_to_size(fileSize / sizeof(element_t)));
+ }
+ catch (exception const &) // bad_alloc
+ {
+ LOG(LCRITICAL, ("Insufficient memory for required offset map"));
+ }
+
+ m_file.Read(0, &m_memory[0], uint64_to_size(fileSize));
+
+ sort(m_memory.begin(), m_memory.end(), element_less_t());
+
+ LOG_SHORT(LINFO, ("Reading offsets finished"));
+ }
+
+ void write(uint64_t k, TValue const & v)
+ {
+ if (m_memory.size() > s_max_count)
+ flush_to_file();
+
+ m_memory.push_back(make_pair(k, v));
+ }
+
+ bool read_one(uint64_t k, TValue & v) const
+ {
+ typename id_cont_t::const_iterator i =
+ lower_bound(m_memory.begin(), m_memory.end(), k, element_less_t());
+ if ((i != m_memory.end()) && ((*i).first == k))
+ {
+ v = (*i).second;
+ return true;
+ }
+ return false;
+ }
+
+ typedef typename id_cont_t::const_iterator iter_t;
+ pair<iter_t, iter_t> GetRange(uint64_t k) const
+ {
+ return equal_range(m_memory.begin(), m_memory.end(), k, element_less_t());
+ }
+
+ template <class ToDo> void for_each_ret(uint64_t k, ToDo & toDo) const
+ {
+ pair<iter_t, iter_t> range = GetRange(k);
+ for (; range.first != range.second; ++range.first)
+ if (toDo((*range.first).second))
+ return;
+ }
+ };
+ }
+
+ template <class TStream, class TOffsetFile> class DataFileBase
+ {
+ public:
+ typedef uint64_t user_id_t;
+
+ protected:
+ TStream m_stream;
+ detail::file_map_t<TOffsetFile, uint64_t> m_offsets;
+
+ public:
+ DataFileBase(string const & name)
+ : m_stream(name.c_str()), m_offsets(name + OFFSET_EXT)
+ {
+ }
+ };
+
+ class DataFileWriter : public DataFileBase<FileWriterStream, FileWriter>
+ {
+ typedef DataFileBase<FileWriterStream, FileWriter> base_type;
+
+ static const size_t s_max_count = 1024;
+
+ public:
+ DataFileWriter(string const & name) : base_type(name) {}
+
+ template <class T> void Write(user_id_t id, T const & t)
+ {
+ m_offsets.write(id, m_stream.Pos());
+ m_stream << t;
+ }
+
+ void SaveOffsets()
+ {
+ m_offsets.flush_to_file();
+ }
+ };
+
+ class DataFileReader : public DataFileBase<FileReaderStream, FileReader>
+ {
+ typedef DataFileBase<FileReaderStream, FileReader> base_type;
+
+ public:
+ DataFileReader(string const & name) : base_type(name) {}
+
+ template <class T> bool Read(user_id_t id, T & t)
+ {
+ uint64_t pos;
+ if (m_offsets.read_one(id, pos))
+ {
+ m_stream.Seek(pos);
+ m_stream >> t;
+ return true;
+ }
+ else
+ {
+ LOG_SHORT(LWARNING, ("Can't find offset in file ", m_offsets.get_name(), " by id ", id) );
+ return false;
+ }
+ }
+
+ void LoadOffsets()
+ {
+ m_offsets.read_to_memory();
+ }
+ };
+
+ class MappedWay
+ {
+ public:
+
+ enum WayType
+ {
+ coast_direct = 0,
+ empty_direct = 1,
+ coast_opposite = 2,
+ empty_opposite = 3
+ };
+
+ MappedWay() : m_id(0) {}
+ MappedWay(uint64_t id, WayType type) : m_id((id << 2) | type)
+ {
+ CHECK_EQUAL(0, id & 0xC000000000000000ULL, ("Highest 2 bits should be 0.", id));
+ }
+
+ bool operator<(MappedWay const & r) const
+ {
+ return m_id < r.m_id;
+ }
+
+ uint64_t GetId() const { return m_id >> 2; }
+ WayType GetType() const { return static_cast<WayType>(m_id & 3); }
+
+ private:
+ uint64_t m_id;
+ };
+ STATIC_ASSERT(sizeof(MappedWay) == 8);
+
+ template <class TNodesHolder, class TData, class TFile>
+ class BaseFileHolder
+ {
+ protected:
+ typedef typename TData::user_id_t user_id_t;
+
+ TNodesHolder & m_nodes;
+
+ TData m_ways, m_relations;
+
+ typedef detail::file_map_t<TFile, uint64_t> offset_map_t;
+ offset_map_t m_nodes2rel, m_ways2rel;
+
+ typedef detail::file_map_t<TFile, MappedWay> ways_map_t;
+ ways_map_t m_mappedWays;
+
+ public:
+ BaseFileHolder(TNodesHolder & nodes, string const & dir)
+ : m_nodes(nodes),
+ m_ways(dir + WAYS_FILE),
+ m_relations(dir + RELATIONS_FILE),
+ m_nodes2rel(dir + NODES_FILE + ID2REL_EXT),
+ m_ways2rel(dir + WAYS_FILE + ID2REL_EXT),
+ m_mappedWays(dir + MAPPED_WAYS)
+ {
+ }
+ };
+}
diff --git a/generator/data_generator.cpp b/generator/data_generator.cpp
new file mode 100644
index 0000000000..b02fd9551c
--- /dev/null
+++ b/generator/data_generator.cpp
@@ -0,0 +1,158 @@
+#include "data_generator.hpp"
+#include "data_cache_file.hpp"
+#include "first_pass_parser.hpp"
+
+#include "../indexer/osm_decl.hpp"
+
+#include "../base/std_serialization.hpp"
+#include "../base/logging.hpp"
+
+#include "../std/bind.hpp"
+
+#include "../base/start_mem_debug.hpp"
+
+
+namespace data
+{
+
+template <class TNodesHolder>
+class FileHolder : public cache::BaseFileHolder<TNodesHolder, cache::DataFileWriter, FileWriter>
+{
+ typedef cache::BaseFileHolder<TNodesHolder, cache::DataFileWriter, FileWriter> base_type;
+
+ typedef typename base_type::user_id_t user_id_t;
+
+ template <class TMap, class TVec>
+ void add_id2rel_vector(TMap & rMap, user_id_t relid, TVec const & v)
+ {
+ for (size_t i = 0; i < v.size(); ++i)
+ rMap.write(v[i].first, relid);
+ }
+
+public:
+ FileHolder(TNodesHolder & nodes, string const & dir) : base_type(nodes, dir) {}
+
+ void AddNode(uint64_t id, double lat, double lng)
+ {
+ this->m_nodes.AddPoint(id, lat, lng);
+ }
+
+ void AddWay(user_id_t id, WayElement const & e)
+ {
+ this->m_ways.Write(id, e);
+ }
+
+ void AddRelation(user_id_t id, RelationElement const & e)
+ {
+ this->m_relations.Write(id, e);
+
+ add_id2rel_vector(this->m_nodes2rel, id, e.nodes);
+ add_id2rel_vector(this->m_ways2rel, id, e.ways);
+ }
+
+ void AddMappedWay(user_id_t id, WayElement const & e, bool emptyTags)
+ {
+ typedef cache::MappedWay way_t;
+
+ way_t::WayType const directType = (emptyTags ? way_t::empty_direct : way_t::coast_direct);
+ way_t::WayType const oppositeType = (emptyTags ? way_t::empty_opposite : way_t::coast_opposite);
+
+ this->m_mappedWays.write(e.nodes.front(), way_t(id, directType)); // direct
+ this->m_mappedWays.write(e.nodes.back(), way_t(id, oppositeType)); // opposite
+ }
+
+ void SaveIndex()
+ {
+ this->m_ways.SaveOffsets();
+ this->m_relations.SaveOffsets();
+
+ this->m_nodes2rel.flush_to_file();
+ this->m_ways2rel.flush_to_file();
+ this->m_mappedWays.flush_to_file();
+ }
+};
+
+
+class points_in_file_base
+{
+protected:
+ FileWriter m_file;
+ progress_policy m_progress;
+
+public:
+ points_in_file_base(string const & name, size_t factor) : m_file(name.c_str())
+ {
+ m_progress.Begin(name, factor);
+ }
+
+ uint64_t GetCount() const { return m_progress.GetCount(); }
+};
+
+class points_in_file : public points_in_file_base
+{
+public:
+ points_in_file(string const & name) : points_in_file_base(name, 1000) {}
+
+ void AddPoint(uint64_t id, double lat, double lng)
+ {
+ LatLon ll;
+ ll.lat = lat;
+ ll.lon = lng;
+ m_file.Seek(id * sizeof(ll));
+ m_file.Write(&ll, sizeof(ll));
+
+ m_progress.Inc();
+ }
+};
+
+class points_in_file_light : public points_in_file_base
+{
+public:
+ points_in_file_light(string const & name) : points_in_file_base(name, 10000) {}
+
+ void AddPoint(uint64_t id, double lat, double lng)
+ {
+ LatLonPos ll;
+ ll.pos = id;
+ ll.lat = lat;
+ ll.lon = lng;
+ m_file.Write(&ll, sizeof(ll));
+
+ m_progress.Inc();
+ }
+};
+
+template <class TNodesHolder>
+bool GenerateImpl(string const & dir)
+{
+ try
+ {
+ TNodesHolder nodes(dir + NODES_FILE);
+ typedef FileHolder<TNodesHolder> holder_t;
+ holder_t holder(nodes, dir);
+
+ FirstPassParser<holder_t> parser(holder);
+ ParseXMLFromStdIn(parser);
+
+ LOG(LINFO, ("Added points count = ", nodes.GetCount()));
+
+ holder.SaveIndex();
+ }
+ catch (Writer::Exception const & e)
+ {
+ LOG(LERROR, ("Error with file ", e.what()));
+ return false;
+ }
+
+ return true;
+}
+
+bool GenerateToFile(string const & dir, bool lightNodes)
+{
+ if (lightNodes)
+ return GenerateImpl<points_in_file_light>(dir);
+ else
+ return GenerateImpl<points_in_file>(dir);
+}
+
+}
diff --git a/generator/data_generator.hpp b/generator/data_generator.hpp
new file mode 100644
index 0000000000..c836c9feed
--- /dev/null
+++ b/generator/data_generator.hpp
@@ -0,0 +1,8 @@
+#pragma once
+
+#include "../std/string.hpp"
+
+namespace data
+{
+ bool GenerateToFile(string const & dir, bool lightNodes);
+}
diff --git a/generator/feature_bucketer.hpp b/generator/feature_bucketer.hpp
new file mode 100644
index 0000000000..786421c438
--- /dev/null
+++ b/generator/feature_bucketer.hpp
@@ -0,0 +1,126 @@
+#pragma once
+
+#include "world_map_generator.hpp"
+
+#include "../base/base.hpp"
+
+#include "../coding/file_writer.hpp"
+
+#include "../geometry/rect2d.hpp"
+
+#include "../indexer/feature.hpp"
+
+#include "../std/string.hpp"
+
+namespace feature
+{
+
+// Groups features in buckets according to their coordinates.
+template <class FeatureOutT, class FeatureClipperT, class BoundsT, typename CellIdT>
+class CellFeatureBucketer
+{
+ typedef typename FeatureClipperT::feature_builder_t feature_builder_t;
+
+ void Init()
+ {
+ uint32_t const size = 1 << 2 * m_Level;
+ m_Buckets.resize(size);
+ for (uint32_t i = 0; i < m_Buckets.size(); ++i)
+ {
+ CellIdT cell = CellIdT::FromBitsAndLevel(i, m_Level);
+ double minX, minY, maxX, maxY;
+ CellIdConverter<BoundsT, CellIdT>::GetCellBounds(cell, minX, minY, maxX, maxY);
+ m_Buckets[i].m_Rect = m2::RectD(minX, minY, maxX, maxY);
+ }
+ }
+
+public:
+ template <class TInfo>
+ explicit CellFeatureBucketer(TInfo & info)
+ : m_Level(info.cellBucketingLevel), m_FeatureOutInitData(info.datFilePrefix, info.datFileSuffix),
+ m_worldMap(info.maxScaleForWorldFeatures, info.mergeCoastlines, m_FeatureOutInitData)
+ {
+ Init();
+ }
+
+ /// @note this constructor doesn't support world file generation
+ CellFeatureBucketer(int level, typename FeatureOutT::InitDataType const & initData)
+ : m_Level(level), m_FeatureOutInitData(initData), m_worldMap(-1, false, initData)
+ {
+ Init();
+ }
+
+ void operator () (feature_builder_t & fb)
+ {
+ m_worldMap(fb);
+
+ FeatureClipperT clipper(fb);
+ // TODO: Is feature fully inside GetLimitRect()?
+ m2::RectD const limitRect = fb.GetLimitRect();
+ for (uint32_t i = 0; i < m_Buckets.size(); ++i)
+ {
+ // First quick and dirty limit rect intersection.
+ // Clipper may (or may not) do a better intersection.
+ if (m_Buckets[i].m_Rect.IsIntersect(limitRect))
+ {
+ feature_builder_t clippedFb;
+ if (clipper(m_Buckets[i].m_Rect, clippedFb))
+ {
+ if (!m_Buckets[i].m_pOut)
+ m_Buckets[i].m_pOut = new FeatureOutT(BucketName(i), m_FeatureOutInitData);
+
+ (*(m_Buckets[i].m_pOut))(clippedFb);
+ }
+ }
+ }
+ }
+
+ template <typename F> void GetBucketNames(F f) const
+ {
+ for (uint32_t i = 0; i < m_Buckets.size(); ++i)
+ if (m_Buckets[i].m_pOut)
+ f(BucketName(i));
+ }
+
+private:
+ inline string BucketName(uint32_t i) const
+ {
+ return CellIdT::FromBitsAndLevel(i, m_Level).ToString();
+ }
+
+ struct Bucket
+ {
+ Bucket() : m_pOut(NULL) {}
+ ~Bucket() { delete m_pOut; }
+
+ FeatureOutT * m_pOut;
+ m2::RectD m_Rect;
+ };
+
+ int m_Level;
+ typename FeatureOutT::InitDataType m_FeatureOutInitData;
+ vector<Bucket> m_Buckets;
+ WorldMapGenerator<FeatureOutT> m_worldMap;
+};
+
+class SimpleFeatureClipper
+{
+public:
+ typedef FeatureBuilder1 feature_builder_t;
+
+private:
+ feature_builder_t const & m_Feature;
+
+public:
+ explicit SimpleFeatureClipper(feature_builder_t const & f) : m_Feature(f)
+ {
+ }
+
+ bool operator () (m2::RectD const & /*rect*/, feature_builder_t & clippedF) const
+ {
+ clippedF = m_Feature;
+ return true;
+ }
+};
+
+}
diff --git a/generator/feature_generator.cpp b/generator/feature_generator.cpp
new file mode 100644
index 0000000000..4d38c84a46
--- /dev/null
+++ b/generator/feature_generator.cpp
@@ -0,0 +1,331 @@
+#include "feature_generator.hpp"
+#include "feature_bucketer.hpp"
+#include "data_cache_file.hpp"
+#include "osm_element.hpp"
+#include "polygonizer.hpp"
+
+#include "../defines.hpp"
+
+#include "../indexer/data_header.hpp"
+#include "../indexer/osm_decl.hpp"
+#include "../indexer/mercator.hpp"
+#include "../indexer/cell_id.hpp"
+
+#include "../coding/varint.hpp"
+
+#include "../base/assert.hpp"
+#include "../base/logging.hpp"
+#include "../base/stl_add.hpp"
+
+#include "../std/bind.hpp"
+#include "../std/unordered_map.hpp"
+
+
+namespace feature
+{
+
+template <class TNodesHolder>
+class FileHolder : public cache::BaseFileHolder<TNodesHolder, cache::DataFileReader, FileReader>
+{
+ typedef cache::DataFileReader reader_t;
+ typedef cache::BaseFileHolder<TNodesHolder, reader_t, FileReader> base_type;
+
+ typedef typename base_type::offset_map_t offset_map_t;
+ typedef typename base_type::ways_map_t ways_map_t;
+
+ typedef typename base_type::user_id_t user_id_t;
+
+ template <class TElement, class ToDo> struct process_base
+ {
+ reader_t & m_reader;
+ protected:
+ ToDo & m_toDo;
+ public:
+ process_base(reader_t & reader, ToDo & toDo) : m_reader(reader), m_toDo(toDo) {}
+
+ bool operator() (uint64_t id)
+ {
+ TElement e;
+ if (m_reader.Read(id, e))
+ return m_toDo(id, e);
+ return false;
+ }
+ };
+
+ template <class ToDo> struct process_relation : public process_base<RelationElement, ToDo>
+ {
+ typedef process_base<RelationElement, ToDo> base_type;
+ public:
+ process_relation(reader_t & reader, ToDo & toDo) : base_type(reader, toDo) {}
+ };
+
+ template <class ToDo> struct process_relation_cached : public process_relation<ToDo>
+ {
+ typedef process_relation<ToDo> base_type;
+
+ public:
+ process_relation_cached(reader_t & rels, ToDo & toDo)
+ : base_type(rels, toDo) {}
+
+ bool operator() (uint64_t id)
+ {
+ switch (this->m_toDo(id))
+ {
+ case 1: return true;
+ case -1: return false;
+ default: return base_type::operator()(id);
+ }
+ }
+ };
+
+public:
+ FileHolder(TNodesHolder & holder, string const & dir) : base_type(holder, dir) {}
+
+ bool GetNode(uint64_t id, double & lat, double & lng)
+ {
+ return this->m_nodes.GetPoint(id, lat, lng);
+ }
+
+ bool GetWay(user_id_t id, WayElement & e)
+ {
+ return this->m_ways.Read(id, e);
+ }
+
+ bool GetNextWay(user_id_t & prevWay, user_id_t node, WayElement & e)
+ {
+ typedef typename ways_map_t::iter_t iter_t;
+ pair<iter_t, iter_t> range = this->m_mappedWays.GetRange(node);
+ for (; range.first != range.second; ++range.first)
+ {
+ cache::MappedWay const & w = range.first->second;
+ if (w.GetType() != cache::MappedWay::coast_opposite && w.GetId() != prevWay)
+ {
+ this->m_ways.Read(w.GetId(), e);
+ prevWay = w.GetId();
+ return true;
+ }
+ }
+ return false;
+ }
+
+ template <class ToDo> void ForEachRelationByWay(user_id_t id, ToDo & toDo)
+ {
+ process_relation<ToDo> processor(this->m_relations, toDo);
+ this->m_ways2rel.for_each_ret(id, processor);
+ }
+
+ template <class ToDo> void ForEachRelationByNodeCached(user_id_t id, ToDo & toDo)
+ {
+ process_relation_cached<ToDo> processor(this->m_relations, toDo);
+ this->m_nodes2rel.for_each_ret(id, processor);
+ }
+
+ template <class ToDo> void ForEachRelationByWayCached(user_id_t id, ToDo & toDo)
+ {
+ process_relation_cached<ToDo> processor(this->m_relations, toDo);
+ this->m_ways2rel.for_each_ret(id, processor);
+ }
+
+ void LoadIndex()
+ {
+ this->m_ways.LoadOffsets();
+ this->m_relations.LoadOffsets();
+
+ this->m_nodes2rel.read_to_memory();
+ this->m_ways2rel.read_to_memory();
+ this->m_mappedWays.read_to_memory();
+ }
+};
+
+///////////////////////////////////////////////////////////////////////////////////////////////////
+// FeaturesCollector implementation
+///////////////////////////////////////////////////////////////////////////////////////////////////
+
+FeaturesCollector::FeaturesCollector(string const & fName)
+: m_datFile(fName)
+{
+}
+
+FeaturesCollector::FeaturesCollector(string const & bucket,
+ FeaturesCollector::InitDataType const & prefix)
+: m_datFile(prefix.first + bucket + prefix.second)
+{
+}
+
+uint32_t FeaturesCollector::GetFileSize(FileWriter const & f)
+{
+ // .dat file should be less than 4Gb
+ uint64_t const pos = f.Pos();
+ uint32_t const ret = static_cast<uint32_t>(pos);
+
+ CHECK_EQUAL(static_cast<uint64_t>(ret), pos, ("Feature offset is out of 32bit boundary!"));
+ return ret;
+}
+
+void FeaturesCollector::WriteFeatureBase(vector<char> const & bytes, FeatureBuilder1 const & fb)
+{
+ size_t const sz = bytes.size();
+ CHECK ( sz != 0, ("Empty feature not allowed here!") );
+
+ if (sz > 0)
+ {
+ WriteVarUint(m_datFile, sz);
+ m_datFile.Write(&bytes[0], sz);
+
+ m_bounds.Add(fb.GetLimitRect());
+ }
+}
+
+void FeaturesCollector::operator() (FeatureBuilder1 const & fb)
+{
+ (void)GetFileSize(m_datFile);
+
+ FeatureBuilder1::buffer_t bytes;
+ fb.Serialize(bytes);
+ WriteFeatureBase(bytes, fb);
+}
+
+///////////////////////////////////////////////////////////////////////////////////////////////////
+// Generate functions implementations.
+///////////////////////////////////////////////////////////////////////////////////////////////////
+
+class points_in_file
+{
+ FileReader m_file;
+
+public:
+ points_in_file(string const & name) : m_file(name) {}
+
+ bool GetPoint(uint64_t id, double & lat, double & lng) const
+ {
+ LatLon ll;
+ m_file.Read(id * sizeof(ll), &ll, sizeof(ll));
+
+ // assume that valid coordinate is not (0, 0)
+ if (ll.lat != 0.0 || ll.lon != 0.0)
+ {
+ lat = ll.lat;
+ lng = ll.lon;
+ return true;
+ }
+ else
+ {
+ LOG(LERROR, ("Node with id = ", id, " not found!"));
+ return false;
+ }
+ }
+};
+
+class points_in_map
+{
+ typedef unordered_map<uint64_t, pair<double, double> > cont_t;
+ typedef cont_t::const_iterator iter_t;
+ cont_t m_map;
+
+ static bool equal_coord(double d1, double d2)
+ {
+ return ::fabs(d1 - d2) < 1.0E-8;
+ }
+
+public:
+ points_in_map(string const & name)
+ {
+ FileReader reader(name);
+ uint64_t const count = reader.Size();
+
+ uint64_t pos = 0;
+ while (pos < count)
+ {
+ LatLonPos ll;
+ reader.Read(pos, &ll, sizeof(ll));
+
+ pair<iter_t, bool> ret = m_map.insert(make_pair(ll.pos, make_pair(ll.lat, ll.lon)));
+ if (ret.second == true)
+ {
+#ifdef DEBUG
+ pair<double, double> const & c = ret.first->second;
+ ASSERT ( equal_coord(c.first, ll.lat), () );
+ ASSERT ( equal_coord(c.second, ll.lon), () );
+#endif
+ }
+
+ pos += sizeof(ll);
+ }
+ }
+
+ bool GetPoint(uint64_t id, double & lat, double & lng) const
+ {
+ iter_t i = m_map.find(id);
+ if (i != m_map.end())
+ {
+ lat = i->second.first;
+ lng = i->second.second;
+ return true;
+ }
+ return false;
+ }
+};
+
+template <class TNodesHolder, template <class, class> class TParser>
+bool GenerateImpl(GenerateInfo & info)
+{
+ try
+ {
+ TNodesHolder nodes(info.tmpDir + NODES_FILE);
+
+ typedef FileHolder<TNodesHolder> holder_t;
+ holder_t holder(nodes, info.tmpDir);
+
+ holder.LoadIndex();
+
+ if (info.splitByPolygons)
+ {
+ typedef Polygonizer<FeaturesCollector, MercatorBounds, RectId> FeaturePolygonizerType;
+ // prefix is data dir
+ FeaturePolygonizerType bucketer(info);
+ TParser<FeaturePolygonizerType, holder_t> parser(bucketer, holder);
+ ParseXMLFromStdIn(parser);
+ bucketer.Finish();
+ info.bucketNames = bucketer.Names();
+ }
+ else
+ {
+ CHECK_GREATER_OR_EQUAL(info.cellBucketingLevel, 0, ());
+ CHECK_LESS(info.cellBucketingLevel, 10, ());
+
+ typedef CellFeatureBucketer<FeaturesCollector, SimpleFeatureClipper,
+ MercatorBounds, RectId> FeatureBucketerType;
+ FeatureBucketerType bucketer(info);
+ TParser<FeatureBucketerType, holder_t> parser(bucketer, holder);
+ ParseXMLFromStdIn(parser);
+ bucketer.GetBucketNames(MakeBackInsertFunctor(info.bucketNames));
+ }
+ }
+ catch (Reader::Exception const & e)
+ {
+ LOG(LERROR, ("Error with file ", e.what()));
+ return false;
+ }
+
+ return true;
+}
+
+bool GenerateFeatures(GenerateInfo & info, bool lightNodes)
+{
+ if (lightNodes)
+ return GenerateImpl<points_in_map, SecondPassParserUsual>(info);
+ else
+ return GenerateImpl<points_in_file, SecondPassParserUsual>(info);
+}
+
+/*
+bool GenerateCoastlines(GenerateInfo & info, bool lightNodes)
+{
+ if (lightNodes)
+ return GenerateImpl<points_in_map, SecondPassParserJoin>(info);
+ else
+ return GenerateImpl<points_in_file, SecondPassParserJoin>(info);
+}
+*/
+
+}
diff --git a/generator/feature_generator.hpp b/generator/feature_generator.hpp
new file mode 100644
index 0000000000..802aae8598
--- /dev/null
+++ b/generator/feature_generator.hpp
@@ -0,0 +1,59 @@
+#pragma once
+
+#include "../indexer/osm_decl.hpp"
+
+#include "../geometry/rect2d.hpp"
+
+#include "../coding/file_container.hpp"
+
+#include "../std/vector.hpp"
+#include "../std/string.hpp"
+
+class FeatureBuilder1;
+
+namespace feature
+{
+ struct GenerateInfo
+ {
+ GenerateInfo()
+ : maxScaleForWorldFeatures(-1), splitByPolygons(false),
+ simplifyCountriesLevel(-1), mergeCoastlines(false) {}
+ string tmpDir, datFilePrefix, datFileSuffix;
+ /// If not -1, world will be split by buckets with specified level
+ int cellBucketingLevel;
+ vector<string> bucketNames;
+ /// Features with scale level [0..maxScaleForWorldFeatures] will be
+ /// included into separate world data file
+ /// @note if -1, world file will not be created
+ int maxScaleForWorldFeatures;
+ bool splitByPolygons;
+ int simplifyCountriesLevel;
+ bool mergeCoastlines;
+ };
+
+ bool GenerateFeatures(GenerateInfo & info, bool lightNodes);
+ //bool GenerateCoastlines(GenerateInfo & info, bool lightNodes);
+
+ // Writes features to dat file.
+ class FeaturesCollector
+ {
+ protected:
+ FileWriter m_datFile;
+
+ m2::RectD m_bounds;
+
+ protected:
+ static uint32_t GetFileSize(FileWriter const & f);
+
+ void WriteFeatureBase(vector<char> const & bytes, FeatureBuilder1 const & fb);
+
+ public:
+ // Stores prefix and suffix of a dat file name.
+ typedef pair<string, string> InitDataType;
+
+ FeaturesCollector(string const & fName);
+ FeaturesCollector(string const & bucket, InitDataType const & prefix);
+
+ void operator() (FeatureBuilder1 const & f);
+ };
+}
diff --git a/generator/feature_merger.cpp b/generator/feature_merger.cpp
new file mode 100644
index 0000000000..b4ecbf82fc
--- /dev/null
+++ b/generator/feature_merger.cpp
@@ -0,0 +1,31 @@
+#include "feature_merger.hpp"
+
+#include "../base/logging.hpp"
+
+#define MAX_MERGED_POINTS_COUNT 10000
+
+FeatureBuilder1Merger::FeatureBuilder1Merger(FeatureBuilder1 const & fb)
+ : FeatureBuilder1(fb)
+{
+}
+
+bool FeatureBuilder1Merger::ReachedMaxPointsCount() const
+{
+ return (m_Geometry.size() > MAX_MERGED_POINTS_COUNT);
+}
+
+void FeatureBuilder1Merger::AppendFeature(FeatureBuilder1Merger const & fb)
+{
+ // check that both features are of linear type
+ CHECK(fb.m_bLinear && m_bLinear, ("Not linear feature"));
+
+ // check that classificator types are the same
+ CHECK_EQUAL(fb.m_Types, m_Types, ("Not equal types"));
+
+ // check last-first points equality
+ CHECK_EQUAL(m_Geometry.back(), fb.m_Geometry.front(), ("End and Start point are no equal"));
+ // merge fb at the end
+ size_t const size = fb.m_Geometry.size();
+ for (size_t i = 1; i < size; ++i)
+ AddPoint(fb.m_Geometry[i]);
+}
diff --git a/generator/feature_merger.hpp b/generator/feature_merger.hpp
new file mode 100644
index 0000000000..103c43bad5
--- /dev/null
+++ b/generator/feature_merger.hpp
@@ -0,0 +1,30 @@
+#pragma once
+
+#include "../indexer/feature.hpp"
+
+class FeatureBuilder1Merger : public FeatureBuilder1
+{
+public:
+ FeatureBuilder1Merger(FeatureBuilder1 const & fb);
+
+ /// adds fb's geometry at the end of own geometry,
+ /// but only if they have common point
+ void AppendFeature(FeatureBuilder1Merger const & fb);
+
+ void SetAreaSafe()
+ {
+ if (!m_bArea)
+ m_bArea = true;
+ }
+
+ uint32_t KeyType() const
+ {
+ ASSERT_EQUAL ( m_Types.size(), 1, () );
+ return m_Types.front();
+ }
+
+ bool ReachedMaxPointsCount() const;
+
+ m2::PointD FirstPoint() const { return m_Geometry.front(); }
+ m2::PointD LastPoint() const { return m_Geometry.back(); }
+};
diff --git a/generator/feature_sorter.cpp b/generator/feature_sorter.cpp
new file mode 100644
index 0000000000..467da10d09
--- /dev/null
+++ b/generator/feature_sorter.cpp
@@ -0,0 +1,428 @@
+#include "feature_sorter.hpp"
+#include "feature_generator.hpp"
+
+#include "../defines.hpp"
+
+#include "../indexer/data_header.hpp"
+#include "../indexer/feature_processor.hpp"
+#include "../indexer/feature_visibility.hpp"
+#include "../indexer/feature_impl.hpp"
+#include "../indexer/geometry_serialization.hpp"
+#include "../indexer/tesselator.hpp"
+
+#include "../geometry/polygon.hpp"
+
+#include "../platform/platform.hpp"
+
+#include "../coding/file_container.hpp"
+
+#include "../base/string_utils.hpp"
+#include "../base/logging.hpp"
+
+#include "../base/start_mem_debug.hpp"
+
+
+namespace
+{
+ typedef pair<uint64_t, uint64_t> TCellAndOffset;
+
+ class CalculateMidPoints
+ {
+ m2::PointD m_midLoc, m_midAll;
+ size_t m_locCount, m_allCount;
+
+ public:
+ CalculateMidPoints() : m_midAll(0, 0), m_allCount(0) {}
+
+ std::vector<TCellAndOffset> m_vec;
+
+ void operator() (FeatureBuilder1 const & ft, uint64_t pos)
+ {
+ // reset state
+ m_midLoc = m2::PointD(0, 0);
+ m_locCount = 0;
+
+ ft.ForEachPointRef(*this);
+ m_midLoc = m_midLoc / m_locCount;
+
+ uint64_t const pointAsInt64 = PointToInt64(m_midLoc.x, m_midLoc.y);
+ uint64_t const minScale = feature::MinDrawableScaleForFeature(ft.GetFeatureBase());
+ CHECK(minScale <= scales::GetUpperScale(), ("Dat file contain invisible feature"));
+
+ uint64_t const order = (minScale << 59) | (pointAsInt64 >> 5);
+ m_vec.push_back(make_pair(order, pos));
+ }
+
+ void operator() (m2::PointD const & p)
+ {
+ m_midLoc += p;
+ m_midAll += p;
+ ++m_locCount;
+ ++m_allCount;
+ }
+
+ m2::PointD GetCenter() const { return m_midAll / m_allCount; }
+ };
+
+ bool SortMidPointsFunc(TCellAndOffset const & c1, TCellAndOffset const & c2)
+ {
+ return c1.first < c2.first;
+ }
+}
+
+namespace feature
+{
+ typedef array<uint8_t, 4> scales_t;
+
+ class FeaturesCollector2 : public FeaturesCollector
+ {
+ FilesContainerW m_writer;
+
+ vector<FileWriter*> m_geoFile, m_trgFile;
+
+ feature::DataHeader m_header;
+
+ public:
+ FeaturesCollector2(string const & fName, feature::DataHeader const & header)
+ : FeaturesCollector(fName + DATA_FILE_TAG), m_writer(fName), m_header(header)
+ {
+ for (int i = 0; i < m_header.GetScalesCount(); ++i)
+ {
+ string const postfix = utils::to_string(i);
+ m_geoFile.push_back(new FileWriter(fName + GEOMETRY_FILE_TAG + postfix));
+ m_trgFile.push_back(new FileWriter(fName + TRIANGLE_FILE_TAG + postfix));
+ }
+ }
+
+ ~FeaturesCollector2()
+ {
+ // write own mwm header (now it's a base point only)
+ m_header.SetBounds(m_bounds);
+ FileWriter w = m_writer.GetWriter(HEADER_FILE_TAG);
+ m_header.Save(w);
+ w.Flush();
+
+ // assume like we close files
+ m_datFile.Flush();
+
+ m_writer.Append(m_datFile.GetName(), DATA_FILE_TAG);
+
+ for (int i = 0; i < m_header.GetScalesCount(); ++i)
+ {
+ string const geomFile = m_geoFile[i]->GetName();
+ string const trgFile = m_trgFile[i]->GetName();
+
+ delete m_geoFile[i];
+ delete m_trgFile[i];
+
+ string const postfix = utils::to_string(i);
+
+ string geoPostfix = GEOMETRY_FILE_TAG;
+ geoPostfix += postfix;
+ string trgPostfix = TRIANGLE_FILE_TAG;
+ trgPostfix += postfix;
+
+ m_writer.Append(geomFile, geoPostfix);
+ m_writer.Append(trgFile, trgPostfix);
+
+ FileWriter::DeleteFileX(geomFile);
+ FileWriter::DeleteFileX(trgFile);
+ }
+
+ m_writer.Finish();
+ }
+
+ private:
+ typedef vector<m2::PointD> points_t;
+ typedef list<points_t> holes_t;
+
+ class GeometryHolder
+ {
+ public:
+ FeatureBuilder2::buffers_holder_t m_buffer;
+
+ private:
+ FeaturesCollector2 & m_rMain;
+ FeatureBuilder2 & m_rFB;
+
+ points_t m_current;
+
+ int64_t m_base;
+
+ void WriteOuterPoints(points_t const & points, int i)
+ {
+ m_buffer.m_ptsMask |= (1 << i);
+ m_buffer.m_ptsOffset.push_back(m_rMain.GetFileSize(*m_rMain.m_geoFile[i]));
+ serial::SaveOuterPath(points, m_base, *m_rMain.m_geoFile[i]);
+ }
+
+ void WriteOuterTriangles(points_t const & bound, holes_t const & holes, int i)
+ {
+ m_buffer.m_trgMask |= (1 << i);
+ m_buffer.m_trgOffset.push_back(m_rMain.GetFileSize(*m_rMain.m_trgFile[i]));
+
+ // tesselation
+ tesselator::TrianglesInfo info;
+ tesselator::TesselateInterior(bound, holes, info);
+
+ serial::TrianglesChainSaver saver(m_base);
+
+ // points conversion
+ tesselator::PointsInfo points;
+ info.GetPointsInfo(saver.GetBasePoint(), saver.GetMaxPoint(), &serial::pts::D2U, points);
+
+ // triangles processing (should be optimal)
+ info.ProcessPortions(points, saver, true);
+
+ // check triangles processing (to compare with optimal)
+ //serial::TrianglesChainSaver checkSaver(m_base);
+ //info.ProcessPortions(points, checkSaver, false);
+
+ //CHECK_LESS_OR_EQUAL(saver.GetBufferSize(), checkSaver.GetBufferSize(), ());
+
+ // saving to file
+ saver.Save(*m_rMain.m_trgFile[i]);
+ }
+
+ void FillInnerPointsMask(points_t const & points, uint32_t scaleIndex)
+ {
+ points_t const & src = m_buffer.m_innerPts;
+ ASSERT ( !src.empty(), () );
+
+ ASSERT ( are_points_equal(src.front(), points.front()), () );
+ ASSERT ( are_points_equal(src.back(), points.back()), () );
+
+ size_t j = 1;
+ for (size_t i = 1; i < points.size()-1; ++i)
+ {
+ for (; j < src.size()-1; ++j)
+ {
+ if (are_points_equal(src[j], points[i]))
+ {
+ // set corresponding 2 bits for source point [j] to scaleIndex
+ uint32_t mask = 0x3;
+ m_buffer.m_ptsSimpMask &= ~(mask << (2*(j-1)));
+ m_buffer.m_ptsSimpMask |= (scaleIndex << (2*(j-1)));
+ break;
+ }
+ }
+
+ ASSERT_LESS ( j, src.size()-1, ("Simplified point not found in source point array") );
+ }
+ }
+
+ bool m_ptsInner, m_trgInner;
+
+ class strip_emitter
+ {
+ points_t const & m_src;
+ points_t & m_dest;
+ public:
+ strip_emitter(points_t const & src, points_t & dest)
+ : m_src(src), m_dest(dest)
+ {
+ m_dest.reserve(m_src.size());
+ }
+ void operator() (size_t i)
+ {
+ m_dest.push_back(m_src[i]);
+ }
+ };
+
+ public:
+ GeometryHolder(FeaturesCollector2 & rMain, FeatureBuilder2 & fb, int64_t base)
+ : m_rMain(rMain), m_rFB(fb), m_base(base), m_ptsInner(true), m_trgInner(true)
+ {
+ }
+
+ points_t const & GetSourcePoints()
+ {
+ return (!m_current.empty() ? m_current : m_rFB.GetGeometry());
+ }
+
+ void AddPoints(points_t const & points, int scaleIndex)
+ {
+ if (m_ptsInner && points.size() < 15)
+ {
+ if (m_buffer.m_innerPts.empty())
+ m_buffer.m_innerPts = points;
+ else
+ FillInnerPointsMask(points, scaleIndex);
+ m_current = points;
+ }
+ else
+ {
+ m_ptsInner = false;
+ WriteOuterPoints(points, scaleIndex);
+ }
+ }
+
+ bool NeedProcessTriangles() const
+ {
+ return (!m_trgInner || m_buffer.m_innerTrg.empty());
+ }
+
+ bool TryToMakeStrip(points_t & points)
+ {
+ ASSERT ( are_points_equal(points.front(), points.back()), () );
+ // At this point we don't need last point equal to first.
+ // If you try to remove it in first step, 'simplify' will work bad for such polygons.
+ points.pop_back();
+
+ size_t const count = points.size();
+ if (!m_trgInner || count > 15 + 2)
+ {
+ m_trgInner = false;
+ return false;
+ }
+
+ ASSERT ( m_buffer.m_innerTrg.empty(), () );
+
+ if (!IsPolygonCCW(points.begin(), points.end()))
+ {
+ reverse(points.begin(), points.end());
+ ASSERT ( IsPolygonCCW(points.begin(), points.end()), (points) );
+ }
+
+ size_t const index = FindSingleStrip(count,
+ IsDiagonalVisibleFunctor<points_t::const_iterator>(points.begin(), points.end()));
+
+ if (index == count)
+ {
+ m_trgInner = false;
+ return false;
+ }
+
+ MakeSingleStripFromIndex(index, count, strip_emitter(points, m_buffer.m_innerTrg));
+
+ ASSERT_EQUAL ( count, m_buffer.m_innerTrg.size(), () );
+ return true;
+ }
+
+ void AddTriangles(points_t const & bound, holes_t const & holes, int scaleIndex)
+ {
+ ASSERT ( m_buffer.m_innerTrg.empty(), () );
+ m_trgInner = false;
+
+ WriteOuterTriangles(bound, holes, scaleIndex);
+ }
+ };
+
+ public:
+ void operator() (FeatureBuilder2 & fb)
+ {
+ (void)GetFileSize(m_datFile);
+
+ GeometryHolder holder(*this, fb, m_header.GetBase());
+
+ bool const isLine = fb.IsLine();
+ bool const isArea = fb.IsArea();
+
+ for (int i = m_header.GetScalesCount()-1; i >= 0; --i)
+ {
+ if (fb.IsDrawableInRange(i > 0 ? m_header.GetScale(i-1) + 1 : 0, m_header.GetScale(i)))
+ {
+ // simplify and serialize geometry
+ points_t points;
+ SimplifyPoints(holder.GetSourcePoints(), points, m_header.GetScale(i));
+
+ if (isLine)
+ holder.AddPoints(points, i);
+
+ if (isArea && points.size() > 3 && holder.NeedProcessTriangles())
+ {
+ // simplify and serialize triangles
+
+ holes_t const & holes = fb.GetHoles();
+
+ if (holes.empty() && holder.TryToMakeStrip(points))
+ continue;
+
+ holes_t simpleHoles;
+ for (holes_t::const_iterator iH = holes.begin(); iH != holes.end(); ++iH)
+ {
+ simpleHoles.push_back(points_t());
+
+ SimplifyPoints(*iH, simpleHoles.back(), m_header.GetScale(i));
+
+ if (simpleHoles.back().size() < 3)
+ simpleHoles.pop_back();
+ }
+
+ holder.AddTriangles(points, simpleHoles, i);
+ }
+ }
+ }
+
+ if (fb.PreSerialize(holder.m_buffer))
+ {
+ fb.Serialize(holder.m_buffer, m_header.GetBase());
+
+ WriteFeatureBase(holder.m_buffer.m_buffer, fb);
+ }
+ }
+ };
+
+ /// Simplify geometry for the upper scale.
+ FeatureBuilder2 & GetFeatureBuilder2(FeatureBuilder1 & fb)
+ {
+ return static_cast<FeatureBuilder2 &>(fb);
+ }
+
+
+ bool GenerateFinalFeatures(string const & datFilePath, bool bSort, bool bWorld)
+ {
+ // rename input file
+ Platform & platform = GetPlatform();
+ string tempDatFilePath(datFilePath);
+ tempDatFilePath += ".notsorted";
+
+ FileWriter::DeleteFileX(tempDatFilePath);
+ if (!platform.RenameFileX(datFilePath, tempDatFilePath))
+ {
+ LOG(LWARNING, ("File ", datFilePath, " doesn't exist or sharing violation!"));
+ return false;
+ }
+
+ // stores cellIds for middle points
+ CalculateMidPoints midPoints;
+ ForEachFromDatRawFormat(tempDatFilePath, midPoints);
+
+ // sort features by their middle point
+ if (bSort)
+ std::sort(midPoints.m_vec.begin(), midPoints.m_vec.end(), &SortMidPointsFunc);
+
+ // store sorted features
+ {
+ FileReader reader(tempDatFilePath);
+
+ feature::DataHeader header;
+ header.SetBase(midPoints.GetCenter());
+ header.SetScales(bWorld ? g_arrWorldScales : g_arrCountryScales);
+
+ FeaturesCollector2 collector(datFilePath, header);
+
+ FeatureBuilder1::buffer_t buffer;
+ for (size_t i = 0; i < midPoints.m_vec.size(); ++i)
+ {
+ ReaderSource<FileReader> src(reader);
+ src.Skip(midPoints.m_vec[i].second);
+
+ FeatureBuilder1 f;
+ feature::ReadFromSourceRowFormat(src, f);
+
+ // emit the feature
+ collector(GetFeatureBuilder2(f));
+ }
+
+ // at this point files should be closed
+ }
+
+ // remove old not-sorted dat file
+ FileWriter::DeleteFileX(tempDatFilePath);
+
+ FileWriter::DeleteFileX(datFilePath + DATA_FILE_TAG);
+
+ return true;
+ }
+} // namespace feature
diff --git a/generator/feature_sorter.hpp b/generator/feature_sorter.hpp
new file mode 100644
index 0000000000..5f111f91eb
--- /dev/null
+++ b/generator/feature_sorter.hpp
@@ -0,0 +1,54 @@
+#pragma once
+
+#include "../geometry/point2d.hpp"
+#include "../geometry/simplification.hpp"
+#include "../geometry/distance.hpp"
+
+#include "../indexer/scales.hpp"
+
+#include "../std/string.hpp"
+#include "../std/vector.hpp"
+
+namespace feature
+{
+ /// Final generation of data from input feature-dat-file.
+ /// @param[in] bSort sorts features in the given file by their mid points
+ bool GenerateFinalFeatures(string const & datFile, bool bSort, bool bWorld);
+
+ template <class PointT>
+ inline bool are_points_equal(PointT const & p1, PointT const & p2)
+ {
+ return p1 == p2;
+ }
+
+ template <>
+ inline bool are_points_equal<m2::PointD>(m2::PointD const & p1, m2::PointD const & p2)
+ {
+ return AlmostEqual(p1, p2);
+ }
+
+ template <class PointsContainerT>
+ void SimplifyPoints(PointsContainerT const & in, PointsContainerT & out, int level)
+ {
+ if (in.size() >= 2)
+ {
+ typedef mn::DistanceToLineSquare<m2::PointD> DistanceF;
+ double const eps = my::sq(scales::GetEpsilonForSimplify(level));
+
+ SimplifyNearOptimal<DistanceF>(20, in.begin(), in.end(), eps,
+ AccumulateSkipSmallTrg<DistanceF, m2::PointD>(out, eps));
+
+ ASSERT_GREATER ( out.size(), 1, () );
+ ASSERT ( are_points_equal(in.front(), out.front()), () );
+ ASSERT ( are_points_equal(in.back(), out.back()), () );
+
+#ifdef DEBUG
+ //for (size_t i = 2; i < out.size(); ++i)
+ //{
+ // double const dist = DistanceF(out[i-2], out[i])(out[i-1]);
+ // ASSERT ( dist >= eps, (dist, eps, in) );
+ //}
+#endif
+ }
+ }
+}
diff --git a/generator/first_pass_parser.hpp b/generator/first_pass_parser.hpp
new file mode 100644
index 0000000000..cbea6fac04
--- /dev/null
+++ b/generator/first_pass_parser.hpp
@@ -0,0 +1,107 @@
+#pragma once
+
+#include "xml_element.hpp"
+
+#include "../indexer/osm_decl.hpp"
+#include "../indexer/mercator.hpp"
+
+#include "../base/string_utils.hpp"
+
+
+template <class THolder>
+class FirstPassParser : public BaseOSMParser
+{
+ THolder & m_holder;
+
+public:
+ FirstPassParser(THolder & holder) : m_holder(holder)
+ {
+ static char const * tags[] = { "osm", "node", "way", "relation" };
+ SetTags(tags);
+ }
+
+protected:
+ virtual void EmitElement(XMLElement * p)
+ {
+ uint64_t id;
+ VERIFY ( utils::to_uint64(p->attrs["id"], id), ("Unknown element with invalid id : ", p->attrs["id"]) );
+
+ if (p->name == "node")
+ {
+ // store point
+
+ double lat, lng;
+ VERIFY ( utils::to_double(p->attrs["lat"], lat), ("Bad node lat : ", p->attrs["lat"]) );
+ VERIFY ( utils::to_double(p->attrs["lon"], lng), ("Bad node lon : ", p->attrs["lon"]) );
+
+ // convert to mercator
+ lat = MercatorBounds::LatToY(lat);
+ lng = MercatorBounds::LonToX(lng);
+
+ m_holder.AddNode(id, lat, lng);
+ }
+ else if (p->name == "way")
+ {
+ // store way
+
+ WayElement e;
+ bool bUnite = false;
+ bool bEmptyTags = true;
+
+ for (size_t i = 0; i < p->childs.size(); ++i)
+ {
+ if (p->childs[i].name == "nd")
+ {
+ uint64_t ref;
+ VERIFY ( utils::to_uint64(p->childs[i].attrs["ref"], ref), ("Bad node ref in way : ", p->childs[i].attrs["ref"]) );
+ e.nodes.push_back(ref);
+ }
+ else if (!bUnite && (p->childs[i].name == "tag"))
+ {
+ bEmptyTags = false;
+
+ // process way's tags to define - if we need to join ways
+ string const & k = p->childs[i].attrs["k"];
+ string const & v = p->childs[i].attrs["v"];
+ bUnite = feature::NeedUnite(k, v);
+ }
+ }
+
+ if (e.IsValid())
+ {
+ m_holder.AddWay(id, e);
+ if (bUnite || bEmptyTags)
+ m_holder.AddMappedWay(id, e, bEmptyTags);
+ }
+ }
+ else if (p->name == "relation")
+ {
+ // store relation
+
+ RelationElement e;
+ for (size_t i = 0; i < p->childs.size(); ++i)
+ {
+ if (p->childs[i].name == "member")
+ {
+ uint64_t ref;
+ VERIFY ( utils::to_uint64(p->childs[i].attrs["ref"], ref), ("Bad ref in relation : ", p->childs[i].attrs["ref"]) );
+
+ string const & type = p->childs[i].attrs["type"];
+ string const & role = p->childs[i].attrs["role"];
+ if (type == "node")
+ e.nodes.push_back(make_pair(ref, role));
+ else
+ e.ways.push_back(make_pair(ref, role));
+ }
+ else if (p->childs[i].name == "tag")
+ {
+ // relation tags writing as is
+ e.tags.insert(make_pair(p->childs[i].attrs["k"], p->childs[i].attrs["v"]));
+ }
+ }
+
+ if (e.IsValid())
+ m_holder.AddRelation(id, e);
+ }
+ }
+};
diff --git a/generator/generator.pro b/generator/generator.pro
new file mode 100644
index 0000000000..d6b3122152
--- /dev/null
+++ b/generator/generator.pro
@@ -0,0 +1,44 @@
+# Library to use in Generator Tool
+
+TARGET = generator
+TEMPLATE = lib
+CONFIG += staticlib
+
+ROOT_DIR = ..
+DEPENDENCIES = indexer geometry coding base expat
+
+include($$ROOT_DIR/common.pri)
+
+QT += core
+
+SOURCES += \
+ feature_merger.cpp \
+ xml_element.cpp \
+ data_generator.cpp \
+ feature_generator.cpp \
+ feature_sorter.cpp \
+ update_generator.cpp \
+ grid_generator.cpp \
+ statistics.cpp \
+ kml_parser.cpp \
+ osm2type.cpp \
+ classif_routine.cpp \
+
+HEADERS += \
+ feature_merger.hpp \
+ xml_element.hpp \
+ feature_bucketer.hpp \
+ osm_element.hpp \
+ data_generator.hpp \
+ feature_generator.hpp \
+ first_pass_parser.hpp \
+ data_cache_file.hpp \
+ feature_sorter.hpp \
+ update_generator.hpp \
+ grid_generator.hpp \
+ statistics.hpp \
+ kml_parser.hpp \
+ polygonizer.hpp \
+ world_map_generator.hpp \
+ osm2type.hpp \
+ classif_routine.hpp \
diff --git a/generator/generator_tests/feature_bucketer_test.cpp b/generator/generator_tests/feature_bucketer_test.cpp
new file mode 100644
index 0000000000..eee746f76c
--- /dev/null
+++ b/generator/generator_tests/feature_bucketer_test.cpp
@@ -0,0 +1,65 @@
+#include "../../testing/testing.hpp"
+
+#include "../feature_bucketer.hpp"
+
+#include "../../indexer/feature.hpp"
+#include "../../indexer/mercator.hpp"
+#include "../../indexer/cell_id.hpp"
+
+#include "../../indexer/indexer_tests/feature_routine.hpp"
+
+#include "../../base/stl_add.hpp"
+
+
+namespace
+{
+ class PushBackFeatureDebugStringOutput
+ {
+ public:
+ typedef map<string, vector<string> > * InitDataType;
+
+ PushBackFeatureDebugStringOutput(string const & name, InitDataType const & initData)
+ : m_pContainer(&((*initData)[name]))
+ {
+ }
+
+ void operator() (FeatureBuilder1 const & fb)
+ {
+ FeatureType f;
+ FeatureBuilder2Feature(
+ static_cast<FeatureBuilder2 &>(const_cast<FeatureBuilder1 &>(fb)), f);
+ m_pContainer->push_back(f.DebugString(0));
+ }
+
+ private:
+ vector<string> * m_pContainer;
+ };
+
+ typedef feature::CellFeatureBucketer<
+ PushBackFeatureDebugStringOutput,
+ feature::SimpleFeatureClipper,
+ MercatorBounds,
+ RectId
+ > FeatureBucketer;
+}
+
+UNIT_TEST(FeatureBucketerSmokeTest)
+{
+ map<string, vector<string> > out, expectedOut;
+ FeatureBucketer bucketer(1, &out);
+
+ FeatureBuilder2 fb;
+ fb.AddPoint(m2::PointD(10, 10));
+ fb.AddPoint(m2::PointD(20, 20));
+ fb.SetType(0);
+ bucketer(fb);
+
+ FeatureType f;
+ FeatureBuilder2Feature(fb, f);
+ expectedOut["3"].push_back(f.DebugString(0));
+ TEST_EQUAL(out, expectedOut, ());
+
+ vector<string> bucketNames;
+ bucketer.GetBucketNames(MakeBackInsertFunctor(bucketNames));
+ TEST_EQUAL(bucketNames, vector<string>(1, "3"), ());
+}
diff --git a/generator/generator_tests/generator_tests.pro b/generator/generator_tests/generator_tests.pro
new file mode 100644
index 0000000000..a9df30da98
--- /dev/null
+++ b/generator/generator_tests/generator_tests.pro
@@ -0,0 +1,23 @@
+TARGET = generator_tests
+CONFIG += console
+CONFIG -= app_bundle
+TEMPLATE = app
+
+ROOT_DIR = ../..
+DEPENDENCIES = map generator indexer platform geometry coding base expat sgitess
+
+include($$ROOT_DIR/common.pri)
+
+QT *= core
+
+win32-g++ {
+ LIBS += -lpthread
+}
+
+HEADERS += \
+ ../../indexer/indexer_tests/feature_routine.hpp \
+
+SOURCES += \
+ ../../testing/testingmain.cpp \
+ ../../indexer/indexer_tests/feature_routine.cpp \
+ feature_bucketer_test.cpp \
diff --git a/generator/generator_tool/generator_tool.cpp b/generator/generator_tool/generator_tool.cpp
new file mode 100644
index 0000000000..a8734bbba0
--- /dev/null
+++ b/generator/generator_tool/generator_tool.cpp
@@ -0,0 +1,197 @@
+#include "../data_generator.hpp"
+#include "../feature_generator.hpp"
+#include "../feature_sorter.hpp"
+#include "../update_generator.hpp"
+#include "../feature_bucketer.hpp"
+#include "../grid_generator.hpp"
+#include "../statistics.hpp"
+#include "../classif_routine.hpp"
+
+#include "../../indexer/features_vector.hpp"
+#include "../../indexer/index_builder.hpp"
+#include "../../indexer/osm_decl.hpp"
+#include "../../indexer/data_header.hpp"
+#include "../../indexer/classificator_loader.hpp"
+
+#include "../../defines.hpp"
+
+#include "../../platform/platform.hpp"
+
+#include "../../3party/gflags/src/gflags/gflags.h"
+
+#include "../../std/ctime.hpp"
+#include "../../std/iostream.hpp"
+#include "../../std/iomanip.hpp"
+#include "../../std/numeric.hpp"
+
+#include "../../version/version.hpp"
+
+#include "../../base/start_mem_debug.hpp"
+
+DEFINE_bool(version, false, "Display version");
+DEFINE_bool(generate_update, false,
+ "If specified, update.maps file will be generated from cells in the data path");
+
+DEFINE_bool(sort_features, true, "Sort features data for better cache-friendliness.");
+DEFINE_bool(generate_classif, false, "Generate classificator.");
+DEFINE_bool(preprocess_xml, false, "1st pass - create nodes/ways/relations data");
+DEFINE_bool(generate_features, false, "2nd pass - generate intermediate features");
+DEFINE_bool(generate_geometry, false, "3rd pass - split and simplify geometry and triangles for features");
+DEFINE_bool(generate_index, false, "4th pass - generate index");
+DEFINE_bool(generate_grid, false, "Generate grid for given bucketing_level");
+DEFINE_bool(calc_statistics, false, "Calculate feature statistics for specified mwm bucket files");
+DEFINE_bool(use_light_nodes, false,
+ "If true, use temporary vector of nodes, instead of huge temp file");
+DEFINE_string(data_path, "", "Working directory, 'path_to_exe/../../data' if empty.");
+DEFINE_string(output, "", "Prefix of filenames of outputted .dat and .idx files.");
+DEFINE_string(intermediate_data_path, "", "Path to store nodes, ways, relations.");
+DEFINE_int32(bucketing_level, -1, "If positive, level of cell ids for bucketing.");
+DEFINE_int32(generate_world_scale, -1, "If specified, features for zoomlevels [0..this_value] "
+ "which are enabled in classificator will be MOVED to the separate world file");
+DEFINE_bool(split_by_polygons, false, "Use kml shape files to split planet by regions and countries");
+DEFINE_int32(simplify_countries_level, -1, "If positive, simplifies country polygons. Recommended values [10..15]");
+DEFINE_bool(merge_coastlines, false, "If defined, tries to merge coastlines when generating World file");
+
+string AddSlashIfNeeded(string const & str)
+{
+ string result(str);
+ size_t const size = result.size();
+ if (size)
+ {
+ if (result.find_last_of('\\') == size - 1)
+ result[size - 1] = '/';
+ else
+ if (result.find_last_of('/') != size - 1)
+ result.push_back('/');
+ }
+ return result;
+}
+
+int main(int argc, char ** argv)
+{
+ google::SetUsageMessage(
+ "Takes OSM XML data from stdin and creates data and index files in several passes.");
+
+ google::ParseCommandLineFlags(&argc, &argv, true);
+
+ string const path =
+ FLAGS_data_path.empty() ? GetPlatform().WritableDir() : AddSlashIfNeeded(FLAGS_data_path);
+
+ if (FLAGS_version)
+ {
+ cout << "Tool version: " << VERSION_STRING << endl;
+ cout << "Built on: " << VERSION_DATE_STRING << endl;
+ }
+
+ // Make a classificator
+ if (FLAGS_generate_classif)
+ {
+ classificator::GenerateAndWrite(path);
+ }
+
+ if (FLAGS_generate_grid)
+ {
+ grid::GenerateGridToStdout(FLAGS_bucketing_level);
+ }
+
+ // Generating intermediate files
+ if (FLAGS_preprocess_xml)
+ {
+ LOG(LINFO, ("Generating intermediate data ...."));
+ if (!data::GenerateToFile(FLAGS_intermediate_data_path, FLAGS_use_light_nodes))
+ return -1;
+ }
+
+ feature::GenerateInfo genInfo;
+ genInfo.tmpDir = FLAGS_intermediate_data_path;
+
+ // load classificator only if necessary
+ if (FLAGS_generate_features || FLAGS_generate_geometry ||
+ FLAGS_generate_index || FLAGS_calc_statistics)
+ {
+ classificator::Read(path + "drawing_rules.bin",
+ path + "classificator.txt",
+ path + "visibility.txt");
+ classificator::PrepareForFeatureGeneration();
+ }
+
+ // Generate dat file
+ if (FLAGS_generate_features)
+ {
+ LOG(LINFO, ("Generating final data ..."));
+
+ if (FLAGS_output.empty() || FLAGS_split_by_polygons) // do not break data path for polygons
+ genInfo.datFilePrefix = path;
+ else
+ genInfo.datFilePrefix = path + FLAGS_output + (FLAGS_bucketing_level > 0 ? "-" : "");
+ genInfo.datFileSuffix = DATA_FILE_EXTENSION;
+
+ // split data by countries polygons
+ genInfo.splitByPolygons = FLAGS_split_by_polygons;
+ genInfo.simplifyCountriesLevel = FLAGS_simplify_countries_level;
+
+ genInfo.cellBucketingLevel = FLAGS_bucketing_level;
+ genInfo.maxScaleForWorldFeatures = FLAGS_generate_world_scale;
+ genInfo.mergeCoastlines = FLAGS_merge_coastlines;
+
+ if (!feature::GenerateFeatures(genInfo, FLAGS_use_light_nodes))
+ return -1;
+
+ for (size_t i = 0; i < genInfo.bucketNames.size(); ++i)
+ genInfo.bucketNames[i] = genInfo.datFilePrefix + genInfo.bucketNames[i] + genInfo.datFileSuffix;
+
+ if (FLAGS_generate_world_scale >= 0)
+ genInfo.bucketNames.push_back(genInfo.datFilePrefix + WORLD_FILE_NAME + genInfo.datFileSuffix);
+ }
+ else
+ {
+ genInfo.bucketNames.push_back(path + FLAGS_output + DATA_FILE_EXTENSION);
+ }
+
+ // Enumerate over all dat files that were created.
+ size_t const count = genInfo.bucketNames.size();
+ for (size_t i = 0; i < count; ++i)
+ {
+ string const & datFile = genInfo.bucketNames[i];
+
+ if (FLAGS_generate_geometry)
+ {
+ LOG(LINFO, ("Generating result features for ", datFile));
+ if (!feature::GenerateFinalFeatures(datFile,
+ FLAGS_sort_features, datFile == path + WORLD_FILE_NAME + DATA_FILE_EXTENSION))
+ {
+ // If error - move to next bucket without index generation
+ continue;
+ }
+ }
+
+ if (FLAGS_generate_index)
+ {
+ LOG(LINFO, ("Generating index for ", datFile));
+ if (!indexer::BuildIndexFromDatFile(datFile, FLAGS_intermediate_data_path + FLAGS_output))
+ {
+ LOG(LCRITICAL, ("Error generating index."));
+ }
+ }
+
+ if (FLAGS_calc_statistics)
+ {
+ LOG(LINFO, ("Calculating statistics for ", datFile));
+
+ stats::FileContainerStatistic(datFile);
+
+ stats::MapInfo info;
+ stats::CalcStatistic(datFile, info);
+ stats::PrintStatistic(info);
+ }
+ }
+
+ // Create http update list for countries and corresponding files
+ if (FLAGS_generate_update)
+ {
+ LOG(LINFO, ("Creating maps.update file..."));
+ update::GenerateFilesList(path);
+ }
+
+ return 0;
+}
diff --git a/generator/generator_tool/generator_tool.pro b/generator/generator_tool/generator_tool.pro
new file mode 100644
index 0000000000..460772d87f
--- /dev/null
+++ b/generator/generator_tool/generator_tool.pro
@@ -0,0 +1,22 @@
+# Generator binary
+
+ROOT_DIR = ../..
+DEPENDENCIES = map storage generator indexer platform geometry coding base gflags expat sgitess version
+
+include($$ROOT_DIR/common.pri)
+
+CONFIG += console
+CONFIG -= app_bundle
+TEMPLATE = app
+
+# needed for Platform::WorkingDir()
+QT += core
+
+win32 {
+ LIBS += -lShell32
+}
+
+SOURCES += \
+ generator_tool.cpp \
+
+HEADERS += \
diff --git a/generator/grid_generator.cpp b/generator/grid_generator.cpp
new file mode 100644
index 0000000000..b3f3e9ade2
--- /dev/null
+++ b/generator/grid_generator.cpp
@@ -0,0 +1,192 @@
+#include "grid_generator.hpp"
+
+#include "../base/logging.hpp"
+
+#include "../indexer/cell_id.hpp"
+#include "../indexer/mercator.hpp"
+
+// tags used for grid drawing
+#define GRIDKEY "mapswithme"
+#define GRIDVALUE "grid"
+#define CAPTIONKEY "place"
+#define CAPTIONVALUE "country"
+
+namespace grid
+{
+ static size_t const MIN_GRID_LEVEL = 1;
+ static size_t const MAX_GRID_LEVEL = 10;
+
+ template <class TCellId>
+ string MercatorPointToCellIdString(double x, double y, size_t bucketingLevel)
+ {
+ TCellId id = CellIdConverter<MercatorBounds, TCellId>::ToCellId(x, y);
+ return id.ToString().substr(0, bucketingLevel);
+ }
+
+ void GenerateGridToStdout(size_t bucketingLevel)
+ {
+ if (bucketingLevel < MIN_GRID_LEVEL || bucketingLevel > MAX_GRID_LEVEL)
+ {
+ LOG(LERROR, ("Bucketing level", bucketingLevel, "for grid is not within valid range [", MIN_GRID_LEVEL, "..", MAX_GRID_LEVEL, "]"));
+ return;
+ }
+
+ size_t const COUNT = 2 << (bucketingLevel - 1);
+ double const STEPX = (MercatorBounds::maxX - MercatorBounds::minX) / COUNT;
+ double const STEPY = (MercatorBounds::maxY - MercatorBounds::minY) / COUNT;
+
+ cout <<
+ "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
+ "<osm version=\"0.6\" generator=\"MapsWithMe Indexer Tool\">\n"
+ " <bounds minlat=\"" << MercatorBounds::YToLat(MercatorBounds::minY) <<
+ "\" minlon=\"" << MercatorBounds::XToLon(MercatorBounds::minX) <<
+ "\" maxlat=\"" << MercatorBounds::YToLat(MercatorBounds::maxY) <<
+ "\" maxlon=\"" << MercatorBounds::XToLon(MercatorBounds::maxX) << "\"/>\n";
+
+ // generate nodes and ways
+ size_t nodeID = 1;
+ size_t wayID = 1;
+ for (double y = MercatorBounds::minY; y <= MercatorBounds::maxY; y += STEPY)
+ {
+ size_t const firstID = nodeID;
+ cout <<
+ " <node id=\"" << nodeID++ <<
+ "\" lat=\"" << MercatorBounds::YToLat(y) <<
+ "\" lon=\"" << MercatorBounds::XToLon(MercatorBounds::minX) <<
+ "\"/>\n";
+ size_t const secondID = nodeID;
+ cout <<
+ " <node id=\"" << nodeID++ <<
+ "\" lat=\"" << MercatorBounds::YToLat(y) <<
+ "\" lon=\"" << MercatorBounds::XToLon(MercatorBounds::maxX) <<
+ "\"/>\n";
+ cout <<
+ " <way id=\"" << wayID++ << "\">\n"
+ " <nd ref=\"" << firstID << "\"/>\n"
+ " <nd ref=\"" << secondID << "\"/>\n"
+ " <tag k=\"" << GRIDKEY << "\" v=\"" << GRIDVALUE << "\"/>\n"
+ " <tag k=\"layer\" v=\"-5\"/>\n"
+ " </way>\n";
+ }
+ for (double x = MercatorBounds::minX; x <= MercatorBounds::maxX; x += STEPX)
+ {
+ size_t const firstID = nodeID;
+ cout <<
+ " <node id=\"" << nodeID++ <<
+ "\" lat=\"" << MercatorBounds::YToLat(MercatorBounds::minY) <<
+ "\" lon=\"" << MercatorBounds::XToLon(x) <<
+ "\"/>\n";
+ size_t const secondID = nodeID;
+ cout <<
+ " <node id=\"" << nodeID++ <<
+ "\" lat=\"" << MercatorBounds::YToLat(MercatorBounds::maxY) <<
+ "\" lon=\"" << MercatorBounds::XToLon(x) <<
+ "\"/>\n";
+ cout <<
+ " <way id=\"" << wayID++ << "\">\n"
+ " <nd ref=\"" << firstID << "\"/>\n"
+ " <nd ref=\"" << secondID << "\"/>\n"
+ " <tag k=\"" << GRIDKEY << "\" v=\"" << GRIDVALUE << "\"/>\n"
+ " <tag k=\"layer\" v=\"-5\"/>\n"
+ " </way>\n";
+ }
+
+ // generate nodes with captions
+ for (size_t y = 0; y <= COUNT - 1; ++y)
+ {
+ for (size_t x = 0; x <= COUNT - 1; ++x)
+ {
+ double const mercY = MercatorBounds::minY + y * STEPY + STEPY / 2;
+ double const mercX = MercatorBounds::minX + x * STEPX + STEPX / 2;
+ string const title = MercatorPointToCellIdString<m2::CellId<MAX_GRID_LEVEL> >(mercX, mercY, bucketingLevel);
+ cout <<
+ " <node id=\"" << nodeID++ <<
+ "\" lat=\"" << MercatorBounds::YToLat(mercY) <<
+ "\" lon=\"" << MercatorBounds::XToLon(mercX) <<
+ "\">\n";
+ cout <<
+ " <tag k=\"" << CAPTIONKEY << "\" v=\"" << CAPTIONVALUE << "\"/>\n";
+ cout <<
+ " <tag k=\"name\" v=\"" << title << "\"/>\n";
+ cout <<
+ " </node>\n";
+ }
+ }
+ cout <<
+ "</osm>\n";
+ }
+
+/* void GenerateGridToStdout(size_t bucketingLevel)
+ {
+ if (bucketingLevel < MIN_GRID_LEVEL || bucketingLevel > MAX_GRID_LEVEL)
+ {
+ LOG(LERROR, ("Bucketing level", bucketingLevel, "for grid is not within valid range [", MIN_GRID_LEVEL, "..", MAX_GRID_LEVEL, "]"));
+ return;
+ }
+
+ size_t const COUNT = 2 << (bucketingLevel - 1);
+ double const STEPX = (MercatorBounds::maxX - MercatorBounds::minX) / COUNT;
+ double const STEPY = (MercatorBounds::maxY - MercatorBounds::minY) / COUNT;
+
+ cout <<
+ "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
+ "<osm version=\"0.6\" generator=\"MapsWithMe Indexer Tool\">\n"
+ " <bounds minlat=\"" << MercatorBounds::YToLat(MercatorBounds::minY) <<
+ "\" minlon=\"" << MercatorBounds::XToLon(MercatorBounds::minX) <<
+ "\" maxlat=\"" << MercatorBounds::YToLat(MercatorBounds::maxY) <<
+ "\" maxlon=\"" << MercatorBounds::XToLon(MercatorBounds::maxX) << "\"/>\n";
+
+ // generate nodes
+ size_t nodeID = 1;
+ for (double y = MercatorBounds::minY; y <= MercatorBounds::maxY; y += STEPY)
+ {
+ for (double x = MercatorBounds::minX; x <= MercatorBounds::maxX; x += STEPX)
+ {
+ cout <<
+ " <node id=\"" << nodeID++ <<
+ "\" lat=\"" << MercatorBounds::YToLat(y) <<
+ "\" lon=\"" << MercatorBounds::XToLon(x) <<
+ "\"/>\n";
+ }
+ }
+ // generate squares and captions
+ size_t wayID = 1;
+ for (size_t y = 0; y <= COUNT - 1; ++y)
+ {
+ for (size_t x = 0; x <= COUNT - 1; ++x)
+ {
+ size_t const first = x + y * (COUNT + 1) + 1;
+ size_t const second = first + 1;
+ size_t const third = second + COUNT + 1;
+ size_t const fourth = third - 1;
+ string title = CellStringFromXYLevel(x, y, bucketingLevel);
+ ++nodeID;
+ cout <<
+ " <node id=\"" << nodeID <<
+ "\" lat=\"" << MercatorBounds::YToLat(MercatorBounds::minY + y * STEPY + STEPY / 2) <<
+ "\" lon=\"" << MercatorBounds::XToLon(MercatorBounds::minX + x * STEPX + STEPX / 2) <<
+ "\">\n";
+ cout <<
+ " <tag k=\"" << TAGKEY << "\" v=\"" << CAPTIONVALUE << "\"/>\n";
+ cout <<
+ " <tag k=\"name\" v=\"" << title << "\"/>\n";
+ cout <<
+ " </node>\n";
+
+ cout <<
+ " <way id=\"" << wayID++ << "\">\n"
+ " <nd ref=\"" << first << "\"/>\n"
+ " <nd ref=\"" << second << "\"/>\n"
+ " <nd ref=\"" << third << "\"/>\n"
+ " <nd ref=\"" << fourth << "\"/>\n"
+ " <nd ref=\"" << first << "\"/>\n"
+ " <tag k=\"name\" v=\"" << title << "\"/>\n"
+ " <tag k=\"" << TAGKEY << "\" v=\"" << GRIDVALUE << "\"/>\n"
+ " <tag k=\"layer\" v=\"-5\"/>\n"
+ " </way>\n";
+ }
+ }
+ cout <<
+ "</osm>\n";
+ }*/
+}
diff --git a/generator/grid_generator.hpp b/generator/grid_generator.hpp
new file mode 100644
index 0000000000..a1bdc31e45
--- /dev/null
+++ b/generator/grid_generator.hpp
@@ -0,0 +1,8 @@
+#pragma once
+
+#include "../std/iostream.hpp"
+
namespace grid
{
  /// Write an osm-xml document to stdout containing a rectangular grid of
  /// cells for the given bucketing level (see the generation code in the
  /// corresponding .cpp; currently commented out there).
  void GenerateGridToStdout(size_t bucketingLevel);
}
diff --git a/generator/kml_parser.cpp b/generator/kml_parser.cpp
new file mode 100644
index 0000000000..55f4402354
--- /dev/null
+++ b/generator/kml_parser.cpp
@@ -0,0 +1,298 @@
+#include "kml_parser.hpp"
+#include "feature_sorter.hpp"
+
+#include "../base/string_utils.hpp"
+#include "../base/logging.hpp"
+
+#include "../coding/parse_xml.hpp"
+#include "../coding/file_reader.hpp"
+
+#include "../geometry/rect2d.hpp"
+#include "../geometry/cellid.hpp"
+
+#include "../indexer/cell_id.hpp"
+#include "../indexer/mercator.hpp"
+#include "../indexer/feature.hpp"
+#include "../indexer/covering.hpp"
+
+#include "../std/fstream.hpp"
+
+#define POLYGONS_FILE "polygons.lst"
+#define BORDERS_DIR "borders/"
+#define BORDERS_EXTENSION ".kml"
+
+#define MIN_SIMPLIFIED_POINTS_COUNT 4
+
// Forward declaration of the tesselator entry point (defined in
// feature_sorter.cpp) to avoid pulling in its heavy header here.
namespace feature
{
  typedef vector<m2::PointD> points_t;
  void TesselateInterior(points_t const & bound, list<points_t> const & holes,
                         points_t & triangles);
}
+
+namespace kml
+{
+ typedef vector<Region> PolygonsContainerT;
+
  /// SAX-style parser of country-border kml files: collects outer-boundary
  /// rings of Placemark polygons into a PolygonsContainerT, optionally
  /// simplifying them.
  class KmlParser
  {
    vector<string> m_tags;  ///< stack of currently open xml elements
    /// buffer for text with points (coordinates may arrive in chunks)
    string m_data;
    string m_name;          ///< name of the current Placemark

    PolygonsContainerT & m_country; ///< output: collected border rings
    int m_level;                    ///< simplification level; <= 0 disables

  public:
    KmlParser(PolygonsContainerT & country, int level);

    bool Push(string const & element);
    void Pop(string const & element);
    void AddAttr(string const &, string const &) {}
    void CharData(string const & data);
  };
+
  /// @param country output container for collected border rings
  /// @param level   simplification level; non-positive means no simplification
  KmlParser::KmlParser(PolygonsContainerT & country, int level)
    : m_country(country), m_level(level)
  {
  }
+
+ bool KmlParser::Push(string const & element)
+ {
+ m_tags.push_back(element);
+
+ return true;
+ }
+
+ template <class PointsContainerT>
+ class PointsCollector
+ {
+ PointsContainerT & m_container;
+
+ public:
+ PointsCollector(PointsContainerT & container) : m_container(container)
+ {
+ }
+
+ void operator()(string const & latLon)
+ {
+ size_t const firstCommaPos = latLon.find(',');
+ CHECK(firstCommaPos != string::npos, ("invalid latlon", latLon));
+ string const lonStr = latLon.substr(0, firstCommaPos);
+ double lon;
+ CHECK(utils::to_double(lonStr, lon), ("invalid lon", lonStr));
+ size_t const secondCommaPos = latLon.find(',', firstCommaPos + 1);
+ string latStr;
+ if (secondCommaPos == string::npos)
+ latStr = latLon.substr(firstCommaPos + 1);
+ else
+ latStr = latLon.substr(firstCommaPos + 1, secondCommaPos - firstCommaPos - 1);
+ double lat;
+ CHECK(utils::to_double(latStr, lat), ("invalid lon", latStr));
+ // to mercator
+ m2::PointD const mercPoint(MercatorBounds::LonToX(lon), MercatorBounds::LatToY(lat));
+ m_container.push_back(mercPoint);
+ }
+ };
+
  /// FeatureType built directly from a point ring, bypassing normal feature
  /// parsing: geometry and triangulation are filled by hand. Referenced only
  /// by the commented-out cellid-covering experiment in KmlParser::Pop.
  class AreaFeature : public FeatureType
  {
  public:
    /// @param beg,end ring of mercator points (assumed closed — TODO confirm)
    template <class IterT>
    AreaFeature(IterT beg, IterT end)
    {
      // manually fill bordering geometry points
      m_bPointsParsed = true;
      for (IterT it = beg; it != end; ++it)
      {
        m_Points.push_back(*it);
        m_LimitRect.Add(*it);
      }

      // manually fill triangles points
      m_bTrianglesParsed = true;
      list<feature::points_t> const holes;
      feature::points_t points(beg, end);
      feature::points_t triangles;
      feature::TesselateInterior(points, holes, triangles);
      CHECK(!triangles.empty(), ("Tesselation unsuccessfull?"));
      for (size_t i = 0; i < triangles.size(); ++i)
        m_Triangles.push_back(triangles[i]);
    }
  };
  /// Handles element close: on </coordinates> inside an outerBoundaryIs
  /// LinearRing, parses the accumulated text into a mercator ring,
  /// optionally simplifies it and stores it as a country region.
  void KmlParser::Pop(string const & element)
  {
    if (element == "Placemark")
    {
    }
    else if (element == "coordinates")
    {
      size_t const size = m_tags.size();
      CHECK(m_tags.size() > 3, ());
      CHECK(m_tags[size - 2] == "LinearRing", ());

      if (m_tags[size - 3] == "outerBoundaryIs")
      {
        // first, collect points in Mercator
        typedef vector<m2::PointD> MercPointsContainerT;
        MercPointsContainerT points;
        PointsCollector<MercPointsContainerT> collector(points);
        utils::TokenizeString(m_data, " \n\r\a", collector);
        size_t const numPoints = points.size();
        // a valid ring has > 3 points and is explicitly closed
        if (numPoints > 3 && points[numPoints - 1] == points[0])
        {
//          // create feature for country's polygon
//          AreaFeature ft(points.begin(), points.end());
//          // get polygon covering (cellids)
//          vector<int64_t> ids;
//          ids = covering::CoverFeature(ft, -1);
//          // debug output
//          set<int64_t> ids8;
//          for (size_t i = 0; i < ids.size(); ++i)
//          {
//            int64_t a = ids[i] >> (2 * 11);
//            if (ids8.insert(a).second)
//              LOG(LINFO, (RectId::FromInt64(a).ToString()));
//          }
//          LOG(LINFO, ("Total cellids:", ids8.size()));
          // second, simplify points if necessary
          if (m_level > 0)
          {
            MercPointsContainerT simplifiedPoints;
            feature::SimplifyPoints(points, simplifiedPoints, m_level);
            // keep the simplified ring only if enough points survived
            if (simplifiedPoints.size() > MIN_SIMPLIFIED_POINTS_COUNT)
            {
              // LOG_SHORT(LINFO, (m_name, numPoints, "simplified to ", simplifiedPoints.size()));
              points.swap(simplifiedPoints);
            }
            else
            {
              // LOG_SHORT(LINFO, (m_name, numPoints, "NOT simplified"));
            }
          }

          // remove last point which is equal to first
          // it's not needed for Region::Contains
          points.pop_back();

          m_country.push_back(Region());
          for (MercPointsContainerT::iterator it = points.begin(); it != points.end(); ++it)
            m_country.back().AddPoint(*it);
        }
        else
        {
          LOG(LWARNING, ("Invalid region for country"/*, m_country.m_name*/));
        }
      }
      else if (m_tags[size - 3] == "innerBoundaryIs")
      { // currently we're ignoring holes
      }
      else
      {
        CHECK(false, ("Unsupported tag", m_tags[size - 3]));
      }

      m_data.clear();
    }
    else if (element == "Polygon")
    {
    }

    m_tags.pop_back();
  }
+
+ void KmlParser::CharData(string const & data)
+ {
+ size_t const size = m_tags.size();
+
+ if (size > 1 && m_tags[size - 1] == "name" && m_tags[size - 2] == "Placemark")
+ {
+ m_name = data;
+ }
+ else if (size > 4 && m_tags[size - 1] == "coordinates"
+ && m_tags[size - 2] == "LinearRing" && m_tags[size - 4] == "Polygon")
+ {
+ // text block can be really huge
+ m_data.append(data);
+ }
+ }
+
+ bool LoadPolygonsFromKml(string const & kmlFile, PolygonsContainerT & country, int level)
+ {
+ KmlParser parser(country, level);
+ try
+ {
+ FileReader file(kmlFile);
+ ReaderSource<FileReader> source(file);
+ bool const bRes = ParseXML(source, parser, true);
+ return bRes;
+ }
+ catch (std::exception const &)
+ {
+ }
+ return false;
+ }
+
+ class PolygonLoader
+ {
+ string m_baseDir;
+ int m_level;
+
+ CountryPolygons & m_polygons;
+ m2::RectD & m_rect;
+
+ public:
+ PolygonLoader(string const & basePolygonsDir, int level, CountryPolygons & polygons, m2::RectD & rect)
+ : m_baseDir(basePolygonsDir), m_level(level), m_polygons(polygons), m_rect(rect)
+ {
+ }
+
+ void operator()(string const & name)
+ {
+ if (m_polygons.m_name.empty())
+ m_polygons.m_name = name;
+
+ PolygonsContainerT current;
+ if (LoadPolygonsFromKml(m_baseDir + BORDERS_DIR + name + BORDERS_EXTENSION, current, m_level))
+ {
+ for (size_t i = 0; i < current.size(); ++i)
+ {
+ m2::RectD const rect(current[i].GetRect());
+ m_rect.Add(rect);
+ m_polygons.m_regions.Add(current[i], rect);
+ }
+ }
+ }
+ };
+
+ bool LoadCountriesList(string const & baseDir, CountriesContainerT & countries,
+ int simplifyCountriesLevel)
+ {
+ if (simplifyCountriesLevel > 0)
+ {
+ LOG_SHORT(LINFO, ("Simplificator level for country polygons:", simplifyCountriesLevel));
+ }
+
+ countries.Clear();
+ ifstream stream((baseDir + POLYGONS_FILE).c_str());
+ string line;
+ LOG(LINFO, ("Loading countries."));
+ while (stream.good())
+ {
+ std::getline(stream, line);
+ if (line.empty())
+ continue;
+
+ CountryPolygons country;
+ m2::RectD rect;
+
+ PolygonLoader loader(baseDir, simplifyCountriesLevel, country, rect);
+ utils::TokenizeString(line, "|", loader);
+ if (!country.m_regions.IsEmpty())
+ countries.Add(country, rect);
+ }
+ LOG(LINFO, ("Countries loaded:", countries.GetSize()));
+ return !countries.IsEmpty();
+ }
+}
diff --git a/generator/kml_parser.hpp b/generator/kml_parser.hpp
new file mode 100644
index 0000000000..25a0e9d5da
--- /dev/null
+++ b/generator/kml_parser.hpp
@@ -0,0 +1,28 @@
+#pragma once
+
+#include "../geometry/region2d.hpp"
+#include "../geometry/tree4d.hpp"
+
+#include "../std/string.hpp"
+
+
namespace kml
{
  typedef m2::RegionD Region;
  typedef m4::Tree<Region> RegionsContainerT;

  /// All border polygons of one country together with its name.
  struct CountryPolygons
  {
    CountryPolygons(string const & name = "") : m_name(name), m_index(-1) {}

    RegionsContainerT m_regions; // spatial tree of border rings
    string m_name;               // country name, taken from polygons.lst
    mutable int m_index;         // external bookkeeping slot; -1 = unset
                                 // (meaning is defined by callers, not here)
  };

  typedef m4::Tree<CountryPolygons> CountriesContainerT;

  /// Load countries' border polygons listed in polygons.lst under baseDir.
  /// @param[in] simplifyCountriesLevel if positive, used as a level for simplificator
  bool LoadCountriesList(string const & baseDir, CountriesContainerT & countries,
                         int simplifyCountriesLevel = -1);
}
diff --git a/generator/osm2type.cpp b/generator/osm2type.cpp
new file mode 100644
index 0000000000..16513a69d6
--- /dev/null
+++ b/generator/osm2type.cpp
@@ -0,0 +1,652 @@
+#include "osm2type.hpp"
+#include "xml_element.hpp"
+
+#include "../indexer/classificator.hpp"
+#include "../indexer/drawing_rules.hpp"
+#include "../indexer/feature_visibility.hpp"
+
+#include "../coding/parse_xml.hpp"
+#include "../coding/file_reader.hpp"
+
+#include "../base/assert.hpp"
+#include "../base/string_utils.hpp"
+
+#include "../std/fstream.hpp"
+#include "../std/bind.hpp"
+#include "../std/vector.hpp"
+#include "../std/set.hpp"
+#include "../std/algorithm.hpp"
+
+#include "../base/start_mem_debug.hpp"
+
+namespace ftype {
+
+ namespace
+ {
+ /// get value of mark (1 == "yes", -1 == "no", 0 == not a "yes\no")
+ static int get_mark_value(string const & k, string const & v)
+ {
+ static char const * aTrue[] = { "yes", "true", "1", "*" };
+ static char const * aFalse[] = { "no", "false", "-1" };
+
+ utils::TokenizeIterator it(v, "|");
+ while (!it.end())
+ {
+ if (utils::IsInArray(aTrue, *it)) return 1;
+ if (utils::IsInArray(aFalse, *it)) return -1;
+ ++it;
+ }
+
+ // "~" means no this tag, so sometimes it means true,
+ // and all other cases - false. Choose according to key.
+ if (v == "~")
+ return (k == "access" ? 1 : -1);
+
+ return 0;
+ }
+
    /// SAX-like stream that builds the classificator tree and its drawing
    /// rules from the xml rules file (fed by ParseOSMTypes below).
    /// Open xml elements are kept on a stack (m_elements); every Pop()
    /// re-processes the whole open chain via add_type_recursive().
    class OSMTypesStream
    {
      /// @name processing elements definitions
      //@{
      struct element_t
      {
        element_t() : pObj(0) {}

        string name;              // xml element name ("rule", "line", ...)
        map<string, string> attr; // attributes, lower-cased in AddAttr()

        ClassifObject * pObj;     // classificator node created for this element (0 if none)
      };

      vector<element_t> m_elements; // stack of currently open xml elements
      element_t & current() { return m_elements.back(); }

      int m_priority; // ever-growing priority counter for drawing rules
      //@}

      /// check if element is a draw rule (commonly it's a leaf in xml)
      static bool is_draw_rule(string const & e)
      {
        static char const * rules[] = { "line", "tunnel", "area", "symbol", "caption", "text",
                                        "circle", "pathText", "wayMarker" };
        return utils::IsInArray(rules, e);
      }

      /// Geometry mask (node/way) for the current draw rule, taken from the
      /// nearest enclosing element that carries a non-empty "e" attribute.
      uint8_t get_rule_type()
      {
        int count = static_cast<int>(m_elements.size()) - 2;
        ASSERT ( count >= 0, (count) );

        string e;
        while (e.empty() && count >= 0)
        {
          e = m_elements[count].attr["e"];
          --count;
        }
        ASSERT ( !e.empty(), () );

        utils::TokenizeIterator it(e, "|");
        uint8_t ret = 0;
        while (!it.end())
        {
          string const & s = *it;
          if (s == "node")
            ret |= drule::node;
          else if (s == "way")
            ret |= drule::way;
          ++it;
        }

        ASSERT ( ret != 0, () );
        // NOTE(review): cast goes to rule_geo_t and is implicitly converted
        // back to uint8_t (the declared return type) — confirm intended.
        return static_cast<drule::rule_geo_t>(ret);
      }

      /// check if it's our element to parse
      static bool is_our_element(string const & e)
      {
        static char const * elems[] = { "rules", "rule", "else", "layer",
                                        // addclass appear in small scales (6-11)
                                        // don't skip it during parsing, but we don't process it like a rule
                                        "addclass" };
        return (utils::IsInArray(elems, e) || is_draw_rule(e));
      }

      /// check if it's processing key
      static bool is_valid_key(string const & k)
      {
        static char const * bad[] = { "osmarender:render", "osmarender:rendername",
                                      "osmarender:renderref", "addr:housenumber" };
        return (!k.empty() && !utils::IsInArray(bad, k));
      }

      static bool is_valid_value(string const & v)
      {
        return !v.empty();
      }

      /// check if key is a 'mark' (a boolean modifier, not a type by itself)
      static bool is_mark_key(string const & k)
      {
        static char const * mark[] = { "bridge", "tunnel", "area", "lock", "oneway", "junction",
                                       "embankment", "cutting", "motorroad", "cycleway",
                                       "bicycle", "horse", "capital", "fee" };
        return utils::IsInArray(mark, k);
      }

      /// values treated as marks even when encountered at the root level
      static bool process_feature_like_mark_from_root(string const & /*k*/, string const & v)
      {
        static char const * mark[] = { "turning_circle", "dyke", "dike", "levee", "embankment" };
        return utils::IsInArray(mark, v);
      }

      static bool process_feature_like_mark(string const & k, string const & v)
      {
        return (k == "highway" && (v == "construction" || v == "disused"));
      }

      /// check if skip whole element by it's key
      static bool is_skip_element_by_key(string const & k)
      {
        static char const * skip[] = { "addr:housenumber", "fixme" };
        return utils::IsInArray(skip, k);
      }

      /// skip element and all it's sub-elements
      bool m_forceSkip;

    public:
      OSMTypesStream() : m_priority(0), m_forceSkip(false) {}

      void CharData(string const &) {}

      /// @return true to descend into the element
      bool Push(string const & name)
      {
        if (!m_forceSkip && is_our_element(name))
        {
          m_elements.push_back(element_t());
          current().name = name;
          return true;
        }

        return false;
      }

    public:
      void AddAttr(string name, string value)
      {
        // make lower case for equivalent string comparison
        utils::make_lower_case(name);
        utils::make_lower_case(value);

        if ((name == "k") && is_skip_element_by_key(value))
          m_forceSkip = true;
        else
          current().attr[name] = value;
      }

      ClassifObject * get_root() { return classif().GetMutableRoot(); }

      void Pop(string const & /*element*/)
      {
        // (re)process the whole chain of open elements on every close
        if (!m_forceSkip)
          add_type_recursive(get_root(), 0, vector<string>());
        else
          m_forceSkip = false;

        m_elements.pop_back();
      }

    private:
      /// Return a copy of v with s inserted at its sorted position when
      /// intV == 1; otherwise return v unchanged.
      vector<string> make_concat(vector<string> const & v, int intV, string const & s)
      {
        if (intV == 1)
        {
          vector<string> vv;
          vv.reserve(v.size() + 1);
          bool inserted = false;
          for (size_t i = 0; i < v.size(); ++i)
          {
            if (!(v[i] < s) && !inserted)
            {
              inserted = true;
              vv.push_back(s);
            }
            vv.push_back(v[i]);
          }
          if (!inserted) vv.push_back(s);

          return vv;
        }
        else return v;
      }

      /// get parent of object (p) in created chain of elements
      ClassifObject * get_parent_of(size_t i, ClassifObject * p)
      {
        // first locate the element that owns p ...
        ASSERT ( i > 0, () );
        while (--i > 0)
          if (m_elements[i].pObj == p) break;

        // ... then the nearest earlier element with a created object
        ASSERT ( i > 0, () );
        while (--i > 0)
          if (m_elements[i].pObj)
            return m_elements[i].pObj;

        return get_root();
      }

      /// Forget classificator nodes created for elements at index >= start.
      void clear_states(size_t start)
      {
        for (size_t i = start; i < m_elements.size(); ++i)
          m_elements[i].pObj = 0;
      }

      /// Walk open elements from 'start': create classificator nodes for
      /// <rule> elements and attach drawing rules at leaf draw-rule
      /// elements; 'marks' accumulates boolean-mark keys that are merged
      /// into the final object name.
      void add_type_recursive(ClassifObject * pParent,
                              size_t start,
                              std::vector<string> const & marks)
      {
        for (size_t i = start; i < m_elements.size(); ++i)
        {
          element_t & e = m_elements[i];

          if (e.pObj) continue;

          if (e.name == "rule")
          {
            // process rule
            string k = e.attr["k"];
            if (!is_valid_key(k)) continue;

            string v = e.attr["v"];
            if (!is_valid_value(v)) continue;

            utils::TokenizeIterator iK(k, "|");
            if (iK.is_last())
            {
              // process one key
              ASSERT ( *iK == k, () );

              int intV = get_mark_value(k, v);
              if (is_mark_key(k) && (intV != 0))
              {
                // key is a mark, so save it and go further
                add_type_recursive(pParent, i + 1, make_concat(marks, intV, k));
                clear_states(i);
              }
              else
              {
                // buildings assume as feature type
                bool lets_try = (k == "building" && intV == 1);

                // default access is yes. If "no" - make additional feature type
                if (!lets_try && (k == "access" && intV == -1))
                {
                  lets_try = true;
                  intV = 0;
                  v = "no-access";
                }

                if (!lets_try && intV != 0)
                {
                  // skip these keys, because they are dummy
                  continue;
                }
                else
                {
                  // add root or criterion
                  if (pParent == get_root())
                  {
                    pParent = pParent->Add(k);
                    e.pObj = pParent;

                    // use m_elements[1] to hold first parent of further creation objects
                    // need for correct working "get_parent_of" function
                    m_elements[1].pObj = pParent;
                  }
                  else
                  {
                    // avoid recursion like this:
                    // <k = "x", v = "a|b|c">
                    //   <k = "x", v = "a">
                    //   <k = "x", v = "b">
                    //   <k = "x", v = "c">
                    ClassifObject * ppParent = get_parent_of(i, pParent);
                    if (k != ppParent->GetName())
                    {
                      // do not set criterion like base object
                      if (k != pParent->GetName() &&
                          !process_feature_like_mark(pParent->GetName(), k))
                        pParent->AddCriterion(k);
                    }
                    else
                      pParent = ppParent;
                  }

                  // process values
                  utils::TokenizeIterator iV(v, "|");
                  while (!iV.end())
                  {
                    bool const b1 = process_feature_like_mark_from_root(k, *iV);
                    if (b1 || process_feature_like_mark(k, *iV))
                    {
                      // process value like mark, so save it and go further
                      add_type_recursive(
                          b1 ? get_root() : pParent, i + 1, make_concat(marks, 1, *iV));
                      clear_states(i);
                    }
                    else
                    {
                      ClassifObject * p = pParent;
                      if (intV == 0)
                        p = pParent->Add(*iV);
                      e.pObj = p;

                      add_type_recursive(p, i + 1, marks);
                      clear_states(i);
                    }

                    ++iV;
                  }
                }
              }
            }
            else
            {
              char const * aTry[] = { "natural", "landuse" };

              while (!iK.end())
              {
                // let's try to add root keys
                bool addMode = (pParent == get_root() && utils::IsInArray(aTry, *iK));

                ClassifObject * p = (addMode ? pParent->Add(*iK) : pParent->Find(*iK));
                if (p && (get_mark_value(*iK, v) == 0))
                {
                  if (p->IsCriterion()) p = pParent;

                  utils::TokenizeIterator iV(v, "|");
                  while (!iV.end())
                  {
                    ClassifObject * pp = (addMode ? p->Add(*iV) : p->Find(*iV));
                    if (pp)
                    {
                      e.pObj = pp;

                      add_type_recursive(pp, i + 1, marks);
                      clear_states(i);
                    }
                    ++iV;
                  }
                }
                ++iK;
              }
            }

            return; // processed to the end - exit
          }
          else if (is_draw_rule(e.name))
          {
            ASSERT ( i == m_elements.size()-1, ("drawing rules should be leavs") );

            // process draw rule
            if (pParent != get_root())
            {
              if (!marks.empty())
              {
                // make final mark string
                // (note: this loop's 'i' intentionally shadows the outer index)
                string res;
                for (size_t i = 0; i < marks.size(); ++i)
                {
                  if (!res.empty()) res += '-';
                  res += marks[i];
                }

                pParent = pParent->Add(res);
              }

              vector<drule::Key> keys;
              drule::rules().CreateRules(e.name, get_rule_type(), e.attr, keys);

              // if no "layer" tag, then atoi returns 0 - it's ok for us
              // 1000 - is a base count of rules for layer
              int const layer = atoi(e.attr["layer"].c_str()) * drule::layer_base_priority;
              for (size_t i = 0; i < keys.size(); ++i)
                keys[i].SetPriority(layer + m_priority++);

              for_each(keys.begin(), keys.end(), bind(&ClassifObject::AddDrawRule, pParent, _1));
            }
          }
        }
      }
    };
+ }
+
+ void ParseOSMTypes(char const * fPath, int scale)
+ {
+ drule::rules().SetParseFile(fPath, scale);
+
+ FileReader reader(fPath);
+ ReaderSource<FileReader> source(reader);
+ OSMTypesStream stream;
+ ParseXML(source, stream);
+ }
+
+ namespace
+ {
+ bool is_skip_tag(string const & k, string const & /*v*/)
+ {
+ // skip "cycleway's" tags because they interfer to set a valid types like "highway's"
+ return (k == "created_by" || k == "description" || k == "cycleway" || k == "embankment");
+ }
+
+ template <class ToDo> typename ToDo::result_type for_each_tag(XMLElement * p, ToDo toDo)
+ {
+ typedef typename ToDo::result_type res_t;
+
+ for (size_t i = 0; i < p->childs.size(); ++i)
+ {
+ if (p->childs[i].name == "tag")
+ {
+ string const & k = p->childs[i].attrs["k"];
+ string const & v = p->childs[i].attrs["v"];
+
+ if (is_skip_tag(k, v)) continue;
+
+ // this means "no"
+ //if (get_mark_value(k, v) == -1)
+ // continue;
+
+ res_t res = toDo(k, v);
+ if (res) return res;
+ }
+ }
+ return res_t();
+ }
+
+ bool is_name_tag(string const & k)
+ {
+ return (string::npos != k.find("name"));
+ }
+
+ class do_print
+ {
+ ostream & m_s;
+ public:
+ typedef bool result_type;
+
+ do_print(ostream & s) : m_s(s) {}
+ bool operator() (string const & k, string const & v) const
+ {
+ m_s << k << " <---> " << v << endl;
+ return false;
+ }
+ };
+
+ class do_find_name
+ {
+ size_t & m_count;
+ string & m_name;
+ int32_t & m_layer;
+ public:
+ typedef bool result_type;
+
+ do_find_name(size_t & count, string & name, int32_t & layer)
+ : m_count(count), m_name(name), m_layer(layer)
+ {
+ m_count = 0;
+ m_layer = 0;
+ }
+ bool operator() (string const & k, string const & v)
+ {
+ ++m_count;
+
+ // do not call is_name_tag(k), but exactly "name" tag
+ if (m_name.empty() && k == "name")
+ m_name = v;
+
+ // add house number as name
+ if (m_name.empty() && k == "addr:housenumber")
+ m_name = v;
+
+ if (k == "layer" && m_layer == 0)
+ m_layer = atoi(v.c_str());
+
+ return false;
+ }
+ };
+
+ class do_find_obj
+ {
+ ClassifObject const * m_parent;
+ bool m_isKey;
+
+ public:
+ typedef ClassifObjectPtr result_type;
+
+ do_find_obj(ClassifObject const * p, bool isKey) : m_parent(p), m_isKey(isKey) {}
+ ClassifObjectPtr operator() (string const & k, string const & v) const
+ {
+ if (!is_name_tag(k))
+ {
+ ClassifObjectPtr p = m_parent->BinaryFind(m_isKey ? k : v);
+ if (p) return p;
+ }
+ return ClassifObjectPtr(0, 0);
+ }
+ };
+
+ class do_find_root_obj : public do_find_obj
+ {
+ typedef do_find_obj base_type;
+
+ set<string> const & m_skipTags;
+
+ public:
+ do_find_root_obj(set<string> const & skipTags)
+ : base_type(classif().GetRoot(), true), m_skipTags(skipTags)
+ {
+ }
+ ClassifObjectPtr operator() (string const & k, string const & v) const
+ {
+ if (m_skipTags.find(k) == m_skipTags.end())
+ return base_type::operator() (k, v);
+
+ return ClassifObjectPtr(0, 0);
+ }
+ };
+
+ typedef vector<ClassifObjectPtr> path_type;
+ }
+
+ ClassifObjectPtr find_object(ClassifObject const * parent, XMLElement * p, bool isKey)
+ {
+ return for_each_tag(p, do_find_obj(parent, isKey));
+ }
+
+ size_t find_name_and_count(XMLElement * p, string & name, int32_t & layer)
+ {
+ size_t count;
+ for_each_tag(p, do_find_name(count, name, layer));
+ return count;
+ }
+
+//#ifdef DEBUG
+// class debug_find_string
+// {
+// string m_comp;
+// public:
+// debug_find_string(string const & comp) : m_comp(comp) {}
+// typedef bool result_type;
+// bool operator() (string const & k, string const & v) const
+// {
+// return (k == m_comp || v == m_comp);
+// }
+// };
+//#endif
+
  /// Compute classificator types, name and layer for a feature from its
  /// tag tree. Repeatedly finds a root classificator object by tag key,
  /// descends as deep as the tags allow, and retries with that root
  /// excluded until no root matches.
  /// @return false for an empty feature or one without drawable types
  bool GetNameAndType(XMLElement * p, vector<uint32_t> & types, string & name, int32_t & layer)
  {
//#ifdef DEBUG
//    // code to set a breakpoint
//    if (for_each_tag(p, debug_find_string("bridge")))
//    {
//      int break_here = 0;
//    }
//#endif

    // maybe an empty feature
    if (find_name_and_count(p, name, layer) == 0)
      return false;

    set<string> skipRootKeys;

    do
    {
      path_type path;

      // find first root object by key
      do_find_root_obj doFindRoot(skipRootKeys);
      ClassifObjectPtr pRoot = for_each_tag(p, doFindRoot);

      // find path from root
      ClassifObjectPtr pObj = pRoot;
      while (pObj)
      {
        path.push_back(pObj);

        // next objects trying to find by value first
        pObj = find_object(path.back().get(), p, false);
        if (!pObj)
        {
          // if no - try find object by key (in case of k = "area", v = "yes")
          pObj = find_object(path.back().get(), p, true);
        }
      }

      size_t const count = path.size();
      if (count >= 1)
      {
        // assign type: pack the whole root-to-leaf path into one value
        uint32_t t = ftype::GetEmptyValue();

        for (size_t i = 0; i < count; ++i)
          ftype::PushValue(t, path[i].GetIndex());

        // use features only with drawing rules
        if (feature::IsDrawableAny(t))
          types.push_back(t);
      }

      if (pRoot)
      {
        // save this root to skip, and try again
        skipRootKeys.insert(pRoot->GetName());
      }
      else
        break;

    } while (true);

    return !types.empty();
  }
+}
diff --git a/generator/osm2type.hpp b/generator/osm2type.hpp
new file mode 100644
index 0000000000..6a2449c29d
--- /dev/null
+++ b/generator/osm2type.hpp
@@ -0,0 +1,20 @@
+#pragma once
+
+#include "../base/base.hpp"
+
+#include "../std/string.hpp"
+#include "../std/vector.hpp"
+
+#include "../base/start_mem_debug.hpp"
+
+struct XMLElement;
+
namespace ftype
{
  /// Parse the drawing-rules xml at fPath for the given scale and fill
  /// the global classificator with types and drawing rules.
  void ParseOSMTypes(char const * fPath, int scale);

  /// Get the types, name and layer for a feature with the tree of tags.
  /// @return false when nothing usable was recognized
  bool GetNameAndType(XMLElement * p, vector<uint32_t> & types, string & name, int32_t & layer);
}
+
+#include "../base/stop_mem_debug.hpp"
diff --git a/generator/osm_element.hpp b/generator/osm_element.hpp
new file mode 100644
index 0000000000..4a213946f7
--- /dev/null
+++ b/generator/osm_element.hpp
@@ -0,0 +1,608 @@
+#pragma once
+
+#include "osm2type.hpp"
+#include "xml_element.hpp"
+
+#include "../indexer/feature.hpp"
+#include "../indexer/osm_decl.hpp"
+#include "../indexer/feature_visibility.hpp"
+
+#include "../base/string_utils.hpp"
+#include "../base/logging.hpp"
+#include "../base/stl_add.hpp"
+
+#include "../std/unordered_map.hpp"
+#include "../std/list.hpp"
+#include "../std/set.hpp"
+#include "../std/vector.hpp"
+
+#include "../base/start_mem_debug.hpp"
+
+
/// Second-pass osm parser base: assembles features from osm elements using
/// an already-filled holder of nodes, ways and relations.
/// @param TEmitter Feature accumulating policy
/// @param THolder Nodes, ways, relations holder
template <class TEmitter, class THolder>
class SecondPassParserBase : public BaseOSMParser
{
protected:
  TEmitter & m_emitter;
  THolder & m_holder;

  /// max possible number of types per feature
  static const size_t max_number_of_types = 16;

  SecondPassParserBase(TEmitter & emitter, THolder & holder)
    : m_emitter(emitter), m_holder(holder)
  {
    static char const * tags[] = { "osm", "node", "way", "relation" };
    SetTags(tags);
  }

  /// Closed ring: at least 3 points with first == last.
  static bool IsValidAreaPath(vector<m2::PointD> const & pts)
  {
    return (pts.size() > 2 && pts.front() == pts.back());
  }

  /// Resolve a node id into a point.
  /// NOTE(review): arguments are passed as (y, x) — the holder apparently
  /// stores coordinates in (lat, lon) order; confirm with THolder::GetNode.
  bool GetPoint(uint64_t id, m2::PointD & pt)
  {
    return m_holder.GetNode(id, pt.y, pt.x);
  }

  /// Adapter functor: resolves node ids to points and forwards them to ToDo;
  /// unresolvable ids are silently skipped.
  template <class ToDo> class process_points
  {
    SecondPassParserBase * m_pMain;
    ToDo & m_toDo;

  public:
    process_points(SecondPassParserBase * pMain, ToDo & toDo)
      : m_pMain(pMain), m_toDo(toDo)
    {
    }
    void operator () (uint64_t id)
    {
      m2::PointD pt;
      if (m_pMain->GetPoint(id, pt))
        m_toDo(pt);
    }
  };

  /// endpoint node id -> way; every way is registered under both endpoints
  typedef multimap<uint64_t, shared_ptr<WayElement> > way_map_t;

  /// Load way 'id' and index it by both endpoints into m (valid ways only).
  void GetWay(uint64_t id, way_map_t & m)
  {
    shared_ptr<WayElement> e(new WayElement());
    if (m_holder.GetWay(id, *e) && e->IsValid())
    {
      m.insert(make_pair(e->nodes.front(), e));
      m.insert(make_pair(e->nodes.back(), e));
    }
  }

  template <class ToDo>
  void ForEachWayPoint(uint64_t id, ToDo toDo)
  {
    WayElement e;
    if (m_holder.GetWay(id, e))
    {
      process_points<ToDo> process(this, toDo);
      e.ForEachPoint(process);
    }
  }

  /// Walk a chain of ways connected by shared endpoints, feeding every
  /// point to toDo; processed ways are erased from m as it goes.
  template <class ToDo>
  void ProcessWayPoints(way_map_t & m, ToDo toDo)
  {
    if (m.empty()) return;

    typedef way_map_t::iterator iter_t;

    // start
    iter_t i = m.begin();
    uint64_t id = i->first;

    process_points<ToDo> process(this, toDo);
    do
    {
      // process way points
      shared_ptr<WayElement> e = i->second;
      e->ForEachPoint(process);

      m.erase(i);

      // next 'id' to process
      id = e->GetOtherEndPoint(id);
      pair<iter_t, iter_t> r = m.equal_range(id);

      // finally erase element 'e' and find next way in chain
      i = r.second;
      while (r.first != r.second)
      {
        if (r.first->second == e)
          m.erase(r.first++);
        else
        {
          i = r.first;
          ++r.first;
        }
      }

      if (i == r.second) break;
    } while (true);
  }

  /// Collects hole rings by way id; unclosed rings are dropped.
  class holes_accumulator
  {
    SecondPassParserBase * m_pMain;

  public:
    /// @param[out] list of holes
    list<vector<m2::PointD> > m_holes;

    holes_accumulator(SecondPassParserBase * pMain) : m_pMain(pMain) {}

    void operator() (uint64_t id)
    {
      m_holes.push_back(vector<m2::PointD>());

      m_pMain->ForEachWayPoint(id, MakeBackInsertFunctor(m_holes.back()));

      if (!m_pMain->IsValidAreaPath(m_holes.back()))
        m_holes.pop_back();
    }
  };

  /// Find holes for way with 'id' in first relation.
  class multipolygon_holes_processor
  {
    uint64_t m_id; ///< id of way to find it's holes
    holes_accumulator m_holes;

  public:
    multipolygon_holes_processor(uint64_t id, SecondPassParserBase * pMain)
      : m_id(id), m_holes(pMain)
    {
    }

    /// 1. relations process function
    bool operator() (uint64_t /*id*/, RelationElement const & e)
    {
      if (e.GetType() == "multipolygon")
      {
        string role;
        if (e.FindWay(m_id, role) && (role == "outer"))
        {
          e.ForEachWay(*this);
          // stop processing (??? assume that "outer way" exists in one relation only ???)
          return true;
        }
      }
      return false;
    }

    /// 2. "ways in relation" process function
    void operator() (uint64_t id, string const & role)
    {
      if (id != m_id && role == "inner")
        m_holes(id);
    }

    list<vector<m2::PointD> > & GetHoles() { return m_holes.m_holes; }
  };

  /// Feature description struct.
  struct value_t
  {
    typedef vector<uint32_t> types_t;
    types_t types;   ///< 1-n types, not empty
    string name;     ///< 1-1 name, @todo 1-n names
    int32_t layer;   ///< layer

    value_t()
    {
      types.reserve(max_number_of_types);
    }
    bool IsValid() const { return !types.empty(); }
    void Add(value_t const & v)
    {
      types.insert(types.end(), v.types.begin(), v.types.end());
    }
  };

  /// Feature types processor: merges into a feature the types of the
  /// relations it belongs to, caching per-relation results.
  class type_processor
  {
    /// Convert relation tags (except "type") into an XMLElement with <tag>
    /// children, as expected by ftype::GetNameAndType.
    void make_xml_element(RelationElement const & rel, XMLElement & out)
    {
      for (map<string, string>::const_iterator i = rel.tags.begin(); i != rel.tags.end(); ++i)
      {
        if (i->first == "type") continue;

        out.childs.push_back(XMLElement());
        XMLElement & e = out.childs.back();
        e.name = "tag";
        e.attrs["k"] = i->first;
        e.attrs["v"] = i->second;
      }
    }

    /// @param[in] ID of processing feature.
    uint64_t m_featureID;

    /// @param[out] Feature value as result.
    value_t * m_val;

    /// Cache: relation id -> feature value (for fast feature parsing)
    unordered_map<uint64_t, value_t> m_typeCache;

    public:
    /// Start process new feature.
    void Reset(uint64_t fID, value_t * val)
    {
      m_featureID = fID;
      m_val = val;
    }

    /// 1. "initial relation" process
    int operator() (uint64_t id)
    {
      typename unordered_map<uint64_t, value_t>::const_iterator i = m_typeCache.find(id);
      if (i != m_typeCache.end())
      {
        m_val->Add(i->second);
        return -1; // continue process relations
      }
      return 0; // read relation from file (see next operator)
    }

    /// 2. "relation from file" process
    bool operator() (uint64_t id, RelationElement const & rel)
    {
      // make XMLElement struct from relation's tags for GetNameAndType function.
      XMLElement e;
      make_xml_element(rel, e);

      if (rel.GetType() == "multipolygon")
      {
        // we will process multipolygons later
      }
      else
      {
        // process types of relation and add them to m_val
        value_t val;
        if (ftype::GetNameAndType(&e, val.types, val.name, val.layer))
        {
          m_typeCache[id] = val;
          m_val->Add(val);
        }
        else
          m_typeCache[id] = value_t();
      }

      // continue process relations
      return false;
    }
  } m_typeProcessor;

  typedef FeatureBuilder1 feature_builder_t;

  /// Attach multipolygon holes to an already-closed area feature.
  void FinishAreaFeature(uint64_t id, feature_builder_t & ft)
  {
    ASSERT ( ft.IsGeometryClosed(), () );

    multipolygon_holes_processor processor(id, this);
    m_holder.ForEachRelationByWay(id, processor);
    ft.SetAreaAddHoles(processor.GetHoles());
  }

  /// Extract the element id and resolve its classificator types from its
  /// own tags plus the tags of relations it participates in.
  /// @return true when at least one type was recognized
  bool ParseType(XMLElement * p, uint64_t & id, value_t & fValue)
  {
    VERIFY ( utils::to_uint64(p->attrs["id"], id),
             ("Unknown element with invalid id : ", p->attrs["id"]) );

    // try to get type from element tags
    ftype::GetNameAndType(p, fValue.types, fValue.name, fValue.layer);

    // try to get type from relations tags
    m_typeProcessor.Reset(id, &fValue);

    if (p->name == "node" && !fValue.IsValid())
    {
      // additional process of nodes ONLY if there is no native types
      m_holder.ForEachRelationByNodeCached(id, m_typeProcessor);
    }
    else if (p->name == "way")
    {
      // always make additional process of ways
      m_holder.ForEachRelationByWayCached(id, m_typeProcessor);
    }

    // remove duplicating types
    sort(fValue.types.begin(), fValue.types.end());
    fValue.types.erase(unique(fValue.types.begin(), fValue.types.end()), fValue.types.end());

    // unrecognized feature by classificator
    return fValue.IsValid();
  }
};
+
+/*
+template <class TEmitter, class THolder>
+class SecondPassParserJoin : public SecondPassParserBase<TEmitter, THolder>
+{
+ typedef SecondPassParserBase<TEmitter, THolder> base_type;
+
+ set<uint64_t> m_usedDirect;
+
+ bool TryEmitUnited(uint64_t featureID, typename base_type::feature_builder_t & ft)
+ {
+ // check, if feature already processed early
+ if (m_usedDirect.count(featureID) > 0)
+ return true;
+
+ set<uint64_t> path;
+ path.insert(featureID);
+
+ WayElement e;
+
+ // process geometry of initial way itself
+ base_type::m_holder.GetWay(featureID, e);
+ if (e.nodes.empty())
+ return false;
+
+ for (size_t i = 0; i < e.nodes.size(); ++i)
+ {
+ m2::PointD pt;
+ if (base_type::GetPoint(e.nodes[i], pt))
+ ft.AddPoint(pt);
+ else
+ return false;
+ }
+
+ // process connected ways in cycle while ...
+ uint64_t fID = featureID;
+ while (!ft.IsGeometryClosed())
+ {
+ uint64_t const nodeID = e.nodes.back();
+ if (!base_type::m_holder.GetNextWay(fID, nodeID, e))
+ break;
+
+ if (!path.insert(fID).second)
+ {
+ LOG_SHORT(LWARNING, ("JOIN_DBG! Cycle found during way joining, duplicate id = ", fID));
+ break;
+ }
+
+ // skip first point, because it's equal with previous
+ size_t i;
+ int inc;
+ if (e.nodes.front() == nodeID)
+ {
+ i = 1;
+ inc = 1;
+ }
+ else
+ {
+ ASSERT ( e.nodes.back() == nodeID, () );
+
+ i = e.nodes.size() - 2;
+ inc = -1;
+ }
+
+ size_t count = 1;
+ while (count++ < e.nodes.size())
+ {
+ m2::PointD pt;
+ if (base_type::GetPoint(e.nodes[i], pt))
+ ft.AddPoint(pt);
+ else
+ return false;
+
+ i += inc;
+ }
+ }
+
+ if (ft.IsGeometryClosed())
+ {
+ m_usedDirect.insert(path.begin(), path.end());
+
+ base_type::FinishAreaFeature(featureID, ft);
+
+ base_type::m_emitter(ft);
+ return true;
+ }
+ else
+ {
+ LOG_SHORT(LWARNING, ("JOIN_DBG! Ways not connected for root way = ", featureID));
+ return false;
+ }
+ }
+
+protected:
+ virtual void EmitElement(XMLElement * p)
+ {
+ uint64_t id;
+ typename base_type::value_t fValue;
+ if (!ParseType(p, id, fValue))
+ return;
+
+ // check, if we can make united feature
+ for (typename base_type::value_t::types_t::iterator i = fValue.types.begin(); i != fValue.types.end(); ++i)
+ if (feature::NeedUnite(*i))
+ {
+ typename base_type::feature_builder_t ft;
+ ft.AddName(fValue.name);
+ ft.AddTypes(fValue.types.begin(), fValue.types.end());
+ ft.AddLayer(fValue.layer);
+
+ TryEmitUnited(id, ft);
+ break;
+ }
+ }
+
+public:
+ SecondPassParserJoin(TEmitter & emitter, THolder & holder)
+ : base_type(emitter, holder)
+ {
+ }
+};
+*/
+
/// Second OSM parsing pass: converts each classified element (node,
/// way or multipolygon relation) into map feature(s) and hands them
/// to the emitter.
template <class TEmitter, class THolder>
class SecondPassParserUsual : public SecondPassParserBase<TEmitter, THolder>
{
  typedef SecondPassParserBase<TEmitter, THolder> base_type;

  typedef typename base_type::value_t type_t;
  typedef typename base_type::feature_builder_t feature_t;

  // Copy the recognized name, types and layer into a feature builder.
  void InitFeature(type_t const & fValue, feature_t & ft)
  {
    ft.AddName(fValue.name);
    ft.AddTypes(fValue.types.begin(), fValue.types.end());
    ft.AddLayer(fValue.layer);
  }

protected:
  virtual void EmitElement(XMLElement * p)
  {
    uint64_t id;
    type_t fValue;
    if (!ParseType(p, id, fValue))
      return;

    feature_t ft;
    InitFeature(fValue, ft);

    if (p->name == "node")
    {
      // nodes become point features, only if drawable as points
      if (!feature::IsDrawableLike(fValue.types, feature::fpoint))
        return;

      // NOTE(review): nodes without child elements are skipped here -
      // presumably only tagged nodes are interesting; confirm.
      m2::PointD pt;
      if (p->childs.empty() || !base_type::GetPoint(id, pt))
        return;

      ft.SetCenter(pt);
    }
    else if (p->name == "way")
    {
//#ifdef DEBUG
//      if (id == 41082185 || id == 64452462 || id == 48922414)
//        __debugbreak();
//#endif

      bool const isLine = feature::IsDrawableLike(fValue.types, feature::fline);
      bool const isArea = feature::IsDrawableLike(fValue.types, feature::farea);

      if (!isLine && !isArea)
        return;

      // geometry of feature: resolve every referenced node to a point;
      // a single unresolved node drops the whole way
      for (size_t i = 0; i < p->childs.size(); ++i)
      {
        if (p->childs[i].name == "nd")
        {
          uint64_t nodeID;
          VERIFY ( utils::to_uint64(p->childs[i].attrs["ref"], nodeID),
                   ("Bad node ref in way : ", p->childs[i].attrs["ref"]) );

          m2::PointD pt;
          if (!base_type::GetPoint(nodeID, pt))
            return;

          ft.AddPoint(pt);
        }
      }

      size_t const count = ft.GetPointsCount();
      if (count < 2)
        return;

      if (isLine)
        ft.SetLinear();

      // Get the tesselation for an area object (only if it has area drawing rules,
      // otherwise it will stay a linear object).
      if (isArea && count > 2 && ft.IsGeometryClosed())
        base_type::FinishAreaFeature(id, ft);
    }
    else if (p->name == "relation")
    {
//#ifdef DEBUG
//      if (id == 254789)
//        __debugbreak();
//#endif

      if (!feature::IsDrawableLike(fValue.types, feature::farea))
        return;

      // check, if this is our processable relation (type=multipolygon)
      bool isProcess = false;
      for (size_t i = 0; i < p->childs.size(); ++i)
      {
        if (p->childs[i].name == "tag" &&
            p->childs[i].attrs["k"] == "type" &&
            p->childs[i].attrs["v"] == "multipolygon")
        {
          isProcess = true;
        }
      }
      if (!isProcess)
        return;

      typename base_type::holes_accumulator holes(this);
      typename base_type::way_map_t wayMap;

      // iterate ways to get 'outer' and 'inner' geometries
      for (size_t i = 0; i < p->childs.size(); ++i)
      {
        if (p->childs[i].name == "member" &&
            p->childs[i].attrs["type"] == "way")
        {
          string const & role = p->childs[i].attrs["role"];
          uint64_t wayID;
          VERIFY ( utils::to_uint64(p->childs[i].attrs["ref"], wayID),
                   ("Bad way ref in relation : ", p->childs[i].attrs["ref"]) );

          if (role == "outer")
          {
            GetWay(wayID, wayMap);
          }
          else if (role == "inner")
          {
            holes(wayID);
          }
        }
      }

      // cycle through the ways in map: stitch outer rings and emit each
      // closed one as a separate area feature with the collected holes.
      // NOTE(review): ProcessWayPoints is expected to consume entries
      // from wayMap, otherwise this loop would not terminate - confirm
      // against the base class implementation.
      while (!wayMap.empty())
      {
        feature_t f;
        InitFeature(fValue, f);

        ProcessWayPoints(wayMap, bind(&base_type::feature_builder_t::AddPoint, ref(f), _1));

        if (f.IsGeometryClosed())
        {
          f.SetAreaAddHoles(holes.m_holes);
          if (f.PreSerialize())
            base_type::m_emitter(f);
        }
      }

      return;
    }

    if (ft.PreSerialize())
      base_type::m_emitter(ft);
  }

public:
  SecondPassParserUsual(TEmitter & emitter, THolder & holder)
    : base_type(emitter, holder)
  {
  }
};
+
+#include "../../base/stop_mem_debug.hpp"
diff --git a/generator/polygonizer.hpp b/generator/polygonizer.hpp
new file mode 100644
index 0000000000..8ce1963780
--- /dev/null
+++ b/generator/polygonizer.hpp
@@ -0,0 +1,203 @@
+#pragma once
+#include "kml_parser.hpp"
+#include "world_map_generator.hpp"
+
+#include "../indexer/feature.hpp"
+#include "../indexer/feature_visibility.hpp"
+#include "../indexer/cell_id.hpp"
+#include "../geometry/rect2d.hpp"
+#include "../coding/file_writer.hpp"
+#include "../base/base.hpp"
+#include "../base/buffer_vector.hpp"
+#include "../base/macros.hpp"
+#include "../std/scoped_ptr.hpp"
+#include "../std/string.hpp"
+
+#ifndef PARALLEL_POLYGONIZER
+#define PARALLEL_POLYGONIZER 1
+#endif
+
+#if PARALLEL_POLYGONIZER
+#include <QSemaphore>
+#include <QThreadPool>
+#include <QMutex>
+#include <QMutexLocker>
+#endif
+
+namespace feature
+{
  // Groups features according to country polygons
  template <class FeatureOutT, class BoundsT, typename CellIdT>
  class Polygonizer
  {
  public:
    /// Loads the country polygons list and creates the (optional)
    /// world map generator. TInfo supplies file naming and settings.
    template <class TInfo>
    Polygonizer(TInfo & info) : m_FeatureOutInitData(info.datFilePrefix, info.datFileSuffix),
      m_worldMap(info.maxScaleForWorldFeatures, info.mergeCoastlines, m_FeatureOutInitData)
#if PARALLEL_POLYGONIZER
      , m_ThreadPoolSemaphore(m_ThreadPool.maxThreadCount() * 8)
#endif
    {
#if PARALLEL_POLYGONIZER
      LOG(LINFO, ("Polygonizer thread pool threads:", m_ThreadPool.maxThreadCount()));
#endif

      CHECK(kml::LoadCountriesList(info.datFilePrefix, m_countries, info.simplifyCountriesLevel),
            ("Error loading country polygons files"));

      //LOG_SHORT(LINFO, ("Loaded polygons count for regions:"));
      //for (size_t i = 0; i < m_countries.size(); ++i)
      //{
      //  LOG_SHORT(LINFO, (m_countries[i].m_name, m_countries[i].m_regions.size()));
      //}
    }
    ~Polygonizer()
    {
      // wait for pending background tasks before deleting the buckets
      Finish();
      for_each(m_Buckets.begin(), m_Buckets.end(), DeleteFunctor());
    }

    /// Tests whether any point of a feature lies inside one country's regions.
    struct PointChecker
    {
      kml::RegionsContainerT const & m_regions;
      bool m_belongs;

      PointChecker(kml::RegionsContainerT const & regions)
        : m_regions(regions), m_belongs(false) {}

      // point functor; returns false (stop iteration) once a point belongs
      bool operator()(m2::PointD const & pt)
      {
        m_regions.ForEachInRect(m2::RectD(pt, pt), bind<void>(ref(*this), _1, cref(pt)));
        return !m_belongs;
      }

      // region functor, called for each candidate region of the point
      void operator() (kml::Region const & rgn, kml::Region::value_type const & point)
      {
        if (!m_belongs)
          m_belongs = rgn.Contains(point);
      }
    };

    // Collects pointers to all countries whose rect intersects a feature.
    class InsertCountriesPtr
    {
      typedef buffer_vector<kml::CountryPolygons const *, 32> vec_type;
      vec_type & m_vec;

    public:
      InsertCountriesPtr(vec_type & vec) : m_vec(vec) {}
      void operator() (kml::CountryPolygons const & c)
      {
        m_vec.push_back(&c);
      }
    };

    /// Routes one feature: always offered to the world map, then
    /// dispatched into the bucket of every country it belongs to.
    void operator () (FeatureBuilder1 const & fb)
    {
      m_worldMap(fb);

      buffer_vector<kml::CountryPolygons const *, 32> vec;
      m_countries.ForEachInRect(fb.GetLimitRect(), InsertCountriesPtr(vec));

      switch (vec.size())
      {
      case 0:
        break;
      case 1:
        // single candidate - no point-in-polygon test needed
        EmitFeature(vec[0], fb);
        break;
      default:
        {
#if PARALLEL_POLYGONIZER
          // expensive point-in-polygon tests run on the thread pool;
          // the semaphore bounds the number of queued tasks
          m_ThreadPoolSemaphore.acquire();
          m_ThreadPool.start(new PolygonizerTask(this, vec, fb));
#else
          PolygonizerTask task(this, vec, fb);
          task.RunBase();
#endif
        }
      }
    }

    void Finish()
    {
#if PARALLEL_POLYGONIZER
      m_ThreadPool.waitForDone();
#endif
    }

    /// Writes the feature into the country's bucket, creating it lazily.
    void EmitFeature(kml::CountryPolygons const * country, FeatureBuilder1 const & fb)
    {
#if PARALLEL_POLYGONIZER
      // may be called from worker threads - serialize bucket access
      QMutexLocker mutexLocker(&m_EmitFeatureMutex);
      UNUSED_VALUE(mutexLocker);
#endif
      if (country->m_index == -1)
      {
        m_Names.push_back(country->m_name);
        m_Buckets.push_back(new FeatureOutT(country->m_name, m_FeatureOutInitData));
        country->m_index = m_Buckets.size()-1;
      }

      (*(m_Buckets[country->m_index]))(fb);
    }

    vector<string> const & Names()
    {
      return m_Names;
    }

  private:
    typename FeatureOutT::InitDataType m_FeatureOutInitData;

    vector<FeatureOutT*> m_Buckets;       // one output per country, owned
    vector<string> m_Names;               // country names, parallel to m_Buckets
    kml::CountriesContainerT m_countries;
    WorldMapGenerator<FeatureOutT> m_worldMap;

#if PARALLEL_POLYGONIZER
    QThreadPool m_ThreadPool;
    QSemaphore m_ThreadPoolSemaphore;
    QMutex m_EmitFeatureMutex;
#endif

    friend class PolygonizerTask;

    // One unit of work: check a feature against its candidate countries
    // and emit it into each country it actually belongs to.
    class PolygonizerTask
#if PARALLEL_POLYGONIZER
      : public QRunnable
#endif
    {
    public:
      PolygonizerTask(Polygonizer * pPolygonizer,
                      buffer_vector<kml::CountryPolygons const *, 32> const & countries,
                      FeatureBuilder1 const & fb)
        : m_pPolygonizer(pPolygonizer), m_Countries(countries), m_FB(fb) {}

      void RunBase()
      {
        for (size_t i = 0; i < m_Countries.size(); ++i)
        {
          PointChecker doCheck(m_Countries[i]->m_regions);
          m_FB.ForEachTruePointRef(doCheck);

          if (doCheck.m_belongs)
            m_pPolygonizer->EmitFeature(m_Countries[i], m_FB);
        }
      }

#if PARALLEL_POLYGONIZER
      void run()
      {
        RunBase();

        // free one queue slot for the producer thread
        m_pPolygonizer->m_ThreadPoolSemaphore.release();
      }
#endif

    private:
      Polygonizer * m_pPolygonizer;
      buffer_vector<kml::CountryPolygons const *, 32> m_Countries;
      FeatureBuilder1 m_FB;   // own copy: the task may outlive the caller's feature
    };
  };
+}
diff --git a/generator/statistics.cpp b/generator/statistics.cpp
new file mode 100644
index 0000000000..84c99ea74a
--- /dev/null
+++ b/generator/statistics.cpp
@@ -0,0 +1,157 @@
+#include "../base/SRC_FIRST.hpp"
+
+#include "statistics.hpp"
+
+#include "../indexer/feature_processor.hpp"
+#include "../indexer/classificator.hpp"
+#include "../indexer/feature_impl.hpp"
+
+#include "../base/string_utils.hpp"
+
+#include "../std/iostream.hpp"
+
+#include "../base/start_mem_debug.hpp"
+
+
+namespace stats
+{
  /// Prints to stdout the size of every known section (tag) of the map
  /// container: feature data, per-scale geometry/triangle streams, index.
  void FileContainerStatistic(string const & fName)
  {
    FilesContainerR cont(fName);

    vector<string> tags;
    tags.push_back(DATA_FILE_TAG);
    // one geometry and one triangles section per country scale level
    for (int i = 0; i < ARRAY_SIZE(feature::g_arrCountryScales); ++i)
    {
      tags.push_back(feature::GetTagForIndex(GEOMETRY_FILE_TAG, i));
      tags.push_back(feature::GetTagForIndex(TRIANGLE_FILE_TAG, i));
    }
    tags.push_back(INDEX_FILE_TAG);

    for (size_t i = 0; i < tags.size(); ++i)
      cout << tags[i] << " : " << cont.GetReader(tags[i]).Size() << endl;
  }
+
  /// Feature visitor that accumulates per-feature size statistics
  /// into a MapInfo, grouped by geometry type, classificator type,
  /// points count and triangles count.
  class AccumulateStatistic
  {
    MapInfo & m_info;

    // Charges the whole feature size to each of its classificator types.
    class ProcessType
    {
      MapInfo & m_info;
      uint32_t m_size;

    public:
      ProcessType(MapInfo & info, uint32_t sz) : m_info(info), m_size(sz) {}
      void operator() (uint32_t type)
      {
        m_info.AddToSet(TypeTag(type), m_size, m_info.m_byClassifType);
      }
    };

  public:
    AccumulateStatistic(MapInfo & info) : m_info(info) {}

    void operator() (FeatureType const & f, uint32_t)
    {
      f.ParseBeforeStatistic();

      // header sizes: [0] points, [1] strips, [2] total header size
      FeatureType::inner_geom_stat_t const innerStats = f.GetInnerStatistic();

      m_info.m_inner[0].Add(innerStats.m_Points);
      m_info.m_inner[1].Add(innerStats.m_Strips);
      m_info.m_inner[2].Add(innerStats.m_Size);

      // -1: presumably "all scales accumulated" - confirm against FeatureType
      FeatureType::geom_stat_t const geom = f.GetGeometrySize(-1);
      FeatureType::geom_stat_t const trg = f.GetTrianglesSize(-1);

      m_info.AddToSet(geom.m_count, geom.m_size, m_info.m_byPointsCount);
      m_info.AddToSet(trg.m_count / 3, trg.m_size, m_info.m_byTrgCount);

      uint32_t const allSize = innerStats.m_Size + geom.m_size + trg.m_size;

      m_info.AddToSet(f.GetFeatureType(), allSize, m_info.m_byGeomType);

      ProcessType doProcess(m_info, allSize);
      f.ForEachTypeRef(doProcess);
    }
  };
+
  /// Scans every feature of the dat file |fName| and fills |info|.
  void CalcStatistic(string const & fName, MapInfo & info)
  {
    AccumulateStatistic doProcess(info);
    feature::ForEachFromDat(fName, doProcess);
  }
+
  /// Prints one accumulated (size, count) pair in the common report format.
  void PrintInfo(char const * prefix, GeneralInfo const & info)
  {
    cout << prefix << ": size = " << info.m_size << "; count = " << info.m_count << endl;
  }
+
  // Human readable geometry type name for report output.
  string GetKey(FeatureBase::FeatureType type)
  {
    switch (type)
    {
    case FeatureBase::FEATURE_TYPE_LINE: return "Line";
    case FeatureBase::FEATURE_TYPE_AREA: return "Area";
    default: return "Point";
    }
  }
+
  // Report key for the "by points/triangles count" groups.
  string GetKey(uint32_t i)
  {
    return utils::to_string(i);
  }
+
  // Human-readable classificator name for the raw type value.
  string GetKey(TypeTag t)
  {
    return classif().GetFullObjectName(t.m_val);
  }
+
  /// Prints the up-to-10 biggest entries of |theSet|, ordered by TSortCr.
  template <class TSortCr, class TSet>
  void PrintTop(char const * prefix, TSet const & theSet)
  {
    cout << prefix << endl;

    // copy into a vector - std::set can't be reordered in place
    vector<typename TSet::value_type> vec(theSet.begin(), theSet.end());

    sort(vec.begin(), vec.end(), TSortCr());

    size_t const count = min(static_cast<size_t>(10), vec.size());
    for (size_t i = 0; i < count; ++i)
    {
      cout << i << ". ";
      PrintInfo(GetKey(vec[i].m_key).c_str(), vec[i].m_info);
    }
  }
+
+ struct greater_size
+ {
+ template <class TInfo>
+ bool operator() (TInfo const & r1, TInfo const & r2) const
+ {
+ return r1.m_info.m_size > r2.m_info.m_size;
+ }
+ };
+
+ struct greater_count
+ {
+ template <class TInfo>
+ bool operator() (TInfo const & r1, TInfo const & r2) const
+ {
+ return r1.m_info.m_count > r2.m_info.m_count;
+ }
+ };
+
  /// Prints the full statistics report: overall header sizes followed
  /// by the size-ordered top-10 of every grouping.
  void PrintStatistic(MapInfo & info)
  {
    // m_inner: [2] total header, [0] points header, [1] strips header
    PrintInfo("DAT header", info.m_inner[2]);
    PrintInfo("Points header", info.m_inner[0]);
    PrintInfo("Strips header", info.m_inner[1]);

    // NOTE(review): only size-ordered tops are printed; greater_count
    // is currently unused.
    PrintTop<greater_size>("Top SIZE by Geometry Type", info.m_byGeomType);
    PrintTop<greater_size>("Top SIZE by Classificator Type", info.m_byClassifType);
    PrintTop<greater_size>("Top SIZE by Points Count", info.m_byPointsCount);
    PrintTop<greater_size>("Top SIZE by Triangles Count", info.m_byTrgCount);
  }
+}
diff --git a/generator/statistics.hpp b/generator/statistics.hpp
new file mode 100644
index 0000000000..a4a73c03c1
--- /dev/null
+++ b/generator/statistics.hpp
@@ -0,0 +1,76 @@
+#pragma once
+
+#include "../indexer/feature.hpp"
+
+#include "../std/map.hpp"
+
+
+namespace stats
+{
+ struct GeneralInfo
+ {
+ uint64_t m_count, m_size;
+
+ GeneralInfo() : m_count(0), m_size(0) {}
+
+ void Add(uint64_t sz)
+ {
+ if (sz > 0)
+ {
+ ++m_count;
+ m_size += sz;
+ }
+ }
+ };
+
+ template <class TKey>
+ struct GeneralInfoKey
+ {
+ TKey m_key;
+ GeneralInfo m_info;
+
+ GeneralInfoKey(TKey key) : m_key(key) {}
+
+ bool operator< (GeneralInfoKey const & rhs) const
+ {
+ return m_key < rhs.m_key;
+ }
+ };
+
+ struct TypeTag
+ {
+ uint32_t m_val;
+
+ TypeTag(uint32_t v) : m_val(v) {}
+
+ bool operator< (TypeTag const & rhs) const
+ {
+ return m_val < rhs.m_val;
+ }
+ };
+
  /// Aggregated size statistics for one map file, grouped by several keys.
  struct MapInfo
  {
    set<GeneralInfoKey<FeatureBase::FeatureType> > m_byGeomType;   // point/line/area
    set<GeneralInfoKey<TypeTag> > m_byClassifType;                 // classificator type
    set<GeneralInfoKey<uint32_t> > m_byPointsCount, m_byTrgCount;  // geometry complexity

    // [0] points header, [1] strips header, [2] total header size
    GeneralInfo m_inner[3];

    /// Adds |sz| bytes to the statistic of |key|, inserting it on demand.
    template <class TKey, class TSet>
    void AddToSet(TKey key, uint32_t sz, TSet & theSet)
    {
      if (sz > 0)
      {
        // GCC doesn't allow to modify set value ...
        // (safe: m_info does not participate in the set ordering)
        const_cast<GeneralInfo &>(
          theSet.insert(GeneralInfoKey<TKey>(key)).first->m_info).Add(sz);
      }
    }
  };
+
+ void FileContainerStatistic(string const & fName);
+
+ void CalcStatistic(string const & fName, MapInfo & info);
+ void PrintStatistic(MapInfo & info);
+}
diff --git a/generator/update_generator.cpp b/generator/update_generator.cpp
new file mode 100644
index 0000000000..59f9e54213
--- /dev/null
+++ b/generator/update_generator.cpp
@@ -0,0 +1,117 @@
+#include "update_generator.hpp"
+
+#include "../coding/file_writer.hpp"
+
+#include "../geometry/cellid.hpp"
+
+#include "../platform/platform.hpp"
+
+#include "../storage/country.hpp"
+#include "../defines.hpp"
+
+#include "../base/string_utils.hpp"
+#include "../base/logging.hpp"
+#include "../base/macros.hpp"
+
+#include "../std/target_os.hpp"
+#include "../std/fstream.hpp"
+#include "../std/iterator.hpp"
+
+using namespace storage;
+
/// Wildcard masks of files which can be updated through the downloader
/// (map data plus auxiliary resources: settings, fonts, skins, textures).
char const * gExtensionsToUpdate[] = {
  "*" DATA_FILE_EXTENSION, "*.txt", "*.bin", "*.skn", "*.ttf", "*.png"
};
+
+namespace update
+{
+ // we don't support files without name or without extension
+ bool SplitExtension(string const & file, string & name, string & ext)
+ {
+ // get extension
+ size_t const index = file.find_last_of('.');
+ if (index == string::npos || (index + 1) == file.size() || index == 0
+ || file == "." || file == "..")
+ {
+ name = file;
+ ext.clear();
+ return false;
+ }
+ ext = file.substr(index);
+ name = file.substr(0, index);
+ return true;
+ }
+
  /// Scans |dataDir| and writes DATA_UPDATE_FILE describing every file
  /// the downloader may update: cell data files (named by cell id) and
  /// common resource files.
  /// @return false if the directory contains no matching files.
  bool GenerateFilesList(string const & dataDir)
  {
    Platform & platform = GetPlatform();

    // collect all files matching the updatable masks
    Platform::FilesList files;
    for (size_t i = 0; i < ARRAY_SIZE(gExtensionsToUpdate); ++i)
    {
      Platform::FilesList otherFiles;
      platform.GetFilesInDir(dataDir, gExtensionsToUpdate[i], otherFiles);
      std::copy(otherFiles.begin(), otherFiles.end(), std::back_inserter(files));
    }
    { // remove minsk-pass from list
      Platform::FilesList::iterator minskPassIt = std::find(files.begin(), files.end(), "minsk-pass" DATA_FILE_EXTENSION);
      if (minskPassIt != files.end())
        files.erase(minskPassIt);
    }
    if (files.empty())
    {
      LOG(LERROR, ("Can't find any files at path", dataDir));
      return false;
    }
    else
    {
      LOG_SHORT(LINFO, ("Files count included in update file:", files.size()));
    }

    TDataFiles cellFiles;
    TCommonFiles commonFiles;
    string name, ext;
    int32_t level = -1;   // cell level; must be the same for all cell files
    uint16_t bits;
    for (Platform::FilesList::iterator it = files.begin(); it != files.end(); ++it)
    {
      uint64_t size = 0;
      CHECK( platform.GetFileSize(dataDir + *it, size), ());
      // sizes are stored as uint32_t below
      CHECK_EQUAL( size, static_cast<uint32_t>(size), ("We don't support files > 4gb", *it));
      if (SplitExtension(*it, name, ext))
      {
        // is it data cell file?
        if (ext == DATA_FILE_EXTENSION)
        {
          if (CountryCellId::IsCellId(name))
          {
            CountryCellId cellId = CountryCellId::FromString(name);
            pair<int64_t, int> bl = cellId.ToBitsAndLevel();
            if (level < 0)
              level = bl.second;
            CHECK_EQUAL( level, bl.second, ("Data files with different level?", *it) );
            bits = static_cast<uint16_t>(bl.first);
            // round-trip check of the cell id encoding
            CHECK_EQUAL( name, CountryCellId::FromBitsAndLevel(bits, level).ToString(), (name));
            cellFiles.push_back(make_pair(bits, static_cast<uint32_t>(size)));
          }
          else
          {
            commonFiles.push_back(make_pair(*it, static_cast<uint32_t>(size)));
          }
        }
        else
        {
          commonFiles.push_back(make_pair(*it, static_cast<uint32_t>(size)));
        }
      }
    }

    SaveTiles(dataDir + DATA_UPDATE_FILE, level, cellFiles, commonFiles);

    LOG_SHORT(LINFO, ("Created update file with", cellFiles.size(), "cell data files and",
                      commonFiles.size(), "other files"));

    return true;
  }
+} // namespace update
diff --git a/generator/update_generator.hpp b/generator/update_generator.hpp
new file mode 100644
index 0000000000..d7dad18ccb
--- /dev/null
+++ b/generator/update_generator.hpp
@@ -0,0 +1,8 @@
+#pragma once
+
+#include "../std/string.hpp"
+
+namespace update
+{
+ bool GenerateFilesList(string const & dataDir);
+} // namespace update
diff --git a/generator/world_map_generator.hpp b/generator/world_map_generator.hpp
new file mode 100644
index 0000000000..27ad626acd
--- /dev/null
+++ b/generator/world_map_generator.hpp
@@ -0,0 +1,201 @@
+#pragma once
+
+#include "feature_merger.hpp"
+
+#include "../defines.hpp"
+
+#include "../base/logging.hpp"
+
+#include "../indexer/classificator.hpp"
+#include "../indexer/feature.hpp"
+#include "../indexer/feature_visibility.hpp"
+#include "../indexer/point_to_int64.hpp"
+
+#include "../std/map.hpp"
+#include "../std/vector.hpp"
+#include "../std/iostream.hpp"
+#include "../std/scoped_ptr.hpp"
+#include "../std/unordered_map.hpp"
+
+
namespace m2
{
  // Hash for m2::PointD so it can be used as an unordered_map key:
  // reuses the point-to-int64 packing as the hash value.
  inline size_t hash_value(m2::PointD const & pt)
  {
    return static_cast<size_t>(PointToInt64(pt.x, pt.y));
  }
}
+
/// Collects features visible at low zoom into a separate World file and
/// merges fragmented line features (coastlines, country borders) into
/// longer lines / closed areas before emitting them.
template <class FeatureOutT>
class WorldMapGenerator
{
  /// if NULL, separate world data file is not generated
  scoped_ptr<FeatureOutT> m_worldBucket;
  /// features visible before or at this scale level will go to World map
  int m_maxWorldScale;
  bool m_mergeCoastlines;

  size_t m_mergedCounter;
  size_t m_areasCounter;

  // open line features waiting for merge, grouped by type and keyed
  // by their first point (so a feature ending there can be extended)
  typedef unordered_map<m2::PointD, FeatureBuilder1Merger> FeaturesContainerT;
  typedef map<uint32_t, FeaturesContainerT> TypesContainerT;
  TypesContainerT m_features;

private:
  /// If the merged line became closed, emits it as an area feature.
  /// @return true when the feature was emitted
  bool EmitAreaFeature(FeatureBuilder1Merger & fbm)
  {
    if (fbm.FirstPoint() == fbm.LastPoint())
    {
      fbm.SetAreaSafe();
      (*m_worldBucket)(fbm);
      ++m_areasCounter;
      return true;
    }
    else return false;
  }

  /// scans all features and tries to merge them with each other
  /// @return true if one feature was merged
  bool ReMergeFeatures(FeaturesContainerT & features)
  {
    for (FeaturesContainerT::iterator base = features.begin(); base != features.end(); ++base)
    {
      // a feature starting where this one ends can be appended to it
      FeaturesContainerT::iterator found = features.find(base->second.LastPoint());
      if (found != features.end())
      {
        CHECK(found != base, ());
        base->second.AppendFeature(found->second);
        features.erase(found);
        ++m_mergedCounter;

        if (EmitAreaFeature(base->second))
          features.erase(base);
        // restart iteration - the container was just modified
        return true;
      }
    }
    return false;
  }

  /// Appends any already stored feature that continues |fbm|, then either
  /// emits it (closed ring) or stores it for further merging.
  void TryToMerge(FeatureBuilder1Merger & fbm)
  {
    FeaturesContainerT & container = m_features[fbm.KeyType()];
    FeaturesContainerT::iterator found = container.find(fbm.LastPoint());
    if (found != container.end())
    {
      fbm.AppendFeature(found->second);
      container.erase(found);
      ++m_mergedCounter;
    }

    if (!EmitAreaFeature(fbm))
    {
      pair<FeaturesContainerT::iterator, bool> result = container.insert(make_pair(fbm.FirstPoint(), fbm));
      // if we found feature with the same starting point, emit it directly
      if (!result.second)
      {
        LOG(LWARNING, ("Found features with common first point, points counts are:",
                      result.first->second.GetPointsCount(), fbm.GetPointsCount()));
        (*m_worldBucket)(fbm);
      }
    }
  }

  //struct FeatureTypePrinter
  //{
  //  void operator()(uint32_t type) const
  //  {
  //    cout << classif().GetFullObjectName(type) << ".";
  //  }
  //};

  vector<uint32_t> m_MergeTypes;   // sorted classificator types subject to merging

public:
  WorldMapGenerator(int maxWorldScale, bool mergeCoastlines,
                    typename FeatureOutT::InitDataType featureOutInitData)
  : m_maxWorldScale(maxWorldScale), m_mergeCoastlines(mergeCoastlines),
    m_mergedCounter(0), m_areasCounter(0)
  {
    // negative scale disables World file generation entirely
    if (maxWorldScale >= 0)
      m_worldBucket.reset(new FeatureOutT(WORLD_FILE_NAME, featureOutInitData));

    // fill vector with types that need to be merged
    static size_t const MAX_TYPES_IN_PATH = 3;
    char const * arrMerge[][MAX_TYPES_IN_PATH] = {
      {"natural", "coastline", ""},
      {"boundary", "administrative", "2"}
    };

    for (size_t i = 0; i < ARRAY_SIZE(arrMerge); ++i)
    {
      vector<string> path;
      for (size_t j = 0; j < MAX_TYPES_IN_PATH; ++j)
      {
        string const strType(arrMerge[i][j]);
        if (!strType.empty())
          path.push_back(strType);
      }
      m_MergeTypes.push_back(classif().GetTypeByPath(path));

      ASSERT_NOT_EQUAL ( m_MergeTypes.back(), ftype::GetEmptyValue(), () );
    }

    sort(m_MergeTypes.begin(), m_MergeTypes.end());
  }

  ~WorldMapGenerator()
  {
    if (m_mergeCoastlines)
    {
      LOG(LINFO, ("Final merging of coastlines started"));
    }

    // try to merge all merged features with each other
    for (TypesContainerT::iterator it = m_features.begin(); it != m_features.end(); ++it)
    {
      LOG(LINFO, (it->second.size()));
      while (ReMergeFeatures(it->second))
      {}
      // emit all merged features (m_features is only filled when
      // m_worldBucket exists, so the dereference below is safe)
      for (FeaturesContainerT::iterator itF = it->second.begin(); itF != it->second.end(); ++itF)
        (*m_worldBucket)(itF->second);
    }

    if (m_mergeCoastlines)
    {
      LOG(LINFO, ("Final merging of coastlines ended"));
      LOG(LINFO, ("Merged features:", m_mergedCounter, "new areas created:", m_areasCounter));
    }
  }

  /// Offers one feature to the World map; mergeable line types go through
  /// the merging machinery, everything else is emitted directly.
  void operator()(FeatureBuilder1 const & fb)
  {
    if (m_worldBucket)
    {
      FeatureBase fBase = fb.GetFeatureBase();
      int const minScale = feature::MinDrawableScaleForFeature(fBase);
      CHECK_GREATER(minScale, -1, ("Non-drawable feature found!?"));

      if (m_maxWorldScale >= minScale)
      {
        if (m_mergeCoastlines && fBase.GetFeatureType() == FeatureBase::FEATURE_TYPE_LINE)
        {
          for (size_t i = 0; i < m_MergeTypes.size(); ++i)
          {
            if (fb.IsTypeExist(m_MergeTypes[i]))
            {
              FeatureBuilder1Merger fbm(fb);
              fbm.SetType(m_MergeTypes[i]);
              TryToMerge(fbm);
            }
          }
        }

        // emit the feature with its non-mergeable types (if any remain)
        FeatureBuilder1 fbm(fb);
        if (fbm.AssignType_SetDifference(m_MergeTypes))
          (*m_worldBucket)(fbm);
      }
    }
  }
};
diff --git a/generator/xml_element.cpp b/generator/xml_element.cpp
new file mode 100644
index 0000000000..0a92750f6d
--- /dev/null
+++ b/generator/xml_element.cpp
@@ -0,0 +1,82 @@
+#include "xml_element.hpp"
+
+#include "../coding/parse_xml.hpp"
+#include "../coding/reader.hpp"
+#include "../std/stdio.hpp"
+
+#include "../std/algorithm.hpp"
+
// Returns true if |name| is one of the top-level tags registered via SetTags().
bool BaseOSMParser::is_our_tag(string const & name)
{
  return (find(m_tags.begin(), m_tags.end(), name) != m_tags.end());
}
+
// Called by the XML parser on every opening tag. Accepts a tag when it
// is one of the registered tags, or when the current depth is 2 (i.e.
// the tag is a direct child of an interesting top-level element);
// returning false makes the parser skip the element.
bool BaseOSMParser::Push(string const & name)
{
  if (!is_our_tag(name) && (m_depth != 2))
    return false;

  ++m_depth;

  if (m_depth == 1)
  {
    // root element - nothing is collected for it
    m_current = 0;
  }
  else if (m_depth == 2)
  {
    // interesting top-level element - reuse the persistent m_element
    m_current = &m_element;
    m_current->parent = 0;
  }
  else
  {
    // nested element - append as a child of the current one
    m_current->childs.push_back(XMLElement());
    m_current->childs.back().parent = m_current;
    m_current = &m_current->childs.back();
  }

  if (m_depth >= 2)
    m_current->name = name;
  return true;
}
+
// Stores one attribute of the element currently being parsed.
// Attributes of the root element (m_current == 0) are ignored.
void BaseOSMParser::AddAttr(string const & name, string const & value)
{
  if (m_current)
    m_current->attrs[name] = value;
}
+
// Called on every closing tag. When an interesting top-level element
// (depth 2) is fully read, hands it to the derived parser and resets
// the reusable storage.
void BaseOSMParser::Pop(string const &)
{
  --m_depth;

  if (m_depth >= 2)
    m_current = m_current->parent;

  else if (m_depth == 1)
  {
    EmitElement(m_current);
    m_current->Clear();
  }
}
+
+
// Reader adapter feeding the XML parser from standard input.
struct StdinReader
{
  // Reads up to |bufferSize| bytes; returns the number actually read.
  size_t Read(char * buffer, size_t bufferSize)
  {
    return fread(buffer, sizeof(char), bufferSize, stdin);
  }
};
+
+
// Parses the OSM XML stream from stdin with the given parser.
void ParseXMLFromStdIn(BaseOSMParser & parser)
{
  StdinReader reader;
  ParseXML(reader, parser);
}
+
// Parses OSM XML data from the given file reader.
void ParseXMLFromFile(FileReader const & reader, BaseOSMParser & parser)
{
  ReaderSource<FileReader> src(reader);
  ParseXML(src, parser);
}
diff --git a/generator/xml_element.hpp b/generator/xml_element.hpp
new file mode 100644
index 0000000000..966c0943bf
--- /dev/null
+++ b/generator/xml_element.hpp
@@ -0,0 +1,49 @@
+#pragma once
+#include "../coding/file_reader.hpp"
+#include "../std/string.hpp"
+#include "../std/vector.hpp"
+#include "../std/map.hpp"
+
/// One parsed XML element of the OSM input: tag name, attributes and
/// nested children, linked to its parent for upward navigation.
struct XMLElement
{
  string name;                 // tag name ("node", "way", "tag", ...)
  map<string, string> attrs;   // attribute name -> value
  vector<XMLElement> childs;   // directly nested elements
  XMLElement * parent;         // 0 for a top-level element

  // Fix: the implicit default constructor left |parent| uninitialized
  // (e.g. for the default-constructed m_element member of BaseOSMParser);
  // initialize it explicitly so every element starts in a defined state.
  XMLElement() : parent(0) {}

  // Resets the element for reuse without releasing the object itself.
  void Clear()
  {
    name.clear();
    attrs.clear();
    childs.clear();
    parent = 0;
  }
};
+
+class BaseOSMParser
+{
+ XMLElement m_element;
+ XMLElement * m_current;
+
+ size_t m_depth;
+
+ vector<string> m_tags;
+ bool is_our_tag(string const & name);
+
+public:
+ BaseOSMParser() : m_current(0), m_depth(0) {}
+
+ template <size_t N> void SetTags(char const * (&arr)[N]) { m_tags.assign(&arr[0], &arr[N]); }
+
+ bool Push(string const & name);
+ void AddAttr(string const & name, string const & value);
+ void Pop(string const &);
+ void CharData(string const &) {}
+
+protected:
+ virtual void EmitElement(XMLElement * p) = 0;
+};
+
+void ParseXMLFromStdIn(BaseOSMParser & parser);
+
+void ParseXMLFromFile(FileReader const & reader, BaseOSMParser & parser);