GEDLIB  1.0
ring_ml.ipp
Go to the documentation of this file.
1 /***************************************************************************
2 * *
3 * Copyright (C) 2018 by David B. Blumenthal *
4 * *
5 * This file is part of GEDLIB. *
6 * *
7 * GEDLIB is free software: you can redistribute it and/or modify it *
8 * under the terms of the GNU Lesser General Public License as published *
9 * by the Free Software Foundation, either version 3 of the License, or *
10 * (at your option) any later version. *
11 * *
12 * GEDLIB is distributed in the hope that it will be useful, *
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
15 * GNU Lesser General Public License for more details. *
16 * *
17 * You should have received a copy of the GNU Lesser General Public *
18 * License along with GEDLIB. If not, see <http://www.gnu.org/licenses/>. *
19 * *
20 ***************************************************************************/
21 
27 #ifndef SRC_METHODS_RING_ML_IPP_
28 #define SRC_METHODS_RING_ML_IPP_
29 
30 namespace ged {
31 
32 // === Destructors and constructors. ===
33 template<class UserNodeLabel, class UserEdgeLabel>
34 RingML<UserNodeLabel, UserEdgeLabel>::
35 ~RingML() {}
36 
37 template<class UserNodeLabel, class UserEdgeLabel>
38 RingML<UserNodeLabel, UserEdgeLabel>::
39 RingML(const GEDData<UserNodeLabel, UserEdgeLabel> & ged_data) :
40 MLBasedMethod<UserNodeLabel, UserEdgeLabel>(ged_data),
41 rings_(),
42 led_method_{LSAPE_OPTIMAL},
43 sort_method_{COUNTING},
44 use_topological_features_{true},
45 use_global_features_{true},
46 num_layers_{undefined()},
47 global_features_() {}
48 
49 // === Definitions of member functions inherited from MLBasedMethod. ===
50 template<class UserNodeLabel, class UserEdgeLabel>
51 void
53 ml_init_graph_(const GEDGraph & graph) {
54  build_rings_(graph);
55 }
56 
57 template<class UserNodeLabel, class UserEdgeLabel>
58 void
61  led_method_ = LSAPE_OPTIMAL;
62  sort_method_ = COUNTING;
63  use_topological_features_ = true;
64  use_global_features_ = true;
65 }
66 
67 template<class UserNodeLabel, class UserEdgeLabel>
68 std::string
71  return "[--led-method <arg>] [--sort-method <arg>] [--topological-features <arg>] [--global-features <arg>]";
72 }
73 
74 template<class UserNodeLabel, class UserEdgeLabel>
75 bool
77 ml_parse_option_(const std::string & option, const std::string & arg) {
78  if (option == "led-method") {
79  if (arg == "LSAPE_OPTIMAL") {
80  led_method_ = LSAPE_OPTIMAL;
81  }
82  else if (arg == "LSAPE_GREEDY") {
83  led_method_ = LSAPE_GREEDY;
84  }
85  else if (arg == "GAMMA") {
86  led_method_ = GAMMA;
87  }
88  else {
89  throw Error(std::string("Invalid argument \"") + arg + "\" for option led-method. Usage: options = \"[--led-method LSAPE_OPTIMAL|LSAPE_GREEDY|GAMMA] [...]\"");
90  }
91  return true;
92  }
93  else if (option == "sort-method") {
94  if (arg == "COUNTING") {
95  sort_method_ = COUNTING;
96  }
97  else if (arg == "STD") {
98  sort_method_ = STD;
99  }
100  else {
101  throw Error(std::string("Invalid argument \"") + arg + "\" for option sort-method. Usage: options = \"[--sort-method COUNTING|STD] [...]\"");
102  }
103  return true;
104  }
105  else if (option == "topological-features") {
106  if (arg == "TRUE") {
107  use_topological_features_ = true;
108  }
109  else if (arg == "FALSE") {
110  use_topological_features_ = false;
111  }
112  else {
113  throw Error(std::string("Invalid argument \"") + arg + "\" for option topological-features. Usage: options = \"[--topological-features TRUE|FALSE] [...]\"");
114  }
115  return true;
116  }
117  else if (option == "global-features") {
118  if (arg == "TRUE") {
119  use_global_features_ = true;
120  }
121  else if (arg == "FALSE") {
122  use_global_features_ = false;
123  }
124  else {
125  throw Error(std::string("Invalid argument \"") + arg + "\" for option global-features. Usage: options = \"[--global-features TRUE|FALSE] [...]\"");
126  }
127  return true;
128  }
129  return false;
130 
131 }
132 
133 template<class UserNodeLabel, class UserEdgeLabel>
134 void
136 ml_init_feature_variables_(const GEDGraph & g, const GEDGraph & h, std::size_t num_threads) {
137  if (use_global_features_) {
138  global_features_.clear();
139  global_features_.push_back(static_cast<double>(g.num_nodes()));
140  global_features_.push_back(static_cast<double>(h.num_nodes()));
141  global_features_.push_back(this->ged_data_.mean_node_subs_cost(g, h));
142  global_features_.push_back(this->ged_data_.mean_node_del_cost(g));
143  global_features_.push_back(this->ged_data_.mean_node_ins_cost(h));
144  global_features_.push_back(static_cast<double>(g.num_edges()));
145  global_features_.push_back(static_cast<double>(h.num_edges()));
146  global_features_.push_back(this->ged_data_.mean_edge_subs_cost(g, h));
147  global_features_.push_back(this->ged_data_.mean_edge_del_cost(g));
148  global_features_.push_back(this->ged_data_.mean_edge_ins_cost(h));
149  }
150 }
151 
152 template<class UserNodeLabel, class UserEdgeLabel>
153 void
155 ml_populate_substitution_feature_vector_(const GEDGraph & g, const GEDGraph & h, GEDGraph::NodeID i, GEDGraph::NodeID k, std::vector<double> & feature_vector) {
156  feature_vector.clear();
157  add_global_features_(feature_vector);
158  const Ring_ & ring_i = rings_.at(g.id()).at(i);
159  const Ring_ & ring_k = rings_.at(h.id()).at(k);
160  for (std::size_t level{0}; level < num_layers_; level++) {
161  add_layer_substitution_features_(ring_i, ring_k, level, feature_vector);
162  }
163 }
164 
165 template<class UserNodeLabel, class UserEdgeLabel>
166 void
168 ml_populate_deletion_feature_vector_(const GEDGraph & g, GEDGraph::NodeID i, std::vector<double> & feature_vector) {
169  feature_vector.clear();
170  add_global_features_(feature_vector);
171  const Ring_ & ring_i = rings_.at(g.id()).at(i);
172  for (std::size_t level{0}; level < num_layers_; level++) {
173  add_layer_deletion_features_(ring_i, level, feature_vector);
174  }
175 }
176 
177 template<class UserNodeLabel, class UserEdgeLabel>
178 void
180 ml_populate_insertion_feature_vector_(const GEDGraph & h, GEDGraph::NodeID k, std::vector<double> & feature_vector) {
181  feature_vector.clear();
182  add_global_features_(feature_vector);
183  const Ring_ & ring_k = rings_.at(h.id()).at(k);
184  for (std::size_t level{0}; level < num_layers_; level++) {
185  add_layer_deletion_features_(ring_k, level, feature_vector);
186  }
187 }
188 
189 template<class UserNodeLabel, class UserEdgeLabel>
190 void
193  if (this->num_features_ == undefined()) {
194  return;
195  }
196  std::size_t num_local_features{this->num_features_};
197  if (use_global_features_) {
198  num_local_features -= 10;
199  }
200  if (use_topological_features_) {
201  num_layers_ = num_local_features / 6;
202  }
203  else {
204  num_layers_ = num_local_features / 3;
205  }
206 }
207 
208 template<class UserNodeLabel, class UserEdgeLabel>
209 std::size_t
212  set_num_layers_();
213  std::size_t num_features{num_layers_};
214  if (use_topological_features_) {
215  num_features *= 6;
216  }
217  else {
218  num_features *= 3;
219  }
220  if (use_global_features_) {
221  num_features += 10;
222  }
223  return num_features;
224 }
225 
// === Definition of private struct Layer_. ===
227 template<class UserNodeLabel, class UserEdgeLabel>
229 Layer_ ::
230 Layer_(std::size_t level) :
231 level{level},
232 node_labels(),
233 inner_edge_labels(),
234 outer_edge_labels() {}
235 
236 // === Definition of private struct Ring_. ===
237 template<class UserNodeLabel, class UserEdgeLabel>
239 Ring_ ::
240 Ring_() :
241 layers() {}
242 
243 // === Definitions of helper member functions. ===
244 template<class UserNodeLabel, class UserEdgeLabel>
245 void
247 build_rings_(const GEDGraph & graph) {
248  rings_[graph.id()] = NodeRingMap_();
249  for (auto node = graph.nodes().first; node != graph.nodes().second; node++) {
250  build_ring_(graph, *node, rings_.at(graph.id()));
251  }
252 }
253 
254 template<class UserNodeLabel, class UserEdgeLabel>
255 void
257 build_ring_(const GEDGraph & graph, GEDGraph::NodeID root, NodeRingMap_ & rings) {
258  std::map<GEDGraph::NodeID, int> distance_to_root;
259  for (auto node = graph.nodes().first; node != graph.nodes().second; node++) {
260  distance_to_root[*node] = -1;
261  }
262  distance_to_root[root] = 0;
263 
264  std::map<GEDGraph::EdgeID, bool> discovered_edge;
265  for (auto edge = graph.edges().first; edge != graph.edges().second; edge++) {
266  discovered_edge[*edge] = false;
267  }
268 
269  Layer_ current_layer(0);
270  std::queue<GEDGraph::NodeID> queue;
271  queue.push(root);
272  while (not queue.empty()) {
273  GEDGraph::NodeID current_node{queue.front()};
274  queue.pop();
275  if (static_cast<int>(current_layer.level) < distance_to_root.at(current_node)) {
276  if (led_method_ == GAMMA) {
277  if (sort_method_ == COUNTING) {
278  util::counting_sort(current_layer.node_labels.begin(), current_layer.node_labels.end());
279  util::counting_sort(current_layer.inner_edge_labels.begin(), current_layer.inner_edge_labels.end());
280  util::counting_sort(current_layer.outer_edge_labels.begin(), current_layer.outer_edge_labels.end());
281  }
282  else {
283  std::sort(current_layer.node_labels.begin(), current_layer.node_labels.end());
284  std::sort(current_layer.inner_edge_labels.begin(), current_layer.inner_edge_labels.end());
285  std::sort(current_layer.outer_edge_labels.begin(), current_layer.outer_edge_labels.end());
286  }
287  }
288  rings[root].layers.push_back(current_layer);
289  current_layer = Layer_(current_layer.level + 1);
290  }
291  current_layer.node_labels.push_back(graph.get_node_label(current_node));
292  for (auto edge = graph.incident_edges(current_node).first; edge != graph.incident_edges(current_node).second; edge++) {
293  GEDGraph::NodeID next_node{graph.head(*edge)};
294  if (distance_to_root.at(next_node) == -1) {
295  distance_to_root[next_node] = current_layer.level + 1;
296  if (current_layer.level < num_layers_) {
297  queue.push(next_node);
298  }
299  }
300  if (not discovered_edge.at(*edge)) {
301  discovered_edge[*edge] = true;
302  if (distance_to_root.at(current_node) == distance_to_root.at(next_node)) {
303  current_layer.inner_edge_labels.push_back(graph.get_edge_label(*edge));
304  }
305  else if (distance_to_root.at(current_node) < distance_to_root.at(next_node)) {
306  current_layer.outer_edge_labels.push_back(graph.get_edge_label(*edge));
307  }
308  else {
309  throw Error(std::string("Error when building ring rooted at ") + std::to_string(root) +
310  " for graph " + std::to_string(graph.id()) + ": dist(" +
311  std::to_string(current_node) +") = " + std::to_string(distance_to_root.at(current_node)) +
312  " > dist(" + std::to_string(next_node) +") = " + std::to_string(distance_to_root.at(next_node)));
313  }
314  }
315  }
316  }
317  if (led_method_ == GAMMA) {
318  if (sort_method_ == COUNTING) {
319  util::counting_sort(current_layer.node_labels.begin(), current_layer.node_labels.end());
320  util::counting_sort(current_layer.inner_edge_labels.begin(), current_layer.inner_edge_labels.end());
321  util::counting_sort(current_layer.outer_edge_labels.begin(), current_layer.outer_edge_labels.end());
322  }
323  else {
324  std::sort(current_layer.node_labels.begin(), current_layer.node_labels.end());
325  std::sort(current_layer.inner_edge_labels.begin(), current_layer.inner_edge_labels.end());
326  std::sort(current_layer.outer_edge_labels.begin(), current_layer.outer_edge_labels.end());
327  }
328  }
329  rings[root].layers.push_back(current_layer);
330 }
331 
332 template<class UserNodeLabel, class UserEdgeLabel>
333 void
335 set_num_layers_() {
336  num_layers_ = 0;
337  for (auto graph_rings_pair = rings_.begin(); graph_rings_pair != rings_.end(); graph_rings_pair++) {
338  for (auto ring = graph_rings_pair->second.begin(); ring != graph_rings_pair->second.end(); ring++) {
339  num_layers_ = std::max(num_layers_, ring->second.layers.size());
340  }
341  }
342 }
343 
344 template<class UserNodeLabel, class UserEdgeLabel>
345 void
347 add_global_features_(std::vector<double> & feature_vector) const {
348  for (auto feature = global_features_.begin(); feature != global_features_.end(); feature++) {
349  feature_vector.push_back(*feature);
350  }
351 }
352 
353 template<class UserNodeLabel, class UserEdgeLabel>
354 void
356 add_layer_substitution_features_(const Ring_ & ring_i, const Ring_ & ring_k, std::size_t level, std::vector<double> & feature_vector) const {
357  if ((ring_i.layers.size() > level) and (ring_k.layers.size() > level)) {
358  add_layer_features_(ring_i.layers.at(level), ring_k.layers.at(level), feature_vector);
359  }
360  else if (ring_i.layers.size() > level) {
361  add_layer_features_(ring_i.layers.at(level), Layer_(0), feature_vector);
362  }
363  else if (ring_k.layers.size() > level) {
364  add_layer_features_(Layer_(0), ring_k.layers.at(level), feature_vector);
365  }
366  else {
367  std::size_t num_layer_features = use_topological_features_ ? 6 : 3;
368  for (std::size_t layer_feature{0}; layer_feature < num_layer_features; layer_feature++) {
369  feature_vector.push_back(0.0);
370  }
371  }
372 }
373 
374 template<class UserNodeLabel, class UserEdgeLabel>
375 void
377 add_layer_deletion_features_(const Ring_ & ring, std::size_t level, std::vector<double> & feature_vector) const {
378  if (ring.layers.size() > level) {
379  add_layer_features_(ring.layers.at(level), Layer_(0), feature_vector);
380  }
381  else {
382  std::size_t num_layer_features = use_topological_features_ ? 6 : 3;
383  for (std::size_t layer_feature{0}; layer_feature < num_layer_features; layer_feature++) {
384  feature_vector.push_back(0.0);
385  }
386  }
387 }
388 
389 template<class UserNodeLabel, class UserEdgeLabel>
390 void
392 add_layer_insertion_features_(const Ring_ & ring, std::size_t level, std::vector<double> & feature_vector) const {
393  if (ring.layers.size() > level) {
394  add_layer_features_(Layer_(0), ring.layers.at(level), feature_vector);
395  }
396  else {
397  std::size_t num_layer_features = use_topological_features_ ? 6 : 3;
398  for (std::size_t layer_feature{0}; layer_feature < num_layer_features; layer_feature++) {
399  feature_vector.push_back(0.0);
400  }
401  }
402 }
403 
404 template<class UserNodeLabel, class UserEdgeLabel>
405 void
407 add_layer_features_(const Layer_ & lhs, const Layer_ & rhs, std::vector<double> & feature_vector) const {
408 
409  if (use_topological_features_) {
410  feature_vector.push_back(static_cast<double>(lhs.node_labels.size() - rhs.node_labels.size()));
411  feature_vector.push_back(static_cast<double>(lhs.inner_edge_labels.size() - rhs.inner_edge_labels.size()));
412  feature_vector.push_back(static_cast<double>(lhs.outer_edge_labels.size() - rhs.outer_edge_labels.size()));
413  }
414 
415  switch (led_method_) {
416  case GAMMA:
417  feature_vector.push_back(gamma_multiset_cost_(lhs.node_labels, rhs.node_labels, true));
418  feature_vector.push_back(gamma_multiset_cost_(lhs.inner_edge_labels, rhs.inner_edge_labels, false));
419  feature_vector.push_back(gamma_multiset_cost_(lhs.outer_edge_labels, rhs.outer_edge_labels, false));
420  break;
421  default:
422  feature_vector.push_back(lsape_multiset_cost_(lhs.node_labels, rhs.node_labels, true));
423  feature_vector.push_back(lsape_multiset_cost_(lhs.inner_edge_labels, rhs.inner_edge_labels, false));
424  feature_vector.push_back(lsape_multiset_cost_(lhs.outer_edge_labels, rhs.outer_edge_labels, false));
425  break;
426  }
427 }
428 
429 template<class UserNodeLabel, class UserEdgeLabel>
430 double
432 lsape_multiset_cost_(const std::vector<LabelID> & lhs, const std::vector<LabelID> & rhs, bool node_labels) const {
433 
434  if ((lhs.size() == 0) and (rhs.size() == 0)) {
435  return 0.0;
436  }
437 
438  if ((lhs.size() > 0) and (rhs.size() == 0)) {
439  double cost{0.0};
440  for (std::size_t row{0}; row < lhs.size(); row++) {
441  if (node_labels) {
442  cost += this->ged_data_.node_cost(lhs.at(row), dummy_label());
443  }
444  else {
445  cost += this->ged_data_.edge_cost(lhs.at(row), dummy_label());
446  }
447  }
448  return cost;
449  }
450 
451  if ((lhs.size() == 0) and (rhs.size() > 0)) {
452  double cost{0.0};
453  for (std::size_t col{0}; col < rhs.size(); col++) {
454  if (node_labels) {
455  cost += this->ged_data_.node_cost(dummy_label(), rhs.at(col));
456  }
457  else {
458  cost += this->ged_data_.edge_cost(dummy_label(), rhs.at(col));
459  }
460  }
461 
462  return cost;
463  }
464 
465  DMatrix problem(lhs.size() + 1, rhs.size() + 1, 0.0);
466  // Collect deletion costs.
467  for (std::size_t row{0}; row < lhs.size(); row++) {
468  if (node_labels) {
469  problem(row, rhs.size()) = this->ged_data_.node_cost(lhs.at(row), dummy_label());
470  }
471  else {
472  problem(row, rhs.size()) = this->ged_data_.edge_cost(lhs.at(row), dummy_label());
473  }
474  }
475 
476  // Collect insertion costs.
477  for (std::size_t col{0}; col < rhs.size(); col++) {
478  if (node_labels) {
479  problem(lhs.size(), col) = this->ged_data_.node_cost(dummy_label(), rhs.at(col));
480  }
481  else {
482  problem(lhs.size(), col) = this->ged_data_.edge_cost(dummy_label(), rhs.at(col));
483  }
484  }
485 
486  // Collect substitution costs.
487  for (std::size_t row{0}; row < lhs.size(); row++) {
488  for (std::size_t col{0}; col < rhs.size(); col++) {
489  if (node_labels) {
490  problem(row, col) = this->ged_data_.node_cost(lhs.at(row), rhs.at(col));
491  }
492  else {
493  problem(row, col) = this->ged_data_.edge_cost(lhs.at(row), rhs.at(col));
494  }
495  }
496  }
497 
498  LSAPESolver problem_solver(&problem);
499  if (led_method_ == LSAPE_OPTIMAL) {
500  problem_solver.set_model(this->lsape_model_);
501  }
502  else {
503  problem_solver.set_greedy_method(this->greedy_method_);
504  }
505  problem_solver.solve();
506 
507  return problem_solver.minimal_cost();
508 }
509 
510 template<class UserNodeLabel, class UserEdgeLabel>
511 double
513 gamma_multiset_cost_(const std::vector<LabelID> & lhs, const std::vector<LabelID> & rhs, bool node_labels) const {
514  double cost{0.0};
515 
516  // Compute and add minimal edge insertion costs.
517  if (lhs.size() < rhs.size()) {
518  double avg_ins_cost{0.0};
519  for (auto label_rhs = rhs.begin(); label_rhs != rhs.end(); label_rhs++) {
520  if (node_labels) {
521  avg_ins_cost += this->ged_data_.node_cost(dummy_label(), *label_rhs);
522  }
523  else {
524  avg_ins_cost += this->ged_data_.edge_cost(dummy_label(), *label_rhs);
525  }
526  }
527  avg_ins_cost /= static_cast<double>(rhs.size());
528  cost += static_cast<double>(rhs.size() - lhs.size()) * avg_ins_cost;
529  }
530 
531  // Compute and add minimal edge deletion costs.
532  if (lhs.size() > rhs.size()) {
533  double avg_del_cost{0.0};
534  for (auto label_lhs = lhs.begin(); label_lhs != lhs.end(); label_lhs++) {
535  if (node_labels) {
536  avg_del_cost += this->ged_data_.node_cost(*label_lhs, dummy_label());
537  }
538  else {
539  avg_del_cost += this->ged_data_.edge_cost(*label_lhs, dummy_label());
540  }
541  }
542  avg_del_cost /= static_cast<double>(lhs.size());
543  cost += static_cast<double>(lhs.size() - rhs.size()) * avg_del_cost;
544  }
545 
546  // Compute minimal edge relabelling costs.
547  double avg_rel_cost{0.0};
548  std::size_t count_diff_labels{0};
549  for (auto label_lhs = lhs.begin(); label_lhs != lhs.end(); label_lhs++) {
550  for (auto label_rhs = rhs.begin(); label_rhs != rhs.end(); label_rhs++) {
551  if (*label_lhs != *label_rhs) {
552  count_diff_labels++;
553  if (node_labels) {
554  avg_rel_cost += this->ged_data_.node_cost(*label_lhs, *label_rhs);
555  }
556  else {
557  avg_rel_cost += this->ged_data_.edge_cost(*label_lhs, *label_rhs);
558  }
559  }
560  }
561  }
562  avg_rel_cost /= static_cast<double>(count_diff_labels);
563 
564  // Compute multiset intersection size.
565  std::size_t intersection_size{0};
566  auto label_lhs = lhs.begin();
567  auto label_rhs = rhs.begin();
568  while ((label_lhs != lhs.end()) and (label_rhs != rhs.end())) {
569  if (*label_lhs == *label_rhs) {
570  intersection_size++;
571  label_lhs++;
572  label_rhs++;
573  }
574  else if (*label_lhs < *label_rhs) {
575  label_lhs++;
576  }
577  else {
578  label_rhs++;
579  }
580  }
581 
582  std::size_t gamma(std::min(lhs.size(), rhs.size()) - intersection_size);
583  if (gamma > 0) {
584  cost += static_cast<double>(gamma) * avg_rel_cost;
585  }
586 
587  return cost;
588 }
589 
590 }
591 
592 #endif /* SRC_METHODS_RING_ML_IPP_ */
void set_model(const Model &model)
Makes the solver use a specific model for optimal solving.
virtual void ml_init_for_num_features_() final
Initializes the derived class for running with feature vectors of size ged::MLBasedMethod::num_featur...
Definition: ring_ml.ipp:192
std::size_t num_nodes() const
Returns the number of nodes.
Definition: ged_graph.ipp:211
virtual void ml_populate_substitution_feature_vector_(const GEDGraph &g, const GEDGraph &h, GEDGraph::NodeID i, GEDGraph::NodeID k, std::vector< double > &feature_vector) final
Computes substitution feature vector.
Definition: ring_ml.ipp:155
This class solves LSAPE instances by calling the library lsape available at https://bougleux.users.greyc.fr/lsape/.
virtual std::string ml_valid_options_string_() const final
Returns string of all valid options that are not among the ones shared by all derived classes of ged:...
Definition: ring_ml.ipp:70
virtual void ml_set_default_options_() final
Sets all options that are not among the ones shared by all derived classes of ged::MLBasedMethod to d...
Definition: ring_ml.ipp:60
const GEDData< UserNodeLabel, UserEdgeLabel > & ged_data_
The data on which the method is run.
Definition: ged_method.hpp:124
NodeID head(EdgeID edge) const
Returns the head of an edge.
Definition: ged_graph.ipp:199
void solve(int num_solutions=1)
Solves the LSAPE problem instance.
virtual void ml_populate_deletion_feature_vector_(const GEDGraph &g, GEDGraph::NodeID i, std::vector< double > &feature_vector) final
Computes deletion feature vector.
Definition: ring_ml.ipp:168
virtual bool ml_parse_option_(const std::string &option, const std::string &arg) final
Parses one option that is not among the ones shared by all derived classes of ged::MLBasedMethod.
Definition: ring_ml.ipp:77
Uses ring structures for defining feature vectors for node edit operations.
Definition: ring_ml.hpp:48
void counting_sort(std::vector< LabelID >::iterator first, std::vector< LabelID >::iterator last)
Implementation of counting sort.
Definition: misc.ipp:111
LabelID get_node_label(NodeID node) const
Returns the label of a given node.
Definition: ged_graph.ipp:126
std::size_t num_edges() const
Returns the number of edges.
Definition: ged_graph.ipp:223
Runtime error class.
Definition: error.hpp:37
std::size_t num_features_
The size of the feature vectors.
LSAPESolver::Model lsape_model_
Specifies model for optimal LSAPE solver.
virtual void ml_init_graph_(const GEDGraph &graph) final
Initializes global variables for one graph.
Definition: ring_ml.ipp:53
LabelID get_edge_label(EdgeID edge) const
Returns the label of a given edge.
Definition: ged_graph.ipp:135
std::pair< incident_edge_iterator, incident_edge_iterator > incident_edges(NodeID node) const
Provides access to all incident edges of a node.
Definition: ged_graph.ipp:150
double minimal_cost() const
Returns the cost of the computed solutions.
std::pair< node_iterator, node_iterator > nodes() const
Provides access to all nodes in the graph.
Definition: ged_graph.ipp:205
The normalized input graphs used by GEDLIB. All labels are integers.
Definition: ged_graph.hpp:104
virtual void ml_populate_insertion_feature_vector_(const GEDGraph &h, GEDGraph::NodeID k, std::vector< double > &feature_vector) final
Computes insertion feature vector.
Definition: ring_ml.ipp:180
constexpr LabelID dummy_label()
Returns a dummy label.
constexpr std::size_t undefined()
Returns undefined size.
Global namespace for GEDLIB.
virtual std::size_t ml_get_num_features_() final
Returns the number of features.
Definition: ring_ml.ipp:211
LSAPESolver::GreedyMethod greedy_method_
Specifies method for greedy LSAPE solver.
std::size_t NodeID
Internally used vertex ID type.
Definition: ged_graph.hpp:108
void set_greedy_method(const GreedyMethod &greedy_method)
Makes the solver use a greedy method.
GraphID id() const
Returns the ID of the graph.
Definition: ged_graph.ipp:184
std::pair< edge_iterator, edge_iterator > edges() const
Provides access to all edge in the graph.
Definition: ged_graph.ipp:217
virtual void ml_init_feature_variables_(const GEDGraph &g, const GEDGraph &h, std::size_t num_threads) final
Initializes variables that are used for populating the feature vectors of assignments between two inp...
Definition: ring_ml.ipp:136