Gröbner basis project
Codebase for research into Gröbner basis computation
particular_orderings_atlas.cpp
#ifndef __PARTICULAR_ORDERINGS_CPP_
#define __PARTICULAR_ORDERINGS_CPP_

/*****************************************************************************\
* This file is part of DynGB.                                                 *
*                                                                             *
* DynGB is free software: you can redistribute it and/or modify               *
* it under the terms of the GNU General Public License as published by        *
* the Free Software Foundation, either version 2 of the License, or           *
* (at your option) any later version.                                         *
*                                                                             *
* DynGB is distributed in the hope that it will be useful,                    *
* but WITHOUT ANY WARRANTY; without even the implied warranty of              *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the               *
* GNU General Public License for more details.                                *
*                                                                             *
* You should have received a copy of the GNU General Public License           *
* along with DynGB.  If not, see <http://www.gnu.org/licenses/>.              *
\*****************************************************************************/

#include <cstring>
#include <iostream>  // for the diagnostic output in nonsingular()

#include "system_constants.hpp"

#include "goda.hpp"
#include "particular_orderings.hpp"

// memory managers for graded ordering data (see goda.hpp)
Grading_Order_Data_Allocator<WT_TYPE> * goda = nullptr;
Grading_Order_Data_Allocator<WGrevlex_Order_Data> * woda = nullptr;

void * WGrevlex_Order_Data::operator new(size_t size) {
  if (woda == nullptr) woda = new Grading_Order_Data_Allocator<WGrevlex_Order_Data>(size);
  WGrevlex_Order_Data * result = woda->get_new_block();
  return result;
}

void WGrevlex_Order_Data::operator delete(void *t) {
  woda->return_used_block(static_cast<WGrevlex_Order_Data *>(t));
}

bool Generic_Grevlex::first_larger(
  const Monomial & t, const Monomial & u
) const {
  DEG_TYPE dtk = 0;
  DEG_TYPE duk = 0;
  NVAR_TYPE n = t.num_vars();
  const EXP_TYPE * a = t.log();
  const EXP_TYPE * b = u.log();
  for (NVAR_TYPE k = 0; k < n; ++k)
    dtk += a[k];
  for (NVAR_TYPE k = 0; k < n; ++k)
    duk += b[k];
  bool searching = dtk == duk;
  NVAR_TYPE k = 0;
  for (/* */; searching and k < n; ++k) {
    dtk -= a[n - k - 1];
    duk -= b[n - k - 1];
    searching = dtk == duk;
  }
  //return k != t.num_vars() and k != u.num_vars() and dtk > duk;
  return dtk > duk;
}

bool Generic_Grevlex::first_smaller(
  const Monomial & t, const Monomial & u
) const {
  DEG_TYPE dtk = 0;
  DEG_TYPE duk = 0;
  NVAR_TYPE n = t.num_vars();
  const EXP_TYPE * a = t.log();
  const EXP_TYPE * b = u.log();
  for (NVAR_TYPE k = 0; k < n; ++k)
    dtk += a[k];
  for (NVAR_TYPE k = 0; k < n; ++k)
    duk += b[k];
  bool searching = dtk == duk;
  NVAR_TYPE k = 0;
  for (/* */; searching and k < n; ++k) {
    dtk -= a[n - k - 1];
    duk -= b[n - k - 1];
    searching = dtk == duk;
  }
  //return k != t.num_vars() and k != u.num_vars() and dtk < duk;
  return dtk < duk;
}

bool Generic_Grevlex::first_larger_than_multiple(
  const Monomial & t, const Monomial & u, const Monomial & v
) const {
  DEG_TYPE dtk = 0;
  DEG_TYPE duk = 0;
  DEG_TYPE dvk = 0;
  NVAR_TYPE n = t.num_vars();
  const EXP_TYPE * a = t.log();
  const EXP_TYPE * b = u.log();
  const EXP_TYPE * c = v.log();
  for (NVAR_TYPE k = 0; k < n; ++k)
    dtk += a[k];
  for (NVAR_TYPE k = 0; k < n; ++k)
    duk += b[k];
  for (NVAR_TYPE k = 0; k < n; ++k)
    dvk += c[k];
  bool searching = dtk == duk + dvk;
  NVAR_TYPE k = 0;
  for (/* */; searching and k < n; ++k) {
    dtk -= a[n - k - 1];
    duk -= b[n - k - 1];
    dvk -= c[n - k - 1];
    searching = dtk == duk + dvk;
  }
  //return k != t.num_vars() and k != u.num_vars() and dtk > duk + dvk;
  return dtk > duk + dvk;
}

Generic_Grevlex generic_grevlex;
Monomial_Ordering * generic_grevlex_ptr = &generic_grevlex;

WGrevlex::WGrevlex(NVAR_TYPE num_vars, WT_TYPE * wts, bool thorough) :
  n(num_vars), weights(wts), thorough_weighting(thorough)
{}

void WGrevlex::set_data(Monomial & t) const {
  t.set_ordering_data(nullptr);
}

bool WGrevlex::first_larger(
  const Monomial & t, const Monomial & u
) const {
  DEG_TYPE dtk = 0;
  DEG_TYPE duk = 0;
  NVAR_TYPE n = t.num_vars();
  const EXP_TYPE * a = t.log();
  const EXP_TYPE * b = u.log();
  for (NVAR_TYPE k = 0; k < n; ++k)
    dtk += a[k] * weights[k];
  for (NVAR_TYPE k = 0; k < n; ++k)
    duk += b[k] * weights[k];
  bool searching = dtk == duk;
  NVAR_TYPE k = 0;
  for (/* */; searching and k < n; ++k) {
    dtk -= a[n - k - 1] * weights[n - k - 1];
    duk -= b[n - k - 1] * weights[n - k - 1];
    searching = dtk == duk;
  }
  return dtk > duk;
}

bool WGrevlex::first_smaller(
  const Monomial & t, const Monomial & u
) const {
  DEG_TYPE dtk = 0;
  DEG_TYPE duk = 0;
  NVAR_TYPE n = t.num_vars();
  const EXP_TYPE * a = t.log();
  const EXP_TYPE * b = u.log();
  for (NVAR_TYPE k = 0; k < n; ++k)
    dtk += a[k] * weights[k];
  for (NVAR_TYPE k = 0; k < n; ++k)
    duk += b[k] * weights[k];
  bool searching = dtk == duk;
  NVAR_TYPE k = 0;
  for (/* */; searching and k < n; ++k) {
    dtk -= a[n - k - 1] * weights[n - k - 1];
    duk -= b[n - k - 1] * weights[n - k - 1];
    searching = dtk == duk;
  }
  return dtk < duk;
}

bool WGrevlex::first_larger_than_multiple(
  const Monomial & t, const Monomial & u, const Monomial & v
) const {
  DEG_TYPE dtk = 0;
  DEG_TYPE duk = 0;
  DEG_TYPE dvk = 0;
  NVAR_TYPE n = t.num_vars();
  const EXP_TYPE * a = t.log();
  const EXP_TYPE * b = u.log();
  const EXP_TYPE * c = v.log();
  for (NVAR_TYPE k = 0; k < n; ++k)
    dtk += a[k] * weights[k];
  for (NVAR_TYPE k = 0; k < n; ++k)
    duk += b[k] * weights[k];
  for (NVAR_TYPE k = 0; k < n; ++k)
    dvk += c[k] * weights[k];
  bool searching = dtk == duk + dvk;
  NVAR_TYPE k = 0;
  for (/* */; searching and k < n; ++k) {
    dtk -= a[n - k - 1] * weights[n - k - 1];
    duk -= b[n - k - 1] * weights[n - k - 1];
    dvk -= c[n - k - 1] * weights[n - k - 1];
    searching = dtk == duk + dvk;
  }
  return dtk > duk + dvk;
}

void Grevlex_Order_Data::assign_gradings(const Monomial & t) {
  DEG_TYPE value = 0;
  for (NVAR_TYPE l = 0; l < number_of_gradings; ++l) {
    value += t.degree(l);
    gradings[number_of_gradings-l-1] = value;
  }
}

Grevlex_Order_Data::Grevlex_Order_Data(const Monomial & t)
  : number_of_gradings(t.num_vars())
{
  const NVAR_TYPE n = number_of_gradings;
  if (goda == nullptr) goda = new Grading_Order_Data_Allocator<WT_TYPE>(n);
  gradings = goda->get_new_block();
  assign_gradings(t);
}

Grevlex_Order_Data::Grevlex_Order_Data(const Grevlex_Order_Data & other)
  : number_of_gradings(other.number_of_gradings)
{
  gradings = goda->get_new_block();
  memcpy(gradings, other.gradings, number_of_gradings*sizeof(DEG_TYPE));
}

Grevlex_Order_Data * Grevlex_Order_Data::clone() {
  return new Grevlex_Order_Data(*this);
}

Grevlex_Order_Data::~Grevlex_Order_Data() { goda->return_used_block(gradings); }

DEG_TYPE Grevlex_Order_Data::operator [] (NVAR_TYPE i) const {
  return gradings[i];
}

Grevlex_Ordering::Grevlex_Ordering(NVAR_TYPE number_of_variables)
  : n(number_of_variables)
{}

bool Grevlex_Ordering::first_larger(const Monomial & t, const Monomial & u) const
{
  bool still_tied = true;
  bool result = false;
  for (NVAR_TYPE k = 0; still_tied and k < n; ++k) {
    DEG_TYPE dtk = partial_degree(t, k);
    DEG_TYPE duk = partial_degree(u, k);
    if (dtk < duk)
      still_tied = false;
    else if (dtk > duk) {
      result = true;
      still_tied = false;
    }
  }
  return result;
}

bool Grevlex_Ordering::first_smaller(const Monomial & t, const Monomial & u) const
{
  bool still_tied = true;
  bool result = false;
  for (NVAR_TYPE k = 0; still_tied and k < n; ++k) {
    DEG_TYPE dtk = partial_degree(t, k);
    DEG_TYPE duk = partial_degree(u, k);
    if (dtk > duk)
      still_tied = false;
    else if (dtk < duk) {
      result = true;
      still_tied = false;
    }
  }
  return result;
}

bool Grevlex_Ordering::first_larger_than_multiple(
  const Monomial & t, const Monomial & u, const Monomial & v
) const {
  bool still_tied = true;
  bool result = false;
  for (NVAR_TYPE k = 0; still_tied and k < n; ++k) {
    DEG_TYPE dtk = partial_degree(t, k);
    DEG_TYPE duk = partial_degree(u, k);
    DEG_TYPE dvk = partial_degree(v, k);
    if (dtk < duk + dvk)
      still_tied = false;
    else if (dtk > duk + dvk) {
      result = true;
      still_tied = false;
    }
  }
  return result;
}

DEG_TYPE Grevlex_Ordering::partial_degree(
  const Monomial & t, NVAR_TYPE i
) const {
  return (*(static_cast<Grevlex_Order_Data *>(t.monomial_ordering_data())))[i];
}

DEG_TYPE Grevlex_Ordering::compute_ith_weight(
  const Monomial & t, NVAR_TYPE i
) const {
  DEG_TYPE result = 0;
  for (NVAR_TYPE k = 0; k < n - i; ++k)
    result += t.degree(k);
  return result;
}

void Grevlex_Ordering::set_data(Monomial & t) const {
  if (t.monomial_ordering_data() == nullptr)
    t.set_ordering_data(new Grevlex_Order_Data(t));
  else {
    (static_cast<Grevlex_Order_Data *>(t.monomial_ordering_data()))->assign_gradings(t);
  }
}

Lex_Ordering::Lex_Ordering(NVAR_TYPE number_of_variables) : n(number_of_variables)
{}

bool Lex_Ordering::first_larger(const Monomial & t, const Monomial & u) const {
  bool still_tied = true;
  bool result = false;
  const EXP_TYPE * a = t.log();
  const EXP_TYPE * b = u.log();
  for (NVAR_TYPE k = 0; still_tied and k < n; ++k) {
    if (a[k] < b[k])
      still_tied = false;
    else if (a[k] > b[k]) {
      result = true;
      still_tied = false;
    }
  }
  return result;
}

bool Lex_Ordering::first_smaller(const Monomial & t, const Monomial & u) const {
  bool still_tied = true;
  bool result = false;
  const EXP_TYPE * a = t.log();
  const EXP_TYPE * b = u.log();
  for (NVAR_TYPE k = 0; still_tied and k < n; ++k) {
    if (a[k] > b[k])
      still_tied = false;
    else if (a[k] < b[k]) {
      result = true;
      still_tied = false;
    }
  }
  return result;
}

bool Lex_Ordering::first_larger_than_multiple(
  const Monomial & t, const Monomial & u, const Monomial & v
) const {
  bool still_tied = true;
  bool result = false;
  const EXP_TYPE * a = t.log();
  const EXP_TYPE * b = u.log();
  const EXP_TYPE * c = v.log();
  for (NVAR_TYPE k = 0; still_tied and k < n; ++k) {
    if (a[k] < b[k] + c[k])
      still_tied = false;
    else if (a[k] > b[k] + c[k]) {
      result = true;
      still_tied = false;
    }
  }
  return result;
}

void WGrevlex_Order_Data::assign_gradings(const Monomial & t) {
  DEG_TYPE value = 0;
  const WT_TYPE * w
    = (
        static_cast<const CachedWGrevlex_Ordering *>(t.monomial_ordering())
      )->order_weights();
  const EXP_TYPE * a = t.log();
  for (NVAR_TYPE l = 0; l < number_of_gradings; ++l) {
    value += w[l] * a[l];
    gradings[number_of_gradings-l-1] = value;
  }
}

WGrevlex_Order_Data::WGrevlex_Order_Data(Monomial & t)
  : number_of_gradings(t.num_vars())
{
  const NVAR_TYPE n = number_of_gradings;
  if (goda == nullptr) goda = new Grading_Order_Data_Allocator<WT_TYPE>(n);
  gradings = goda->get_new_block();
  assign_gradings(t);
}

WGrevlex_Order_Data::WGrevlex_Order_Data(const WGrevlex_Order_Data & other)
  : number_of_gradings(other.number_of_gradings)
{
  gradings = goda->get_new_block();
  memcpy(gradings, other.gradings, number_of_gradings*sizeof(DEG_TYPE));
}

WGrevlex_Order_Data * WGrevlex_Order_Data::clone() {
  return new WGrevlex_Order_Data(*this);
}

WGrevlex_Order_Data::~WGrevlex_Order_Data() { goda->return_used_block(gradings); }

DEG_TYPE WGrevlex_Order_Data::operator [] (NVAR_TYPE i) const {
  return gradings[i];
}

CachedWGrevlex_Ordering::CachedWGrevlex_Ordering(
  NVAR_TYPE number_of_variables, WT_TYPE * w, bool thorough
) : n(number_of_variables), weights(w), fully_apply(thorough)
{}

int CachedWGrevlex_Ordering::cmp(const Monomial & t, const Monomial & u) const {
  int result = 0;
  for (NVAR_TYPE k = 0; result == 0 and k < n; ++k) {
    DEG_TYPE dtk = partial_degree(t, k);
    DEG_TYPE duk = partial_degree(u, k);
    // compare explicitly: if DEG_TYPE is unsigned, dtk - duk would wrap
    // around whenever dtk < duk
    if (dtk < duk) result = -1;
    else if (dtk > duk) result = 1;
  }
  return result;
}

bool CachedWGrevlex_Ordering::first_larger(const Monomial & t, const Monomial & u)
const {
  bool still_tied = true;
  bool result = false;
  for (NVAR_TYPE k = 0; still_tied and k < n; ++k) {
    DEG_TYPE dtk = partial_degree(t, k);
    DEG_TYPE duk = partial_degree(u, k);
    if (dtk < duk)
      still_tied = false;
    else if (dtk > duk) {
      result = true;
      still_tied = false;
    }
  }
  return result;
}

bool CachedWGrevlex_Ordering::first_smaller(const Monomial & t, const Monomial & u)
const {
  bool still_tied = true;
  bool result = false;
  for (NVAR_TYPE k = 0; still_tied and k < n; ++k) {
    DEG_TYPE dtk = partial_degree(t, k);
    DEG_TYPE duk = partial_degree(u, k);
    if (dtk > duk)
      still_tied = false;
    else if (dtk < duk) {
      result = true;
      still_tied = false;
    }
  }
  return result;
}

const WT_TYPE * CachedWGrevlex_Ordering::order_weights() const {
  return weights;
}

bool CachedWGrevlex_Ordering::first_larger_than_multiple(
  const Monomial & t, const Monomial & u, const Monomial & v
) const {
  bool still_tied = true;
  bool result = false;
  for (NVAR_TYPE k = 0; still_tied and k < n; ++k) {
    DEG_TYPE dtk = partial_degree(t, k);
    DEG_TYPE duk = partial_degree(u, k);
    DEG_TYPE dvk = partial_degree(v, k);
    if (dtk < duk + dvk)
      still_tied = false;
    else if (dtk > duk + dvk) {
      result = true;
      still_tied = false;
    }
  }
  return result;
}

DEG_TYPE CachedWGrevlex_Ordering::partial_degree(
  const Monomial & t, NVAR_TYPE i
) const {
  return t.monomial_ordering_data()->grading(i);
}

DEG_TYPE CachedWGrevlex_Ordering::compute_ith_weight(
  const Monomial & t, NVAR_TYPE i
) const {
  DEG_TYPE result = 0;
  const EXP_TYPE * a = t.log();
  if (i == 0)
    for (NVAR_TYPE k = 0; k < n; ++k)
      result += weights[k]*a[k];
  else
    for (NVAR_TYPE k = 0; k < n - i; ++k)
      if (fully_apply)
        result += weights[k]*a[k];
      else
        result += a[k];
  return result;
}

void CachedWGrevlex_Ordering::set_data(Monomial & t) const {
  if (t.monomial_ordering_data() == nullptr) {
    WGrevlex_Order_Data * new_data = new WGrevlex_Order_Data(t);
    t.set_ordering_data(new_data);
  }
  else
    (static_cast<WGrevlex_Order_Data *>(t.monomial_ordering_data()))
      ->assign_gradings(t);
}

const char * Nonsingular_Matrix_Ordering_Exception::what() const throw() {
  return "Singular matrix supplied for matrix ordering";
}

// checks that the matrix supplied for a matrix ordering is nonsingular,
// using fraction-free Gaussian elimination on a copy of the matrix
bool nonsingular(NVAR_TYPE m, NVAR_TYPE n, const WT_TYPE **A) {
  bool possibly = true;
  // first copy A
  long long ** M = new long long * [m];
  for (NVAR_TYPE i = 0; i < m; ++i) {
    M[i] = new long long [n];
    for (NVAR_TYPE j = 0; j < n; ++j)
      M[i][j] = A[i][j];
  }
  std::cout << m << ',' << n << std::endl;
  for (NVAR_TYPE i = 0; i < m; ++i) {
    for (NVAR_TYPE j = 0; j < n; ++j)
      std::cout << M[i][j] << ' ';
    std::cout << std::endl;
  }
  std::cout << std::endl;
  for (NVAR_TYPE i = 0; possibly and i < m; ++i) {
    // first find a nonzero element in this column
    bool searching = true;
    NVAR_TYPE j = i;
    for (/* */; searching and j < m; ++j)
      if (M[j][i] != 0) {
        searching = false;
        --j;
      }
    possibly = not searching;
    if (possibly and j < m) {
      // check if we need to swap rows
      if (j != i) {
        for (NVAR_TYPE k = i; k < n; ++k) {
          long long temp = M[j][k];
          M[j][k] = M[i][k];
          M[i][k] = temp;
        }
      }
      // clear out the rest of the column
      for (j = i + 1; j < m; ++j) {
        long long header = M[j][i];
        if (header != 0) {
          // eliminate the leading entry & adjust the rest of the row
          for (NVAR_TYPE k = i; k < n; ++k)
            M[j][k] = M[j][k] * M[i][i] - M[i][k] * header;
        }
      }
    }
  }
  std::cout << m << ',' << n << std::endl;
  for (NVAR_TYPE i = 0; i < m; ++i) {
    for (NVAR_TYPE j = 0; j < n; ++j)
      std::cout << M[i][j] << ' ';
    std::cout << std::endl;
  }
  std::cout << std::endl;
  // free memory
  for (NVAR_TYPE i = 0; i < m; ++i)
    delete [] M[i];
  delete [] M;
  // no need to check the main diagonal explicitly:
  // a zero pivot would already have set possibly to false
  return possibly;
}
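
// ---------------------------------------------------------------------------
// Illustration only (not from the DynGB sources): a minimal sketch of how
// nonsingular() might be called, assuming WT_TYPE from system_constants.hpp
// is an integral type.  The three rows below mimic a grevlex-style ordering
// matrix; they are linearly independent, so the check should return true.
static bool nonsingular_demo() {
  static const WT_TYPE row0[] = { 1, 1, 1 };   // total degree first
  static const WT_TYPE row1[] = { 0, 0, 1 };   // then reverse-lex tie-breakers
  static const WT_TYPE row2[] = { 0, 1, 0 };
  const WT_TYPE * rows[] = { row0, row1, row2 };
  return nonsingular(3, 3, rows);              // expected: true
}
// ---------------------------------------------------------------------------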

Matrix_Ordering::Matrix_Ordering(
  NVAR_TYPE rows, NVAR_TYPE cols, const WT_TYPE **data
) : m(rows), n(cols), W(data) {
  if (not nonsingular(m, n, W))
    throw Nonsingular_Matrix_Ordering_Exception();
}

bool Matrix_Ordering::first_larger(const Monomial & t, const Monomial & u) const
{
  bool result = false;
  bool searching = true;
  const EXP_TYPE * a = t.log();
  const EXP_TYPE * b = u.log();
  for (NVAR_TYPE i = 0; searching and i < m; ++i) {
    DEG_TYPE wt = 0;
    DEG_TYPE wu = 0;
    for (NVAR_TYPE j = 0; j < n; ++j) {
      wt += W[i][j] * a[j];
      wu += W[i][j] * b[j];
    }
    if (wt < wu)
      searching = false;
    else if (wt > wu) {
      searching = false;
      result = true;
    }
  }
  return result;
}

bool Matrix_Ordering::first_smaller(const Monomial & t, const Monomial & u) const
{
  bool result = false;
  bool searching = true;
  const EXP_TYPE * a = t.log();
  const EXP_TYPE * b = u.log();
  for (NVAR_TYPE i = 0; searching and i < m; ++i) {
    DEG_TYPE wt = 0;
    DEG_TYPE wu = 0;
    for (NVAR_TYPE j = 0; j < n; ++j) {
      wt += W[i][j] * a[j];
      wu += W[i][j] * b[j];
    }
    if (wt > wu)
      searching = false;
    else if (wt < wu) {
      searching = false;
      result = true;
    }
  }
  return result;
}

bool Matrix_Ordering::first_larger_than_multiple(
  const Monomial & t, const Monomial & u, const Monomial & v
) const {
  bool result = false;
  bool searching = true;
  const EXP_TYPE * a = t.log();
  const EXP_TYPE * b = u.log();
  const EXP_TYPE * c = v.log();
  for (NVAR_TYPE i = 0; searching and i < m; ++i) {
    DEG_TYPE wt = 0;
    DEG_TYPE wu = 0;
    DEG_TYPE wv = 0;
    for (NVAR_TYPE j = 0; j < n; ++j) {
      wt += W[i][j] * a[j];
      wu += W[i][j] * b[j];
      wv += W[i][j] * c[j];
    }
    if (wt < wu + wv)
      searching = false;
    else if (wt > wu + wv) {
      searching = false;
      result = true;
    }
  }
  return result;
}

#endif
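
The grevlex comparison routines above all follow the same scheme: compare total (possibly weighted) degrees first, then break ties by repeatedly discarding the exponent of the last variable until the partial sums differ. The following standalone sketch (standard C++ only; none of these names come from DynGB) restates that rule on plain exponent vectors and checks it on a small example.

#include <cstddef>
#include <iostream>
#include <vector>

// Grevlex comparison on raw exponent vectors (assumed to have equal length):
// the larger total degree wins; on a tie, the monomial whose rightmost
// differing exponent is smaller is the larger monomial.
bool grevlex_larger(const std::vector<unsigned> & a, const std::vector<unsigned> & b) {
  unsigned da = 0, db = 0;
  for (std::size_t k = 0; k < a.size(); ++k) { da += a[k]; db += b[k]; }
  if (da != db) return da > db;
  for (std::size_t k = a.size(); k-- > 0; )
    if (a[k] != b[k]) return a[k] < b[k];
  return false;  // identical exponent vectors
}

int main() {
  // with x > y > z:  x*y^2 and x^2*z both have degree 3, and grevlex places
  // x*y^2 higher because its z-exponent (the rightmost difference) is smaller
  std::vector<unsigned> xy2 { 1, 2, 0 }, x2z { 2, 0, 1 };
  std::cout << std::boolalpha
            << grevlex_larger(xy2, x2z) << '\n'   // true
            << grevlex_larger(x2z, xy2) << '\n';  // false
  return 0;
}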