//#define CIL_EMIT
//#define FCTC_STATS
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Reflection;
using System.Reflection.Emit;
using System.Threading;

using glue.Collections.BitArray;
using glue.Collections.LockFreeDictionary;
using glue.Collections.XSpinLock;
using glue.Debugging;
using glue.Extensions.Enumerable;
using glue.Extensions.String;


namespace agree
{
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	///
	///
	///
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	public partial class TypeMgr
	{
		/// <summary>
		/// Per-feature record: the feature's name, its zero-based dense index, and the
		/// maximal (introducing) type determined in LoadFeatures.
		/// </summary>
		public partial struct FeatureInfo
		{
			public FeatureInfo(String feature, int i_feat, Type maximal_type)
			{
				this.i_feat = i_feat;
				this.feature = feature;
				this.maximal_type = maximal_type;
#if DEBUG
				this.AppropriateFor = new HashSet<Type>();
#endif
			}

			/// zero-based dense feature index (assigned sequentially in LoadFeatures)
			public int i_feat;
			/// feature name as authored in the TDL source
			public String feature;
			/// the unique type that introduces this feature (multiple introduction is an error)
			public Type maximal_type;
#if DEBUG
			/// debug-only bookkeeping; populated outside this chunk — NOTE(review): confirm usage
			public HashSet<Type> AppropriateFor;
#endif
		};


		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// 
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Construct the type manager for a grammar. A null 'config' is replaced with a
		/// default GrammarConfig. Creates the string table and feature-config manager,
		/// then installs the distinguished *top* type into the type dictionary.
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		public TypeMgr(Grammar g, GrammarConfig config)
		{
			if (config == null)
				config = new GrammarConfig();

			this.g = g;
			this.config = config;
			this.strings = new Strings(this);
			this.fcm = new FeatureConfig.Manager(this);

			this.TopType = Type.MakeTopType(this, config.top_type);
			this.type_dict.Add(config.top_type, this.TopType);
			this.rgt_top_singleton = new Type[] { this.TopType };
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		///	<summary>
		///	Contiguously assigned, zero-based type id numbers are used for GLB caching. Sorted topologically because
		///	topwards types are expected to be more heavily used, so they should allocate the base of the triangular cache
	///	matrix, so as to avoid wasted space.
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// index in 'type_arr' reserved for *top* (asserted at the end of EmbedBcpo)
		public const int TopId = 0;
		/// type id of the string type; set in EmbedBcpo and again in ResolveSpecialTypes
		public int StringId;
		/// all types (authored plus generated GLBs) in topological order; index == Type.m_id
		public Type[] type_arr;
		/// type name -> Type; case-insensitive (ordinal)
		public Dictionary<String, Type> type_dict = new Dictionary<String, Type>(StringComparer.OrdinalIgnoreCase);
		/// total type count after GLB closure; -1 until EmbedBcpo runs
		public int c_types = -1;
		/// bits needed to represent a type id; -1 until EmbedBcpo runs
		public int c_t_bits = -1;
		/// number of authored types prior to GLB insertion (set in EmbedBcpo)
		public int code_size;
		/// sequence number used to name generated "glbtype<n>" types
		int next_glb_num = 1;

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		///
		///
		///
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// feature-configuration manager; created in the constructor
		public FeatureConfig.Manager fcm;
		/// feature configurations by configuration index; populated in Petrify
		public FeatureConfig[] fc_by_index = null;
		/// feature configuration per type id; populated in Petrify
		public FeatureConfig[] fc_by_type = null;
		public ConstraintPool[][] rgcp_by_type = null;	// testing
		/// feature-configuration index per type id; populated in Petrify
		public int[] fcix_by_type;

		/// feature info by dense feature index; built in LoadFeatures
		public FeatureInfo[] feat_arr;
		/// feature name -> info; case-insensitive (ordinal)
		public Dictionary<String, FeatureInfo> feat_map = new Dictionary<String, FeatureInfo>(StringComparer.OrdinalIgnoreCase);

		/// entry name -> Entry; case-insensitive (ordinal)
		public Dictionary<String, Entry> entry_dict = new Dictionary<String, Entry>(StringComparer.OrdinalIgnoreCase);

		public IList<Type> AllTypes { get { return type_arr; } }
		public IEnumerable<Entry> AllEntries { get { return entry_dict.Values; } }
		public IEnumerable<Instance> AllInstances { get { return AllTypes.AsEnumerable<Instance>().Concat(AllEntries); } }
		public ICollection<String> Features { get { return feat_map.Keys; } }

		public GrammarConfig config;

		public Strings strings;

		public Grammar g;

		/// set by Petrify(); exposed via IsPetrified
		bool f_petrified = false;

		/// single-element array containing only *top*; assigned once in the constructor
		public readonly Type[] rgt_top_singleton = null;

		/// certain special types
		public Type TopType;
		public Type StringType;
		public Type tt_list_head;
		public Type tt_list;
		public Type tt_empty;
		public Type tt_dlist_list;
		public Type tt_dlist_last;
		public Type tt_key_daughter;
		public StartSymbol[] rgtt_start_symbols;

		public Type tt_label;
		public Type tt_meta;

		/// feature index for certain special features
		public int f_ix_list_head;
		public int f_ix_list_tail;
		public int f_ix_dlist_list;
		public int f_ix_dlist_last;

		public int ms_expand = 0;

		/// mask extracting the type/string id bits from an Edge.Flag; recomputed (widened) in Petrify
		public Edge.Flag MultiIdMask = Edge.Flag.LowestFlagValue - 1;

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Edge fetching
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Map an edge flag word to its Type: the string type for string-marked edges,
		/// *top* when the id bits are zero, otherwise the type with that id.
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		public Type GetEdgeType(Edge.Flag f)
		{
			bool is_string_edge = (f & Edge.Flag.EtmMask) == Edge.Flag.EtmString;
			if (is_string_edge)
				return StringType;

			int type_id = (int)(f & MultiIdMask);
			if (type_id == 0)
				return TopType;
			return type_arr[type_id];
		}

		/// <summary>
		/// Map an edge flag word to a type id: the string type's id for string-marked
		/// edges, otherwise the id bits extracted via MultiIdMask.
		/// </summary>
		public int GetEdgeTypeId(Edge.Flag f)
		{
			if ((f & Edge.Flag.EtmMask) == Edge.Flag.EtmString)
				return StringId;
			return (int)(f & MultiIdMask);
		}

		/// <summary>
		/// True when the edge carries neither a string marker nor a nonzero type id,
		/// i.e. its type is *top*.
		/// </summary>
		public bool IsTopId(Edge e)
		{
			Edge.Flag masked = e.FlagsId & (Edge.Flag.EtmMask | MultiIdMask);
			return masked == 0;
		}

		/// <summary>
		/// True when the flag word marks a string edge AND carries a nonzero string id
		/// (a zero id under the string marker is a bare string type, not a value).
		/// </summary>
		public bool IsStringValue(Edge.Flag f)
		{
			// GREP: bare-string-type-setting
			bool has_string_marker = (f & Edge.Flag.EtmMask) == Edge.Flag.EtmString;
			bool has_nonzero_id = (f & MultiIdMask) != 0;
			return has_string_marker && has_nonzero_id;
		}

		/// <summary>
		/// Return the interned string value carried by a string-marked edge flag,
		/// or null when the flag is not a string edge or its string id is zero.
		/// </summary>
		public String GetStringValue(Edge.Flag f)
		{
			if ((f & Edge.Flag.EtmMask) != Edge.Flag.EtmString)
				return null;

			int sid = (int)(f & MultiIdMask);
			if (sid == 0)
				return null;

			return strings.Get(sid);
		}

		/// <summary>
		/// True when two flag words agree on both the edge-type-marker bits and the
		/// string/type id bits (equivalent to XOR-then-mask against zero).
		/// </summary>
		public bool CompareStringValues(Edge.Flag f1, Edge.Flag f2)
		{
			Edge.Flag significant = Edge.Flag.EtmMask | MultiIdMask;
			return (f1 & significant) == (f2 & significant);
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		///
		///
		///
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Orchestrate type-hierarchy loading from TDL: group the type token stream and
		/// build the authored hierarchy (LoadTypes), close it under greatest lower bounds
		/// (EmbedBcpo), attach features (LoadFeatures), then bind the configured special
		/// types (ResolveSpecialTypes). Releases tdlg.tdl once consumed.
		/// </summary>
		public void LoadTypeHierarchyFromTdl(CommandToken tx, GrammarFileSet tdlg)
		{
			/// Aggregate Type token streams into groups and load the authored type hierarchy from these identifier groups.
			tx.TransactionStatus("Loading types.");
			LoadTypes(new TokenGrouper(TokenDefinitionGroup.Type.Tdl, tdlg.tdl));
			tdlg.tdl = null;

			/// Embed the bounded complete partial order in the type graph
			tx.TransactionStatus("Computing Greatest Lower Bound closure of the type hierarchy.");
			EmbedBcpo();

			/// Read all Features and attach them to their introducing type. Validate appropriateness conditions and determine 
			/// maximal types for all features. Ensure the validity of the type hierarchy according to certain properties of 
			/// the formalism.
			tx.TransactionStatus("Loading features.");
			LoadFeatures();

			/// Resolve special types: attach special configuration types used in lists, etc.
			tx.TransactionStatus("Resolve special types.");
			ResolveSpecialTypes();
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Aggregate Entry token streams into groups, construct Entries. Release references to the original tokens
		/// as soon as they are no longer needed. In the future, we may wish to retain these to enable live
		/// editing scenarios.
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Aggregate Entry token streams into groups, construct Entries. Release references to the original tokens
		/// as soon as they are no longer needed. In the future, we may wish to retain these to enable live
		/// editing scenarios.
		/// Load order: grammar rules, lexical rules, lexicon, node labels, start symbols;
		/// finishes by resolving start symbols and span-only rules (ResolveSpecialEntries).
		/// </summary>
		public void LoadEntriesFromTdl(CommandToken tx, Tray tray, GrammarFileSet tdlg)
		{
			/// 1. basic grammar rule entries
			tx.TransactionStatus("Loading grammar rule entries.");
			LoadEntries(new TokenGrouper(TokenDefinitionGroup.Type.GrammarRule, tdlg.grules), tray);
			tdlg.grules = null;

			/// 2. lexical rule entries
			tx.TransactionStatus("Loading lexical rule entries.");
			LoadEntries(new TokenGrouper(TokenDefinitionGroup.Type.LexicalRule, tdlg.irules), tray);
			tdlg.irules = null;

			/// 3. lexical entries
			tx.TransactionStatus("Scanning lexical entries.");
			LoadEntries(new TokenGrouper(TokenDefinitionGroup.Type.Lexicon, tdlg.lexicon), tray);
			tdlg.lexicon = null;

			/// 4. node label entries
			tx.TransactionStatus("Loading node label entries.");
			LoadEntries(new TokenGrouper(TokenDefinitionGroup.Type.Label, tdlg.labels), tray);
			tdlg.labels = null;

			/// 5. start symbol entries
			tx.TransactionStatus("Loading start symbol entries.");
			LoadEntries(new TokenGrouper(TokenDefinitionGroup.Type.Start, tdlg.roots), tray);
			tdlg.roots = null;

			/// Resolve start symbols and some special rule types
			ResolveSpecialEntries();
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		///
		///
		///
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Return the maximal (introducing) type for the feature named by an identifier
		/// token. Reports a fatal tokenizer error when the token is not an identifier or
		/// names an unknown feature.
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		public Type GetMaximalTypeForFeatureToken(TdlTok tok)
		{
			if (tok.t != TdlTok.Type.Identifier)
				TdlTokenizer.ErrorExit(tok, "Expected a feature: '{0}'", tok.t.ToString());

			FeatureInfo info;
			if (feat_map.TryGetValue(tok.i_s, out info))
				return info.maximal_type;

			TdlTokenizer.ErrorExit(tok, "Unknown feature: '{0}'", tok.i_s);
			return info.maximal_type;
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		///
		///
		///
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Return the maximal (introducing) type for a feature given its dense index.
		/// No bounds check: the caller supplies a valid index from feat_map/feat_arr.
		/// </summary>
		public Type GetMaximalTypeForFeature(int i_feat)
		{
			FeatureInfo info = feat_arr[i_feat];
			return info.maximal_type;
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		///
		///
		///
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Look up a feature's dense index by name (case-insensitive per feat_map's
		/// comparer); returns -1 when the feature is unknown.
		/// </summary>
		public int GetFeatureIndex(String f)
		{
			FeatureInfo info;
			if (feat_map.TryGetValue(f, out info))
				return info.i_feat;
			return -1;
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// initialize Entry dictionary with all Entry symbol names
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// initialize Entry dictionary with all Entry symbol names.
		/// For each definition group: unify the declared parent types into a single
		/// constraint type, then construct the Entry subclass matching the group kind
		/// (grammar rule, lexical/morphological rule, lexical entry, label/meta
		/// template, or start symbol) and register it in entry_dict.
		/// </summary>
		public void LoadEntries(IEnumerable<TokenDefinitionGroup> file_edefs, Tray tr)
		{
			foreach (TokenDefinitionGroup tdg in file_edefs)
			{
				// ':+' (append) syntax is only meaningful for types, not entries
				if (tdg.f_append)
					TdlTokenizer.ErrorExit(tdg.tok_ident, "Cannot append type information in an entry description.");

				TdlTok id_tok = tdg.tok_ident;

				// fold the parent conjunction into one type via repeated GLB, starting from *top*
				Type tt_cur = TopType;
				foreach (TdlTok tok_par in tdg.conj_par)
				{
					Type tt_par;
					if (!type_dict.TryGetValue(tok_par.i_s, out tt_par))
						TdlTokenizer.ErrorExit(tok_par, String.Format("Entry description uses undefined parent type {0}", tok_par.i_s));

					int tid_new;
					if ((tid_new = GetGlbCheckTopAndIdentity(tt_cur.m_id, tt_par.m_id)) == -1)
						TdlTokenizer.ErrorExit(tok_par, String.Format("Parent types {0} specified for entry description {1} do not unify",
							tdg.conj_par.Select(t => t.i_s).StringJoin(" "),
							tdg.Identifier));
					tt_cur = type_arr[tid_new];
				}

				Entry e;
				if (entry_dict.TryGetValue(id_tok.i_s, out e))
					TdlTokenizer.ErrorExit(id_tok, String.Format("Symbol {0} is already defined.", id_tok.i_s));

				switch (tdg.type)
				{
					case TokenDefinitionGroup.Type.GrammarRule:
						e = new GrammarRule(tt_cur, id_tok.i_s, tdg.m_bfc);
						break;
					case TokenDefinitionGroup.Type.LexicalRule:
						// a lexical rule with morphological subrules becomes a MorphologicalRule
						if (tdg.morph_subrules == null || tdg.morph_subrules.Count == 0)
							e = new LexicalRule(tt_cur, id_tok.i_s, tdg.m_bfc);
						else
							e = new MorphologicalRule(tt_cur, id_tok.i_s, tdg.m_bfc, tdg.morph_subrules);
						tdg.morph_subrules = null;
						break;
					case TokenDefinitionGroup.Type.Lexicon:
						e = new LexicalEntry(tt_cur, id_tok.i_s, tr, tdg.m_bfc);
						break;
					case TokenDefinitionGroup.Type.Label:
						// dispatch on whether a parent is the label or meta template type
						// NOTE(review): this uses InvariantCultureIgnoreCase while type_dict/entry_dict
						// use OrdinalIgnoreCase — confirm the difference is intentional
						if (tdg.conj_par.Any(tk => StringComparer.InvariantCultureIgnoreCase.Compare(tk.i_s, tt_label.Name) == 0))
							e = new NodeLabelTemplate(tr, tt_cur, id_tok.i_s, tdg.m_bfc);
						else if (tdg.conj_par.Any(tk => StringComparer.InvariantCultureIgnoreCase.Compare(tk.i_s, tt_meta.Name) == 0))
							e = new NodeMetaTemplate(tr, tt_cur, id_tok.i_s, tdg.m_bfc);
						else
						{
							String msg = String.Format(
								"'{0}' must be either a label template derived from type '{1}' or a meta template derived from type '{2}'",
								id_tok.i_s,
								tt_label.Name,
								tt_meta.Name);
							TdlTokenizer.ErrorExit(id_tok, msg);
						}
						break;
					case TokenDefinitionGroup.Type.Start:
						e = new StartSymbol(tt_cur, id_tok.i_s, tdg.m_bfc);
						break;
					default:
						throw new InvalidOperationException();
				}
				Debug.Assert(tdg.morph_subrules == null);

				entry_dict.Add(id_tok.i_s, e);
			}
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		///
		///
		///
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Resolve an identifier token to its Type; reports a fatal tokenizer error
		/// when the name is not a defined type.
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		public Type LookupType(TdlTok tok)
		{
			Type tt;
			if (type_dict.TryGetValue(tok.i_s, out tt))
				return tt;

			TdlTokenizer.ErrorExit(tok, "Invalid type: '{0}'", tok.i_s);
			return tt;
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		///
		///
		///
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Create a Type with the given name and base constraints, register it in
		/// type_dict (throws on duplicate name via Dictionary.Add), and return it.
		/// </summary>
		Type AddType(String s_name, List<BaseFeatConstraint> bfc)
		{
			Type t_new = new Type(this, s_name, bfc);
			type_dict.Add(s_name, t_new);
			return t_new;
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// initialize Type dictionary with all Type names
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// initialize Type dictionary with all Type names.
		/// Pass 1 registers every non-append definition as a new Type (rejecting
		/// duplicates and parentless non-label types). Pass 2 attaches constraints for
		/// append (':+') groups and links each type to its declared parents.
		/// </summary>
		public void LoadTypes(IEnumerable<TokenDefinitionGroup> file_tdefs)
		{
			/// Since we make two passes on the enumerable, pre-load the grouping computation if necessary
			TokenDefinitionGroup[] rgtdg = file_tdefs as TokenDefinitionGroup[] ?? file_tdefs.ToArray();

			foreach (TokenDefinitionGroup tdg in rgtdg)
			{
				TdlTok id_tok = tdg.tok_ident;
				Type tfst;
				if (!tdg.f_append)
				{
					// every authored type except labels must declare at least one parent
					if (tdg.conj_par.Count == 0 && tdg.type != TokenDefinitionGroup.Type.Label)
						TdlTokenizer.ErrorExit(id_tok, "Type defines no parents.");

					if (type_dict.TryGetValue(id_tok.i_s, out tfst))
						TdlTokenizer.ErrorExit(id_tok, String.Format("{0} already defined", id_tok.i_s));

					tfst = AddType(id_tok.i_s, tdg.m_bfc);
				}
			}

			// check and build each type's parent types
			foreach (TokenDefinitionGroup tdg in rgtdg)
			{
				if (tdg.f_append)
				{
					// ':+' merges additional constraints into an already-defined type
					Type tfst_master;
					if (!type_dict.TryGetValue(tdg.Identifier, out tfst_master))
						TdlTokenizer.ErrorExit(tdg.tok_ident, "Cannot append type information to undefined type.");
					tfst_master.SetConstraints(tdg.m_bfc);
				}

				Type tfst_cur = type_dict[tdg.Identifier];
				foreach (TdlTok tok_par in tdg.conj_par)
				{
					Type tfst;
					if (!type_dict.TryGetValue(tok_par.i_s, out tfst))
						TdlTokenizer.ErrorExit(tok_par, String.Format("Type uses undefined parent type {0}", tok_par.i_s));
					if (tfst_cur == tfst)
						TdlTokenizer.ErrorExit(tok_par, String.Format("Type {0} must not refer to itself as a parent", tok_par.i_s));
					tfst_cur.AddIParent(tfst);
				}
			}
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		///	<summary>
		/// EmbedBcpo
		///
		/// Embed the type hierarchy in a bounded complete partial order (BCPO) lattice by inserting greatest-
		/// lower bound (GLB) types as required.
		///
		/// References:
		/// Hassan Ait-Kaci, Robert Boyer, Patrick Lincoln, Roger Nasr. 1989. "Efficient Implementation of Lattice
		///		Operations"
		/// Ulrich Callmeier. 2001. "Efficient Parsing with Large-Scale Unification Grammars" p.28-30.
		/// Gerald Penn. 2000. "The Algebraic Structure of Attributed Type Signatures" p. 30-34
		/// Bernd Kiefer, Hans-Ulrich Krieger, John Carroll, Rob Malouf. "A Bag of Useful Techniques for Efficient
		///		and Robust Parsing" p. 475.
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		public void EmbedBcpo()
		{
			// retain the number of types prior to adding GLBs
			code_size = type_dict.Count;

			// check for inheritance from *top* and assign a maximal-depth value to each node
			foreach (Type t in type_dict.Values)
				t.FindLevel();

			// generate Aït-Kaci transitive-closure codes for all user-specified types, in order according to
			// the depth values just determined.
			// code_dict: closure code -> Type (field declared outside this chunk of the partial class)
			code_dict = new Dictionary<BitArr, Type>();
			Type[] bitpos2type = new Type[code_size];
			int cur_code = code_size - 1;

			// deepest types get the lowest bit positions; each type's code is its own bit
			// OR'ed with the codes of all its children (hence the descending-level order)
			foreach (Type tt in type_dict.Values.OrderByDescending(e => e.m_level))
			{
				BitArr code = new BitArr(code_size);
				code[cur_code] = true;
				foreach (Type tc in tt.Children)
					code.OrEq(tc.m_code);

				code_dict.Add(tt.m_code = code, tt);
				bitpos2type[cur_code] = tt;
				cur_code--;
			}

			/// determine codes for the GLB types which will be needed to close the graph
			/// iterate to a fixed point: each round intersects codes pairwise; newly added
			/// GLBs seed the next round, until no new intersection codes appear
			Type[] rg_tfs = type_dict.Values.Where(t => t.m_flags != Type.Flags.Leaf).ToArray();
			while (true)
			{
				List<Type> added_glbs = new List<Type>();
				for (int i = TopId + 1; i < rg_tfs.Length; i++)
				{
					Type t0 = rg_tfs[i];
					BitArr ba0 = t0.m_code;
					for (int j = i + 1; j < rg_tfs.Length; j++)
					{
						Type t1 = rg_tfs[j];
						// FastTest: true when the codes share at least one bit (types have a common descendant)
						if (ba0.FastTest(t1.m_code))
						{
							BitArr z = ba0 & t1.m_code;
							// the GLB's level is the max of the two operand levels
							ushort thislevel = t0.m_level;
							if (t1.m_level > thislevel)
								thislevel = t1.m_level;
							Type glbt;
							if (code_dict.TryGetValue(z, out glbt))
							{
								if (thislevel > glbt.m_level)
									glbt.m_level = thislevel;
							}
							else
							{
								glbt = AddType("glbtype" + (next_glb_num++), null);
								glbt.m_flags |= Type.Flags.GlbType;
								glbt.m_code = z;
								glbt.m_level = thislevel;
								code_dict.Add(z, glbt);
								added_glbs.Add(glbt);
							}
						}
					}
				}
				if (added_glbs.Count == 0)
					break;
				rg_tfs = added_glbs.ToArray();
			}

			c_types = code_dict.Count;
			// type ids must fit below the edge flag bits
			if (c_types >= (int)Edge.Flag.LowestFlagValue)
				throw new Exception(String.Format("System supports a maximum of {0} types and {1} types were specified.", (int)Edge.Flag.LowestFlagValue, c_types));
			c_t_bits = BitArr.HighestOne(c_types) + 1;

			/// sort all types including the new GLB types into a certain topological order
			type_arr = code_dict
							.Values
							.OrderBy(e => e.m_level)
							.ThenByDescending(e => e.m_code.OnesCount())
							.ToArray();

			/// create a set of negations for all the codes, for use in step 1.
			BitArr[] notcodes = new BitArr[c_types];
			for (int i = 0; i < c_types; i++)
				notcodes[i] = ~type_arr[i].m_code;

			int ops = 0;
			/// for proper glb caching: (todo: check if this is still needed here so early)
			StringType = type_arr.FirstOrDefault(vv => StringComparer.OrdinalIgnoreCase.Compare(vv.Name, config.string_type) == 0);
			Debug.Assert(StringType != null);
			StringId = StringType.m_id;

			List<Type> draft = new List<Type>();
			BitArr r = new BitArr(code_size);
			// walk leafward-to-topward; only GLB nodes need their parent/child links built
			for (int i = c_types - 1; i > 0; i--)
			{
				Type node = type_arr[i];

				/// node ids correspond to their index in 'type_arr'. Set it now.
				node.m_id = (int)i;

				/// Looking only at GLBs, we can directly obtain the transitive closure of the graph by carefully manipulating 
				/// their parents and children.
				if (!node.IsGlb)
					continue;

				/// 1. This GLB as a parent: add its children
				/// 1a. get a list of draft candidates: all descendants of 'node'
				/// The order in which these are added is important because it determines the order of testing children 
				/// in step 1b. It's just a topological order. The list is needed because children may be eliminated from
				/// the list before the point at which they'd otherwise be added.
				draft.Clear();
				for (int j = i + 1; j < c_types; j++)
				{
					Type leafwards = type_arr[j];
					// descendant test: leafwards' code has no bits outside node's code
					if (!leafwards.m_code.FastTest(notcodes[i]))
					{
						draft.Add(leafwards);
						AddGlb(j, i, (Edge.Flag)leafwards.m_id);
					}
				}

				/// 1b. pick the subset of immediate children from this list
				/// While the list is not empty, add the oldest item as a child and then remove all of its descendants from the 
				/// list.
				while (draft.Count > 0)
				{
					Type new_child = draft[0];
					draft.RemoveAt(0);

					// detach the new child from any authored parent that this GLB now sits below
					if (!new_child.IsGlb)
						foreach (Type par in new_child.Parents.ToArray())
						{
							ops++;
							if (node.m_code.AndGivesExact(par.m_code))
								new_child.RemoveIParent(par);
						}

					node.AddIChild(new_child);
					for (int q = 0; q < draft.Count; q++)
					{
						ops++;
						if (draft[q].m_code.AndGivesExact(new_child.m_code))
							draft.RemoveAt(q--);
					}
				}

				/// 2. This GLB as a child: add its parents
				/// 2a. get a list of draft candidates between 'node' and *top*
				draft.Clear();
				BitArr nc = node.m_code;
				for (int j = i - 1; j >= 0; j--)
				{
					Type topwards = type_arr[j];
					if ((topwards.m_flags & Instance.Flags.Leaf) == 0)
					{
						ops++;
						// ancestor test: topwards' code covers all of node's bits
						if (nc.AndGivesExact(topwards.m_code))
						{
							draft.Add(topwards);
							if (j > 0)
								AddGlb(i, j, (Edge.Flag)node.m_id);
						}
					}
				}

				/// 2b. pick a minimal set of parents from this list.
				/// Each selection allows us to eliminate others from the list. This effect is maximized by starting 
				/// with candidates with the fewest additional bits set beyond the required match. This was the reason for
				/// the secondary sort on 'type_arr' above.
				r.CopyFrom(nc);
				for (int j = 0; j < draft.Count; j++)
				{
					Type parent = draft[j];
					if (parent.m_code.FastTestNotArg(r))
					{
						r.OrEq(parent.m_code);
						for (int k = j + 1; k < draft.Count; k++)
						{
							ops++;
							if (parent.m_code.AndGivesExact(draft[k].m_code))
								draft.RemoveAt(k--);
						}
						// all GLB-to-GLB edges are all added as children, above
						if (!parent.IsGlb)
						{
							foreach (uint bit_pos in nc.OnesPositions())
								parent.RemoveIChild(bitpos2type[bit_pos]);
							node.AddIParent(parent);
						}
					}
				}
			}

			// populate the GLB cache for all authored-type ancestor/descendant pairs
			for (int i = 1; i < c_types; i++)
			{
				Type parent = type_arr[i];
				if (!parent.IsGlb)
					foreach (Type child in parent.AllDescendants)
						AddGlb(i, child.m_id, (Edge.Flag)child.m_id);
			}

			// NOTE(review): diagnostic output on stdout; consider routing through a logger
			Console.WriteLine("types {0} closed {1} glb {2} ops {3}", code_size, type_arr.Length, type_arr.Length - code_size, ops);

			Debug.Assert(type_arr[TopId] == TopType && TopType.m_id == TopId);
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		///
		///
		///
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Determine each feature's unique introducing (maximal) type, assign dense
		/// feature indexes, and propagate feature sets through the hierarchy.
		/// Errors out if any feature is introduced at more than one type.
		/// </summary>
		public void LoadFeatures()
		{
			/// The uninitialized value (zero) of the HasAnyFeatures flag means the type is unconstrainable.
			/// Mark all types below any occurrence of a constraint as non-atomic.
			/// One problem with this code is that the tdl, "t := *top* & [ ]." will cause 't' and its descendants
			/// to be marked as having features
			foreach (Type tt in type_arr)
			{
				if ((tt.m_flags & (Type.Flags.HasConstraints | Type.Flags.HasAnyFeatures)) == Type.Flags.HasConstraints)
					tt.SetHasAnyFeaturesFlagBelow();
			}

			/// Atomic types are those featureless types without features in their subgraph
			foreach (Type tt in type_arr)
			{
				if ((tt.m_flags & (Type.Flags.HasAnyFeatures | Type.Flags.Atomic)) == 0)
				{
					HashSet<Type> hs = tt.AllDescendants;
					if (!hs.Any(e => e.HasAnyFeatures))
					{
						// the whole subgraph is featureless: mark it atomic in one sweep
						tt.m_flags |= Type.Flags.Atomic;
						foreach (Type tta in hs)
							tta.m_flags |= Type.Flags.Atomic;
					}
				}
			}

			// Add features to a maximal introduction table
			// References:
			// Ann Copestake "The Compleat LKB" 1993, p. 93.
			// Carroll, Copestake, Malouf, Oepen - LKB checktypes.lsp
			// Ann Copestake "Implementing Typed Feature Structure Grammars" 2002

			// feature name -> set of candidate introducing types (kept minimal as we go)
			Dictionary<String, List<Type>> _maximal_types = new Dictionary<String, List<Type>>();

			foreach (Type tt in type_arr.Where(e => e.HasAnyFeatures && e.m_bfc != null))
				foreach (BaseFeatConstraint bfc in tt.m_bfc)
				{
					if (bfc[0].t != TdlTok.Type.Identifier)
						TdlTokenizer.ErrorExit(bfc[0], "Expected an identifier");
					String feat = bfc[0].i_s;

					List<Type> l;
					if (_maximal_types.TryGetValue(feat, out l))
					{
						// ignore tt if some recorded candidate already subsumes it
						if (!l.Any(e => tt.IsSubtypeOf(e)))
						{
							// otherwise replace every candidate that tt subsumes with tt itself
							bool f_any = false;
							for (int i = 0; i < l.Count; i++)
							{
								if (l[i] == tt || l[i].IsSubtypeOf(tt))
								{
									if (f_any)
										l.RemoveAt(i--);
									else
									{
										l[i] = tt;
										f_any = true;
									}
								}
							}
							if (!f_any)
								l.Add(tt);
						}
					}
					else
					{
						l = new List<Type>() { tt };
						_maximal_types.Add(feat, l);
					}
				}

			// Set the local features. Check for multiple introduction
			int i_feat = 0;
			foreach (var kvp in _maximal_types)
			{
				Debug.Assert(kvp.Value.Count != 0, "Internal error in temporary maximal types computation.");

				List<Type> lt = kvp.Value;
				Type max_type = lt[0];

				/// Feature should be introduced by only one type in the type hierarchy
				if (lt.Count > 1)
					TdlTokenizer.ErrorExit(String.Format("Feature {0} is introduced at multiple types {{{1}}}",
						kvp.Key,
						lt.Select(tt => tt.Name).StringJoin(" ")));

				/// add the feature and associate it with its maximal introducing type
				feat_map.Add(kvp.Key, new FeatureInfo(kvp.Key, i_feat, max_type));
				max_type.AddLocalFeature(i_feat);

				i_feat++;
			}
			_maximal_types = null;

			// construct reverse table which maps the feature index back to the feature info
			feat_arr = new FeatureInfo[i_feat];
			foreach (var fi in feat_map.Values)
				feat_arr[fi.i_feat] = fi;

			// Do the following in topological order
			for (int i = 0; i < c_types; i++)
				type_arr[i].AddNonLocalFeatures();
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		///	<summary>
		/// Resolve some static types
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Resolve some static types: bind the configured string type, list/difference-list
		/// types and features, key-daughter type, and node label/meta template types to
		/// their fields. Missing mandatory names throw TdlException; a missing key-daughter
		/// type and a missing meta template type are tolerated (see below).
		/// </summary>
		public void ResolveSpecialTypes()
		{
			if (!type_dict.TryGetValue(config.string_type, out StringType))
				throw new TdlException("The string type '{0}' was not defined in any TDL file.", config.string_type);
			this.StringId = strings.string_id = StringType.m_id;

			FeatureInfo fi;
			if (!feat_map.TryGetValue(config.f_list_head, out fi))
				throw new TdlException("The list head feature '{0}' was not defined in any TDL file.", config.f_list_head);
			f_ix_list_head = fi.i_feat;
			tt_list_head = fi.maximal_type;

			if (!feat_map.TryGetValue(config.f_list_tail, out fi))
				throw new TdlException("The list tail feature '{0}' was not defined in any TDL file.", config.f_list_tail);
			f_ix_list_tail = fi.i_feat;

			if (!type_dict.TryGetValue(config.typs_list, out tt_list))
				throw new TdlException("The list type '{0}' was not defined in any TDL file.", config.typs_list);

			if (!type_dict.TryGetValue(config.typs_empty_list, out tt_empty))
				throw new TdlException("The empty list type '{0}' was not defined in any TDL file.", config.typs_empty_list);

			if (!type_dict.TryGetValue(config.typs_diff_list, out tt_dlist_list))
				throw new TdlException("The difference list type '{0}' was not defined in any TDL file.", config.typs_diff_list);

			if (!feat_map.TryGetValue(config.f_dlist_list, out fi))
				throw new TdlException("The difference list 'LIST' feature '{0}' was not defined in any TDL file.", config.f_dlist_list);
			f_ix_dlist_list = fi.i_feat;

			if (!feat_map.TryGetValue(config.f_dlist_last, out fi))
				throw new TdlException("The difference list 'LAST' feature '{0}' was not defined in any TDL file.", config.f_dlist_last);
			f_ix_dlist_last = fi.i_feat;

			// the LAST slot of a difference list is typed as a plain list
			tt_dlist_last = tt_list;

			if (!type_dict.TryGetValue(config.typs_key_daughter, out tt_key_daughter))
			{
				/// LKB sample grammars do not define the LKB default asserted type '+' for KEY-ARG
				/// so fall back to *top* with a warning rather than failing the load
				Console.Error.WriteLine("warning: The key daughter path type '{0}' was not defined in any TDL file.", config.typs_key_daughter);
				config.typs_key_daughter = config.top_type;
				tt_key_daughter = TopType;
				//throw new TdlException("The key daughter path type '{0}' was not defined in any TDL file.", config.typs_key_daughter);
			}

			String s = config.NodeLabelConfiguration.LabelTemplateType;
			if (s != null && !type_dict.TryGetValue(s, out tt_label))
				throw new TdlException("The node label template type '{0}' was not defined in any TDL file.", config.NodeLabelConfiguration.LabelTemplateType);

			s = config.NodeLabelConfiguration.MetaTemplateType;
			if (s != null && !type_dict.TryGetValue(s, out tt_meta))
			{
				// deliberately tolerated: a configured meta template type that is not defined
				//throw new TdlException("The node meta template type '{0}' was not defined in any TDL file.", config.NodeLabelConfiguration.MetaTemplateType);
			}
		}


		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// 
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Bind the configured start symbols (defaulting to "root", creating a bare
		/// StartSymbol entry for any name not already defined) and flag the configured
		/// span-only grammar rules. Throws TdlException when a listed name resolves to
		/// an entry of the wrong kind.
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		public void ResolveSpecialEntries()
		{
			if (config.start_symbols == null)
				config.start_symbols = new List<String> { "root" };

			rgtt_start_symbols = new StartSymbol[config.start_symbols.Count];
			int ix = 0;
			foreach (String sym in config.start_symbols)
			{
				Entry ent;
				if (!entry_dict.TryGetValue(sym, out ent))
				{
					// auto-create a minimal start symbol rather than failing the load
					ent = new StartSymbol(TopType, sym, null);
					entry_dict.Add(sym, ent);
					//throw new TdlException("Start symbol type '{0}' was not defined in any TDL file.", ss);
				}

				StartSymbol start = ent as StartSymbol;
				if (start == null)
					throw new TdlException("Type '{0}', which is listed as a start symbol, was not defined in the start symbols file.", sym);

				rgtt_start_symbols[ix++] = start;
			}

			if (config.span_only_rules != null && config.span_only_rules.Count > 0)
			{
				foreach (String sym in config.span_only_rules)
				{
					Entry ent;
					if (!entry_dict.TryGetValue(sym, out ent))
						throw new TdlException("Span only rule type '{0}' was not defined in any TDL file.", sym);

					GrammarRule rule = ent as GrammarRule;
					if (rule == null)
						throw new TdlException("Span only rule type '{0}' is not a grammar rule.", sym);

					rule.IsSpanOnly = true;
				}
			}
		}


		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// 
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// NOTE(review): unimplemented stub — the loop body is empty and the method has
		/// no effect. Presumably intended to strip redundant (fillable) constraint
		/// information from types; confirm intent before implementing or removing.
		/// </summary>
		public void Unfill()
		{
			foreach (Type t in type_dict.Values)
			{



			}

		}


		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// 
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Freeze the type system for parsing: widen MultiIdMask so the id field can hold
		/// either a type id or a string id, refresh the GLB cache, and materialize the
		/// per-type feature-configuration lookup arrays. Sets IsPetrified.
		/// </summary>
		public void Petrify(Tray tr /*temp*/)
		{
			/// calculate how many bits will be used for stringid/typeid
			int c_f_bits = 32 - BitArr.HighestOne((int)Edge.Flag.LowestFlagValue);
			int c_s_bits = BitArr.HighestOne(strings.Count) + 1;
			// the shared id field must be wide enough for whichever space is larger
			int c_ts_bits = Math.Max(c_t_bits, c_s_bits);
			MultiIdMask = (Edge.Flag)((1 << c_ts_bits) - 1);

			UpdateGlbCacheBareTypes();

#if FCTC_STATS
			var fconfigs = AllTypes
							.Select(t => new { type = t, features = t.fc })
							.GroupBy(a => a.features, a => a.type)
							.ToArray();

			Console.WriteLine("{0} types ({1}b), {2} strings ({3}b), {4} feat in {5} configurations (max feat/type {6}). -> edge bits: flags:{7} fconfigs:{8} sid/tid: {9}",
				c_types,
				c_t_bits,
				strings.Count,
				c_s_bits,
				feat_arr.Length,
				fconfigs.Length,
				fconfigs.Max(grp => grp.Key.Count),
				c_f_bits,
				32 - c_f_bits - c_ts_bits,
				c_ts_bits);
#endif
			fc_by_index = fcm.ToArray();

			// per-type-id lookup arrays, indexed by Type.m_id (== position in type_arr)
			fc_by_type = type_arr.Select(t => t.fc).ToArray();

			fcix_by_type = type_arr.Select(t => t.fc.ConfigIndex).ToArray();

			//temp:
			fcm.BindToTray(tr);
			rgcp_by_type = type_arr.Select(t => t.fc.rgcp).ToArray();

			f_petrified = true;
		}

		/// <summary>True once <see cref="Petrify"/> has completed.</summary>
		public bool IsPetrified { get { return f_petrified; } }

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Diagnostic report (compiled only when FCTC_STATS is defined; the #define at the top of the file
		/// is currently commented out). Groups the load tray's pool marks by node, counts the distinct
		/// type-combinations per feature configuration, and prints summary statistics to the console.
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
#if FCTC_STATS
		public void TypePatternReport()
		{
			int node_count = 0;
			int distinct_TC = 0;

			// One group per node (_in_mark), containing all pool marks recorded for that node.
			var nodes = g.loadtray.Pools
				.SelectMany(cp => cp.PoolMarks)
				.GroupBy(pm => pm._in_mark)
				.ToArray();

			// For each node, build a canonical string of the constraint-type ids it uses
			// plus its feature configuration; then regroup by configuration.
			foreach (var zx in nodes
				.Select(grp => new
				{
					types_used = grp.Select(pm => pm.ConstraintType)
									.OrderBy(t => t.m_id)
									.Select(t => t.m_id.ToString())
									.StringJoin("-"),
					fc = fcm.Get(grp.Select(pm => pm.Pool.i_feat))
				})
				.GroupBy(a => a.fc, a => a.types_used))
			{
				String[] vz = zx.Distinct().ToArray();
				//Console.WriteLine("{0}\t{1} features\t{2} uses\t{3} distinct", zx.Key, zx.Key.Count, zx.Count(), vz.Length);
				foreach (var zs in vz.Take(100))
				{
					//				Console.WriteLine("\t{0}", zs);

				}
				distinct_TC += vz.Length;
				node_count += zx.Count();
			}
			// Every node should land in exactly one configuration group.
			Debug.Assert(node_count == nodes.Length);

			// Nodes whose marks all reference an unexpanded (Mark == 0) constraint.
			int c_allbarenodes = nodes
				.Where(grp => grp.All(pm => pm.Constraint.Mark == 0))
				.Count();


			double avg_eligible_load = nodes
				.Where(grp => grp.All(pm => pm.Constraint.Mark == 0))
				.Average(grp => grp.Count());


			Console.WriteLine("nodes {0}  distinct TC {1}  {2:00.00}%  all-bare nodes {3}  {4:00.00}%  avg. load {5:00.00}",
				node_count,
				distinct_TC,
				100.0 * distinct_TC / node_count,
				c_allbarenodes,
				100.0 * c_allbarenodes / node_count,
				avg_eligible_load);

			//if (Unification.c_all_unif != 0.0)
			//    Console.WriteLine("unif {0} / {1}  {2:00.00}%", Unification.c_skip_unif, Unification.c_all_unif, 100.0 * Unification.c_skip_unif / Unification.c_all_unif);
			//if (Unification.c_all_feat != 0.0)
			//    Console.WriteLine("feat {0} / {1}  {2:00.00}%", Unification.c_skip_feat, Unification.c_all_feat, 100.0 * Unification.c_skip_feat / Unification.c_all_feat);

			//Unification.c_skip_unif = 0;
			//Unification.c_all_unif = 0;

			//Unification.c_skip_feat = 0;
			//Unification.c_all_feat = 0;
		}
#endif
	};


	/// <summary>
	/// A plain value-type pair of edges. No behavior; fields are set directly by callers.
	/// </summary>
	public struct EdgePair
	{
		public Edge e1;
		public Edge e2;
	};


	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// <summary>
	/// An interned, immutable, ordered set of feature indexes — the "feature configuration" shared by all
	/// types that carry exactly this set of features. Exposed as a read-only IList&lt;int&gt; over the indexes.
	/// Instances are created only via <see cref="Manager.Get"/> (interning) or the static <see cref="Empty"/>.
	/// </summary>
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	[DebuggerDisplay("{ToString(),nq}")]
	public sealed class FeatureConfig : IList<int>
	{
		// Sorted feature indexes for this configuration (ordering established in Manager.Get).
		// fix: conventional modifier order ('readonly public' -> 'public readonly'); semantics unchanged.
		public readonly int[] rg_fix;
		// Constraint pools corresponding to rg_fix; populated by BindToTray.
		public ConstraintPool[] rgcp;
		// Owning type manager; null for the static Empty instance.
		readonly TypeMgr tm;
		//readonly Manager fcm;
		//readonly Tray tr;
		// Dense, zero-based index of this configuration within its Manager.
		readonly int fc_ix;

		/// <summary>The unique configuration with no features (tm is null for this instance).</summary>
		// fix: conventional modifier order ('readonly static' -> 'static readonly'); semantics unchanged.
		public static readonly FeatureConfig Empty;

		static FeatureConfig()
		{
			Empty = new FeatureConfig(null, 0, new int[0]);
		}

		/// <summary>
		/// Interns <see cref="FeatureConfig"/> instances so that each distinct ordered set of feature
		/// indexes is represented by exactly one shared object. Enumeration yields configurations in
		/// creation (ConfigIndex) order.
		/// </summary>
		public class Manager : IEnumerable<FeatureConfig>
		{
			readonly TypeMgr tm;
			public Manager(TypeMgr tm)
			{
				this.tm = tm;
				// Pre-intern the empty configuration under the empty key.
				pod.Add(String.Empty, Empty);
			}
			// Interning table: canonical key string -> shared FeatureConfig instance.
			public Dictionary<String, FeatureConfig> pod = new Dictionary<String, FeatureConfig>();

			/// <summary>
			/// Returns the canonical FeatureConfig for the given feature indexes, creating and caching
			/// it on first request. An empty sequence yields <see cref="FeatureConfig.Empty"/>.
			/// </summary>
			public FeatureConfig Get(IEnumerable<int> rgfix)
			{
				// Fix: enumerate 'rgfix' exactly once. The original called Any() and then OrderBy()
				// on the same sequence, re-running any lazily-evaluated source.
				// Features are ordered by the hierarchy level of each feature's maximal (introducing)
				// type; OrderBy is a stable sort, so equal levels keep their incoming order.
				int[] rg = rgfix.OrderBy(i => tm.feat_arr[i].maximal_type.m_level).ToArray();
				if (rg.Length == 0)
					return FeatureConfig.Empty;

				// Canonical interning key, e.g. "3-17-42".
				String s = rg.Select(ix => ix.ToString()).StringJoin("-");

				FeatureConfig fc;
				if (!pod.TryGetValue(s, out fc))
				{
					// ConfigIndex is assigned densely in creation order (pod.Count includes Empty).
					pod.Add(s, fc = new FeatureConfig(tm, pod.Count, rg));
				}
				return fc;
			}

			/// <summary>Binds every interned configuration to the given tray (see FeatureConfig.BindToTray).</summary>
			public void BindToTray(Tray tr)
			{
				foreach (FeatureConfig fc in pod.Values)
					fc.BindToTray(tr);
			}

			public IEnumerator<FeatureConfig> GetEnumerator()
			{
				// Yield in ConfigIndex order so callers can snapshot by index (see TypeMgr.Petrify).
				return pod.Values.OrderBy(fc => fc.fc_ix).GetEnumerator();
			}

			IEnumerator IEnumerable.GetEnumerator()
			{
				return GetEnumerator();
			}
		};

		/// <summary>
		/// Private constructor; instances are created only by <see cref="Manager.Get"/> and the
		/// static initializer of <see cref="Empty"/>.
		/// </summary>
		FeatureConfig(TypeMgr tm, int ix, int[] rgfix)
		{
			this.tm = tm;
			this.rg_fix = rgfix;
			this.fc_ix = ix;

			// For the empty configuration the unification worker is a trivial no-op; otherwise
			// install a trampoline that builds/selects the real worker on first invocation.
			if (rg_fix.Length == 0)
				this._u_destr = _u_destr_internal_empty;
			else
				this._u_destr = _first_time;
		}

		/// <summary>The owning type manager (null for the static <see cref="Empty"/> instance).</summary>
		public TypeMgr TypeMgr { get { return tm; } }

		//public int[] FeatureIndexes { get { return rg_fix; } }

		/// <summary>
		/// Resolves this configuration's feature indexes into the corresponding constraint pools
		/// of the given tray, populating <see cref="rgcp"/>.
		/// </summary>
		public void BindToTray(Tray tr)
		{
			// Fix: the original called .ToArray().ToArray(), materializing the result twice and
			// allocating a redundant copy. One ToArray is sufficient.
			this.rgcp = rg_fix.Select(fix => tr.Pools[fix]).ToArray();
		}

		/// <summary>Dense, zero-based index of this configuration within its manager (assigned at interning time).</summary>
		public int ConfigIndex { get { return fc_ix; } }

		/// <summary>Debug display: configuration index followed by the upper-cased feature names.</summary>
		public override String ToString()
		{
			String feats = rg_fix
				.Select(i => tm.feat_arr[i].feature.ToUpper())
				.StringJoin(" ");
			return String.Format("#{0} {1}", fc_ix, feats);
		}

		/// <summary>Read-only indexed access to the feature indexes; the setter always throws.</summary>
		public int this[int index]
		{
			get { return rg_fix[index]; }
			set { throw new InvalidOperationException(); }
		}

		/// <summary>Position of <paramref name="item"/> within the configuration, or -1 if absent.</summary>
		public int IndexOf(int item)
		{
			return Array.IndexOf(rg_fix, item);
		}

		/// <summary>True if the given feature index belongs to this configuration.</summary>
		public bool Contains(int item)
		{
			return Array.IndexOf(rg_fix, item) >= 0;
		}

		/// <summary>Copies the feature indexes into <paramref name="array"/> starting at <paramref name="arrayIndex"/>.</summary>
		public void CopyTo(int[] array, int arrayIndex)
		{
			int dst = arrayIndex;
			foreach (int fix in rg_fix)
				array[dst++] = fix;
		}

		/// <summary>Number of features in this configuration.</summary>
		public int Count { get { return rg_fix.Length; } }

		public bool IsReadOnly { get { return true; } }

		// IList<int> mutators: this collection is immutable, so all of them throw.
		// NOTE(review): the BCL convention for read-only collections is NotSupportedException;
		// InvalidOperationException is preserved here to keep observable behavior identical.
		public void Insert(int index, int item) { throw new InvalidOperationException(); }
		public void Add(int item) { throw new InvalidOperationException(); }
		public void Clear() { throw new InvalidOperationException(); }
		public bool Remove(int item) { throw new InvalidOperationException(); }
		public void RemoveAt(int index) { throw new InvalidOperationException(); }

		/// <summary>
		/// Signature of the per-configuration destructive-unification step. Presumably: unify feature
		/// edges for source mark m1 against target edge te2, calling 'fn' per conflicting pool — TODO confirm.
		/// </summary>
		public delegate bool del_u_destr(Unification u, int m1, TfsEdge te2, Func<ConstraintPool, int, Edge, TfsEdge, bool> fn);

		// Current worker for this configuration. Starts as either the empty no-op or the
		// first-call trampoline (_first_time), which replaces it with the real worker (see constructor).
		public del_u_destr _u_destr;

		/// <summary>
		/// One-shot trampoline: on the first call, builds (or selects) the real unification worker,
		/// caches it in <see cref="_u_destr"/> so later calls dispatch directly, then runs it.
		/// </summary>
		bool _first_time(Unification u, int m1, TfsEdge te2, Func<ConstraintPool, int, Edge, TfsEdge, bool> fn)
		{
			del_u_destr worker = emit_DestructiveUnify( //u.TargetTray);
				u, m1, te2, fn);
			_u_destr = worker;
			return worker(u, m1, te2, fn);
		}


		/// <summary>
		/// Chooses the unification worker for this configuration. The CIL_EMIT region below is a
		/// disabled experiment that compiles a specialized DynamicMethod per configuration; with
		/// CIL_EMIT undefined (the #define at the top of the file is commented out), this simply
		/// returns the managed worker <see cref="_u_destr_internal"/>.
		/// </summary>
		del_u_destr emit_DestructiveUnify(//Tray tr)
			Unification u, int m1, TfsEdge te2, Func<ConstraintPool, int, Edge, TfsEdge, bool> fn)
		{
#if CIL_EMIT
			Tray tr = u.TargetTray;

			System.Type[] t_args =
			{
				typeof(FeatureConfig),									// arg.0	'this'
				typeof(Unification),									// arg.1	'u'
				typeof(int),											// arg.2	'm1'
				typeof(TfsEdge),										// arg.3	'te2'
				typeof(Func<ConstraintPool, int, Edge, TfsEdge, bool>)	// arg.4	'fn'
			};
			DynamicMethod dm = new DynamicMethod(String.Empty, typeof(bool), t_args, typeof(FeatureConfig), true);
			ILGenerator il = dm.GetILGenerator();

			il.DeclareLocal(typeof(Tray));				// loc.0	'tr'
			il.DeclareLocal(typeof(int));				// loc.1	'm2'
			il.DeclareLocal(typeof(ConstraintPool[]));	// loc.2	'rgcp'
			il.DeclareLocal(typeof(ConstraintPool));	// loc.3	'cp'
			il.DeclareLocal(typeof(Edge));				// loc.4	'ne1'
			il.DeclareLocal(typeof(Edge));				// loc.5	'ge'

			/// tr = u.TargetTray
			/// rgcp = tr.Pools;
			il.Emit(OpCodes.Ldarg_1);
			il.Emit(OpCodes.Call, typeof(Unification).GetMethod("get_TargetTray"));
			il.Emit(OpCodes.Dup);
			il.Emit(OpCodes.Stloc_0);
			il.Emit(OpCodes.Ldfld, typeof(Tray).GetField("Pools"));
			il.Emit(OpCodes.Stloc_2);

			/// m2 = te2.Edge.Mark
			il.Emit(OpCodes.Ldarga_S, 3);
			il.Emit(OpCodes.Ldfld, typeof(TfsEdge).GetField("Edge"));
			il.Emit(OpCodes.Ldfld, typeof(Edge).GetField("Mark"));
			il.Emit(OpCodes.Stloc_1);

			// Unrolled per-feature body: one TryGetEdge pair per feature index in this configuration.
			foreach (int fix in rg_fix)
			{
				ConstraintPool cp = tr.Pools[fix];
				Label l_next_iter = il.DefineLabel();

				/// cp = rgcp[@fix]
				il.Emit(OpCodes.Ldloc_2);
				il.Emit(OpCodes.Ldc_I4, fix);
				il.Emit(OpCodes.Ldelem_Ref);
				il.Emit(OpCodes.Stloc_3);

				/// danger: overwriting te2.Edge now; original mark is preserved in m2
				/// if (cp.TryGetEdge(m2, &te2.Edge))
				cp.emit_TryGetEdge(il,
					g => il.Emit(OpCodes.Ldloc_3),
					g => il.Emit(OpCodes.Ldloc_1),
					g =>
					{
						il.Emit(OpCodes.Ldarga_S, 3);
						il.Emit(OpCodes.Ldflda, typeof(TfsEdge).GetField("Edge"));
					});
				il.Emit(OpCodes.Brfalse, l_next_iter);

				/// cp.TryGetEdge(m1, out ne1);
				cp.emit_TryGetEdge(il,
					g => il.Emit(OpCodes.Ldloc_3),
					g => il.Emit(OpCodes.Ldarg_2),
					g =>
					{
						il.Emit(OpCodes.Ldloca_S, 4);
						il.Emit(OpCodes.Ldflda, typeof(TfsEdge).GetField("Edge"));
					});
				il.Emit(OpCodes.Pop);


				il.MarkLabel(l_next_iter);
			}
			/// return true
			il.Emit(OpCodes.Ldc_I4_1);
			il.Emit(OpCodes.Ret);

			del_u_destr test = (del_u_destr)dm.CreateDelegate(typeof(del_u_destr), this);

			// Smoke-test the emitted delegate once and log the result.
			bool xt = test(u, m1, te2, fn);
			Console.WriteLine(xt);

#endif
			//return _emit_u_destr_closure(u, m1, te2, fn);
			// Active path: the managed (non-emitted) worker.
			return _u_destr_internal;
			//return (del_u_destr)dm.CreateDelegate(typeof(del_u_destr), this);
		}

		/// <summary>
		/// Disabled experiment (body under #if false; the live method returns null and is itself only
		/// referenced from commented-out code). It built a closure bound to the constraint pools of the
		/// unification's target tray. Fix: the original hand-unrolled the identical per-feature step
		/// fourteen times (~330 lines); the loop below is the equivalent, general form, preserved as
		/// reference inside the same disabled region. Compiled behavior is unchanged.
		/// </summary>
		del_u_destr _emit_u_destr_closure(Unification _u, int _m1, TfsEdge _te2, Func<ConstraintPool, int, Edge, TfsEdge, bool> _fn)
		{
#if false
			//Console.WriteLine("{0} {1}", cfg_ix, rg_fix.Select(ix => ix.ToString()).StringJoin("-"));
			Tray tr = _u.TargetTray;
			ConstraintPool[] rgcp = rg_fix.Select(i => tr.Pools[i]).ToArray();

			return new del_u_destr((u, m1, te2, fn) =>
			{
				int m2 = te2.Edge.Mark;
				Edge ne1;
				Edge ge;

				// Equivalent of the original 14-way unrolled sequence: one step per bound pool.
				for (int k = 0; k < rgcp.Length; k++)
				{
					ConstraintPool cp = rgcp[k];
					if (cp.TryGetEdge(m2, out te2.Edge))
					{
						cp.TryGetEdge(m1, out ne1);
						if (ne1.Mark != te2.Edge.Mark || ne1.FlagsId != te2.Edge.FlagsId)
						{
							if (!fn(cp, m1, ne1, te2))
							{
								// Unification failed below this feature: remove any partially
								// written edges for m1 and truncate them.
								foreach (ConstraintPool _cp in rgcp)
									if (_cp.TryRemoveEdge(m1, out ge))
										tr.RudeTruncate(ge);
								return false;
							}
						}
						if (m1 != m2 && te2.IsTarget)
							u._prune_below(cp, m2);
					}
				}

				return true;
			});
#endif
			return null;
		}


		// Worker for the empty feature configuration: there are no feature edges to unify,
		// so the step trivially succeeds.
		bool _u_destr_internal_empty(Unification u, int m1, TfsEdge te2, Func<ConstraintPool, int, Edge, TfsEdge, bool> fn)
		{
			return true;
		}

#if true
		bool _u_destr_internal(Unification u, int m1, TfsEdge te2, Func<ConstraintPool, int, Edge, TfsEdge, bool> fn)
		{
#if false
			Tray tr = u.TargetTray;
			int m2 = te2.Edge.Mark;
			ConstraintPool[] rgcp = tr.Pools;
			ConstraintPool cp;
			Edge ne1;
			Edge ge;

			//////////////////////////////////////////////////

			cp = rgcp[rg_fix[0]];
			if (cp.TryGetEdge(m2, out te2.Edge))
			{
				cp.TryGetEdge(m1, out ne1);
				if (ne1.Mark != te2.Edge.Mark || ne1.FlagsId != te2.Edge.FlagsId)
				{
					if (!fn(cp, m1, ne1, te2))
					{
						foreach (int j in rg_fix)
							if (rgcp[j].TryRemoveEdge(m1, out ge))
								tr.RudeTruncate(ge);
						return false;
					}
				}
				if (m1 != m2 && te2.IsTarget)
					u._prune_below(cp, m2);
			}

			if (rg_fix.Length == 1)
				return true;

			//////////////////////////////////////////////////

			cp = rgcp[rg_fix[1]];
			if (cp.TryGetEdge(m2, out te2.Edge))
			{
				cp.TryGetEdge(m1, out ne1);
				if (ne1.Mark != te2.Edge.Mark || ne1.FlagsId != te2.Edge.FlagsId)
				{
					if (!fn(cp, m1, ne1, te2))
					{
						foreach (int j in rg_fix)
							if (rgcp[j].TryRemoveEdge(m1, out ge))
								tr.RudeTruncate(ge);
						return false;
					}
				}
				if (m1 != m2 && te2.IsTarget)
					u._prune_below(cp, m2);
			}

			if (rg_fix.Length == 2)
				return true;

			//////////////////////////////////////////////////

			cp = rgcp[rg_fix[2]];
			if (cp.TryGetEdge(m2, out te2.Edge))
			{
				cp.TryGetEdge(m1, out ne1);
				if (ne1.Mark != te2.Edge.Mark || ne1.FlagsId != te2.Edge.FlagsId)
				{
					if (!fn(cp, m1, ne1, te2))
					{
						foreach (int j in rg_fix)
							if (rgcp[j].TryRemoveEdge(m1, out ge))
								tr.RudeTruncate(ge);
						return false;
					}
				}
				if (m1 != m2 && te2.IsTarget)
					u._prune_below(cp, m2);
			}

			if (rg_fix.Length == 3)
				return true;

			//////////////////////////////////////////////////

			cp = rgcp[rg_fix[3]];
			if (cp.TryGetEdge(m2, out te2.Edge))
			{
				cp.TryGetEdge(m1, out ne1);
				if (ne1.Mark != te2.Edge.Mark || ne1.FlagsId != te2.Edge.FlagsId)
				{
					if (!fn(cp, m1, ne1, te2))
					{
						foreach (int j in rg_fix)
							if (rgcp[j].TryRemoveEdge(m1, out ge))
								tr.RudeTruncate(ge);
						return false;
					}
				}
				if (m1 != m2 && te2.IsTarget)
					u._prune_below(cp, m2);
			}

			if (rg_fix.Length == 4)
				return true;

			//////////////////////////////////////////////////

			cp = rgcp[rg_fix[4]];
			if (cp.TryGetEdge(m2, out te2.Edge))
			{
				cp.TryGetEdge(m1, out ne1);
				if (ne1.Mark != te2.Edge.Mark || ne1.FlagsId != te2.Edge.FlagsId)
				{
					if (!fn(cp, m1, ne1, te2))
					{
						foreach (int j in rg_fix)
							if (rgcp[j].TryRemoveEdge(m1, out ge))
								tr.RudeTruncate(ge);
						return false;
					}
				}
				if (m1 != m2 && te2.IsTarget)
					u._prune_below(cp, m2);
			}

			if (rg_fix.Length == 5)
				return true;

			//////////////////////////////////////////////////

			cp = rgcp[rg_fix[5]];
			if (cp.TryGetEdge(m2, out te2.Edge))
			{
				cp.TryGetEdge(m1, out ne1);
				if (ne1.Mark != te2.Edge.Mark || ne1.FlagsId != te2.Edge.FlagsId)
				{
					if (!fn(cp, m1, ne1, te2))
					{
						foreach (int j in rg_fix)
							if (rgcp[j].TryRemoveEdge(m1, out ge))
								tr.RudeTruncate(ge);
						return false;
					}
				}
				if (m1 != m2 && te2.IsTarget)
					u._prune_below(cp, m2);
			}

			if (rg_fix.Length == 6)
				return true;

			//////////////////////////////////////////////////

			cp = rgcp[rg_fix[6]];
			if (cp.TryGetEdge(m2, out te2.Edge))
			{
				cp.TryGetEdge(m1, out ne1);
				if (ne1.Mark != te2.Edge.Mark || ne1.FlagsId != te2.Edge.FlagsId)
				{
					if (!fn(cp, m1, ne1, te2))
					{
						foreach (int j in rg_fix)
							if (rgcp[j].TryRemoveEdge(m1, out ge))
								tr.RudeTruncate(ge);
						return false;
					}
				}
				if (m1 != m2 && te2.IsTarget)
					u._prune_below(cp, m2);
			}

			if (rg_fix.Length == 7)
				return true;

			//////////////////////////////////////////////////

			cp = rgcp[rg_fix[7]];
			if (cp.TryGetEdge(m2, out te2.Edge))
			{
				cp.TryGetEdge(m1, out ne1);
				if (ne1.Mark != te2.Edge.Mark || ne1.FlagsId != te2.Edge.FlagsId)
				{
					if (!fn(cp, m1, ne1, te2))
					{
						foreach (int j in rg_fix)
							if (rgcp[j].TryRemoveEdge(m1, out ge))
								tr.RudeTruncate(ge);
						return false;
					}
				}
				if (m1 != m2 && te2.IsTarget)
					u._prune_below(cp, m2);
			}

			if (rg_fix.Length == 8)
				return true;

			//////////////////////////////////////////////////

			cp = rgcp[rg_fix[8]];
			if (cp.TryGetEdge(m2, out te2.Edge))
			{
				cp.TryGetEdge(m1, out ne1);
				if (ne1.Mark != te2.Edge.Mark || ne1.FlagsId != te2.Edge.FlagsId)
				{
					if (!fn(cp, m1, ne1, te2))
					{
						foreach (int j in rg_fix)
							if (rgcp[j].TryRemoveEdge(m1, out ge))
								tr.RudeTruncate(ge);
						return false;
					}
				}
				if (m1 != m2 && te2.IsTarget)
					u._prune_below(cp, m2);
			}

			if (rg_fix.Length == 9)
				return true;

			//////////////////////////////////////////////////

			cp = rgcp[rg_fix[9]];
			if (cp.TryGetEdge(m2, out te2.Edge))
			{
				cp.TryGetEdge(m1, out ne1);
				if (ne1.Mark != te2.Edge.Mark || ne1.FlagsId != te2.Edge.FlagsId)
				{
					if (!fn(cp, m1, ne1, te2))
					{
						foreach (int j in rg_fix)
							if (rgcp[j].TryRemoveEdge(m1, out ge))
								tr.RudeTruncate(ge);
						return false;
					}
				}
				if (m1 != m2 && te2.IsTarget)
					u._prune_below(cp, m2);
			}

			if (rg_fix.Length == 10)
				return true;

			//////////////////////////////////////////////////

			cp = rgcp[rg_fix[10]];
			if (cp.TryGetEdge(m2, out te2.Edge))
			{
				cp.TryGetEdge(m1, out ne1);
				if (ne1.Mark != te2.Edge.Mark || ne1.FlagsId != te2.Edge.FlagsId)
				{
					if (!fn(cp, m1, ne1, te2))
					{
						foreach (int j in rg_fix)
							if (rgcp[j].TryRemoveEdge(m1, out ge))
								tr.RudeTruncate(ge);
						return false;
					}
				}
				if (m1 != m2 && te2.IsTarget)
					u._prune_below(cp, m2);
			}

			if (rg_fix.Length == 11)
				return true;

			//////////////////////////////////////////////////

			cp = rgcp[rg_fix[11]];
			if (cp.TryGetEdge(m2, out te2.Edge))
			{
				cp.TryGetEdge(m1, out ne1);
				if (ne1.Mark != te2.Edge.Mark || ne1.FlagsId != te2.Edge.FlagsId)
				{
					if (!fn(cp, m1, ne1, te2))
					{
						foreach (int j in rg_fix)
							if (rgcp[j].TryRemoveEdge(m1, out ge))
								tr.RudeTruncate(ge);
						return false;
					}
				}
				if (m1 != m2 && te2.IsTarget)
					u._prune_below(cp, m2);
			}

			if (rg_fix.Length == 12)
				return true;

			//////////////////////////////////////////////////

			cp = rgcp[rg_fix[12]];
			if (cp.TryGetEdge(m2, out te2.Edge))
			{
				cp.TryGetEdge(m1, out ne1);
				if (ne1.Mark != te2.Edge.Mark || ne1.FlagsId != te2.Edge.FlagsId)
				{
					if (!fn(cp, m1, ne1, te2))
					{
						foreach (int j in rg_fix)
							if (rgcp[j].TryRemoveEdge(m1, out ge))
								tr.RudeTruncate(ge);
						return false;
					}
				}
				if (m1 != m2 && te2.IsTarget)
					u._prune_below(cp, m2);
			}

			if (rg_fix.Length == 13)
				return true;

			//////////////////////////////////////////////////

			cp = rgcp[rg_fix[13]];
			if (cp.TryGetEdge(m2, out te2.Edge))
			{
				cp.TryGetEdge(m1, out ne1);
				if (ne1.Mark != te2.Edge.Mark || ne1.FlagsId != te2.Edge.FlagsId)
				{
					if (!fn(cp, m1, ne1, te2))
					{
						foreach (int j in rg_fix)
							if (rgcp[j].TryRemoveEdge(m1, out ge))
								tr.RudeTruncate(ge);
						return false;
					}
				}
				if (m1 != m2 && te2.IsTarget)
					u._prune_below(cp, m2);
			}
#endif
			return true;
		}
#endif

		// Lazily-created, cached enumerable over the feature indexes (see _int_enumerable below).
		_int_enumerable ie = null;

		/// <summary>
		/// Enumerates the feature indexes of this configuration.
		/// </summary>
		public IEnumerator<int> GetEnumerator()
		{
			// Benign race: two threads may each build an _int_enumerable; the last write wins and
			// both instances are equivalent.
			ie = ie ?? new _int_enumerable(this);
			return ie.GetEnumerator();
		}

		IEnumerator System.Collections.IEnumerable.GetEnumerator()
		{
			// Fix: delegate to the generic enumerator instead of duplicating the lazy-init logic.
			return GetEnumerator();
		}


		/// <summary>
		/// Enumerable over a configuration's feature indexes. Each index is represented by a tiny
		/// emitted method that returns the constant value (avoiding a capturing-closure allocation
		/// per element); the thunks are cached process-wide by feature index.
		/// </summary>
		class _int_enumerable : IEnumerable<int>
		{
			// Per-instance thunks, one per feature index of the owning configuration.
			Func<int>[] rgfn;

			// Process-wide thunk cache, indexed by feature index; sized once from the total
			// feature count (Interlocked guards concurrent first-time initialization).
			static Func<int>[] lfn = null;

			public _int_enumerable(FeatureConfig fc)
			{
				if (lfn == null)
					Interlocked.CompareExchange(ref lfn, new Func<int>[fc.tm.Features.Count], null);
				this.rgfn = fc.rg_fix.Select(i =>
				{
					// Reuse a cached thunk when one exists for this in-range index.
					if (i < lfn.Length && lfn[i] != null)
						return lfn[i];

					// Emit: 'return <i>;' — a non-capturing constant-returning delegate.
					DynamicMethod dm = new DynamicMethod(String.Empty, typeof(int), null, typeof(_int_enumerable), true);
					ILGenerator il = dm.GetILGenerator(256);
					il.Emit(OpCodes.Ldc_I4, i);
					il.Emit(OpCodes.Ret);
					Func<int> fn = (Func<int>)dm.CreateDelegate(typeof(Func<int>));

					// Fix: the original tested 'i >= lfn.Length || lfn[i] == null' and then wrote
					// lfn[i] unconditionally, so an out-of-range index would throw
					// IndexOutOfRangeException at the assignment. Cache only in-range thunks.
					if (i < lfn.Length)
						lfn[i] = fn;
					return fn;
				}).ToArray();
			}

			public IEnumerator<int> GetEnumerator() { return new _enum(rgfn); }

			IEnumerator System.Collections.IEnumerable.GetEnumerator() { return new _enum(rgfn); }

			/// <summary>Enumerator over the thunk array; Current invokes the thunk for its constant.</summary>
			class _enum : IEnumerator<int>
			{
				// Starts at uint.MaxValue so the first MoveNext() wraps the index to 0.
				uint ix = 0xFFFFFFFF;
				Func<int> cur = null;
				Func<int>[] rgfn;

				public _enum(Func<int>[] rgfn) { this.rgfn = rgfn; }

				public int Current { get { return cur(); } }

				object IEnumerator.Current { get { return cur(); } }

				public bool MoveNext()
				{
					if (++ix >= rgfn.Length)
						return false;
					cur = rgfn[ix];
					return true;
				}

				public void Reset()
				{
					ix = 0xFFFFFFFF;
					cur = null;
				}

				public void Dispose() { }
			};
		};


	};
}